V8 Project
hydrogen.cc
1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/hydrogen.h"
6 
7 #include <algorithm>
8 
9 #include "src/v8.h"
10 
12 #include "src/codegen.h"
13 #include "src/full-codegen.h"
14 #include "src/hashmap.h"
15 #include "src/hydrogen-bce.h"
16 #include "src/hydrogen-bch.h"
19 #include "src/hydrogen-dce.h"
20 #include "src/hydrogen-dehoist.h"
23 #include "src/hydrogen-gvn.h"
29 #include "src/hydrogen-osr.h"
34 #include "src/hydrogen-sce.h"
38 #include "src/ic/ic.h"
39 // GetRootConstructor
40 #include "src/ic/ic-inl.h"
41 #include "src/lithium-allocator.h"
42 #include "src/parser.h"
43 #include "src/runtime/runtime.h"
44 #include "src/scopeinfo.h"
45 #include "src/scopes.h"
46 #include "src/typing.h"
47 
48 #if V8_TARGET_ARCH_IA32
49 #include "src/ia32/lithium-codegen-ia32.h" // NOLINT
50 #elif V8_TARGET_ARCH_X64
51 #include "src/x64/lithium-codegen-x64.h" // NOLINT
52 #elif V8_TARGET_ARCH_ARM64
53 #include "src/arm64/lithium-codegen-arm64.h" // NOLINT
54 #elif V8_TARGET_ARCH_ARM
55 #include "src/arm/lithium-codegen-arm.h" // NOLINT
56 #elif V8_TARGET_ARCH_MIPS
57 #include "src/mips/lithium-codegen-mips.h" // NOLINT
58 #elif V8_TARGET_ARCH_MIPS64
59 #include "src/mips64/lithium-codegen-mips64.h" // NOLINT
60 #elif V8_TARGET_ARCH_X87
61 #include "src/x87/lithium-codegen-x87.h" // NOLINT
62 #else
63 #error Unsupported target architecture.
64 #endif
65 
66 namespace v8 {
67 namespace internal {
68 
69 HBasicBlock::HBasicBlock(HGraph* graph)
70  : block_id_(graph->GetNextBlockID()),
71  graph_(graph),
72  phis_(4, graph->zone()),
73  first_(NULL),
74  last_(NULL),
75  end_(NULL),
76  loop_information_(NULL),
77  predecessors_(2, graph->zone()),
78  dominator_(NULL),
79  dominated_blocks_(4, graph->zone()),
80  last_environment_(NULL),
81  argument_count_(-1),
82  first_instruction_index_(-1),
83  last_instruction_index_(-1),
84  deleted_phis_(4, graph->zone()),
85  parent_loop_header_(NULL),
86  inlined_entry_block_(NULL),
87  is_inline_return_target_(false),
88  is_reachable_(true),
89  dominates_loop_successors_(false),
90  is_osr_entry_(false),
91  is_ordered_(false) { }
92 
93 
94 Isolate* HBasicBlock::isolate() const {
95  return graph_->isolate();
96 }
97 
98 
99 void HBasicBlock::MarkUnreachable() {
100  is_reachable_ = false;
101 }
102 
103 
104 void HBasicBlock::AttachLoopInformation() {
105  DCHECK(!IsLoopHeader());
106  loop_information_ = new(zone()) HLoopInformation(this, zone());
107 }
108 
109 
110 void HBasicBlock::DetachLoopInformation() {
111  DCHECK(IsLoopHeader());
112  loop_information_ = NULL;
113 }
114 
115 
116 void HBasicBlock::AddPhi(HPhi* phi) {
117  DCHECK(!IsStartBlock());
118  phis_.Add(phi, zone());
119  phi->SetBlock(this);
120 }
121 
122 
123 void HBasicBlock::RemovePhi(HPhi* phi) {
124  DCHECK(phi->block() == this);
125  DCHECK(phis_.Contains(phi));
126  phi->Kill();
127  phis_.RemoveElement(phi);
128  phi->SetBlock(NULL);
129 }
130 
131 
132 void HBasicBlock::AddInstruction(HInstruction* instr,
133  HSourcePosition position) {
134  DCHECK(!IsStartBlock() || !IsFinished());
135  DCHECK(!instr->IsLinked());
136  DCHECK(!IsFinished());
137 
138  if (!position.IsUnknown()) {
139  instr->set_position(position);
140  }
141  if (first_ == NULL) {
142  DCHECK(last_environment() != NULL);
143  DCHECK(!last_environment()->ast_id().IsNone());
144  HBlockEntry* entry = new(zone()) HBlockEntry();
145  entry->InitializeAsFirst(this);
146  if (!position.IsUnknown()) {
147  entry->set_position(position);
148  } else {
149  DCHECK(!FLAG_hydrogen_track_positions ||
150  !graph()->info()->IsOptimizing());
151  }
152  first_ = last_ = entry;
153  }
154  instr->InsertAfter(last_);
155 }
156 
157 
158 HPhi* HBasicBlock::AddNewPhi(int merged_index) {
159  if (graph()->IsInsideNoSideEffectsScope()) {
160  merged_index = HPhi::kInvalidMergedIndex;
161  }
162  HPhi* phi = new(zone()) HPhi(merged_index, zone());
163  AddPhi(phi);
164  return phi;
165 }
166 
167 
168 HSimulate* HBasicBlock::CreateSimulate(BailoutId ast_id,
169  RemovableSimulate removable) {
170  DCHECK(HasEnvironment());
171  HEnvironment* environment = last_environment();
172  DCHECK(ast_id.IsNone() ||
173  ast_id == BailoutId::StubEntry() ||
174  environment->closure()->shared()->VerifyBailoutId(ast_id));
175 
176  int push_count = environment->push_count();
177  int pop_count = environment->pop_count();
178 
179  HSimulate* instr =
180  new(zone()) HSimulate(ast_id, pop_count, zone(), removable);
181 #ifdef DEBUG
182  instr->set_closure(environment->closure());
183 #endif
184  // Order of pushed values: newest (top of stack) first. This allows
185  // HSimulate::MergeWith() to easily append additional pushed values
186  // that are older (from further down the stack).
187  for (int i = 0; i < push_count; ++i) {
188  instr->AddPushedValue(environment->ExpressionStackAt(i));
189  }
190  for (GrowableBitVector::Iterator it(environment->assigned_variables(),
191  zone());
192  !it.Done();
193  it.Advance()) {
194  int index = it.Current();
195  instr->AddAssignedValue(index, environment->Lookup(index));
196  }
197  environment->ClearHistory();
198  return instr;
199 }
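// A small concrete illustration of the ordering described above (hypothetical
// values, for illustration only): if the environment pushed x and then y, so
// that y is on top of the expression stack, the simulate records its pushed
// values as [y, x]; HSimulate::MergeWith() can then append values that were
// pushed even earlier (further down the stack) without reordering anything.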
200 
201 
202 void HBasicBlock::Finish(HControlInstruction* end, HSourcePosition position) {
203  DCHECK(!IsFinished());
204  AddInstruction(end, position);
205  end_ = end;
206  for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
207  it.Current()->RegisterPredecessor(this);
208  }
209 }
210 
211 
212 void HBasicBlock::Goto(HBasicBlock* block,
213  HSourcePosition position,
214  FunctionState* state,
215  bool add_simulate) {
216  bool drop_extra = state != NULL &&
217  state->inlining_kind() == NORMAL_RETURN;
218 
219  if (block->IsInlineReturnTarget()) {
220  HEnvironment* env = last_environment();
221  int argument_count = env->arguments_environment()->parameter_count();
222  AddInstruction(new(zone())
223  HLeaveInlined(state->entry(), argument_count),
224  position);
225  UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
226  }
227 
228  if (add_simulate) AddNewSimulate(BailoutId::None(), position);
229  HGoto* instr = new(zone()) HGoto(block);
230  Finish(instr, position);
231 }
232 
233 
234 void HBasicBlock::AddLeaveInlined(HValue* return_value,
235  FunctionState* state,
236  HSourcePosition position) {
237  HBasicBlock* target = state->function_return();
238  bool drop_extra = state->inlining_kind() == NORMAL_RETURN;
239 
240  DCHECK(target->IsInlineReturnTarget());
241  DCHECK(return_value != NULL);
242  HEnvironment* env = last_environment();
243  int argument_count = env->arguments_environment()->parameter_count();
244  AddInstruction(new(zone()) HLeaveInlined(state->entry(), argument_count),
245  position);
246  UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
247  last_environment()->Push(return_value);
248  AddNewSimulate(BailoutId::None(), position);
249  HGoto* instr = new(zone()) HGoto(target);
250  Finish(instr, position);
251 }
252 
253 
254 void HBasicBlock::SetInitialEnvironment(HEnvironment* env) {
255  DCHECK(!HasEnvironment());
256  DCHECK(first() == NULL);
257  UpdateEnvironment(env);
258 }
259 
260 
261 void HBasicBlock::UpdateEnvironment(HEnvironment* env) {
262  last_environment_ = env;
263  graph()->update_maximum_environment_size(env->first_expression_index());
264 }
265 
266 
267 void HBasicBlock::SetJoinId(BailoutId ast_id) {
268  int length = predecessors_.length();
269  DCHECK(length > 0);
270  for (int i = 0; i < length; i++) {
271  HBasicBlock* predecessor = predecessors_[i];
272  DCHECK(predecessor->end()->IsGoto());
273  HSimulate* simulate = HSimulate::cast(predecessor->end()->previous());
274  DCHECK(i != 0 ||
275  (predecessor->last_environment()->closure().is_null() ||
276  predecessor->last_environment()->closure()->shared()
277  ->VerifyBailoutId(ast_id)));
278  simulate->set_ast_id(ast_id);
279  predecessor->last_environment()->set_ast_id(ast_id);
280  }
281 }
282 
283 
284 bool HBasicBlock::Dominates(HBasicBlock* other) const {
285  HBasicBlock* current = other->dominator();
286  while (current != NULL) {
287  if (current == this) return true;
288  current = current->dominator();
289  }
290  return false;
291 }
292 
293 
294 bool HBasicBlock::EqualToOrDominates(HBasicBlock* other) const {
295  if (this == other) return true;
296  return Dominates(other);
297 }
298 
299 
300 int HBasicBlock::LoopNestingDepth() const {
301  const HBasicBlock* current = this;
302  int result = (current->IsLoopHeader()) ? 1 : 0;
303  while (current->parent_loop_header() != NULL) {
304  current = current->parent_loop_header();
305  result++;
306  }
307  return result;
308 }
309 
310 
311 void HBasicBlock::PostProcessLoopHeader(IterationStatement* stmt) {
312  DCHECK(IsLoopHeader());
313 
314  SetJoinId(stmt->EntryId());
315  if (predecessors()->length() == 1) {
316  // This is a degenerate loop.
317  DetachLoopInformation();
318  return;
319  }
320 
321  // Only the first entry into the loop is from outside the loop. All other
322  // entries must be back edges.
323  for (int i = 1; i < predecessors()->length(); ++i) {
324  loop_information()->RegisterBackEdge(predecessors()->at(i));
325  }
326 }
327 
328 
329 void HBasicBlock::MarkSuccEdgeUnreachable(int succ) {
330  DCHECK(IsFinished());
331  HBasicBlock* succ_block = end()->SuccessorAt(succ);
332 
333  DCHECK(succ_block->predecessors()->length() == 1);
334  succ_block->MarkUnreachable();
335 }
336 
337 
338 void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) {
339  if (HasPredecessor()) {
340  // Only loop header blocks can have a predecessor added after
341  // instructions have been added to the block (they have phis for all
342  // values in the environment; these phis may be eliminated later).
343  DCHECK(IsLoopHeader() || first_ == NULL);
344  HEnvironment* incoming_env = pred->last_environment();
345  if (IsLoopHeader()) {
346  DCHECK(phis()->length() == incoming_env->length());
347  for (int i = 0; i < phis_.length(); ++i) {
348  phis_[i]->AddInput(incoming_env->values()->at(i));
349  }
350  } else {
351  last_environment()->AddIncomingEdge(this, pred->last_environment());
352  }
353  } else if (!HasEnvironment() && !IsFinished()) {
354  DCHECK(!IsLoopHeader());
355  SetInitialEnvironment(pred->last_environment()->Copy());
356  }
357 
358  predecessors_.Add(pred, zone());
359 }
360 
361 
362 void HBasicBlock::AddDominatedBlock(HBasicBlock* block) {
363  DCHECK(!dominated_blocks_.Contains(block));
364  // Keep the list of dominated blocks sorted such that if there are two
365  // succeeding blocks in this list, the predecessor comes before the successor.
366  int index = 0;
367  while (index < dominated_blocks_.length() &&
368  dominated_blocks_[index]->block_id() < block->block_id()) {
369  ++index;
370  }
371  dominated_blocks_.InsertAt(index, block, zone());
372 }
373 
374 
375 void HBasicBlock::AssignCommonDominator(HBasicBlock* other) {
376  if (dominator_ == NULL) {
377  dominator_ = other;
378  other->AddDominatedBlock(this);
379  } else if (other->dominator() != NULL) {
380  HBasicBlock* first = dominator_;
381  HBasicBlock* second = other;
382 
383  while (first != second) {
384  if (first->block_id() > second->block_id()) {
385  first = first->dominator();
386  } else {
387  second = second->dominator();
388  }
389  DCHECK(first != NULL && second != NULL);
390  }
391 
392  if (dominator_ != first) {
393  DCHECK(dominator_->dominated_blocks_.Contains(this));
394  dominator_->dominated_blocks_.RemoveElement(this);
395  dominator_ = first;
396  first->AddDominatedBlock(this);
397  }
398  }
399 }
400 
401 
402 void HBasicBlock::AssignLoopSuccessorDominators() {
403  // Mark blocks that dominate all subsequent reachable blocks inside their
404  // loop. Exploit the fact that blocks are sorted in reverse post order. When
405  // the loop is visited in increasing block id order, if the number of
406  // non-loop-exiting successor edges at the dominator_candidate block doesn't
407  // exceed the number of previously encountered predecessor edges, there is no
408  // path from the loop header to any block with higher id that doesn't go
409  // through the dominator_candidate block. In this case, the
410  // dominator_candidate block is guaranteed to dominate all blocks reachable
411  // from it with higher ids.
412  HBasicBlock* last = loop_information()->GetLastBackEdge();
413  int outstanding_successors = 1; // one edge from the pre-header
414  // Header always dominates everything.
415  MarkAsLoopSuccessorDominator();
416  for (int j = block_id(); j <= last->block_id(); ++j) {
417  HBasicBlock* dominator_candidate = graph_->blocks()->at(j);
418  for (HPredecessorIterator it(dominator_candidate); !it.Done();
419  it.Advance()) {
420  HBasicBlock* predecessor = it.Current();
421  // Don't count back edges.
422  if (predecessor->block_id() < dominator_candidate->block_id()) {
423  outstanding_successors--;
424  }
425  }
426 
427  // If more successors than predecessors have been seen in the loop up to
428  // now, it's not possible to guarantee that the current block dominates
429  // all of the blocks with higher IDs. In this case, assume conservatively
430  // that those paths through the loop that don't go through the current block
431  // contain all of the loop's dependencies. Also be careful to record
432  // dominator information about the current loop that's being processed,
433  // and not nested loops, which will be processed when
434  // AssignLoopSuccessorDominators gets called on their header.
435  DCHECK(outstanding_successors >= 0);
436  HBasicBlock* parent_loop_header = dominator_candidate->parent_loop_header();
437  if (outstanding_successors == 0 &&
438  (parent_loop_header == this && !dominator_candidate->IsLoopHeader())) {
439  dominator_candidate->MarkAsLoopSuccessorDominator();
440  }
441  HControlInstruction* end = dominator_candidate->end();
442  for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
443  HBasicBlock* successor = it.Current();
444  // Only count successors that remain inside the loop and don't loop back
445  // to a loop header.
446  if (successor->block_id() > dominator_candidate->block_id() &&
447  successor->block_id() <= last->block_id()) {
448  // Backwards edges must land on loop headers.
449  DCHECK(successor->block_id() > dominator_candidate->block_id() ||
450  successor->IsLoopHeader());
451  outstanding_successors++;
452  }
453  }
454  }
455 }
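// A worked instance of the counting argument above, with hypothetical block
// ids: B3 is the loop header (marked unconditionally before the walk), B4 a
// two-way split, B5/B6 the diamond arms, B7 the join and only back edge; the
// preheader and the loop exits fall outside the id range [B3, B7].
//
//   B3: 1 (preheader edge) - 1 incoming = 0; successors are B4 and an exit
//       block with an id beyond B7, so only B4 counts      -> outstanding = 1
//   B4: 1 - 1 (edge from B3) = 0  => B4 marked; successors B5 and B6
//                                                          -> outstanding = 2
//   B5: 2 - 1 (from B4) = 1, not marked; successor B7      -> outstanding = 2
//   B6: 2 - 1 (from B4) = 1, not marked; successor B7      -> outstanding = 2
//   B7: 2 - 2 (from B5 and B6) = 0 => B7 marked; its back edge targets B3,
//       whose id is smaller, so it is not counted.
//
// B5 and B6 are correctly left unmarked: a path from B3 can bypass either one.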
456 
457 
458 int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor) const {
459  for (int i = 0; i < predecessors_.length(); ++i) {
460  if (predecessors_[i] == predecessor) return i;
461  }
462  UNREACHABLE();
463  return -1;
464 }
465 
466 
467 #ifdef DEBUG
468 void HBasicBlock::Verify() {
469  // Check that every block is finished.
470  DCHECK(IsFinished());
471  DCHECK(block_id() >= 0);
472 
473  // Check that the incoming edges are in edge split form.
474  if (predecessors_.length() > 1) {
475  for (int i = 0; i < predecessors_.length(); ++i) {
476  DCHECK(predecessors_[i]->end()->SecondSuccessor() == NULL);
477  }
478  }
479 }
480 #endif
481 
482 
483 void HLoopInformation::RegisterBackEdge(HBasicBlock* block) {
484  this->back_edges_.Add(block, block->zone());
485  AddBlock(block);
486 }
487 
488 
489 HBasicBlock* HLoopInformation::GetLastBackEdge() const {
490  int max_id = -1;
491  HBasicBlock* result = NULL;
492  for (int i = 0; i < back_edges_.length(); ++i) {
493  HBasicBlock* cur = back_edges_[i];
494  if (cur->block_id() > max_id) {
495  max_id = cur->block_id();
496  result = cur;
497  }
498  }
499  return result;
500 }
501 
502 
503 void HLoopInformation::AddBlock(HBasicBlock* block) {
504  if (block == loop_header()) return;
505  if (block->parent_loop_header() == loop_header()) return;
506  if (block->parent_loop_header() != NULL) {
507  AddBlock(block->parent_loop_header());
508  } else {
509  block->set_parent_loop_header(loop_header());
510  blocks_.Add(block, block->zone());
511  for (int i = 0; i < block->predecessors()->length(); ++i) {
512  AddBlock(block->predecessors()->at(i));
513  }
514  }
515 }
516 
517 
518 #ifdef DEBUG
519 
520 // Checks reachability of the blocks in this graph and stores a bit in
521 // the BitVector "reachable()" for every block that can be reached
522 // from the start block of the graph. If "dont_visit" is non-null, the given
523  // block is treated as if it were not part of the graph. "visited_count()"
524 // returns the number of reachable blocks.
525 class ReachabilityAnalyzer BASE_EMBEDDED {
526  public:
527  ReachabilityAnalyzer(HBasicBlock* entry_block,
528  int block_count,
529  HBasicBlock* dont_visit)
530  : visited_count_(0),
531  stack_(16, entry_block->zone()),
532  reachable_(block_count, entry_block->zone()),
533  dont_visit_(dont_visit) {
534  PushBlock(entry_block);
535  Analyze();
536  }
537 
538  int visited_count() const { return visited_count_; }
539  const BitVector* reachable() const { return &reachable_; }
540 
541  private:
542  void PushBlock(HBasicBlock* block) {
543  if (block != NULL && block != dont_visit_ &&
544  !reachable_.Contains(block->block_id())) {
545  reachable_.Add(block->block_id());
546  stack_.Add(block, block->zone());
547  visited_count_++;
548  }
549  }
550 
551  void Analyze() {
552  while (!stack_.is_empty()) {
553  HControlInstruction* end = stack_.RemoveLast()->end();
554  for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
555  PushBlock(it.Current());
556  }
557  }
558  }
559 
560  int visited_count_;
561  ZoneList<HBasicBlock*> stack_;
562  BitVector reachable_;
563  HBasicBlock* dont_visit_;
564 };
565 
566 
567 void HGraph::Verify(bool do_full_verify) const {
568  Heap::RelocationLock relocation_lock(isolate()->heap());
569  AllowHandleDereference allow_deref;
570  AllowDeferredHandleDereference allow_deferred_deref;
571  for (int i = 0; i < blocks_.length(); i++) {
572  HBasicBlock* block = blocks_.at(i);
573 
574  block->Verify();
575 
576  // Check that every block contains at least one node and that only the last
577  // node is a control instruction.
578  HInstruction* current = block->first();
579  DCHECK(current != NULL && current->IsBlockEntry());
580  while (current != NULL) {
581  DCHECK((current->next() == NULL) == current->IsControlInstruction());
582  DCHECK(current->block() == block);
583  current->Verify();
584  current = current->next();
585  }
586 
587  // Check that successors are correctly set.
588  HBasicBlock* first = block->end()->FirstSuccessor();
589  HBasicBlock* second = block->end()->SecondSuccessor();
590  DCHECK(second == NULL || first != NULL);
591 
592  // Check that the predecessor array is correct.
593  if (first != NULL) {
594  DCHECK(first->predecessors()->Contains(block));
595  if (second != NULL) {
596  DCHECK(second->predecessors()->Contains(block));
597  }
598  }
599 
600  // Check that phis have correct arguments.
601  for (int j = 0; j < block->phis()->length(); j++) {
602  HPhi* phi = block->phis()->at(j);
603  phi->Verify();
604  }
605 
606  // Check that all join blocks have predecessors that end with an
607  // unconditional goto and agree on their environment node id.
608  if (block->predecessors()->length() >= 2) {
609  BailoutId id =
610  block->predecessors()->first()->last_environment()->ast_id();
611  for (int k = 0; k < block->predecessors()->length(); k++) {
612  HBasicBlock* predecessor = block->predecessors()->at(k);
613  DCHECK(predecessor->end()->IsGoto() ||
614  predecessor->end()->IsDeoptimize());
615  DCHECK(predecessor->last_environment()->ast_id() == id);
616  }
617  }
618  }
619 
620  // Check special property of first block to have no predecessors.
621  DCHECK(blocks_.at(0)->predecessors()->is_empty());
622 
623  if (do_full_verify) {
624  // Check that the graph is fully connected.
625  ReachabilityAnalyzer analyzer(entry_block_, blocks_.length(), NULL);
626  DCHECK(analyzer.visited_count() == blocks_.length());
627 
628  // Check that entry block dominator is NULL.
629  DCHECK(entry_block_->dominator() == NULL);
630 
631  // Check dominators.
632  for (int i = 0; i < blocks_.length(); ++i) {
633  HBasicBlock* block = blocks_.at(i);
634  if (block->dominator() == NULL) {
635  // Only the start block may have no dominator assigned to it.
636  DCHECK(i == 0);
637  } else {
638  // Assert that block is unreachable if dominator must not be visited.
639  ReachabilityAnalyzer dominator_analyzer(entry_block_,
640  blocks_.length(),
641  block->dominator());
642  DCHECK(!dominator_analyzer.reachable()->Contains(block->block_id()));
643  }
644  }
645  }
646 }
647 
648 #endif
649 
650 
651 HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer,
652  int32_t value) {
653  if (!pointer->is_set()) {
654  // Can't pass GetInvalidContext() to HConstant::New, because that will
655  // recursively call GetConstant
656  HConstant* constant = HConstant::New(zone(), NULL, value);
657  constant->InsertAfter(entry_block()->first());
658  pointer->set(constant);
659  return constant;
660  }
661  return ReinsertConstantIfNecessary(pointer->get());
662 }
663 
664 
665 HConstant* HGraph::ReinsertConstantIfNecessary(HConstant* constant) {
666  if (!constant->IsLinked()) {
667  // The constant was removed from the graph. Reinsert.
668  constant->ClearFlag(HValue::kIsDead);
669  constant->InsertAfter(entry_block()->first());
670  }
671  return constant;
672 }
673 
674 
675 HConstant* HGraph::GetConstant0() {
676  return GetConstant(&constant_0_, 0);
677 }
678 
679 
680 HConstant* HGraph::GetConstant1() {
681  return GetConstant(&constant_1_, 1);
682 }
683 
684 
685 HConstant* HGraph::GetConstantMinus1() {
686  return GetConstant(&constant_minus1_, -1);
687 }
688 
689 
690 #define DEFINE_GET_CONSTANT(Name, name, type, htype, boolean_value) \
691 HConstant* HGraph::GetConstant##Name() { \
692  if (!constant_##name##_.is_set()) { \
693  HConstant* constant = new(zone()) HConstant( \
694  Unique<Object>::CreateImmovable(isolate()->factory()->name##_value()), \
695  Unique<Map>::CreateImmovable(isolate()->factory()->type##_map()), \
696  false, \
697  Representation::Tagged(), \
698  htype, \
699  true, \
700  boolean_value, \
701  false, \
702  ODDBALL_TYPE); \
703  constant->InsertAfter(entry_block()->first()); \
704  constant_##name##_.set(constant); \
705  } \
706  return ReinsertConstantIfNecessary(constant_##name##_.get()); \
707 }
708 
709 
710 DEFINE_GET_CONSTANT(Undefined, undefined, undefined, HType::Undefined(), false)
711 DEFINE_GET_CONSTANT(True, true, boolean, HType::Boolean(), true)
712 DEFINE_GET_CONSTANT(False, false, boolean, HType::Boolean(), false)
713 DEFINE_GET_CONSTANT(Hole, the_hole, the_hole, HType::None(), false)
714 DEFINE_GET_CONSTANT(Null, null, null, HType::Null(), false)
715 
716 
717 #undef DEFINE_GET_CONSTANT
718 
719 #define DEFINE_IS_CONSTANT(Name, name) \
720 bool HGraph::IsConstant##Name(HConstant* constant) { \
721  return constant_##name##_.is_set() && constant == constant_##name##_.get(); \
722 }
723 DEFINE_IS_CONSTANT(Undefined, undefined)
724 DEFINE_IS_CONSTANT(0, 0)
725 DEFINE_IS_CONSTANT(1, 1)
726 DEFINE_IS_CONSTANT(Minus1, minus1)
727 DEFINE_IS_CONSTANT(True, true)
728 DEFINE_IS_CONSTANT(False, false)
729 DEFINE_IS_CONSTANT(Hole, the_hole)
730 DEFINE_IS_CONSTANT(Null, null)
731 
732 #undef DEFINE_IS_CONSTANT
733 
734 
735 HConstant* HGraph::GetInvalidContext() {
736  return GetConstant(&constant_invalid_context_, 0xFFFFC0C7);
737 }
738 
739 
740 bool HGraph::IsStandardConstant(HConstant* constant) {
741  if (IsConstantUndefined(constant)) return true;
742  if (IsConstant0(constant)) return true;
743  if (IsConstant1(constant)) return true;
744  if (IsConstantMinus1(constant)) return true;
745  if (IsConstantTrue(constant)) return true;
746  if (IsConstantFalse(constant)) return true;
747  if (IsConstantHole(constant)) return true;
748  if (IsConstantNull(constant)) return true;
749  return false;
750 }
751 
752 
753 HGraphBuilder::IfBuilder::IfBuilder() : builder_(NULL), needs_compare_(true) {}
754 
755 
756 HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder)
757  : needs_compare_(true) {
758  Initialize(builder);
759 }
760 
761 
762 HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder,
763  HIfContinuation* continuation)
764  : needs_compare_(false), first_true_block_(NULL), first_false_block_(NULL) {
765  InitializeDontCreateBlocks(builder);
766  continuation->Continue(&first_true_block_, &first_false_block_);
767 }
768 
769 
770 void HGraphBuilder::IfBuilder::InitializeDontCreateBlocks(
771  HGraphBuilder* builder) {
772  builder_ = builder;
773  finished_ = false;
774  did_then_ = false;
775  did_else_ = false;
776  did_else_if_ = false;
777  did_and_ = false;
778  did_or_ = false;
779  captured_ = false;
780  pending_merge_block_ = false;
781  split_edge_merge_block_ = NULL;
782  merge_at_join_blocks_ = NULL;
783  normal_merge_at_join_block_count_ = 0;
784  deopt_merge_at_join_block_count_ = 0;
785 }
786 
787 
788 void HGraphBuilder::IfBuilder::Initialize(HGraphBuilder* builder) {
789  InitializeDontCreateBlocks(builder);
790  HEnvironment* env = builder->environment();
791  first_true_block_ = builder->CreateBasicBlock(env->Copy());
792  first_false_block_ = builder->CreateBasicBlock(env->Copy());
793 }
794 
795 
796 HControlInstruction* HGraphBuilder::IfBuilder::AddCompare(
797  HControlInstruction* compare) {
798  DCHECK(did_then_ == did_else_);
799  if (did_else_) {
800  // Handle if-then-elseif
801  did_else_if_ = true;
802  did_else_ = false;
803  did_then_ = false;
804  did_and_ = false;
805  did_or_ = false;
806  pending_merge_block_ = false;
807  split_edge_merge_block_ = NULL;
808  HEnvironment* env = builder()->environment();
809  first_true_block_ = builder()->CreateBasicBlock(env->Copy());
810  first_false_block_ = builder()->CreateBasicBlock(env->Copy());
811  }
812  if (split_edge_merge_block_ != NULL) {
813  HEnvironment* env = first_false_block_->last_environment();
814  HBasicBlock* split_edge = builder()->CreateBasicBlock(env->Copy());
815  if (did_or_) {
816  compare->SetSuccessorAt(0, split_edge);
817  compare->SetSuccessorAt(1, first_false_block_);
818  } else {
819  compare->SetSuccessorAt(0, first_true_block_);
820  compare->SetSuccessorAt(1, split_edge);
821  }
822  builder()->GotoNoSimulate(split_edge, split_edge_merge_block_);
823  } else {
824  compare->SetSuccessorAt(0, first_true_block_);
825  compare->SetSuccessorAt(1, first_false_block_);
826  }
827  builder()->FinishCurrentBlock(compare);
828  needs_compare_ = false;
829  return compare;
830 }
831 
832 
833 void HGraphBuilder::IfBuilder::Or() {
834  DCHECK(!needs_compare_);
835  DCHECK(!did_and_);
836  did_or_ = true;
837  HEnvironment* env = first_false_block_->last_environment();
838  if (split_edge_merge_block_ == NULL) {
839  split_edge_merge_block_ = builder()->CreateBasicBlock(env->Copy());
840  builder()->GotoNoSimulate(first_true_block_, split_edge_merge_block_);
841  first_true_block_ = split_edge_merge_block_;
842  }
843  builder()->set_current_block(first_false_block_);
844  first_false_block_ = builder()->CreateBasicBlock(env->Copy());
845 }
846 
847 
848 void HGraphBuilder::IfBuilder::And() {
849  DCHECK(!needs_compare_);
850  DCHECK(!did_or_);
851  did_and_ = true;
852  HEnvironment* env = first_false_block_->last_environment();
853  if (split_edge_merge_block_ == NULL) {
854  split_edge_merge_block_ = builder()->CreateBasicBlock(env->Copy());
855  builder()->GotoNoSimulate(first_false_block_, split_edge_merge_block_);
856  first_false_block_ = split_edge_merge_block_;
857  }
858  builder()->set_current_block(first_true_block_);
859  first_true_block_ = builder()->CreateBasicBlock(env->Copy());
860 }
861 
862 
863 void HGraphBuilder::IfBuilder::CaptureContinuation(
864  HIfContinuation* continuation) {
865  DCHECK(!did_else_if_);
866  DCHECK(!finished_);
867  DCHECK(!captured_);
868 
869  HBasicBlock* true_block = NULL;
870  HBasicBlock* false_block = NULL;
871  Finish(&true_block, &false_block);
872  DCHECK(true_block != NULL);
873  DCHECK(false_block != NULL);
874  continuation->Capture(true_block, false_block);
875  captured_ = true;
876  builder()->set_current_block(NULL);
877  End();
878 }
879 
880 
881 void HGraphBuilder::IfBuilder::JoinContinuation(HIfContinuation* continuation) {
882  DCHECK(!did_else_if_);
883  DCHECK(!finished_);
884  DCHECK(!captured_);
885  HBasicBlock* true_block = NULL;
886  HBasicBlock* false_block = NULL;
887  Finish(&true_block, &false_block);
888  merge_at_join_blocks_ = NULL;
889  if (true_block != NULL && !true_block->IsFinished()) {
890  DCHECK(continuation->IsTrueReachable());
891  builder()->GotoNoSimulate(true_block, continuation->true_branch());
892  }
893  if (false_block != NULL && !false_block->IsFinished()) {
894  DCHECK(continuation->IsFalseReachable());
895  builder()->GotoNoSimulate(false_block, continuation->false_branch());
896  }
897  captured_ = true;
898  End();
899 }
900 
901 
902 void HGraphBuilder::IfBuilder::Then() {
903  DCHECK(!captured_);
904  DCHECK(!finished_);
905  did_then_ = true;
906  if (needs_compare_) {
907  // Handle ifs without any expressions: they jump directly to the "else"
908  // branch. However, we must pretend that the "then" branch is reachable,
909  // so that the graph builder visits it and sees any live range extending
910  // constructs within it.
911  HConstant* constant_false = builder()->graph()->GetConstantFalse();
912  ToBooleanStub::Types boolean_type = ToBooleanStub::Types();
913  boolean_type.Add(ToBooleanStub::BOOLEAN);
914  HBranch* branch = builder()->New<HBranch>(
915  constant_false, boolean_type, first_true_block_, first_false_block_);
916  builder()->FinishCurrentBlock(branch);
917  }
918  builder()->set_current_block(first_true_block_);
919  pending_merge_block_ = true;
920 }
921 
922 
923 void HGraphBuilder::IfBuilder::Else() {
924  DCHECK(did_then_);
925  DCHECK(!captured_);
926  DCHECK(!finished_);
927  AddMergeAtJoinBlock(false);
928  builder()->set_current_block(first_false_block_);
929  pending_merge_block_ = true;
930  did_else_ = true;
931 }
932 
933 
934 void HGraphBuilder::IfBuilder::Deopt(const char* reason) {
935  DCHECK(did_then_);
936  builder()->Add<HDeoptimize>(reason, Deoptimizer::EAGER);
937  AddMergeAtJoinBlock(true);
938 }
939 
940 
941 void HGraphBuilder::IfBuilder::Return(HValue* value) {
942  HValue* parameter_count = builder()->graph()->GetConstantMinus1();
943  builder()->FinishExitCurrentBlock(
944  builder()->New<HReturn>(value, parameter_count));
945  AddMergeAtJoinBlock(false);
946 }
947 
948 
949 void HGraphBuilder::IfBuilder::AddMergeAtJoinBlock(bool deopt) {
950  if (!pending_merge_block_) return;
951  HBasicBlock* block = builder()->current_block();
952  DCHECK(block == NULL || !block->IsFinished());
953  MergeAtJoinBlock* record = new (builder()->zone())
954  MergeAtJoinBlock(block, deopt, merge_at_join_blocks_);
955  merge_at_join_blocks_ = record;
956  if (block != NULL) {
957  DCHECK(block->end() == NULL);
958  if (deopt) {
959  normal_merge_at_join_block_count_++;
960  } else {
961  deopt_merge_at_join_block_count_++;
962  }
963  }
964  builder()->set_current_block(NULL);
965  pending_merge_block_ = false;
966 }
967 
968 
969 void HGraphBuilder::IfBuilder::Finish() {
970  DCHECK(!finished_);
971  if (!did_then_) {
972  Then();
973  }
974  AddMergeAtJoinBlock(false);
975  if (!did_else_) {
976  Else();
977  AddMergeAtJoinBlock(false);
978  }
979  finished_ = true;
980 }
981 
982 
983 void HGraphBuilder::IfBuilder::Finish(HBasicBlock** then_continuation,
984  HBasicBlock** else_continuation) {
985  Finish();
986 
987  MergeAtJoinBlock* else_record = merge_at_join_blocks_;
988  if (else_continuation != NULL) {
989  *else_continuation = else_record->block_;
990  }
991  MergeAtJoinBlock* then_record = else_record->next_;
992  if (then_continuation != NULL) {
993  *then_continuation = then_record->block_;
994  }
995  DCHECK(then_record->next_ == NULL);
996 }
997 
998 
999 void HGraphBuilder::IfBuilder::End() {
1000  if (captured_) return;
1001  Finish();
1002 
1003  int total_merged_blocks = normal_merge_at_join_block_count_ +
1004  deopt_merge_at_join_block_count_;
1005  DCHECK(total_merged_blocks >= 1);
1006  HBasicBlock* merge_block =
1007  total_merged_blocks == 1 ? NULL : builder()->graph()->CreateBasicBlock();
1008 
1009  // Merge non-deopt blocks first to ensure the environment has the right
1010  // size for padding.
1011  MergeAtJoinBlock* current = merge_at_join_blocks_;
1012  while (current != NULL) {
1013  if (!current->deopt_ && current->block_ != NULL) {
1014  // If there is only one block that makes it through to the end of the
1015  // if, then just set it as the current block and continue rather than
1016  // creating an unnecessary merge block.
1017  if (total_merged_blocks == 1) {
1018  builder()->set_current_block(current->block_);
1019  return;
1020  }
1021  builder()->GotoNoSimulate(current->block_, merge_block);
1022  }
1023  current = current->next_;
1024  }
1025 
1026  // Merge deopt blocks, padding when necessary.
1027  current = merge_at_join_blocks_;
1028  while (current != NULL) {
1029  if (current->deopt_ && current->block_ != NULL) {
1030  current->block_->FinishExit(HAbnormalExit::New(builder()->zone(), NULL),
1031  HSourcePosition::Unknown());
1032  }
1033  current = current->next_;
1034  }
1035  builder()->set_current_block(merge_block);
1036 }
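// A minimal usage sketch of the builder protocol implemented above, following
// the pattern used by BuildCopyElementsOnWrite() later in this file (the
// condition and the pushed values here are hypothetical):
//
//   IfBuilder checker(this);
//   checker.If<HCompareObjectEqAndBranch>(left, right);
//   checker.Then();
//   environment()->Push(value_if_equal);      // both arms push one value...
//   checker.Else();
//   environment()->Push(value_if_not_equal);
//   checker.End();                            // ...End() merges the arms
//   HValue* result = environment()->Pop();
//
// CaptureContinuation()/JoinContinuation() replace End() when the true/false
// blocks should be handed off via an HIfContinuation instead of merged here.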
1037 
1038 
1039 HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder) {
1040  Initialize(builder, NULL, kWhileTrue, NULL);
1041 }
1042 
1043 
1044 HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder, HValue* context,
1045  LoopBuilder::Direction direction) {
1046  Initialize(builder, context, direction, builder->graph()->GetConstant1());
1047 }
1048 
1049 
1050 HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder, HValue* context,
1051  LoopBuilder::Direction direction,
1052  HValue* increment_amount) {
1053  Initialize(builder, context, direction, increment_amount);
1054  increment_amount_ = increment_amount;
1055 }
1056 
1057 
1058 void HGraphBuilder::LoopBuilder::Initialize(HGraphBuilder* builder,
1059  HValue* context,
1060  Direction direction,
1061  HValue* increment_amount) {
1062  builder_ = builder;
1063  context_ = context;
1064  direction_ = direction;
1065  increment_amount_ = increment_amount;
1066 
1067  finished_ = false;
1068  header_block_ = builder->CreateLoopHeaderBlock();
1069  body_block_ = NULL;
1070  exit_block_ = NULL;
1071  exit_trampoline_block_ = NULL;
1072 }
1073 
1074 
1075 HValue* HGraphBuilder::LoopBuilder::BeginBody(
1076  HValue* initial,
1077  HValue* terminating,
1078  Token::Value token) {
1079  DCHECK(direction_ != kWhileTrue);
1080  HEnvironment* env = builder_->environment();
1081  phi_ = header_block_->AddNewPhi(env->values()->length());
1082  phi_->AddInput(initial);
1083  env->Push(initial);
1084  builder_->GotoNoSimulate(header_block_);
1085 
1086  HEnvironment* body_env = env->Copy();
1087  HEnvironment* exit_env = env->Copy();
1088  // Remove the phi from the expression stack
1089  body_env->Pop();
1090  exit_env->Pop();
1091  body_block_ = builder_->CreateBasicBlock(body_env);
1092  exit_block_ = builder_->CreateBasicBlock(exit_env);
1093 
1094  builder_->set_current_block(header_block_);
1095  env->Pop();
1096  builder_->FinishCurrentBlock(builder_->New<HCompareNumericAndBranch>(
1097  phi_, terminating, token, body_block_, exit_block_));
1098 
1099  builder_->set_current_block(body_block_);
1100  if (direction_ == kPreIncrement || direction_ == kPreDecrement) {
1101  HValue* one = builder_->graph()->GetConstant1();
1102  if (direction_ == kPreIncrement) {
1103  increment_ = HAdd::New(zone(), context_, phi_, one);
1104  } else {
1105  increment_ = HSub::New(zone(), context_, phi_, one);
1106  }
1107  increment_->ClearFlag(HValue::kCanOverflow);
1108  builder_->AddInstruction(increment_);
1109  return increment_;
1110  } else {
1111  return phi_;
1112  }
1113 }
1114 
1115 
1116 void HGraphBuilder::LoopBuilder::BeginBody(int drop_count) {
1117  DCHECK(direction_ == kWhileTrue);
1118  HEnvironment* env = builder_->environment();
1119  builder_->GotoNoSimulate(header_block_);
1120  builder_->set_current_block(header_block_);
1121  env->Drop(drop_count);
1122 }
1123 
1124 
1125 void HGraphBuilder::LoopBuilder::Break() {
1126  if (exit_trampoline_block_ == NULL) {
1127  // It's the first time we saw a break.
1128  if (direction_ == kWhileTrue) {
1129  HEnvironment* env = builder_->environment()->Copy();
1130  exit_trampoline_block_ = builder_->CreateBasicBlock(env);
1131  } else {
1132  HEnvironment* env = exit_block_->last_environment()->Copy();
1133  exit_trampoline_block_ = builder_->CreateBasicBlock(env);
1134  builder_->GotoNoSimulate(exit_block_, exit_trampoline_block_);
1135  }
1136  }
1137 
1138  builder_->GotoNoSimulate(exit_trampoline_block_);
1139  builder_->set_current_block(NULL);
1140 }
1141 
1142 
1143 void HGraphBuilder::LoopBuilder::EndBody() {
1144  DCHECK(!finished_);
1145 
1146  if (direction_ == kPostIncrement || direction_ == kPostDecrement) {
1147  if (direction_ == kPostIncrement) {
1148  increment_ = HAdd::New(zone(), context_, phi_, increment_amount_);
1149  } else {
1150  increment_ = HSub::New(zone(), context_, phi_, increment_amount_);
1151  }
1152  increment_->ClearFlag(HValue::kCanOverflow);
1153  builder_->AddInstruction(increment_);
1154  }
1155 
1156  if (direction_ != kWhileTrue) {
1157  // Push the new increment value on the expression stack to merge into
1158  // the phi.
1159  builder_->environment()->Push(increment_);
1160  }
1161  HBasicBlock* last_block = builder_->current_block();
1162  builder_->GotoNoSimulate(last_block, header_block_);
1163  header_block_->loop_information()->RegisterBackEdge(last_block);
1164 
1165  if (exit_trampoline_block_ != NULL) {
1166  builder_->set_current_block(exit_trampoline_block_);
1167  } else {
1168  builder_->set_current_block(exit_block_);
1169  }
1170  finished_ = true;
1171 }
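// A minimal usage sketch of the loop builder above (the bounds and the body
// are hypothetical; the pattern follows the element-copy helpers later in
// this file):
//
//   LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
//   HValue* index = loop.BeginBody(graph()->GetConstant0(), length, Token::LT);
//   {
//     // ... emit the loop body using |index| ...
//     // loop.Break() leaves the loop early via the exit trampoline block.
//   }
//   loop.EndBody();
//
// kWhileTrue loops use BeginBody(int drop_count) instead and can only leave
// the loop through Break().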
1172 
1173 
1174 HGraph* HGraphBuilder::CreateGraph() {
1175  graph_ = new(zone()) HGraph(info_);
1176  if (FLAG_hydrogen_stats) isolate()->GetHStatistics()->Initialize(info_);
1177  CompilationPhase phase("H_Block building", info_);
1178  set_current_block(graph()->entry_block());
1179  if (!BuildGraph()) return NULL;
1180  graph()->FinalizeUniqueness();
1181  return graph_;
1182 }
1183 
1184 
1185 HInstruction* HGraphBuilder::AddInstruction(HInstruction* instr) {
1186  DCHECK(current_block() != NULL);
1187  DCHECK(!FLAG_hydrogen_track_positions ||
1188  !position_.IsUnknown() ||
1189  !info_->IsOptimizing());
1190  current_block()->AddInstruction(instr, source_position());
1191  if (graph()->IsInsideNoSideEffectsScope()) {
1192  instr->SetFlag(HValue::kHasNoObservableSideEffects);
1193  }
1194  return instr;
1195 }
1196 
1197 
1198 void HGraphBuilder::FinishCurrentBlock(HControlInstruction* last) {
1199  DCHECK(!FLAG_hydrogen_track_positions ||
1200  !info_->IsOptimizing() ||
1201  !position_.IsUnknown());
1202  current_block()->Finish(last, source_position());
1203  if (last->IsReturn() || last->IsAbnormalExit()) {
1204  set_current_block(NULL);
1205  }
1206 }
1207 
1208 
1209 void HGraphBuilder::FinishExitCurrentBlock(HControlInstruction* instruction) {
1210  DCHECK(!FLAG_hydrogen_track_positions || !info_->IsOptimizing() ||
1211  !position_.IsUnknown());
1212  current_block()->FinishExit(instruction, source_position());
1213  if (instruction->IsReturn() || instruction->IsAbnormalExit()) {
1214  set_current_block(NULL);
1215  }
1216 }
1217 
1218 
1219 void HGraphBuilder::AddIncrementCounter(StatsCounter* counter) {
1220  if (FLAG_native_code_counters && counter->Enabled()) {
1221  HValue* reference = Add<HConstant>(ExternalReference(counter));
1222  HValue* old_value = Add<HLoadNamedField>(
1223  reference, static_cast<HValue*>(NULL), HObjectAccess::ForCounter());
1224  HValue* new_value = AddUncasted<HAdd>(old_value, graph()->GetConstant1());
1225  new_value->ClearFlag(HValue::kCanOverflow); // Ignore counter overflow
1226  Add<HStoreNamedField>(reference, HObjectAccess::ForCounter(),
1227  new_value, STORE_TO_INITIALIZED_ENTRY);
1228  }
1229 }
1230 
1231 
1232 void HGraphBuilder::AddSimulate(BailoutId id,
1233  RemovableSimulate removable) {
1234  DCHECK(current_block() != NULL);
1235  DCHECK(!graph()->IsInsideNoSideEffectsScope());
1236  current_block()->AddNewSimulate(id, source_position(), removable);
1237 }
1238 
1239 
1240 HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) {
1241  HBasicBlock* b = graph()->CreateBasicBlock();
1242  b->SetInitialEnvironment(env);
1243  return b;
1244 }
1245 
1246 
1247 HBasicBlock* HGraphBuilder::CreateLoopHeaderBlock() {
1248  HBasicBlock* header = graph()->CreateBasicBlock();
1249  HEnvironment* entry_env = environment()->CopyAsLoopHeader(header);
1250  header->SetInitialEnvironment(entry_env);
1251  header->AttachLoopInformation();
1252  return header;
1253 }
1254 
1255 
1256 HValue* HGraphBuilder::BuildGetElementsKind(HValue* object) {
1257  HValue* map = Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
1258  HObjectAccess::ForMap());
1259 
1260  HValue* bit_field2 = Add<HLoadNamedField>(map, static_cast<HValue*>(NULL),
1261  HObjectAccess::ForMapBitField2());
1262  return BuildDecodeField<Map::ElementsKindBits>(bit_field2);
1263 }
1264 
1265 
1266 HValue* HGraphBuilder::BuildCheckHeapObject(HValue* obj) {
1267  if (obj->type().IsHeapObject()) return obj;
1268  return Add<HCheckHeapObject>(obj);
1269 }
1270 
1271 
1272 void HGraphBuilder::FinishExitWithHardDeoptimization(const char* reason) {
1273  Add<HDeoptimize>(reason, Deoptimizer::EAGER);
1274  FinishExitCurrentBlock(New<HAbnormalExit>());
1275 }
1276 
1277 
1278 HValue* HGraphBuilder::BuildCheckString(HValue* string) {
1279  if (!string->type().IsString()) {
1280  DCHECK(!string->IsConstant() ||
1281  !HConstant::cast(string)->HasStringValue());
1282  BuildCheckHeapObject(string);
1283  return Add<HCheckInstanceType>(string, HCheckInstanceType::IS_STRING);
1284  }
1285  return string;
1286 }
1287 
1288 
1289 HValue* HGraphBuilder::BuildWrapReceiver(HValue* object, HValue* function) {
1290  if (object->type().IsJSObject()) return object;
1291  if (function->IsConstant() &&
1292  HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
1293  Handle<JSFunction> f = Handle<JSFunction>::cast(
1294  HConstant::cast(function)->handle(isolate()));
1295  SharedFunctionInfo* shared = f->shared();
1296  if (shared->strict_mode() == STRICT || shared->native()) return object;
1297  }
1298  return Add<HWrapReceiver>(object, function);
1299 }
1300 
1301 
1302 HValue* HGraphBuilder::BuildCheckForCapacityGrow(
1303  HValue* object,
1304  HValue* elements,
1305  ElementsKind kind,
1306  HValue* length,
1307  HValue* key,
1308  bool is_js_array,
1309  PropertyAccessType access_type) {
1310  IfBuilder length_checker(this);
1311 
1312  Token::Value token = IsHoleyElementsKind(kind) ? Token::GTE : Token::EQ;
1313  length_checker.If<HCompareNumericAndBranch>(key, length, token);
1314 
1315  length_checker.Then();
1316 
1317  HValue* current_capacity = AddLoadFixedArrayLength(elements);
1318 
1319  IfBuilder capacity_checker(this);
1320 
1321  capacity_checker.If<HCompareNumericAndBranch>(key, current_capacity,
1322  Token::GTE);
1323  capacity_checker.Then();
1324 
1325  HValue* max_gap = Add<HConstant>(static_cast<int32_t>(JSObject::kMaxGap));
1326  HValue* max_capacity = AddUncasted<HAdd>(current_capacity, max_gap);
1327 
1328  Add<HBoundsCheck>(key, max_capacity);
1329 
1330  HValue* new_capacity = BuildNewElementsCapacity(key);
1331  HValue* new_elements = BuildGrowElementsCapacity(object, elements,
1332  kind, kind, length,
1333  new_capacity);
1334 
1335  environment()->Push(new_elements);
1336  capacity_checker.Else();
1337 
1338  environment()->Push(elements);
1339  capacity_checker.End();
1340 
1341  if (is_js_array) {
1342  HValue* new_length = AddUncasted<HAdd>(key, graph_->GetConstant1());
1343  new_length->ClearFlag(HValue::kCanOverflow);
1344 
1345  Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(kind),
1346  new_length);
1347  }
1348 
1349  if (access_type == STORE && kind == FAST_SMI_ELEMENTS) {
1350  HValue* checked_elements = environment()->Top();
1351 
1352  // Write zero to ensure that the new element is initialized with some smi.
1353  Add<HStoreKeyed>(checked_elements, key, graph()->GetConstant0(), kind);
1354  }
1355 
1356  length_checker.Else();
1357  Add<HBoundsCheck>(key, length);
1358 
1359  environment()->Push(elements);
1360  length_checker.End();
1361 
1362  return environment()->Pop();
1363 }
1364 
1365 
1366 HValue* HGraphBuilder::BuildCopyElementsOnWrite(HValue* object,
1367  HValue* elements,
1368  ElementsKind kind,
1369  HValue* length) {
1370  Factory* factory = isolate()->factory();
1371 
1372  IfBuilder cow_checker(this);
1373 
1374  cow_checker.If<HCompareMap>(elements, factory->fixed_cow_array_map());
1375  cow_checker.Then();
1376 
1377  HValue* capacity = AddLoadFixedArrayLength(elements);
1378 
1379  HValue* new_elements = BuildGrowElementsCapacity(object, elements, kind,
1380  kind, length, capacity);
1381 
1382  environment()->Push(new_elements);
1383 
1384  cow_checker.Else();
1385 
1386  environment()->Push(elements);
1387 
1388  cow_checker.End();
1389 
1390  return environment()->Pop();
1391 }
1392 
1393 
1394 void HGraphBuilder::BuildTransitionElementsKind(HValue* object,
1395  HValue* map,
1396  ElementsKind from_kind,
1397  ElementsKind to_kind,
1398  bool is_jsarray) {
1399  DCHECK(!IsFastHoleyElementsKind(from_kind) ||
1400  IsFastHoleyElementsKind(to_kind));
1401 
1402  if (AllocationSite::GetMode(from_kind, to_kind) == TRACK_ALLOCATION_SITE) {
1403  Add<HTrapAllocationMemento>(object);
1404  }
1405 
1406  if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
1407  HInstruction* elements = AddLoadElements(object);
1408 
1409  HInstruction* empty_fixed_array = Add<HConstant>(
1410  isolate()->factory()->empty_fixed_array());
1411 
1412  IfBuilder if_builder(this);
1413 
1414  if_builder.IfNot<HCompareObjectEqAndBranch>(elements, empty_fixed_array);
1415 
1416  if_builder.Then();
1417 
1418  HInstruction* elements_length = AddLoadFixedArrayLength(elements);
1419 
1420  HInstruction* array_length = is_jsarray
1421  ? Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
1422  HObjectAccess::ForArrayLength(from_kind))
1423  : elements_length;
1424 
1425  BuildGrowElementsCapacity(object, elements, from_kind, to_kind,
1426  array_length, elements_length);
1427 
1428  if_builder.End();
1429  }
1430 
1431  Add<HStoreNamedField>(object, HObjectAccess::ForMap(), map);
1432 }
1433 
1434 
1435 void HGraphBuilder::BuildJSObjectCheck(HValue* receiver,
1436  int bit_field_mask) {
1437  // Check that the object isn't a smi.
1438  Add<HCheckHeapObject>(receiver);
1439 
1440  // Get the map of the receiver.
1441  HValue* map = Add<HLoadNamedField>(receiver, static_cast<HValue*>(NULL),
1442  HObjectAccess::ForMap());
1443 
1444  // Check the instance type and whether an access check is needed; this can
1445  // be done with a single load, since both bytes are adjacent in the map.
1446  HObjectAccess access(HObjectAccess::ForMapInstanceTypeAndBitField());
1447  HValue* instance_type_and_bit_field =
1448  Add<HLoadNamedField>(map, static_cast<HValue*>(NULL), access);
1449 
1450  HValue* mask = Add<HConstant>(0x00FF | (bit_field_mask << 8));
1451  HValue* and_result = AddUncasted<HBitwise>(Token::BIT_AND,
1452  instance_type_and_bit_field,
1453  mask);
1454  HValue* sub_result = AddUncasted<HSub>(and_result,
1455  Add<HConstant>(JS_OBJECT_TYPE));
1456  Add<HBoundsCheck>(sub_result,
1457  Add<HConstant>(LAST_JS_OBJECT_TYPE + 1 - JS_OBJECT_TYPE));
1458 }
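// The combined check above can be read as plain arithmetic (a sketch; the
// byte layout is the one implied by ForMapInstanceTypeAndBitField()):
//
//   loaded = instance_type | (bit_field << 8)          // one 16-bit load
//   masked = loaded & (0x00FF | bit_field_mask << 8)
//
// masked equals instance_type exactly when none of the bit_field bits selected
// by bit_field_mask are set, so the bounds check
//   0 <= masked - JS_OBJECT_TYPE < LAST_JS_OBJECT_TYPE + 1 - JS_OBJECT_TYPE
// fails (and deoptimizes) both for non-JS-object instance types and for
// objects whose bit_field requests an access check or other special handling.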
1459 
1460 
1461 void HGraphBuilder::BuildKeyedIndexCheck(HValue* key,
1462  HIfContinuation* join_continuation) {
1463  // The ordering of the ifs below is unintuitively backward, but necessary.
1464  // All of the paths must guarantee that the
1465  // if-true of the continuation returns a smi element index and the if-false of
1466  // the continuation returns either a symbol or a unique string key. All other
1467  // object types cause a deopt to fall back to the runtime.
1468 
1469  IfBuilder key_smi_if(this);
1470  key_smi_if.If<HIsSmiAndBranch>(key);
1471  key_smi_if.Then();
1472  {
1473  Push(key); // Nothing to do, just continue to true of continuation.
1474  }
1475  key_smi_if.Else();
1476  {
1477  HValue* map = Add<HLoadNamedField>(key, static_cast<HValue*>(NULL),
1478  HObjectAccess::ForMap());
1479  HValue* instance_type =
1480  Add<HLoadNamedField>(map, static_cast<HValue*>(NULL),
1481  HObjectAccess::ForMapInstanceType());
1482 
1483  // Non-unique string: check for a string with a hash code that is actually
1484  // an index.
1486  IfBuilder not_string_or_name_if(this);
1487  not_string_or_name_if.If<HCompareNumericAndBranch>(
1488  instance_type,
1489  Add<HConstant>(LAST_UNIQUE_NAME_TYPE),
1490  Token::GT);
1491 
1492  not_string_or_name_if.Then();
1493  {
1494  // Non-smi, non-Name, non-String: Try to convert to smi in case of
1495  // HeapNumber.
1496  // TODO(danno): This could call some variant of ToString
1497  Push(AddUncasted<HForceRepresentation>(key, Representation::Smi()));
1498  }
1499  not_string_or_name_if.Else();
1500  {
1501  // String or Name: check explicitly for Name, they can short-circuit
1502  // directly to unique non-index key path.
1503  IfBuilder not_symbol_if(this);
1504  not_symbol_if.If<HCompareNumericAndBranch>(
1505  instance_type,
1506  Add<HConstant>(SYMBOL_TYPE),
1507  Token::NE);
1508 
1509  not_symbol_if.Then();
1510  {
1511  // String: check whether the String is a String of an index. If it is,
1512  // extract the index value from the hash.
1513  HValue* hash =
1514  Add<HLoadNamedField>(key, static_cast<HValue*>(NULL),
1515  HObjectAccess::ForNameHashField());
1516  HValue* not_index_mask = Add<HConstant>(static_cast<int>(
1517  String::kContainsCachedArrayIndexMask));
1518 
1519  HValue* not_index_test = AddUncasted<HBitwise>(
1520  Token::BIT_AND, hash, not_index_mask);
1521 
1522  IfBuilder string_index_if(this);
1523  string_index_if.If<HCompareNumericAndBranch>(not_index_test,
1524  graph()->GetConstant0(),
1525  Token::EQ);
1526  string_index_if.Then();
1527  {
1528  // String with index in hash: extract string and merge to index path.
1529  Push(BuildDecodeField<String::ArrayIndexValueBits>(hash));
1530  }
1531  string_index_if.Else();
1532  {
1533  // Key is a non-index String, check for uniqueness/internalization.
1534  // If it's not internalized yet, internalize it now.
1535  HValue* not_internalized_bit = AddUncasted<HBitwise>(
1536  Token::BIT_AND,
1537  instance_type,
1538  Add<HConstant>(static_cast<int>(kIsNotInternalizedMask)));
1539 
1540  IfBuilder internalized(this);
1541  internalized.If<HCompareNumericAndBranch>(not_internalized_bit,
1542  graph()->GetConstant0(),
1543  Token::EQ);
1544  internalized.Then();
1545  Push(key);
1546 
1547  internalized.Else();
1548  Add<HPushArguments>(key);
1549  HValue* intern_key = Add<HCallRuntime>(
1550  isolate()->factory()->empty_string(),
1551  Runtime::FunctionForId(Runtime::kInternalizeString), 1);
1552  Push(intern_key);
1553 
1554  internalized.End();
1555  // Key guaranteed to be a unique string
1556  }
1557  string_index_if.JoinContinuation(join_continuation);
1558  }
1559  not_symbol_if.Else();
1560  {
1561  Push(key); // Key is symbol
1562  }
1563  not_symbol_if.JoinContinuation(join_continuation);
1564  }
1565  not_string_or_name_if.JoinContinuation(join_continuation);
1566  }
1567  key_smi_if.JoinContinuation(join_continuation);
1568 }
1569 
1570 
1571 void HGraphBuilder::BuildNonGlobalObjectCheck(HValue* receiver) {
1572  // Get the instance type of the receiver, and make sure that it is
1573  // not one of the global object types.
1574  HValue* map = Add<HLoadNamedField>(receiver, static_cast<HValue*>(NULL),
1575  HObjectAccess::ForMap());
1576  HValue* instance_type =
1577  Add<HLoadNamedField>(map, static_cast<HValue*>(NULL),
1578  HObjectAccess::ForMapInstanceType());
1580  HValue* min_global_type = Add<HConstant>(JS_GLOBAL_OBJECT_TYPE);
1581  HValue* max_global_type = Add<HConstant>(JS_BUILTINS_OBJECT_TYPE);
1582 
1583  IfBuilder if_global_object(this);
1584  if_global_object.If<HCompareNumericAndBranch>(instance_type,
1585  max_global_type,
1586  Token::LTE);
1587  if_global_object.And();
1588  if_global_object.If<HCompareNumericAndBranch>(instance_type,
1589  min_global_type,
1590  Token::GTE);
1591  if_global_object.ThenDeopt("receiver was a global object");
1592  if_global_object.End();
1593 }
1594 
1595 
1596 void HGraphBuilder::BuildTestForDictionaryProperties(
1597  HValue* object,
1598  HIfContinuation* continuation) {
1599  HValue* properties = Add<HLoadNamedField>(
1600  object, static_cast<HValue*>(NULL),
1601  HObjectAccess::ForPropertiesPointer());
1602  HValue* properties_map =
1603  Add<HLoadNamedField>(properties, static_cast<HValue*>(NULL),
1604  HObjectAccess::ForMap());
1605  HValue* hash_map = Add<HLoadRoot>(Heap::kHashTableMapRootIndex);
1606  IfBuilder builder(this);
1607  builder.If<HCompareObjectEqAndBranch>(properties_map, hash_map);
1608  builder.CaptureContinuation(continuation);
1609 }
1610 
1611 
1612 HValue* HGraphBuilder::BuildKeyedLookupCacheHash(HValue* object,
1613  HValue* key) {
1614  // Load the map of the receiver, compute the keyed lookup cache hash
1615  // based on 32 bits of the map pointer and the string hash.
1616  HValue* object_map =
1617  Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
1618  HObjectAccess::ForMapAsInteger32());
1619  HValue* shifted_map = AddUncasted<HShr>(
1620  object_map, Add<HConstant>(KeyedLookupCache::kMapHashShift));
1621  HValue* string_hash =
1622  Add<HLoadNamedField>(key, static_cast<HValue*>(NULL),
1623  HObjectAccess::ForStringHashField());
1624  HValue* shifted_hash = AddUncasted<HShr>(
1625  string_hash, Add<HConstant>(String::kHashShift));
1626  HValue* xor_result = AddUncasted<HBitwise>(Token::BIT_XOR, shifted_map,
1627  shifted_hash);
1628  int mask = (KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
1629  return AddUncasted<HBitwise>(Token::BIT_AND, xor_result,
1630  Add<HConstant>(mask));
1631 }
1632 
1633 
1634 HValue* HGraphBuilder::BuildElementIndexHash(HValue* index) {
1635  int32_t seed_value = static_cast<uint32_t>(isolate()->heap()->HashSeed());
1636  HValue* seed = Add<HConstant>(seed_value);
1637  HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, index, seed);
1638 
1639  // hash = ~hash + (hash << 15);
1640  HValue* shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(15));
1641  HValue* not_hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash,
1642  graph()->GetConstantMinus1());
1643  hash = AddUncasted<HAdd>(shifted_hash, not_hash);
1644 
1645  // hash = hash ^ (hash >> 12);
1646  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(12));
1647  hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
1648 
1649  // hash = hash + (hash << 2);
1650  shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(2));
1651  hash = AddUncasted<HAdd>(hash, shifted_hash);
1652 
1653  // hash = hash ^ (hash >> 4);
1654  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(4));
1655  hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
1656 
1657  // hash = hash * 2057;
1658  hash = AddUncasted<HMul>(hash, Add<HConstant>(2057));
1659  hash->ClearFlag(HValue::kCanOverflow);
1660 
1661  // hash = hash ^ (hash >> 16);
1662  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(16));
1663  return AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
1664 }
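// The sequence above is intended to mirror the runtime's seeded integer hash
// for number dictionaries, so that Hydrogen and the runtime agree on bucket
// placement. As ordinary C++ it would read roughly (a sketch, assuming 32-bit
// unsigned arithmetic):
//
//   uint32_t hash = index ^ seed;
//   hash = ~hash + (hash << 15);   // emitted above as (hash ^ -1) + (hash << 15)
//   hash = hash ^ (hash >> 12);
//   hash = hash + (hash << 2);
//   hash = hash ^ (hash >> 4);
//   hash = hash * 2057;
//   hash = hash ^ (hash >> 16);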
1665 
1666 
1667 HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoad(HValue* receiver,
1668  HValue* elements,
1669  HValue* key,
1670  HValue* hash) {
1671  HValue* capacity = Add<HLoadKeyed>(
1672  elements,
1673  Add<HConstant>(NameDictionary::kCapacityIndex),
1674  static_cast<HValue*>(NULL),
1675  FAST_ELEMENTS);
1676 
1677  HValue* mask = AddUncasted<HSub>(capacity, graph()->GetConstant1());
1678  mask->ChangeRepresentation(Representation::Integer32());
1679  mask->ClearFlag(HValue::kCanOverflow);
1680 
1681  HValue* entry = hash;
1682  HValue* count = graph()->GetConstant1();
1683  Push(entry);
1684  Push(count);
1685 
1686  HIfContinuation return_or_loop_continuation(graph()->CreateBasicBlock(),
1687  graph()->CreateBasicBlock());
1688  HIfContinuation found_key_match_continuation(graph()->CreateBasicBlock(),
1689  graph()->CreateBasicBlock());
1690  LoopBuilder probe_loop(this);
1691  probe_loop.BeginBody(2); // Drop entry, count from last environment to
1692  // appease live range building without simulates.
1693 
1694  count = Pop();
1695  entry = Pop();
1696  entry = AddUncasted<HBitwise>(Token::BIT_AND, entry, mask);
1697  int entry_size = SeededNumberDictionary::kEntrySize;
1698  HValue* base_index = AddUncasted<HMul>(entry, Add<HConstant>(entry_size));
1699  base_index->ClearFlag(HValue::kCanOverflow);
1700  int start_offset = SeededNumberDictionary::kElementsStartIndex;
1701  HValue* key_index =
1702  AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset));
1703  key_index->ClearFlag(HValue::kCanOverflow);
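// Dictionary layout (sketch): entries start at kElementsStartIndex and each
// entry spans kEntrySize consecutive slots -- key, value, details -- which
// is why the value and the details word are read below at key_index + 1 and
// key_index + 2 respectively.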
1704 
1705  HValue* candidate_key = Add<HLoadKeyed>(
1706  elements, key_index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
1707  IfBuilder if_undefined(this);
1708  if_undefined.If<HCompareObjectEqAndBranch>(candidate_key,
1709  graph()->GetConstantUndefined());
1710  if_undefined.Then();
1711  {
1712  // element == undefined means "not found". Call the runtime.
1713  // TODO(jkummerow): walk the prototype chain instead.
1714  Add<HPushArguments>(receiver, key);
1715  Push(Add<HCallRuntime>(isolate()->factory()->empty_string(),
1716  Runtime::FunctionForId(Runtime::kKeyedGetProperty),
1717  2));
1718  }
1719  if_undefined.Else();
1720  {
1721  IfBuilder if_match(this);
1722  if_match.If<HCompareObjectEqAndBranch>(candidate_key, key);
1723  if_match.Then();
1724  if_match.Else();
1725 
1726  // Update non-internalized string in the dictionary with internalized key?
1727  IfBuilder if_update_with_internalized(this);
1728  HValue* smi_check =
1729  if_update_with_internalized.IfNot<HIsSmiAndBranch>(candidate_key);
1730  if_update_with_internalized.And();
1731  HValue* map = AddLoadMap(candidate_key, smi_check);
1732  HValue* instance_type = Add<HLoadNamedField>(
1733  map, static_cast<HValue*>(NULL), HObjectAccess::ForMapInstanceType());
1734  HValue* not_internalized_bit = AddUncasted<HBitwise>(
1735  Token::BIT_AND, instance_type,
1736  Add<HConstant>(static_cast<int>(kIsNotInternalizedMask)));
1737  if_update_with_internalized.If<HCompareNumericAndBranch>(
1738  not_internalized_bit, graph()->GetConstant0(), Token::NE);
1739  if_update_with_internalized.And();
1740  if_update_with_internalized.IfNot<HCompareObjectEqAndBranch>(
1741  candidate_key, graph()->GetConstantHole());
1742  if_update_with_internalized.AndIf<HStringCompareAndBranch>(candidate_key,
1743  key, Token::EQ);
1744  if_update_with_internalized.Then();
1745  // Replace a key that is a non-internalized string by the equivalent
1746  // internalized string for faster further lookups.
1747  Add<HStoreKeyed>(elements, key_index, key, FAST_ELEMENTS);
1748  if_update_with_internalized.Else();
1749 
1750  if_update_with_internalized.JoinContinuation(&found_key_match_continuation);
1751  if_match.JoinContinuation(&found_key_match_continuation);
1752 
1753  IfBuilder found_key_match(this, &found_key_match_continuation);
1754  found_key_match.Then();
1755  // Key at current probe matches. Relevant bits in the |details| field must
1756  // be zero, otherwise the dictionary element requires special handling.
1757  HValue* details_index =
1758  AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset + 2));
1759  details_index->ClearFlag(HValue::kCanOverflow);
1760  HValue* details = Add<HLoadKeyed>(
1761  elements, details_index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
1762  int details_mask = PropertyDetails::TypeField::kMask |
1763  PropertyDetails::DeletedField::kMask;
1764  details = AddUncasted<HBitwise>(Token::BIT_AND, details,
1765  Add<HConstant>(details_mask));
1766  IfBuilder details_compare(this);
1767  details_compare.If<HCompareNumericAndBranch>(
1768  details, graph()->GetConstant0(), Token::EQ);
1769  details_compare.Then();
1770  HValue* result_index =
1771  AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset + 1));
1772  result_index->ClearFlag(HValue::kCanOverflow);
1773  Push(Add<HLoadKeyed>(elements, result_index, static_cast<HValue*>(NULL),
1774  FAST_ELEMENTS));
1775  details_compare.Else();
1776  Add<HPushArguments>(receiver, key);
1777  Push(Add<HCallRuntime>(isolate()->factory()->empty_string(),
1778  Runtime::FunctionForId(Runtime::kKeyedGetProperty),
1779  2));
1780  details_compare.End();
1781 
1782  found_key_match.Else();
1783  found_key_match.JoinContinuation(&return_or_loop_continuation);
1784  }
1785  if_undefined.JoinContinuation(&return_or_loop_continuation);
1786 
1787  IfBuilder return_or_loop(this, &return_or_loop_continuation);
1788  return_or_loop.Then();
1789  probe_loop.Break();
1790 
1791  return_or_loop.Else();
1792  entry = AddUncasted<HAdd>(entry, count);
1793  entry->ClearFlag(HValue::kCanOverflow);
1794  count = AddUncasted<HAdd>(count, graph()->GetConstant1());
1795  count->ClearFlag(HValue::kCanOverflow);
1796  Push(entry);
1797  Push(count);
1798 
1799  probe_loop.EndBody();
1800 
1801  return_or_loop.End();
1802 
1803  return Pop();
1804 }
1805 
1806 
1807 HValue* HGraphBuilder::BuildRegExpConstructResult(HValue* length,
1808  HValue* index,
1809  HValue* input) {
1810  NoObservableSideEffectsScope scope(this);
1811  HConstant* max_length = Add<HConstant>(JSObject::kInitialMaxFastElementArray);
1812  Add<HBoundsCheck>(length, max_length);
1813 
1814  // Generate size calculation code here in order to make it dominate
1815  // the JSRegExpResult allocation.
1816  ElementsKind elements_kind = FAST_ELEMENTS;
1817  HValue* size = BuildCalculateElementsSize(elements_kind, length);
1818 
1819  // Allocate the JSRegExpResult and the FixedArray in one step.
1820  HValue* result = Add<HAllocate>(
1821  Add<HConstant>(JSRegExpResult::kSize), HType::JSArray(),
1822  NOT_TENURED, JS_ARRAY_TYPE);
1823 
1824  // Initialize the JSRegExpResult header.
1825  HValue* global_object = Add<HLoadNamedField>(
1826  context(), static_cast<HValue*>(NULL),
1827  HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
1828  HValue* native_context = Add<HLoadNamedField>(
1829  global_object, static_cast<HValue*>(NULL),
1830  HObjectAccess::ForGlobalObjectNativeContext());
1831  Add<HStoreNamedField>(
1832  result, HObjectAccess::ForMap(),
1833  Add<HLoadNamedField>(
1834  native_context, static_cast<HValue*>(NULL),
1835  HObjectAccess::ForContextSlot(Context::REGEXP_RESULT_MAP_INDEX)));
1836  HConstant* empty_fixed_array =
1837  Add<HConstant>(isolate()->factory()->empty_fixed_array());
1838  Add<HStoreNamedField>(
1839  result, HObjectAccess::ForJSArrayOffset(JSArray::kPropertiesOffset),
1840  empty_fixed_array);
1841  Add<HStoreNamedField>(
1842  result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
1843  empty_fixed_array);
1844  Add<HStoreNamedField>(
1845  result, HObjectAccess::ForJSArrayOffset(JSArray::kLengthOffset), length);
1846 
1847  // Initialize the additional fields.
1848  Add<HStoreNamedField>(
1849  result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kIndexOffset),
1850  index);
1851  Add<HStoreNamedField>(
1852  result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kInputOffset),
1853  input);
1854 
1855  // Allocate and initialize the elements header.
1856  HAllocate* elements = BuildAllocateElements(elements_kind, size);
1857  BuildInitializeElementsHeader(elements, elements_kind, length);
1858 
1859  if (!elements->has_size_upper_bound()) {
1860  HConstant* size_in_bytes_upper_bound = EstablishElementsAllocationSize(
1861  elements_kind, max_length->Integer32Value());
1862  elements->set_size_upper_bound(size_in_bytes_upper_bound);
1863  }
1864 
1865  Add<HStoreNamedField>(
1866  result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
1867  elements);
1868 
1869  // Initialize the elements contents with undefined.
1870  BuildFillElementsWithValue(
1871  elements, elements_kind, graph()->GetConstant0(), length,
1872  graph()->GetConstantUndefined());
1873 
1874  return result;
1875 }
1876 
1877 
1878 HValue* HGraphBuilder::BuildNumberToString(HValue* object, Type* type) {
1879  NoObservableSideEffectsScope scope(this);
1880 
1881  // Convert constant numbers at compile time.
1882  if (object->IsConstant() && HConstant::cast(object)->HasNumberValue()) {
1883  Handle<Object> number = HConstant::cast(object)->handle(isolate());
1884  Handle<String> result = isolate()->factory()->NumberToString(number);
1885  return Add<HConstant>(result);
1886  }
1887 
1888  // Create a joinable continuation.
1889  HIfContinuation found(graph()->CreateBasicBlock(),
1890  graph()->CreateBasicBlock());
1891 
1892  // Load the number string cache.
1893  HValue* number_string_cache =
1894  Add<HLoadRoot>(Heap::kNumberStringCacheRootIndex);
1895 
1896  // Make the hash mask from the length of the number string cache. It
1897  // contains two elements (number and string) for each cache entry.
1898  HValue* mask = AddLoadFixedArrayLength(number_string_cache);
1899  mask->set_type(HType::Smi());
1900  mask = AddUncasted<HSar>(mask, graph()->GetConstant1());
1901  mask = AddUncasted<HSub>(mask, graph()->GetConstant1());
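// I.e. mask = (cache length / 2) - 1; entry i keeps its number key at
// index 2 * i and the cached string at index 2 * i + 1.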
1902 
1903  // Check whether object is a smi.
1904  IfBuilder if_objectissmi(this);
1905  if_objectissmi.If<HIsSmiAndBranch>(object);
1906  if_objectissmi.Then();
1907  {
1908  // Compute hash for smi similar to smi_get_hash().
1909  HValue* hash = AddUncasted<HBitwise>(Token::BIT_AND, object, mask);
1910 
1911  // Load the key.
1912  HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
1913  HValue* key = Add<HLoadKeyed>(number_string_cache, key_index,
1914  static_cast<HValue*>(NULL),
1915  FAST_ELEMENTS, ALLOW_RETURN_HOLE);
1916 
1917  // Check if object == key.
1918  IfBuilder if_objectiskey(this);
1919  if_objectiskey.If<HCompareObjectEqAndBranch>(object, key);
1920  if_objectiskey.Then();
1921  {
1922  // Make the key_index available.
1923  Push(key_index);
1924  }
1925  if_objectiskey.JoinContinuation(&found);
1926  }
1927  if_objectissmi.Else();
1928  {
1929  if (type->Is(Type::SignedSmall())) {
1930  if_objectissmi.Deopt("Expected smi");
1931  } else {
1932  // Check if the object is a heap number.
1933  IfBuilder if_objectisnumber(this);
1934  HValue* objectisnumber = if_objectisnumber.If<HCompareMap>(
1935  object, isolate()->factory()->heap_number_map());
1936  if_objectisnumber.Then();
1937  {
1938  // Compute hash for heap number similar to double_get_hash().
1939  HValue* low = Add<HLoadNamedField>(
1940  object, objectisnumber,
1941  HObjectAccess::ForHeapNumberValueLowestBits());
1942  HValue* high = Add<HLoadNamedField>(
1943  object, objectisnumber,
1944  HObjectAccess::ForHeapNumberValueHighestBits());
1945  HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, low, high);
1946  hash = AddUncasted<HBitwise>(Token::BIT_AND, hash, mask);
1947 
1948  // Load the key.
1949  HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
1950  HValue* key = Add<HLoadKeyed>(number_string_cache, key_index,
1951  static_cast<HValue*>(NULL),
1952  FAST_ELEMENTS, ALLOW_RETURN_HOLE);
1953 
1954  // Check if the key is a heap number and compare it with the object.
1955  IfBuilder if_keyisnotsmi(this);
1956  HValue* keyisnotsmi = if_keyisnotsmi.IfNot<HIsSmiAndBranch>(key);
1957  if_keyisnotsmi.Then();
1958  {
1959  IfBuilder if_keyisheapnumber(this);
1960  if_keyisheapnumber.If<HCompareMap>(
1961  key, isolate()->factory()->heap_number_map());
1962  if_keyisheapnumber.Then();
1963  {
1964  // Check if values of key and object match.
1965  IfBuilder if_keyeqobject(this);
1966  if_keyeqobject.If<HCompareNumericAndBranch>(
1967  Add<HLoadNamedField>(key, keyisnotsmi,
1968  HObjectAccess::ForHeapNumberValue()),
1969  Add<HLoadNamedField>(object, objectisnumber,
1970  HObjectAccess::ForHeapNumberValue()),
1971  Token::EQ);
1972  if_keyeqobject.Then();
1973  {
1974  // Make the key_index available.
1975  Push(key_index);
1976  }
1977  if_keyeqobject.JoinContinuation(&found);
1978  }
1979  if_keyisheapnumber.JoinContinuation(&found);
1980  }
1981  if_keyisnotsmi.JoinContinuation(&found);
1982  }
1983  if_objectisnumber.Else();
1984  {
1985  if (type->Is(Type::Number())) {
1986  if_objectisnumber.Deopt("Expected heap number");
1987  }
1988  }
1989  if_objectisnumber.JoinContinuation(&found);
1990  }
1991  }
1992  if_objectissmi.JoinContinuation(&found);
1993 
1994  // Check for cache hit.
1995  IfBuilder if_found(this, &found);
1996  if_found.Then();
1997  {
1998  // Count number to string operation in native code.
1999  AddIncrementCounter(isolate()->counters()->number_to_string_native());
2000 
2001  // Load the value in case of cache hit.
2002  HValue* key_index = Pop();
2003  HValue* value_index = AddUncasted<HAdd>(key_index, graph()->GetConstant1());
2004  Push(Add<HLoadKeyed>(number_string_cache, value_index,
2005  static_cast<HValue*>(NULL),
2006  FAST_ELEMENTS, ALLOW_RETURN_HOLE));
2007  }
2008  if_found.Else();
2009  {
2010  // Cache miss, fallback to runtime.
2011  Add<HPushArguments>(object);
2012  Push(Add<HCallRuntime>(
2013  isolate()->factory()->empty_string(),
2014  Runtime::FunctionForId(Runtime::kNumberToStringSkipCache),
2015  1));
2016  }
2017  if_found.End();
2018 
2019  return Pop();
2020 }
2021 
2022 
2023 HAllocate* HGraphBuilder::BuildAllocate(
2024  HValue* object_size,
2025  HType type,
2026  InstanceType instance_type,
2027  HAllocationMode allocation_mode) {
2028  // Compute the effective allocation size.
2029  HValue* size = object_size;
2030  if (allocation_mode.CreateAllocationMementos()) {
2031  size = AddUncasted<HAdd>(size, Add<HConstant>(AllocationMemento::kSize));
2032  size->ClearFlag(HValue::kCanOverflow);
2033  }
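// Layout sketch when mementos are requested: the AllocationMemento::kSize
// bytes directly after the object belong to the same allocation and are
// filled in by BuildCreateAllocationMemento() below.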
2034 
2035  // Perform the actual allocation.
2036  HAllocate* object = Add<HAllocate>(
2037  size, type, allocation_mode.GetPretenureMode(),
2038  instance_type, allocation_mode.feedback_site());
2039 
2040  // Setup the allocation memento.
2041  if (allocation_mode.CreateAllocationMementos()) {
2042  BuildCreateAllocationMemento(
2043  object, object_size, allocation_mode.current_site());
2044  }
2045 
2046  return object;
2047 }
2048 
2049 
2050 HValue* HGraphBuilder::BuildAddStringLengths(HValue* left_length,
2051  HValue* right_length) {
2052  // Compute the combined string length and check against max string length.
2053  HValue* length = AddUncasted<HAdd>(left_length, right_length);
2054  // Check that length <= kMaxLength <=> length < kMaxLength + 1.
2055  HValue* max_length = Add<HConstant>(String::kMaxLength + 1);
2056  Add<HBoundsCheck>(length, max_length);
2057  return length;
2058 }
2059 
2060 
2061 HValue* HGraphBuilder::BuildCreateConsString(
2062  HValue* length,
2063  HValue* left,
2064  HValue* right,
2065  HAllocationMode allocation_mode) {
2066  // Determine the string instance types.
2067  HInstruction* left_instance_type = AddLoadStringInstanceType(left);
2068  HInstruction* right_instance_type = AddLoadStringInstanceType(right);
2069 
2070  // Allocate the cons string object. HAllocate does not care whether we
2071  // pass CONS_STRING_TYPE or CONS_ONE_BYTE_STRING_TYPE here, so we just use
2072  // CONS_STRING_TYPE here. Below we decide whether the cons string is
2073  // one-byte or two-byte and set the appropriate map.
2074  DCHECK(HAllocate::CompatibleInstanceTypes(CONS_STRING_TYPE,
2075  CONS_ONE_BYTE_STRING_TYPE));
2076  HAllocate* result = BuildAllocate(Add<HConstant>(ConsString::kSize),
2077  HType::String(), CONS_STRING_TYPE,
2078  allocation_mode);
2079 
2080  // Compute intersection and difference of instance types.
2081  HValue* anded_instance_types = AddUncasted<HBitwise>(
2082  Token::BIT_AND, left_instance_type, right_instance_type);
2083  HValue* xored_instance_types = AddUncasted<HBitwise>(
2084  Token::BIT_XOR, left_instance_type, right_instance_type);
2085 
2086  // We create a one-byte cons string if
2087  // 1. both strings are one-byte, or
2088  // 2. at least one of the strings is two-byte, but happens to contain only
2089  // one-byte characters.
2090  // To do this, we check
2091  // 1. if both strings are one-byte, or if the one-byte data hint is set in
2092  // both strings, or
2093  // 2. if one of the strings has the one-byte data hint set and the other
2094  // string is one-byte.
2095  IfBuilder if_onebyte(this);
2096  STATIC_ASSERT(kOneByteStringTag != 0);
2097  STATIC_ASSERT(kOneByteDataHintMask != 0);
2098  if_onebyte.If<HCompareNumericAndBranch>(
2099  AddUncasted<HBitwise>(
2100  Token::BIT_AND, anded_instance_types,
2101  Add<HConstant>(static_cast<int32_t>(
2102  kStringEncodingMask | kOneByteDataHintMask))),
2103  graph()->GetConstant0(), Token::NE);
2104  if_onebyte.Or();
2105  STATIC_ASSERT(kOneByteStringTag != 0 &&
2106  kOneByteDataHintTag != 0 &&
2107  kOneByteDataHintTag != kOneByteStringTag);
2108  if_onebyte.If<HCompareNumericAndBranch>(
2109  AddUncasted<HBitwise>(
2110  Token::BIT_AND, xored_instance_types,
2111  Add<HConstant>(static_cast<int32_t>(
2112  kOneByteStringTag | kOneByteDataHintTag))),
2113  Add<HConstant>(static_cast<int32_t>(
2114  kOneByteStringTag | kOneByteDataHintTag)), Token::EQ);
2115  if_onebyte.Then();
2116  {
2117  // We can safely skip the write barrier for storing the map here.
2118  Add<HStoreNamedField>(
2119  result, HObjectAccess::ForMap(),
2120  Add<HConstant>(isolate()->factory()->cons_one_byte_string_map()));
2121  }
2122  if_onebyte.Else();
2123  {
2124  // We can safely skip the write barrier for storing the map here.
2125  Add<HStoreNamedField>(
2126  result, HObjectAccess::ForMap(),
2127  Add<HConstant>(isolate()->factory()->cons_string_map()));
2128  }
2129  if_onebyte.End();
2130 
2131  // Initialize the cons string fields.
2132  Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
2133  Add<HConstant>(String::kEmptyHashField));
2134  Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);
2135  Add<HStoreNamedField>(result, HObjectAccess::ForConsStringFirst(), left);
2136  Add<HStoreNamedField>(result, HObjectAccess::ForConsStringSecond(), right);
2137 
2138  // Count the native string addition.
2139  AddIncrementCounter(isolate()->counters()->string_add_native());
2140 
2141  return result;
2142 }
2143 
2144 
2145 void HGraphBuilder::BuildCopySeqStringChars(HValue* src,
2146  HValue* src_offset,
2147  String::Encoding src_encoding,
2148  HValue* dst,
2149  HValue* dst_offset,
2150  String::Encoding dst_encoding,
2151  HValue* length) {
2152  DCHECK(dst_encoding != String::ONE_BYTE_ENCODING ||
2153  src_encoding == String::ONE_BYTE_ENCODING);
2154  LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
2155  HValue* index = loop.BeginBody(graph()->GetConstant0(), length, Token::LT);
2156  {
2157  HValue* src_index = AddUncasted<HAdd>(src_offset, index);
2158  HValue* value =
2159  AddUncasted<HSeqStringGetChar>(src_encoding, src, src_index);
2160  HValue* dst_index = AddUncasted<HAdd>(dst_offset, index);
2161  Add<HSeqStringSetChar>(dst_encoding, dst, dst_index, value);
2162  }
2163  loop.EndBody();
2164 }
2165 
2166 
2167 HValue* HGraphBuilder::BuildObjectSizeAlignment(
2168  HValue* unaligned_size, int header_size) {
2169  DCHECK((header_size & kObjectAlignmentMask) == 0);
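// Standard round-up-to-alignment, roughly:
//   size = (unaligned_size + header_size + kObjectAlignmentMask)
//          & ~kObjectAlignmentMask;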
2170  HValue* size = AddUncasted<HAdd>(
2171  unaligned_size, Add<HConstant>(static_cast<int32_t>(
2172  header_size + kObjectAlignmentMask)));
2173  size->ClearFlag(HValue::kCanOverflow);
2174  return AddUncasted<HBitwise>(
2175  Token::BIT_AND, size, Add<HConstant>(static_cast<int32_t>(
2176  ~kObjectAlignmentMask)));
2177 }
2178 
2179 
2180 HValue* HGraphBuilder::BuildUncheckedStringAdd(
2181  HValue* left,
2182  HValue* right,
2183  HAllocationMode allocation_mode) {
2184  // Determine the string lengths.
2185  HValue* left_length = AddLoadStringLength(left);
2186  HValue* right_length = AddLoadStringLength(right);
2187 
2188  // Compute the combined string length.
2189  HValue* length = BuildAddStringLengths(left_length, right_length);
2190 
2191  // Do some manual constant folding here.
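// Sketch: if either operand has a constant length c >= kMinLength - 1, then
// (since the other operand is known to be non-empty) the combined length is
// at least ConsString::kMinLength, so a cons string can be created without
// the runtime length check below.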
2192  if (left_length->IsConstant()) {
2193  HConstant* c_left_length = HConstant::cast(left_length);
2194  DCHECK_NE(0, c_left_length->Integer32Value());
2195  if (c_left_length->Integer32Value() + 1 >= ConsString::kMinLength) {
2196  // The right string contains at least one character.
2197  return BuildCreateConsString(length, left, right, allocation_mode);
2198  }
2199  } else if (right_length->IsConstant()) {
2200  HConstant* c_right_length = HConstant::cast(right_length);
2201  DCHECK_NE(0, c_right_length->Integer32Value());
2202  if (c_right_length->Integer32Value() + 1 >= ConsString::kMinLength) {
2203  // The left string contains at least one character.
2204  return BuildCreateConsString(length, left, right, allocation_mode);
2205  }
2206  }
2207 
2208  // Check if we should create a cons string.
2209  IfBuilder if_createcons(this);
2210  if_createcons.If<HCompareNumericAndBranch>(
2211  length, Add<HConstant>(ConsString::kMinLength), Token::GTE);
2212  if_createcons.Then();
2213  {
2214  // Create a cons string.
2215  Push(BuildCreateConsString(length, left, right, allocation_mode));
2216  }
2217  if_createcons.Else();
2218  {
2219  // Determine the string instance types.
2220  HValue* left_instance_type = AddLoadStringInstanceType(left);
2221  HValue* right_instance_type = AddLoadStringInstanceType(right);
2222 
2223  // Compute union and difference of instance types.
2224  HValue* ored_instance_types = AddUncasted<HBitwise>(
2225  Token::BIT_OR, left_instance_type, right_instance_type);
2226  HValue* xored_instance_types = AddUncasted<HBitwise>(
2227  Token::BIT_XOR, left_instance_type, right_instance_type);
2228 
2229  // Check if both strings have the same encoding and both are
2230  // sequential.
2231  IfBuilder if_sameencodingandsequential(this);
2232  if_sameencodingandsequential.If<HCompareNumericAndBranch>(
2233  AddUncasted<HBitwise>(
2234  Token::BIT_AND, xored_instance_types,
2235  Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
2236  graph()->GetConstant0(), Token::EQ);
2237  if_sameencodingandsequential.And();
2238  STATIC_ASSERT(kSeqStringTag == 0);
2239  if_sameencodingandsequential.If<HCompareNumericAndBranch>(
2240  AddUncasted<HBitwise>(
2241  Token::BIT_AND, ored_instance_types,
2242  Add<HConstant>(static_cast<int32_t>(kStringRepresentationMask))),
2243  graph()->GetConstant0(), Token::EQ);
2244  if_sameencodingandsequential.Then();
2245  {
2246  HConstant* string_map =
2247  Add<HConstant>(isolate()->factory()->string_map());
2248  HConstant* one_byte_string_map =
2249  Add<HConstant>(isolate()->factory()->one_byte_string_map());
2250 
2251  // Determine map and size depending on whether result is one-byte string.
2252  IfBuilder if_onebyte(this);
2253  STATIC_ASSERT(kOneByteStringTag != 0);
2254  if_onebyte.If<HCompareNumericAndBranch>(
2255  AddUncasted<HBitwise>(
2256  Token::BIT_AND, ored_instance_types,
2257  Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
2258  graph()->GetConstant0(), Token::NE);
2259  if_onebyte.Then();
2260  {
2261  // Allocate sequential one-byte string object.
2262  Push(length);
2263  Push(one_byte_string_map);
2264  }
2265  if_onebyte.Else();
2266  {
2267  // Allocate sequential two-byte string object.
2268  HValue* size = AddUncasted<HShl>(length, graph()->GetConstant1());
2269  size->ClearFlag(HValue::kCanOverflow);
2270  size->SetFlag(HValue::kUint32);
2271  Push(size);
2272  Push(string_map);
2273  }
2274  if_onebyte.End();
2275  HValue* map = Pop();
2276 
2277  // Calculate the number of bytes needed for the characters in the
2278  // string while observing object alignment.
2279  STATIC_ASSERT((SeqString::kHeaderSize & kObjectAlignmentMask) == 0);
2280  HValue* size = BuildObjectSizeAlignment(Pop(), SeqString::kHeaderSize);
2281 
2282  // Allocate the string object. HAllocate does not care whether we pass
2283  // STRING_TYPE or ONE_BYTE_STRING_TYPE here, so we just use STRING_TYPE.
2284  HAllocate* result = BuildAllocate(
2285  size, HType::String(), STRING_TYPE, allocation_mode);
2286  Add<HStoreNamedField>(result, HObjectAccess::ForMap(), map);
2287 
2288  // Initialize the string fields.
2289  Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
2290  Add<HConstant>(String::kEmptyHashField));
2291  Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);
2292 
2293  // Copy characters to the result string.
2294  IfBuilder if_twobyte(this);
2295  if_twobyte.If<HCompareObjectEqAndBranch>(map, string_map);
2296  if_twobyte.Then();
2297  {
2298  // Copy characters from the left string.
2299  BuildCopySeqStringChars(
2300  left, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
2301  result, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
2302  left_length);
2303 
2304  // Copy characters from the right string.
2305  BuildCopySeqStringChars(
2306  right, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
2307  result, left_length, String::TWO_BYTE_ENCODING,
2308  right_length);
2309  }
2310  if_twobyte.Else();
2311  {
2312  // Copy characters from the left string.
2313  BuildCopySeqStringChars(
2314  left, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
2315  result, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
2316  left_length);
2317 
2318  // Copy characters from the right string.
2319  BuildCopySeqStringChars(
2320  right, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
2321  result, left_length, String::ONE_BYTE_ENCODING,
2322  right_length);
2323  }
2324  if_twobyte.End();
2325 
2326  // Count the native string addition.
2327  AddIncrementCounter(isolate()->counters()->string_add_native());
2328 
2329  // Return the sequential string.
2330  Push(result);
2331  }
2332  if_sameencodingandsequential.Else();
2333  {
2334  // Fallback to the runtime to add the two strings.
2335  Add<HPushArguments>(left, right);
2336  Push(Add<HCallRuntime>(
2337  isolate()->factory()->empty_string(),
2338  Runtime::FunctionForId(Runtime::kStringAdd),
2339  2));
2340  }
2341  if_sameencodingandsequential.End();
2342  }
2343  if_createcons.End();
2344 
2345  return Pop();
2346 }
2347 
2348 
2349 HValue* HGraphBuilder::BuildStringAdd(
2350  HValue* left,
2351  HValue* right,
2352  HAllocationMode allocation_mode) {
2353  NoObservableSideEffectsScope no_effects(this);
2354 
2355  // Determine string lengths.
2356  HValue* left_length = AddLoadStringLength(left);
2357  HValue* right_length = AddLoadStringLength(right);
2358 
2359  // Check if left string is empty.
2360  IfBuilder if_leftempty(this);
2361  if_leftempty.If<HCompareNumericAndBranch>(
2362  left_length, graph()->GetConstant0(), Token::EQ);
2363  if_leftempty.Then();
2364  {
2365  // Count the native string addition.
2366  AddIncrementCounter(isolate()->counters()->string_add_native());
2367 
2368  // Just return the right string.
2369  Push(right);
2370  }
2371  if_leftempty.Else();
2372  {
2373  // Check if right string is empty.
2374  IfBuilder if_rightempty(this);
2375  if_rightempty.If<HCompareNumericAndBranch>(
2376  right_length, graph()->GetConstant0(), Token::EQ);
2377  if_rightempty.Then();
2378  {
2379  // Count the native string addition.
2380  AddIncrementCounter(isolate()->counters()->string_add_native());
2381 
2382  // Just return the left string.
2383  Push(left);
2384  }
2385  if_rightempty.Else();
2386  {
2387  // Add the two non-empty strings.
2388  Push(BuildUncheckedStringAdd(left, right, allocation_mode));
2389  }
2390  if_rightempty.End();
2391  }
2392  if_leftempty.End();
2393 
2394  return Pop();
2395 }
2396 
2397 
2398 HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
2399  HValue* checked_object,
2400  HValue* key,
2401  HValue* val,
2402  bool is_js_array,
2403  ElementsKind elements_kind,
2404  PropertyAccessType access_type,
2405  LoadKeyedHoleMode load_mode,
2406  KeyedAccessStoreMode store_mode) {
2407  DCHECK((!IsExternalArrayElementsKind(elements_kind) &&
2408  !IsFixedTypedArrayElementsKind(elements_kind)) ||
2409  !is_js_array);
2410  // No GVNFlag is necessary for ElementsKind if there is an explicit dependency
2411  // on a HElementsTransition instruction. The flag can also be removed if the
2412  // map to check has FAST_HOLEY_ELEMENTS, since there can be no further
2413  // ElementsKind transitions. Finally, the dependency can be removed for stores
2414  // for FAST_ELEMENTS, since a transition to HOLEY elements won't change the
2415  // generated store code.
2416  if ((elements_kind == FAST_HOLEY_ELEMENTS) ||
2417  (elements_kind == FAST_ELEMENTS && access_type == STORE)) {
2418  checked_object->ClearDependsOnFlag(kElementsKind);
2419  }
2420 
2421  bool fast_smi_only_elements = IsFastSmiElementsKind(elements_kind);
2422  bool fast_elements = IsFastObjectElementsKind(elements_kind);
2423  HValue* elements = AddLoadElements(checked_object);
2424  if (access_type == STORE && (fast_elements || fast_smi_only_elements) &&
2425  store_mode != STORE_NO_TRANSITION_HANDLE_COW) {
2426  HCheckMaps* check_cow_map = Add<HCheckMaps>(
2427  elements, isolate()->factory()->fixed_array_map());
2428  check_cow_map->ClearDependsOnFlag(kElementsKind);
2429  }
2430  HInstruction* length = NULL;
2431  if (is_js_array) {
2432  length = Add<HLoadNamedField>(
2433  checked_object->ActualValue(), checked_object,
2434  HObjectAccess::ForArrayLength(elements_kind));
2435  } else {
2436  length = AddLoadFixedArrayLength(elements);
2437  }
2438  length->set_type(HType::Smi());
2439  HValue* checked_key = NULL;
2440  if (IsExternalArrayElementsKind(elements_kind) ||
2441  IsFixedTypedArrayElementsKind(elements_kind)) {
2442  HValue* backing_store;
2443  if (IsExternalArrayElementsKind(elements_kind)) {
2444  backing_store = Add<HLoadNamedField>(
2445  elements, static_cast<HValue*>(NULL),
2446  HObjectAccess::ForExternalArrayExternalPointer());
2447  } else {
2448  backing_store = elements;
2449  }
2450  if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
2451  NoObservableSideEffectsScope no_effects(this);
2452  IfBuilder length_checker(this);
2453  length_checker.If<HCompareNumericAndBranch>(key, length, Token::LT);
2454  length_checker.Then();
2455  IfBuilder negative_checker(this);
2456  HValue* bounds_check = negative_checker.If<HCompareNumericAndBranch>(
2457  key, graph()->GetConstant0(), Token::GTE);
2458  negative_checker.Then();
2459  HInstruction* result = AddElementAccess(
2460  backing_store, key, val, bounds_check, elements_kind, access_type);
2461  negative_checker.ElseDeopt("Negative key encountered");
2462  negative_checker.End();
2463  length_checker.End();
2464  return result;
2465  } else {
2466  DCHECK(store_mode == STANDARD_STORE);
2467  checked_key = Add<HBoundsCheck>(key, length);
2468  return AddElementAccess(
2469  backing_store, checked_key, val,
2470  checked_object, elements_kind, access_type);
2471  }
2472  }
2473  DCHECK(fast_smi_only_elements ||
2474  fast_elements ||
2475  IsFastDoubleElementsKind(elements_kind));
2476 
2477  // In case val is stored into a fast smi array, assure that the value is a smi
2478  // before manipulating the backing store. Otherwise the actual store may
2479  // deopt, leaving the backing store in an invalid state.
2480  if (access_type == STORE && IsFastSmiElementsKind(elements_kind) &&
2481  !val->type().IsSmi()) {
2482  val = AddUncasted<HForceRepresentation>(val, Representation::Smi());
2483  }
2484 
2485  if (IsGrowStoreMode(store_mode)) {
2486  NoObservableSideEffectsScope no_effects(this);
2487  Representation representation = HStoreKeyed::RequiredValueRepresentation(
2488  elements_kind, STORE_TO_INITIALIZED_ENTRY);
2489  val = AddUncasted<HForceRepresentation>(val, representation);
2490  elements = BuildCheckForCapacityGrow(checked_object, elements,
2491  elements_kind, length, key,
2492  is_js_array, access_type);
2493  checked_key = key;
2494  } else {
2495  checked_key = Add<HBoundsCheck>(key, length);
2496 
2497  if (access_type == STORE && (fast_elements || fast_smi_only_elements)) {
2498  if (store_mode == STORE_NO_TRANSITION_HANDLE_COW) {
2499  NoObservableSideEffectsScope no_effects(this);
2500  elements = BuildCopyElementsOnWrite(checked_object, elements,
2501  elements_kind, length);
2502  } else {
2503  HCheckMaps* check_cow_map = Add<HCheckMaps>(
2504  elements, isolate()->factory()->fixed_array_map());
2505  check_cow_map->ClearDependsOnFlag(kElementsKind);
2506  }
2507  }
2508  }
2509  return AddElementAccess(elements, checked_key, val, checked_object,
2510  elements_kind, access_type, load_mode);
2511 }
2512 
2513 
2514 HValue* HGraphBuilder::BuildAllocateArrayFromLength(
2515  JSArrayBuilder* array_builder,
2516  HValue* length_argument) {
2517  if (length_argument->IsConstant() &&
2518  HConstant::cast(length_argument)->HasSmiValue()) {
2519  int array_length = HConstant::cast(length_argument)->Integer32Value();
2520  if (array_length == 0) {
2521  return array_builder->AllocateEmptyArray();
2522  } else {
2523  return array_builder->AllocateArray(length_argument,
2524  array_length,
2525  length_argument);
2526  }
2527  }
2528 
2529  HValue* constant_zero = graph()->GetConstant0();
2530  HConstant* max_alloc_length =
2531  Add<HConstant>(JSObject::kInitialMaxFastElementArray);
2532  HInstruction* checked_length = Add<HBoundsCheck>(length_argument,
2533  max_alloc_length);
2534  IfBuilder if_builder(this);
2535  if_builder.If<HCompareNumericAndBranch>(checked_length, constant_zero,
2536  Token::EQ);
2537  if_builder.Then();
2538  const int initial_capacity = JSArray::kPreallocatedArrayElements;
2539  HConstant* initial_capacity_node = Add<HConstant>(initial_capacity);
2540  Push(initial_capacity_node); // capacity
2541  Push(constant_zero); // length
2542  if_builder.Else();
2543  if (!(top_info()->IsStub()) &&
2544  IsFastPackedElementsKind(array_builder->kind())) {
2545  // We'll come back later with better (holey) feedback.
2546  if_builder.Deopt("Holey array despite packed elements_kind feedback");
2547  } else {
2548  Push(checked_length); // capacity
2549  Push(checked_length); // length
2550  }
2551  if_builder.End();
2552 
2553  // Figure out total size
2554  HValue* length = Pop();
2555  HValue* capacity = Pop();
2556  return array_builder->AllocateArray(capacity, max_alloc_length, length);
2557 }
2558 
2559 
2560 HValue* HGraphBuilder::BuildCalculateElementsSize(ElementsKind kind,
2561  HValue* capacity) {
2562  int elements_size = IsFastDoubleElementsKind(kind)
2563  ? kDoubleSize
2564  : kPointerSize;
2565 
2566  HConstant* elements_size_value = Add<HConstant>(elements_size);
2567  HInstruction* mul = HMul::NewImul(zone(), context(),
2568  capacity->ActualValue(),
2569  elements_size_value);
2570  AddInstruction(mul);
2571  mul->ClearFlag(HValue::kCanOverflow);
2572 
2573  STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize);
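// I.e. total_size = capacity * element_size + FixedArray::kHeaderSize; the
// STATIC_ASSERT above lets the same header constant serve both array kinds.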
2574 
2575  HConstant* header_size = Add<HConstant>(FixedArray::kHeaderSize);
2576  HValue* total_size = AddUncasted<HAdd>(mul, header_size);
2577  total_size->ClearFlag(HValue::kCanOverflow);
2578  return total_size;
2579 }
2580 
2581 
2582 HAllocate* HGraphBuilder::AllocateJSArrayObject(AllocationSiteMode mode) {
2583  int base_size = JSArray::kSize;
2584  if (mode == TRACK_ALLOCATION_SITE) {
2585  base_size += AllocationMemento::kSize;
2586  }
2587  HConstant* size_in_bytes = Add<HConstant>(base_size);
2588  return Add<HAllocate>(
2589  size_in_bytes, HType::JSArray(), NOT_TENURED, JS_OBJECT_TYPE);
2590 }
2591 
2592 
2593 HConstant* HGraphBuilder::EstablishElementsAllocationSize(
2594  ElementsKind kind,
2595  int capacity) {
2596  int base_size = IsFastDoubleElementsKind(kind)
2597  ? FixedDoubleArray::SizeFor(capacity)
2598  : FixedArray::SizeFor(capacity);
2599 
2600  return Add<HConstant>(base_size);
2601 }
2602 
2603 
2604 HAllocate* HGraphBuilder::BuildAllocateElements(ElementsKind kind,
2605  HValue* size_in_bytes) {
2606  InstanceType instance_type = IsFastDoubleElementsKind(kind)
2607  ? FIXED_DOUBLE_ARRAY_TYPE
2608  : FIXED_ARRAY_TYPE;
2609 
2610  return Add<HAllocate>(size_in_bytes, HType::HeapObject(), NOT_TENURED,
2611  instance_type);
2612 }
2613 
2614 
2615 void HGraphBuilder::BuildInitializeElementsHeader(HValue* elements,
2616  ElementsKind kind,
2617  HValue* capacity) {
2618  Factory* factory = isolate()->factory();
2619  Handle<Map> map = IsFastDoubleElementsKind(kind)
2620  ? factory->fixed_double_array_map()
2621  : factory->fixed_array_map();
2622 
2623  Add<HStoreNamedField>(elements, HObjectAccess::ForMap(), Add<HConstant>(map));
2624  Add<HStoreNamedField>(elements, HObjectAccess::ForFixedArrayLength(),
2625  capacity);
2626 }
2627 
2628 
2629 HValue* HGraphBuilder::BuildAllocateElementsAndInitializeElementsHeader(
2630  ElementsKind kind,
2631  HValue* capacity) {
2632  // The HForceRepresentation is to prevent possible deopt on int-smi
2633  // conversion after allocation but before the new object fields are set.
2634  capacity = AddUncasted<HForceRepresentation>(capacity, Representation::Smi());
2635  HValue* size_in_bytes = BuildCalculateElementsSize(kind, capacity);
2636  HValue* new_elements = BuildAllocateElements(kind, size_in_bytes);
2637  BuildInitializeElementsHeader(new_elements, kind, capacity);
2638  return new_elements;
2639 }
2640 
2641 
2642 void HGraphBuilder::BuildJSArrayHeader(HValue* array,
2643  HValue* array_map,
2644  HValue* elements,
2645  AllocationSiteMode mode,
2646  ElementsKind elements_kind,
2647  HValue* allocation_site_payload,
2648  HValue* length_field) {
2649  Add<HStoreNamedField>(array, HObjectAccess::ForMap(), array_map);
2650 
2651  HConstant* empty_fixed_array =
2652  Add<HConstant>(isolate()->factory()->empty_fixed_array());
2653 
2654  Add<HStoreNamedField>(
2655  array, HObjectAccess::ForPropertiesPointer(), empty_fixed_array);
2656 
2657  Add<HStoreNamedField>(
2658  array, HObjectAccess::ForElementsPointer(),
2659  elements != NULL ? elements : empty_fixed_array);
2660 
2661  Add<HStoreNamedField>(
2662  array, HObjectAccess::ForArrayLength(elements_kind), length_field);
2663 
2664  if (mode == TRACK_ALLOCATION_SITE) {
2665  BuildCreateAllocationMemento(
2666  array, Add<HConstant>(JSArray::kSize), allocation_site_payload);
2667  }
2668 }
2669 
2670 
2671 HInstruction* HGraphBuilder::AddElementAccess(
2672  HValue* elements,
2673  HValue* checked_key,
2674  HValue* val,
2675  HValue* dependency,
2676  ElementsKind elements_kind,
2677  PropertyAccessType access_type,
2678  LoadKeyedHoleMode load_mode) {
2679  if (access_type == STORE) {
2680  DCHECK(val != NULL);
2681  if (elements_kind == EXTERNAL_UINT8_CLAMPED_ELEMENTS ||
2682  elements_kind == UINT8_CLAMPED_ELEMENTS) {
2683  val = Add<HClampToUint8>(val);
2684  }
2685  return Add<HStoreKeyed>(elements, checked_key, val, elements_kind,
2686  STORE_TO_INITIALIZED_ENTRY);
2687  }
2688 
2689  DCHECK(access_type == LOAD);
2690  DCHECK(val == NULL);
2691  HLoadKeyed* load = Add<HLoadKeyed>(
2692  elements, checked_key, dependency, elements_kind, load_mode);
2693  if (FLAG_opt_safe_uint32_operations &&
2694  (elements_kind == EXTERNAL_UINT32_ELEMENTS ||
2695  elements_kind == UINT32_ELEMENTS)) {
2696  graph()->RecordUint32Instruction(load);
2697  }
2698  return load;
2699 }
2700 
2701 
2702 HLoadNamedField* HGraphBuilder::AddLoadMap(HValue* object,
2703  HValue* dependency) {
2704  return Add<HLoadNamedField>(object, dependency, HObjectAccess::ForMap());
2705 }
2706 
2707 
2708 HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object,
2709  HValue* dependency) {
2710  return Add<HLoadNamedField>(
2711  object, dependency, HObjectAccess::ForElementsPointer());
2712 }
2713 
2714 
2715 HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(
2716  HValue* array,
2717  HValue* dependency) {
2718  return Add<HLoadNamedField>(
2719  array, dependency, HObjectAccess::ForFixedArrayLength());
2720 }
2721 
2722 
2723 HLoadNamedField* HGraphBuilder::AddLoadArrayLength(HValue* array,
2724  ElementsKind kind,
2725  HValue* dependency) {
2726  return Add<HLoadNamedField>(
2727  array, dependency, HObjectAccess::ForArrayLength(kind));
2728 }
2729 
2730 
2731 HValue* HGraphBuilder::BuildNewElementsCapacity(HValue* old_capacity) {
2732  HValue* half_old_capacity = AddUncasted<HShr>(old_capacity,
2733  graph_->GetConstant1());
2734 
2735  HValue* new_capacity = AddUncasted<HAdd>(half_old_capacity, old_capacity);
2736  new_capacity->ClearFlag(HValue::kCanOverflow);
2737 
2738  HValue* min_growth = Add<HConstant>(16);
2739 
2740  new_capacity = AddUncasted<HAdd>(new_capacity, min_growth);
2741  new_capacity->ClearFlag(HValue::kCanOverflow);
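// I.e. new_capacity = old_capacity + old_capacity / 2 + 16: grow by roughly
// 1.5x plus a small constant so that very small arrays do not have to
// regrow after every few additional elements.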
2742 
2743  return new_capacity;
2744 }
2745 
2746 
2747 HValue* HGraphBuilder::BuildGrowElementsCapacity(HValue* object,
2748  HValue* elements,
2749  ElementsKind kind,
2750  ElementsKind new_kind,
2751  HValue* length,
2752  HValue* new_capacity) {
2753  Add<HBoundsCheck>(new_capacity, Add<HConstant>(
2754  (Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >>
2755  ElementsKindToShiftSize(new_kind)));
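// Upper bound on the new capacity: the grown backing store must still fit
// in a regular (non-large-object-space) allocation, so the check above
// limits it to roughly (kMaxRegularHeapObjectSize - header) / element_size
// elements.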
2756 
2757  HValue* new_elements = BuildAllocateElementsAndInitializeElementsHeader(
2758  new_kind, new_capacity);
2759 
2760  BuildCopyElements(elements, kind, new_elements,
2761  new_kind, length, new_capacity);
2762 
2763  Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
2764  new_elements);
2765 
2766  return new_elements;
2767 }
2768 
2769 
2770 void HGraphBuilder::BuildFillElementsWithValue(HValue* elements,
2771  ElementsKind elements_kind,
2772  HValue* from,
2773  HValue* to,
2774  HValue* value) {
2775  if (to == NULL) {
2776  to = AddLoadFixedArrayLength(elements);
2777  }
2778 
2779  // Special loop unrolling case
2780  STATIC_ASSERT(JSArray::kPreallocatedArrayElements <=
2781  kElementLoopUnrollThreshold);
2782  int initial_capacity = -1;
2783  if (from->IsInteger32Constant() && to->IsInteger32Constant()) {
2784  int constant_from = from->GetInteger32Constant();
2785  int constant_to = to->GetInteger32Constant();
2786 
2787  if (constant_from == 0 && constant_to <= kElementLoopUnrollThreshold) {
2788  initial_capacity = constant_to;
2789  }
2790  }
2791 
2792  // Since we're about to store a hole value, the store instruction below must
2793  // assume an elements kind that supports heap object values.
2794  if (IsFastSmiOrObjectElementsKind(elements_kind)) {
2795  elements_kind = FAST_HOLEY_ELEMENTS;
2796  }
2797 
2798  if (initial_capacity >= 0) {
2799  for (int i = 0; i < initial_capacity; i++) {
2800  HInstruction* key = Add<HConstant>(i);
2801  Add<HStoreKeyed>(elements, key, value, elements_kind);
2802  }
2803  } else {
2804  // Carefully loop backwards so that the "from" value remains live through
2805  // the loop rather than the "to" value. This often corresponds to keeping
2806  // length live rather than capacity, which helps register allocation, since
2807  // length is used more often than capacity after filling with holes.
2808  LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);
2809 
2810  HValue* key = builder.BeginBody(to, from, Token::GT);
2811 
2812  HValue* adjusted_key = AddUncasted<HSub>(key, graph()->GetConstant1());
2813  adjusted_key->ClearFlag(HValue::kCanOverflow);
2814 
2815  Add<HStoreKeyed>(elements, adjusted_key, value, elements_kind);
2816 
2817  builder.EndBody();
2818  }
2819 }
2820 
2821 
2822 void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
2823  ElementsKind elements_kind,
2824  HValue* from,
2825  HValue* to) {
2826  // Fast elements kinds need to be initialized in case the statements below
2827  // trigger a garbage collection.
2828  Factory* factory = isolate()->factory();
2829 
2830  double nan_double = FixedDoubleArray::hole_nan_as_double();
2831  HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
2832  ? Add<HConstant>(factory->the_hole_value())
2833  : Add<HConstant>(nan_double);
2834 
2835  BuildFillElementsWithValue(elements, elements_kind, from, to, hole);
2836 }
2837 
2838 
2839 void HGraphBuilder::BuildCopyElements(HValue* from_elements,
2840  ElementsKind from_elements_kind,
2841  HValue* to_elements,
2842  ElementsKind to_elements_kind,
2843  HValue* length,
2844  HValue* capacity) {
2845  int constant_capacity = -1;
2846  if (capacity != NULL &&
2847  capacity->IsConstant() &&
2848  HConstant::cast(capacity)->HasInteger32Value()) {
2849  int constant_candidate = HConstant::cast(capacity)->Integer32Value();
2850  if (constant_candidate <= kElementLoopUnrollThreshold) {
2851  constant_capacity = constant_candidate;
2852  }
2853  }
2854 
2855  bool pre_fill_with_holes =
2856  IsFastDoubleElementsKind(from_elements_kind) &&
2857  IsFastObjectElementsKind(to_elements_kind);
2858  if (pre_fill_with_holes) {
2859  // If the copy might trigger a GC, make sure that the FixedArray is
2860  // pre-initialized with holes to make sure that it's always in a
2861  // consistent state.
2862  BuildFillElementsWithHole(to_elements, to_elements_kind,
2863  graph()->GetConstant0(), NULL);
2864  }
2865 
2866  if (constant_capacity != -1) {
2867  // Unroll the loop for small elements kinds.
2868  for (int i = 0; i < constant_capacity; i++) {
2869  HValue* key_constant = Add<HConstant>(i);
2870  HInstruction* value = Add<HLoadKeyed>(from_elements, key_constant,
2871  static_cast<HValue*>(NULL),
2872  from_elements_kind);
2873  Add<HStoreKeyed>(to_elements, key_constant, value, to_elements_kind);
2874  }
2875  } else {
2876  if (!pre_fill_with_holes &&
2877  (capacity == NULL || !length->Equals(capacity))) {
2878  BuildFillElementsWithHole(to_elements, to_elements_kind,
2879  length, NULL);
2880  }
2881 
2882  if (capacity == NULL) {
2883  capacity = AddLoadFixedArrayLength(to_elements);
2884  }
2885 
2886  LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);
2887 
2888  HValue* key = builder.BeginBody(length, graph()->GetConstant0(),
2889  Token::GT);
2890 
2891  key = AddUncasted<HSub>(key, graph()->GetConstant1());
2892  key->ClearFlag(HValue::kCanOverflow);
2893 
2894  HValue* element = Add<HLoadKeyed>(from_elements, key,
2895  static_cast<HValue*>(NULL),
2896  from_elements_kind,
2897  ALLOW_RETURN_HOLE);
2898 
2899  ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
2900  IsFastSmiElementsKind(to_elements_kind))
2901  ? FAST_HOLEY_ELEMENTS : to_elements_kind;
2902 
2903  if (IsHoleyElementsKind(from_elements_kind) &&
2904  from_elements_kind != to_elements_kind) {
2905  IfBuilder if_hole(this);
2906  if_hole.If<HCompareHoleAndBranch>(element);
2907  if_hole.Then();
2908  HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind)
2909  ? Add<HConstant>(FixedDoubleArray::hole_nan_as_double())
2910  : graph()->GetConstantHole();
2911  Add<HStoreKeyed>(to_elements, key, hole_constant, kind);
2912  if_hole.Else();
2913  HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
2914  store->SetFlag(HValue::kAllowUndefinedAsNaN);
2915  if_hole.End();
2916  } else {
2917  HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
2918  store->SetFlag(HValue::kAllowUndefinedAsNaN);
2919  }
2920 
2921  builder.EndBody();
2922  }
2923 
2924  Counters* counters = isolate()->counters();
2925  AddIncrementCounter(counters->inlined_copied_elements());
2926 }
2927 
2928 
2929 HValue* HGraphBuilder::BuildCloneShallowArrayCow(HValue* boilerplate,
2930  HValue* allocation_site,
2931  AllocationSiteMode mode,
2932  ElementsKind kind) {
2933  HAllocate* array = AllocateJSArrayObject(mode);
2934 
2935  HValue* map = AddLoadMap(boilerplate);
2936  HValue* elements = AddLoadElements(boilerplate);
2937  HValue* length = AddLoadArrayLength(boilerplate, kind);
2938 
2939  BuildJSArrayHeader(array,
2940  map,
2941  elements,
2942  mode,
2943  FAST_ELEMENTS,
2944  allocation_site,
2945  length);
2946  return array;
2947 }
2948 
2949 
2950 HValue* HGraphBuilder::BuildCloneShallowArrayEmpty(HValue* boilerplate,
2951  HValue* allocation_site,
2952  AllocationSiteMode mode) {
2953  HAllocate* array = AllocateJSArrayObject(mode);
2954 
2955  HValue* map = AddLoadMap(boilerplate);
2956 
2957  BuildJSArrayHeader(array,
2958  map,
2959  NULL, // set elements to empty fixed array
2960  mode,
2961  FAST_ELEMENTS,
2962  allocation_site,
2963  graph()->GetConstant0());
2964  return array;
2965 }
2966 
2967 
2968 HValue* HGraphBuilder::BuildCloneShallowArrayNonEmpty(HValue* boilerplate,
2969  HValue* allocation_site,
2970  AllocationSiteMode mode,
2971  ElementsKind kind) {
2972  HValue* boilerplate_elements = AddLoadElements(boilerplate);
2973  HValue* capacity = AddLoadFixedArrayLength(boilerplate_elements);
2974 
2975  // Generate size calculation code here in order to make it dominate
2976  // the JSArray allocation.
2977  HValue* elements_size = BuildCalculateElementsSize(kind, capacity);
2978 
2979  // Create empty JSArray object for now, store elimination should remove
2980  // redundant initialization of elements and length fields and at the same
2981  // time the object will be fully prepared for GC if it happens during
2982  // elements allocation.
2983  HValue* result = BuildCloneShallowArrayEmpty(
2984  boilerplate, allocation_site, mode);
2985 
2986  HAllocate* elements = BuildAllocateElements(kind, elements_size);
2987 
2988  // This function implicitly relies on the fact that the
2989  // FastCloneShallowArrayStub is called only for literals shorter than
2990  // JSObject::kInitialMaxFastElementArray.
2991  // Can't add an HBoundsCheck here because otherwise the stub would eagerly require a frame.
2992  HConstant* size_upper_bound = EstablishElementsAllocationSize(
2993  kind, JSObject::kInitialMaxFastElementArray);
2994  elements->set_size_upper_bound(size_upper_bound);
2995 
2996  Add<HStoreNamedField>(result, HObjectAccess::ForElementsPointer(), elements);
2997 
2998  // The allocation for the cloned array above causes register pressure on
2999  // machines with low register counts. Force a reload of the boilerplate
3000  // elements here to free up a register for the allocation to avoid unnecessary
3001  // spillage.
3002  boilerplate_elements = AddLoadElements(boilerplate);
3003  boilerplate_elements->SetFlag(HValue::kCantBeReplaced);
3004 
3005  // Copy the elements array header.
3006  for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
3007  HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
3008  Add<HStoreNamedField>(elements, access,
3009  Add<HLoadNamedField>(boilerplate_elements,
3010  static_cast<HValue*>(NULL), access));
3011  }
3012 
3013  // And set the result's length field.
3014  HValue* length = AddLoadArrayLength(boilerplate, kind);
3015  Add<HStoreNamedField>(result, HObjectAccess::ForArrayLength(kind), length);
3016 
3017  BuildCopyElements(boilerplate_elements, kind, elements,
3018  kind, length, NULL);
3019  return result;
3020 }
3021 
3022 
3023 void HGraphBuilder::BuildCompareNil(
3024  HValue* value,
3025  Type* type,
3026  HIfContinuation* continuation) {
3027  IfBuilder if_nil(this);
3028  bool some_case_handled = false;
3029  bool some_case_missing = false;
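// The check is assembled as an Or-chain: one branch condition per nil-like
// case (null, undefined, undetectable) that the type may include; cases the
// type rules out are noted in some_case_missing so the Else path can either
// map-check the single remaining class or deoptimize.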
3030 
3031  if (type->Maybe(Type::Null())) {
3032  if (some_case_handled) if_nil.Or();
3033  if_nil.If<HCompareObjectEqAndBranch>(value, graph()->GetConstantNull());
3034  some_case_handled = true;
3035  } else {
3036  some_case_missing = true;
3037  }
3038 
3039  if (type->Maybe(Type::Undefined())) {
3040  if (some_case_handled) if_nil.Or();
3041  if_nil.If<HCompareObjectEqAndBranch>(value,
3042  graph()->GetConstantUndefined());
3043  some_case_handled = true;
3044  } else {
3045  some_case_missing = true;
3046  }
3047 
3048  if (type->Maybe(Type::Undetectable())) {
3049  if (some_case_handled) if_nil.Or();
3050  if_nil.If<HIsUndetectableAndBranch>(value);
3051  some_case_handled = true;
3052  } else {
3053  some_case_missing = true;
3054  }
3055 
3056  if (some_case_missing) {
3057  if_nil.Then();
3058  if_nil.Else();
3059  if (type->NumClasses() == 1) {
3060  BuildCheckHeapObject(value);
3061  // For ICs, the map checked below is a sentinel map that gets replaced by
3062  // the monomorphic map when the code is used as a template to generate a
3063  // new IC. For optimized functions, there is no sentinel map, the map
3064  // emitted below is the actual monomorphic map.
3065  Add<HCheckMaps>(value, type->Classes().Current());
3066  } else {
3067  if_nil.Deopt("Too many undetectable types");
3068  }
3069  }
3070 
3071  if_nil.CaptureContinuation(continuation);
3072 }
3073 
3074 
3075 void HGraphBuilder::BuildCreateAllocationMemento(
3076  HValue* previous_object,
3077  HValue* previous_object_size,
3078  HValue* allocation_site) {
3079  DCHECK(allocation_site != NULL);
3080  HInnerAllocatedObject* allocation_memento = Add<HInnerAllocatedObject>(
3081  previous_object, previous_object_size, HType::HeapObject());
3082  AddStoreMapConstant(
3083  allocation_memento, isolate()->factory()->allocation_memento_map());
3084  Add<HStoreNamedField>(
3085  allocation_memento,
3086  HObjectAccess::ForAllocationMementoSite(),
3087  allocation_site);
3088  if (FLAG_allocation_site_pretenuring) {
3089  HValue* memento_create_count = Add<HLoadNamedField>(
3090  allocation_site, static_cast<HValue*>(NULL),
3091  HObjectAccess::ForAllocationSiteOffset(
3092  AllocationSite::kPretenureCreateCountOffset));
3093  memento_create_count = AddUncasted<HAdd>(
3094  memento_create_count, graph()->GetConstant1());
3095  // This smi value is reset to zero after every gc, overflow isn't a problem
3096  // since the counter is bounded by the new space size.
3097  memento_create_count->ClearFlag(HValue::kCanOverflow);
3098  Add<HStoreNamedField>(
3099  allocation_site, HObjectAccess::ForAllocationSiteOffset(
3100  AllocationSite::kPretenureCreateCountOffset), memento_create_count);
3101  }
3102 }
3103 
3104 
3105 HInstruction* HGraphBuilder::BuildGetNativeContext(HValue* closure) {
3106  // Get the global context, then the native context
3107  HInstruction* context =
3108  Add<HLoadNamedField>(closure, static_cast<HValue*>(NULL),
3109  HObjectAccess::ForFunctionContextPointer());
3110  HInstruction* global_object = Add<HLoadNamedField>(
3111  context, static_cast<HValue*>(NULL),
3112  HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
3113  HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(
3114  GlobalObject::kNativeContextOffset);
3115  return Add<HLoadNamedField>(
3116  global_object, static_cast<HValue*>(NULL), access);
3117 }
3118 
3119 
3120 HInstruction* HGraphBuilder::BuildGetNativeContext() {
3121  // Get the global context, then the native context
3122  HValue* global_object = Add<HLoadNamedField>(
3123  context(), static_cast<HValue*>(NULL),
3124  HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
3125  return Add<HLoadNamedField>(
3126  global_object, static_cast<HValue*>(NULL),
3127  HObjectAccess::ForObservableJSObjectOffset(
3128  GlobalObject::kNativeContextOffset));
3129 }
3130 
3131 
3132 HInstruction* HGraphBuilder::BuildGetArrayFunction() {
3133  HInstruction* native_context = BuildGetNativeContext();
3134  HInstruction* index =
3135  Add<HConstant>(static_cast<int32_t>(Context::ARRAY_FUNCTION_INDEX));
3136  return Add<HLoadKeyed>(
3137  native_context, index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
3138 }
3139 
3140 
3141 HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
3142  ElementsKind kind,
3143  HValue* allocation_site_payload,
3144  HValue* constructor_function,
3145  AllocationSiteOverrideMode override_mode) :
3146  builder_(builder),
3147  kind_(kind),
3148  allocation_site_payload_(allocation_site_payload),
3149  constructor_function_(constructor_function) {
3150  DCHECK(!allocation_site_payload->IsConstant() ||
3151  HConstant::cast(allocation_site_payload)->handle(
3152  builder_->isolate())->IsAllocationSite());
3153  mode_ = override_mode == DISABLE_ALLOCATION_SITES
3154  ? DONT_TRACK_ALLOCATION_SITE
3155  : AllocationSite::GetMode(kind);
3156 }
3157 
3158 
3159 HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
3160  ElementsKind kind,
3161  HValue* constructor_function) :
3162  builder_(builder),
3163  kind_(kind),
3164  mode_(DONT_TRACK_ALLOCATION_SITE),
3165  allocation_site_payload_(NULL),
3166  constructor_function_(constructor_function) {
3167 }
3168 
3169 
3170 HValue* HGraphBuilder::JSArrayBuilder::EmitMapCode() {
3171  if (!builder()->top_info()->IsStub()) {
3172  // A constant map is fine.
3173  Handle<Map> map(builder()->isolate()->get_initial_js_array_map(kind_),
3174  builder()->isolate());
3175  return builder()->Add<HConstant>(map);
3176  }
3177 
3178  if (constructor_function_ != NULL && kind_ == GetInitialFastElementsKind()) {
3179  // No need for a context lookup if the kind_ matches the initial
3180  // map, because we can just load the map in that case.
3181  HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
3182  return builder()->Add<HLoadNamedField>(
3183  constructor_function_, static_cast<HValue*>(NULL), access);
3184  }
3185 
3186  // TODO(mvstanton): we should always have a constructor function if we
3187  // are creating a stub.
3188  HInstruction* native_context = constructor_function_ != NULL
3189  ? builder()->BuildGetNativeContext(constructor_function_)
3190  : builder()->BuildGetNativeContext();
3191 
3192  HInstruction* index = builder()->Add<HConstant>(
3193  static_cast<int32_t>(Context::JS_ARRAY_MAPS_INDEX));
3194 
3195  HInstruction* map_array = builder()->Add<HLoadKeyed>(
3196  native_context, index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
3197 
3198  HInstruction* kind_index = builder()->Add<HConstant>(kind_);
3199 
3200  return builder()->Add<HLoadKeyed>(
3201  map_array, kind_index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
3202 }
3203 
3204 
3205 HValue* HGraphBuilder::JSArrayBuilder::EmitInternalMapCode() {
3206  // Find the map near the constructor function
3207  HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
3208  return builder()->Add<HLoadNamedField>(
3209  constructor_function_, static_cast<HValue*>(NULL), access);
3210 }
3211 
3212 
3213 HAllocate* HGraphBuilder::JSArrayBuilder::AllocateEmptyArray() {
3214  HConstant* capacity = builder()->Add<HConstant>(initial_capacity());
3215  return AllocateArray(capacity,
3216  capacity,
3217  builder()->graph()->GetConstant0());
3218 }
3219 
3220 
3221 HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray(
3222  HValue* capacity,
3223  HConstant* capacity_upper_bound,
3224  HValue* length_field,
3225  FillMode fill_mode) {
3226  return AllocateArray(capacity,
3227  capacity_upper_bound->GetInteger32Constant(),
3228  length_field,
3229  fill_mode);
3230 }
3231 
3232 
3233 HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray(
3234  HValue* capacity,
3235  int capacity_upper_bound,
3236  HValue* length_field,
3237  FillMode fill_mode) {
3238  HConstant* elements_size_upper_bound = capacity->IsInteger32Constant()
3239  ? HConstant::cast(capacity)
3240  : builder()->EstablishElementsAllocationSize(kind_, capacity_upper_bound);
3241 
3242  HAllocate* array = AllocateArray(capacity, length_field, fill_mode);
3243  if (!elements_location_->has_size_upper_bound()) {
3244  elements_location_->set_size_upper_bound(elements_size_upper_bound);
3245  }
3246  return array;
3247 }
3248 
3249 
3250 HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray(
3251  HValue* capacity,
3252  HValue* length_field,
3253  FillMode fill_mode) {
3254  // These HForceRepresentations are because we store these as fields in the
3255  // objects we construct, and an int32-to-smi HChange could deopt. Accept
3256  // the deopt possibility now, before allocation occurs.
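  // (E.g. a capacity value that does not fit in a Smi deopts right here,
  // before the JSArray and its elements backing store are allocated,
  // rather than at a later int32-to-Smi change on an already-allocated
  // object.)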
3257  capacity =
3258  builder()->AddUncasted<HForceRepresentation>(capacity,
3259  Representation::Smi());
3260  length_field =
3261  builder()->AddUncasted<HForceRepresentation>(length_field,
3262  Representation::Smi());
3263 
3264  // Generate size calculation code here in order to make it dominate
3265  // the JSArray allocation.
3266  HValue* elements_size =
3267  builder()->BuildCalculateElementsSize(kind_, capacity);
3268 
3269  // Allocate (dealing with failure appropriately)
3270  HAllocate* array_object = builder()->AllocateJSArrayObject(mode_);
3271 
3272  // Fill in the fields: map, properties, length
3273  HValue* map;
3274  if (allocation_site_payload_ == NULL) {
3275  map = EmitInternalMapCode();
3276  } else {
3277  map = EmitMapCode();
3278  }
3279 
3280  builder()->BuildJSArrayHeader(array_object,
3281  map,
3282  NULL, // set elements to empty fixed array
3283  mode_,
3284  kind_,
3285  allocation_site_payload_,
3286  length_field);
3287 
3288  // Allocate and initialize the elements
3289  elements_location_ = builder()->BuildAllocateElements(kind_, elements_size);
3290 
3291  builder()->BuildInitializeElementsHeader(elements_location_, kind_, capacity);
3292 
3293  // Set the elements
3294  builder()->Add<HStoreNamedField>(
3295  array_object, HObjectAccess::ForElementsPointer(), elements_location_);
3296 
3297  if (fill_mode == FILL_WITH_HOLE) {
3298  builder()->BuildFillElementsWithHole(elements_location_, kind_,
3299  graph()->GetConstant0(), capacity);
3300  }
3301 
3302  return array_object;
3303 }
3304 
3305 
3306 HValue* HGraphBuilder::AddLoadJSBuiltin(Builtins::JavaScript builtin) {
3307  HValue* global_object = Add<HLoadNamedField>(
3308  context(), static_cast<HValue*>(NULL),
3309  HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
3310  HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(
3311  GlobalObject::kBuiltinsOffset);
3312  HValue* builtins = Add<HLoadNamedField>(
3313  global_object, static_cast<HValue*>(NULL), access);
3314  HObjectAccess function_access = HObjectAccess::ForObservableJSObjectOffset(
3315  JSBuiltinsObject::OffsetOfFunctionWithId(builtin));
3316  return Add<HLoadNamedField>(
3317  builtins, static_cast<HValue*>(NULL), function_access);
3318 }
3319 
3320 
3321 HOptimizedGraphBuilder::HOptimizedGraphBuilder(CompilationInfo* info)
3322  : HGraphBuilder(info),
3323  function_state_(NULL),
3324  initial_function_state_(this, info, NORMAL_RETURN, 0),
3325  ast_context_(NULL),
3326  break_scope_(NULL),
3327  inlined_count_(0),
3328  globals_(10, info->zone()),
3329  osr_(new(info->zone()) HOsrBuilder(this)) {
3330  // This is not initialized in the initializer list because the
3331  // constructor for the initial state relies on function_state_ == NULL
3332  // to know it's the initial state.
3333  function_state_ = &initial_function_state_;
3334  InitializeAstVisitor(info->zone());
3335  if (FLAG_hydrogen_track_positions) {
3336  SetSourcePosition(info->shared_info()->start_position());
3337  }
3338 }
3339 
3340 
3341 HBasicBlock* HOptimizedGraphBuilder::CreateJoin(HBasicBlock* first,
3342  HBasicBlock* second,
3343  BailoutId join_id) {
3344  if (first == NULL) {
3345  return second;
3346  } else if (second == NULL) {
3347  return first;
3348  } else {
3349  HBasicBlock* join_block = graph()->CreateBasicBlock();
3350  Goto(first, join_block);
3351  Goto(second, join_block);
3352  join_block->SetJoinId(join_id);
3353  return join_block;
3354  }
3355 }
3356 
3357 
3358 HBasicBlock* HOptimizedGraphBuilder::JoinContinue(IterationStatement* statement,
3359  HBasicBlock* exit_block,
3360  HBasicBlock* continue_block) {
3361  if (continue_block != NULL) {
3362  if (exit_block != NULL) Goto(exit_block, continue_block);
3363  continue_block->SetJoinId(statement->ContinueId());
3364  return continue_block;
3365  }
3366  return exit_block;
3367 }
3368 
3369 
3370 HBasicBlock* HOptimizedGraphBuilder::CreateLoop(IterationStatement* statement,
3371  HBasicBlock* loop_entry,
3372  HBasicBlock* body_exit,
3373  HBasicBlock* loop_successor,
3374  HBasicBlock* break_block) {
3375  if (body_exit != NULL) Goto(body_exit, loop_entry);
3376  loop_entry->PostProcessLoopHeader(statement);
3377  if (break_block != NULL) {
3378  if (loop_successor != NULL) Goto(loop_successor, break_block);
3379  break_block->SetJoinId(statement->ExitId());
3380  return break_block;
3381  }
3382  return loop_successor;
3383 }
3384 
3385 
3386 // Build a new loop header block and set it as the current block.
3387 HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry() {
3388  HBasicBlock* loop_entry = CreateLoopHeaderBlock();
3389  Goto(loop_entry);
3390  set_current_block(loop_entry);
3391  return loop_entry;
3392 }
3393 
3394 
3395 HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry(
3396  IterationStatement* statement) {
3397  HBasicBlock* loop_entry = osr()->HasOsrEntryAt(statement)
3398  ? osr()->BuildOsrLoopEntry(statement)
3399  : BuildLoopEntry();
3400  return loop_entry;
3401 }
3402 
3403 
3404 void HBasicBlock::FinishExit(HControlInstruction* instruction,
3405  HSourcePosition position) {
3406  Finish(instruction, position);
3407  ClearEnvironment();
3408 }
3409 
3410 
3411 OStream& operator<<(OStream& os, const HBasicBlock& b) {
3412  return os << "B" << b.block_id();
3413 }
3414 
3415 
3416 HGraph::HGraph(CompilationInfo* info)
3417  : isolate_(info->isolate()),
3418  next_block_id_(0),
3419  entry_block_(NULL),
3420  blocks_(8, info->zone()),
3421  values_(16, info->zone()),
3422  phi_list_(NULL),
3423  uint32_instructions_(NULL),
3424  osr_(NULL),
3425  info_(info),
3426  zone_(info->zone()),
3427  is_recursive_(false),
3428  use_optimistic_licm_(false),
3429  depends_on_empty_array_proto_elements_(false),
3430  type_change_checksum_(0),
3431  maximum_environment_size_(0),
3432  no_side_effects_scope_count_(0),
3433  disallow_adding_new_values_(false),
3434  next_inline_id_(0),
3435  inlined_functions_(5, info->zone()) {
3436  if (info->IsStub()) {
3437  CallInterfaceDescriptor descriptor =
3438  info->code_stub()->GetCallInterfaceDescriptor();
3439  start_environment_ = new (zone_)
3440  HEnvironment(zone_, descriptor.GetEnvironmentParameterCount());
3441  } else {
3442  TraceInlinedFunction(info->shared_info(), HSourcePosition::Unknown());
3443  start_environment_ =
3444  new(zone_) HEnvironment(NULL, info->scope(), info->closure(), zone_);
3445  }
3446  start_environment_->set_ast_id(BailoutId::FunctionEntry());
3447  entry_block_ = CreateBasicBlock();
3448  entry_block_->SetInitialEnvironment(start_environment_);
3449 }
3450 
3451 
3452 HBasicBlock* HGraph::CreateBasicBlock() {
3453  HBasicBlock* result = new(zone()) HBasicBlock(this);
3454  blocks_.Add(result, zone());
3455  return result;
3456 }
3457 
3458 
3459 void HGraph::FinalizeUniqueness() {
3460  DisallowHeapAllocation no_gc;
3461  DCHECK(!OptimizingCompilerThread::IsOptimizerThread(isolate()));
3462  for (int i = 0; i < blocks()->length(); ++i) {
3463  for (HInstructionIterator it(blocks()->at(i)); !it.Done(); it.Advance()) {
3464  it.Current()->FinalizeUniqueness();
3465  }
3466  }
3467 }
3468 
3469 
3470 int HGraph::TraceInlinedFunction(
3471  Handle<SharedFunctionInfo> shared,
3472  HSourcePosition position) {
3473  if (!FLAG_hydrogen_track_positions) {
3474  return 0;
3475  }
3476 
3477  int id = 0;
3478  for (; id < inlined_functions_.length(); id++) {
3479  if (inlined_functions_[id].shared().is_identical_to(shared)) {
3480  break;
3481  }
3482  }
3483 
3484  if (id == inlined_functions_.length()) {
3485  inlined_functions_.Add(InlinedFunctionInfo(shared), zone());
3486 
3487  if (!shared->script()->IsUndefined()) {
3488  Handle<Script> script(Script::cast(shared->script()));
3489  if (!script->source()->IsUndefined()) {
3490  CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
3491  OFStream os(tracing_scope.file());
3492  os << "--- FUNCTION SOURCE (" << shared->DebugName()->ToCString().get()
3493  << ") id{" << info()->optimization_id() << "," << id << "} ---\n";
3494  {
3495  ConsStringIteratorOp op;
3496  StringCharacterStream stream(String::cast(script->source()),
3497  &op,
3498  shared->start_position());
3499  // fun->end_position() points to the last character in the stream. We
3500  // need to compensate by adding one to calculate the length.
3501  int source_len =
3502  shared->end_position() - shared->start_position() + 1;
3503  for (int i = 0; i < source_len; i++) {
3504  if (stream.HasMore()) {
3505  os << AsReversiblyEscapedUC16(stream.GetNext());
3506  }
3507  }
3508  }
3509 
3510  os << "\n--- END ---\n";
3511  }
3512  }
3513  }
3514 
3515  int inline_id = next_inline_id_++;
3516 
3517  if (inline_id != 0) {
3518  CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
3519  OFStream os(tracing_scope.file());
3520  os << "INLINE (" << shared->DebugName()->ToCString().get() << ") id{"
3521  << info()->optimization_id() << "," << id << "} AS " << inline_id
3522  << " AT " << position << endl;
3523  }
3524 
3525  return inline_id;
3526 }
3527 
3528 
3529 int HGraph::SourcePositionToScriptPosition(HSourcePosition pos) {
3530  if (!FLAG_hydrogen_track_positions || pos.IsUnknown()) {
3531  return pos.raw();
3532  }
3533 
3534  return inlined_functions_[pos.inlining_id()].start_position() +
3535  pos.position();
3536 }
3537 
3538 
3539 // Block ordering was implemented with two mutually recursive methods,
3540 // HGraph::Postorder and HGraph::PostorderLoopBlocks.
3541 // The recursion could lead to stack overflow so the algorithm has been
3542 // implemented iteratively.
3543 // At a high level the algorithm looks like this:
3544 //
3545 // Postorder(block, loop_header) : {
3546 // if (block has already been visited or is of another loop) return;
3547 // mark block as visited;
3548 // if (block is a loop header) {
3549 // VisitLoopMembers(block, loop_header);
3550 // VisitSuccessorsOfLoopHeader(block);
3551 // } else {
3552 // VisitSuccessors(block)
3553 // }
3554 // put block in result list;
3555 // }
3556 //
3557 // VisitLoopMembers(block, outer_loop_header) {
3558 // foreach (block b in block loop members) {
3559 // VisitSuccessorsOfLoopMember(b, outer_loop_header);
3560 // if (b is loop header) VisitLoopMembers(b);
3561 // }
3562 // }
3563 //
3564 // VisitSuccessorsOfLoopMember(block, outer_loop_header) {
3565 // foreach (block b in block successors) Postorder(b, outer_loop_header)
3566 // }
3567 //
3568 // VisitSuccessorsOfLoopHeader(block) {
3569 // foreach (block b in block successors) Postorder(b, block)
3570 // }
3571 //
3572 // VisitSuccessors(block, loop_header) {
3573 // foreach (block b in block successors) Postorder(b, loop_header)
3574 // }
3575 //
3576 // The ordering is started calling Postorder(entry, NULL).
3577 //
3578 // Each instance of PostorderProcessor represents the "stack frame" of the
3579 // recursion, and particularly keeps the state of the loop (iteration) of the
3580 // "Visit..." function it represents.
3581 // To recycle memory we keep all the frames in a double linked list but
3582 // this means that we cannot use constructors to initialize the frames.
3583 //
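// For reference, the iterative driver in HGraph::OrderBlocks() below uses
// this class roughly as follows:
//
//   PostorderProcessor* p =
//       PostorderProcessor::CreateEntryProcessor(zone(), blocks_[0]);
//   while (p != NULL) p = p->PerformStep(zone(), &blocks_);
//
// Each PerformStep() either pushes a child frame (the analogue of a
// recursive call) or backtracks to its parent frame (the analogue of a
// return), so the father_/child_ links stand in for the machine stack.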
3584 class PostorderProcessor : public ZoneObject {
3585  public:
3586  // Back link (towards the stack bottom).
3587  PostorderProcessor* parent() {return father_; }
3588  // Forward link (towards the stack top).
3589  PostorderProcessor* child() {return child_; }
3590  HBasicBlock* block() { return block_; }
3591  HLoopInformation* loop() { return loop_; }
3592  HBasicBlock* loop_header() { return loop_header_; }
3593 
3594  static PostorderProcessor* CreateEntryProcessor(Zone* zone,
3595  HBasicBlock* block) {
3596  PostorderProcessor* result = new(zone) PostorderProcessor(NULL);
3597  return result->SetupSuccessors(zone, block, NULL);
3598  }
3599 
3600  PostorderProcessor* PerformStep(Zone* zone,
3601  ZoneList<HBasicBlock*>* order) {
3602  PostorderProcessor* next =
3603  PerformNonBacktrackingStep(zone, order);
3604  if (next != NULL) {
3605  return next;
3606  } else {
3607  return Backtrack(zone, order);
3608  }
3609  }
3610 
3611  private:
3612  explicit PostorderProcessor(PostorderProcessor* father)
3613  : father_(father), child_(NULL), successor_iterator(NULL) { }
3614 
3615  // Each enum value states the cycle whose state is kept by this instance.
3616  enum LoopKind {
3617  NONE,
3618  SUCCESSORS,
3619  SUCCESSORS_OF_LOOP_HEADER,
3620  LOOP_MEMBERS,
3621  SUCCESSORS_OF_LOOP_MEMBER
3622  };
3623 
3624  // Each "Setup..." method is like a constructor for a cycle state.
3625  PostorderProcessor* SetupSuccessors(Zone* zone,
3626  HBasicBlock* block,
3627  HBasicBlock* loop_header) {
3628  if (block == NULL || block->IsOrdered() ||
3629  block->parent_loop_header() != loop_header) {
3630  kind_ = NONE;
3631  block_ = NULL;
3632  loop_ = NULL;
3633  loop_header_ = NULL;
3634  return this;
3635  } else {
3636  block_ = block;
3637  loop_ = NULL;
3638  block->MarkAsOrdered();
3639 
3640  if (block->IsLoopHeader()) {
3641  kind_ = SUCCESSORS_OF_LOOP_HEADER;
3642  loop_header_ = block;
3643  InitializeSuccessors();
3644  PostorderProcessor* result = Push(zone);
3645  return result->SetupLoopMembers(zone, block, block->loop_information(),
3646  loop_header);
3647  } else {
3648  DCHECK(block->IsFinished());
3649  kind_ = SUCCESSORS;
3650  loop_header_ = loop_header;
3651  InitializeSuccessors();
3652  return this;
3653  }
3654  }
3655  }
3656 
3657  PostorderProcessor* SetupLoopMembers(Zone* zone,
3658  HBasicBlock* block,
3659  HLoopInformation* loop,
3660  HBasicBlock* loop_header) {
3661  kind_ = LOOP_MEMBERS;
3662  block_ = block;
3663  loop_ = loop;
3664  loop_header_ = loop_header;
3665  InitializeLoopMembers();
3666  return this;
3667  }
3668 
3669  PostorderProcessor* SetupSuccessorsOfLoopMember(
3670  HBasicBlock* block,
3671  HLoopInformation* loop,
3672  HBasicBlock* loop_header) {
3673  kind_ = SUCCESSORS_OF_LOOP_MEMBER;
3674  block_ = block;
3675  loop_ = loop;
3676  loop_header_ = loop_header;
3677  InitializeSuccessors();
3678  return this;
3679  }
3680 
3681  // This method "allocates" a new stack frame.
3682  PostorderProcessor* Push(Zone* zone) {
3683  if (child_ == NULL) {
3684  child_ = new(zone) PostorderProcessor(this);
3685  }
3686  return child_;
3687  }
3688 
3689  void ClosePostorder(ZoneList<HBasicBlock*>* order, Zone* zone) {
3690  DCHECK(block_->end()->FirstSuccessor() == NULL ||
3691  order->Contains(block_->end()->FirstSuccessor()) ||
3692  block_->end()->FirstSuccessor()->IsLoopHeader());
3693  DCHECK(block_->end()->SecondSuccessor() == NULL ||
3694  order->Contains(block_->end()->SecondSuccessor()) ||
3695  block_->end()->SecondSuccessor()->IsLoopHeader());
3696  order->Add(block_, zone);
3697  }
3698 
3699  // This method is the basic block to walk up the stack.
3700  PostorderProcessor* Pop(Zone* zone,
3701  ZoneList<HBasicBlock*>* order) {
3702  switch (kind_) {
3703  case SUCCESSORS:
3704  case SUCCESSORS_OF_LOOP_HEADER:
3705  ClosePostorder(order, zone);
3706  return father_;
3707  case LOOP_MEMBERS:
3708  return father_;
3709  case SUCCESSORS_OF_LOOP_MEMBER:
3710  if (block()->IsLoopHeader() && block() != loop_->loop_header()) {
3711  // In this case we need to perform a LOOP_MEMBERS cycle so we
3712  // initialize it and return this instead of father.
3713  return SetupLoopMembers(zone, block(),
3714  block()->loop_information(), loop_header_);
3715  } else {
3716  return father_;
3717  }
3718  case NONE:
3719  return father_;
3720  }
3721  UNREACHABLE();
3722  return NULL;
3723  }
3724 
3725  // Walks up the stack.
3726  PostorderProcessor* Backtrack(Zone* zone,
3727  ZoneList<HBasicBlock*>* order) {
3728  PostorderProcessor* parent = Pop(zone, order);
3729  while (parent != NULL) {
3730  PostorderProcessor* next =
3731  parent->PerformNonBacktrackingStep(zone, order);
3732  if (next != NULL) {
3733  return next;
3734  } else {
3735  parent = parent->Pop(zone, order);
3736  }
3737  }
3738  return NULL;
3739  }
3740 
3741  PostorderProcessor* PerformNonBacktrackingStep(
3742  Zone* zone,
3743  ZoneList<HBasicBlock*>* order) {
3744  HBasicBlock* next_block;
3745  switch (kind_) {
3746  case SUCCESSORS:
3747  next_block = AdvanceSuccessors();
3748  if (next_block != NULL) {
3749  PostorderProcessor* result = Push(zone);
3750  return result->SetupSuccessors(zone, next_block, loop_header_);
3751  }
3752  break;
3753  case SUCCESSORS_OF_LOOP_HEADER:
3754  next_block = AdvanceSuccessors();
3755  if (next_block != NULL) {
3756  PostorderProcessor* result = Push(zone);
3757  return result->SetupSuccessors(zone, next_block, block());
3758  }
3759  break;
3760  case LOOP_MEMBERS:
3761  next_block = AdvanceLoopMembers();
3762  if (next_block != NULL) {
3763  PostorderProcessor* result = Push(zone);
3764  return result->SetupSuccessorsOfLoopMember(next_block,
3765  loop_, loop_header_);
3766  }
3767  break;
3768  case SUCCESSORS_OF_LOOP_MEMBER:
3769  next_block = AdvanceSuccessors();
3770  if (next_block != NULL) {
3771  PostorderProcessor* result = Push(zone);
3772  return result->SetupSuccessors(zone, next_block, loop_header_);
3773  }
3774  break;
3775  case NONE:
3776  return NULL;
3777  }
3778  return NULL;
3779  }
3780 
3781  // The following two methods implement a "foreach b in successors" cycle.
3782  void InitializeSuccessors() {
3783  loop_index = 0;
3784  loop_length = 0;
3785  successor_iterator = HSuccessorIterator(block_->end());
3786  }
3787 
3788  HBasicBlock* AdvanceSuccessors() {
3789  if (!successor_iterator.Done()) {
3790  HBasicBlock* result = successor_iterator.Current();
3791  successor_iterator.Advance();
3792  return result;
3793  }
3794  return NULL;
3795  }
3796 
3797  // The following two methods implement a "foreach b in loop members" cycle.
3798  void InitializeLoopMembers() {
3799  loop_index = 0;
3800  loop_length = loop_->blocks()->length();
3801  }
3802 
3803  HBasicBlock* AdvanceLoopMembers() {
3804  if (loop_index < loop_length) {
3805  HBasicBlock* result = loop_->blocks()->at(loop_index);
3806  loop_index++;
3807  return result;
3808  } else {
3809  return NULL;
3810  }
3811  }
3812 
3812 
3813  LoopKind kind_;
3814  PostorderProcessor* father_;
3815  PostorderProcessor* child_;
3816  HLoopInformation* loop_;
3817  HBasicBlock* block_;
3818  HBasicBlock* loop_header_;
3819  int loop_index;
3820  int loop_length;
3821  HSuccessorIterator successor_iterator;
3822 };
3823 
3824 
3825 void HGraph::OrderBlocks() {
3826  CompilationPhase phase("H_Block ordering", info());
3827 
3828 #ifdef DEBUG
3829  // Initially the blocks must not be ordered.
3830  for (int i = 0; i < blocks_.length(); ++i) {
3831  DCHECK(!blocks_[i]->IsOrdered());
3832  }
3833 #endif
3834 
3835  PostorderProcessor* postorder =
3836  PostorderProcessor::CreateEntryProcessor(zone(), blocks_[0]);
3837  blocks_.Rewind(0);
3838  while (postorder) {
3839  postorder = postorder->PerformStep(zone(), &blocks_);
3840  }
3841 
3842 #ifdef DEBUG
3843  // Now all blocks must be marked as ordered.
3844  for (int i = 0; i < blocks_.length(); ++i) {
3845  DCHECK(blocks_[i]->IsOrdered());
3846  }
3847 #endif
3848 
3849  // Reverse block list and assign block IDs.
3850  for (int i = 0, j = blocks_.length(); --j >= i; ++i) {
3851  HBasicBlock* bi = blocks_[i];
3852  HBasicBlock* bj = blocks_[j];
3853  bi->set_block_id(j);
3854  bj->set_block_id(i);
3855  blocks_[i] = bj;
3856  blocks_[j] = bi;
3857  }
3858 }
3859 
3860 
3861 void HGraph::AssignDominators() {
3862  HPhase phase("H_Assign dominators", this);
3863  for (int i = 0; i < blocks_.length(); ++i) {
3864  HBasicBlock* block = blocks_[i];
3865  if (block->IsLoopHeader()) {
3866  // Only the first predecessor of a loop header is from outside the loop.
3867  // All others are back edges, and thus cannot dominate the loop header.
3868  block->AssignCommonDominator(block->predecessors()->first());
3869  block->AssignLoopSuccessorDominators();
3870  } else {
3871  for (int j = blocks_[i]->predecessors()->length() - 1; j >= 0; --j) {
3872  blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->at(j));
3873  }
3874  }
3875  }
3876 }
3877 
3878 
3879 bool HGraph::CheckArgumentsPhiUses() {
3880  int block_count = blocks_.length();
3881  for (int i = 0; i < block_count; ++i) {
3882  for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
3883  HPhi* phi = blocks_[i]->phis()->at(j);
3884  // We don't support phi uses of arguments for now.
3885  if (phi->CheckFlag(HValue::kIsArguments)) return false;
3886  }
3887  }
3888  return true;
3889 }
3890 
3891 
3892 bool HGraph::CheckConstPhiUses() {
3893  int block_count = blocks_.length();
3894  for (int i = 0; i < block_count; ++i) {
3895  for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
3896  HPhi* phi = blocks_[i]->phis()->at(j);
3897  // Check for the hole value (from an uninitialized const).
3898  for (int k = 0; k < phi->OperandCount(); k++) {
3899  if (phi->OperandAt(k) == GetConstantHole()) return false;
3900  }
3901  }
3902  }
3903  return true;
3904 }
3905 
3906 
3907 void HGraph::CollectPhis() {
3908  int block_count = blocks_.length();
3909  phi_list_ = new(zone()) ZoneList<HPhi*>(block_count, zone());
3910  for (int i = 0; i < block_count; ++i) {
3911  for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
3912  HPhi* phi = blocks_[i]->phis()->at(j);
3913  phi_list_->Add(phi, zone());
3914  }
3915  }
3916 }
3917 
3918 
3919 // Implementation of utility class to encapsulate the translation state for
3920 // a (possibly inlined) function.
3921 FunctionState::FunctionState(HOptimizedGraphBuilder* owner,
3922  CompilationInfo* info,
3923  InliningKind inlining_kind,
3924  int inlining_id)
3925  : owner_(owner),
3926  compilation_info_(info),
3927  call_context_(NULL),
3928  inlining_kind_(inlining_kind),
3929  function_return_(NULL),
3930  test_context_(NULL),
3931  entry_(NULL),
3932  arguments_object_(NULL),
3933  arguments_elements_(NULL),
3934  inlining_id_(inlining_id),
3935  outer_source_position_(HSourcePosition::Unknown()),
3936  outer_(owner->function_state()) {
3937  if (outer_ != NULL) {
3938  // State for an inline function.
3939  if (owner->ast_context()->IsTest()) {
3940  HBasicBlock* if_true = owner->graph()->CreateBasicBlock();
3941  HBasicBlock* if_false = owner->graph()->CreateBasicBlock();
3942  if_true->MarkAsInlineReturnTarget(owner->current_block());
3943  if_false->MarkAsInlineReturnTarget(owner->current_block());
3944  TestContext* outer_test_context = TestContext::cast(owner->ast_context());
3945  Expression* cond = outer_test_context->condition();
3946  // The AstContext constructor pushed on the context stack. This newed
3947  // instance is the reason that AstContext can't be BASE_EMBEDDED.
3948  test_context_ = new TestContext(owner, cond, if_true, if_false);
3949  } else {
3950  function_return_ = owner->graph()->CreateBasicBlock();
3951  function_return()->MarkAsInlineReturnTarget(owner->current_block());
3952  }
3953  // Set this after possibly allocating a new TestContext above.
3954  call_context_ = owner->ast_context();
3955  }
3956 
3957  // Push on the state stack.
3958  owner->set_function_state(this);
3959 
3960  if (FLAG_hydrogen_track_positions) {
3961  outer_source_position_ = owner->source_position();
3962  owner->EnterInlinedSource(
3963  info->shared_info()->start_position(),
3964  inlining_id);
3965  owner->SetSourcePosition(info->shared_info()->start_position());
3966  }
3967 }
3968 
3969 
3970 FunctionState::~FunctionState() {
3971  delete test_context_;
3972  owner_->set_function_state(outer_);
3973 
3974  if (FLAG_hydrogen_track_positions) {
3975  owner_->set_source_position(outer_source_position_);
3976  owner_->EnterInlinedSource(
3977  outer_->compilation_info()->shared_info()->start_position(),
3978  outer_->inlining_id());
3979  }
3980 }
3981 
3982 
3983 // Implementation of utility classes to represent an expression's context in
3984 // the AST.
3985 AstContext::AstContext(HOptimizedGraphBuilder* owner, Expression::Context kind)
3986  : owner_(owner),
3987  kind_(kind),
3988  outer_(owner->ast_context()),
3989  for_typeof_(false) {
3990  owner->set_ast_context(this); // Push.
3991 #ifdef DEBUG
3992  DCHECK(owner->environment()->frame_type() == JS_FUNCTION);
3993  original_length_ = owner->environment()->length();
3994 #endif
3995 }
3996 
3997 
3998 AstContext::~AstContext() {
3999  owner_->set_ast_context(outer_); // Pop.
4000 }
4001 
4002 
4003 EffectContext::~EffectContext() {
4004  DCHECK(owner()->HasStackOverflow() ||
4005  owner()->current_block() == NULL ||
4006  (owner()->environment()->length() == original_length_ &&
4007  owner()->environment()->frame_type() == JS_FUNCTION));
4008 }
4009 
4010 
4011 ValueContext::~ValueContext() {
4012  DCHECK(owner()->HasStackOverflow() ||
4013  owner()->current_block() == NULL ||
4014  (owner()->environment()->length() == original_length_ + 1 &&
4015  owner()->environment()->frame_type() == JS_FUNCTION));
4016 }
4017 
4018 
4019 void EffectContext::ReturnValue(HValue* value) {
4020  // The value is simply ignored.
4021 }
4022 
4023 
4024 void ValueContext::ReturnValue(HValue* value) {
4025  // The value is tracked in the bailout environment, and communicated
4026  // through the environment as the result of the expression.
4027  if (!arguments_allowed() && value->CheckFlag(HValue::kIsArguments)) {
4028  owner()->Bailout(kBadValueContextForArgumentsValue);
4029  }
4030  owner()->Push(value);
4031 }
4032 
4033 
4034 void TestContext::ReturnValue(HValue* value) {
4035  BuildBranch(value);
4036 }
4037 
4038 
4039 void EffectContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
4040  DCHECK(!instr->IsControlInstruction());
4041  owner()->AddInstruction(instr);
4042  if (instr->HasObservableSideEffects()) {
4043  owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
4044  }
4045 }
4046 
4047 
4048 void EffectContext::ReturnControl(HControlInstruction* instr,
4049  BailoutId ast_id) {
4050  DCHECK(!instr->HasObservableSideEffects());
4051  HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
4052  HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
4053  instr->SetSuccessorAt(0, empty_true);
4054  instr->SetSuccessorAt(1, empty_false);
4055  owner()->FinishCurrentBlock(instr);
4056  HBasicBlock* join = owner()->CreateJoin(empty_true, empty_false, ast_id);
4057  owner()->set_current_block(join);
4058 }
4059 
4060 
4061 void EffectContext::ReturnContinuation(HIfContinuation* continuation,
4062  BailoutId ast_id) {
4063  HBasicBlock* true_branch = NULL;
4064  HBasicBlock* false_branch = NULL;
4065  continuation->Continue(&true_branch, &false_branch);
4066  if (!continuation->IsTrueReachable()) {
4067  owner()->set_current_block(false_branch);
4068  } else if (!continuation->IsFalseReachable()) {
4069  owner()->set_current_block(true_branch);
4070  } else {
4071  HBasicBlock* join = owner()->CreateJoin(true_branch, false_branch, ast_id);
4072  owner()->set_current_block(join);
4073  }
4074 }
4075 
4076 
4077 void ValueContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
4078  DCHECK(!instr->IsControlInstruction());
4079  if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
4080  return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
4081  }
4082  owner()->AddInstruction(instr);
4083  owner()->Push(instr);
4084  if (instr->HasObservableSideEffects()) {
4085  owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
4086  }
4087 }
4088 
4089 
4090 void ValueContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
4091  DCHECK(!instr->HasObservableSideEffects());
4092  if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
4093  return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
4094  }
4095  HBasicBlock* materialize_false = owner()->graph()->CreateBasicBlock();
4096  HBasicBlock* materialize_true = owner()->graph()->CreateBasicBlock();
4097  instr->SetSuccessorAt(0, materialize_true);
4098  instr->SetSuccessorAt(1, materialize_false);
4099  owner()->FinishCurrentBlock(instr);
4100  owner()->set_current_block(materialize_true);
4101  owner()->Push(owner()->graph()->GetConstantTrue());
4102  owner()->set_current_block(materialize_false);
4103  owner()->Push(owner()->graph()->GetConstantFalse());
4104  HBasicBlock* join =
4105  owner()->CreateJoin(materialize_true, materialize_false, ast_id);
4106  owner()->set_current_block(join);
4107 }
4108 
4109 
4110 void ValueContext::ReturnContinuation(HIfContinuation* continuation,
4111  BailoutId ast_id) {
4112  HBasicBlock* materialize_true = NULL;
4113  HBasicBlock* materialize_false = NULL;
4114  continuation->Continue(&materialize_true, &materialize_false);
4115  if (continuation->IsTrueReachable()) {
4116  owner()->set_current_block(materialize_true);
4117  owner()->Push(owner()->graph()->GetConstantTrue());
4118  owner()->set_current_block(materialize_true);
4119  }
4120  if (continuation->IsFalseReachable()) {
4121  owner()->set_current_block(materialize_false);
4122  owner()->Push(owner()->graph()->GetConstantFalse());
4123  owner()->set_current_block(materialize_false);
4124  }
4125  if (continuation->TrueAndFalseReachable()) {
4126  HBasicBlock* join =
4127  owner()->CreateJoin(materialize_true, materialize_false, ast_id);
4128  owner()->set_current_block(join);
4129  }
4130 }
4131 
4132 
4133 void TestContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
4134  DCHECK(!instr->IsControlInstruction());
4135  HOptimizedGraphBuilder* builder = owner();
4136  builder->AddInstruction(instr);
4137  // We expect a simulate after every expression with side effects, though
4138  // this one isn't actually needed (and wouldn't work if it were targeted).
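  // (The value is pushed only so that the simulate sees a consistent
  // environment; it is popped again immediately afterwards.)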
4139  if (instr->HasObservableSideEffects()) {
4140  builder->Push(instr);
4141  builder->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
4142  builder->Pop();
4143  }
4144  BuildBranch(instr);
4145 }
4146 
4147 
4148 void TestContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
4149  DCHECK(!instr->HasObservableSideEffects());
4150  HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
4151  HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
4152  instr->SetSuccessorAt(0, empty_true);
4153  instr->SetSuccessorAt(1, empty_false);
4154  owner()->FinishCurrentBlock(instr);
4155  owner()->Goto(empty_true, if_true(), owner()->function_state());
4156  owner()->Goto(empty_false, if_false(), owner()->function_state());
4157  owner()->set_current_block(NULL);
4158 }
4159 
4160 
4161 void TestContext::ReturnContinuation(HIfContinuation* continuation,
4162  BailoutId ast_id) {
4163  HBasicBlock* true_branch = NULL;
4164  HBasicBlock* false_branch = NULL;
4165  continuation->Continue(&true_branch, &false_branch);
4166  if (continuation->IsTrueReachable()) {
4167  owner()->Goto(true_branch, if_true(), owner()->function_state());
4168  }
4169  if (continuation->IsFalseReachable()) {
4170  owner()->Goto(false_branch, if_false(), owner()->function_state());
4171  }
4172  owner()->set_current_block(NULL);
4173 }
4174 
4175 
4176 void TestContext::BuildBranch(HValue* value) {
4177  // We expect the graph to be in edge-split form: there is no edge that
4178  // connects a branch node to a join node. We conservatively ensure that
4179  // property by always adding an empty block on the outgoing edges of this
4180  // branch.
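  // (ReturnControl() takes care of this: each successor of the HBranch
  // created below is a fresh empty block that only jumps on to if_true()
  // or if_false(), so no join ever merges directly into the branch.)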
4181  HOptimizedGraphBuilder* builder = owner();
4182  if (value != NULL && value->CheckFlag(HValue::kIsArguments)) {
4183  builder->Bailout(kArgumentsObjectValueInATestContext);
4184  }
4185  ToBooleanStub::Types expected(condition()->to_boolean_types());
4186  ReturnControl(owner()->New<HBranch>(value, expected), BailoutId::None());
4187 }
4188 
4189 
4190 // HOptimizedGraphBuilder infrastructure for bailing out and checking bailouts.
4191 #define CHECK_BAILOUT(call) \
4192  do { \
4193  call; \
4194  if (HasStackOverflow()) return; \
4195  } while (false)
4196 
4197 
4198 #define CHECK_ALIVE(call) \
4199  do { \
4200  call; \
4201  if (HasStackOverflow() || current_block() == NULL) return; \
4202  } while (false)
4203 
4204 
4205 #define CHECK_ALIVE_OR_RETURN(call, value) \
4206  do { \
4207  call; \
4208  if (HasStackOverflow() || current_block() == NULL) return value; \
4209  } while (false)
4210 
4211 
4212 void HOptimizedGraphBuilder::Bailout(BailoutReason reason) {
4213  current_info()->AbortOptimization(reason);
4214  SetStackOverflow();
4215 }
4216 
4217 
4218 void HOptimizedGraphBuilder::VisitForEffect(Expression* expr) {
4219  EffectContext for_effect(this);
4220  Visit(expr);
4221 }
4222 
4223 
4224 void HOptimizedGraphBuilder::VisitForValue(Expression* expr,
4225  ArgumentsAllowedFlag flag) {
4226  ValueContext for_value(this, flag);
4227  Visit(expr);
4228 }
4229 
4230 
4231 void HOptimizedGraphBuilder::VisitForTypeOf(Expression* expr) {
4232  ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
4233  for_value.set_for_typeof(true);
4234  Visit(expr);
4235 }
4236 
4237 
4238 void HOptimizedGraphBuilder::VisitForControl(Expression* expr,
4239  HBasicBlock* true_block,
4240  HBasicBlock* false_block) {
4241  TestContext for_test(this, expr, true_block, false_block);
4242  Visit(expr);
4243 }
4244 
4245 
4246 void HOptimizedGraphBuilder::VisitExpressions(
4247  ZoneList<Expression*>* exprs) {
4248  for (int i = 0; i < exprs->length(); ++i) {
4249  CHECK_ALIVE(VisitForValue(exprs->at(i)));
4250  }
4251 }
4252 
4253 
4254 bool HOptimizedGraphBuilder::BuildGraph() {
4255  if (current_info()->function()->is_generator()) {
4256  Bailout(kFunctionIsAGenerator);
4257  return false;
4258  }
4259  Scope* scope = current_info()->scope();
4260  if (scope->HasIllegalRedeclaration()) {
4261  Bailout(kFunctionWithIllegalRedeclaration);
4262  return false;
4263  }
4264  if (scope->calls_eval()) {
4265  Bailout(kFunctionCallsEval);
4266  return false;
4267  }
4268  SetUpScope(scope);
4269 
4270  // Add an edge to the body entry. This is warty: the graph's start
4271  // environment will be used by the Lithium translation as the initial
4272  // environment on graph entry, but it has now been mutated by the
4273  // Hydrogen translation of the instructions in the start block. This
4274  // environment uses values which have not been defined yet. These
4275  // Hydrogen instructions will then be replayed by the Lithium
4276  // translation, so they cannot have an environment effect. The edge to
4277  // the body's entry block (along with some special logic for the start
4278  // block in HInstruction::InsertAfter) seals the start block from
4279  // getting unwanted instructions inserted.
4280  //
4281  // TODO(kmillikin): Fix this. Stop mutating the initial environment.
4282  // Make the Hydrogen instructions in the initial block into Hydrogen
4283  // values (but not instructions), present in the initial environment and
4284  // not replayed by the Lithium translation.
4285  HEnvironment* initial_env = environment()->CopyWithoutHistory();
4286  HBasicBlock* body_entry = CreateBasicBlock(initial_env);
4287  Goto(body_entry);
4288  body_entry->SetJoinId(BailoutId::FunctionEntry());
4289  set_current_block(body_entry);
4290 
4291  // Handle implicit declaration of the function name in named function
4292  // expressions before other declarations.
4293  if (scope->is_function_scope() && scope->function() != NULL) {
4294  VisitVariableDeclaration(scope->function());
4295  }
4296  VisitDeclarations(scope->declarations());
4297  Add<HSimulate>(BailoutId::Declarations());
4298 
4299  Add<HStackCheck>(HStackCheck::kFunctionEntry);
4300 
4301  VisitStatements(current_info()->function()->body());
4302  if (HasStackOverflow()) return false;
4303 
4304  if (current_block() != NULL) {
4305  Add<HReturn>(graph()->GetConstantUndefined());
4306  set_current_block(NULL);
4307  }
4308 
4309  // If the checksum of the number of type info changes is the same as the
4310  // last time this function was compiled, then this recompile is likely not
4311  // due to missing/inadequate type feedback, but rather too aggressive
4312  // optimization. Disable optimistic LICM in that case.
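  // (Concretely: the unoptimized code's own checksum is folded into the
  // graph's composite checksum below, and optimistic LICM stays enabled
  // only while that composite value differs from the one recorded at the
  // previous optimizing compile.)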
4313  Handle<Code> unoptimized_code(current_info()->shared_info()->code());
4314  DCHECK(unoptimized_code->kind() == Code::FUNCTION);
4315  Handle<TypeFeedbackInfo> type_info(
4316  TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
4317  int checksum = type_info->own_type_change_checksum();
4318  int composite_checksum = graph()->update_type_change_checksum(checksum);
4319  graph()->set_use_optimistic_licm(
4320  !type_info->matches_inlined_type_change_checksum(composite_checksum));
4321  type_info->set_inlined_type_change_checksum(composite_checksum);
4322 
4323  // Perform any necessary OSR-specific cleanups or changes to the graph.
4324  osr()->FinishGraph();
4325 
4326  return true;
4327 }
4328 
4329 
4330 bool HGraph::Optimize(BailoutReason* bailout_reason) {
4331  OrderBlocks();
4332  AssignDominators();
4333 
4334  // We need to create a HConstant "zero" now so that GVN will fold every
4335  // zero-valued constant in the graph together.
4336  // The constant is needed to make idef-based bounds check work: the pass
4337  // evaluates relations with "zero" and that zero cannot be created after GVN.
4338  GetConstant0();
4339 
4340 #ifdef DEBUG
4341  // Do a full verify after building the graph and computing dominators.
4342  Verify(true);
4343 #endif
4344 
4345  if (FLAG_analyze_environment_liveness && maximum_environment_size() != 0) {
4346  Run<HEnvironmentLivenessAnalysisPhase>();
4347  }
4348 
4349  if (!CheckConstPhiUses()) {
4350  *bailout_reason = kUnsupportedPhiUseOfConstVariable;
4351  return false;
4352  }
4353  Run<HRedundantPhiEliminationPhase>();
4354  if (!CheckArgumentsPhiUses()) {
4355  *bailout_reason = kUnsupportedPhiUseOfArguments;
4356  return false;
4357  }
4358 
4359  // Find and mark unreachable code to simplify optimizations, especially gvn,
4360  // where unreachable code could unnecessarily defeat LICM.
4361  Run<HMarkUnreachableBlocksPhase>();
4362 
4363  if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();
4364  if (FLAG_use_escape_analysis) Run<HEscapeAnalysisPhase>();
4365 
4366  if (FLAG_load_elimination) Run<HLoadEliminationPhase>();
4367 
4368  CollectPhis();
4369 
4370  if (has_osr()) osr()->FinishOsrValues();
4371 
4372  Run<HInferRepresentationPhase>();
4373 
4374  // Remove HSimulate instructions that have turned out not to be needed
4375  // after all by folding them into the following HSimulate.
4376  // This must happen after inferring representations.
4377  Run<HMergeRemovableSimulatesPhase>();
4378 
4379  Run<HMarkDeoptimizeOnUndefinedPhase>();
4380  Run<HRepresentationChangesPhase>();
4381 
4382  Run<HInferTypesPhase>();
4383 
4384  // Must be performed before canonicalization to ensure that Canonicalize
4385  // will not remove semantically meaningful ToInt32 operations e.g. BIT_OR with
4386  // zero.
4387  if (FLAG_opt_safe_uint32_operations) Run<HUint32AnalysisPhase>();
4388 
4389  if (FLAG_use_canonicalizing) Run<HCanonicalizePhase>();
4390 
4391  if (FLAG_use_gvn) Run<HGlobalValueNumberingPhase>();
4392 
4393  if (FLAG_check_elimination) Run<HCheckEliminationPhase>();
4394 
4395  if (FLAG_store_elimination) Run<HStoreEliminationPhase>();
4396 
4397  Run<HRangeAnalysisPhase>();
4398 
4399  Run<HComputeChangeUndefinedToNaN>();
4400 
4401  // Eliminate redundant stack checks on backwards branches.
4402  Run<HStackCheckEliminationPhase>();
4403 
4404  if (FLAG_array_bounds_checks_elimination) Run<HBoundsCheckEliminationPhase>();
4405  if (FLAG_array_bounds_checks_hoisting) Run<HBoundsCheckHoistingPhase>();
4406  if (FLAG_array_index_dehoisting) Run<HDehoistIndexComputationsPhase>();
4407  if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();
4408 
4409  RestoreActualValues();
4410 
4411  // Find unreachable code a second time, GVN and other optimizations may have
4412  // made blocks unreachable that were previously reachable.
4413  Run<HMarkUnreachableBlocksPhase>();
4414 
4415  return true;
4416 }
4417 
4418 
4419 void HGraph::RestoreActualValues() {
4420  HPhase phase("H_Restore actual values", this);
4421 
4422  for (int block_index = 0; block_index < blocks()->length(); block_index++) {
4423  HBasicBlock* block = blocks()->at(block_index);
4424 
4425 #ifdef DEBUG
4426  for (int i = 0; i < block->phis()->length(); i++) {
4427  HPhi* phi = block->phis()->at(i);
4428  DCHECK(phi->ActualValue() == phi);
4429  }
4430 #endif
4431 
4432  for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
4433  HInstruction* instruction = it.Current();
4434  if (instruction->ActualValue() == instruction) continue;
4435  if (instruction->CheckFlag(HValue::kIsDead)) {
4436  // The instruction was marked as deleted but left in the graph
4437  // as a control flow dependency point for subsequent
4438  // instructions.
4439  instruction->DeleteAndReplaceWith(instruction->ActualValue());
4440  } else {
4441  DCHECK(instruction->IsInformativeDefinition());
4442  if (instruction->IsPurelyInformativeDefinition()) {
4443  instruction->DeleteAndReplaceWith(instruction->RedefinedOperand());
4444  } else {
4445  instruction->ReplaceAllUsesWith(instruction->ActualValue());
4446  }
4447  }
4448  }
4449  }
4450 }
4451 
4452 
4453 void HOptimizedGraphBuilder::PushArgumentsFromEnvironment(int count) {
4454  ZoneList<HValue*> arguments(count, zone());
4455  for (int i = 0; i < count; ++i) {
4456  arguments.Add(Pop(), zone());
4457  }
4458 
4459  HPushArguments* push_args = New<HPushArguments>();
4460  while (!arguments.is_empty()) {
4461  push_args->AddInput(arguments.RemoveLast());
4462  }
4463  AddInstruction(push_args);
4464 }
4465 
4466 
4467 template <class Instruction>
4468 HInstruction* HOptimizedGraphBuilder::PreProcessCall(Instruction* call) {
4469  PushArgumentsFromEnvironment(call->argument_count());
4470  return call;
4471 }
4472 
4473 
4474 void HOptimizedGraphBuilder::SetUpScope(Scope* scope) {
4475  // First special is HContext.
4476  HInstruction* context = Add<HContext>();
4477  environment()->BindContext(context);
4478 
4479  // Create an arguments object containing the initial parameters. Set the
4480  // initial values of parameters including "this" having parameter index 0.
4481  DCHECK_EQ(scope->num_parameters() + 1, environment()->parameter_count());
4482  HArgumentsObject* arguments_object =
4483  New<HArgumentsObject>(environment()->parameter_count());
4484  for (int i = 0; i < environment()->parameter_count(); ++i) {
4485  HInstruction* parameter = Add<HParameter>(i);
4486  arguments_object->AddArgument(parameter, zone());
4487  environment()->Bind(i, parameter);
4488  }
4489  AddInstruction(arguments_object);
4490  graph()->SetArgumentsObject(arguments_object);
4491 
4492  HConstant* undefined_constant = graph()->GetConstantUndefined();
4493  // Initialize specials and locals to undefined.
4494  for (int i = environment()->parameter_count() + 1;
4495  i < environment()->length();
4496  ++i) {
4497  environment()->Bind(i, undefined_constant);
4498  }
4499 
4500  // Handle the arguments and arguments shadow variables specially (they do
4501  // not have declarations).
4502  if (scope->arguments() != NULL) {
4503  if (!scope->arguments()->IsStackAllocated()) {
4504  return Bailout(kContextAllocatedArguments);
4505  }
4506 
4507  environment()->Bind(scope->arguments(),
4508  graph()->GetArgumentsObject());
4509  }
4510 }
4511 
4512 
4513 Type* HOptimizedGraphBuilder::ToType(Handle<Map> map) {
4514  return IC::MapToType<Type>(map, zone());
4515 }
4516 
4517 
4518 void HOptimizedGraphBuilder::VisitStatements(ZoneList<Statement*>* statements) {
4519  for (int i = 0; i < statements->length(); i++) {
4520  Statement* stmt = statements->at(i);
4521  CHECK_ALIVE(Visit(stmt));
4522  if (stmt->IsJump()) break;
4523  }
4524 }
4525 
4526 
4527 void HOptimizedGraphBuilder::VisitBlock(Block* stmt) {
4528  DCHECK(!HasStackOverflow());
4529  DCHECK(current_block() != NULL);
4530  DCHECK(current_block()->HasPredecessor());
4531 
4532  Scope* outer_scope = scope();
4533  Scope* scope = stmt->scope();
4534  BreakAndContinueInfo break_info(stmt, outer_scope);
4535 
4536  { BreakAndContinueScope push(&break_info, this);
4537  if (scope != NULL) {
4538  // Load the function object.
4539  Scope* declaration_scope = scope->DeclarationScope();
4540  HInstruction* function;
4541  HValue* outer_context = environment()->context();
4542  if (declaration_scope->is_global_scope() ||
4543  declaration_scope->is_eval_scope()) {
4544  function = new(zone()) HLoadContextSlot(
4545  outer_context, Context::CLOSURE_INDEX, HLoadContextSlot::kNoCheck);
4546  } else {
4547  function = New<HThisFunction>();
4548  }
4549  AddInstruction(function);
4550  // Allocate a block context and store it to the stack frame.
4551  HInstruction* inner_context = Add<HAllocateBlockContext>(
4552  outer_context, function, scope->GetScopeInfo());
4553  HInstruction* instr = Add<HStoreFrameContext>(inner_context);
4554  if (instr->HasObservableSideEffects()) {
4555  AddSimulate(stmt->EntryId(), REMOVABLE_SIMULATE);
4556  }
4557  set_scope(scope);
4558  environment()->BindContext(inner_context);
4559  VisitDeclarations(scope->declarations());
4560  AddSimulate(stmt->DeclsId(), REMOVABLE_SIMULATE);
4561  }
4562  CHECK_BAILOUT(VisitStatements(stmt->statements()));
4563  }
4564  set_scope(outer_scope);
4565  if (scope != NULL && current_block() != NULL) {
4566  HValue* inner_context = environment()->context();
4567  HValue* outer_context = Add<HLoadNamedField>(
4568  inner_context, static_cast<HValue*>(NULL),
4569  HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
4570 
4571  HInstruction* instr = Add<HStoreFrameContext>(outer_context);
4572  if (instr->HasObservableSideEffects()) {
4573  AddSimulate(stmt->ExitId(), REMOVABLE_SIMULATE);
4574  }
4575  environment()->BindContext(outer_context);
4576  }
4577  HBasicBlock* break_block = break_info.break_block();
4578  if (break_block != NULL) {
4579  if (current_block() != NULL) Goto(break_block);
4580  break_block->SetJoinId(stmt->ExitId());
4581  set_current_block(break_block);
4582  }
4583 }
4584 
4585 
4586 void HOptimizedGraphBuilder::VisitExpressionStatement(
4587  ExpressionStatement* stmt) {
4588  DCHECK(!HasStackOverflow());
4589  DCHECK(current_block() != NULL);
4590  DCHECK(current_block()->HasPredecessor());
4591  VisitForEffect(stmt->expression());
4592 }
4593 
4594 
4595 void HOptimizedGraphBuilder::VisitEmptyStatement(EmptyStatement* stmt) {
4596  DCHECK(!HasStackOverflow());
4597  DCHECK(current_block() != NULL);
4598  DCHECK(current_block()->HasPredecessor());
4599 }
4600 
4601 
4602 void HOptimizedGraphBuilder::VisitIfStatement(IfStatement* stmt) {
4603  DCHECK(!HasStackOverflow());
4604  DCHECK(current_block() != NULL);
4605  DCHECK(current_block()->HasPredecessor());
4606  if (stmt->condition()->ToBooleanIsTrue()) {
4607  Add<HSimulate>(stmt->ThenId());
4608  Visit(stmt->then_statement());
4609  } else if (stmt->condition()->ToBooleanIsFalse()) {
4610  Add<HSimulate>(stmt->ElseId());
4611  Visit(stmt->else_statement());
4612  } else {
4613  HBasicBlock* cond_true = graph()->CreateBasicBlock();
4614  HBasicBlock* cond_false = graph()->CreateBasicBlock();
4615  CHECK_BAILOUT(VisitForControl(stmt->condition(), cond_true, cond_false));
4616 
4617  if (cond_true->HasPredecessor()) {
4618  cond_true->SetJoinId(stmt->ThenId());
4619  set_current_block(cond_true);
4620  CHECK_BAILOUT(Visit(stmt->then_statement()));
4621  cond_true = current_block();
4622  } else {
4623  cond_true = NULL;
4624  }
4625 
4626  if (cond_false->HasPredecessor()) {
4627  cond_false->SetJoinId(stmt->ElseId());
4628  set_current_block(cond_false);
4629  CHECK_BAILOUT(Visit(stmt->else_statement()));
4630  cond_false = current_block();
4631  } else {
4632  cond_false = NULL;
4633  }
4634 
4635  HBasicBlock* join = CreateJoin(cond_true, cond_false, stmt->IfId());
4636  set_current_block(join);
4637  }
4638 }
4639 
4640 
4641 HBasicBlock* HOptimizedGraphBuilder::BreakAndContinueScope::Get(
4642  BreakableStatement* stmt,
4643  BreakType type,
4644  Scope** scope,
4645  int* drop_extra) {
4646  *drop_extra = 0;
4647  BreakAndContinueScope* current = this;
4648  while (current != NULL && current->info()->target() != stmt) {
4649  *drop_extra += current->info()->drop_extra();
4650  current = current->next();
4651  }
4652  DCHECK(current != NULL); // Always found (unless stack is malformed).
4653  *scope = current->info()->scope();
4654 
4655  if (type == BREAK) {
4656  *drop_extra += current->info()->drop_extra();
4657  }
4658 
4659  HBasicBlock* block = NULL;
4660  switch (type) {
4661  case BREAK:
4662  block = current->info()->break_block();
4663  if (block == NULL) {
4664  block = current->owner()->graph()->CreateBasicBlock();
4665  current->info()->set_break_block(block);
4666  }
4667  break;
4668 
4669  case CONTINUE:
4670  block = current->info()->continue_block();
4671  if (block == NULL) {
4672  block = current->owner()->graph()->CreateBasicBlock();
4673  current->info()->set_continue_block(block);
4674  }
4675  break;
4676  }
4677 
4678  return block;
4679 }
4680 
4681 
4682 void HOptimizedGraphBuilder::VisitContinueStatement(
4683  ContinueStatement* stmt) {
4684  DCHECK(!HasStackOverflow());
4685  DCHECK(current_block() != NULL);
4686  DCHECK(current_block()->HasPredecessor());
4687  Scope* outer_scope = NULL;
4688  Scope* inner_scope = scope();
4689  int drop_extra = 0;
4690  HBasicBlock* continue_block = break_scope()->Get(
4691  stmt->target(), BreakAndContinueScope::CONTINUE,
4692  &outer_scope, &drop_extra);
4693  HValue* context = environment()->context();
4694  Drop(drop_extra);
4695  int context_pop_count = inner_scope->ContextChainLength(outer_scope);
4696  if (context_pop_count > 0) {
4697  while (context_pop_count-- > 0) {
4698  HInstruction* context_instruction = Add<HLoadNamedField>(
4699  context, static_cast<HValue*>(NULL),
4700  HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
4701  context = context_instruction;
4702  }
4703  HInstruction* instr = Add<HStoreFrameContext>(context);
4704  if (instr->HasObservableSideEffects()) {
4705  AddSimulate(stmt->target()->EntryId(), REMOVABLE_SIMULATE);
4706  }
4707  environment()->BindContext(context);
4708  }
4709 
4710  Goto(continue_block);
4711  set_current_block(NULL);
4712 }
4713 
4714 
4715 void HOptimizedGraphBuilder::VisitBreakStatement(BreakStatement* stmt) {
4716  DCHECK(!HasStackOverflow());
4717  DCHECK(current_block() != NULL);
4718  DCHECK(current_block()->HasPredecessor());
4719  Scope* outer_scope = NULL;
4720  Scope* inner_scope = scope();
4721  int drop_extra = 0;
4722  HBasicBlock* break_block = break_scope()->Get(
4723  stmt->target(), BreakAndContinueScope::BREAK,
4724  &outer_scope, &drop_extra);
4725  HValue* context = environment()->context();
4726  Drop(drop_extra);
4727  int context_pop_count = inner_scope->ContextChainLength(outer_scope);
4728  if (context_pop_count > 0) {
4729  while (context_pop_count-- > 0) {
4730  HInstruction* context_instruction = Add<HLoadNamedField>(
4731  context, static_cast<HValue*>(NULL),
4732  HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
4733  context = context_instruction;
4734  }
4735  HInstruction* instr = Add<HStoreFrameContext>(context);
4736  if (instr->HasObservableSideEffects()) {
4737  AddSimulate(stmt->target()->ExitId(), REMOVABLE_SIMULATE);
4738  }
4739  environment()->BindContext(context);
4740  }
4741  Goto(break_block);
4742  set_current_block(NULL);
4743 }
4744 
4745 
4746 void HOptimizedGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
4747  DCHECK(!HasStackOverflow());
4748  DCHECK(current_block() != NULL);
4749  DCHECK(current_block()->HasPredecessor());
4750  FunctionState* state = function_state();
4751  AstContext* context = call_context();
4752  if (context == NULL) {
4753  // Not an inlined return, so an actual one.
4754  CHECK_ALIVE(VisitForValue(stmt->expression()));
4755  HValue* result = environment()->Pop();
4756  Add<HReturn>(result);
4757  } else if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
4758  // Return from an inlined construct call. In a test context the return value
4759  // will always evaluate to true, in a value context the return value needs
4760  // to be a JSObject.
4761  if (context->IsTest()) {
4762  TestContext* test = TestContext::cast(context);
4763  CHECK_ALIVE(VisitForEffect(stmt->expression()));
4764  Goto(test->if_true(), state);
4765  } else if (context->IsEffect()) {
4766  CHECK_ALIVE(VisitForEffect(stmt->expression()));
4767  Goto(function_return(), state);
4768  } else {
4769  DCHECK(context->IsValue());
4770  CHECK_ALIVE(VisitForValue(stmt->expression()));
4771  HValue* return_value = Pop();
4772  HValue* receiver = environment()->arguments_environment()->Lookup(0);
4773  HHasInstanceTypeAndBranch* typecheck =
4774  New<HHasInstanceTypeAndBranch>(return_value,
4775  FIRST_SPEC_OBJECT_TYPE,
4776  LAST_SPEC_OBJECT_TYPE);
4777  HBasicBlock* if_spec_object = graph()->CreateBasicBlock();
4778  HBasicBlock* not_spec_object = graph()->CreateBasicBlock();
4779  typecheck->SetSuccessorAt(0, if_spec_object);
4780  typecheck->SetSuccessorAt(1, not_spec_object);
4781  FinishCurrentBlock(typecheck);
4782  AddLeaveInlined(if_spec_object, return_value, state);
4783  AddLeaveInlined(not_spec_object, receiver, state);
4784  }
4785  } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
4786  // Return from an inlined setter call. The returned value is never used, the
4787  // value of an assignment is always the value of the RHS of the assignment.
4788  CHECK_ALIVE(VisitForEffect(stmt->expression()));
4789  if (context->IsTest()) {
4790  HValue* rhs = environment()->arguments_environment()->Lookup(1);
4791  context->ReturnValue(rhs);
4792  } else if (context->IsEffect()) {
4793  Goto(function_return(), state);
4794  } else {
4795  DCHECK(context->IsValue());
4796  HValue* rhs = environment()->arguments_environment()->Lookup(1);
4797  AddLeaveInlined(rhs, state);
4798  }
4799  } else {
4800  // Return from a normal inlined function. Visit the subexpression in the
4801  // expression context of the call.
4802  if (context->IsTest()) {
4803  TestContext* test = TestContext::cast(context);
4804  VisitForControl(stmt->expression(), test->if_true(), test->if_false());
4805  } else if (context->IsEffect()) {
4806  // Visit in value context and ignore the result. This is needed to keep
4807  // environment in sync with full-codegen since some visitors (e.g.
4808  // VisitCountOperation) use the operand stack differently depending on
4809  // context.
4810  CHECK_ALIVE(VisitForValue(stmt->expression()));
4811  Pop();
4812  Goto(function_return(), state);
4813  } else {
4814  DCHECK(context->IsValue());
4815  CHECK_ALIVE(VisitForValue(stmt->expression()));
4816  AddLeaveInlined(Pop(), state);
4817  }
4818  }
4819  set_current_block(NULL);
4820 }
4821 
4822 
4823 void HOptimizedGraphBuilder::VisitWithStatement(WithStatement* stmt) {
4824  DCHECK(!HasStackOverflow());
4825  DCHECK(current_block() != NULL);
4826  DCHECK(current_block()->HasPredecessor());
4827  return Bailout(kWithStatement);
4828 }
4829 
4830 
4831 void HOptimizedGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
4832  DCHECK(!HasStackOverflow());
4833  DCHECK(current_block() != NULL);
4834  DCHECK(current_block()->HasPredecessor());
4835 
4836  ZoneList<CaseClause*>* clauses = stmt->cases();
4837  int clause_count = clauses->length();
4838  ZoneList<HBasicBlock*> body_blocks(clause_count, zone());
4839 
4840  CHECK_ALIVE(VisitForValue(stmt->tag()));
4841  Add<HSimulate>(stmt->EntryId());
4842  HValue* tag_value = Top();
4843  Type* tag_type = stmt->tag()->bounds().lower;
4844 
4845  // 1. Build all the tests, with dangling true branches
4846  BailoutId default_id = BailoutId::None();
4847  for (int i = 0; i < clause_count; ++i) {
4848  CaseClause* clause = clauses->at(i);
4849  if (clause->is_default()) {
4850  body_blocks.Add(NULL, zone());
4851  if (default_id.IsNone()) default_id = clause->EntryId();
4852  continue;
4853  }
4854 
4855  // Generate a compare and branch.
4856  CHECK_ALIVE(VisitForValue(clause->label()));
4857  HValue* label_value = Pop();
4858 
4859  Type* label_type = clause->label()->bounds().lower;
4860  Type* combined_type = clause->compare_type();
4861  HControlInstruction* compare = BuildCompareInstruction(
4862  Token::EQ_STRICT, tag_value, label_value, tag_type, label_type,
4863  combined_type,
4864  ScriptPositionToSourcePosition(stmt->tag()->position()),
4865  ScriptPositionToSourcePosition(clause->label()->position()),
4866  PUSH_BEFORE_SIMULATE, clause->id());
4867 
4868  HBasicBlock* next_test_block = graph()->CreateBasicBlock();
4869  HBasicBlock* body_block = graph()->CreateBasicBlock();
4870  body_blocks.Add(body_block, zone());
4871  compare->SetSuccessorAt(0, body_block);
4872  compare->SetSuccessorAt(1, next_test_block);
4873  FinishCurrentBlock(compare);
4874 
4875  set_current_block(body_block);
4876  Drop(1); // tag_value
4877 
4878  set_current_block(next_test_block);
4879  }
4880 
4881  // Save the current block to use for the default or to join with the
4882  // exit.
4883  HBasicBlock* last_block = current_block();
4884  Drop(1); // tag_value
4885 
4886  // 2. Loop over the clauses and the linked list of tests in lockstep,
4887  // translating the clause bodies.
4888  HBasicBlock* fall_through_block = NULL;
4889 
4890  BreakAndContinueInfo break_info(stmt, scope());
4891  { BreakAndContinueScope push(&break_info, this);
4892  for (int i = 0; i < clause_count; ++i) {
4893  CaseClause* clause = clauses->at(i);
4894 
4895  // Identify the block where normal (non-fall-through) control flow
4896  // goes to.
4897  HBasicBlock* normal_block = NULL;
4898  if (clause->is_default()) {
4899  if (last_block == NULL) continue;
4900  normal_block = last_block;
4901  last_block = NULL; // Cleared to indicate we've handled it.
4902  } else {
4903  normal_block = body_blocks[i];
4904  }
4905 
4906  if (fall_through_block == NULL) {
4907  set_current_block(normal_block);
4908  } else {
4909  HBasicBlock* join = CreateJoin(fall_through_block,
4910  normal_block,
4911  clause->EntryId());
4912  set_current_block(join);
4913  }
4914 
4915  CHECK_BAILOUT(VisitStatements(clause->statements()));
4916  fall_through_block = current_block();
4917  }
4918  }
4919 
4920  // Create an up-to-3-way join. Use the break block if it exists since
4921  // it's already a join block.
4922  HBasicBlock* break_block = break_info.break_block();
4923  if (break_block == NULL) {
4924  set_current_block(CreateJoin(fall_through_block,
4925  last_block,
4926  stmt->ExitId()));
4927  } else {
4928  if (fall_through_block != NULL) Goto(fall_through_block, break_block);
4929  if (last_block != NULL) Goto(last_block, break_block);
4930  break_block->SetJoinId(stmt->ExitId());
4931  set_current_block(break_block);
4932  }
4933 }
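
// A minimal, self-contained model (not V8 code; all names hypothetical) of the
// control flow the two passes above build for a switch: a chain of
// strict-equality tests picks the first matching clause (or the default), and
// execution then falls through later clause bodies until a break, mirroring the
// CreateJoin of each body with the previous clause's fall-through block.
#include <cstddef>
#include <functional>
#include <optional>
#include <vector>

struct ClauseModel {
  std::optional<int> label;     // empty => default clause
  std::function<bool()> body;   // returns true to model 'break'
};

inline void RunSwitchModel(int tag, const std::vector<ClauseModel>& clauses) {
  // Pass 1 analogue: walk the test chain to find the entry clause.
  std::size_t entry = clauses.size();
  std::size_t default_index = clauses.size();
  for (std::size_t i = 0; i < clauses.size(); ++i) {
    if (!clauses[i].label) { default_index = i; continue; }
    if (*clauses[i].label == tag) { entry = i; break; }
  }
  if (entry == clauses.size()) entry = default_index;
  // Pass 2 analogue: run bodies in order from the entry, honouring
  // fall-through; a 'break' leaves for the exit join.
  for (std::size_t i = entry; i < clauses.size(); ++i) {
    if (clauses[i].body && clauses[i].body()) break;
  }
}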
4934 
4935 
4936 void HOptimizedGraphBuilder::VisitLoopBody(IterationStatement* stmt,
4937  HBasicBlock* loop_entry) {
4938  Add<HSimulate>(stmt->StackCheckId());
4939  HStackCheck* stack_check =
4940  HStackCheck::cast(Add<HStackCheck>(HStackCheck::kBackwardsBranch));
4941  DCHECK(loop_entry->IsLoopHeader());
4942  loop_entry->loop_information()->set_stack_check(stack_check);
4943  CHECK_BAILOUT(Visit(stmt->body()));
4944 }
4945 
4946 
4947 void HOptimizedGraphBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) {
4948  DCHECK(!HasStackOverflow());
4949  DCHECK(current_block() != NULL);
4950  DCHECK(current_block()->HasPredecessor());
4951  DCHECK(current_block() != NULL);
4952  HBasicBlock* loop_entry = BuildLoopEntry(stmt);
4953 
4954  BreakAndContinueInfo break_info(stmt, scope());
4955  {
4956  BreakAndContinueScope push(&break_info, this);
4957  CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
4958  }
4959  HBasicBlock* body_exit =
4960  JoinContinue(stmt, current_block(), break_info.continue_block());
4961  HBasicBlock* loop_successor = NULL;
4962  if (body_exit != NULL && !stmt->cond()->ToBooleanIsTrue()) {
4963  set_current_block(body_exit);
4964  loop_successor = graph()->CreateBasicBlock();
4965  if (stmt->cond()->ToBooleanIsFalse()) {
4966  loop_entry->loop_information()->stack_check()->Eliminate();
4967  Goto(loop_successor);
4968  body_exit = NULL;
4969  } else {
4970  // The block for a true condition, the actual predecessor block of the
4971  // back edge.
4972  body_exit = graph()->CreateBasicBlock();
4973  CHECK_BAILOUT(VisitForControl(stmt->cond(), body_exit, loop_successor));
4974  }
4975  if (body_exit != NULL && body_exit->HasPredecessor()) {
4976  body_exit->SetJoinId(stmt->BackEdgeId());
4977  } else {
4978  body_exit = NULL;
4979  }
4980  if (loop_successor->HasPredecessor()) {
4981  loop_successor->SetJoinId(stmt->ExitId());
4982  } else {
4983  loop_successor = NULL;
4984  }
4985  }
4986  HBasicBlock* loop_exit = CreateLoop(stmt,
4987  loop_entry,
4988  body_exit,
4989  loop_successor,
4990  break_info.break_block());
4991  set_current_block(loop_exit);
4992 }
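
// A tiny model (not V8 code) of the shape built above for do-while: the body
// always runs once, and the condition only decides whether the back edge to the
// loop entry is taken; a condition known to be true skips the branch entirely,
// and one known to be false also drops the back-edge stack check.
#include <functional>

inline void RunDoWhileModel(const std::function<bool()>& body,   // true => break
                            const std::function<bool()>& cond) {
  do {
    if (body()) break;   // edge to the break block
  } while (cond());      // back edge taken only while cond() holds
}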
4993 
4994 
4995 void HOptimizedGraphBuilder::VisitWhileStatement(WhileStatement* stmt) {
4996  DCHECK(!HasStackOverflow());
4997  DCHECK(current_block() != NULL);
4998  DCHECK(current_block()->HasPredecessor());
4999  DCHECK(current_block() != NULL);
5000  HBasicBlock* loop_entry = BuildLoopEntry(stmt);
5001 
5002  // If the condition is constant true, do not generate a branch.
5003  HBasicBlock* loop_successor = NULL;
5004  if (!stmt->cond()->ToBooleanIsTrue()) {
5005  HBasicBlock* body_entry = graph()->CreateBasicBlock();
5006  loop_successor = graph()->CreateBasicBlock();
5007  CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
5008  if (body_entry->HasPredecessor()) {
5009  body_entry->SetJoinId(stmt->BodyId());
5010  set_current_block(body_entry);
5011  }
5012  if (loop_successor->HasPredecessor()) {
5013  loop_successor->SetJoinId(stmt->ExitId());
5014  } else {
5015  loop_successor = NULL;
5016  }
5017  }
5018 
5019  BreakAndContinueInfo break_info(stmt, scope());
5020  if (current_block() != NULL) {
5021  BreakAndContinueScope push(&break_info, this);
5022  CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
5023  }
5024  HBasicBlock* body_exit =
5025  JoinContinue(stmt, current_block(), break_info.continue_block());
5026  HBasicBlock* loop_exit = CreateLoop(stmt,
5027  loop_entry,
5028  body_exit,
5029  loop_successor,
5030  break_info.break_block());
5031  set_current_block(loop_exit);
5032 }
5033 
5034 
5035 void HOptimizedGraphBuilder::VisitForStatement(ForStatement* stmt) {
5036  DCHECK(!HasStackOverflow());
5037  DCHECK(current_block() != NULL);
5038  DCHECK(current_block()->HasPredecessor());
5039  if (stmt->init() != NULL) {
5040  CHECK_ALIVE(Visit(stmt->init()));
5041  }
5042  DCHECK(current_block() != NULL);
5043  HBasicBlock* loop_entry = BuildLoopEntry(stmt);
5044 
5045  HBasicBlock* loop_successor = NULL;
5046  if (stmt->cond() != NULL) {
5047  HBasicBlock* body_entry = graph()->CreateBasicBlock();
5048  loop_successor = graph()->CreateBasicBlock();
5049  CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
5050  if (body_entry->HasPredecessor()) {
5051  body_entry->SetJoinId(stmt->BodyId());
5052  set_current_block(body_entry);
5053  }
5054  if (loop_successor->HasPredecessor()) {
5055  loop_successor->SetJoinId(stmt->ExitId());
5056  } else {
5057  loop_successor = NULL;
5058  }
5059  }
5060 
5061  BreakAndContinueInfo break_info(stmt, scope());
5062  if (current_block() != NULL) {
5063  BreakAndContinueScope push(&break_info, this);
5064  CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
5065  }
5066  HBasicBlock* body_exit =
5067  JoinContinue(stmt, current_block(), break_info.continue_block());
5068 
5069  if (stmt->next() != NULL && body_exit != NULL) {
5070  set_current_block(body_exit);
5071  CHECK_BAILOUT(Visit(stmt->next()));
5072  body_exit = current_block();
5073  }
5074 
5075  HBasicBlock* loop_exit = CreateLoop(stmt,
5076  loop_entry,
5077  body_exit,
5078  loop_successor,
5079  break_info.break_block());
5080  set_current_block(loop_exit);
5081 }
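
// A small model (not V8 code) of the block layout built for a for-loop: the
// initializer runs before the loop entry, the optional condition guards the
// body, and the optional next-expression sits on the back edge after the body
// exit, which is also where 'continue' re-enters the loop.
#include <functional>

inline void RunForModel(const std::function<void()>& init,   // may be empty
                        const std::function<bool()>& cond,   // may be empty
                        const std::function<void()>& next,   // may be empty
                        const std::function<bool()>& body) { // true => break
  if (init) init();                // visited before BuildLoopEntry
  for (;;) {
    if (cond && !cond()) break;    // edge to loop_successor
    if (body()) break;             // edge to the break block
    if (next) next();              // runs on the back edge
  }
}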
5082 
5083 
5084 void HOptimizedGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
5085  DCHECK(!HasStackOverflow());
5086  DCHECK(current_block() != NULL);
5087  DCHECK(current_block()->HasPredecessor());
5088 
5089  if (!FLAG_optimize_for_in) {
5090  return Bailout(kForInStatementOptimizationIsDisabled);
5091  }
5092 
5093  if (stmt->for_in_type() != ForInStatement::FAST_FOR_IN) {
5094  return Bailout(kForInStatementIsNotFastCase);
5095  }
5096 
5097  if (!stmt->each()->IsVariableProxy() ||
5098  !stmt->each()->AsVariableProxy()->var()->IsStackLocal()) {
5099  return Bailout(kForInStatementWithNonLocalEachVariable);
5100  }
5101 
5102  Variable* each_var = stmt->each()->AsVariableProxy()->var();
5103 
5104  CHECK_ALIVE(VisitForValue(stmt->enumerable()));
5105  HValue* enumerable = Top(); // Leave enumerable at the top.
5106 
5107  HInstruction* map = Add<HForInPrepareMap>(enumerable);
5108  Add<HSimulate>(stmt->PrepareId());
5109 
5110  HInstruction* array = Add<HForInCacheArray>(
5111  enumerable, map, DescriptorArray::kEnumCacheBridgeCacheIndex);
5112 
5113  HInstruction* enum_length = Add<HMapEnumLength>(map);
5114 
5115  HInstruction* start_index = Add<HConstant>(0);
5116 
5117  Push(map);
5118  Push(array);
5119  Push(enum_length);
5120  Push(start_index);
5121 
5122  HInstruction* index_cache = Add<HForInCacheArray>(
5123  enumerable, map, DescriptorArray::kEnumCacheBridgeIndicesCacheIndex);
5124  HForInCacheArray::cast(array)->set_index_cache(
5125  HForInCacheArray::cast(index_cache));
5126 
5127  HBasicBlock* loop_entry = BuildLoopEntry(stmt);
5128 
5129  HValue* index = environment()->ExpressionStackAt(0);
5130  HValue* limit = environment()->ExpressionStackAt(1);
5131 
5132  // Check that we still have more keys.
5133  HCompareNumericAndBranch* compare_index =
5134  New<HCompareNumericAndBranch>(index, limit, Token::LT);
5135  compare_index->set_observed_input_representation(
5136  Representation::Smi(), Representation::Smi());
5137 
5138  HBasicBlock* loop_body = graph()->CreateBasicBlock();
5139  HBasicBlock* loop_successor = graph()->CreateBasicBlock();
5140 
5141  compare_index->SetSuccessorAt(0, loop_body);
5142  compare_index->SetSuccessorAt(1, loop_successor);
5143  FinishCurrentBlock(compare_index);
5144 
5145  set_current_block(loop_successor);
5146  Drop(5);
5147 
5148  set_current_block(loop_body);
5149 
5150  HValue* key = Add<HLoadKeyed>(
5151  environment()->ExpressionStackAt(2), // Enum cache.
5152  environment()->ExpressionStackAt(0), // Iteration index.
5153  environment()->ExpressionStackAt(0),
5154  FAST_ELEMENTS);
5155 
5156  // Check if the expected map still matches that of the enumerable.
5157  // If not, just deoptimize.
5158  Add<HCheckMapValue>(environment()->ExpressionStackAt(4),
5159  environment()->ExpressionStackAt(3));
5160 
5161  Bind(each_var, key);
5162 
5163  BreakAndContinueInfo break_info(stmt, scope(), 5);
5164  {
5165  BreakAndContinueScope push(&break_info, this);
5166  CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
5167  }
5168 
5169  HBasicBlock* body_exit =
5170  JoinContinue(stmt, current_block(), break_info.continue_block());
5171 
5172  if (body_exit != NULL) {
5173  set_current_block(body_exit);
5174 
5175  HValue* current_index = Pop();
5176  Push(AddUncasted<HAdd>(current_index, graph()->GetConstant1()));
5177  body_exit = current_block();
5178  }
5179 
5180  HBasicBlock* loop_exit = CreateLoop(stmt,
5181  loop_entry,
5182  body_exit,
5183  loop_successor,
5184  break_info.break_block());
5185 
5186  set_current_block(loop_exit);
5187 }
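
// A self-contained model (not V8 code; names hypothetical) of the fast for-in
// path built above. Five values stay live across the loop (the enumerable, its
// map, the enum cache array, the enum length and the running index); every
// iteration re-checks that the enumerable's map is unchanged, loads the next
// key from the cache, binds it and bumps the index on the back edge.
#include <cstddef>
#include <string>
#include <vector>

struct EnumerableModel {
  const void* map;                       // stands in for the hidden class
  std::vector<std::string> enum_cache;   // stands in for the enum cache array
};

template <typename F>
void RunForInModel(const EnumerableModel& enumerable, F&& bind_each) {
  const void* expected_map = enumerable.map;                // HForInPrepareMap
  const std::size_t limit = enumerable.enum_cache.size();   // HMapEnumLength
  for (std::size_t index = 0; index < limit; ++index) {     // index < limit
    if (enumerable.map != expected_map) return;             // HCheckMapValue -> deopt
    bind_each(enumerable.enum_cache[index]);                // Bind(each_var, key)
  }                                                         // index + 1 on back edge
}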
5188 
5189 
5190 void HOptimizedGraphBuilder::VisitForOfStatement(ForOfStatement* stmt) {
5191  DCHECK(!HasStackOverflow());
5192  DCHECK(current_block() != NULL);
5193  DCHECK(current_block()->HasPredecessor());
5194  return Bailout(kForOfStatement);
5195 }
5196 
5197 
5198 void HOptimizedGraphBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) {
5199  DCHECK(!HasStackOverflow());
5200  DCHECK(current_block() != NULL);
5201  DCHECK(current_block()->HasPredecessor());
5202  return Bailout(kTryCatchStatement);
5203 }
5204 
5205 
5206 void HOptimizedGraphBuilder::VisitTryFinallyStatement(
5207  TryFinallyStatement* stmt) {
5208  DCHECK(!HasStackOverflow());
5209  DCHECK(current_block() != NULL);
5210  DCHECK(current_block()->HasPredecessor());
5211  return Bailout(kTryFinallyStatement);
5212 }
5213 
5214 
5215 void HOptimizedGraphBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
5216  DCHECK(!HasStackOverflow());
5217  DCHECK(current_block() != NULL);
5218  DCHECK(current_block()->HasPredecessor());
5219  return Bailout(kDebuggerStatement);
5220 }
5221 
5222 
5223 void HOptimizedGraphBuilder::VisitCaseClause(CaseClause* clause) {
5224  UNREACHABLE();
5225 }
5226 
5227 
5228 void HOptimizedGraphBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
5229  DCHECK(!HasStackOverflow());
5230  DCHECK(current_block() != NULL);
5231  DCHECK(current_block()->HasPredecessor());
5232  Handle<SharedFunctionInfo> shared_info = expr->shared_info();
5233  if (shared_info.is_null()) {
5234  shared_info =
5235  Compiler::BuildFunctionInfo(expr, current_info()->script(), top_info());
5236  }
5237  // We also have a stack overflow if the recursive compilation did.
5238  if (HasStackOverflow()) return;
5239  HFunctionLiteral* instr =
5240  New<HFunctionLiteral>(shared_info, expr->pretenure());
5241  return ast_context()->ReturnInstruction(instr, expr->id());
5242 }
5243 
5244 
5245 void HOptimizedGraphBuilder::VisitClassLiteral(ClassLiteral* lit) {
5246  DCHECK(!HasStackOverflow());
5247  DCHECK(current_block() != NULL);
5248  DCHECK(current_block()->HasPredecessor());
5249  return Bailout(kClassLiteral);
5250 }
5251 
5252 
5253 void HOptimizedGraphBuilder::VisitNativeFunctionLiteral(
5254  NativeFunctionLiteral* expr) {
5255  DCHECK(!HasStackOverflow());
5256  DCHECK(current_block() != NULL);
5257  DCHECK(current_block()->HasPredecessor());
5258  return Bailout(kNativeFunctionLiteral);
5259 }
5260 
5261 
5262 void HOptimizedGraphBuilder::VisitConditional(Conditional* expr) {
5263  DCHECK(!HasStackOverflow());
5264  DCHECK(current_block() != NULL);
5265  DCHECK(current_block()->HasPredecessor());
5266  HBasicBlock* cond_true = graph()->CreateBasicBlock();
5267  HBasicBlock* cond_false = graph()->CreateBasicBlock();
5268  CHECK_BAILOUT(VisitForControl(expr->condition(), cond_true, cond_false));
5269 
5270  // Visit the true and false subexpressions in the same AST context as the
5271  // whole expression.
5272  if (cond_true->HasPredecessor()) {
5273  cond_true->SetJoinId(expr->ThenId());
5274  set_current_block(cond_true);
5275  CHECK_BAILOUT(Visit(expr->then_expression()));
5276  cond_true = current_block();
5277  } else {
5278  cond_true = NULL;
5279  }
5280 
5281  if (cond_false->HasPredecessor()) {
5282  cond_false->SetJoinId(expr->ElseId());
5283  set_current_block(cond_false);
5284  CHECK_BAILOUT(Visit(expr->else_expression()));
5285  cond_false = current_block();
5286  } else {
5287  cond_false = NULL;
5288  }
5289 
5290  if (!ast_context()->IsTest()) {
5291  HBasicBlock* join = CreateJoin(cond_true, cond_false, expr->id());
5292  set_current_block(join);
5293  if (join != NULL && !ast_context()->IsEffect()) {
5294  return ast_context()->ReturnValue(Pop());
5295  }
5296  }
5297 }
5298 
5299 
5300 HOptimizedGraphBuilder::GlobalPropertyAccess
5301 HOptimizedGraphBuilder::LookupGlobalProperty(Variable* var, LookupIterator* it,
5302  PropertyAccessType access_type) {
5303  if (var->is_this() || !current_info()->has_global_object()) {
5304  return kUseGeneric;
5305  }
5306 
5307  switch (it->state()) {
5308  case LookupIterator::ACCESSOR:
5309  case LookupIterator::ACCESS_CHECK:
5310  case LookupIterator::INTERCEPTOR:
5311  case LookupIterator::NOT_FOUND:
5312  return kUseGeneric;
5313  case LookupIterator::DATA:
5314  if (access_type == STORE && it->IsReadOnly()) return kUseGeneric;
5315  return kUseCell;
5316  case LookupIterator::JSPROXY:
5317  case LookupIterator::TRANSITION:
5318  UNREACHABLE();
5319  }
5320  UNREACHABLE();
5321  return kUseGeneric;
5322 }
5323 
5324 
5325 HValue* HOptimizedGraphBuilder::BuildContextChainWalk(Variable* var) {
5326  DCHECK(var->IsContextSlot());
5327  HValue* context = environment()->context();
5328  int length = scope()->ContextChainLength(var->scope());
5329  while (length-- > 0) {
5330  context = Add<HLoadNamedField>(
5331  context, static_cast<HValue*>(NULL),
5332  HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
5333  }
5334  return context;
5335 }
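
// A minimal model (not V8 code) of the walk above: starting from the current
// context, follow the "previous" link once for every scope that separates the
// use from the variable's owning scope.
struct ContextModel {
  ContextModel* previous;   // stands in for Context::PREVIOUS_INDEX
};

inline ContextModel* WalkContextChainModel(ContextModel* context, int length) {
  while (length-- > 0) context = context->previous;  // one HLoadNamedField per hop
  return context;
}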
5336 
5337 
5338 void HOptimizedGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
5339  if (expr->is_this()) {
5340  current_info()->set_this_has_uses(true);
5341  }
5342 
5343  DCHECK(!HasStackOverflow());
5344  DCHECK(current_block() != NULL);
5345  DCHECK(current_block()->HasPredecessor());
5346  Variable* variable = expr->var();
5347  switch (variable->location()) {
5348  case Variable::UNALLOCATED: {
5349  if (IsLexicalVariableMode(variable->mode())) {
5350  // TODO(rossberg): should this be an DCHECK?
5351  return Bailout(kReferenceToGlobalLexicalVariable);
5352  }
5353  // Handle known global constants like 'undefined' specially to avoid a
5354  // load from a global cell for them.
5355  Handle<Object> constant_value =
5356  isolate()->factory()->GlobalConstantFor(variable->name());
5357  if (!constant_value.is_null()) {
5358  HConstant* instr = New<HConstant>(constant_value);
5359  return ast_context()->ReturnInstruction(instr, expr->id());
5360  }
5361 
5362  Handle<GlobalObject> global(current_info()->global_object());
5363  LookupIterator it(global, variable->name(),
5364  LookupIterator::OWN_SKIP_INTERCEPTOR);
5365  GlobalPropertyAccess type = LookupGlobalProperty(variable, &it, LOAD);
5366 
5367  if (type == kUseCell) {
5368  Handle<PropertyCell> cell = it.GetPropertyCell();
5369  if (cell->type()->IsConstant()) {
5370  PropertyCell::AddDependentCompilationInfo(cell, top_info());
5371  Handle<Object> constant_object = cell->type()->AsConstant()->Value();
5372  if (constant_object->IsConsString()) {
5373  constant_object =
5374  String::Flatten(Handle<String>::cast(constant_object));
5375  }
5376  HConstant* constant = New<HConstant>(constant_object);
5377  return ast_context()->ReturnInstruction(constant, expr->id());
5378  } else {
5379  HLoadGlobalCell* instr =
5380  New<HLoadGlobalCell>(cell, it.property_details());
5381  return ast_context()->ReturnInstruction(instr, expr->id());
5382  }
5383  } else {
5384  HValue* global_object = Add<HLoadNamedField>(
5385  context(), static_cast<HValue*>(NULL),
5386  HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
5387  HLoadGlobalGeneric* instr =
5388  New<HLoadGlobalGeneric>(global_object,
5389  variable->name(),
5390  ast_context()->is_for_typeof());
5391  if (FLAG_vector_ics) {
5392  Handle<SharedFunctionInfo> current_shared =
5393  function_state()->compilation_info()->shared_info();
5394  instr->SetVectorAndSlot(
5395  handle(current_shared->feedback_vector(), isolate()),
5396  expr->VariableFeedbackSlot());
5397  }
5398  return ast_context()->ReturnInstruction(instr, expr->id());
5399  }
5400  }
5401 
5402  case Variable::PARAMETER:
5403  case Variable::LOCAL: {
5404  HValue* value = LookupAndMakeLive(variable);
5405  if (value == graph()->GetConstantHole()) {
5406  DCHECK(IsDeclaredVariableMode(variable->mode()) &&
5407  variable->mode() != VAR);
5408  return Bailout(kReferenceToUninitializedVariable);
5409  }
5410  return ast_context()->ReturnValue(value);
5411  }
5412 
5413  case Variable::CONTEXT: {
5414  HValue* context = BuildContextChainWalk(variable);
5415  HLoadContextSlot::Mode mode;
5416  switch (variable->mode()) {
5417  case LET:
5418  case CONST:
5419  mode = HLoadContextSlot::kCheckDeoptimize;
5420  break;
5421  case CONST_LEGACY:
5422  mode = HLoadContextSlot::kCheckReturnUndefined;
5423  break;
5424  default:
5425  mode = HLoadContextSlot::kNoCheck;
5426  break;
5427  }
5428  HLoadContextSlot* instr =
5429  new(zone()) HLoadContextSlot(context, variable->index(), mode);
5430  return ast_context()->ReturnInstruction(instr, expr->id());
5431  }
5432 
5433  case Variable::LOOKUP:
5434  return Bailout(kReferenceToAVariableWhichRequiresDynamicLookup);
5435  }
5436 }
5437 
5438 
5439 void HOptimizedGraphBuilder::VisitLiteral(Literal* expr) {
5440  DCHECK(!HasStackOverflow());
5441  DCHECK(current_block() != NULL);
5442  DCHECK(current_block()->HasPredecessor());
5443  HConstant* instr = New<HConstant>(expr->value());
5444  return ast_context()->ReturnInstruction(instr, expr->id());
5445 }
5446 
5447 
5448 void HOptimizedGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
5449  DCHECK(!HasStackOverflow());
5450  DCHECK(current_block() != NULL);
5451  DCHECK(current_block()->HasPredecessor());
5452  Handle<JSFunction> closure = function_state()->compilation_info()->closure();
5453  Handle<FixedArray> literals(closure->literals());
5454  HRegExpLiteral* instr = New<HRegExpLiteral>(literals,
5455  expr->pattern(),
5456  expr->flags(),
5457  expr->literal_index());
5458  return ast_context()->ReturnInstruction(instr, expr->id());
5459 }
5460 
5461 
5462 static bool CanInlinePropertyAccess(Type* type) {
5463  if (type->Is(Type::NumberOrString())) return true;
5464  if (!type->IsClass()) return false;
5465  Handle<Map> map = type->AsClass()->Map();
5466  return map->IsJSObjectMap() &&
5467  !map->is_dictionary_map() &&
5468  !map->has_named_interceptor();
5469 }
5470 
5471 
5472 // Determines whether the given array or object literal boilerplate satisfies
5473 // all limits to be considered for fast deep-copying and computes the total
5474 // size of all objects that are part of the graph.
5475 static bool IsFastLiteral(Handle<JSObject> boilerplate,
5476  int max_depth,
5477  int* max_properties) {
5478  if (boilerplate->map()->is_deprecated() &&
5479  !JSObject::TryMigrateInstance(boilerplate)) {
5480  return false;
5481  }
5482 
5483  DCHECK(max_depth >= 0 && *max_properties >= 0);
5484  if (max_depth == 0) return false;
5485 
5486  Isolate* isolate = boilerplate->GetIsolate();
5487  Handle<FixedArrayBase> elements(boilerplate->elements());
5488  if (elements->length() > 0 &&
5489  elements->map() != isolate->heap()->fixed_cow_array_map()) {
5490  if (boilerplate->HasFastObjectElements()) {
5491  Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
5492  int length = elements->length();
5493  for (int i = 0; i < length; i++) {
5494  if ((*max_properties)-- == 0) return false;
5495  Handle<Object> value(fast_elements->get(i), isolate);
5496  if (value->IsJSObject()) {
5497  Handle<JSObject> value_object = Handle<JSObject>::cast(value);
5498  if (!IsFastLiteral(value_object,
5499  max_depth - 1,
5500  max_properties)) {
5501  return false;
5502  }
5503  }
5504  }
5505  } else if (!boilerplate->HasFastDoubleElements()) {
5506  return false;
5507  }
5508  }
5509 
5510  Handle<FixedArray> properties(boilerplate->properties());
5511  if (properties->length() > 0) {
5512  return false;
5513  } else {
5514  Handle<DescriptorArray> descriptors(
5515  boilerplate->map()->instance_descriptors());
5516  int limit = boilerplate->map()->NumberOfOwnDescriptors();
5517  for (int i = 0; i < limit; i++) {
5518  PropertyDetails details = descriptors->GetDetails(i);
5519  if (details.type() != FIELD) continue;
5520  int index = descriptors->GetFieldIndex(i);
5521  if ((*max_properties)-- == 0) return false;
5522  Handle<Object> value(boilerplate->InObjectPropertyAt(index), isolate);
5523  if (value->IsJSObject()) {
5524  Handle<JSObject> value_object = Handle<JSObject>::cast(value);
5525  if (!IsFastLiteral(value_object,
5526  max_depth - 1,
5527  max_properties)) {
5528  return false;
5529  }
5530  }
5531  }
5532  }
5533  return true;
5534 }
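
// A compact model (not V8 code) of the budget check above: a boilerplate
// qualifies for the inlined deep copy only if its whole object graph fits
// within a depth limit and a shared slot budget; deep or very wide literals
// fall back to the runtime path.
#include <vector>

struct LiteralNodeModel {
  std::vector<LiteralNodeModel*> slots;   // nullptr => non-object value
};

inline bool IsFastLiteralModel(const LiteralNodeModel* node, int max_depth,
                               int* budget) {
  if (max_depth == 0) return false;
  for (const LiteralNodeModel* child : node->slots) {
    if ((*budget)-- == 0) return false;
    if (child != nullptr && !IsFastLiteralModel(child, max_depth - 1, budget)) {
      return false;
    }
  }
  return true;
}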
5535 
5536 
5537 void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
5538  DCHECK(!HasStackOverflow());
5539  DCHECK(current_block() != NULL);
5540  DCHECK(current_block()->HasPredecessor());
5541  expr->BuildConstantProperties(isolate());
5542  Handle<JSFunction> closure = function_state()->compilation_info()->closure();
5543  HInstruction* literal;
5544 
5545  // Check whether to use fast or slow deep-copying for boilerplate.
5546  int max_properties = kMaxFastLiteralProperties;
5547  Handle<Object> literals_cell(closure->literals()->get(expr->literal_index()),
5548  isolate());
5549  Handle<AllocationSite> site;
5550  Handle<JSObject> boilerplate;
5551  if (!literals_cell->IsUndefined()) {
5552  // Retrieve the boilerplate
5553  site = Handle<AllocationSite>::cast(literals_cell);
5554  boilerplate = Handle<JSObject>(JSObject::cast(site->transition_info()),
5555  isolate());
5556  }
5557 
5558  if (!boilerplate.is_null() &&
5559  IsFastLiteral(boilerplate, kMaxFastLiteralDepth, &max_properties)) {
5560  AllocationSiteUsageContext usage_context(isolate(), site, false);
5561  usage_context.EnterNewScope();
5562  literal = BuildFastLiteral(boilerplate, &usage_context);
5563  usage_context.ExitScope(site, boilerplate);
5564  } else {
5565  NoObservableSideEffectsScope no_effects(this);
5566  Handle<FixedArray> closure_literals(closure->literals(), isolate());
5567  Handle<FixedArray> constant_properties = expr->constant_properties();
5568  int literal_index = expr->literal_index();
5569  int flags = expr->fast_elements()
5570  ? ObjectLiteral::kFastElements : ObjectLiteral::kNoFlags;
5571  flags |= expr->has_function()
5572  ? ObjectLiteral::kHasFunction : ObjectLiteral::kNoFlags;
5573 
5574  Add<HPushArguments>(Add<HConstant>(closure_literals),
5575  Add<HConstant>(literal_index),
5576  Add<HConstant>(constant_properties),
5577  Add<HConstant>(flags));
5578 
5579  // TODO(mvstanton): Add a flag to turn off creation of any
5580  // AllocationMementos for this call: we are in crankshaft and should have
5581  // learned enough about transition behavior to stop emitting mementos.
5582  Runtime::FunctionId function_id = Runtime::kCreateObjectLiteral;
5583  literal = Add<HCallRuntime>(isolate()->factory()->empty_string(),
5584  Runtime::FunctionForId(function_id),
5585  4);
5586  }
5587 
5588  // The object is expected in the bailout environment during computation
5589  // of the property values and is the value of the entire expression.
5590  Push(literal);
5591 
5592  expr->CalculateEmitStore(zone());
5593 
5594  for (int i = 0; i < expr->properties()->length(); i++) {
5595  ObjectLiteral::Property* property = expr->properties()->at(i);
5596  if (property->IsCompileTimeValue()) continue;
5597 
5598  Literal* key = property->key();
5599  Expression* value = property->value();
5600 
5601  switch (property->kind()) {
5602  case ObjectLiteral::Property::MATERIALIZED_LITERAL:
5603  DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
5604  // Fall through.
5605  case ObjectLiteral::Property::COMPUTED:
5606  if (key->value()->IsInternalizedString()) {
5607  if (property->emit_store()) {
5608  CHECK_ALIVE(VisitForValue(value));
5609  HValue* value = Pop();
5610  Handle<Map> map = property->GetReceiverType();
5611  Handle<String> name = property->key()->AsPropertyName();
5612  HInstruction* store;
5613  if (map.is_null()) {
5614  // If we don't know the monomorphic type, do a generic store.
5615  CHECK_ALIVE(store = BuildNamedGeneric(
5616  STORE, NULL, literal, name, value));
5617  } else {
5618  PropertyAccessInfo info(this, STORE, ToType(map), name);
5619  if (info.CanAccessMonomorphic()) {
5620  HValue* checked_literal = Add<HCheckMaps>(literal, map);
5621  DCHECK(!info.IsAccessor());
5622  store = BuildMonomorphicAccess(
5623  &info, literal, checked_literal, value,
5624  BailoutId::None(), BailoutId::None());
5625  } else {
5626  CHECK_ALIVE(store = BuildNamedGeneric(
5627  STORE, NULL, literal, name, value));
5628  }
5629  }
5630  AddInstruction(store);
5631  if (store->HasObservableSideEffects()) {
5632  Add<HSimulate>(key->id(), REMOVABLE_SIMULATE);
5633  }
5634  } else {
5635  CHECK_ALIVE(VisitForEffect(value));
5636  }
5637  break;
5638  }
5639  // Fall through.
5640  case ObjectLiteral::Property::PROTOTYPE:
5641  case ObjectLiteral::Property::SETTER:
5642  case ObjectLiteral::Property::GETTER:
5643  return Bailout(kObjectLiteralWithComplexProperty);
5644  default: UNREACHABLE();
5645  }
5646  }
5647 
5648  if (expr->has_function()) {
5649  // Return the result of the transformation to fast properties
5650  // instead of the original since this operation changes the map
5651  // of the object. This makes sure that the original object won't
5652  // be used by other optimized code before it is transformed
5653  // (e.g. because of code motion).
5654  HToFastProperties* result = Add<HToFastProperties>(Pop());
5655  return ast_context()->ReturnValue(result);
5656  } else {
5657  return ast_context()->ReturnValue(Pop());
5658  }
5659 }
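
// A compact model (not V8 code) of how each non-constant property of the
// literal above is emitted: an internalized-string key whose receiver map is
// known and monomorphically accessible becomes a map-checked field store, a
// known key without a usable map becomes a generic named store, a property
// that needs no store is only evaluated for effect, and any other property
// kind bails out of optimization.
enum class PropertyEmitModel { kMonomorphicStore, kGenericStore, kEffectOnly, kBailout };

inline PropertyEmitModel ChoosePropertyEmit(bool key_is_internalized_string,
                                            bool emit_store,
                                            bool receiver_map_is_monomorphic) {
  if (!key_is_internalized_string) return PropertyEmitModel::kBailout;
  if (!emit_store) return PropertyEmitModel::kEffectOnly;
  return receiver_map_is_monomorphic ? PropertyEmitModel::kMonomorphicStore
                                     : PropertyEmitModel::kGenericStore;
}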
5660 
5661 
5662 void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
5663  DCHECK(!HasStackOverflow());
5664  DCHECK(current_block() != NULL);
5665  DCHECK(current_block()->HasPredecessor());
5666  expr->BuildConstantElements(isolate());
5667  ZoneList<Expression*>* subexprs = expr->values();
5668  int length = subexprs->length();
5669  HInstruction* literal;
5670 
5671  Handle<AllocationSite> site;
5672  Handle<FixedArray> literals(environment()->closure()->literals(), isolate());
5673  bool uninitialized = false;
5674  Handle<Object> literals_cell(literals->get(expr->literal_index()),
5675  isolate());
5676  Handle<JSObject> boilerplate_object;
5677  if (literals_cell->IsUndefined()) {
5678  uninitialized = true;
5679  Handle<Object> raw_boilerplate;
5680  ASSIGN_RETURN_ON_EXCEPTION_VALUE(
5681  isolate(), raw_boilerplate,
5682  Runtime::CreateArrayLiteralBoilerplate(
5683  isolate(), literals, expr->constant_elements()),
5684  Bailout(kArrayBoilerplateCreationFailed));
5685 
5686  boilerplate_object = Handle<JSObject>::cast(raw_boilerplate);
5687  AllocationSiteCreationContext creation_context(isolate());
5688  site = creation_context.EnterNewScope();
5689  if (JSObject::DeepWalk(boilerplate_object, &creation_context).is_null()) {
5690  return Bailout(kArrayBoilerplateCreationFailed);
5691  }
5692  creation_context.ExitScope(site, boilerplate_object);
5693  literals->set(expr->literal_index(), *site);
5694 
5695  if (boilerplate_object->elements()->map() ==
5696  isolate()->heap()->fixed_cow_array_map()) {
5697  isolate()->counters()->cow_arrays_created_runtime()->Increment();
5698  }
5699  } else {
5700  DCHECK(literals_cell->IsAllocationSite());
5701  site = Handle<AllocationSite>::cast(literals_cell);
5702  boilerplate_object = Handle<JSObject>(
5703  JSObject::cast(site->transition_info()), isolate());
5704  }
5705 
5706  DCHECK(!boilerplate_object.is_null());
5707  DCHECK(site->SitePointsToLiteral());
5708 
5709  ElementsKind boilerplate_elements_kind =
5710  boilerplate_object->GetElementsKind();
5711 
5712  // Check whether to use fast or slow deep-copying for boilerplate.
5713  int max_properties = kMaxFastLiteralProperties;
5714  if (IsFastLiteral(boilerplate_object,
5715  kMaxFastLiteralDepth,
5716  &max_properties)) {
5717  AllocationSiteUsageContext usage_context(isolate(), site, false);
5718  usage_context.EnterNewScope();
5719  literal = BuildFastLiteral(boilerplate_object, &usage_context);
5720  usage_context.ExitScope(site, boilerplate_object);
5721  } else {
5722  NoObservableSideEffectsScope no_effects(this);
5723  // Boilerplate already exists and constant elements are never accessed,
5724  // so pass an empty fixed array to the runtime function instead.
5725  Handle<FixedArray> constants = isolate()->factory()->empty_fixed_array();
5726  int literal_index = expr->literal_index();
5727  int flags = expr->depth() == 1
5728  ? ArrayLiteral::kShallowElements
5729  : ArrayLiteral::kNoFlags;
5730  flags |= ArrayLiteral::kDisableMementos;
5731 
5732  Add<HPushArguments>(Add<HConstant>(literals),
5733  Add<HConstant>(literal_index),
5734  Add<HConstant>(constants),
5735  Add<HConstant>(flags));
5736 
5737  // TODO(mvstanton): Consider a flag to turn off creation of any
5738  // AllocationMementos for this call: we are in crankshaft and should have
5739  // learned enough about transition behavior to stop emitting mementos.
5740  Runtime::FunctionId function_id = Runtime::kCreateArrayLiteral;
5741  literal = Add<HCallRuntime>(isolate()->factory()->empty_string(),
5742  Runtime::FunctionForId(function_id),
5743  4);
5744 
5745  // De-opt if elements kind changed from boilerplate_elements_kind.
5746  Handle<Map> map = Handle<Map>(boilerplate_object->map(), isolate());
5747  literal = Add<HCheckMaps>(literal, map);
5748  }
5749 
5750  // The array is expected in the bailout environment during computation
5751  // of the property values and is the value of the entire expression.
5752  Push(literal);
5753  // The literal index is on the stack, too.
5754  Push(Add<HConstant>(expr->literal_index()));
5755 
5756  HInstruction* elements = NULL;
5757 
5758  for (int i = 0; i < length; i++) {
5759  Expression* subexpr = subexprs->at(i);
5760  // If the subexpression is a literal or a simple materialized literal it
5761  // is already set in the cloned array.
5762  if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
5763 
5764  CHECK_ALIVE(VisitForValue(subexpr));
5765  HValue* value = Pop();
5766  if (!Smi::IsValid(i)) return Bailout(kNonSmiKeyInArrayLiteral);
5767 
5768  elements = AddLoadElements(literal);
5769 
5770  HValue* key = Add<HConstant>(i);
5771 
5772  switch (boilerplate_elements_kind) {
5773  case FAST_SMI_ELEMENTS:
5774  case FAST_HOLEY_SMI_ELEMENTS:
5775  case FAST_ELEMENTS:
5776  case FAST_HOLEY_ELEMENTS:
5777  case FAST_DOUBLE_ELEMENTS:
5778  case FAST_HOLEY_DOUBLE_ELEMENTS: {
5779  HStoreKeyed* instr = Add<HStoreKeyed>(elements, key, value,
5780  boilerplate_elements_kind);
5781  instr->SetUninitialized(uninitialized);
5782  break;
5783  }
5784  default:
5785  UNREACHABLE();
5786  break;
5787  }
5788 
5789  Add<HSimulate>(expr->GetIdForElement(i));
5790  }
5791 
5792  Drop(1); // array literal index
5793  return ast_context()->ReturnValue(Pop());
5794 }
5795 
5796 
5797 HCheckMaps* HOptimizedGraphBuilder::AddCheckMap(HValue* object,
5798  Handle<Map> map) {
5799  BuildCheckHeapObject(object);
5800  return Add<HCheckMaps>(object, map);
5801 }
5802 
5803 
5804 HInstruction* HOptimizedGraphBuilder::BuildLoadNamedField(
5805  PropertyAccessInfo* info,
5806  HValue* checked_object) {
5807  // See if this is a load for an immutable property
5808  if (checked_object->ActualValue()->IsConstant()) {
5809  Handle<Object> object(
5810  HConstant::cast(checked_object->ActualValue())->handle(isolate()));
5811 
5812  if (object->IsJSObject()) {
5813  LookupIterator it(object, info->name(),
5814  LookupIterator::OWN_SKIP_INTERCEPTOR);
5815  Handle<Object> value = JSObject::GetDataProperty(&it);
5816  if (it.IsFound() && it.IsReadOnly() && !it.IsConfigurable()) {
5817  return New<HConstant>(value);
5818  }
5819  }
5820  }
5821 
5822  HObjectAccess access = info->access();
5823  if (access.representation().IsDouble()) {
5824  // Load the heap number.
5825  checked_object = Add<HLoadNamedField>(
5826  checked_object, static_cast<HValue*>(NULL),
5827  access.WithRepresentation(Representation::Tagged()));
5828  // Load the double value from it.
5829  access = HObjectAccess::ForHeapNumberValue();
5830  }
5831 
5832  SmallMapList* map_list = info->field_maps();
5833  if (map_list->length() == 0) {
5834  return New<HLoadNamedField>(checked_object, checked_object, access);
5835  }
5836 
5837  UniqueSet<Map>* maps = new(zone()) UniqueSet<Map>(map_list->length(), zone());
5838  for (int i = 0; i < map_list->length(); ++i) {
5839  maps->Add(Unique<Map>::CreateImmovable(map_list->at(i)), zone());
5840  }
5841  return New<HLoadNamedField>(
5842  checked_object, checked_object, access, maps, info->field_type());
5843 }
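
// A minimal model (not V8 code) of the double-field indirection handled above:
// a field with double representation holds a pointer to a heap-number box
// rather than the number itself, so the load is two dereferences (the tagged
// field, then the boxed value).
struct MutableHeapNumberModel { double value; };

struct FieldOwnerModel {
  MutableHeapNumberModel* double_field;   // tagged slot pointing at the box
};

inline double LoadDoubleFieldModel(const FieldOwnerModel* object) {
  MutableHeapNumberModel* box = object->double_field;  // load the tagged field
  return box->value;                                   // load ForHeapNumberValue()
}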
5844 
5845 
5846 HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField(
5847  PropertyAccessInfo* info,
5848  HValue* checked_object,
5849  HValue* value) {
5850  bool transition_to_field = info->IsTransition();
5851  // TODO(verwaest): Move this logic into PropertyAccessInfo.
5852  HObjectAccess field_access = info->access();
5853 
5854  HStoreNamedField *instr;
5855  if (field_access.representation().IsDouble()) {
5856  HObjectAccess heap_number_access =
5857  field_access.WithRepresentation(Representation::Tagged());
5858  if (transition_to_field) {
5859  // The store requires a mutable HeapNumber to be allocated.
5860  NoObservableSideEffectsScope no_side_effects(this);
5861  HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);
5862 
5863  // TODO(hpayer): Allocation site pretenuring support.
5864  HInstruction* heap_number = Add<HAllocate>(heap_number_size,
5865  HType::HeapObject(),
5866  NOT_TENURED,
5867  MUTABLE_HEAP_NUMBER_TYPE);
5868  AddStoreMapConstant(
5869  heap_number, isolate()->factory()->mutable_heap_number_map());
5870  Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(),
5871  value);
5872  instr = New<HStoreNamedField>(checked_object->ActualValue(),
5873  heap_number_access,
5874  heap_number);
5875  } else {
5876  // Already holds a HeapNumber; load the box and write its value field.
5877  HInstruction* heap_number = Add<HLoadNamedField>(
5878  checked_object, static_cast<HValue*>(NULL), heap_number_access);
5879  instr = New<HStoreNamedField>(heap_number,
5880  HObjectAccess::ForHeapNumberValue(),
5881  value);
5882  }
5883  } else {
5884  if (field_access.representation().IsHeapObject()) {
5885  BuildCheckHeapObject(value);
5886  }
5887 
5888  if (!info->field_maps()->is_empty()) {
5889  DCHECK(field_access.representation().IsHeapObject());
5890  value = Add<HCheckMaps>(value, info->field_maps());
5891  }
5892 
5893  // This is a normal store.
5894  instr = New<HStoreNamedField>(
5895  checked_object->ActualValue(), field_access, value,
5896  transition_to_field ? INITIALIZING_STORE : STORE_TO_INITIALIZED_ENTRY);
5897  }
5898 
5899  if (transition_to_field) {
5900  Handle<Map> transition(info->transition());
5901  DCHECK(!transition->is_deprecated());
5902  instr->SetTransition(Add<HConstant>(transition));
5903  }
5904  return instr;
5905 }
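
// The corresponding store-side sketch (not V8 code, reusing the box layout
// from the load sketch above): a store that transitions to a new double field
// allocates a fresh mutable heap-number box, while a store to an existing
// double field writes into the box the object already holds.
struct BoxedDoubleModel { double value; };

struct BoxedFieldOwnerModel {
  BoxedDoubleModel* double_field;
};

inline void StoreDoubleFieldModel(BoxedFieldOwnerModel* object, double value,
                                  bool transition_to_field) {
  if (transition_to_field) {
    // HAllocate of the box plus a store of the value (ownership elided here).
    object->double_field = new BoxedDoubleModel{value};
  } else {
    object->double_field->value = value;   // write into the existing box
  }
}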
5906 
5907 
5908 bool HOptimizedGraphBuilder::PropertyAccessInfo::IsCompatible(
5909  PropertyAccessInfo* info) {
5910  if (!CanInlinePropertyAccess(type_)) return false;
5911 
5912  // Currently only handle Type::Number as a polymorphic case.
5913  // TODO(verwaest): Support monomorphic handling of numbers with a HCheckNumber
5914  // instruction.
5915  if (type_->Is(Type::Number())) return false;
5916 
5917  // Values are only compatible for monomorphic load if they all behave the same
5918  // regarding value wrappers.
5919  if (type_->Is(Type::NumberOrString())) {
5920  if (!info->type_->Is(Type::NumberOrString())) return false;
5921  } else {
5922  if (info->type_->Is(Type::NumberOrString())) return false;
5923  }
5924 
5925  if (!LookupDescriptor()) return false;
5926 
5927  if (!IsFound()) {
5928  return (!info->IsFound() || info->has_holder()) &&
5929  map()->prototype() == info->map()->prototype();
5930  }
5931 
5932  // Mismatch if the other access info found the property in the prototype
5933  // chain.
5934  if (info->has_holder()) return false;
5935 
5936  if (IsAccessor()) {
5937  return accessor_.is_identical_to(info->accessor_) &&
5938  api_holder_.is_identical_to(info->api_holder_);
5939  }
5940 
5941  if (IsConstant()) {
5942  return constant_.is_identical_to(info->constant_);
5943  }
5944 
5945  DCHECK(IsField());
5946  if (!info->IsField()) return false;
5947 
5948  Representation r = access_.representation();
5949  if (IsLoad()) {
5950  if (!info->access_.representation().IsCompatibleForLoad(r)) return false;
5951  } else {
5952  if (!info->access_.representation().IsCompatibleForStore(r)) return false;
5953  }
5954  if (info->access_.offset() != access_.offset()) return false;
5955  if (info->access_.IsInobject() != access_.IsInobject()) return false;
5956  if (IsLoad()) {
5957  if (field_maps_.is_empty()) {
5958  info->field_maps_.Clear();
5959  } else if (!info->field_maps_.is_empty()) {
5960  for (int i = 0; i < field_maps_.length(); ++i) {
5961  info->field_maps_.AddMapIfMissing(field_maps_.at(i), info->zone());
5962  }
5963  info->field_maps_.Sort();
5964  }
5965  } else {
5966  // We can only merge stores that agree on their field maps. The comparison
5967  // below is safe, since we keep the field maps sorted.
5968  if (field_maps_.length() != info->field_maps_.length()) return false;
5969  for (int i = 0; i < field_maps_.length(); ++i) {
5970  if (!field_maps_.at(i).is_identical_to(info->field_maps_.at(i))) {
5971  return false;
5972  }
5973  }
5974  }
5975  info->GeneralizeRepresentation(r);
5976  info->field_type_ = info->field_type_.Combine(field_type_);
5977  return true;
5978 }
5979 
5980 
5981 bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupDescriptor() {
5982  if (!type_->IsClass()) return true;
5983  map()->LookupDescriptor(NULL, *name_, &lookup_);
5984  return LoadResult(map());
5985 }
5986 
5987 
5988 bool HOptimizedGraphBuilder::PropertyAccessInfo::LoadResult(Handle<Map> map) {
5989  if (!IsLoad() && IsProperty() && IsReadOnly()) {
5990  return false;
5991  }
5992 
5993  if (IsField()) {
5994  // Construct the object field access.
5995  int index = GetLocalFieldIndexFromMap(map);
5996  access_ = HObjectAccess::ForField(map, index, representation(), name_);
5997 
5998  // Load field map for heap objects.
5999  LoadFieldMaps(map);
6000  } else if (IsAccessor()) {
6001  Handle<Object> accessors = GetAccessorsFromMap(map);
6002  if (!accessors->IsAccessorPair()) return false;
6003  Object* raw_accessor =
6004  IsLoad() ? Handle<AccessorPair>::cast(accessors)->getter()
6005  : Handle<AccessorPair>::cast(accessors)->setter();
6006  if (!raw_accessor->IsJSFunction()) return false;
6007  Handle<JSFunction> accessor = handle(JSFunction::cast(raw_accessor));
6008  if (accessor->shared()->IsApiFunction()) {
6009  CallOptimization call_optimization(accessor);
6010  if (call_optimization.is_simple_api_call()) {
6011  CallOptimization::HolderLookup holder_lookup;
6012  Handle<Map> receiver_map = this->map();
6013  api_holder_ = call_optimization.LookupHolderOfExpectedType(
6014  receiver_map, &holder_lookup);
6015  }
6016  }
6017  accessor_ = accessor;
6018  } else if (IsConstant()) {
6019  constant_ = GetConstantFromMap(map);
6020  }
6021 
6022  return true;
6023 }
6024 
6025 
6026 void HOptimizedGraphBuilder::PropertyAccessInfo::LoadFieldMaps(
6027  Handle<Map> map) {
6028  // Clear any previously collected field maps/type.
6029  field_maps_.Clear();
6030  field_type_ = HType::Tagged();
6031 
6032  // Figure out the field type from the accessor map.
6033  Handle<HeapType> field_type = GetFieldTypeFromMap(map);
6034 
6035  // Collect the (stable) maps from the field type.
6036  int num_field_maps = field_type->NumClasses();
6037  if (num_field_maps == 0) return;
6038  DCHECK(access_.representation().IsHeapObject());
6039  field_maps_.Reserve(num_field_maps, zone());
6040  HeapType::Iterator<Map> it = field_type->Classes();
6041  while (!it.Done()) {
6042  Handle<Map> field_map = it.Current();
6043  if (!field_map->is_stable()) {
6044  field_maps_.Clear();
6045  return;
6046  }
6047  field_maps_.Add(field_map, zone());
6048  it.Advance();
6049  }
6050  field_maps_.Sort();
6051  DCHECK_EQ(num_field_maps, field_maps_.length());
6052 
6053  // Determine field HType from field HeapType.
6054  field_type_ = HType::FromType<HeapType>(field_type);
6055  DCHECK(field_type_.IsHeapObject());
6056 
6057  // Add dependency on the map that introduced the field.
6058  Map::AddDependentCompilationInfo(GetFieldOwnerFromMap(map),
6059  DependentCode::kFieldTypeGroup, top_info());
6060 }
6061 
6062 
6063 bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupInPrototypes() {
6064  Handle<Map> map = this->map();
6065 
6066  while (map->prototype()->IsJSObject()) {
6067  holder_ = handle(JSObject::cast(map->prototype()));
6068  if (holder_->map()->is_deprecated()) {
6069  JSObject::TryMigrateInstance(holder_);
6070  }
6071  map = Handle<Map>(holder_->map());
6072  if (!CanInlinePropertyAccess(ToType(map))) {
6073  lookup_.NotFound();
6074  return false;
6075  }
6076  map->LookupDescriptor(*holder_, *name_, &lookup_);
6077  if (IsFound()) return LoadResult(map);
6078  }
6079  lookup_.NotFound();
6080  return true;
6081 }
6082 
6083 
6084 bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessMonomorphic() {
6085  if (!CanInlinePropertyAccess(type_)) return false;
6086  if (IsJSObjectFieldAccessor()) return IsLoad();
6087  if (this->map()->function_with_prototype() &&
6088  !this->map()->has_non_instance_prototype() &&
6089  name_.is_identical_to(isolate()->factory()->prototype_string())) {
6090  return IsLoad();
6091  }
6092  if (!LookupDescriptor()) return false;
6093  if (IsFound()) return IsLoad() || !IsReadOnly();
6094  if (!LookupInPrototypes()) return false;
6095  if (IsLoad()) return true;
6096 
6097  if (IsAccessor()) return true;
6098  Handle<Map> map = this->map();
6099  map->LookupTransition(NULL, *name_, &lookup_);
6100  if (lookup_.IsTransitionToField() && map->unused_property_fields() > 0) {
6101  // Construct the object field access.
6102  int descriptor = transition()->LastAdded();
6103  int index =
6104  transition()->instance_descriptors()->GetFieldIndex(descriptor) -
6105  map->inobject_properties();
6106  PropertyDetails details =
6107  transition()->instance_descriptors()->GetDetails(descriptor);
6108  Representation representation = details.representation();
6109  access_ = HObjectAccess::ForField(map, index, representation, name_);
6110 
6111  // Load field map for heap objects.
6112  LoadFieldMaps(transition());
6113  return true;
6114  }
6115  return false;
6116 }
6117 
6118 
6119 bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessAsMonomorphic(
6120  SmallMapList* types) {
6121  DCHECK(type_->Is(ToType(types->first())));
6122  if (!CanAccessMonomorphic()) return false;
6123  STATIC_ASSERT(kMaxLoadPolymorphism == kMaxStorePolymorphism);
6124  if (types->length() > kMaxLoadPolymorphism) return false;
6125 
6126  HObjectAccess access = HObjectAccess::ForMap(); // bogus default
6127  if (GetJSObjectFieldAccess(&access)) {
6128  for (int i = 1; i < types->length(); ++i) {
6129  PropertyAccessInfo test_info(
6130  builder_, access_type_, ToType(types->at(i)), name_);
6131  HObjectAccess test_access = HObjectAccess::ForMap(); // bogus default
6132  if (!test_info.GetJSObjectFieldAccess(&test_access)) return false;
6133  if (!access.Equals(test_access)) return false;
6134  }
6135  return true;
6136  }
6137 
6138  // Currently only handle Type::Number as a polymorphic case.
6139  // TODO(verwaest): Support monomorphic handling of numbers with a HCheckNumber
6140  // instruction.
6141  if (type_->Is(Type::Number())) return false;
6142 
6143  // Multiple maps cannot transition to the same target map.
6144  DCHECK(!IsLoad() || !IsTransition());
6145  if (IsTransition() && types->length() > 1) return false;
6146 
6147  for (int i = 1; i < types->length(); ++i) {
6148  PropertyAccessInfo test_info(
6149  builder_, access_type_, ToType(types->at(i)), name_);
6150  if (!test_info.IsCompatible(this)) return false;
6151  }
6152 
6153  return true;
6154 }
6155 
6156 
6157 Handle<Map> HOptimizedGraphBuilder::PropertyAccessInfo::map() {
6158  JSFunction* ctor = IC::GetRootConstructor(
6159  type_, current_info()->closure()->context()->native_context());
6160  if (ctor != NULL) return handle(ctor->initial_map());
6161  return type_->AsClass()->Map();
6162 }
6163 
6164 
6165 static bool NeedsWrappingFor(Type* type, Handle<JSFunction> target) {
6166  return type->Is(Type::NumberOrString()) &&
6167  target->shared()->strict_mode() == SLOPPY &&
6168  !target->shared()->native();
6169 }
6170 
6171 
6172 HInstruction* HOptimizedGraphBuilder::BuildMonomorphicAccess(
6173  PropertyAccessInfo* info,
6174  HValue* object,
6175  HValue* checked_object,
6176  HValue* value,
6177  BailoutId ast_id,
6178  BailoutId return_id,
6179  bool can_inline_accessor) {
6180 
6181  HObjectAccess access = HObjectAccess::ForMap(); // bogus default
6182  if (info->GetJSObjectFieldAccess(&access)) {
6183  DCHECK(info->IsLoad());
6184  return New<HLoadNamedField>(object, checked_object, access);
6185  }
6186 
6187  if (info->name().is_identical_to(isolate()->factory()->prototype_string()) &&
6188  info->map()->function_with_prototype()) {
6189  DCHECK(!info->map()->has_non_instance_prototype());
6190  return New<HLoadFunctionPrototype>(checked_object);
6191  }
6192 
6193  HValue* checked_holder = checked_object;
6194  if (info->has_holder()) {
6195  Handle<JSObject> prototype(JSObject::cast(info->map()->prototype()));
6196  checked_holder = BuildCheckPrototypeMaps(prototype, info->holder());
6197  }
6198 
6199  if (!info->IsFound()) {
6200  DCHECK(info->IsLoad());
6201  return graph()->GetConstantUndefined();
6202  }
6203 
6204  if (info->IsField()) {
6205  if (info->IsLoad()) {
6206  return BuildLoadNamedField(info, checked_holder);
6207  } else {
6208  return BuildStoreNamedField(info, checked_object, value);
6209  }
6210  }
6211 
6212  if (info->IsTransition()) {
6213  DCHECK(!info->IsLoad());
6214  return BuildStoreNamedField(info, checked_object, value);
6215  }
6216 
6217  if (info->IsAccessor()) {
6218  Push(checked_object);
6219  int argument_count = 1;
6220  if (!info->IsLoad()) {
6221  argument_count = 2;
6222  Push(value);
6223  }
6224 
6225  if (NeedsWrappingFor(info->type(), info->accessor())) {
6226  HValue* function = Add<HConstant>(info->accessor());
6227  PushArgumentsFromEnvironment(argument_count);
6228  return New<HCallFunction>(function, argument_count, WRAP_AND_CALL);
6229  } else if (FLAG_inline_accessors && can_inline_accessor) {
6230  bool success = info->IsLoad()
6231  ? TryInlineGetter(info->accessor(), info->map(), ast_id, return_id)
6232  : TryInlineSetter(
6233  info->accessor(), info->map(), ast_id, return_id, value);
6234  if (success || HasStackOverflow()) return NULL;
6235  }
6236 
6237  PushArgumentsFromEnvironment(argument_count);
6238  return BuildCallConstantFunction(info->accessor(), argument_count);
6239  }
6240 
6241  DCHECK(info->IsConstant());
6242  if (info->IsLoad()) {
6243  return New<HConstant>(info->constant());
6244  } else {
6245  return New<HCheckValue>(value, Handle<JSFunction>::cast(info->constant()));
6246  }
6247 }
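
// A compact model (not V8 code) of how the accessor case above is lowered:
// primitive receivers of sloppy, non-native accessors take the wrap-and-call
// path; otherwise inlining is attempted, and only if it neither succeeds nor
// overflows the stack is a direct call to the known accessor emitted.
enum class AccessorLoweringModel { kWrapAndCall, kInlined, kDirectCall, kAbort };

inline AccessorLoweringModel ChooseAccessorLowering(bool needs_receiver_wrapping,
                                                    bool inlining_enabled,
                                                    bool inline_succeeded,
                                                    bool stack_overflow) {
  if (needs_receiver_wrapping) return AccessorLoweringModel::kWrapAndCall;
  if (inlining_enabled) {
    if (inline_succeeded) return AccessorLoweringModel::kInlined;
    if (stack_overflow) return AccessorLoweringModel::kAbort;
  }
  return AccessorLoweringModel::kDirectCall;
}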
6248 
6249 
6250 void HOptimizedGraphBuilder::HandlePolymorphicNamedFieldAccess(
6251  PropertyAccessType access_type,
6252  Expression* expr,
6253  BailoutId ast_id,
6254  BailoutId return_id,
6255  HValue* object,
6256  HValue* value,
6257  SmallMapList* types,
6258  Handle<String> name) {
6259  // Something did not match; must use a polymorphic load.
6260  int count = 0;
6261  HBasicBlock* join = NULL;
6262  HBasicBlock* number_block = NULL;
6263  bool handled_string = false;
6264 
6265  bool handle_smi = false;
6266  STATIC_ASSERT(kMaxLoadPolymorphism == kMaxStorePolymorphism);
6267  int i;
6268  for (i = 0; i < types->length() && count < kMaxLoadPolymorphism; ++i) {
6269  PropertyAccessInfo info(this, access_type, ToType(types->at(i)), name);
6270  if (info.type()->Is(Type::String())) {
6271  if (handled_string) continue;
6272  handled_string = true;
6273  }
6274  if (info.CanAccessMonomorphic()) {
6275  count++;
6276  if (info.type()->Is(Type::Number())) {
6277  handle_smi = true;
6278  break;
6279  }
6280  }
6281  }
6282 
6283  if (i < types->length()) {
6284  count = -1;
6285  types->Clear();
6286  } else {
6287  count = 0;
6288  }
6289  HControlInstruction* smi_check = NULL;
6290  handled_string = false;
6291 
6292  for (int i = 0; i < types->length() && count < kMaxLoadPolymorphism; ++i) {
6293  PropertyAccessInfo info(this, access_type, ToType(types->at(i)), name);
6294  if (info.type()->Is(Type::String())) {
6295  if (handled_string) continue;
6296  handled_string = true;
6297  }
6298  if (!info.CanAccessMonomorphic()) continue;
6299 
6300  if (count == 0) {
6301  join = graph()->CreateBasicBlock();
6302  if (handle_smi) {
6303  HBasicBlock* empty_smi_block = graph()->CreateBasicBlock();
6304  HBasicBlock* not_smi_block = graph()->CreateBasicBlock();
6305  number_block = graph()->CreateBasicBlock();
6306  smi_check = New<HIsSmiAndBranch>(
6307  object, empty_smi_block, not_smi_block);
6308  FinishCurrentBlock(smi_check);
6309  GotoNoSimulate(empty_smi_block, number_block);
6310  set_current_block(not_smi_block);
6311  } else {
6312  BuildCheckHeapObject(object);
6313  }
6314  }
6315  ++count;
6316  HBasicBlock* if_true = graph()->CreateBasicBlock();
6317  HBasicBlock* if_false = graph()->CreateBasicBlock();
6318  HUnaryControlInstruction* compare;
6319 
6320  HValue* dependency;
6321  if (info.type()->Is(Type::Number())) {
6322  Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();
6323  compare = New<HCompareMap>(object, heap_number_map, if_true, if_false);
6324  dependency = smi_check;
6325  } else if (info.type()->Is(Type::String())) {
6326  compare = New<HIsStringAndBranch>(object, if_true, if_false);
6327  dependency = compare;
6328  } else {
6329  compare = New<HCompareMap>(object, info.map(), if_true, if_false);
6330  dependency = compare;
6331  }
6332  FinishCurrentBlock(compare);
6333 
6334  if (info.type()->Is(Type::Number())) {
6335  GotoNoSimulate(if_true, number_block);
6336  if_true = number_block;
6337  }
6338 
6339  set_current_block(if_true);
6340 
6341  HInstruction* access = BuildMonomorphicAccess(
6342  &info, object, dependency, value, ast_id,
6343  return_id, FLAG_polymorphic_inlining);
6344 
6345  HValue* result = NULL;
6346  switch (access_type) {
6347  case LOAD:
6348  result = access;
6349  break;
6350  case STORE:
6351  result = value;
6352  break;
6353  }
6354 
6355  if (access == NULL) {
6356  if (HasStackOverflow()) return;
6357  } else {
6358  if (!access->IsLinked()) AddInstruction(access);
6359  if (!ast_context()->IsEffect()) Push(result);
6360  }
6361 
6362  if (current_block() != NULL) Goto(join);
6363  set_current_block(if_false);
6364  }
6365 
6366  // Finish up. Unconditionally deoptimize if we've handled all the maps we
6367  // know about and do not want to handle ones we've never seen. Otherwise
6368  // use a generic IC.
6369  if (count == types->length() && FLAG_deoptimize_uncommon_cases) {
6370  FinishExitWithHardDeoptimization("Unknown map in polymorphic access");
6371  } else {
6372  HInstruction* instr = BuildNamedGeneric(access_type, expr, object, name,
6373  value);
6374  AddInstruction(instr);
6375  if (!ast_context()->IsEffect()) Push(access_type == LOAD ? instr : value);
6376 
6377  if (join != NULL) {
6378  Goto(join);
6379  } else {
6380  Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
6381  if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
6382  return;
6383  }
6384  }
6385 
6386  DCHECK(join != NULL);
6387  if (join->HasPredecessor()) {
6388  join->SetJoinId(ast_id);
6389  set_current_block(join);
6390  if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
6391  } else {
6392  set_current_block(NULL);
6393  }
6394 }
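
// A self-contained model (not V8 code) of the dispatch structure built above:
// the receiver is compared against each handled map in turn, a matching arm
// performs its monomorphic access and jumps to the join, and the leftover path
// either deoptimizes (when every collected map was handled and uncommon-case
// deopts are enabled) or falls back to the generic IC.
#include <functional>
#include <vector>

struct PolymorphicArmModel {
  const void* map;                          // map handled by this arm
  std::function<void()> monomorphic_access;
};

inline void RunPolymorphicAccessModel(
    const void* receiver_map, const std::vector<PolymorphicArmModel>& arms,
    bool deoptimize_leftover, const std::function<void()>& generic_ic,
    const std::function<void()>& deoptimize) {
  for (const PolymorphicArmModel& arm : arms) {
    if (receiver_map == arm.map) {     // HCompareMap per arm
      arm.monomorphic_access();        // then Goto(join)
      return;
    }
  }
  if (deoptimize_leftover) deoptimize();   // FinishExitWithHardDeoptimization
  else generic_ic();                       // BuildNamedGeneric fallback
}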
6395 
6396 
6397 static bool ComputeReceiverTypes(Expression* expr,
6398  HValue* receiver,
6399  SmallMapList** t,
6400  Zone* zone) {
6401  SmallMapList* types = expr->GetReceiverTypes();
6402  *t = types;
6403  bool monomorphic = expr->IsMonomorphic();
6404  if (types != NULL && receiver->HasMonomorphicJSObjectType()) {
6405  Map* root_map = receiver->GetMonomorphicJSObjectMap()->FindRootMap();
6406  types->FilterForPossibleTransitions(root_map);
6407  monomorphic = types->length() == 1;
6408  }
6409  return monomorphic &&
6410  CanInlinePropertyAccess(IC::MapToType<Type>(types->first(), zone));
6411 }
6412 
6413 
6414 static bool AreStringTypes(SmallMapList* types) {
6415  for (int i = 0; i < types->length(); i++) {
6416  if (types->at(i)->instance_type() >= FIRST_NONSTRING_TYPE) return false;
6417  }
6418  return true;
6419 }
6420 
6421 
6422 void HOptimizedGraphBuilder::BuildStore(Expression* expr,
6423  Property* prop,
6424  BailoutId ast_id,
6425  BailoutId return_id,
6426  bool is_uninitialized) {
6427  if (!prop->key()->IsPropertyName()) {
6428  // Keyed store.
6429  HValue* value = environment()->ExpressionStackAt(0);
6430  HValue* key = environment()->ExpressionStackAt(1);
6431  HValue* object = environment()->ExpressionStackAt(2);
6432  bool has_side_effects = false;
6433  HandleKeyedElementAccess(object, key, value, expr, ast_id, return_id, STORE,
6434  &has_side_effects);
6435  Drop(3);
6436  Push(value);
6437  Add<HSimulate>(return_id, REMOVABLE_SIMULATE);
6438  return ast_context()->ReturnValue(Pop());
6439  }
6440 
6441  // Named store.
6442  HValue* value = Pop();
6443  HValue* object = Pop();
6444 
6445  Literal* key = prop->key()->AsLiteral();
6446  Handle<String> name = Handle<String>::cast(key->value());
6447  DCHECK(!name.is_null());
6448 
6449  HInstruction* instr = BuildNamedAccess(STORE, ast_id, return_id, expr,
6450  object, name, value, is_uninitialized);
6451  if (instr == NULL) return;
6452 
6453  if (!ast_context()->IsEffect()) Push(value);
6454  AddInstruction(instr);
6455  if (instr->HasObservableSideEffects()) {
6456  Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
6457  }
6458  if (!ast_context()->IsEffect()) Drop(1);
6459  return ast_context()->ReturnValue(value);
6460 }
6461 
6462 
6463 void HOptimizedGraphBuilder::HandlePropertyAssignment(Assignment* expr) {
6464  Property* prop = expr->target()->AsProperty();
6465  DCHECK(prop != NULL);
6466  CHECK_ALIVE(VisitForValue(prop->obj()));
6467  if (!prop->key()->IsPropertyName()) {
6468  CHECK_ALIVE(VisitForValue(prop->key()));
6469  }
6470  CHECK_ALIVE(VisitForValue(expr->value()));
6471  BuildStore(expr, prop, expr->id(),
6472  expr->AssignmentId(), expr->IsUninitialized());
6473 }
6474 
6475 
6476 // Because not every expression has a position and there is no common
6477 // superclass of Assignment and CountOperation, we cannot just pass the
6478 // owning expression instead of position and ast_id separately.
6479 void HOptimizedGraphBuilder::HandleGlobalVariableAssignment(
6480  Variable* var,
6481  HValue* value,
6482  BailoutId ast_id) {
6483  Handle<GlobalObject> global(current_info()->global_object());
6484  LookupIterator it(global, var->name(), LookupIterator::OWN_SKIP_INTERCEPTOR);
6485  GlobalPropertyAccess type = LookupGlobalProperty(var, &it, STORE);
6486  if (type == kUseCell) {
6487  Handle<PropertyCell> cell = it.GetPropertyCell();
6488  if (cell->type()->IsConstant()) {
6489  Handle<Object> constant = cell->type()->AsConstant()->Value();
6490  if (value->IsConstant()) {
6491  HConstant* c_value = HConstant::cast(value);
6492  if (!constant.is_identical_to(c_value->handle(isolate()))) {
6493  Add<HDeoptimize>("Constant global variable assignment",
6494  Deoptimizer::EAGER);
6495  }
6496  } else {
6497  HValue* c_constant = Add<HConstant>(constant);
6498  IfBuilder builder(this);
6499  if (constant->IsNumber()) {
6500  builder.If<HCompareNumericAndBranch>(value, c_constant, Token::EQ);
6501  } else {
6502  builder.If<HCompareObjectEqAndBranch>(value, c_constant);
6503  }
6504  builder.Then();
6505  builder.Else();
6506  Add<HDeoptimize>("Constant global variable assignment",
6507  Deoptimizer::EAGER);
6508  builder.End();
6509  }
6510  }
6511  HInstruction* instr =
6512  Add<HStoreGlobalCell>(value, cell, it.property_details());
6513  if (instr->HasObservableSideEffects()) {
6514  Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
6515  }
6516  } else {
6517  HValue* global_object = Add<HLoadNamedField>(
6518  context(), static_cast<HValue*>(NULL),
6519  HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
6520  HStoreNamedGeneric* instr =
6521  Add<HStoreNamedGeneric>(global_object, var->name(),
6522  value, function_strict_mode());
6523  USE(instr);
6524  DCHECK(instr->HasObservableSideEffects());
6525  Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
6526  }
6527 }
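
// A minimal model (not V8 code) of the constant-cell guard added above: when
// the global's property cell is believed to hold a constant, optimized code
// may only store a value equal to that constant, so the store is guarded by an
// equality check that deoptimizes on mismatch.
#include <functional>

template <typename T>
void StoreToConstantCellModel(T* cell, const T& cell_constant, const T& value,
                              const std::function<void()>& deoptimize) {
  if (!(value == cell_constant)) {
    deoptimize();      // leave optimized code; the runtime handles the store
    return;
  }
  *cell = value;       // fast path: HStoreGlobalCell
}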
6528 
6529 
6530 void HOptimizedGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
6531  Expression* target = expr->target();
6532  VariableProxy* proxy = target->AsVariableProxy();
6533  Property* prop = target->AsProperty();
6534  DCHECK(proxy == NULL || prop == NULL);
6535 
6536  // We have a second position recorded in the FullCodeGenerator to have
6537  // type feedback for the binary operation.
6538  BinaryOperation* operation = expr->binary_operation();
6539 
6540  if (proxy != NULL) {
6541  Variable* var = proxy->var();
6542  if (var->mode() == LET) {
6543  return Bailout(kUnsupportedLetCompoundAssignment);
6544  }
6545 
6546  CHECK_ALIVE(VisitForValue(operation));
6547 
6548  switch (var->location()) {
6549  case Variable::UNALLOCATED:
6550  HandleGlobalVariableAssignment(var,
6551  Top(),
6552  expr->AssignmentId());
6553  break;
6554 
6555  case Variable::PARAMETER:
6556  case Variable::LOCAL:
6557  if (var->mode() == CONST_LEGACY) {
6558  return Bailout(kUnsupportedConstCompoundAssignment);
6559  }
6560  BindIfLive(var, Top());
6561  break;
6562 
6563  case Variable::CONTEXT: {
6564  // Bail out if we try to mutate a parameter value in a function
6565  // using the arguments object. We do not (yet) correctly handle the
6566  // arguments property of the function.
6567  if (current_info()->scope()->arguments() != NULL) {
6568  // Parameters will be allocated to context slots. We have no
6569  // direct way to detect that the variable is a parameter so we do
6570  // a linear search of the parameter variables.
6571  int count = current_info()->scope()->num_parameters();
6572  for (int i = 0; i < count; ++i) {
6573  if (var == current_info()->scope()->parameter(i)) {
6574  Bailout(kAssignmentToParameterFunctionUsesArgumentsObject);
6575  }
6576  }
6577  }
6578 
6579  HStoreContextSlot::Mode mode;
6580 
6581  switch (var->mode()) {
6582  case LET:
6583  mode = HStoreContextSlot::kCheckDeoptimize;
6584  break;
6585  case CONST:
6586  // This case is checked statically so no need to
6587  // perform checks here
6588  UNREACHABLE();
6589  case CONST_LEGACY:
6590  return ast_context()->ReturnValue(Pop());
6591  default:
6592  mode = HStoreContextSlot::kNoCheck;
6593  }
6594 
6595  HValue* context = BuildContextChainWalk(var);
6596  HStoreContextSlot* instr = Add<HStoreContextSlot>(
6597  context, var->index(), mode, Top());
6598  if (instr->HasObservableSideEffects()) {
6599  Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
6600  }
6601  break;
6602  }
6603 
6604  case Variable::LOOKUP:
6605  return Bailout(kCompoundAssignmentToLookupSlot);
6606  }
6607  return ast_context()->ReturnValue(Pop());
6608 
6609  } else if (prop != NULL) {
6610  CHECK_ALIVE(VisitForValue(prop->obj()));
6611  HValue* object = Top();
6612  HValue* key = NULL;
6613  if (!prop->key()->IsPropertyName() || prop->IsStringAccess()) {
6614  CHECK_ALIVE(VisitForValue(prop->key()));
6615  key = Top();
6616  }
6617 
6618  CHECK_ALIVE(PushLoad(prop, object, key));
6619 
6620  CHECK_ALIVE(VisitForValue(expr->value()));
6621  HValue* right = Pop();
6622  HValue* left = Pop();
6623 
6624  Push(BuildBinaryOperation(operation, left, right, PUSH_BEFORE_SIMULATE));
6625 
6626  BuildStore(expr, prop, expr->id(),
6627  expr->AssignmentId(), expr->IsUninitialized());
6628  } else {
6629  return Bailout(kInvalidLhsInCompoundAssignment);
6630  }
6631 }
6632 
6633 
6634 void HOptimizedGraphBuilder::VisitAssignment(Assignment* expr) {
6635  DCHECK(!HasStackOverflow());
6636  DCHECK(current_block() != NULL);
6637  DCHECK(current_block()->HasPredecessor());
6638  VariableProxy* proxy = expr->target()->AsVariableProxy();
6639  Property* prop = expr->target()->AsProperty();
6640  DCHECK(proxy == NULL || prop == NULL);
6641 
6642  if (expr->is_compound()) {
6643  HandleCompoundAssignment(expr);
6644  return;
6645  }
6646 
6647  if (prop != NULL) {
6648  HandlePropertyAssignment(expr);
6649  } else if (proxy != NULL) {
6650  Variable* var = proxy->var();
6651 
6652  if (var->mode() == CONST) {
6653  if (expr->op() != Token::INIT_CONST) {
6654  return Bailout(kNonInitializerAssignmentToConst);
6655  }
6656  } else if (var->mode() == CONST_LEGACY) {
6657  if (expr->op() != Token::INIT_CONST_LEGACY) {
6658  CHECK_ALIVE(VisitForValue(expr->value()));
6659  return ast_context()->ReturnValue(Pop());
6660  }
6661 
6662  if (var->IsStackAllocated()) {
6663  // We insert a use of the old value to detect unsupported uses of const
6664  // variables (e.g. initialization inside a loop).
6665  HValue* old_value = environment()->Lookup(var);
6666  Add<HUseConst>(old_value);
6667  }
6668  }
6669 
6670  if (proxy->IsArguments()) return Bailout(kAssignmentToArguments);
6671 
6672  // Handle the assignment.
6673  switch (var->location()) {
6674  case Variable::UNALLOCATED:
6675  CHECK_ALIVE(VisitForValue(expr->value()));
6676  HandleGlobalVariableAssignment(var,
6677  Top(),
6678  expr->AssignmentId());
6679  return ast_context()->ReturnValue(Pop());
6680 
6681  case Variable::PARAMETER:
6682  case Variable::LOCAL: {
6683  // Perform an initialization check for let declared variables
6684  // or parameters.
6685  if (var->mode() == LET && expr->op() == Token::ASSIGN) {
6686  HValue* env_value = environment()->Lookup(var);
6687  if (env_value == graph()->GetConstantHole()) {
6688  return Bailout(kAssignmentToLetVariableBeforeInitialization);
6689  }
6690  }
6691  // We do not allow the arguments object to occur in a context where it
6692  // may escape, but assignments to stack-allocated locals are
6693  // permitted.
6694  CHECK_ALIVE(VisitForValue(expr->value(), ARGUMENTS_ALLOWED));
6695  HValue* value = Pop();
6696  BindIfLive(var, value);
6697  return ast_context()->ReturnValue(value);
6698  }
6699 
6700  case Variable::CONTEXT: {
6701  // Bail out if we try to mutate a parameter value in a function using
6702  // the arguments object. We do not (yet) correctly handle the
6703  // arguments property of the function.
6704  if (current_info()->scope()->arguments() != NULL) {
6705  // Parameters will rewrite to context slots. We have no direct way
6706  // to detect that the variable is a parameter.
6707  int count = current_info()->scope()->num_parameters();
6708  for (int i = 0; i < count; ++i) {
6709  if (var == current_info()->scope()->parameter(i)) {
6710  return Bailout(kAssignmentToParameterInArgumentsObject);
6711  }
6712  }
6713  }
6714 
6715  CHECK_ALIVE(VisitForValue(expr->value()));
6716  HStoreContextSlot::Mode mode;
6717  if (expr->op() == Token::ASSIGN) {
6718  switch (var->mode()) {
6719  case LET:
6720  mode = HStoreContextSlot::kCheckDeoptimize;
6721  break;
6722  case CONST:
6723  // This case is checked statically so no need to
6724  // perform checks here
6725  UNREACHABLE();
6726  case CONST_LEGACY:
6727  return ast_context()->ReturnValue(Pop());
6728  default:
6729  mode = HStoreContextSlot::kNoCheck;
6730  }
6731  } else if (expr->op() == Token::INIT_VAR ||
6732  expr->op() == Token::INIT_LET ||
6733  expr->op() == Token::INIT_CONST) {
6734  mode = HStoreContextSlot::kNoCheck;
6735  } else {
6736  DCHECK(expr->op() == Token::INIT_CONST_LEGACY);
6737 
6738  mode = HStoreContextSlot::kCheckIgnoreAssignment;
6739  }
6740 
6741  HValue* context = BuildContextChainWalk(var);
6742  HStoreContextSlot* instr = Add<HStoreContextSlot>(
6743  context, var->index(), mode, Top());
6744  if (instr->HasObservableSideEffects()) {
6745  Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
6746  }
6747  return ast_context()->ReturnValue(Pop());
6748  }
6749 
6750  case Variable::LOOKUP:
6751  return Bailout(kAssignmentToLOOKUPVariable);
6752  }
6753  } else {
6754  return Bailout(kInvalidLeftHandSideInAssignment);
6755  }
6756 }
6757 
6758 
6759 void HOptimizedGraphBuilder::VisitYield(Yield* expr) {
6760  // Generators are not optimized, so we should never get here.
6761  UNREACHABLE();
6762 }
6763 
6764 
6765 void HOptimizedGraphBuilder::VisitThrow(Throw* expr) {
6766  DCHECK(!HasStackOverflow());
6767  DCHECK(current_block() != NULL);
6768  DCHECK(current_block()->HasPredecessor());
6769  if (!ast_context()->IsEffect()) {
6770  // The parser turns invalid left-hand sides in assignments into throw
6771  // statements, which may not be in effect contexts. We might still try
6772  // to optimize such functions; bail out now if we do.
6773  return Bailout(kInvalidLeftHandSideInAssignment);
6774  }
6775  CHECK_ALIVE(VisitForValue(expr->exception()));
6776 
6777  HValue* value = environment()->Pop();
6778  if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
6779  Add<HPushArguments>(value);
6780  Add<HCallRuntime>(isolate()->factory()->empty_string(),
6781  Runtime::FunctionForId(Runtime::kThrow), 1);
6782  Add<HSimulate>(expr->id());
6783 
6784  // If the throw definitely exits the function, we can finish with a dummy
6785  // control flow at this point. This is not the case if the throw is inside
6786  // an inlined function which may be replaced.
6787  if (call_context() == NULL) {
6788  FinishExitCurrentBlock(New<HAbnormalExit>());
6789  }
6790 }
6791 
6792 
6794  if (string->IsConstant()) {
6795  HConstant* c_string = HConstant::cast(string);
6796  if (c_string->HasStringValue()) {
6797  return Add<HConstant>(c_string->StringValue()->map()->instance_type());
6798  }
6799  }
6800  return Add<HLoadNamedField>(
6801  Add<HLoadNamedField>(string, static_cast<HValue*>(NULL),
6802  HObjectAccess::ForMap()),
6803  static_cast<HValue*>(NULL), HObjectAccess::ForMapInstanceType());
6804 }
6805 
6806 
6808  if (string->IsConstant()) {
6809  HConstant* c_string = HConstant::cast(string);
6810  if (c_string->HasStringValue()) {
6811  return Add<HConstant>(c_string->StringValue()->length());
6812  }
6813  }
6814  return Add<HLoadNamedField>(string, static_cast<HValue*>(NULL),
6815  HObjectAccess::ForStringLength());
6816 }
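// Illustrative standalone sketch (not part of hydrogen.cc): both helpers above
// follow the same pattern -- if the operand is already a compile-time constant
// string, fold the answer into a fresh constant; otherwise emit a field load.
// FoldOrLoadLength and the returned descriptions are invented for illustration.
#include <optional>
#include <string>

inline std::string FoldOrLoadLength(
    const std::optional<std::string>& constant_string) {
  if (constant_string.has_value()) {
    // Known at compile time: emit a constant instead of a field load.
    return "HConstant(" + std::to_string(constant_string->size()) + ")";
  }
  // Unknown operand: emit a load of the string's length field.
  return "HLoadNamedField(string, ForStringLength)";
}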
6817 
6818 
6819 HInstruction* HOptimizedGraphBuilder::BuildNamedGeneric(
6820  PropertyAccessType access_type,
6821  Expression* expr,
6822  HValue* object,
6823  Handle<String> name,
6824  HValue* value,
6825  bool is_uninitialized) {
6826  if (is_uninitialized) {
6827  Add<HDeoptimize>("Insufficient type feedback for generic named access",
6828  Deoptimizer::SOFT);
6829  }
6830  if (access_type == LOAD) {
6831  HLoadNamedGeneric* result = New<HLoadNamedGeneric>(object, name);
6832  if (FLAG_vector_ics) {
6833  Handle<SharedFunctionInfo> current_shared =
6834  function_state()->compilation_info()->shared_info();
6835  result->SetVectorAndSlot(
6836  handle(current_shared->feedback_vector(), isolate()),
6837  expr->AsProperty()->PropertyFeedbackSlot());
6838  }
6839  return result;
6840  } else {
6841  return New<HStoreNamedGeneric>(object, name, value, function_strict_mode());
6842  }
6843 }
6844 
6845 
6846 
6847 HInstruction* HOptimizedGraphBuilder::BuildKeyedGeneric(
6848  PropertyAccessType access_type,
6849  Expression* expr,
6850  HValue* object,
6851  HValue* key,
6852  HValue* value) {
6853  if (access_type == LOAD) {
6854  HLoadKeyedGeneric* result = New<HLoadKeyedGeneric>(object, key);
6855  if (FLAG_vector_ics) {
6856  Handle<SharedFunctionInfo> current_shared =
6857  function_state()->compilation_info()->shared_info();
6858  result->SetVectorAndSlot(
6859  handle(current_shared->feedback_vector(), isolate()),
6860  expr->AsProperty()->PropertyFeedbackSlot());
6861  }
6862  return result;
6863  } else {
6864  return New<HStoreKeyedGeneric>(object, key, value, function_strict_mode());
6865  }
6866 }
6867 
6868 
6869 LoadKeyedHoleMode HOptimizedGraphBuilder::BuildKeyedHoleMode(Handle<Map> map) {
6870  // Loads from a "stock" fast holey double array can elide the hole check.
6871  LoadKeyedHoleMode load_mode = NEVER_RETURN_HOLE;
6872  if (*map == isolate()->get_initial_js_array_map(FAST_HOLEY_DOUBLE_ELEMENTS) &&
6873  isolate()->IsFastArrayConstructorPrototypeChainIntact()) {
6874  Handle<JSObject> prototype(JSObject::cast(map->prototype()), isolate());
6875  Handle<JSObject> object_prototype = isolate()->initial_object_prototype();
6876  BuildCheckPrototypeMaps(prototype, object_prototype);
6877  load_mode = ALLOW_RETURN_HOLE;
6878  graph()->MarkDependsOnEmptyArrayProtoElements();
6879  }
6880 
6881  return load_mode;
6882 }
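// Illustrative standalone sketch (not part of hydrogen.cc): the hole check can
// only be elided when the receiver uses the unmodified initial
// FAST_HOLEY_DOUBLE_ELEMENTS JSArray map and the Array prototype chain is
// still in its initial state, so a hole read can only ever observe undefined.
// The enum and function names below are invented for illustration.
enum class HoleMode { kNeverReturnHole, kAllowReturnHole };

inline HoleMode ChooseHoleMode(bool is_initial_holey_double_array_map,
                               bool array_prototype_chain_intact) {
  if (is_initial_holey_double_array_map && array_prototype_chain_intact) {
    // Reading the hole is safe: no prototype can turn it into a real value.
    return HoleMode::kAllowReturnHole;
  }
  return HoleMode::kNeverReturnHole;
}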
6883 
6884 
6885 HInstruction* HOptimizedGraphBuilder::BuildMonomorphicElementAccess(
6886  HValue* object,
6887  HValue* key,
6888  HValue* val,
6889  HValue* dependency,
6890  Handle<Map> map,
6891  PropertyAccessType access_type,
6892  KeyedAccessStoreMode store_mode) {
6893  HCheckMaps* checked_object = Add<HCheckMaps>(object, map, dependency);
6894  if (dependency) {
6895  checked_object->ClearDependsOnFlag(kElementsKind);
6896  }
6897 
6898  if (access_type == STORE && map->prototype()->IsJSObject()) {
6899  // Monomorphic stores need a prototype chain check because shape
6900  // changes could allow callbacks on elements in the chain that
6901  // aren't compatible with monomorphic keyed stores.
6902  PrototypeIterator iter(map);
6903  JSObject* holder = NULL;
6904  while (!iter.IsAtEnd()) {
6905  holder = JSObject::cast(*PrototypeIterator::GetCurrent(iter));
6906  iter.Advance();
6907  }
6908  DCHECK(holder && holder->IsJSObject());
6909 
6910  BuildCheckPrototypeMaps(handle(JSObject::cast(map->prototype())),
6911  Handle<JSObject>(holder));
6912  }
6913 
6914  LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map);
6915  return BuildUncheckedMonomorphicElementAccess(
6916  checked_object, key, val,
6917  map->instance_type() == JS_ARRAY_TYPE,
6918  map->elements_kind(), access_type,
6919  load_mode, store_mode);
6920 }
6921 
6922 
6923 HInstruction* HOptimizedGraphBuilder::TryBuildConsolidatedElementLoad(
6924  HValue* object,
6925  HValue* key,
6926  HValue* val,
6927  SmallMapList* maps) {
6928  // For polymorphic loads of similar elements kinds (i.e. all tagged or all
6929  // double), always use the "worst case" code without a transition. This is
6930  // much faster than transitioning the elements to the worst case, trading a
6931  // HTransitionElements for a HCheckMaps, and avoiding mutation of the array.
6932  bool has_double_maps = false;
6933  bool has_smi_or_object_maps = false;
6934  bool has_js_array_access = false;
6935  bool has_non_js_array_access = false;
6936  bool has_seen_holey_elements = false;
6937  Handle<Map> most_general_consolidated_map;
6938  for (int i = 0; i < maps->length(); ++i) {
6939  Handle<Map> map = maps->at(i);
6940  if (!map->IsJSObjectMap()) return NULL;
6941  // Don't allow mixing of JSArrays with JSObjects.
6942  if (map->instance_type() == JS_ARRAY_TYPE) {
6943  if (has_non_js_array_access) return NULL;
6944  has_js_array_access = true;
6945  } else if (has_js_array_access) {
6946  return NULL;
6947  } else {
6948  has_non_js_array_access = true;
6949  }
6950  // Don't allow mixed, incompatible elements kinds.
6951  if (map->has_fast_double_elements()) {
6952  if (has_smi_or_object_maps) return NULL;
6953  has_double_maps = true;
6954  } else if (map->has_fast_smi_or_object_elements()) {
6955  if (has_double_maps) return NULL;
6956  has_smi_or_object_maps = true;
6957  } else {
6958  return NULL;
6959  }
6960  // Remember if we've ever seen holey elements.
6961  if (IsHoleyElementsKind(map->elements_kind())) {
6962  has_seen_holey_elements = true;
6963  }
6964  // Remember the most general elements kind, the code for its load will
6965  // properly handle all of the more specific cases.
6966  if ((i == 0) || IsMoreGeneralElementsKindTransition(
6967  most_general_consolidated_map->elements_kind(),
6968  map->elements_kind())) {
6969  most_general_consolidated_map = map;
6970  }
6971  }
6972  if (!has_double_maps && !has_smi_or_object_maps) return NULL;
6973 
6974  HCheckMaps* checked_object = Add<HCheckMaps>(object, maps);
6975  // FAST_ELEMENTS is considered more general than FAST_HOLEY_SMI_ELEMENTS.
6976  // If we've seen both, the consolidated load must use FAST_HOLEY_ELEMENTS.
6977  ElementsKind consolidated_elements_kind = has_seen_holey_elements
6978  ? GetHoleyElementsKind(most_general_consolidated_map->elements_kind())
6979  : most_general_consolidated_map->elements_kind();
6980  HInstruction* instr = BuildUncheckedMonomorphicElementAccess(
6981  checked_object, key, val,
6982  most_general_consolidated_map->instance_type() == JS_ARRAY_TYPE,
6983  consolidated_elements_kind,
6984  LOAD, NEVER_RETURN_HOLE, STANDARD_STORE);
6985  return instr;
6986 }
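// Illustrative standalone sketch (not part of hydrogen.cc): consolidation
// keeps the most general elements kind seen across all maps and widens it to
// the holey variant if any map was holey, instead of transitioning every
// receiver to a common kind. A simplified model over a toy ElementsKind
// lattice; ToyElementsKind, ToHoley and Consolidate are invented names.
#include <algorithm>
#include <vector>

// Ordered from most specific to most general; larger means "more general".
enum ToyElementsKind { kPackedSmi, kHoleySmi, kPackedObject, kHoleyObject };

inline bool IsHoley(ToyElementsKind k) { return k == kHoleySmi || k == kHoleyObject; }
inline ToyElementsKind ToHoley(ToyElementsKind k) {
  return (k == kPackedSmi || k == kHoleySmi) ? kHoleySmi : kHoleyObject;
}

inline ToyElementsKind Consolidate(const std::vector<ToyElementsKind>& kinds) {
  // Assumes kinds is non-empty, mirroring the non-empty map list above.
  ToyElementsKind most_general = kinds.front();
  bool seen_holey = false;
  for (ToyElementsKind k : kinds) {
    seen_holey = seen_holey || IsHoley(k);
    most_general = std::max(most_general, k);
  }
  // Example: {kPackedObject, kHoleySmi} consolidates to kHoleyObject.
  return seen_holey ? ToHoley(most_general) : most_general;
}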
6987 
6988 
6989 HValue* HOptimizedGraphBuilder::HandlePolymorphicElementAccess(
6990  Expression* expr,
6991  HValue* object,
6992  HValue* key,
6993  HValue* val,
6994  SmallMapList* maps,
6995  PropertyAccessType access_type,
6996  KeyedAccessStoreMode store_mode,
6997  bool* has_side_effects) {
6998  *has_side_effects = false;
6999  BuildCheckHeapObject(object);
7000 
7001  if (access_type == LOAD) {
7002  HInstruction* consolidated_load =
7003  TryBuildConsolidatedElementLoad(object, key, val, maps);
7004  if (consolidated_load != NULL) {
7005  *has_side_effects |= consolidated_load->HasObservableSideEffects();
7006  return consolidated_load;
7007  }
7008  }
7009 
7010  // Elements_kind transition support.
7011  MapHandleList transition_target(maps->length());
7012  // Collect possible transition targets.
7013  MapHandleList possible_transitioned_maps(maps->length());
7014  for (int i = 0; i < maps->length(); ++i) {
7015  Handle<Map> map = maps->at(i);
7016  ElementsKind elements_kind = map->elements_kind();
7017  if (IsFastElementsKind(elements_kind) &&
7018  elements_kind != GetInitialFastElementsKind()) {
7019  possible_transitioned_maps.Add(map);
7020  }
7021  if (elements_kind == SLOPPY_ARGUMENTS_ELEMENTS) {
7022  HInstruction* result = BuildKeyedGeneric(access_type, expr, object, key,
7023  val);
7024  *has_side_effects = result->HasObservableSideEffects();
7025  return AddInstruction(result);
7026  }
7027  }
7028  // Get transition target for each map (NULL == no transition).
7029  for (int i = 0; i < maps->length(); ++i) {
7030  Handle<Map> map = maps->at(i);
7031  Handle<Map> transitioned_map =
7032  map->FindTransitionedMap(&possible_transitioned_maps);
7033  transition_target.Add(transitioned_map);
7034  }
7035 
7036  MapHandleList untransitionable_maps(maps->length());
7037  HTransitionElementsKind* transition = NULL;
7038  for (int i = 0; i < maps->length(); ++i) {
7039  Handle<Map> map = maps->at(i);
7040  DCHECK(map->IsMap());
7041  if (!transition_target.at(i).is_null()) {
7042  DCHECK(Map::IsValidElementsTransition(
7043  map->elements_kind(),
7044  transition_target.at(i)->elements_kind()));
7045  transition = Add<HTransitionElementsKind>(object, map,
7046  transition_target.at(i));
7047  } else {
7048  untransitionable_maps.Add(map);
7049  }
7050  }
7051 
7052  // If only one map is left after transitioning, handle this case
7053  // monomorphically.
7054  DCHECK(untransitionable_maps.length() >= 1);
7055  if (untransitionable_maps.length() == 1) {
7056  Handle<Map> untransitionable_map = untransitionable_maps[0];
7057  HInstruction* instr = NULL;
7058  if (untransitionable_map->has_slow_elements_kind() ||
7059  !untransitionable_map->IsJSObjectMap()) {
7060  instr = AddInstruction(BuildKeyedGeneric(access_type, expr, object, key,
7061  val));
7062  } else {
7063  instr = BuildMonomorphicElementAccess(
7064  object, key, val, transition, untransitionable_map, access_type,
7065  store_mode);
7066  }
7067  *has_side_effects |= instr->HasObservableSideEffects();
7068  return access_type == STORE ? NULL : instr;
7069  }
7070 
7071  HBasicBlock* join = graph()->CreateBasicBlock();
7072 
7073  for (int i = 0; i < untransitionable_maps.length(); ++i) {
7074  Handle<Map> map = untransitionable_maps[i];
7075  if (!map->IsJSObjectMap()) continue;
7076  ElementsKind elements_kind = map->elements_kind();
7077  HBasicBlock* this_map = graph()->CreateBasicBlock();
7078  HBasicBlock* other_map = graph()->CreateBasicBlock();
7079  HCompareMap* mapcompare =
7080  New<HCompareMap>(object, map, this_map, other_map);
7081  FinishCurrentBlock(mapcompare);
7082 
7083  set_current_block(this_map);
7084  HInstruction* access = NULL;
7085  if (IsDictionaryElementsKind(elements_kind)) {
7086  access = AddInstruction(BuildKeyedGeneric(access_type, expr, object, key,
7087  val));
7088  } else {
7089  DCHECK(IsFastElementsKind(elements_kind) ||
7090  IsExternalArrayElementsKind(elements_kind) ||
7091  IsFixedTypedArrayElementsKind(elements_kind));
7092  LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map);
7093  // Happily, mapcompare is a checked object.
7094  access = BuildUncheckedMonomorphicElementAccess(
7095  mapcompare, key, val,
7096  map->instance_type() == JS_ARRAY_TYPE,
7097  elements_kind, access_type,
7098  load_mode,
7099  store_mode);
7100  }
7101  *has_side_effects |= access->HasObservableSideEffects();
7102  // The caller will use has_side_effects and add a correct Simulate.
7103  access->SetFlag(HValue::kHasNoObservableSideEffects);
7104  if (access_type == LOAD) {
7105  Push(access);
7106  }
7107  NoObservableSideEffectsScope scope(this);
7108  GotoNoSimulate(join);
7109  set_current_block(other_map);
7110  }
7111 
7112  // Ensure that we visited at least one map above that goes to join. This is
7113  // necessary because FinishExitWithHardDeoptimization does an AbnormalExit
7114  // rather than joining the join block. If this becomes an issue, insert a
7115  // generic access in the case length() == 0.
7116  DCHECK(join->predecessors()->length() > 0);
7117  // Deopt if none of the cases matched.
7118  NoObservableSideEffectsScope scope(this);
7119  FinishExitWithHardDeoptimization("Unknown map in polymorphic element access");
7120  set_current_block(join);
7121  return access_type == STORE ? NULL : Pop();
7122 }
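// Illustrative standalone sketch (not part of hydrogen.cc): after emitting
// elements-kind transitions, the function above handles the access
// monomorphically if a single fast JSObject map remains, falls back to the
// generic access for a single slow or non-JSObject map, and otherwise builds
// a per-map compare-and-branch chain that ends in a deoptimizing tail.
// DispatchPlan and ChooseDispatch are invented names for this toy model.
#include <cstddef>

enum class DispatchPlan { kGeneric, kMonomorphic, kPolymorphicWithDeoptTail };

inline DispatchPlan ChooseDispatch(std::size_t untransitionable_map_count,
                                   bool single_map_is_fast_js_object) {
  if (untransitionable_map_count == 1) {
    return single_map_is_fast_js_object ? DispatchPlan::kMonomorphic
                                        : DispatchPlan::kGeneric;
  }
  return DispatchPlan::kPolymorphicWithDeoptTail;
}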
7123 
7124 
7125 HValue* HOptimizedGraphBuilder::HandleKeyedElementAccess(
7126  HValue* obj, HValue* key, HValue* val, Expression* expr, BailoutId ast_id,
7127  BailoutId return_id, PropertyAccessType access_type,
7128  bool* has_side_effects) {
7129  if (key->ActualValue()->IsConstant()) {
7130  Handle<Object> constant =
7131  HConstant::cast(key->ActualValue())->handle(isolate());
7132  uint32_t array_index;
7133  if (constant->IsString() &&
7134  !Handle<String>::cast(constant)->AsArrayIndex(&array_index)) {
7135  if (!constant->IsUniqueName()) {
7136  constant = isolate()->factory()->InternalizeString(
7137  Handle<String>::cast(constant));
7138  }
7139  HInstruction* instr =
7140  BuildNamedAccess(access_type, ast_id, return_id, expr, obj,
7141  Handle<String>::cast(constant), val, false);
7142  if (instr == NULL || instr->IsLinked()) {
7143  *has_side_effects = false;
7144  } else {
7145  AddInstruction(instr);
7146  *has_side_effects = instr->HasObservableSideEffects();
7147  }
7148  return instr;
7149  }
7150  }
7151 
7152  DCHECK(!expr->IsPropertyName());
7153  HInstruction* instr = NULL;
7154 
7155  SmallMapList* types;
7156  bool monomorphic = ComputeReceiverTypes(expr, obj, &types, zone());
7157 
7158  bool force_generic = false;
7159  if (access_type == STORE &&
7160  (monomorphic || (types != NULL && !types->is_empty()))) {
7161  // Stores can't be mono/polymorphic if their prototype chain has dictionary
7162  // elements. However a receiver map that has dictionary elements itself
7163  // should be left to normal mono/poly behavior (the other maps may benefit
7164  // from highly optimized stores).
7165  for (int i = 0; i < types->length(); i++) {
7166  Handle<Map> current_map = types->at(i);
7167  if (current_map->DictionaryElementsInPrototypeChainOnly()) {
7168  force_generic = true;
7169  monomorphic = false;
7170  break;
7171  }
7172  }
7173  }
7174 
7175  if (monomorphic) {
7176  Handle<Map> map = types->first();
7177  if (map->has_slow_elements_kind() || !map->IsJSObjectMap()) {
7178  instr = AddInstruction(BuildKeyedGeneric(access_type, expr, obj, key,
7179  val));
7180  } else {
7181  BuildCheckHeapObject(obj);
7182  instr = BuildMonomorphicElementAccess(
7183  obj, key, val, NULL, map, access_type, expr->GetStoreMode());
7184  }
7185  } else if (!force_generic && (types != NULL && !types->is_empty())) {
7186  return HandlePolymorphicElementAccess(
7187  expr, obj, key, val, types, access_type,
7188  expr->GetStoreMode(), has_side_effects);
7189  } else {
7190  if (access_type == STORE) {
7191  if (expr->IsAssignment() &&
7192  expr->AsAssignment()->HasNoTypeInformation()) {
7193  Add<HDeoptimize>("Insufficient type feedback for keyed store",
7194  Deoptimizer::SOFT);
7195  }
7196  } else {
7197  if (expr->AsProperty()->HasNoTypeInformation()) {
7198  Add<HDeoptimize>("Insufficient type feedback for keyed load",
7199  Deoptimizer::SOFT);
7200  }
7201  }
7202  instr = AddInstruction(BuildKeyedGeneric(access_type, expr, obj, key, val));
7203  }
7204  *has_side_effects = instr->HasObservableSideEffects();
7205  return instr;
7206 }
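// Illustrative standalone sketch (not part of hydrogen.cc): at the top of the
// function above, a keyed access whose key is a constant string that is not an
// array index (e.g. o["foo"], but not o["3"]) is routed through the named
// access path instead of the element path. IsArrayIndexStringSketch and
// KeyedAccessRoute below are invented, deliberately simplified helpers.
#include <cctype>
#include <string>

inline bool IsArrayIndexStringSketch(const std::string& key) {
  if (key.empty() || (key.size() > 1 && key[0] == '0')) return false;
  for (char c : key) {
    if (!std::isdigit(static_cast<unsigned char>(c))) return false;
  }
  return true;  // e.g. "0", "42" are treated as element indices, not names
}

enum class KeyedAccessRoute { kNamedProperty, kElement };

inline KeyedAccessRoute ClassifyConstantStringKey(const std::string& key) {
  return IsArrayIndexStringSketch(key) ? KeyedAccessRoute::kElement
                                       : KeyedAccessRoute::kNamedProperty;
}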
7207 
7208 
7209 void HOptimizedGraphBuilder::EnsureArgumentsArePushedForAccess() {
7210  // Outermost function already has arguments on the stack.
7211  if (function_state()->outer() == NULL) return;
7212 
7213  if (function_state()->arguments_pushed()) return;
7214 
7215  // Push arguments when entering inlined function.
7216  HEnterInlined* entry = function_state()->entry();
7217  entry->set_arguments_pushed();
7218 
7219  HArgumentsObject* arguments = entry->arguments_object();
7220  const ZoneList<HValue*>* arguments_values = arguments->arguments_values();
7221 
7222  HInstruction* insert_after = entry;
7223  for (int i = 0; i < arguments_values->length(); i++) {
7224  HValue* argument = arguments_values->at(i);
7225  HInstruction* push_argument = New<HPushArguments>(argument);
7226  push_argument->InsertAfter(insert_after);
7227  insert_after = push_argument;
7228  }
7229 
7230  HArgumentsElements* arguments_elements = New<HArgumentsElements>(true);
7231  arguments_elements->ClearFlag(HValue::kUseGVN);
7232  arguments_elements->InsertAfter(insert_after);
7233  function_state()->set_arguments_elements(arguments_elements);
7234 }
7235 
7236 
7237 bool HOptimizedGraphBuilder::TryArgumentsAccess(Property* expr) {
7238  VariableProxy* proxy = expr->obj()->AsVariableProxy();
7239  if (proxy == NULL) return false;
7240  if (!proxy->var()->IsStackAllocated()) return false;
7241  if (!environment()->Lookup(proxy->var())->CheckFlag(HValue::kIsArguments)) {
7242  return false;
7243  }
7244 
7245  HInstruction* result = NULL;
7246  if (expr->key()->IsPropertyName()) {
7247  Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
7248  if (!String::Equals(name, isolate()->factory()->length_string())) {
7249  return false;
7250  }
7251 
7252  if (function_state()->outer() == NULL) {
7253  HInstruction* elements = Add<HArgumentsElements>(false);
7254  result = New<HArgumentsLength>(elements);
7255  } else {
7256  // Number of arguments without receiver.
7257  int argument_count = environment()->
7258  arguments_environment()->parameter_count() - 1;
7259  result = New<HConstant>(argument_count);
7260  }
7261  } else {
7262  Push(graph()->GetArgumentsObject());
7263  CHECK_ALIVE_OR_RETURN(VisitForValue(expr->key()), true);
7264  HValue* key = Pop();
7265  Drop(1); // Arguments object.
7266  if (function_state()->outer() == NULL) {
7267  HInstruction* elements = Add<HArgumentsElements>(false);
7268  HInstruction* length = Add<HArgumentsLength>(elements);
7269  HInstruction* checked_key = Add<HBoundsCheck>(key, length);
7270  result = New<HAccessArgumentsAt>(elements, length, checked_key);
7271  } else {
7272  EnsureArgumentsArePushedForAccess();
7273 
7274  // Number of arguments without receiver.
7275  HInstruction* elements = function_state()->arguments_elements();
7276  int argument_count = environment()->
7277  arguments_environment()->parameter_count() - 1;
7278  HInstruction* length = Add<HConstant>(argument_count);
7279  HInstruction* checked_key = Add<HBoundsCheck>(key, length);
7280  result = New<HAccessArgumentsAt>(elements, length, checked_key);
7281  }
7282  }
7283  ast_context()->ReturnInstruction(result, expr->id());
7284  return true;
7285 }
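// Illustrative standalone sketch (not part of hydrogen.cc): the fast path
// above only covers arguments.length and arguments[i] on a stack-allocated
// arguments object, and every indexed read is guarded by a bounds check
// against the receiver-less argument count. A plain model of those two
// shapes; the function names are invented for illustration.
#include <cstddef>
#include <optional>
#include <vector>

inline std::size_t ArgumentsLength(const std::vector<double>& args_without_receiver) {
  return args_without_receiver.size();
}

inline std::optional<double> ArgumentsAt(
    const std::vector<double>& args_without_receiver, std::size_t index) {
  // Mirrors HBoundsCheck: out-of-range indices never reach the element load.
  if (index >= args_without_receiver.size()) return std::nullopt;
  return args_without_receiver[index];
}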
7286 
7287 
7288 HInstruction* HOptimizedGraphBuilder::BuildNamedAccess(
7289  PropertyAccessType access,
7290  BailoutId ast_id,
7291  BailoutId return_id,
7292  Expression* expr,
7293  HValue* object,
7294  Handle<String> name,
7295  HValue* value,
7296  bool is_uninitialized) {
7297  SmallMapList* types;
7298  ComputeReceiverTypes(expr, object, &types, zone());
7299  DCHECK(types != NULL);
7300 
7301  if (types->length() > 0) {
7302  PropertyAccessInfo info(this, access, ToType(types->first()), name);
7303  if (!info.CanAccessAsMonomorphic(types)) {
7304  HandlePolymorphicNamedFieldAccess(
7305  access, expr, ast_id, return_id, object, value, types, name);
7306  return NULL;
7307  }
7308 
7309  HValue* checked_object;
7310  // Type::Number() is only supported by polymorphic load/call handling.
7311  DCHECK(!info.type()->Is(Type::Number()));
7312  BuildCheckHeapObject(object);
7313  if (AreStringTypes(types)) {
7314  checked_object =
7315  Add<HCheckInstanceType>(object, HCheckInstanceType::IS_STRING);
7316  } else {
7317  checked_object = Add<HCheckMaps>(object, types);
7318  }
7319  return BuildMonomorphicAccess(
7320  &info, object, checked_object, value, ast_id, return_id);
7321  }
7322 
7323  return BuildNamedGeneric(access, expr, object, name, value, is_uninitialized);
7324 }
7325 
7326 
7327 void HOptimizedGraphBuilder::PushLoad(Property* expr,
7328  HValue* object,
7329  HValue* key) {
7330  ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
7331  Push(object);
7332  if (key != NULL) Push(key);
7333  BuildLoad(expr, expr->LoadId());
7334 }
7335 
7336 
7337 void HOptimizedGraphBuilder::BuildLoad(Property* expr,
7338  BailoutId ast_id) {
7339  HInstruction* instr = NULL;
7340  if (expr->IsStringAccess()) {
7341  HValue* index = Pop();
7342  HValue* string = Pop();
7343  HInstruction* char_code = BuildStringCharCodeAt(string, index);
7344  AddInstruction(char_code);
7345  instr = NewUncasted<HStringCharFromCode>(char_code);
7346 
7347  } else if (expr->key()->IsPropertyName()) {
7348  Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
7349  HValue* object = Pop();
7350 
7351  instr = BuildNamedAccess(LOAD, ast_id, expr->LoadId(), expr,
7352  object, name, NULL, expr->IsUninitialized());
7353  if (instr == NULL) return;
7354  if (instr->IsLinked()) return ast_context()->ReturnValue(instr);
7355 
7356  } else {
7357  HValue* key = Pop();
7358  HValue* obj = Pop();
7359 
7360  bool has_side_effects = false;
7361  HValue* load = HandleKeyedElementAccess(
7362  obj, key, NULL, expr, ast_id, expr->LoadId(), LOAD, &has_side_effects);
7363  if (has_side_effects) {
7364  if (ast_context()->IsEffect()) {
7365  Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
7366  } else {
7367  Push(load);
7368  Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
7369  Drop(1);
7370  }
7371  }
7372  if (load == NULL) return;
7373  return ast_context()->ReturnValue(load);
7374  }
7375  return ast_context()->ReturnInstruction(instr, ast_id);
7376 }
7377 
7378 
7379 void HOptimizedGraphBuilder::VisitProperty(Property* expr) {
7380  DCHECK(!HasStackOverflow());
7381  DCHECK(current_block() != NULL);
7382  DCHECK(current_block()->HasPredecessor());
7383 
7384  if (TryArgumentsAccess(expr)) return;
7385 
7386  CHECK_ALIVE(VisitForValue(expr->obj()));
7387  if (!expr->key()->IsPropertyName() || expr->IsStringAccess()) {
7388  CHECK_ALIVE(VisitForValue(expr->key()));
7389  }
7390 
7391  BuildLoad(expr, expr->id());
7392 }
7393 
7394 
7395 HInstruction* HGraphBuilder::BuildConstantMapCheck(Handle<JSObject> constant) {
7396  HCheckMaps* check = Add<HCheckMaps>(
7397  Add<HConstant>(constant), handle(constant->map()));
7398  check->ClearDependsOnFlag(kElementsKind);
7399  return check;
7400 }
7401 
7402 
7403 HInstruction* HGraphBuilder::BuildCheckPrototypeMaps(Handle<JSObject> prototype,
7404  Handle<JSObject> holder) {
7405  PrototypeIterator iter(isolate(), prototype,
7406  PrototypeIterator::START_AT_RECEIVER);
7407  while (holder.is_null() ||
7408  !PrototypeIterator::GetCurrent(iter).is_identical_to(holder)) {
7409  BuildConstantMapCheck(
7410  Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)));
7411  iter.Advance();
7412  if (iter.IsAtEnd()) {
7413  return NULL;
7414  }
7415  }
7416  return BuildConstantMapCheck(
7417  Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)));
7418 }
7419 
7420 
7421 void HOptimizedGraphBuilder::AddCheckPrototypeMaps(Handle<JSObject> holder,
7422  Handle<Map> receiver_map) {
7423  if (!holder.is_null()) {
7424  Handle<JSObject> prototype(JSObject::cast(receiver_map->prototype()));
7425  BuildCheckPrototypeMaps(prototype, holder);
7426  }
7427 }
7428 
7429 
7430 HInstruction* HOptimizedGraphBuilder::NewPlainFunctionCall(
7431  HValue* fun, int argument_count, bool pass_argument_count) {
7432  return New<HCallJSFunction>(
7433  fun, argument_count, pass_argument_count);
7434 }
7435 
7436 
7437 HInstruction* HOptimizedGraphBuilder::NewArgumentAdaptorCall(
7438  HValue* fun, HValue* context,
7439  int argument_count, HValue* expected_param_count) {
7440  ArgumentAdaptorDescriptor descriptor(isolate());
7441  HValue* arity = Add<HConstant>(argument_count - 1);
7442 
7443  HValue* op_vals[] = { context, fun, arity, expected_param_count };
7444 
7445  Handle<Code> adaptor =
7446  isolate()->builtins()->ArgumentsAdaptorTrampoline();
7447  HConstant* adaptor_value = Add<HConstant>(adaptor);
7448 
7449  return New<HCallWithDescriptor>(
7450  adaptor_value, argument_count, descriptor,
7451  Vector<HValue*>(op_vals, descriptor.GetEnvironmentLength()));
7452 }
7453 
7454 
7455 HInstruction* HOptimizedGraphBuilder::BuildCallConstantFunction(
7456  Handle<JSFunction> jsfun, int argument_count) {
7457  HValue* target = Add<HConstant>(jsfun);
7458  // For constant functions, we try to avoid calling the
7459  // argument adaptor and instead call the function directly
7460  int formal_parameter_count = jsfun->shared()->formal_parameter_count();
7461  bool dont_adapt_arguments =
7462  (formal_parameter_count ==
7463  SharedFunctionInfo::kDontAdaptArgumentsSentinel);
7464  int arity = argument_count - 1;
7465  bool can_invoke_directly =
7466  dont_adapt_arguments || formal_parameter_count == arity;
7467  if (can_invoke_directly) {
7468  if (jsfun.is_identical_to(current_info()->closure())) {
7469  graph()->MarkRecursive();
7470  }
7471  return NewPlainFunctionCall(target, argument_count, dont_adapt_arguments);
7472  } else {
7473  HValue* param_count_value = Add<HConstant>(formal_parameter_count);
7474  HValue* context = Add<HLoadNamedField>(
7475  target, static_cast<HValue*>(NULL),
7476  HObjectAccess::ForFunctionContextPointer());
7477  return NewArgumentAdaptorCall(target, context,
7478  argument_count, param_count_value);
7479  }
7480  UNREACHABLE();
7481  return NULL;
7482 }
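// Illustrative standalone sketch (not part of hydrogen.cc): a known constant
// target can be called directly only when it either opts out of argument
// adaptation entirely or receives exactly its declared number of arguments;
// otherwise the call goes through the arguments adaptor trampoline. The
// sentinel value and names below are invented stand-ins for illustration.
constexpr int kDontAdaptSentinelSketch = -1;  // stand-in for V8's real sentinel

inline bool CanInvokeDirectly(int formal_parameter_count,
                              int argument_count_with_receiver) {
  const int arity = argument_count_with_receiver - 1;  // drop the receiver
  const bool dont_adapt = (formal_parameter_count == kDontAdaptSentinelSketch);
  return dont_adapt || formal_parameter_count == arity;
}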
7483 
7484 
7485 class FunctionSorter {
7486  public:
7487  explicit FunctionSorter(int index = 0, int ticks = 0, int size = 0)
7488  : index_(index), ticks_(ticks), size_(size) {}
7489 
7490  int index() const { return index_; }
7491  int ticks() const { return ticks_; }
7492  int size() const { return size_; }
7493 
7494  private:
7495  int index_;
7496  int ticks_;
7497  int size_;
7498 };
7499 
7500 
7501 inline bool operator<(const FunctionSorter& lhs, const FunctionSorter& rhs) {
7502  int diff = lhs.ticks() - rhs.ticks();
7503  if (diff != 0) return diff > 0;
7504  return lhs.size() < rhs.size();
7505 }
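// Illustrative standalone sketch (not part of hydrogen.cc): the comparator
// above orders inlining candidates by profiler ticks, descending, breaking
// ties by smaller AST size, so the hottest and cheapest targets are tried
// first. A runnable usage example with a local copy of the same ordering;
// the Candidate struct and the sample values are invented.
#include <algorithm>
#include <vector>

struct Candidate { int index; int ticks; int size; };

inline bool OrderForInlining(const Candidate& lhs, const Candidate& rhs) {
  if (lhs.ticks != rhs.ticks) return lhs.ticks > rhs.ticks;  // hotter first
  return lhs.size < rhs.size;                                // then smaller
}

int main() {
  std::vector<Candidate> order = {{0, 10, 50}, {1, 80, 200}, {2, 80, 30}};
  std::sort(order.begin(), order.end(), OrderForInlining);
  // Resulting index order: 2 (80 ticks, size 30), 1 (80 ticks, size 200), 0.
  return 0;
}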
7506 
7507 
7508 void HOptimizedGraphBuilder::HandlePolymorphicCallNamed(
7509  Call* expr,
7510  HValue* receiver,
7511  SmallMapList* types,
7512  Handle<String> name) {
7513  int argument_count = expr->arguments()->length() + 1; // Includes receiver.
7514  FunctionSorter order[kMaxCallPolymorphism];
7515 
7516  bool handle_smi = false;
7517  bool handled_string = false;
7518  int ordered_functions = 0;
7519 
7520  int i;
7521  for (i = 0; i < types->length() && ordered_functions < kMaxCallPolymorphism;
7522  ++i) {
7523  PropertyAccessInfo info(this, LOAD, ToType(types->at(i)), name);
7524  if (info.CanAccessMonomorphic() && info.IsConstant() &&
7525  info.constant()->IsJSFunction()) {
7526  if (info.type()->Is(Type::String())) {
7527  if (handled_string) continue;
7528  handled_string = true;
7529  }
7530  Handle<JSFunction> target = Handle<JSFunction>::cast(info.constant());
7531  if (info.type()->Is(Type::Number())) {
7532  handle_smi = true;
7533  }
7534  expr->set_target(target);
7535  order[ordered_functions++] = FunctionSorter(
7536  i, target->shared()->profiler_ticks(), InliningAstSize(target));
7537  }
7538  }
7539 
7540  std::sort(order, order + ordered_functions);
7541 
7542  if (i < types->length()) {
7543  types->Clear();
7544  ordered_functions = -1;
7545  }
7546 
7547  HBasicBlock* number_block = NULL;
7548  HBasicBlock* join = NULL;
7549  handled_string = false;
7550  int count = 0;
7551 
7552  for (int fn = 0; fn < ordered_functions; ++fn) {
7553  int i = order[fn].index();
7554  PropertyAccessInfo info(this, LOAD, ToType(types->at(i)), name);
7555  if (info.type()->Is(Type::String())) {
7556  if (handled_string) continue;
7557  handled_string = true;
7558  }
7559  // Reloads the target.
7560  info.CanAccessMonomorphic();
7561  Handle<JSFunction> target = Handle<JSFunction>::cast(info.constant());
7562 
7563  expr->set_target(target);
7564  if (count == 0) {
7565  // Only needed once.
7566  join = graph()->CreateBasicBlock();
7567  if (handle_smi) {
7568  HBasicBlock* empty_smi_block = graph()->CreateBasicBlock();
7569  HBasicBlock* not_smi_block = graph()->CreateBasicBlock();
7570  number_block = graph()->CreateBasicBlock();
7571  FinishCurrentBlock(New<HIsSmiAndBranch>(
7572  receiver, empty_smi_block, not_smi_block));
7573  GotoNoSimulate(empty_smi_block, number_block);
7574  set_current_block(not_smi_block);
7575  } else {
7576  BuildCheckHeapObject(receiver);
7577  }
7578  }
7579  ++count;
7580  HBasicBlock* if_true = graph()->CreateBasicBlock();
7581  HBasicBlock* if_false = graph()->CreateBasicBlock();
7582  HUnaryControlInstruction* compare;
7583 
7584  Handle<Map> map = info.map();
7585  if (info.type()->Is(Type::Number())) {
7586  Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();
7587  compare = New<HCompareMap>(receiver, heap_number_map, if_true, if_false);
7588  } else if (info.type()->Is(Type::String())) {
7589  compare = New<HIsStringAndBranch>(receiver, if_true, if_false);
7590  } else {
7591  compare = New<HCompareMap>(receiver, map, if_true, if_false);
7592  }
7593  FinishCurrentBlock(compare);
7594 
7595  if (info.type()->Is(Type::Number())) {
7596  GotoNoSimulate(if_true, number_block);
7597  if_true = number_block;
7598  }
7599 
7600  set_current_block(if_true);
7601 
7602  AddCheckPrototypeMaps(info.holder(), map);
7603 
7604  HValue* function = Add<HConstant>(expr->target());
7605  environment()->SetExpressionStackAt(0, function);
7606  Push(receiver);
7607  CHECK_ALIVE(VisitExpressions(expr->arguments()));
7608  bool needs_wrapping = NeedsWrappingFor(info.type(), target);
7609  bool try_inline = FLAG_polymorphic_inlining && !needs_wrapping;
7610  if (FLAG_trace_inlining && try_inline) {
7611  Handle<JSFunction> caller = current_info()->closure();
7612  SmartArrayPointer<char> caller_name =
7613  caller->shared()->DebugName()->ToCString();
7614  PrintF("Trying to inline the polymorphic call to %s from %s\n",
7615  name->ToCString().get(),
7616  caller_name.get());
7617  }
7618  if (try_inline && TryInlineCall(expr)) {
7619  // Trying to inline will signal that we should bailout from the
7620  // entire compilation by setting stack overflow on the visitor.
7621  if (HasStackOverflow()) return;
7622  } else {
7623  // Since HWrapReceiver currently cannot actually wrap numbers and strings,
7624  // use the regular CallFunctionStub for method calls to wrap the receiver.
7625  // TODO(verwaest): Support creation of value wrappers directly in
7626  // HWrapReceiver.
7627  HInstruction* call = needs_wrapping
7628  ? NewUncasted<HCallFunction>(
7629  function, argument_count, WRAP_AND_CALL)
7630  : BuildCallConstantFunction(target, argument_count);
7631  PushArgumentsFromEnvironment(argument_count);
7632  AddInstruction(call);
7633  Drop(1); // Drop the function.
7634  if (!ast_context()->IsEffect()) Push(call);
7635  }
7636 
7637  if (current_block() != NULL) Goto(join);
7638  set_current_block(if_false);
7639  }
7640 
7641  // Finish up. Unconditionally deoptimize if we've handled all the maps we
7642  // know about and do not want to handle ones we've never seen. Otherwise
7643  // use a generic IC.
7644  if (ordered_functions == types->length() && FLAG_deoptimize_uncommon_cases) {
7645  FinishExitWithHardDeoptimization("Unknown map in polymorphic call");
7646  } else {
7647  Property* prop = expr->expression()->AsProperty();
7648  HInstruction* function = BuildNamedGeneric(
7649  LOAD, prop, receiver, name, NULL, prop->IsUninitialized());
7650  AddInstruction(function);
7651  Push(function);
7652  AddSimulate(prop->LoadId(), REMOVABLE_SIMULATE);
7653 
7654  environment()->SetExpressionStackAt(1, function);
7655  environment()->SetExpressionStackAt(0, receiver);
7656  CHECK_ALIVE(VisitExpressions(expr->arguments()));
7657 
7658  CallFunctionFlags flags = receiver->type().IsJSObject()
7659  ? NO_CALL_FUNCTION_FLAGS : CALL_AS_METHOD;
7660  HInstruction* call = New<HCallFunction>(
7661  function, argument_count, flags);
7662 
7663  PushArgumentsFromEnvironment(argument_count);
7664 
7665  Drop(1); // Function.
7666 
7667  if (join != NULL) {
7668  AddInstruction(call);
7669  if (!ast_context()->IsEffect()) Push(call);
7670  Goto(join);
7671  } else {
7672  return ast_context()->ReturnInstruction(call, expr->id());
7673  }
7674  }
7675 
7676  // We assume that control flow is always live after an expression. So
7677  // even without predecessors to the join block, we set it as the exit
7678  // block and continue by adding instructions there.
7679  DCHECK(join != NULL);
7680  if (join->HasPredecessor()) {
7681  set_current_block(join);
7682  join->SetJoinId(expr->id());
7683  if (!ast_context()->IsEffect()) return ast_context()->ReturnValue(Pop());
7684  } else {
7685  set_current_block(NULL);
7686  }
7687 }
7688 
7689 
7690 void HOptimizedGraphBuilder::TraceInline(Handle<JSFunction> target,
7691  Handle<JSFunction> caller,
7692  const char* reason) {
7693  if (FLAG_trace_inlining) {
7694  SmartArrayPointer<char> target_name =
7695  target->shared()->DebugName()->ToCString();
7696  SmartArrayPointer<char> caller_name =
7697  caller->shared()->DebugName()->ToCString();
7698  if (reason == NULL) {
7699  PrintF("Inlined %s called from %s.\n", target_name.get(),
7700  caller_name.get());
7701  } else {
7702  PrintF("Did not inline %s called from %s (%s).\n",
7703  target_name.get(), caller_name.get(), reason);
7704  }
7705  }
7706 }
7707 
7708 
7709 static const int kNotInlinable = 1000000000;
7710 
7711 
7713  if (!FLAG_use_inlining) return kNotInlinable;
7714 
7715  // Precondition: call is monomorphic and we have found a target with the
7716  // appropriate arity.
7717  Handle<JSFunction> caller = current_info()->closure();
7718  Handle<SharedFunctionInfo> target_shared(target->shared());
7719 
7720  // Always inline builtins marked for inlining.
7721  if (target->IsBuiltin()) {
7722  return target_shared->inline_builtin() ? 0 : kNotInlinable;
7723  }
7724 
7725  if (target_shared->IsApiFunction()) {
7726  TraceInline(target, caller, "target is api function");
7727  return kNotInlinable;
7728  }
7729 
7730  // Do a quick check on source code length to avoid parsing large
7731  // inlining candidates.
7732  if (target_shared->SourceSize() >
7733  Min(FLAG_max_inlined_source_size, kUnlimitedMaxInlinedSourceSize)) {
7734  TraceInline(target, caller, "target text too big");
7735  return kNotInlinable;
7736  }
7737 
7738  // Target must be inlineable.
7739  if (!target_shared->IsInlineable()) {
7740  TraceInline(target, caller, "target not inlineable");
7741  return kNotInlinable;
7742  }
7743  if (target_shared->DisableOptimizationReason() != kNoReason) {
7744  TraceInline(target, caller, "target contains unsupported syntax [early]");
7745  return kNotInlinable;
7746  }
7747 
7748  int nodes_added = target_shared->ast_node_count();
7749  return nodes_added;
7750 }
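// Illustrative standalone sketch (not part of hydrogen.cc): InliningAstSize is
// effectively a pre-filter. Builtins cost nothing when explicitly marked
// inlinable, API functions and oversized or non-inlinable targets are rejected
// outright, and everything else is charged its AST node count against the
// inlining budget. The parameter names below are invented for illustration.
constexpr int kNotInlinableSketch = 1000000000;

inline int InliningCostSketch(bool is_builtin, bool builtin_marked_inline,
                              bool is_api_function, int source_size,
                              int max_source_size, bool is_inlineable,
                              int ast_node_count) {
  if (is_builtin) return builtin_marked_inline ? 0 : kNotInlinableSketch;
  if (is_api_function) return kNotInlinableSketch;
  if (source_size > max_source_size) return kNotInlinableSketch;
  if (!is_inlineable) return kNotInlinableSketch;
  return ast_node_count;  // charged against the cumulative inlining budget
}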
7751 
7752 
7753 bool HOptimizedGraphBuilder::TryInline(Handle<JSFunction> target,
7754  int arguments_count,
7755  HValue* implicit_return_value,
7756  BailoutId ast_id,
7757  BailoutId return_id,
7758  InliningKind inlining_kind,
7759  HSourcePosition position) {
7760  int nodes_added = InliningAstSize(target);
7761  if (nodes_added == kNotInlinable) return false;
7762 
7763  Handle<JSFunction> caller = current_info()->closure();
7764 
7765  if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
7766  TraceInline(target, caller, "target AST is too large [early]");
7767  return false;
7768  }
7769 
7770  // Don't inline deeper than the maximum number of inlining levels.
7771  HEnvironment* env = environment();
7772  int current_level = 1;
7773  while (env->outer() != NULL) {
7774  if (current_level == FLAG_max_inlining_levels) {
7775  TraceInline(target, caller, "inline depth limit reached");
7776  return false;
7777  }
7778  if (env->outer()->frame_type() == JS_FUNCTION) {
7779  current_level++;
7780  }
7781  env = env->outer();
7782  }
7783 
7784  // Don't inline recursive functions.
7785  for (FunctionState* state = function_state();
7786  state != NULL;
7787  state = state->outer()) {
7788  if (*state->compilation_info()->closure() == *target) {
7789  TraceInline(target, caller, "target is recursive");
7790  return false;
7791  }
7792  }
7793 
7794  // We don't want to add more than a certain number of nodes from inlining.
7795  if (inlined_count_ > Min(FLAG_max_inlined_nodes_cumulative,
7796  kUnlimitedMaxInlinedNodesCumulative)) {
7797  TraceInline(target, caller, "cumulative AST node limit reached");
7798  return false;
7799  }
7800 
7801  // Parse and allocate variables.
7802  CompilationInfo target_info(target, zone());
7803  // Use the same AstValueFactory for creating strings in the sub-compilation
7804  // step, but don't transfer ownership to target_info.
7805  target_info.SetAstValueFactory(top_info()->ast_value_factory(), false);
7806  Handle<SharedFunctionInfo> target_shared(target->shared());
7807  if (!Parser::Parse(&target_info) || !Scope::Analyze(&target_info)) {
7808  if (target_info.isolate()->has_pending_exception()) {
7809  // Parse or scope error, never optimize this function.
7810  SetStackOverflow();
7811  target_shared->DisableOptimization(kParseScopeError);
7812  }
7813  TraceInline(target, caller, "parse failure");
7814  return false;
7815  }
7816 
7817  if (target_info.scope()->num_heap_slots() > 0) {
7818  TraceInline(target, caller, "target has context-allocated variables");
7819  return false;
7820  }
7821  FunctionLiteral* function = target_info.function();
7822 
7823  // The following conditions must be checked again after re-parsing, because
7824  // earlier the information might not have been complete due to lazy parsing.
7825  nodes_added = function->ast_node_count();
7826  if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
7827  TraceInline(target, caller, "target AST is too large [late]");
7828  return false;
7829  }
7830  if (function->dont_optimize()) {
7831  TraceInline(target, caller, "target contains unsupported syntax [late]");
7832  return false;
7833  }
7834 
7835  // If the function uses the arguments object check that inlining of functions
7836  // with arguments object is enabled and the arguments-variable is
7837  // stack allocated.
7838  if (function->scope()->arguments() != NULL) {
7839  if (!FLAG_inline_arguments) {
7840  TraceInline(target, caller, "target uses arguments object");
7841  return false;
7842  }
7843 
7844  if (!function->scope()->arguments()->IsStackAllocated()) {
7845  TraceInline(target,
7846  caller,
7847  "target uses non-stackallocated arguments object");
7848  return false;
7849  }
7850  }
7851 
7852  // All declarations must be inlineable.
7853  ZoneList<Declaration*>* decls = target_info.scope()->declarations();
7854  int decl_count = decls->length();
7855  for (int i = 0; i < decl_count; ++i) {
7856  if (!decls->at(i)->IsInlineable()) {
7857  TraceInline(target, caller, "target has non-trivial declaration");
7858  return false;
7859  }
7860  }
7861 
7862  // Generate the deoptimization data for the unoptimized version of
7863  // the target function if we don't already have it.
7864  if (!Compiler::EnsureDeoptimizationSupport(&target_info)) {
7865  TraceInline(target, caller, "could not generate deoptimization info");
7866  return false;
7867  }
7868 
7869  // ----------------------------------------------------------------
7870  // After this point, we've made a decision to inline this function (so
7871  // TryInline should always return true).
7872 
7873  // Type-check the inlined function.
7874  DCHECK(target_shared->has_deoptimization_support());
7875  AstTyper::Run(&target_info);
7876 
7877  int function_id = graph()->TraceInlinedFunction(target_shared, position);
7878 
7879  // Save the pending call context. Set up new one for the inlined function.
7880  // The function state is new-allocated because we need to delete it
7881  // in two different places.
7882  FunctionState* target_state = new FunctionState(
7883  this, &target_info, inlining_kind, function_id);
7884 
7885  HConstant* undefined = graph()->GetConstantUndefined();
7886 
7887  HEnvironment* inner_env =
7888  environment()->CopyForInlining(target,
7889  arguments_count,
7890  function,
7891  undefined,
7892  function_state()->inlining_kind());
7893 
7894  HConstant* context = Add<HConstant>(Handle<Context>(target->context()));
7895  inner_env->BindContext(context);
7896 
7897  // Create a dematerialized arguments object for the function, also copy the
7898  // current arguments values to use them for materialization.
7899  HEnvironment* arguments_env = inner_env->arguments_environment();
7900  int parameter_count = arguments_env->parameter_count();
7901  HArgumentsObject* arguments_object = Add<HArgumentsObject>(parameter_count);
7902  for (int i = 0; i < parameter_count; i++) {
7903  arguments_object->AddArgument(arguments_env->Lookup(i), zone());
7904  }
7905 
7906  // If the function uses the arguments object, then bind one.
7907  if (function->scope()->arguments() != NULL) {
7908  DCHECK(function->scope()->arguments()->IsStackAllocated());
7909  inner_env->Bind(function->scope()->arguments(), arguments_object);
7910  }
7911 
7912  // Capture the state before invoking the inlined function for deopt in the
7913  // inlined function. This simulate has no bailout-id since it's not directly
7914  // reachable for deopt, and is only used to capture the state. If the simulate
7915  // becomes reachable by merging, the ast id of the simulate merged into it is
7916  // adopted.
7917  Add<HSimulate>(BailoutId::None());
7918 
7919  current_block()->UpdateEnvironment(inner_env);
7920  Scope* saved_scope = scope();
7921  set_scope(target_info.scope());
7922  HEnterInlined* enter_inlined =
7923  Add<HEnterInlined>(return_id, target, context, arguments_count, function,
7924  function_state()->inlining_kind(),
7925  function->scope()->arguments(), arguments_object);
7926  function_state()->set_entry(enter_inlined);
7927 
7928  VisitDeclarations(target_info.scope()->declarations());
7929  VisitStatements(function->body());
7930  set_scope(saved_scope);
7931  if (HasStackOverflow()) {
7932  // Bail out if the inlined function did, as we cannot residualize a call
7933  // instead, but do not disable optimization for the outer function.
7934  TraceInline(target, caller, "inline graph construction failed");
7935  target_shared->DisableOptimization(kInliningBailedOut);
7936  current_info()->RetryOptimization(kInliningBailedOut);
7937  delete target_state;
7938  return true;
7939  }
7940 
7941  // Update inlined nodes count.
7942  inlined_count_ += nodes_added;
7943 
7944  Handle<Code> unoptimized_code(target_shared->code());
7945  DCHECK(unoptimized_code->kind() == Code::FUNCTION);
7946  Handle<TypeFeedbackInfo> type_info(
7947  TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
7948  graph()->update_type_change_checksum(type_info->own_type_change_checksum());
7949 
7950  TraceInline(target, caller, NULL);
7951 
7952  if (current_block() != NULL) {
7953  FunctionState* state = function_state();
7954  if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
7955  // Falling off the end of an inlined construct call. In a test context the
7956  // return value will always evaluate to true, in a value context the
7957  // return value is the newly allocated receiver.
7958  if (call_context()->IsTest()) {
7959  Goto(inlined_test_context()->if_true(), state);
7960  } else if (call_context()->IsEffect()) {
7961  Goto(function_return(), state);
7962  } else {
7963  DCHECK(call_context()->IsValue());
7964  AddLeaveInlined(implicit_return_value, state);
7965  }
7966  } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
7967  // Falling off the end of an inlined setter call. The returned value is
7968  // never used, the value of an assignment is always the value of the RHS
7969  // of the assignment.
7970  if (call_context()->IsTest()) {
7971  inlined_test_context()->ReturnValue(implicit_return_value);
7972  } else if (call_context()->IsEffect()) {
7973  Goto(function_return(), state);
7974  } else {
7975  DCHECK(call_context()->IsValue());
7976  AddLeaveInlined(implicit_return_value, state);
7977  }
7978  } else {
7979  // Falling off the end of a normal inlined function. This basically means
7980  // returning undefined.
7981  if (call_context()->IsTest()) {
7982  Goto(inlined_test_context()->if_false(), state);
7983  } else if (call_context()->IsEffect()) {
7984  Goto(function_return(), state);
7985  } else {
7986  DCHECK(call_context()->IsValue());
7987  AddLeaveInlined(undefined, state);
7988  }
7989  }
7990  }
7991 
7992  // Fix up the function exits.
7993  if (inlined_test_context() != NULL) {
7994  HBasicBlock* if_true = inlined_test_context()->if_true();
7995  HBasicBlock* if_false = inlined_test_context()->if_false();
7996 
7997  HEnterInlined* entry = function_state()->entry();
7998 
7999  // Pop the return test context from the expression context stack.
8000  DCHECK(ast_context() == inlined_test_context());
8001  ClearInlinedTestContext();
8002  delete target_state;
8003 
8004  // Forward to the real test context.
8005  if (if_true->HasPredecessor()) {
8006  entry->RegisterReturnTarget(if_true, zone());
8007  if_true->SetJoinId(ast_id);
8008  HBasicBlock* true_target = TestContext::cast(ast_context())->if_true();
8009  Goto(if_true, true_target, function_state());
8010  }
8011  if (if_false->HasPredecessor()) {
8012  entry->RegisterReturnTarget(if_false, zone());
8013  if_false->SetJoinId(ast_id);
8014  HBasicBlock* false_target = TestContext::cast(ast_context())->if_false();
8015  Goto(if_false, false_target, function_state());
8016  }
8017  set_current_block(NULL);
8018  return true;
8019 
8020  } else if (function_return()->HasPredecessor()) {
8021  function_state()->entry()->RegisterReturnTarget(function_return(), zone());
8022  function_return()->SetJoinId(ast_id);
8023  set_current_block(function_return());
8024  } else {
8025  set_current_block(NULL);
8026  }
8027  delete target_state;
8028  return true;
8029 }
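// Illustrative standalone sketch (not part of hydrogen.cc): one of the early
// checks in TryInline above walks the chain of outer environments and counts
// JS_FUNCTION frames to enforce the maximum inlining depth. A model of that
// walk over a toy frame list; ToyFrame and AtMaxInlineDepth are invented.
#include <vector>

enum class ToyFrame { kJsFunction, kOther };

inline bool AtMaxInlineDepth(const std::vector<ToyFrame>& outer_frames,
                             int max_inlining_levels) {
  int current_level = 1;  // the function being compiled counts as level one
  for (ToyFrame frame : outer_frames) {
    if (current_level == max_inlining_levels) return true;  // depth limit hit
    if (frame == ToyFrame::kJsFunction) ++current_level;
  }
  return false;
}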
8030 
8031 
8032 bool HOptimizedGraphBuilder::TryInlineCall(Call* expr) {
8033  return TryInline(expr->target(),
8034  expr->arguments()->length(),
8035  NULL,
8036  expr->id(),
8037  expr->ReturnId(),
8038  NORMAL_RETURN,
8039  ScriptPositionToSourcePosition(expr->position()));
8040 }
8041 
8042 
8043 bool HOptimizedGraphBuilder::TryInlineConstruct(CallNew* expr,
8044  HValue* implicit_return_value) {
8045  return TryInline(expr->target(),
8046  expr->arguments()->length(),
8047  implicit_return_value,
8048  expr->id(),
8049  expr->ReturnId(),
8050  CONSTRUCT_CALL_RETURN,
8051  ScriptPositionToSourcePosition(expr->position()));
8052 }
8053 
8054 
8055 bool HOptimizedGraphBuilder::TryInlineGetter(Handle<JSFunction> getter,
8056  Handle<Map> receiver_map,
8057  BailoutId ast_id,
8058  BailoutId return_id) {
8059  if (TryInlineApiGetter(getter, receiver_map, ast_id)) return true;
8060  return TryInline(getter,
8061  0,
8062  NULL,
8063  ast_id,
8064  return_id,
8065  GETTER_CALL_RETURN,
8066  source_position());
8067 }
8068 
8069 
8071  Handle<Map> receiver_map,
8072  BailoutId id,
8073  BailoutId assignment_id,
8074  HValue* implicit_return_value) {
8075  if (TryInlineApiSetter(setter, receiver_map, id)) return true;
8076  return TryInline(setter,
8077  1,
8078  implicit_return_value,
8079  id, assignment_id,
8080  SETTER_CALL_RETURN,
8081  source_position());
8082 }
8083 
8084 
8085 bool HOptimizedGraphBuilder::TryInlineApply(Handle<JSFunction> function,
8086  Call* expr,
8087  int arguments_count) {
8088  return TryInline(function,
8089  arguments_count,
8090  NULL,
8091  expr->id(),
8092  expr->ReturnId(),
8093  NORMAL_RETURN,
8094  ScriptPositionToSourcePosition(expr->position()));
8095 }
8096 
8097 
8098 bool HOptimizedGraphBuilder::TryInlineBuiltinFunctionCall(Call* expr) {
8099  if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
8100  BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
8101  switch (id) {
8102  case kMathExp:
8103  if (!FLAG_fast_math) break;
8104  // Fall through if FLAG_fast_math.
8105  case kMathRound:
8106  case kMathFround:
8107  case kMathFloor:
8108  case kMathAbs:
8109  case kMathSqrt:
8110  case kMathLog:
8111  case kMathClz32:
8112  if (expr->arguments()->length() == 1) {
8113  HValue* argument = Pop();
8114  Drop(2); // Receiver and function.
8115  HInstruction* op = NewUncasted<HUnaryMathOperation>(argument, id);
8116  ast_context()->ReturnInstruction(op, expr->id());
8117  return true;
8118  }
8119  break;
8120  case kMathImul:
8121  if (expr->arguments()->length() == 2) {
8122  HValue* right = Pop();
8123  HValue* left = Pop();
8124  Drop(2); // Receiver and function.
8125  HInstruction* op = HMul::NewImul(zone(), context(), left, right);
8126  ast_context()->ReturnInstruction(op, expr->id());
8127  return true;
8128  }
8129  break;
8130  default:
8131  // Not supported for inlining yet.
8132  break;
8133  }
8134  return false;
8135 }
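// Illustrative standalone sketch (not part of hydrogen.cc): the method-call
// variant below strength-reduces Math.pow (see its kMathPow case) when the
// exponent is a known constant: 0.5 becomes sqrt, -0.5 becomes 1/sqrt, and 2
// becomes a plain multiply. A runnable equivalent of that decision using
// <cmath> instead of Hydrogen nodes; the function name is invented.
#include <cmath>

inline double PowWithConstantExponentSketch(double base, double exponent) {
  if (exponent == 0.5) return std::sqrt(base);
  if (exponent == -0.5) return 1.0 / std::sqrt(base);  // MathPowHalf then HDiv
  if (exponent == 2.0) return base * base;             // HMul(left, left)
  return std::pow(base, exponent);                     // generic HPower
}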
8136 
8137 
8138 bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall(
8139  Call* expr,
8140  HValue* receiver,
8141  Handle<Map> receiver_map) {
8142  // Try to inline calls like Math.* as operations in the calling function.
8143  if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
8144  BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
8145  int argument_count = expr->arguments()->length() + 1; // Plus receiver.
8146  switch (id) {
8147  case kStringCharCodeAt:
8148  case kStringCharAt:
8149  if (argument_count == 2) {
8150  HValue* index = Pop();
8151  HValue* string = Pop();
8152  Drop(1); // Function.
8153  HInstruction* char_code =
8154  BuildStringCharCodeAt(string, index);
8155  if (id == kStringCharCodeAt) {
8156  ast_context()->ReturnInstruction(char_code, expr->id());
8157  return true;
8158  }
8159  AddInstruction(char_code);
8160  HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
8161  ast_context()->ReturnInstruction(result, expr->id());
8162  return true;
8163  }
8164  break;
8165  case kStringFromCharCode:
8166  if (argument_count == 2) {
8167  HValue* argument = Pop();
8168  Drop(2); // Receiver and function.
8169  HInstruction* result = NewUncasted<HStringCharFromCode>(argument);
8170  ast_context()->ReturnInstruction(result, expr->id());
8171  return true;
8172  }
8173  break;
8174  case kMathExp:
8175  if (!FLAG_fast_math) break;
8176  // Fall through if FLAG_fast_math.
8177  case kMathRound:
8178  case kMathFround:
8179  case kMathFloor:
8180  case kMathAbs:
8181  case kMathSqrt:
8182  case kMathLog:
8183  case kMathClz32:
8184  if (argument_count == 2) {
8185  HValue* argument = Pop();
8186  Drop(2); // Receiver and function.
8187  HInstruction* op = NewUncasted<HUnaryMathOperation>(argument, id);
8188  ast_context()->ReturnInstruction(op, expr->id());
8189  return true;
8190  }
8191  break;
8192  case kMathPow:
8193  if (argument_count == 3) {
8194  HValue* right = Pop();
8195  HValue* left = Pop();
8196  Drop(2); // Receiver and function.
8197  HInstruction* result = NULL;
8198  // Use sqrt() if exponent is 0.5 or -0.5.
8199  if (right->IsConstant() && HConstant::cast(right)->HasDoubleValue()) {
8200  double exponent = HConstant::cast(right)->DoubleValue();
8201  if (exponent == 0.5) {
8202  result = NewUncasted<HUnaryMathOperation>(left, kMathPowHalf);
8203  } else if (exponent == -0.5) {
8204  HValue* one = graph()->GetConstant1();
8205  HInstruction* sqrt = AddUncasted<HUnaryMathOperation>(
8206  left, kMathPowHalf);
8207  // MathPowHalf doesn't have side effects so there's no need for
8208  // an environment simulation here.
8209  DCHECK(!sqrt->HasObservableSideEffects());
8210  result = NewUncasted<HDiv>(one, sqrt);
8211  } else if (exponent == 2.0) {
8212  result = NewUncasted<HMul>(left, left);
8213  }
8214  }
8215 
8216  if (result == NULL) {
8217  result = NewUncasted<HPower>(left, right);
8218  }
8219  ast_context()->ReturnInstruction(result, expr->id());
8220  return true;
8221  }
8222  break;
8223  case kMathMax:
8224  case kMathMin:
8225  if (argument_count == 3) {
8226  HValue* right = Pop();
8227  HValue* left = Pop();
8228  Drop(2); // Receiver and function.
8229  HMathMinMax::Operation op = (id == kMathMin) ? HMathMinMax::kMathMin
8230  : HMathMinMax::kMathMax;
8231  HInstruction* result = NewUncasted<HMathMinMax>(left, right, op);
8232  ast_context()->ReturnInstruction(result, expr->id());
8233  return true;
8234  }
8235  break;
8236  case kMathImul:
8237  if (argument_count == 3) {
8238  HValue* right = Pop();
8239  HValue* left = Pop();
8240  Drop(2); // Receiver and function.
8241  HInstruction* result = HMul::NewImul(zone(), context(), left, right);
8242  ast_context()->ReturnInstruction(result, expr->id());
8243  return true;
8244  }
8245  break;
8246  case kArrayPop: {
8247  if (receiver_map.is_null()) return false;
8248  if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
8249  ElementsKind elements_kind = receiver_map->elements_kind();
8250  if (!IsFastElementsKind(elements_kind)) return false;
8251  if (receiver_map->is_observed()) return false;
8252  if (!receiver_map->is_extensible()) return false;
8253 
8254  Drop(expr->arguments()->length());
8255  HValue* result;
8256  HValue* reduced_length;
8257  HValue* receiver = Pop();
8258 
8259  HValue* checked_object = AddCheckMap(receiver, receiver_map);
8260  HValue* length = Add<HLoadNamedField>(
8261  checked_object, static_cast<HValue*>(NULL),
8262  HObjectAccess::ForArrayLength(elements_kind));
8263 
8264  Drop(1); // Function.
8265 
8266  { NoObservableSideEffectsScope scope(this);
8267  IfBuilder length_checker(this);
8268 
8269  HValue* bounds_check = length_checker.If<HCompareNumericAndBranch>(
8270  length, graph()->GetConstant0(), Token::EQ);
8271  length_checker.Then();
8272 
8273  if (!ast_context()->IsEffect()) Push(graph()->GetConstantUndefined());
8274 
8275  length_checker.Else();
8276  HValue* elements = AddLoadElements(checked_object);
8277  // Ensure that we aren't popping from a copy-on-write array.
8278  if (IsFastSmiOrObjectElementsKind(elements_kind)) {
8279  elements = BuildCopyElementsOnWrite(checked_object, elements,
8280  elements_kind, length);
8281  }
8282  reduced_length = AddUncasted<HSub>(length, graph()->GetConstant1());
8283  result = AddElementAccess(elements, reduced_length, NULL,
8284  bounds_check, elements_kind, LOAD);
8285  Factory* factory = isolate()->factory();
8286  double nan_double = FixedDoubleArray::hole_nan_as_double();
8287  HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
8288  ? Add<HConstant>(factory->the_hole_value())
8289  : Add<HConstant>(nan_double);
8290  if (IsFastSmiOrObjectElementsKind(elements_kind)) {
8291  elements_kind = FAST_HOLEY_ELEMENTS;
8292  }
8293  AddElementAccess(
8294  elements, reduced_length, hole, bounds_check, elements_kind, STORE);
8295  Add<HStoreNamedField>(
8296  checked_object, HObjectAccess::ForArrayLength(elements_kind),
8297  reduced_length, STORE_TO_INITIALIZED_ENTRY);
8298 
8299  if (!ast_context()->IsEffect()) Push(result);
8300 
8301  length_checker.End();
8302  }
8303  result = ast_context()->IsEffect() ? graph()->GetConstant0() : Top();
8304  Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
8305  if (!ast_context()->IsEffect()) Drop(1);
8306 
8307  ast_context()->ReturnValue(result);
8308  return true;
8309  }
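    // Note on the inlined Array.prototype.pop() above: the vacated slot is
    // overwritten with the hole (or the hole NaN for double arrays) using the
    // corresponding holey elements kind, so the backing store no longer keeps
    // the popped element alive, and the stored length is the reduced length.
    // A zero-length receiver skips all of this and yields undefined.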
8310  case kArrayPush: {
8311  if (receiver_map.is_null()) return false;
8312  if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
8313  ElementsKind elements_kind = receiver_map->elements_kind();
8314  if (!IsFastElementsKind(elements_kind)) return false;
8315  if (receiver_map->is_observed()) return false;
8316  if (JSArray::IsReadOnlyLengthDescriptor(receiver_map)) return false;
8317  if (!receiver_map->is_extensible()) return false;
8318 
8319  // If there may be elements accessors in the prototype chain, the fast
8320  // inlined version can't be used.
8321  if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;
8322  // If there currently can be no elements accessors on the prototype chain,
8323  // it doesn't mean that there won't be any later. Install a full prototype
8324  // chain check to trap element accessors being installed on the prototype
8325  // chain, which would cause elements to go to dictionary mode and result
8326  // in a map change.
8327  Handle<JSObject> prototype(JSObject::cast(receiver_map->prototype()));
8328  BuildCheckPrototypeMaps(prototype, Handle<JSObject>());
8329 
8330  const int argc = expr->arguments()->length();
8331  if (argc != 1) return false;
8332 
8333  HValue* value_to_push = Pop();
8334  HValue* array = Pop();
8335  Drop(1); // Drop function.
8336 
8337  HInstruction* new_size = NULL;
8338  HValue* length = NULL;
8339 
8340  {
8341  NoObservableSideEffectsScope scope(this);
8342 
8343  length = Add<HLoadNamedField>(array, static_cast<HValue*>(NULL),
8344  HObjectAccess::ForArrayLength(elements_kind));
8345 
8346  new_size = AddUncasted<HAdd>(length, graph()->GetConstant1());
8347 
8348  bool is_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
8349  BuildUncheckedMonomorphicElementAccess(array, length,
8350  value_to_push, is_array,
8351  elements_kind, STORE,
8352  NEVER_RETURN_HOLE,
8353  STORE_AND_GROW_NO_TRANSITION);
8354 
8355  if (!ast_context()->IsEffect()) Push(new_size);
8356  Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
8357  if (!ast_context()->IsEffect()) Drop(1);
8358  }
8359 
8360  ast_context()->ReturnValue(new_size);
8361  return true;
8362  }
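    // The prototype chain check installed above is what keeps this inlined
    // Array.prototype.push() valid. Illustratively (hypothetical JavaScript),
    //   Object.defineProperty(Array.prototype, 0, { set: function(v) {} });
    // would install an element accessor on the prototype, change its map and
    // invalidate the optimized code instead of silently bypassing the setter.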
8363  case kArrayShift: {
8364  if (receiver_map.is_null()) return false;
8365  if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
8366  ElementsKind kind = receiver_map->elements_kind();
8367  if (!IsFastElementsKind(kind)) return false;
8368  if (receiver_map->is_observed()) return false;
8369  if (!receiver_map->is_extensible()) return false;
8370 
8371  // If there may be elements accessors in the prototype chain, the fast
8372  // inlined version can't be used.
8373  if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;
8374 
8375  // If there currently can be no elements accessors on the prototype chain,
8376  // it doesn't mean that there won't be any later. Install a full prototype
8377  // chain check to trap element accessors being installed on the prototype
8378  // chain, which would cause elements to go to dictionary mode and result
8379  // in a map change.
8380  BuildCheckPrototypeMaps(
8381  handle(JSObject::cast(receiver_map->prototype()), isolate()),
8382  Handle<JSObject>());
8383 
8384  // Threshold for fast inlined Array.shift().
8385  HConstant* inline_threshold = Add<HConstant>(static_cast<int32_t>(16));
8386 
8387  Drop(expr->arguments()->length());
8388  HValue* receiver = Pop();
8389  HValue* function = Pop();
8390  HValue* result;
8391 
8392  {
8393  NoObservableSideEffectsScope scope(this);
8394 
8395  HValue* length = Add<HLoadNamedField>(
8396  receiver, static_cast<HValue*>(NULL),
8397  HObjectAccess::ForArrayLength(kind));
8398 
8399  IfBuilder if_lengthiszero(this);
8400  HValue* lengthiszero = if_lengthiszero.If<HCompareNumericAndBranch>(
8401  length, graph()->GetConstant0(), Token::EQ);
8402  if_lengthiszero.Then();
8403  {
8404  if (!ast_context()->IsEffect()) Push(graph()->GetConstantUndefined());
8405  }
8406  if_lengthiszero.Else();
8407  {
8408  HValue* elements = AddLoadElements(receiver);
8409 
8410  // Check if we can use the fast inlined Array.shift().
8411  IfBuilder if_inline(this);
8412  if_inline.If<HCompareNumericAndBranch>(
8413  length, inline_threshold, Token::LTE);
8414  if (IsFastSmiOrObjectElementsKind(kind)) {
8415  // We cannot handle copy-on-write backing stores here.
8416  if_inline.AndIf<HCompareMap>(
8417  elements, isolate()->factory()->fixed_array_map());
8418  }
8419  if_inline.Then();
8420  {
8421  // Remember the result.
8422  if (!ast_context()->IsEffect()) {
8423  Push(AddElementAccess(elements, graph()->GetConstant0(), NULL,
8424  lengthiszero, kind, LOAD));
8425  }
8426 
8427  // Compute the new length.
8428  HValue* new_length = AddUncasted<HSub>(
8429  length, graph()->GetConstant1());
8430  new_length->ClearFlag(HValue::kCanOverflow);
8431 
8432  // Copy the remaining elements.
8433  LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
8434  {
8435  HValue* new_key = loop.BeginBody(
8436  graph()->GetConstant0(), new_length, Token::LT);
8437  HValue* key = AddUncasted<HAdd>(new_key, graph()->GetConstant1());
8438  key->ClearFlag(HValue::kCanOverflow);
8439  HValue* element = AddUncasted<HLoadKeyed>(
8440  elements, key, lengthiszero, kind, ALLOW_RETURN_HOLE);
8441  HStoreKeyed* store = Add<HStoreKeyed>(
8442  elements, new_key, element, kind);
8443  store->SetFlag(HValue::kAllowUndefinedAsNaN);
8444  }
8445  loop.EndBody();
8446 
8447  // Put a hole at the end.
8448  HValue* hole = IsFastSmiOrObjectElementsKind(kind)
8449  ? Add<HConstant>(isolate()->factory()->the_hole_value())
8450  : Add<HConstant>(FixedDoubleArray::hole_nan_as_double());
8451  if (IsFastSmiOrObjectElementsKind(kind)) kind = FAST_HOLEY_ELEMENTS;
8452  Add<HStoreKeyed>(
8453  elements, new_length, hole, kind, INITIALIZING_STORE);
8454 
8455  // Remember new length.
8456  Add<HStoreNamedField>(
8457  receiver, HObjectAccess::ForArrayLength(kind),
8458  new_length, STORE_TO_INITIALIZED_ENTRY);
8459  }
8460  if_inline.Else();
8461  {
8462  Add<HPushArguments>(receiver);
8463  result = Add<HCallJSFunction>(function, 1, true);
8464  if (!ast_context()->IsEffect()) Push(result);
8465  }
8466  if_inline.End();
8467  }
8468  if_lengthiszero.End();
8469  }
8470  result = ast_context()->IsEffect() ? graph()->GetConstant0() : Top();
8471  Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
8472  if (!ast_context()->IsEffect()) Drop(1);
8473  ast_context()->ReturnValue(result);
8474  return true;
8475  }
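    // The inline_threshold above bounds the copied range: receivers with at
    // most 16 elements (and a non copy-on-write backing store for smi/object
    // kinds) are shifted in place by the copy loop, which moves element i+1
    // into slot i and leaves a hole at the old last index. Anything larger
    // falls back to calling the shift builtin through HCallJSFunction.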
8476  case kArrayIndexOf:
8477  case kArrayLastIndexOf: {
8478  if (receiver_map.is_null()) return false;
8479  if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
8480  ElementsKind kind = receiver_map->elements_kind();
8481  if (!IsFastElementsKind(kind)) return false;
8482  if (receiver_map->is_observed()) return false;
8483  if (argument_count != 2) return false;
8484  if (!receiver_map->is_extensible()) return false;
8485 
8486  // If there may be elements accessors in the prototype chain, the fast
8487  // inlined version can't be used.
8488  if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;
8489 
8490  // If there currently can be no elements accessors on the prototype chain,
8491  // it doesn't mean that there won't be any later. Install a full prototype
8492  // chain check to trap element accessors being installed on the prototype
8493  // chain, which would cause elements to go to dictionary mode and result
8494  // in a map change.
8495  BuildCheckPrototypeMaps(
8496  handle(JSObject::cast(receiver_map->prototype()), isolate()),
8497  Handle<JSObject>());
8498 
8499  HValue* search_element = Pop();
8500  HValue* receiver = Pop();
8501  Drop(1); // Drop function.
8502 
8503  ArrayIndexOfMode mode = (id == kArrayIndexOf)
8504  ? kFirstIndexOf : kLastIndexOf;
8505  HValue* index = BuildArrayIndexOf(receiver, search_element, kind, mode);
8506 
8507  if (!ast_context()->IsEffect()) Push(index);
8508  Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
8509  if (!ast_context()->IsEffect()) Drop(1);
8510  ast_context()->ReturnValue(index);
8511  return true;
8512  }
8513  default:
8514  // Not yet supported for inlining.
8515  break;
8516  }
8517  return false;
8518 }
8519 
8520 
8521 bool HOptimizedGraphBuilder::TryInlineApiFunctionCall(Call* expr,
8522  HValue* receiver) {
8523  Handle<JSFunction> function = expr->target();
8524  int argc = expr->arguments()->length();
8525  SmallMapList receiver_maps;
8526  return TryInlineApiCall(function,
8527  receiver,
8528  &receiver_maps,
8529  argc,
8530  expr->id(),
8531  kCallApiFunction);
8532 }
8533 
8534 
8535 bool HOptimizedGraphBuilder::TryInlineApiMethodCall(
8536  Call* expr,
8537  HValue* receiver,
8538  SmallMapList* receiver_maps) {
8539  Handle<JSFunction> function = expr->target();
8540  int argc = expr->arguments()->length();
8541  return TryInlineApiCall(function,
8542  receiver,
8543  receiver_maps,
8544  argc,
8545  expr->id(),
8546  kCallApiMethod);
8547 }
8548 
8549 
8550 bool HOptimizedGraphBuilder::TryInlineApiGetter(Handle<JSFunction> function,
8551  Handle<Map> receiver_map,
8552  BailoutId ast_id) {
8553  SmallMapList receiver_maps(1, zone());
8554  receiver_maps.Add(receiver_map, zone());
8555  return TryInlineApiCall(function,
8556  NULL, // Receiver is on expression stack.
8557  &receiver_maps,
8558  0,
8559  ast_id,
8560  kCallApiGetter);
8561 }
8562 
8563 
8564 bool HOptimizedGraphBuilder::TryInlineApiSetter(Handle<JSFunction> function,
8565  Handle<Map> receiver_map,
8566  BailoutId ast_id) {
8567  SmallMapList receiver_maps(1, zone());
8568  receiver_maps.Add(receiver_map, zone());
8569  return TryInlineApiCall(function,
8570  NULL, // Receiver is on expression stack.
8571  &receiver_maps,
8572  1,
8573  ast_id,
8574  kCallApiSetter);
8575 }
8576 
8577 
8578 bool HOptimizedGraphBuilder::TryInlineApiCall(Handle<JSFunction> function,
8579  HValue* receiver,
8580  SmallMapList* receiver_maps,
8581  int argc,
8582  BailoutId ast_id,
8583  ApiCallType call_type) {
8584  CallOptimization optimization(function);
8585  if (!optimization.is_simple_api_call()) return false;
8586  Handle<Map> holder_map;
8587  if (call_type == kCallApiFunction) {
8588  // Cannot embed a direct reference to the global proxy map
8589  // as it may be dropped on deserialization.
8590  CHECK(!isolate()->serializer_enabled());
8591  DCHECK_EQ(0, receiver_maps->length());
8592  receiver_maps->Add(handle(function->global_proxy()->map()), zone());
8593  }
8594  CallOptimization::HolderLookup holder_lookup =
8595  CallOptimization::kHolderNotFound;
8596  Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
8597  receiver_maps->first(), &holder_lookup);
8598  if (holder_lookup == CallOptimization::kHolderNotFound) return false;
8599 
8600  if (FLAG_trace_inlining) {
8601  PrintF("Inlining api function ");
8602  function->ShortPrint();
8603  PrintF("\n");
8604  }
8605 
8606  bool drop_extra = false;
8607  bool is_store = false;
8608  switch (call_type) {
8609  case kCallApiFunction:
8610  case kCallApiMethod:
8611  // Need to check that none of the receiver maps could have changed.
8612  Add<HCheckMaps>(receiver, receiver_maps);
8613  // Need to ensure the chain between receiver and api_holder is intact.
8614  if (holder_lookup == CallOptimization::kHolderFound) {
8615  AddCheckPrototypeMaps(api_holder, receiver_maps->first());
8616  } else {
8617  DCHECK_EQ(holder_lookup, CallOptimization::kHolderIsReceiver);
8618  }
8619  // Includes receiver.
8620  PushArgumentsFromEnvironment(argc + 1);
8621  // Drop function after call.
8622  drop_extra = true;
8623  break;
8624  case kCallApiGetter:
8625  // Receiver and prototype chain cannot have changed.
8626  DCHECK_EQ(0, argc);
8627  DCHECK_EQ(NULL, receiver);
8628  // Receiver is on expression stack.
8629  receiver = Pop();
8630  Add<HPushArguments>(receiver);
8631  break;
8632  case kCallApiSetter:
8633  {
8634  is_store = true;
8635  // Receiver and prototype chain cannot have changed.
8636  DCHECK_EQ(1, argc);
8637  DCHECK_EQ(NULL, receiver);
8638  // Receiver and value are on expression stack.
8639  HValue* value = Pop();
8640  receiver = Pop();
8641  Add<HPushArguments>(receiver, value);
8642  break;
8643  }
8644  }
8645 
8646  HValue* holder = NULL;
8647  switch (holder_lookup) {
8648  case CallOptimization::kHolderFound:
8649  holder = Add<HConstant>(api_holder);
8650  break;
8651  case CallOptimization::kHolderIsReceiver:
8652  holder = receiver;
8653  break;
8654  case CallOptimization::kHolderNotFound:
8655  UNREACHABLE();
8656  break;
8657  }
8658  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
8659  Handle<Object> call_data_obj(api_call_info->data(), isolate());
8660  bool call_data_is_undefined = call_data_obj->IsUndefined();
8661  HValue* call_data = Add<HConstant>(call_data_obj);
8662  ApiFunction fun(v8::ToCData<Address>(api_call_info->callback()));
8663  ExternalReference ref = ExternalReference(&fun,
8664  ExternalReference::DIRECT_API_CALL,
8665  isolate());
8666  HValue* api_function_address = Add<HConstant>(ExternalReference(ref));
8667 
8668  HValue* op_vals[] = {
8669  context(),
8670  Add<HConstant>(function),
8671  call_data,
8672  holder,
8673  api_function_address
8674  };
8675 
8676  ApiFunctionDescriptor descriptor(isolate());
8677  CallApiFunctionStub stub(isolate(), is_store, call_data_is_undefined, argc);
8678  Handle<Code> code = stub.GetCode();
8679  HConstant* code_value = Add<HConstant>(code);
8680 
8681  DCHECK((sizeof(op_vals) / kPointerSize) == descriptor.GetEnvironmentLength());
8682 
8683  HInstruction* call = New<HCallWithDescriptor>(
8684  code_value, argc + 1, descriptor,
8685  Vector<HValue*>(op_vals, descriptor.GetEnvironmentLength()));
8686 
8687  if (drop_extra) Drop(1); // Drop function.
8688  ast_context()->ReturnInstruction(call, ast_id);
8689  return true;
8690 }
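// The op_vals array assembled above follows the CallApiFunctionStub calling
// convention: context, callee, call data, holder and the address of the C++
// API callback. The stub performs the actual callback invocation, so the only
// per-site work is the receiver checks and argument pushes emitted above.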
8691 
8692 
8693 bool HOptimizedGraphBuilder::TryCallApply(Call* expr) {
8694  DCHECK(expr->expression()->IsProperty());
8695 
8696  if (!expr->IsMonomorphic()) {
8697  return false;
8698  }
8699  Handle<Map> function_map = expr->GetReceiverTypes()->first();
8700  if (function_map->instance_type() != JS_FUNCTION_TYPE ||
8701  !expr->target()->shared()->HasBuiltinFunctionId() ||
8702  expr->target()->shared()->builtin_function_id() != kFunctionApply) {
8703  return false;
8704  }
8705 
8706  if (current_info()->scope()->arguments() == NULL) return false;
8707 
8708  ZoneList<Expression*>* args = expr->arguments();
8709  if (args->length() != 2) return false;
8710 
8711  VariableProxy* arg_two = args->at(1)->AsVariableProxy();
8712  if (arg_two == NULL || !arg_two->var()->IsStackAllocated()) return false;
8713  HValue* arg_two_value = LookupAndMakeLive(arg_two->var());
8714  if (!arg_two_value->CheckFlag(HValue::kIsArguments)) return false;
8715 
8716  // Found pattern f.apply(receiver, arguments).
8717  CHECK_ALIVE_OR_RETURN(VisitForValue(args->at(0)), true);
8718  HValue* receiver = Pop(); // receiver
8719  HValue* function = Pop(); // f
8720  Drop(1); // apply
8721 
8722  HValue* checked_function = AddCheckMap(function, function_map);
8723 
8724  if (function_state()->outer() == NULL) {
8725  HInstruction* elements = Add<HArgumentsElements>(false);
8726  HInstruction* length = Add<HArgumentsLength>(elements);
8727  HValue* wrapped_receiver = BuildWrapReceiver(receiver, checked_function);
8728  HInstruction* result = New<HApplyArguments>(function,
8729  wrapped_receiver,
8730  length,
8731  elements);
8732  ast_context()->ReturnInstruction(result, expr->id());
8733  return true;
8734  } else {
8735  // We are inside an inlined function and we know exactly what is inside
8736  // the arguments object. But we need to be able to materialize it at deopt.
8737  DCHECK_EQ(environment()->arguments_environment()->parameter_count(),
8738  function_state()->entry()->arguments_object()->arguments_count());
8739  HArgumentsObject* args = function_state()->entry()->arguments_object();
8740  const ZoneList<HValue*>* arguments_values = args->arguments_values();
8741  int arguments_count = arguments_values->length();
8742  Push(function);
8743  Push(BuildWrapReceiver(receiver, checked_function));
8744  for (int i = 1; i < arguments_count; i++) {
8745  Push(arguments_values->at(i));
8746  }
8747 
8748  Handle<JSFunction> known_function;
8749  if (function->IsConstant() &&
8750  HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
8751  known_function = Handle<JSFunction>::cast(
8752  HConstant::cast(function)->handle(isolate()));
8753  int args_count = arguments_count - 1; // Excluding receiver.
8754  if (TryInlineApply(known_function, expr, args_count)) return true;
8755  }
8756 
8757  PushArgumentsFromEnvironment(arguments_count);
8758  HInvokeFunction* call = New<HInvokeFunction>(
8759  function, known_function, arguments_count);
8760  Drop(1); // Function.
8761  ast_context()->ReturnInstruction(call, expr->id());
8762  return true;
8763  }
8764 }
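// The pattern recognized above is, illustratively (hypothetical JavaScript):
//   function outer() { return f.apply(obj, arguments); }
// i.e. Function.prototype.apply called with the caller's own unmodified
// arguments object. At the outermost frame this becomes one HApplyArguments
// instruction; inside an inlined frame the known argument values are pushed
// directly and the apply degenerates into an ordinary (possibly further
// inlined) call of the target function.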
8765 
8766 
8767 HValue* HOptimizedGraphBuilder::ImplicitReceiverFor(HValue* function,
8768  Handle<JSFunction> target) {
8769  SharedFunctionInfo* shared = target->shared();
8770  if (shared->strict_mode() == SLOPPY && !shared->native()) {
8771  // Cannot embed a direct reference to the global proxy
8772  // as it is dropped on deserialization.
8773  CHECK(!isolate()->serializer_enabled());
8774  Handle<JSObject> global_proxy(target->context()->global_proxy());
8775  return Add<HConstant>(global_proxy);
8776  }
8777  return graph()->GetConstantUndefined();
8778 }
8779 
8780 
8781 void HOptimizedGraphBuilder::BuildArrayCall(Expression* expression,
8782  int arguments_count,
8783  HValue* function,
8784  Handle<AllocationSite> site) {
8785  Add<HCheckValue>(function, array_function());
8786 
8787  if (IsCallArrayInlineable(arguments_count, site)) {
8788  BuildInlinedCallArray(expression, arguments_count, site);
8789  return;
8790  }
8791 
8792  HInstruction* call = PreProcessCall(New<HCallNewArray>(
8793  function, arguments_count + 1, site->GetElementsKind()));
8794  if (expression->IsCall()) {
8795  Drop(1);
8796  }
8797  ast_context()->ReturnInstruction(call, expression->id());
8798 }
8799 
8800 
8801 HValue* HOptimizedGraphBuilder::BuildArrayIndexOf(HValue* receiver,
8802  HValue* search_element,
8803  ElementsKind kind,
8804  ArrayIndexOfMode mode) {
8805  DCHECK(IsFastElementsKind(kind));
8806 
8807  NoObservableSideEffectsScope no_effects(this);
8808 
8809  HValue* elements = AddLoadElements(receiver);
8810  HValue* length = AddLoadArrayLength(receiver, kind);
8811 
8812  HValue* initial;
8813  HValue* terminating;
8814  Token::Value token;
8815  LoopBuilder::Direction direction;
8816  if (mode == kFirstIndexOf) {
8817  initial = graph()->GetConstant0();
8818  terminating = length;
8819  token = Token::LT;
8820  direction = LoopBuilder::kPostIncrement;
8821  } else {
8822  DCHECK_EQ(kLastIndexOf, mode);
8823  initial = length;
8824  terminating = graph()->GetConstant0();
8825  token = Token::GT;
8826  direction = LoopBuilder::kPreDecrement;
8827  }
8828 
8829  Push(graph()->GetConstantMinus1());
8830  if (IsFastDoubleElementsKind(kind) || IsFastSmiElementsKind(kind)) {
8831  // Make sure that we can actually compare numbers correctly below, see
8832  // https://code.google.com/p/chromium/issues/detail?id=407946 for details.
8833  search_element = AddUncasted<HForceRepresentation>(
8834  search_element, IsFastSmiElementsKind(kind) ? Representation::Smi()
8835  : Representation::Double());
8836 
8837  LoopBuilder loop(this, context(), direction);
8838  {
8839  HValue* index = loop.BeginBody(initial, terminating, token);
8840  HValue* element = AddUncasted<HLoadKeyed>(
8841  elements, index, static_cast<HValue*>(NULL),
8842  kind, ALLOW_RETURN_HOLE);
8843  IfBuilder if_issame(this);
8844  if_issame.If<HCompareNumericAndBranch>(element, search_element,
8845  Token::EQ_STRICT);
8846  if_issame.Then();
8847  {
8848  Drop(1);
8849  Push(index);
8850  loop.Break();
8851  }
8852  if_issame.End();
8853  }
8854  loop.EndBody();
8855  } else {
8856  IfBuilder if_isstring(this);
8857  if_isstring.If<HIsStringAndBranch>(search_element);
8858  if_isstring.Then();
8859  {
8860  LoopBuilder loop(this, context(), direction);
8861  {
8862  HValue* index = loop.BeginBody(initial, terminating, token);
8863  HValue* element = AddUncasted<HLoadKeyed>(
8864  elements, index, static_cast<HValue*>(NULL),
8865  kind, ALLOW_RETURN_HOLE);
8866  IfBuilder if_issame(this);
8867  if_issame.If<HIsStringAndBranch>(element);
8868  if_issame.AndIf<HStringCompareAndBranch>(
8869  element, search_element, Token::EQ_STRICT);
8870  if_issame.Then();
8871  {
8872  Drop(1);
8873  Push(index);
8874  loop.Break();
8875  }
8876  if_issame.End();
8877  }
8878  loop.EndBody();
8879  }
8880  if_isstring.Else();
8881  {
8882  IfBuilder if_isnumber(this);
8883  if_isnumber.If<HIsSmiAndBranch>(search_element);
8884  if_isnumber.OrIf<HCompareMap>(
8885  search_element, isolate()->factory()->heap_number_map());
8886  if_isnumber.Then();
8887  {
8888  HValue* search_number =
8889  AddUncasted<HForceRepresentation>(search_element,
8890  Representation::Double());
8891  LoopBuilder loop(this, context(), direction);
8892  {
8893  HValue* index = loop.BeginBody(initial, terminating, token);
8894  HValue* element = AddUncasted<HLoadKeyed>(
8895  elements, index, static_cast<HValue*>(NULL),
8896  kind, ALLOW_RETURN_HOLE);
8897 
8898  IfBuilder if_element_isnumber(this);
8899  if_element_isnumber.If<HIsSmiAndBranch>(element);
8900  if_element_isnumber.OrIf<HCompareMap>(
8901  element, isolate()->factory()->heap_number_map());
8902  if_element_isnumber.Then();
8903  {
8904  HValue* number =
8905  AddUncasted<HForceRepresentation>(element,
8906  Representation::Double());
8907  IfBuilder if_issame(this);
8908  if_issame.If<HCompareNumericAndBranch>(
8909  number, search_number, Token::EQ_STRICT);
8910  if_issame.Then();
8911  {
8912  Drop(1);
8913  Push(index);
8914  loop.Break();
8915  }
8916  if_issame.End();
8917  }
8918  if_element_isnumber.End();
8919  }
8920  loop.EndBody();
8921  }
8922  if_isnumber.Else();
8923  {
8924  LoopBuilder loop(this, context(), direction);
8925  {
8926  HValue* index = loop.BeginBody(initial, terminating, token);
8927  HValue* element = AddUncasted<HLoadKeyed>(
8928  elements, index, static_cast<HValue*>(NULL),
8929  kind, ALLOW_RETURN_HOLE);
8930  IfBuilder if_issame(this);
8931  if_issame.If<HCompareObjectEqAndBranch>(
8932  element, search_element);
8933  if_issame.Then();
8934  {
8935  Drop(1);
8936  Push(index);
8937  loop.Break();
8938  }
8939  if_issame.End();
8940  }
8941  loop.EndBody();
8942  }
8943  if_isnumber.End();
8944  }
8945  if_isstring.End();
8946  }
8947 
8948  return Pop();
8949 }
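// BuildArrayIndexOf above picks its comparison strategy from the inputs: smi
// and double arrays use a single numeric-compare loop, while generic fast
// arrays first classify the search value and then compare with
// HStringCompareAndBranch for strings, a numeric compare for numbers, or
// plain reference equality for everything else. The value left on the stack
// starts out as -1 and is replaced by the index of the matching element
// (searching forward or backward depending on the mode).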
8950 
8951 
8952 bool HOptimizedGraphBuilder::TryHandleArrayCall(Call* expr, HValue* function) {
8953  if (!array_function().is_identical_to(expr->target())) {
8954  return false;
8955  }
8956 
8957  Handle<AllocationSite> site = expr->allocation_site();
8958  if (site.is_null()) return false;
8959 
8960  BuildArrayCall(expr,
8961  expr->arguments()->length(),
8962  function,
8963  site);
8964  return true;
8965 }
8966 
8967 
8968 bool HOptimizedGraphBuilder::TryHandleArrayCallNew(CallNew* expr,
8969  HValue* function) {
8970  if (!array_function().is_identical_to(expr->target())) {
8971  return false;
8972  }
8973 
8974  BuildArrayCall(expr,
8975  expr->arguments()->length(),
8976  function,
8977  expr->allocation_site());
8978  return true;
8979 }
8980 
8981 
8982 void HOptimizedGraphBuilder::VisitCall(Call* expr) {
8983  DCHECK(!HasStackOverflow());
8984  DCHECK(current_block() != NULL);
8985  DCHECK(current_block()->HasPredecessor());
8986  Expression* callee = expr->expression();
8987  int argument_count = expr->arguments()->length() + 1; // Plus receiver.
8988  HInstruction* call = NULL;
8989 
8990  Property* prop = callee->AsProperty();
8991  if (prop != NULL) {
8992  CHECK_ALIVE(VisitForValue(prop->obj()));
8993  HValue* receiver = Top();
8994 
8995  SmallMapList* types;
8996  ComputeReceiverTypes(expr, receiver, &types, zone());
8997 
8998  if (prop->key()->IsPropertyName() && types->length() > 0) {
8999  Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
9000  PropertyAccessInfo info(this, LOAD, ToType(types->first()), name);
9001  if (!info.CanAccessAsMonomorphic(types)) {
9002  HandlePolymorphicCallNamed(expr, receiver, types, name);
9003  return;
9004  }
9005  }
9006 
9007  HValue* key = NULL;
9008  if (!prop->key()->IsPropertyName()) {
9009  CHECK_ALIVE(VisitForValue(prop->key()));
9010  key = Pop();
9011  }
9012 
9013  CHECK_ALIVE(PushLoad(prop, receiver, key));
9014  HValue* function = Pop();
9015 
9016  if (FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
9017 
9018  // Push the function under the receiver.
9019  environment()->SetExpressionStackAt(0, function);
9020 
9021  Push(receiver);
9022 
9023  if (function->IsConstant() &&
9024  HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
9025  Handle<JSFunction> known_function = Handle<JSFunction>::cast(
9026  HConstant::cast(function)->handle(isolate()));
9027  expr->set_target(known_function);
9028 
9029  if (TryCallApply(expr)) return;
9030  CHECK_ALIVE(VisitExpressions(expr->arguments()));
9031 
9032  Handle<Map> map = types->length() == 1 ? types->first() : Handle<Map>();
9033  if (TryInlineBuiltinMethodCall(expr, receiver, map)) {
9034  if (FLAG_trace_inlining) {
9035  PrintF("Inlining builtin ");
9036  known_function->ShortPrint();
9037  PrintF("\n");
9038  }
9039  return;
9040  }
9041  if (TryInlineApiMethodCall(expr, receiver, types)) return;
9042 
9043  // Wrap the receiver if necessary.
9044  if (NeedsWrappingFor(ToType(types->first()), known_function)) {
9045  // Since HWrapReceiver currently cannot actually wrap numbers and
9046  // strings, use the regular CallFunctionStub for method calls to wrap
9047  // the receiver.
9048  // TODO(verwaest): Support creation of value wrappers directly in
9049  // HWrapReceiver.
9050  call = New<HCallFunction>(
9051  function, argument_count, WRAP_AND_CALL);
9052  } else if (TryInlineCall(expr)) {
9053  return;
9054  } else {
9055  call = BuildCallConstantFunction(known_function, argument_count);
9056  }
9057 
9058  } else {
9059  CHECK_ALIVE(VisitExpressions(expr->arguments()));
9060  CallFunctionFlags flags = receiver->type().IsJSObject()
9061  ? NO_CALL_FUNCTION_FLAGS : CALL_AS_METHOD;
9062  call = New<HCallFunction>(function, argument_count, flags);
9063  }
9064  PushArgumentsFromEnvironment(argument_count);
9065 
9066  } else {
9067  VariableProxy* proxy = expr->expression()->AsVariableProxy();
9068  if (proxy != NULL && proxy->var()->is_possibly_eval(isolate())) {
9069  return Bailout(kPossibleDirectCallToEval);
9070  }
9071 
9072  // The function is on the stack in the unoptimized code during
9073  // evaluation of the arguments.
9074  CHECK_ALIVE(VisitForValue(expr->expression()));
9075  HValue* function = Top();
9076  if (expr->global_call()) {
9077  Variable* var = proxy->var();
9078  bool known_global_function = false;
9079  // If there is a global property cell for the name at compile time and
9080  // access check is not enabled, we assume that the function will not change
9081  // and generate optimized code for calling the function.
9082  Handle<GlobalObject> global(current_info()->global_object());
9083  LookupIterator it(global, var->name(),
9084  LookupIterator::OWN_SKIP_INTERCEPTOR);
9085  GlobalPropertyAccess type = LookupGlobalProperty(var, &it, LOAD);
9086  if (type == kUseCell) {
9087  Handle<GlobalObject> global(current_info()->global_object());
9088  known_global_function = expr->ComputeGlobalTarget(global, &it);
9089  }
9090  if (known_global_function) {
9091  Add<HCheckValue>(function, expr->target());
9092 
9093  // Placeholder for the receiver.
9094  Push(graph()->GetConstantUndefined());
9095  CHECK_ALIVE(VisitExpressions(expr->arguments()));
9096 
9097  // Patch the global object on the stack with the expected receiver.
9098  HValue* receiver = ImplicitReceiverFor(function, expr->target());
9099  const int receiver_index = argument_count - 1;
9100  environment()->SetExpressionStackAt(receiver_index, receiver);
9101 
9102  if (TryInlineBuiltinFunctionCall(expr)) {
9103  if (FLAG_trace_inlining) {
9104  PrintF("Inlining builtin ");
9105  expr->target()->ShortPrint();
9106  PrintF("\n");
9107  }
9108  return;
9109  }
9110  if (TryInlineApiFunctionCall(expr, receiver)) return;
9111  if (TryHandleArrayCall(expr, function)) return;
9112  if (TryInlineCall(expr)) return;
9113 
9114  PushArgumentsFromEnvironment(argument_count);
9115  call = BuildCallConstantFunction(expr->target(), argument_count);
9116  } else {
9117  Push(graph()->GetConstantUndefined());
9118  CHECK_ALIVE(VisitExpressions(expr->arguments()));
9119  PushArgumentsFromEnvironment(argument_count);
9120  call = New<HCallFunction>(function, argument_count);
9121  }
9122 
9123  } else if (expr->IsMonomorphic()) {
9124  Add<HCheckValue>(function, expr->target());
9125 
9126  Push(graph()->GetConstantUndefined());
9127  CHECK_ALIVE(VisitExpressions(expr->arguments()));
9128 
9129  HValue* receiver = ImplicitReceiverFor(function, expr->target());
9130  const int receiver_index = argument_count - 1;
9131  environment()->SetExpressionStackAt(receiver_index, receiver);
9132 
9133  if (TryInlineBuiltinFunctionCall(expr)) {
9134  if (FLAG_trace_inlining) {
9135  PrintF("Inlining builtin ");
9136  expr->target()->ShortPrint();
9137  PrintF("\n");
9138  }
9139  return;
9140  }
9141  if (TryInlineApiFunctionCall(expr, receiver)) return;
9142 
9143  if (TryInlineCall(expr)) return;
9144 
9145  call = PreProcessCall(New<HInvokeFunction>(
9146  function, expr->target(), argument_count));
9147 
9148  } else {
9149  Push(graph()->GetConstantUndefined());
9150  CHECK_ALIVE(VisitExpressions(expr->arguments()));
9151  PushArgumentsFromEnvironment(argument_count);
9152  call = New<HCallFunction>(function, argument_count);
9153  }
9154  }
9155 
9156  Drop(1); // Drop the function.
9157  return ast_context()->ReturnInstruction(call, expr->id());
9158 }
9159 
9160 
9161 void HOptimizedGraphBuilder::BuildInlinedCallArray(
9162  Expression* expression,
9163  int argument_count,
9164  Handle<AllocationSite> site) {
9165  DCHECK(!site.is_null());
9166  DCHECK(argument_count >= 0 && argument_count <= 1);
9167  NoObservableSideEffectsScope no_effects(this);
9168 
9169  // We should at least have the constructor on the expression stack.
9170  HValue* constructor = environment()->ExpressionStackAt(argument_count);
9171 
9172  // Register on the site for deoptimization if the transition feedback changes.
9173  AllocationSite::AddDependentCompilationInfo(
9174  site, AllocationSite::TRANSITIONS, top_info());
9175  ElementsKind kind = site->GetElementsKind();
9176  HInstruction* site_instruction = Add<HConstant>(site);
9177 
9178  // In the single constant argument case, we may have to adjust elements kind
9179  // to avoid creating a packed non-empty array.
9180  if (argument_count == 1 && !IsHoleyElementsKind(kind)) {
9181  HValue* argument = environment()->Top();
9182  if (argument->IsConstant()) {
9183  HConstant* constant_argument = HConstant::cast(argument);
9184  DCHECK(constant_argument->HasSmiValue());
9185  int constant_array_size = constant_argument->Integer32Value();
9186  if (constant_array_size != 0) {
9187  kind = GetHoleyElementsKind(kind);
9188  }
9189  }
9190  }
9191 
9192  // Build the array.
9193  JSArrayBuilder array_builder(this,
9194  kind,
9195  site_instruction,
9196  constructor,
9197  DISABLE_ALLOCATION_SITES);
9198  HValue* new_object = argument_count == 0
9199  ? array_builder.AllocateEmptyArray()
9200  : BuildAllocateArrayFromLength(&array_builder, Top());
9201 
9202  int args_to_drop = argument_count + (expression->IsCall() ? 2 : 1);
9203  Drop(args_to_drop);
9204  ast_context()->ReturnValue(new_object);
9205 }
9206 
9207 
9208 // Checks whether allocation using the given constructor can be inlined.
9209 static bool IsAllocationInlineable(Handle<JSFunction> constructor) {
9210  return constructor->has_initial_map() &&
9211  constructor->initial_map()->instance_type() == JS_OBJECT_TYPE &&
9212  constructor->initial_map()->instance_size() < HAllocate::kMaxInlineSize &&
9213  constructor->initial_map()->InitialPropertiesLength() == 0;
9214 }
9215 
9216 
9217 bool HOptimizedGraphBuilder::IsCallArrayInlineable(
9218  int argument_count,
9219  Handle<AllocationSite> site) {
9220  Handle<JSFunction> caller = current_info()->closure();
9221  Handle<JSFunction> target = array_function();
9222  // We should have the function plus array arguments on the environment stack.
9223  DCHECK(environment()->length() >= (argument_count + 1));
9224  DCHECK(!site.is_null());
9225 
9226  bool inline_ok = false;
9227  if (site->CanInlineCall()) {
9228  // We also want to avoid inlining in certain 1 argument scenarios.
9229  if (argument_count == 1) {
9230  HValue* argument = Top();
9231  if (argument->IsConstant()) {
9232  // Do not inline if the constant length argument is not a smi or
9233  // outside the valid range for unrolled loop initialization.
9234  HConstant* constant_argument = HConstant::cast(argument);
9235  if (constant_argument->HasSmiValue()) {
9236  int value = constant_argument->Integer32Value();
9237  inline_ok = value >= 0 && value <= kElementLoopUnrollThreshold;
9238  if (!inline_ok) {
9239  TraceInline(target, caller,
9240  "Constant length outside of valid inlining range.");
9241  }
9242  }
9243  } else {
9244  TraceInline(target, caller,
9245  "Don't inline [new] Array(n) where n isn't constant.");
9246  }
9247  } else if (argument_count == 0) {
9248  inline_ok = true;
9249  } else {
9250  TraceInline(target, caller, "Too many arguments to inline.");
9251  }
9252  } else {
9253  TraceInline(target, caller, "AllocationSite requested no inlining.");
9254  }
9255 
9256  if (inline_ok) {
9257  TraceInline(target, caller, NULL);
9258  }
9259  return inline_ok;
9260 }
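// Taken together with BuildInlinedCallArray above, only very simple calls to
// the Array constructor are inlined. Illustratively (hypothetical JavaScript):
//   new Array()    -> inlined empty-array allocation
//   new Array(8)   -> inlined, length is a small constant smi
//   new Array(n)   -> not inlined, the length is not a compile-time constant
// and the AllocationSite must not have requested that inlining be disabled.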
9261 
9262 
9263 void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
9264  DCHECK(!HasStackOverflow());
9265  DCHECK(current_block() != NULL);
9266  DCHECK(current_block()->HasPredecessor());
9267  if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
9268  int argument_count = expr->arguments()->length() + 1; // Plus constructor.
9269  Factory* factory = isolate()->factory();
9270 
9271  // The constructor function is on the stack in the unoptimized code
9272  // during evaluation of the arguments.
9273  CHECK_ALIVE(VisitForValue(expr->expression()));
9274  HValue* function = Top();
9275  CHECK_ALIVE(VisitExpressions(expr->arguments()));
9276 
9277  if (FLAG_inline_construct &&
9278  expr->IsMonomorphic() &&
9279  IsAllocationInlineable(expr->target())) {
9280  Handle<JSFunction> constructor = expr->target();
9281  HValue* check = Add<HCheckValue>(function, constructor);
9282 
9283  // Force completion of inobject slack tracking before generating
9284  // allocation code to finalize instance size.
9285  if (constructor->IsInobjectSlackTrackingInProgress()) {
9286  constructor->CompleteInobjectSlackTracking();
9287  }
9288 
9289  // Calculate instance size from initial map of constructor.
9290  DCHECK(constructor->has_initial_map());
9291  Handle<Map> initial_map(constructor->initial_map());
9292  int instance_size = initial_map->instance_size();
9293  DCHECK(initial_map->InitialPropertiesLength() == 0);
9294 
9295  // Allocate an instance of the implicit receiver object.
9296  HValue* size_in_bytes = Add<HConstant>(instance_size);
9297  HAllocationMode allocation_mode;
9298  if (FLAG_pretenuring_call_new) {
9299  if (FLAG_allocation_site_pretenuring) {
9300  // Try to use pretenuring feedback.
9301  Handle<AllocationSite> allocation_site = expr->allocation_site();
9302  allocation_mode = HAllocationMode(allocation_site);
9303  // Take a dependency on allocation site.
9304  AllocationSite::AddDependentCompilationInfo(allocation_site,
9305  AllocationSite::TENURING,
9306  top_info());
9307  }
9308  }
9309 
9310  HAllocate* receiver = BuildAllocate(
9311  size_in_bytes, HType::JSObject(), JS_OBJECT_TYPE, allocation_mode);
9312  receiver->set_known_initial_map(initial_map);
9313 
9314  // Initialize map and fields of the newly allocated object.
9315  { NoObservableSideEffectsScope no_effects(this);
9316  DCHECK(initial_map->instance_type() == JS_OBJECT_TYPE);
9317  Add<HStoreNamedField>(receiver,
9318  HObjectAccess::ForMapAndOffset(initial_map, JSObject::kMapOffset),
9319  Add<HConstant>(initial_map));
9320  HValue* empty_fixed_array = Add<HConstant>(factory->empty_fixed_array());
9321  Add<HStoreNamedField>(receiver,
9322  HObjectAccess::ForMapAndOffset(initial_map,
9323  JSObject::kPropertiesOffset),
9324  empty_fixed_array);
9325  Add<HStoreNamedField>(receiver,
9326  HObjectAccess::ForMapAndOffset(initial_map,
9327  JSObject::kElementsOffset),
9328  empty_fixed_array);
9329  if (initial_map->inobject_properties() != 0) {
9330  HConstant* undefined = graph()->GetConstantUndefined();
9331  for (int i = 0; i < initial_map->inobject_properties(); i++) {
9332  int property_offset = initial_map->GetInObjectPropertyOffset(i);
9333  Add<HStoreNamedField>(receiver,
9334  HObjectAccess::ForMapAndOffset(initial_map, property_offset),
9335  undefined);
9336  }
9337  }
9338  }
9339 
9340  // Replace the constructor function with a newly allocated receiver using
9341  // the index of the receiver from the top of the expression stack.
9342  const int receiver_index = argument_count - 1;
9343  DCHECK(environment()->ExpressionStackAt(receiver_index) == function);
9344  environment()->SetExpressionStackAt(receiver_index, receiver);
9345 
9346  if (TryInlineConstruct(expr, receiver)) {
9347  // Inlining worked, add a dependency on the initial map to make sure that
9348  // this code is deoptimized whenever the initial map of the constructor
9349  // changes.
9350  Map::AddDependentCompilationInfo(
9351  initial_map, DependentCode::kInitialMapChangedGroup, top_info());
9352  return;
9353  }
9354 
9355  // TODO(mstarzinger): For now we remove the previous HAllocate and all
9356  // corresponding instructions and instead add HPushArguments for the
9357  // arguments in case inlining failed. What we actually should do is for
9358  // inlining to try to build a subgraph without mutating the parent graph.
9359  HInstruction* instr = current_block()->last();
9360  do {
9361  HInstruction* prev_instr = instr->previous();
9362  instr->DeleteAndReplaceWith(NULL);
9363  instr = prev_instr;
9364  } while (instr != check);
9365  environment()->SetExpressionStackAt(receiver_index, function);
9366  HInstruction* call =
9367  PreProcessCall(New<HCallNew>(function, argument_count));
9368  return ast_context()->ReturnInstruction(call, expr->id());
9369  } else {
9370  // The constructor function is both an operand to the instruction and an
9371  // argument to the construct call.
9372  if (TryHandleArrayCallNew(expr, function)) return;
9373 
9374  HInstruction* call =
9375  PreProcessCall(New<HCallNew>(function, argument_count));
9376  return ast_context()->ReturnInstruction(call, expr->id());
9377  }
9378 }
9379 
9380 
9381 // Support for generating inlined runtime functions.
9382 
9383 // Lookup table for generators for runtime calls that are generated inline.
9384 // Elements of the table are member pointers to functions of
9385 // HOptimizedGraphBuilder.
9386 #define INLINE_FUNCTION_GENERATOR_ADDRESS(Name, argc, ressize) \
9387  &HOptimizedGraphBuilder::Generate##Name,
9388 
9389 const HOptimizedGraphBuilder::InlineFunctionGenerator
9390  HOptimizedGraphBuilder::kInlineFunctionGenerators[] = {
9391  INLINE_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
9392  INLINE_OPTIMIZED_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
9393 };
9394 #undef INLINE_FUNCTION_GENERATOR_ADDRESS
9395 
9396 
9397 template <class ViewClass>
9398 void HGraphBuilder::BuildArrayBufferViewInitialization(
9399  HValue* obj,
9400  HValue* buffer,
9401  HValue* byte_offset,
9402  HValue* byte_length) {
9403 
9404  for (int offset = ViewClass::kSize;
9405  offset < ViewClass::kSizeWithInternalFields;
9406  offset += kPointerSize) {
9407  Add<HStoreNamedField>(obj,
9408  HObjectAccess::ForObservableJSObjectOffset(offset),
9409  graph()->GetConstant0());
9410  }
9411 
9412  Add<HStoreNamedField>(
9413  obj,
9414  HObjectAccess::ForJSArrayBufferViewByteOffset(),
9415  byte_offset);
9416  Add<HStoreNamedField>(
9417  obj,
9418  HObjectAccess::ForJSArrayBufferViewByteLength(),
9419  byte_length);
9420 
9421  if (buffer != NULL) {
9422  Add<HStoreNamedField>(
9423  obj,
9424  HObjectAccess::ForJSArrayBufferViewBuffer(), buffer);
9425  HObjectAccess weak_first_view_access =
9426  HObjectAccess::ForJSArrayBufferWeakFirstView();
9427  Add<HStoreNamedField>(obj,
9428  HObjectAccess::ForJSArrayBufferViewWeakNext(),
9429  Add<HLoadNamedField>(buffer,
9430  static_cast<HValue*>(NULL),
9431  weak_first_view_access));
9432  Add<HStoreNamedField>(buffer, weak_first_view_access, obj);
9433  } else {
9434  Add<HStoreNamedField>(
9435  obj,
9436  HObjectAccess::ForJSArrayBufferViewBuffer(),
9437  Add<HConstant>(static_cast<int32_t>(0)));
9438  Add<HStoreNamedField>(obj,
9439  HObjectAccess::ForJSArrayBufferViewWeakNext(),
9440  graph()->GetConstantUndefined());
9441  }
9442 }
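// Besides storing byteOffset and byteLength, the helper above links the new
// view into its buffer's weak list of views (weak_first_view / weak_next),
// which lets the runtime walk every view of a buffer, e.g. when the buffer is
// neutered. Views created without a backing buffer get a zeroed buffer field
// and an undefined weak_next instead.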
9443 
9444 
9445 void HOptimizedGraphBuilder::GenerateDataViewInitialize(
9446  CallRuntime* expr) {
9447  ZoneList<Expression*>* arguments = expr->arguments();
9448 
9449  DCHECK(arguments->length() == 4);
9450  CHECK_ALIVE(VisitForValue(arguments->at(0)));
9451  HValue* obj = Pop();
9452 
9453  CHECK_ALIVE(VisitForValue(arguments->at(1)));
9454  HValue* buffer = Pop();
9455 
9456  CHECK_ALIVE(VisitForValue(arguments->at(2)));
9457  HValue* byte_offset = Pop();
9458 
9459  CHECK_ALIVE(VisitForValue(arguments->at(3)));
9460  HValue* byte_length = Pop();
9461 
9462  {
9463  NoObservableSideEffectsScope scope(this);
9464  BuildArrayBufferViewInitialization<JSDataView>(
9465  obj, buffer, byte_offset, byte_length);
9466  }
9467 }
9468 
9469 
9470 static Handle<Map> TypedArrayMap(Isolate* isolate,
9471  ExternalArrayType array_type,
9472  ElementsKind target_kind) {
9473  Handle<Context> native_context = isolate->native_context();
9474  Handle<JSFunction> fun;
9475  switch (array_type) {
9476 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
9477  case kExternal##Type##Array: \
9478  fun = Handle<JSFunction>(native_context->type##_array_fun()); \
9479  break;
9480 
9481  TYPED_ARRAYS(TYPED_ARRAY_CASE)
9482 #undef TYPED_ARRAY_CASE
9483  }
9484  Handle<Map> map(fun->initial_map());
9485  return Map::AsElementsKind(map, target_kind);
9486 }
9487 
9488 
9489 HValue* HOptimizedGraphBuilder::BuildAllocateExternalElements(
9490  ExternalArrayType array_type,
9491  bool is_zero_byte_offset,
9492  HValue* buffer, HValue* byte_offset, HValue* length) {
9493  Handle<Map> external_array_map(
9494  isolate()->heap()->MapForExternalArrayType(array_type));
9495 
9496  // The HForceRepresentation is to prevent possible deopt on int-smi
9497  // conversion after allocation but before the new object fields are set.
9498  length = AddUncasted<HForceRepresentation>(length, Representation::Smi());
9499  HValue* elements =
9500  Add<HAllocate>(
9501  Add<HConstant>(ExternalArray::kAlignedSize),
9502  HType::HeapObject(),
9503  NOT_TENURED,
9504  external_array_map->instance_type());
9505 
9506  AddStoreMapConstant(elements, external_array_map);
9507  Add<HStoreNamedField>(elements,
9508  HObjectAccess::ForFixedArrayLength(), length);
9509 
9510  HValue* backing_store = Add<HLoadNamedField>(
9511  buffer, static_cast<HValue*>(NULL),
9512  HObjectAccess::ForJSArrayBufferBackingStore());
9513 
9514  HValue* typed_array_start;
9515  if (is_zero_byte_offset) {
9516  typed_array_start = backing_store;
9517  } else {
9518  HInstruction* external_pointer =
9519  AddUncasted<HAdd>(backing_store, byte_offset);
9520  // Arguments are checked prior to call to TypedArrayInitialize,
9521  // including byte_offset.
9522  external_pointer->ClearFlag(HValue::kCanOverflow);
9523  typed_array_start = external_pointer;
9524  }
9525 
9526  Add<HStoreNamedField>(elements,
9527  HObjectAccess::ForExternalArrayExternalPointer(),
9528  typed_array_start);
9529 
9530  return elements;
9531 }
9532 
9533 
9534 HValue* HOptimizedGraphBuilder::BuildAllocateFixedTypedArray(
9535  ExternalArrayType array_type, size_t element_size,
9536  ElementsKind fixed_elements_kind,
9537  HValue* byte_length, HValue* length) {
9538  STATIC_ASSERT(
9539  (FixedTypedArrayBase::kHeaderSize & kObjectAlignmentMask) == 0);
9540  HValue* total_size;
9541 
9542  // If the fixed array's elements are not aligned to the object alignment,
9543  // we need to align the whole array to the object alignment.
9544  if (element_size % kObjectAlignment != 0) {
9545  total_size = BuildObjectSizeAlignment(
9546  byte_length, FixedTypedArrayBase::kHeaderSize);
9547  } else {
9548  total_size = AddUncasted<HAdd>(byte_length,
9549  Add<HConstant>(FixedTypedArrayBase::kHeaderSize));
9550  total_size->ClearFlag(HValue::kCanOverflow);
9551  }
9552 
9553  // The HForceRepresentation is to prevent possible deopt on int-smi
9554  // conversion after allocation but before the new object fields are set.
9555  length = AddUncasted<HForceRepresentation>(length, Representation::Smi());
9556  Handle<Map> fixed_typed_array_map(
9557  isolate()->heap()->MapForFixedTypedArray(array_type));
9558  HValue* elements =
9559  Add<HAllocate>(total_size, HType::HeapObject(),
9560  NOT_TENURED, fixed_typed_array_map->instance_type());
9561  AddStoreMapConstant(elements, fixed_typed_array_map);
9562 
9563  Add<HStoreNamedField>(elements,
9564  HObjectAccess::ForFixedArrayLength(),
9565  length);
9566 
9567  HValue* filler = Add<HConstant>(static_cast<int32_t>(0));
9568 
9569  {
9570  LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
9571 
9572  HValue* key = builder.BeginBody(
9573  Add<HConstant>(static_cast<int32_t>(0)),
9574  length, Token::LT);
9575  Add<HStoreKeyed>(elements, key, filler, fixed_elements_kind);
9576 
9577  builder.EndBody();
9578  }
9579  return elements;
9580 }
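// The loop above stores zero into every element of the freshly allocated
// backing store, so a new fixed typed array starts out fully zero-filled
// regardless of its length.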
9581 
9582 
9583 void HOptimizedGraphBuilder::GenerateTypedArrayInitialize(
9584  CallRuntime* expr) {
9585  ZoneList<Expression*>* arguments = expr->arguments();
9586 
9587  static const int kObjectArg = 0;
9588  static const int kArrayIdArg = 1;
9589  static const int kBufferArg = 2;
9590  static const int kByteOffsetArg = 3;
9591  static const int kByteLengthArg = 4;
9592  static const int kArgsLength = 5;
9593  DCHECK(arguments->length() == kArgsLength);
9594 
9595 
9596  CHECK_ALIVE(VisitForValue(arguments->at(kObjectArg)));
9597  HValue* obj = Pop();
9598 
9599  if (!arguments->at(kArrayIdArg)->IsLiteral()) {
9600  // This should never happen in real use, but can happen when fuzzing.
9601  // Just bail out.
9602  Bailout(kNeedSmiLiteral);
9603  return;
9604  }
9605  Handle<Object> value =
9606  static_cast<Literal*>(arguments->at(kArrayIdArg))->value();
9607  if (!value->IsSmi()) {
9608  // This should never happen in real use, but can happen when fuzzing.
9609  // Just bail out.
9610  Bailout(kNeedSmiLiteral);
9611  return;
9612  }
9613  int array_id = Smi::cast(*value)->value();
9614 
9615  HValue* buffer;
9616  if (!arguments->at(kBufferArg)->IsNullLiteral()) {
9617  CHECK_ALIVE(VisitForValue(arguments->at(kBufferArg)));
9618  buffer = Pop();
9619  } else {
9620  buffer = NULL;
9621  }
9622 
9623  HValue* byte_offset;
9624  bool is_zero_byte_offset;
9625 
9626  if (arguments->at(kByteOffsetArg)->IsLiteral()
9627  && Smi::FromInt(0) ==
9628  *static_cast<Literal*>(arguments->at(kByteOffsetArg))->value()) {
9629  byte_offset = Add<HConstant>(static_cast<int32_t>(0));
9630  is_zero_byte_offset = true;
9631  } else {
9632  CHECK_ALIVE(VisitForValue(arguments->at(kByteOffsetArg)));
9633  byte_offset = Pop();
9634  is_zero_byte_offset = false;
9635  DCHECK(buffer != NULL);
9636  }
9637 
9638  CHECK_ALIVE(VisitForValue(arguments->at(kByteLengthArg)));
9639  HValue* byte_length = Pop();
9640 
9641  NoObservableSideEffectsScope scope(this);
9642  IfBuilder byte_offset_smi(this);
9643 
9644  if (!is_zero_byte_offset) {
9645  byte_offset_smi.If<HIsSmiAndBranch>(byte_offset);
9646  byte_offset_smi.Then();
9647  }
9648 
9649  ExternalArrayType array_type =
9650  kExternalInt8Array; // Bogus initialization.
9651  size_t element_size = 1; // Bogus initialization.
9652  ElementsKind external_elements_kind = // Bogus initialization.
9653  EXTERNAL_INT8_ELEMENTS;
9654  ElementsKind fixed_elements_kind = // Bogus initialization.
9655  INT8_ELEMENTS;
9656  Runtime::ArrayIdToTypeAndSize(array_id,
9657  &array_type,
9658  &external_elements_kind,
9659  &fixed_elements_kind,
9660  &element_size);
9661 
9662 
9663  { // byte_offset is Smi.
9664  BuildArrayBufferViewInitialization<JSTypedArray>(
9665  obj, buffer, byte_offset, byte_length);
9666 
9667 
9668  HInstruction* length = AddUncasted<HDiv>(byte_length,
9669  Add<HConstant>(static_cast<int32_t>(element_size)));
9670 
9671  Add<HStoreNamedField>(obj,
9672  HObjectAccess::ForJSTypedArrayLength(),
9673  length);
9674 
9675  HValue* elements;
9676  if (buffer != NULL) {
9677  elements = BuildAllocateExternalElements(
9678  array_type, is_zero_byte_offset, buffer, byte_offset, length);
9679  Handle<Map> obj_map = TypedArrayMap(
9680  isolate(), array_type, external_elements_kind);
9681  AddStoreMapConstant(obj, obj_map);
9682  } else {
9683  DCHECK(is_zero_byte_offset);
9684  elements = BuildAllocateFixedTypedArray(
9685  array_type, element_size, fixed_elements_kind,
9686  byte_length, length);
9687  }
9688  Add<HStoreNamedField>(
9689  obj, HObjectAccess::ForElementsPointer(), elements);
9690  }
9691 
9692  if (!is_zero_byte_offset) {
9693  byte_offset_smi.Else();
9694  { // byte_offset is not Smi.
9695  Push(obj);
9696  CHECK_ALIVE(VisitForValue(arguments->at(kArrayIdArg)));
9697  Push(buffer);
9698  Push(byte_offset);
9699  Push(byte_length);
9700  PushArgumentsFromEnvironment(kArgsLength);
9701  Add<HCallRuntime>(expr->name(), expr->function(), kArgsLength);
9702  }
9703  }
9704  byte_offset_smi.End();
9705 }
9706 
9707 
9708 void HOptimizedGraphBuilder::GenerateMaxSmi(CallRuntime* expr) {
9709  DCHECK(expr->arguments()->length() == 0);
9710  HConstant* max_smi = New<HConstant>(static_cast<int32_t>(Smi::kMaxValue));
9711  return ast_context()->ReturnInstruction(max_smi, expr->id());
9712 }
9713 
9714 
9715 void HOptimizedGraphBuilder::GenerateTypedArrayMaxSizeInHeap(
9716  CallRuntime* expr) {
9717  DCHECK(expr->arguments()->length() == 0);
9718  HConstant* result = New<HConstant>(static_cast<int32_t>(
9719  FLAG_typed_array_max_size_in_heap));
9720  return ast_context()->ReturnInstruction(result, expr->id());
9721 }
9722 
9723 
9724 void HOptimizedGraphBuilder::GenerateArrayBufferGetByteLength(
9725  CallRuntime* expr) {
9726  DCHECK(expr->arguments()->length() == 1);
9727  CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
9728  HValue* buffer = Pop();
9729  HInstruction* result = New<HLoadNamedField>(
9730  buffer,
9731  static_cast<HValue*>(NULL),
9732  HObjectAccess::ForJSArrayBufferByteLength());
9733  return ast_context()->ReturnInstruction(result, expr->id());
9734 }
9735 
9736 
9737 void HOptimizedGraphBuilder::GenerateArrayBufferViewGetByteLength(
9738  CallRuntime* expr) {
9739  DCHECK(expr->arguments()->length() == 1);
9740  CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
9741  HValue* buffer = Pop();
9742  HInstruction* result = New<HLoadNamedField>(
9743  buffer,
9744  static_cast<HValue*>(NULL),
9745  HObjectAccess::ForJSArrayBufferViewByteLength());
9746  return ast_context()->ReturnInstruction(result, expr->id());
9747 }
9748 
9749 
9750 void HOptimizedGraphBuilder::GenerateArrayBufferViewGetByteOffset(
9751  CallRuntime* expr) {
9752  DCHECK(expr->arguments()->length() == 1);
9753  CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
9754  HValue* buffer = Pop();
9755  HInstruction* result = New<HLoadNamedField>(
9756  buffer,
9757  static_cast<HValue*>(NULL),
9758  HObjectAccess::ForJSArrayBufferViewByteOffset());
9759  return ast_context()->ReturnInstruction(result, expr->id());
9760 }
9761 
9762 
9763 void HOptimizedGraphBuilder::GenerateTypedArrayGetLength(
9764  CallRuntime* expr) {
9765  DCHECK(expr->arguments()->length() == 1);
9766  CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
9767  HValue* buffer = Pop();
9768  HInstruction* result = New<HLoadNamedField>(
9769  buffer,
9770  static_cast<HValue*>(NULL),
9771  HObjectAccess::ForJSTypedArrayLength());
9772  return ast_context()->ReturnInstruction(result, expr->id());
9773 }
9774 
9775 
9776 void HOptimizedGraphBuilder::VisitCallRuntime(CallRuntime* expr) {
9777  DCHECK(!HasStackOverflow());
9778  DCHECK(current_block() != NULL);
9779  DCHECK(current_block()->HasPredecessor());
9780  if (expr->is_jsruntime()) {
9781  return Bailout(kCallToAJavaScriptRuntimeFunction);
9782  }
9783 
9784  const Runtime::Function* function = expr->function();
9785  DCHECK(function != NULL);
9786 
9787  if (function->intrinsic_type == Runtime::INLINE ||
9788  function->intrinsic_type == Runtime::INLINE_OPTIMIZED) {
9789  DCHECK(expr->name()->length() > 0);
9790  DCHECK(expr->name()->Get(0) == '_');
9791  // Call to an inline function.
9792  int lookup_index = static_cast<int>(function->function_id) -
9793  static_cast<int>(Runtime::kFirstInlineFunction);
9794  DCHECK(lookup_index >= 0);
9795  DCHECK(static_cast<size_t>(lookup_index) <
9796  arraysize(kInlineFunctionGenerators));
9797  InlineFunctionGenerator generator = kInlineFunctionGenerators[lookup_index];
9798 
9799  // Call the inline code generator using the pointer-to-member.
9800  (this->*generator)(expr);
9801  } else {
9802  DCHECK(function->intrinsic_type == Runtime::RUNTIME);
9803  Handle<String> name = expr->name();
9804  int argument_count = expr->arguments()->length();
9805  CHECK_ALIVE(VisitExpressions(expr->arguments()));
9806  PushArgumentsFromEnvironment(argument_count);
9807  HCallRuntime* call = New<HCallRuntime>(name, function,
9808  argument_count);
9809  return ast_context()->ReturnInstruction(call, expr->id());
9810  }
9811 }
9812 
9813 
9814 void HOptimizedGraphBuilder::VisitUnaryOperation(UnaryOperation* expr) {
9815  DCHECK(!HasStackOverflow());
9816  DCHECK(current_block() != NULL);
9817  DCHECK(current_block()->HasPredecessor());
9818  switch (expr->op()) {
9819  case Token::DELETE: return VisitDelete(expr);
9820  case Token::VOID: return VisitVoid(expr);
9821  case Token::TYPEOF: return VisitTypeof(expr);
9822  case Token::NOT: return VisitNot(expr);
9823  default: UNREACHABLE();
9824  }
9825 }
9826 
9827 
9828 void HOptimizedGraphBuilder::VisitDelete(UnaryOperation* expr) {
9829  Property* prop = expr->expression()->AsProperty();
9830  VariableProxy* proxy = expr->expression()->AsVariableProxy();
9831  if (prop != NULL) {
9832  CHECK_ALIVE(VisitForValue(prop->obj()));
9833  CHECK_ALIVE(VisitForValue(prop->key()));
9834  HValue* key = Pop();
9835  HValue* obj = Pop();
9836  HValue* function = AddLoadJSBuiltin(Builtins::DELETE);
9837  Add<HPushArguments>(obj, key, Add<HConstant>(function_strict_mode()));
9838  // TODO(olivf) InvokeFunction produces a check for the parameter count,
9839  // even though we are certain to pass the correct number of arguments here.
9840  HInstruction* instr = New<HInvokeFunction>(function, 3);
9841  return ast_context()->ReturnInstruction(instr, expr->id());
9842  } else if (proxy != NULL) {
9843  Variable* var = proxy->var();
9844  if (var->IsUnallocated()) {
9845  Bailout(kDeleteWithGlobalVariable);
9846  } else if (var->IsStackAllocated() || var->IsContextSlot()) {
9847  // Result of deleting non-global variables is false. 'this' is not
9848  // really a variable, though we implement it as one. The
9849  // subexpression does not have side effects.
9850  HValue* value = var->is_this()
9851  ? graph()->GetConstantTrue()
9852  : graph()->GetConstantFalse();
9853  return ast_context()->ReturnValue(value);
9854  } else {
9855  Bailout(kDeleteWithNonGlobalVariable);
9856  }
9857  } else {
9858  // Result of deleting non-property, non-variable reference is true.
9859  // Evaluate the subexpression for side effects.
9860  CHECK_ALIVE(VisitForEffect(expr->expression()));
9861  return ast_context()->ReturnValue(graph()->GetConstantTrue());
9862  }
9863 }
9864 
9865 
9866 void HOptimizedGraphBuilder::VisitVoid(UnaryOperation* expr) {
9867  CHECK_ALIVE(VisitForEffect(expr->expression()));
9868  return ast_context()->ReturnValue(graph()->GetConstantUndefined());
9869 }
9870 
9871 
9872 void HOptimizedGraphBuilder::VisitTypeof(UnaryOperation* expr) {
9873  CHECK_ALIVE(VisitForTypeOf(expr->expression()));
9874  HValue* value = Pop();
9875  HInstruction* instr = New<HTypeof>(value);
9876  return ast_context()->ReturnInstruction(instr, expr->id());
9877 }
9878 
9879 
9880 void HOptimizedGraphBuilder::VisitNot(UnaryOperation* expr) {
9881  if (ast_context()->IsTest()) {
9882  TestContext* context = TestContext::cast(ast_context());
9883  VisitForControl(expr->expression(),
9884  context->if_false(),
9885  context->if_true());
9886  return;
9887  }
9888 
9889  if (ast_context()->IsEffect()) {
9890  VisitForEffect(expr->expression());
9891  return;
9892  }
9893 
9894  DCHECK(ast_context()->IsValue());
9895  HBasicBlock* materialize_false = graph()->CreateBasicBlock();
9896  HBasicBlock* materialize_true = graph()->CreateBasicBlock();
9897  CHECK_BAILOUT(VisitForControl(expr->expression(),
9898  materialize_false,
9899  materialize_true));
9900 
9901  if (materialize_false->HasPredecessor()) {
9902  materialize_false->SetJoinId(expr->MaterializeFalseId());
9903  set_current_block(materialize_false);
9904  Push(graph()->GetConstantFalse());
9905  } else {
9906  materialize_false = NULL;
9907  }
9908 
9909  if (materialize_true->HasPredecessor()) {
9910  materialize_true->SetJoinId(expr->MaterializeTrueId());
9911  set_current_block(materialize_true);
9912  Push(graph()->GetConstantTrue());
9913  } else {
9914  materialize_true = NULL;
9915  }
9916 
9917  HBasicBlock* join =
9918  CreateJoin(materialize_false, materialize_true, expr->id());
9919  set_current_block(join);
9920  if (join != NULL) return ast_context()->ReturnValue(Pop());
9921 }
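// Illustrative sketch (not part of the original hydrogen.cc): in a value
// context, !x is lowered to a branch that materializes false and true in
// separate blocks and joins them; observationally that matches the plain
// negation below. The variable names are hypothetical.
#include <cassert>

int main() {
  int x = 3;
  bool materialized = x ? false : true;  // materialize_false / materialize_true
  assert(materialized == !x);
  return 0;
}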
9922 
9923 
9924 HInstruction* HOptimizedGraphBuilder::BuildIncrement(
9925  bool returns_original_input,
9926  CountOperation* expr) {
9927  // The input to the count operation is on top of the expression stack.
9928  Representation rep = Representation::FromType(expr->type());
9929  if (rep.IsNone() || rep.IsTagged()) {
9930  rep = Representation::Smi();
9931  }
9932 
9933  if (returns_original_input) {
9934  // We need an explicit HValue representing ToNumber(input). The
9935  // actual HChange instruction we need is (sometimes) added in a later
9936  // phase, so it is not available now to be used as an input to HAdd and
9937  // as the return value.
9938  HInstruction* number_input = AddUncasted<HForceRepresentation>(Pop(), rep);
9939  if (!rep.IsDouble()) {
9941  number_input->SetFlag(HInstruction::kCannotBeTagged);
9942  }
9943  Push(number_input);
9944  }
9945 
9946  // The addition has no side effects, so we do not need
9947  // to simulate the expression stack after this instruction.
9948  // Any later failures deopt to the load of the input or earlier.
9949  HConstant* delta = (expr->op() == Token::INC)
9950  ? graph()->GetConstant1()
9951  : graph()->GetConstantMinus1();
9952  HInstruction* instr = AddUncasted<HAdd>(Top(), delta);
9953  if (instr->IsAdd()) {
9954  HAdd* add = HAdd::cast(instr);
9955  add->set_observed_input_representation(1, rep);
9956  add->set_observed_input_representation(2, Representation::Smi());
9957  }
9959  instr->ClearAllSideEffects();
9960  return instr;
9961 }
9962 
9963 
9964 void HOptimizedGraphBuilder::BuildStoreForEffect(Expression* expr,
9965  Property* prop,
9966  BailoutId ast_id,
9967  BailoutId return_id,
9968  HValue* object,
9969  HValue* key,
9970  HValue* value) {
9971  EffectContext for_effect(this);
9972  Push(object);
9973  if (key != NULL) Push(key);
9974  Push(value);
9975  BuildStore(expr, prop, ast_id, return_id);
9976 }
9977 
9978 
9979 void HOptimizedGraphBuilder::VisitCountOperation(CountOperation* expr) {
9980  DCHECK(!HasStackOverflow());
9981  DCHECK(current_block() != NULL);
9982  DCHECK(current_block()->HasPredecessor());
9983  if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
9984  Expression* target = expr->expression();
9985  VariableProxy* proxy = target->AsVariableProxy();
9986  Property* prop = target->AsProperty();
9987  if (proxy == NULL && prop == NULL) {
9988  return Bailout(kInvalidLhsInCountOperation);
9989  }
9990 
9991  // Match the full code generator stack by simulating an extra stack
9992  // element for postfix operations in a non-effect context. The return
9993  // value is ToNumber(input).
9994  bool returns_original_input =
9995  expr->is_postfix() && !ast_context()->IsEffect();
9996  HValue* input = NULL; // ToNumber(original_input).
9997  HValue* after = NULL; // The result after incrementing or decrementing.
9998 
9999  if (proxy != NULL) {
10000  Variable* var = proxy->var();
10001  if (var->mode() == CONST_LEGACY) {
10002  return Bailout(kUnsupportedCountOperationWithConst);
10003  }
10004  // Argument of the count operation is a variable, not a property.
10005  DCHECK(prop == NULL);
10006  CHECK_ALIVE(VisitForValue(target));
10007 
10008  after = BuildIncrement(returns_original_input, expr);
10009  input = returns_original_input ? Top() : Pop();
10010  Push(after);
10011 
10012  switch (var->location()) {
10013  case Variable::UNALLOCATED:
10014  HandleGlobalVariableAssignment(var,
10015  after,
10016  expr->AssignmentId());
10017  break;
10018 
10019  case Variable::PARAMETER:
10020  case Variable::LOCAL:
10021  BindIfLive(var, after);
10022  break;
10023 
10024  case Variable::CONTEXT: {
10025  // Bail out if we try to mutate a parameter value in a function
10026  // using the arguments object. We do not (yet) correctly handle the
10027  // arguments property of the function.
10028  if (current_info()->scope()->arguments() != NULL) {
10029  // Parameters will rewrite to context slots. We have no direct
10030  // way to detect that the variable is a parameter so we use a
10031  // linear search of the parameter list.
10032  int count = current_info()->scope()->num_parameters();
10033  for (int i = 0; i < count; ++i) {
10034  if (var == current_info()->scope()->parameter(i)) {
10035  return Bailout(kAssignmentToParameterInArgumentsObject);
10036  }
10037  }
10038  }
10039 
10040  HValue* context = BuildContextChainWalk(var);
10041  HStoreContextSlot::Mode mode = IsLexicalVariableMode(var->mode())
10042  ? HStoreContextSlot::kCheckDeoptimize : HStoreContextSlot::kNoCheck;
10043  HStoreContextSlot* instr = Add<HStoreContextSlot>(context, var->index(),
10044  mode, after);
10045  if (instr->HasObservableSideEffects()) {
10046  Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
10047  }
10048  break;
10049  }
10050 
10051  case Variable::LOOKUP:
10052  return Bailout(kLookupVariableInCountOperation);
10053  }
10054 
10055  Drop(returns_original_input ? 2 : 1);
10056  return ast_context()->ReturnValue(expr->is_postfix() ? input : after);
10057  }
10058 
10059  // Argument of the count operation is a property.
10060  DCHECK(prop != NULL);
10061  if (returns_original_input) Push(graph()->GetConstantUndefined());
10062 
10063  CHECK_ALIVE(VisitForValue(prop->obj()));
10064  HValue* object = Top();
10065 
10066  HValue* key = NULL;
10067  if (!prop->key()->IsPropertyName() || prop->IsStringAccess()) {
10068  CHECK_ALIVE(VisitForValue(prop->key()));
10069  key = Top();
10070  }
10071 
10072  CHECK_ALIVE(PushLoad(prop, object, key));
10073 
10074  after = BuildIncrement(returns_original_input, expr);
10075 
10076  if (returns_original_input) {
10077  input = Pop();
10078  // Drop object and key to push it again in the effect context below.
10079  Drop(key == NULL ? 1 : 2);
10080  environment()->SetExpressionStackAt(0, input);
10081  CHECK_ALIVE(BuildStoreForEffect(
10082  expr, prop, expr->id(), expr->AssignmentId(), object, key, after));
10083  return ast_context()->ReturnValue(Pop());
10084  }
10085 
10086  environment()->SetExpressionStackAt(0, after);
10087  return BuildStore(expr, prop, expr->id(), expr->AssignmentId());
10088 }
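// Illustrative sketch (not part of the original hydrogen.cc): a postfix count
// operation returns ToNumber(input) while the stored value is input + delta
// (delta is +1 for INC and -1 for DEC). The concrete values below are
// hypothetical.
#include <cassert>

int main() {
  double input = 4;         // assume ToNumber("4") already yielded 4
  double returned = input;  // postfix result is ToNumber(input)
  double after = input + 1; // BuildIncrement adds the delta
  assert(returned == 4 && after == 5);
  return 0;
}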
10089 
10090 
10091 HInstruction* HOptimizedGraphBuilder::BuildStringCharCodeAt(
10092  HValue* string,
10093  HValue* index) {
10094  if (string->IsConstant() && index->IsConstant()) {
10095  HConstant* c_string = HConstant::cast(string);
10096  HConstant* c_index = HConstant::cast(index);
10097  if (c_string->HasStringValue() && c_index->HasNumberValue()) {
10098  int32_t i = c_index->NumberValueAsInteger32();
10099  Handle<String> s = c_string->StringValue();
10100  if (i < 0 || i >= s->length()) {
10101  return New<HConstant>(base::OS::nan_value());
10102  }
10103  return New<HConstant>(s->Get(i));
10104  }
10105  }
10106  string = BuildCheckString(string);
10107  index = Add<HBoundsCheck>(index, AddLoadStringLength(string));
10108  return New<HStringCharCodeAt>(string, index);
10109 }
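// Illustrative sketch (not part of the original hydrogen.cc): the
// constant-folding path above turns "abc".charCodeAt(1) into the constant 98
// and an out-of-range index into NaN. CharCodeAtConstant is a hypothetical
// helper used only for this example.
#include <cassert>
#include <cmath>
#include <string>

static double CharCodeAtConstant(const std::string& s, int i) {
  if (i < 0 || i >= static_cast<int>(s.length())) return std::nan("");
  return static_cast<unsigned char>(s[i]);
}

int main() {
  assert(CharCodeAtConstant("abc", 1) == 98.0);
  assert(std::isnan(CharCodeAtConstant("abc", 7)));
  return 0;
}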
10110 
10111 
10112 // Checks whether the given shift amounts have one of the following forms:
10113 // constants N1 and N2 with N1 + N2 == 32, or (sa) and (32 - sa).
10114 static bool ShiftAmountsAllowReplaceByRotate(HValue* sa,
10115  HValue* const32_minus_sa) {
10116  if (sa->IsConstant() && const32_minus_sa->IsConstant()) {
10117  const HConstant* c1 = HConstant::cast(sa);
10118  const HConstant* c2 = HConstant::cast(const32_minus_sa);
10119  return c1->HasInteger32Value() && c2->HasInteger32Value() &&
10120  (c1->Integer32Value() + c2->Integer32Value() == 32);
10121  }
10122  if (!const32_minus_sa->IsSub()) return false;
10123  HSub* sub = HSub::cast(const32_minus_sa);
10124  return sub->left()->EqualsInteger32Constant(32) && sub->right() == sa;
10125 }
10126 
10127 
10128 // Checks whether the left and right operands are shift instructions in
10129 // opposite directions that can be replaced by a single rotate-right
10130 // instruction. If they can, the operand and the shift amount for the
10131 // rotate instruction are returned through the out parameters.
10132 static bool MatchRotateRight(HValue* left,
10133  HValue* right,
10134  HValue** operand,
10135  HValue** shift_amount) {
10136  HShl* shl;
10137  HShr* shr;
10138  if (left->IsShl() && right->IsShr()) {
10139  shl = HShl::cast(left);
10140  shr = HShr::cast(right);
10141  } else if (left->IsShr() && right->IsShl()) {
10142  shl = HShl::cast(right);
10143  shr = HShr::cast(left);
10144  } else {
10145  return false;
10146  }
10147  if (shl->left() != shr->left()) return false;
10148 
10149  if (!ShiftAmountsAllowReplaceByRotate(shl->right(), shr->right()) &&
10150  !ShiftAmountsAllowReplaceByRotate(shr->right(), shl->right())) {
10151  return false;
10152  }
10153  *operand = shr->left();
10154  *shift_amount = shr->right();
10155  return true;
10156 }
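// Illustrative sketch (not part of the original hydrogen.cc): the JavaScript
// pattern (x << (32 - sa)) | (x >>> sa) recognized above is a 32-bit rotate
// right by sa. RotateRight32 is a hypothetical helper for this example.
#include <cassert>
#include <cstdint>

static uint32_t RotateRight32(uint32_t x, uint32_t sa) {
  sa &= 31;  // shift amounts only contribute their low five bits
  if (sa == 0) return x;
  return (x >> sa) | (x << (32 - sa));
}

int main() {
  uint32_t x = 0x12345678u;
  uint32_t sa = 8;
  assert(((x << (32 - sa)) | (x >> sa)) == RotateRight32(x, sa));
  return 0;
}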
10157 
10158 
10159 bool CanBeZero(HValue* right) {
10160  if (right->IsConstant()) {
10161  HConstant* right_const = HConstant::cast(right);
10162  if (right_const->HasInteger32Value() &&
10163  (right_const->Integer32Value() & 0x1f) != 0) {
10164  return false;
10165  }
10166  }
10167  return true;
10168 }
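// Illustrative sketch (not part of the original hydrogen.cc): a JavaScript
// shift amount only contributes its low five bits, so constants such as 32 or
// 64 behave like 0. CanBeZero() above therefore only rules out a zero shift
// when the constant is non-zero modulo 32.
#include <cassert>
#include <cstdint>

int main() {
  uint32_t x = 0xffffffffu;
  assert((x >> (32 & 0x1f)) == x);          // shifting by 32 acts like 0
  assert((x >> (33 & 0x1f)) == (x >> 1));   // shifting by 33 acts like 1
  return 0;
}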
10169 
10170 
10171 HValue* HGraphBuilder::EnforceNumberType(HValue* number,
10172  Type* expected) {
10173  if (expected->Is(Type::SignedSmall())) {
10174  return AddUncasted<HForceRepresentation>(number, Representation::Smi());
10175  }
10176  if (expected->Is(Type::Signed32())) {
10177  return AddUncasted<HForceRepresentation>(number,
10178  Representation::Integer32());
10179  }
10180  return number;
10181 }
10182 
10183 
10184 HValue* HGraphBuilder::TruncateToNumber(HValue* value, Type** expected) {
10185  if (value->IsConstant()) {
10186  HConstant* constant = HConstant::cast(value);
10187  Maybe<HConstant*> number = constant->CopyToTruncatedNumber(zone());
10188  if (number.has_value) {
10189  *expected = Type::Number(zone());
10190  return AddInstruction(number.value);
10191  }
10192  }
10193 
10194  // We put temporary values on the stack, which don't correspond to anything
10195  // in baseline code. Since nothing is observable, we avoid recording those
10196  // pushes with a NoObservableSideEffectsScope.
10197  NoObservableSideEffectsScope no_effects(this);
10198 
10199  Type* expected_type = *expected;
10200 
10201  // Separate the number type from the rest.
10202  Type* expected_obj =
10203  Type::Intersect(expected_type, Type::NonNumber(zone()), zone());
10204  Type* expected_number =
10205  Type::Intersect(expected_type, Type::Number(zone()), zone());
10206 
10207  // We expect to get a number.
10208  // (We need to check first, since Type::None->Is(Type::Any()) == true.)
10209  if (expected_obj->Is(Type::None())) {
10210  DCHECK(!expected_number->Is(Type::None(zone())));
10211  return value;
10212  }
10213 
10214  if (expected_obj->Is(Type::Undefined(zone()))) {
10215  // This is already done by HChange.
10216  *expected = Type::Union(expected_number, Type::Number(zone()), zone());
10217  return value;
10218  }
10219 
10220  return value;
10221 }
10222 
10223 
10224 HValue* HOptimizedGraphBuilder::BuildBinaryOperation(
10225  BinaryOperation* expr,
10226  HValue* left,
10227  HValue* right,
10228  PushBeforeSimulateBehavior push_sim_result) {
10229  Type* left_type = expr->left()->bounds().lower;
10230  Type* right_type = expr->right()->bounds().lower;
10231  Type* result_type = expr->bounds().lower;
10232  Maybe<int> fixed_right_arg = expr->fixed_right_arg();
10233  Handle<AllocationSite> allocation_site = expr->allocation_site();
10234 
10235  HAllocationMode allocation_mode;
10236  if (FLAG_allocation_site_pretenuring && !allocation_site.is_null()) {
10237  allocation_mode = HAllocationMode(allocation_site);
10238  }
10239 
10240  HValue* result = HGraphBuilder::BuildBinaryOperation(
10241  expr->op(), left, right, left_type, right_type, result_type,
10242  fixed_right_arg, allocation_mode);
10243  // Add a simulate after instructions with observable side effects, and
10244  // after phis, which are the result of BuildBinaryOperation when we
10245  // inlined some complex subgraph.
10246  if (result->HasObservableSideEffects() || result->IsPhi()) {
10247  if (push_sim_result == PUSH_BEFORE_SIMULATE) {
10248  Push(result);
10249  Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
10250  Drop(1);
10251  } else {
10252  Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
10253  }
10254  }
10255  return result;
10256 }
10257 
10258 
10259 HValue* HGraphBuilder::BuildBinaryOperation(
10260  Token::Value op,
10261  HValue* left,
10262  HValue* right,
10263  Type* left_type,
10264  Type* right_type,
10265  Type* result_type,
10266  Maybe<int> fixed_right_arg,
10267  HAllocationMode allocation_mode) {
10268 
10269  Representation left_rep = Representation::FromType(left_type);
10270  Representation right_rep = Representation::FromType(right_type);
10271 
10272  bool maybe_string_add = op == Token::ADD &&
10273  (left_type->Maybe(Type::String()) ||
10274  left_type->Maybe(Type::Receiver()) ||
10275  right_type->Maybe(Type::String()) ||
10276  right_type->Maybe(Type::Receiver()));
10277 
10278  if (!left_type->IsInhabited()) {
10279  Add<HDeoptimize>("Insufficient type feedback for LHS of binary operation",
10280  Deoptimizer::SOFT);
10281  // TODO(rossberg): we should be able to get rid of non-continuous
10282  // defaults.
10283  left_type = Type::Any(zone());
10284  } else {
10285  if (!maybe_string_add) left = TruncateToNumber(left, &left_type);
10286  left_rep = Representation::FromType(left_type);
10287  }
10288 
10289  if (!right_type->IsInhabited()) {
10290  Add<HDeoptimize>("Insufficient type feedback for RHS of binary operation",
10291  Deoptimizer::SOFT);
10292  right_type = Type::Any(zone());
10293  } else {
10294  if (!maybe_string_add) right = TruncateToNumber(right, &right_type);
10295  right_rep = Representation::FromType(right_type);
10296  }
10297 
10298  // Special case for string addition here.
10299  if (op == Token::ADD &&
10300  (left_type->Is(Type::String()) || right_type->Is(Type::String()))) {
10301  // Validate type feedback for left argument.
10302  if (left_type->Is(Type::String())) {
10303  left = BuildCheckString(left);
10304  }
10305 
10306  // Validate type feedback for right argument.
10307  if (right_type->Is(Type::String())) {
10308  right = BuildCheckString(right);
10309  }
10310 
10311  // Convert left argument as necessary.
10312  if (left_type->Is(Type::Number())) {
10313  DCHECK(right_type->Is(Type::String()));
10314  left = BuildNumberToString(left, left_type);
10315  } else if (!left_type->Is(Type::String())) {
10316  DCHECK(right_type->Is(Type::String()));
10317  HValue* function = AddLoadJSBuiltin(Builtins::STRING_ADD_RIGHT);
10318  Add<HPushArguments>(left, right);
10319  return AddUncasted<HInvokeFunction>(function, 2);
10320  }
10321 
10322  // Convert right argument as necessary.
10323  if (right_type->Is(Type::Number())) {
10324  DCHECK(left_type->Is(Type::String()));
10325  right = BuildNumberToString(right, right_type);
10326  } else if (!right_type->Is(Type::String())) {
10327  DCHECK(left_type->Is(Type::String()));
10328  HValue* function = AddLoadJSBuiltin(Builtins::STRING_ADD_LEFT);
10329  Add<HPushArguments>(left, right);
10330  return AddUncasted<HInvokeFunction>(function, 2);
10331  }
10332 
10333  // Fast path for empty constant strings.
10334  if (left->IsConstant() &&
10335  HConstant::cast(left)->HasStringValue() &&
10336  HConstant::cast(left)->StringValue()->length() == 0) {
10337  return right;
10338  }
10339  if (right->IsConstant() &&
10340  HConstant::cast(right)->HasStringValue() &&
10341  HConstant::cast(right)->StringValue()->length() == 0) {
10342  return left;
10343  }
10344 
10345  // Register the dependent code with the allocation site.
10346  if (!allocation_mode.feedback_site().is_null()) {
10347  DCHECK(!graph()->info()->IsStub());
10348  Handle<AllocationSite> site(allocation_mode.feedback_site());
10351  }
10352 
10353  // Inline the string addition into the stub when creating allocation
10354  // mementos to gather allocation site feedback, or if we can statically
10355  // infer that we're going to create a cons string.
10356  if ((graph()->info()->IsStub() &&
10357  allocation_mode.CreateAllocationMementos()) ||
10358  (left->IsConstant() &&
10359  HConstant::cast(left)->HasStringValue() &&
10360  HConstant::cast(left)->StringValue()->length() + 1 >=
10361  ConsString::kMinLength) ||
10362  (right->IsConstant() &&
10363  HConstant::cast(right)->HasStringValue() &&
10364  HConstant::cast(right)->StringValue()->length() + 1 >=
10365  ConsString::kMinLength)) {
10366  return BuildStringAdd(left, right, allocation_mode);
10367  }
10368 
10369  // Fallback to using the string add stub.
10370  return AddUncasted<HStringAdd>(
10371  left, right, allocation_mode.GetPretenureMode(),
10372  STRING_ADD_CHECK_NONE, allocation_mode.feedback_site());
10373  }
10374 
10375  if (graph()->info()->IsStub()) {
10376  left = EnforceNumberType(left, left_type);
10377  right = EnforceNumberType(right, right_type);
10378  }
10379 
10380  Representation result_rep = Representation::FromType(result_type);
10381 
10382  bool is_non_primitive = (left_rep.IsTagged() && !left_rep.IsSmi()) ||
10383  (right_rep.IsTagged() && !right_rep.IsSmi());
10384 
10385  HInstruction* instr = NULL;
10386  // Only the stub is allowed to call into the runtime, since otherwise we would
10387  // inline several instructions (including the two pushes) for every tagged
10388  // operation in optimized code, which is more expensive than a stub call.
10389  if (graph()->info()->IsStub() && is_non_primitive) {
10391  Add<HPushArguments>(left, right);
10392  instr = AddUncasted<HInvokeFunction>(function, 2);
10393  } else {
10394  switch (op) {
10395  case Token::ADD:
10396  instr = AddUncasted<HAdd>(left, right);
10397  break;
10398  case Token::SUB:
10399  instr = AddUncasted<HSub>(left, right);
10400  break;
10401  case Token::MUL:
10402  instr = AddUncasted<HMul>(left, right);
10403  break;
10404  case Token::MOD: {
10405  if (fixed_right_arg.has_value &&
10406  !right->EqualsInteger32Constant(fixed_right_arg.value)) {
10407  HConstant* fixed_right = Add<HConstant>(
10408  static_cast<int>(fixed_right_arg.value));
10409  IfBuilder if_same(this);
10410  if_same.If<HCompareNumericAndBranch>(right, fixed_right, Token::EQ);
10411  if_same.Then();
10412  if_same.ElseDeopt("Unexpected RHS of binary operation");
10413  right = fixed_right;
10414  }
10415  instr = AddUncasted<HMod>(left, right);
10416  break;
10417  }
10418  case Token::DIV:
10419  instr = AddUncasted<HDiv>(left, right);
10420  break;
10421  case Token::BIT_XOR:
10422  case Token::BIT_AND:
10423  instr = AddUncasted<HBitwise>(op, left, right);
10424  break;
10425  case Token::BIT_OR: {
10426  HValue* operand, *shift_amount;
10427  if (left_type->Is(Type::Signed32()) &&
10428  right_type->Is(Type::Signed32()) &&
10429  MatchRotateRight(left, right, &operand, &shift_amount)) {
10430  instr = AddUncasted<HRor>(operand, shift_amount);
10431  } else {
10432  instr = AddUncasted<HBitwise>(op, left, right);
10433  }
10434  break;
10435  }
10436  case Token::SAR:
10437  instr = AddUncasted<HSar>(left, right);
10438  break;
10439  case Token::SHR:
10440  instr = AddUncasted<HShr>(left, right);
10441  if (FLAG_opt_safe_uint32_operations && instr->IsShr() &&
10442  CanBeZero(right)) {
10443  graph()->RecordUint32Instruction(instr);
10444  }
10445  break;
10446  case Token::SHL:
10447  instr = AddUncasted<HShl>(left, right);
10448  break;
10449  default:
10450  UNREACHABLE();
10451  }
10452  }
10453 
10454  if (instr->IsBinaryOperation()) {
10455  HBinaryOperation* binop = HBinaryOperation::cast(instr);
10456  binop->set_observed_input_representation(1, left_rep);
10457  binop->set_observed_input_representation(2, right_rep);
10458  binop->initialize_output_representation(result_rep);
10459  if (graph()->info()->IsStub()) {
10460  // Stub should not call into stub.
10462  // And should truncate on HForceRepresentation already.
10463  if (left->IsForceRepresentation()) {
10464  left->CopyFlag(HValue::kTruncatingToSmi, instr);
10465  left->CopyFlag(HValue::kTruncatingToInt32, instr);
10466  }
10467  if (right->IsForceRepresentation()) {
10468  right->CopyFlag(HValue::kTruncatingToSmi, instr);
10469  right->CopyFlag(HValue::kTruncatingToInt32, instr);
10470  }
10471  }
10472  }
10473  return instr;
10474 }
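// Illustrative sketch (not part of the original hydrogen.cc): the
// empty-constant-string fast path above makes "" + s (or s + "") return the
// other operand directly, so no HStringAdd is emitted for it.
#include <cassert>
#include <string>

int main() {
  std::string s = "payload";
  assert(("" + s) == s);
  assert((s + "") == s);
  return 0;
}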
10475 
10476 
10477 // Check for the form (%_ClassOf(foo) === 'BarClass').
10478 static bool IsClassOfTest(CompareOperation* expr) {
10479  if (expr->op() != Token::EQ_STRICT) return false;
10480  CallRuntime* call = expr->left()->AsCallRuntime();
10481  if (call == NULL) return false;
10482  Literal* literal = expr->right()->AsLiteral();
10483  if (literal == NULL) return false;
10484  if (!literal->value()->IsString()) return false;
10485  if (!call->name()->IsOneByteEqualTo(STATIC_CHAR_VECTOR("_ClassOf"))) {
10486  return false;
10487  }
10488  DCHECK(call->arguments()->length() == 1);
10489  return true;
10490 }
10491 
10492 
10493 void HOptimizedGraphBuilder::VisitBinaryOperation(BinaryOperation* expr) {
10494  DCHECK(!HasStackOverflow());
10495  DCHECK(current_block() != NULL);
10496  DCHECK(current_block()->HasPredecessor());
10497  switch (expr->op()) {
10498  case Token::COMMA:
10499  return VisitComma(expr);
10500  case Token::OR:
10501  case Token::AND:
10502  return VisitLogicalExpression(expr);
10503  default:
10504  return VisitArithmeticExpression(expr);
10505  }
10506 }
10507 
10508 
10509 void HOptimizedGraphBuilder::VisitComma(BinaryOperation* expr) {
10510  CHECK_ALIVE(VisitForEffect(expr->left()));
10511  // Visit the right subexpression in the same AST context as the entire
10512  // expression.
10513  Visit(expr->right());
10514 }
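// Illustrative sketch (not part of the original hydrogen.cc): like the comma
// handling above, the C++ comma operator evaluates the left operand purely
// for its effect and yields the value of the right operand.
#include <cassert>

int main() {
  int effects = 0;
  int value = (effects++, 42);
  assert(effects == 1 && value == 42);
  return 0;
}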
10515 
10516 
10517 void HOptimizedGraphBuilder::VisitLogicalExpression(BinaryOperation* expr) {
10518  bool is_logical_and = expr->op() == Token::AND;
10519  if (ast_context()->IsTest()) {
10520  TestContext* context = TestContext::cast(ast_context());
10521  // Translate left subexpression.
10522  HBasicBlock* eval_right = graph()->CreateBasicBlock();
10523  if (is_logical_and) {
10524  CHECK_BAILOUT(VisitForControl(expr->left(),
10525  eval_right,
10526  context->if_false()));
10527  } else {
10528  CHECK_BAILOUT(VisitForControl(expr->left(),
10529  context->if_true(),
10530  eval_right));
10531  }
10532 
10533  // Translate right subexpression by visiting it in the same AST
10534  // context as the entire expression.
10535  if (eval_right->HasPredecessor()) {
10536  eval_right->SetJoinId(expr->RightId());
10537  set_current_block(eval_right);
10538  Visit(expr->right());
10539  }
10540 
10541  } else if (ast_context()->IsValue()) {
10542  CHECK_ALIVE(VisitForValue(expr->left()));
10543  DCHECK(current_block() != NULL);
10544  HValue* left_value = Top();
10545 
10546  // Short-circuit left values that always evaluate to the same boolean value.
10547  if (expr->left()->ToBooleanIsTrue() || expr->left()->ToBooleanIsFalse()) {
10548  // l (evals true) && r -> r
10549  // l (evals true) || r -> l
10550  // l (evals false) && r -> l
10551  // l (evals false) || r -> r
10552  if (is_logical_and == expr->left()->ToBooleanIsTrue()) {
10553  Drop(1);
10554  CHECK_ALIVE(VisitForValue(expr->right()));
10555  }
10556  return ast_context()->ReturnValue(Pop());
10557  }
10558 
10559  // We need an extra block to maintain edge-split form.
10560  HBasicBlock* empty_block = graph()->CreateBasicBlock();
10561  HBasicBlock* eval_right = graph()->CreateBasicBlock();
10562  ToBooleanStub::Types expected(expr->left()->to_boolean_types());
10563  HBranch* test = is_logical_and
10564  ? New<HBranch>(left_value, expected, eval_right, empty_block)
10565  : New<HBranch>(left_value, expected, empty_block, eval_right);
10566  FinishCurrentBlock(test);
10567 
10568  set_current_block(eval_right);
10569  Drop(1); // Value of the left subexpression.
10570  CHECK_BAILOUT(VisitForValue(expr->right()));
10571 
10572  HBasicBlock* join_block =
10573  CreateJoin(empty_block, current_block(), expr->id());
10574  set_current_block(join_block);
10575  return ast_context()->ReturnValue(Pop());
10576 
10577  } else {
10578  DCHECK(ast_context()->IsEffect());
10579  // In an effect context, we don't need the value of the left subexpression,
10580  // only its control flow and side effects. We need an extra block to
10581  // maintain edge-split form.
10582  HBasicBlock* empty_block = graph()->CreateBasicBlock();
10583  HBasicBlock* right_block = graph()->CreateBasicBlock();
10584  if (is_logical_and) {
10585  CHECK_BAILOUT(VisitForControl(expr->left(), right_block, empty_block));
10586  } else {
10587  CHECK_BAILOUT(VisitForControl(expr->left(), empty_block, right_block));
10588  }
10589 
10590  // TODO(kmillikin): Find a way to fix this. It's ugly that there are
10591  // actually two empty blocks (one here and one inserted by
10592  // TestContext::BuildBranch), that they both have an HSimulate even though
10593  // the second one is not a merge node, and that we really have no good AST
10594  // ID to put on that first HSimulate.
10595 
10596  if (empty_block->HasPredecessor()) {
10597  empty_block->SetJoinId(expr->id());
10598  } else {
10599  empty_block = NULL;
10600  }
10601 
10602  if (right_block->HasPredecessor()) {
10603  right_block->SetJoinId(expr->RightId());
10604  set_current_block(right_block);
10605  CHECK_BAILOUT(VisitForEffect(expr->right()));
10606  right_block = current_block();
10607  } else {
10608  right_block = NULL;
10609  }
10610 
10611  HBasicBlock* join_block =
10612  CreateJoin(empty_block, right_block, expr->id());
10613  set_current_block(join_block);
10614  // We did not materialize any value in the predecessor environments,
10615  // so there is no need to handle it here.
10616  }
10617 }
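// Illustrative sketch (not part of the original hydrogen.cc): the
// short-circuit rewrites listed in the comments above, restricted here to
// plain boolean operands.
#include <cassert>

int main() {
  bool r = true;
  assert((true && r) == r);       // l (evals true)  && r  -> r
  assert((true || r) == true);    // l (evals true)  || r  -> l
  assert((false && r) == false);  // l (evals false) && r  -> l
  assert((false || r) == r);      // l (evals false) || r  -> r
  return 0;
}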
10618 
10619 
10620 void HOptimizedGraphBuilder::VisitArithmeticExpression(BinaryOperation* expr) {
10621  CHECK_ALIVE(VisitForValue(expr->left()));
10622  CHECK_ALIVE(VisitForValue(expr->right()));
10623  SetSourcePosition(expr->position());
10624  HValue* right = Pop();
10625  HValue* left = Pop();
10626  HValue* result =
10627  BuildBinaryOperation(expr, left, right,
10628  ast_context()->IsEffect() ? NO_PUSH_BEFORE_SIMULATE
10629  : PUSH_BEFORE_SIMULATE);
10630  if (FLAG_hydrogen_track_positions && result->IsBinaryOperation()) {
10631  HBinaryOperation::cast(result)->SetOperandPositions(
10632  zone(),
10633  ScriptPositionToSourcePosition(expr->left()->position()),
10634  ScriptPositionToSourcePosition(expr->right()->position()));
10635  }
10636  return ast_context()->ReturnValue(result);
10637 }
10638 
10639 
10640 void HOptimizedGraphBuilder::HandleLiteralCompareTypeof(CompareOperation* expr,
10641  Expression* sub_expr,
10642  Handle<String> check) {
10643  CHECK_ALIVE(VisitForTypeOf(sub_expr));
10644  SetSourcePosition(expr->position());
10645  HValue* value = Pop();
10646  HTypeofIsAndBranch* instr = New<HTypeofIsAndBranch>(value, check);
10647  return ast_context()->ReturnControl(instr, expr->id());
10648 }
10649 
10650 
10651 static bool IsLiteralCompareBool(Isolate* isolate,
10652  HValue* left,
10653  Token::Value op,
10654  HValue* right) {
10655  return op == Token::EQ_STRICT &&
10656  ((left->IsConstant() &&
10657  HConstant::cast(left)->handle(isolate)->IsBoolean()) ||
10658  (right->IsConstant() &&
10659  HConstant::cast(right)->handle(isolate)->IsBoolean()));
10660 }
10661 
10662 
10663 void HOptimizedGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
10664  DCHECK(!HasStackOverflow());
10665  DCHECK(current_block() != NULL);
10666  DCHECK(current_block()->HasPredecessor());
10667 
10668  if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
10669 
10670  // Check for a few fast cases. The AST visiting behavior must be in sync
10671  // with the full codegen: We don't push both left and right values onto
10672  // the expression stack when one side is a special-case literal.
10673  Expression* sub_expr = NULL;
10674  Handle<String> check;
10675  if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
10676  return HandleLiteralCompareTypeof(expr, sub_expr, check);
10677  }
10678  if (expr->IsLiteralCompareUndefined(&sub_expr, isolate())) {
10679  return HandleLiteralCompareNil(expr, sub_expr, kUndefinedValue);
10680  }
10681  if (expr->IsLiteralCompareNull(&sub_expr)) {
10682  return HandleLiteralCompareNil(expr, sub_expr, kNullValue);
10683  }
10684 
10685  if (IsClassOfTest(expr)) {
10686  CallRuntime* call = expr->left()->AsCallRuntime();
10687  DCHECK(call->arguments()->length() == 1);
10688  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10689  HValue* value = Pop();
10690  Literal* literal = expr->right()->AsLiteral();
10691  Handle<String> rhs = Handle<String>::cast(literal->value());
10692  HClassOfTestAndBranch* instr = New<HClassOfTestAndBranch>(value, rhs);
10693  return ast_context()->ReturnControl(instr, expr->id());
10694  }
10695 
10696  Type* left_type = expr->left()->bounds().lower;
10697  Type* right_type = expr->right()->bounds().lower;
10698  Type* combined_type = expr->combined_type();
10699 
10700  CHECK_ALIVE(VisitForValue(expr->left()));
10701  CHECK_ALIVE(VisitForValue(expr->right()));
10702 
10703  if (FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
10704 
10705  HValue* right = Pop();
10706  HValue* left = Pop();
10707  Token::Value op = expr->op();
10708 
10709  if (IsLiteralCompareBool(isolate(), left, op, right)) {
10710  HCompareObjectEqAndBranch* result =
10711  New<HCompareObjectEqAndBranch>(left, right);
10712  return ast_context()->ReturnControl(result, expr->id());
10713  }
10714 
10715  if (op == Token::INSTANCEOF) {
10716  // Check to see if the rhs of the instanceof is a global function not
10717  // residing in new space. If it is, we assume that the function will stay
10718  // the same.
10719  Handle<JSFunction> target = Handle<JSFunction>::null();
10720  VariableProxy* proxy = expr->right()->AsVariableProxy();
10721  bool global_function = (proxy != NULL) && proxy->var()->IsUnallocated();
10722  if (global_function && current_info()->has_global_object()) {
10723  Handle<String> name = proxy->name();
10724  Handle<GlobalObject> global(current_info()->global_object());
10725  LookupIterator it(global, name, LookupIterator::OWN_SKIP_INTERCEPTOR);
10726  Handle<Object> value = JSObject::GetDataProperty(&it);
10727  if (it.IsFound() && value->IsJSFunction()) {
10728  Handle<JSFunction> candidate = Handle<JSFunction>::cast(value);
10729  // If the function is in new space we assume it's more likely to
10730  // change and thus prefer the general IC code.
10731  if (!isolate()->heap()->InNewSpace(*candidate)) {
10732  target = candidate;
10733  }
10734  }
10735  }
10736 
10737  // If the target is not null we have found a known global function that is
10738  // assumed to stay the same for this instanceof.
10739  if (target.is_null()) {
10740  HInstanceOf* result = New<HInstanceOf>(left, right);
10741  return ast_context()->ReturnInstruction(result, expr->id());
10742  } else {
10743  Add<HCheckValue>(right, target);
10744  HInstanceOfKnownGlobal* result =
10745  New<HInstanceOfKnownGlobal>(left, target);
10746  return ast_context()->ReturnInstruction(result, expr->id());
10747  }
10748 
10749  // Code below assumes that we don't fall through.
10750  UNREACHABLE();
10751  } else if (op == Token::IN) {
10752  HValue* function = AddLoadJSBuiltin(Builtins::IN);
10753  Add<HPushArguments>(left, right);
10754  // TODO(olivf) InvokeFunction produces a check for the parameter count,
10755  // even though we are certain to pass the correct number of arguments here.
10756  HInstruction* result = New<HInvokeFunction>(function, 2);
10757  return ast_context()->ReturnInstruction(result, expr->id());
10758  }
10759 
10760  PushBeforeSimulateBehavior push_behavior =
10761  ast_context()->IsEffect() ? NO_PUSH_BEFORE_SIMULATE
10762  : PUSH_BEFORE_SIMULATE;
10763  HControlInstruction* compare = BuildCompareInstruction(
10764  op, left, right, left_type, right_type, combined_type,
10765  ScriptPositionToSourcePosition(expr->left()->position()),
10766  ScriptPositionToSourcePosition(expr->right()->position()),
10767  push_behavior, expr->id());
10768  if (compare == NULL) return; // Bailed out.
10769  return ast_context()->ReturnControl(compare, expr->id());
10770 }
10771 
10772 
10773 HControlInstruction* HOptimizedGraphBuilder::BuildCompareInstruction(
10774  Token::Value op,
10775  HValue* left,
10776  HValue* right,
10777  Type* left_type,
10778  Type* right_type,
10779  Type* combined_type,
10780  HSourcePosition left_position,
10781  HSourcePosition right_position,
10782  PushBeforeSimulateBehavior push_sim_result,
10783  BailoutId bailout_id) {
10784  // Cases handled below depend on collected type feedback. They should
10785  // soft deoptimize when there is no type feedback.
10786  if (!combined_type->IsInhabited()) {
10787  Add<HDeoptimize>("Insufficient type feedback for combined type "
10788  "of binary operation",
10789  Deoptimizer::SOFT);
10790  combined_type = left_type = right_type = Type::Any(zone());
10791  }
10792 
10793  Representation left_rep = Representation::FromType(left_type);
10794  Representation right_rep = Representation::FromType(right_type);
10795  Representation combined_rep = Representation::FromType(combined_type);
10796 
10797  if (combined_type->Is(Type::Receiver())) {
10798  if (Token::IsEqualityOp(op)) {
10799  // HCompareObjectEqAndBranch can only deal with object, so
10800  // exclude numbers.
10801  if ((left->IsConstant() &&
10802  HConstant::cast(left)->HasNumberValue()) ||
10803  (right->IsConstant() &&
10804  HConstant::cast(right)->HasNumberValue())) {
10805  Add<HDeoptimize>("Type mismatch between feedback and constant",
10806  Deoptimizer::SOFT);
10807  // The caller expects a branch instruction, so make it happy.
10808  return New<HBranch>(graph()->GetConstantTrue());
10809  }
10810  // Can we get away with map check and not instance type check?
10811  HValue* operand_to_check =
10812  left->block()->block_id() < right->block()->block_id() ? left : right;
10813  if (combined_type->IsClass()) {
10814  Handle<Map> map = combined_type->AsClass()->Map();
10815  AddCheckMap(operand_to_check, map);
10816  HCompareObjectEqAndBranch* result =
10817  New<HCompareObjectEqAndBranch>(left, right);
10818  if (FLAG_hydrogen_track_positions) {
10819  result->set_operand_position(zone(), 0, left_position);
10820  result->set_operand_position(zone(), 1, right_position);
10821  }
10822  return result;
10823  } else {
10824  BuildCheckHeapObject(operand_to_check);
10825  Add<HCheckInstanceType>(operand_to_check,
10826  HCheckInstanceType::IS_SPEC_OBJECT);
10827  HCompareObjectEqAndBranch* result =
10828  New<HCompareObjectEqAndBranch>(left, right);
10829  return result;
10830  }
10831  } else {
10832  Bailout(kUnsupportedNonPrimitiveCompare);
10833  return NULL;
10834  }
10835  } else if (combined_type->Is(Type::InternalizedString()) &&
10836  Token::IsEqualityOp(op)) {
10837  // If we have a constant argument, it should be consistent with the type
10838  // feedback (otherwise we fail assertions in HCompareObjectEqAndBranch).
10839  if ((left->IsConstant() &&
10840  !HConstant::cast(left)->HasInternalizedStringValue()) ||
10841  (right->IsConstant() &&
10842  !HConstant::cast(right)->HasInternalizedStringValue())) {
10843  Add<HDeoptimize>("Type mismatch between feedback and constant",
10844  Deoptimizer::SOFT);
10845  // The caller expects a branch instruction, so make it happy.
10846  return New<HBranch>(graph()->GetConstantTrue());
10847  }
10848  BuildCheckHeapObject(left);
10849  Add<HCheckInstanceType>(left, HCheckInstanceType::IS_INTERNALIZED_STRING);
10850  BuildCheckHeapObject(right);
10851  Add<HCheckInstanceType>(right, HCheckInstanceType::IS_INTERNALIZED_STRING);
10852  HCompareObjectEqAndBranch* result =
10853  New<HCompareObjectEqAndBranch>(left, right);
10854  return result;
10855  } else if (combined_type->Is(Type::String())) {
10856  BuildCheckHeapObject(left);
10857  Add<HCheckInstanceType>(left, HCheckInstanceType::IS_STRING);
10858  BuildCheckHeapObject(right);
10859  Add<HCheckInstanceType>(right, HCheckInstanceType::IS_STRING);
10860  HStringCompareAndBranch* result =
10861  New<HStringCompareAndBranch>(left, right, op);
10862  return result;
10863  } else {
10864  if (combined_rep.IsTagged() || combined_rep.IsNone()) {
10865  HCompareGeneric* result = Add<HCompareGeneric>(left, right, op);
10866  result->set_observed_input_representation(1, left_rep);
10867  result->set_observed_input_representation(2, right_rep);
10868  if (result->HasObservableSideEffects()) {
10869  if (push_sim_result == PUSH_BEFORE_SIMULATE) {
10870  Push(result);
10871  AddSimulate(bailout_id, REMOVABLE_SIMULATE);
10872  Drop(1);
10873  } else {
10874  AddSimulate(bailout_id, REMOVABLE_SIMULATE);
10875  }
10876  }
10877  // TODO(jkummerow): Can we make this more efficient?
10878  HBranch* branch = New<HBranch>(result);
10879  return branch;
10880  } else {
10881  HCompareNumericAndBranch* result =
10882  New<HCompareNumericAndBranch>(left, right, op);
10883  result->set_observed_input_representation(left_rep, right_rep);
10884  if (FLAG_hydrogen_track_positions) {
10885  result->SetOperandPositions(zone(), left_position, right_position);
10886  }
10887  return result;
10888  }
10889  }
10890 }
10891 
10892 
10893 void HOptimizedGraphBuilder::HandleLiteralCompareNil(CompareOperation* expr,
10894  Expression* sub_expr,
10895  NilValue nil) {
10896  DCHECK(!HasStackOverflow());
10897  DCHECK(current_block() != NULL);
10898  DCHECK(current_block()->HasPredecessor());
10899  DCHECK(expr->op() == Token::EQ || expr->op() == Token::EQ_STRICT);
10900  if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
10901  CHECK_ALIVE(VisitForValue(sub_expr));
10902  HValue* value = Pop();
10903  if (expr->op() == Token::EQ_STRICT) {
10904  HConstant* nil_constant = nil == kNullValue
10905  ? graph()->GetConstantNull()
10906  : graph()->GetConstantUndefined();
10907  HCompareObjectEqAndBranch* instr =
10908  New<HCompareObjectEqAndBranch>(value, nil_constant);
10909  return ast_context()->ReturnControl(instr, expr->id());
10910  } else {
10911  DCHECK_EQ(Token::EQ, expr->op());
10912  Type* type = expr->combined_type()->Is(Type::None())
10913  ? Type::Any(zone()) : expr->combined_type();
10914  HIfContinuation continuation;
10915  BuildCompareNil(value, type, &continuation);
10916  return ast_context()->ReturnContinuation(&continuation, expr->id());
10917  }
10918 }
10919 
10920 
10921 HInstruction* HOptimizedGraphBuilder::BuildThisFunction() {
10922  // If we share optimized code between different closures, the
10923  // this-function is not a constant, except inside an inlined body.
10924  if (function_state()->outer() != NULL) {
10925  return New<HConstant>(
10926  function_state()->compilation_info()->closure());
10927  } else {
10928  return New<HThisFunction>();
10929  }
10930 }
10931 
10932 
10933 HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(
10934  Handle<JSObject> boilerplate_object,
10935  AllocationSiteUsageContext* site_context) {
10936  NoObservableSideEffectsScope no_effects(this);
10937  InstanceType instance_type = boilerplate_object->map()->instance_type();
10938  DCHECK(instance_type == JS_ARRAY_TYPE || instance_type == JS_OBJECT_TYPE);
10939 
10940  HType type = instance_type == JS_ARRAY_TYPE
10941  ? HType::JSArray() : HType::JSObject();
10942  HValue* object_size_constant = Add<HConstant>(
10943  boilerplate_object->map()->instance_size());
10944 
10945  PretenureFlag pretenure_flag = NOT_TENURED;
10946  if (FLAG_allocation_site_pretenuring) {
10947  pretenure_flag = site_context->current()->GetPretenureMode();
10948  Handle<AllocationSite> site(site_context->current());
10951  }
10952 
10953  HInstruction* object = Add<HAllocate>(object_size_constant, type,
10954  pretenure_flag, instance_type, site_context->current());
10955 
10956  // If allocation folding reaches Page::kMaxRegularHeapObjectSize, the
10957  // elements array may not get folded into the object. Hence, we set the
10958  // elements pointer to the empty fixed array and let store elimination
10959  // remove this store in the folding case.
10960  HConstant* empty_fixed_array = Add<HConstant>(
10961  isolate()->factory()->empty_fixed_array());
10962  Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
10963  empty_fixed_array);
10964 
10965  BuildEmitObjectHeader(boilerplate_object, object);
10966 
10967  Handle<FixedArrayBase> elements(boilerplate_object->elements());
10968  int elements_size = (elements->length() > 0 &&
10969  elements->map() != isolate()->heap()->fixed_cow_array_map()) ?
10970  elements->Size() : 0;
10971 
10972  if (pretenure_flag == TENURED &&
10973  elements->map() == isolate()->heap()->fixed_cow_array_map() &&
10974  isolate()->heap()->InNewSpace(*elements)) {
10975  // If we would like to pretenure a fixed cow array, we must ensure that the
10976  // array is already in old space, otherwise we'll create too many old-to-
10977  // new-space pointers (overflowing the store buffer).
10978  elements = Handle<FixedArrayBase>(
10979  isolate()->factory()->CopyAndTenureFixedCOWArray(
10980  Handle<FixedArray>::cast(elements)));
10981  boilerplate_object->set_elements(*elements);
10982  }
10983 
10984  HInstruction* object_elements = NULL;
10985  if (elements_size > 0) {
10986  HValue* object_elements_size = Add<HConstant>(elements_size);
10987  InstanceType instance_type = boilerplate_object->HasFastDoubleElements()
10988  ? FIXED_DOUBLE_ARRAY_TYPE : FIXED_ARRAY_TYPE;
10989  object_elements = Add<HAllocate>(
10990  object_elements_size, HType::HeapObject(),
10991  pretenure_flag, instance_type, site_context->current());
10992  }
10993  BuildInitElementsInObjectHeader(boilerplate_object, object, object_elements);
10994 
10995  // Copy object elements if non-COW.
10996  if (object_elements != NULL) {
10997  BuildEmitElements(boilerplate_object, elements, object_elements,
10998  site_context);
10999  }
11000 
11001  // Copy in-object properties.
11002  if (boilerplate_object->map()->NumberOfFields() != 0 ||
11003  boilerplate_object->map()->unused_property_fields() > 0) {
11004  BuildEmitInObjectProperties(boilerplate_object, object, site_context,
11005  pretenure_flag);
11006  }
11007  return object;
11008 }
11009 
11010 
11011 void HOptimizedGraphBuilder::BuildEmitObjectHeader(
11012  Handle<JSObject> boilerplate_object,
11013  HInstruction* object) {
11014  DCHECK(boilerplate_object->properties()->length() == 0);
11015 
11016  Handle<Map> boilerplate_object_map(boilerplate_object->map());
11017  AddStoreMapConstant(object, boilerplate_object_map);
11018 
11019  Handle<Object> properties_field =
11020  Handle<Object>(boilerplate_object->properties(), isolate());
11021  DCHECK(*properties_field == isolate()->heap()->empty_fixed_array());
11022  HInstruction* properties = Add<HConstant>(properties_field);
11023  HObjectAccess access = HObjectAccess::ForPropertiesPointer();
11024  Add<HStoreNamedField>(object, access, properties);
11025 
11026  if (boilerplate_object->IsJSArray()) {
11027  Handle<JSArray> boilerplate_array =
11028  Handle<JSArray>::cast(boilerplate_object);
11029  Handle<Object> length_field =
11030  Handle<Object>(boilerplate_array->length(), isolate());
11031  HInstruction* length = Add<HConstant>(length_field);
11032 
11033  DCHECK(boilerplate_array->length()->IsSmi());
11034  Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(
11035  boilerplate_array->GetElementsKind()), length);
11036  }
11037 }
11038 
11039 
11040 void HOptimizedGraphBuilder::BuildInitElementsInObjectHeader(
11041  Handle<JSObject> boilerplate_object,
11042  HInstruction* object,
11043  HInstruction* object_elements) {
11044  DCHECK(boilerplate_object->properties()->length() == 0);
11045  if (object_elements == NULL) {
11046  Handle<Object> elements_field =
11047  Handle<Object>(boilerplate_object->elements(), isolate());
11048  object_elements = Add<HConstant>(elements_field);
11049  }
11050  Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
11051  object_elements);
11052 }
11053 
11054 
11055 void HOptimizedGraphBuilder::BuildEmitInObjectProperties(
11056  Handle<JSObject> boilerplate_object,
11057  HInstruction* object,
11058  AllocationSiteUsageContext* site_context,
11059  PretenureFlag pretenure_flag) {
11060  Handle<Map> boilerplate_map(boilerplate_object->map());
11061  Handle<DescriptorArray> descriptors(boilerplate_map->instance_descriptors());
11062  int limit = boilerplate_map->NumberOfOwnDescriptors();
11063 
11064  int copied_fields = 0;
11065  for (int i = 0; i < limit; i++) {
11066  PropertyDetails details = descriptors->GetDetails(i);
11067  if (details.type() != FIELD) continue;
11068  copied_fields++;
11069  int index = descriptors->GetFieldIndex(i);
11070  int property_offset = boilerplate_object->GetInObjectPropertyOffset(index);
11071  Handle<Name> name(descriptors->GetKey(i));
11072  Handle<Object> value =
11073  Handle<Object>(boilerplate_object->InObjectPropertyAt(index),
11074  isolate());
11075 
11076  // The access for the store depends on the type of the boilerplate.
11077  HObjectAccess access = boilerplate_object->IsJSArray() ?
11078  HObjectAccess::ForJSArrayOffset(property_offset) :
11079  HObjectAccess::ForMapAndOffset(boilerplate_map, property_offset);
11080 
11081  if (value->IsJSObject()) {
11082  Handle<JSObject> value_object = Handle<JSObject>::cast(value);
11083  Handle<AllocationSite> current_site = site_context->EnterNewScope();
11084  HInstruction* result =
11085  BuildFastLiteral(value_object, site_context);
11086  site_context->ExitScope(current_site, value_object);
11087  Add<HStoreNamedField>(object, access, result);
11088  } else {
11089  Representation representation = details.representation();
11090  HInstruction* value_instruction;
11091 
11092  if (representation.IsDouble()) {
11093  // Allocate a HeapNumber box and store the value into it.
11094  HValue* heap_number_constant = Add<HConstant>(HeapNumber::kSize);
11095  // This heap number allocation does not have a corresponding
11096  // AllocationSite. That is okay because:
11097  // 1) it is a child object of another object with a valid allocation site,
11098  // and 2) we can just use the pretenuring mode of the parent object.
11099  HInstruction* double_box =
11100  Add<HAllocate>(heap_number_constant, HType::HeapObject(),
11101  pretenure_flag, MUTABLE_HEAP_NUMBER_TYPE);
11102  AddStoreMapConstant(double_box,
11103  isolate()->factory()->mutable_heap_number_map());
11104  // Unwrap the mutable heap number from the boilerplate.
11105  HValue* double_value =
11106  Add<HConstant>(Handle<HeapNumber>::cast(value)->value());
11107  Add<HStoreNamedField>(
11108  double_box, HObjectAccess::ForHeapNumberValue(), double_value);
11109  value_instruction = double_box;
11110  } else if (representation.IsSmi()) {
11111  value_instruction = value->IsUninitialized()
11112  ? graph()->GetConstant0()
11113  : Add<HConstant>(value);
11114  // Ensure that value is stored as smi.
11115  access = access.WithRepresentation(representation);
11116  } else {
11117  value_instruction = Add<HConstant>(value);
11118  }
11119 
11120  Add<HStoreNamedField>(object, access, value_instruction);
11121  }
11122  }
11123 
11124  int inobject_properties = boilerplate_object->map()->inobject_properties();
11125  HInstruction* value_instruction =
11126  Add<HConstant>(isolate()->factory()->one_pointer_filler_map());
11127  for (int i = copied_fields; i < inobject_properties; i++) {
11128  DCHECK(boilerplate_object->IsJSObject());
11129  int property_offset = boilerplate_object->GetInObjectPropertyOffset(i);
11130  HObjectAccess access =
11131  HObjectAccess::ForMapAndOffset(boilerplate_map, property_offset);
11132  Add<HStoreNamedField>(object, access, value_instruction);
11133  }
11134 }
11135 
11136 
11137 void HOptimizedGraphBuilder::BuildEmitElements(
11138  Handle<JSObject> boilerplate_object,
11139  Handle<FixedArrayBase> elements,
11140  HValue* object_elements,
11141  AllocationSiteUsageContext* site_context) {
11142  ElementsKind kind = boilerplate_object->map()->elements_kind();
11143  int elements_length = elements->length();
11144  HValue* object_elements_length = Add<HConstant>(elements_length);
11145  BuildInitializeElementsHeader(object_elements, kind, object_elements_length);
11146 
11147  // Copy elements backing store content.
11148  if (elements->IsFixedDoubleArray()) {
11149  BuildEmitFixedDoubleArray(elements, kind, object_elements);
11150  } else if (elements->IsFixedArray()) {
11151  BuildEmitFixedArray(elements, kind, object_elements,
11152  site_context);
11153  } else {
11154  UNREACHABLE();
11155  }
11156 }
11157 
11158 
11159 void HOptimizedGraphBuilder::BuildEmitFixedDoubleArray(
11160  Handle<FixedArrayBase> elements,
11161  ElementsKind kind,
11162  HValue* object_elements) {
11163  HInstruction* boilerplate_elements = Add<HConstant>(elements);
11164  int elements_length = elements->length();
11165  for (int i = 0; i < elements_length; i++) {
11166  HValue* key_constant = Add<HConstant>(i);
11167  HInstruction* value_instruction =
11168  Add<HLoadKeyed>(boilerplate_elements, key_constant,
11169  static_cast<HValue*>(NULL), kind,
11171  HInstruction* store = Add<HStoreKeyed>(object_elements, key_constant,
11172  value_instruction, kind);
11174  }
11175 }
11176 
11177 
11178 void HOptimizedGraphBuilder::BuildEmitFixedArray(
11179  Handle<FixedArrayBase> elements,
11180  ElementsKind kind,
11181  HValue* object_elements,
11182  AllocationSiteUsageContext* site_context) {
11183  HInstruction* boilerplate_elements = Add<HConstant>(elements);
11184  int elements_length = elements->length();
11185  Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
11186  for (int i = 0; i < elements_length; i++) {
11187  Handle<Object> value(fast_elements->get(i), isolate());
11188  HValue* key_constant = Add<HConstant>(i);
11189  if (value->IsJSObject()) {
11190  Handle<JSObject> value_object = Handle<JSObject>::cast(value);
11191  Handle<AllocationSite> current_site = site_context->EnterNewScope();
11192  HInstruction* result =
11193  BuildFastLiteral(value_object, site_context);
11194  site_context->ExitScope(current_site, value_object);
11195  Add<HStoreKeyed>(object_elements, key_constant, result, kind);
11196  } else {
11197  HInstruction* value_instruction =
11198  Add<HLoadKeyed>(boilerplate_elements, key_constant,
11199  static_cast<HValue*>(NULL), kind,
11201  Add<HStoreKeyed>(object_elements, key_constant, value_instruction, kind);
11202  }
11203  }
11204 }
11205 
11206 
11207 void HOptimizedGraphBuilder::VisitThisFunction(ThisFunction* expr) {
11208  DCHECK(!HasStackOverflow());
11209  DCHECK(current_block() != NULL);
11210  DCHECK(current_block()->HasPredecessor());
11211  HInstruction* instr = BuildThisFunction();
11212  return ast_context()->ReturnInstruction(instr, expr->id());
11213 }
11214 
11215 
11216 void HOptimizedGraphBuilder::VisitSuperReference(SuperReference* expr) {
11217  DCHECK(!HasStackOverflow());
11218  DCHECK(current_block() != NULL);
11219  DCHECK(current_block()->HasPredecessor());
11220  return Bailout(kSuperReference);
11221 }
11222 
11223 
11224 void HOptimizedGraphBuilder::VisitDeclarations(
11225  ZoneList<Declaration*>* declarations) {
11226  DCHECK(globals_.is_empty());
11227  AstVisitor::VisitDeclarations(declarations);
11228  if (!globals_.is_empty()) {
11229  Handle<FixedArray> array =
11230  isolate()->factory()->NewFixedArray(globals_.length(), TENURED);
11231  for (int i = 0; i < globals_.length(); ++i) array->set(i, *globals_.at(i));
11232  int flags = DeclareGlobalsEvalFlag::encode(current_info()->is_eval()) |
11235  Add<HDeclareGlobals>(array, flags);
11236  globals_.Rewind(0);
11237  }
11238 }
11239 
11240 
11241 void HOptimizedGraphBuilder::VisitVariableDeclaration(
11242  VariableDeclaration* declaration) {
11243  VariableProxy* proxy = declaration->proxy();
11244  VariableMode mode = declaration->mode();
11245  Variable* variable = proxy->var();
11246  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
11247  switch (variable->location()) {
11248  case Variable::UNALLOCATED:
11249  globals_.Add(variable->name(), zone());
11250  globals_.Add(variable->binding_needs_init()
11251  ? isolate()->factory()->the_hole_value()
11252  : isolate()->factory()->undefined_value(), zone());
11253  return;
11254  case Variable::PARAMETER:
11255  case Variable::LOCAL:
11256  if (hole_init) {
11257  HValue* value = graph()->GetConstantHole();
11258  environment()->Bind(variable, value);
11259  }
11260  break;
11261  case Variable::CONTEXT:
11262  if (hole_init) {
11263  HValue* value = graph()->GetConstantHole();
11264  HValue* context = environment()->context();
11265  HStoreContextSlot* store = Add<HStoreContextSlot>(
11266  context, variable->index(), HStoreContextSlot::kNoCheck, value);
11267  if (store->HasObservableSideEffects()) {
11268  Add<HSimulate>(proxy->id(), REMOVABLE_SIMULATE);
11269  }
11270  }
11271  break;
11272  case Variable::LOOKUP:
11273  return Bailout(kUnsupportedLookupSlotInDeclaration);
11274  }
11275 }
11276 
11277 
11278 void HOptimizedGraphBuilder::VisitFunctionDeclaration(
11279  FunctionDeclaration* declaration) {
11280  VariableProxy* proxy = declaration->proxy();
11281  Variable* variable = proxy->var();
11282  switch (variable->location()) {
11283  case Variable::UNALLOCATED: {
11284  globals_.Add(variable->name(), zone());
11285  Handle<SharedFunctionInfo> function = Compiler::BuildFunctionInfo(
11286  declaration->fun(), current_info()->script(), top_info());
11287  // Check for stack-overflow exception.
11288  if (function.is_null()) return SetStackOverflow();
11289  globals_.Add(function, zone());
11290  return;
11291  }
11292  case Variable::PARAMETER:
11293  case Variable::LOCAL: {
11294  CHECK_ALIVE(VisitForValue(declaration->fun()));
11295  HValue* value = Pop();
11296  BindIfLive(variable, value);
11297  break;
11298  }
11299  case Variable::CONTEXT: {
11300  CHECK_ALIVE(VisitForValue(declaration->fun()));
11301  HValue* value = Pop();
11302  HValue* context = environment()->context();
11303  HStoreContextSlot* store = Add<HStoreContextSlot>(
11304  context, variable->index(), HStoreContextSlot::kNoCheck, value);
11305  if (store->HasObservableSideEffects()) {
11306  Add<HSimulate>(proxy->id(), REMOVABLE_SIMULATE);
11307  }
11308  break;
11309  }
11310  case Variable::LOOKUP:
11311  return Bailout(kUnsupportedLookupSlotInDeclaration);
11312  }
11313 }
11314 
11315 
11316 void HOptimizedGraphBuilder::VisitModuleDeclaration(
11317  ModuleDeclaration* declaration) {
11318  UNREACHABLE();
11319 }
11320 
11321 
11322 void HOptimizedGraphBuilder::VisitImportDeclaration(
11323  ImportDeclaration* declaration) {
11324  UNREACHABLE();
11325 }
11326 
11327 
11328 void HOptimizedGraphBuilder::VisitExportDeclaration(
11329  ExportDeclaration* declaration) {
11330  UNREACHABLE();
11331 }
11332 
11333 
11334 void HOptimizedGraphBuilder::VisitModuleLiteral(ModuleLiteral* module) {
11335  UNREACHABLE();
11336 }
11337 
11338 
11339 void HOptimizedGraphBuilder::VisitModuleVariable(ModuleVariable* module) {
11340  UNREACHABLE();
11341 }
11342 
11343 
11344 void HOptimizedGraphBuilder::VisitModulePath(ModulePath* module) {
11345  UNREACHABLE();
11346 }
11347 
11348 
11349 void HOptimizedGraphBuilder::VisitModuleUrl(ModuleUrl* module) {
11350  UNREACHABLE();
11351 }
11352 
11353 
11354 void HOptimizedGraphBuilder::VisitModuleStatement(ModuleStatement* stmt) {
11355  UNREACHABLE();
11356 }
11357 
11358 
11359 // Generators for inline runtime functions.
11360 // Support for types.
11361 void HOptimizedGraphBuilder::GenerateIsSmi(CallRuntime* call) {
11362  DCHECK(call->arguments()->length() == 1);
11363  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11364  HValue* value = Pop();
11365  HIsSmiAndBranch* result = New<HIsSmiAndBranch>(value);
11366  return ast_context()->ReturnControl(result, call->id());
11367 }
11368 
11369 
11370 void HOptimizedGraphBuilder::GenerateIsSpecObject(CallRuntime* call) {
11371  DCHECK(call->arguments()->length() == 1);
11372  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11373  HValue* value = Pop();
11374  HHasInstanceTypeAndBranch* result =
11375  New<HHasInstanceTypeAndBranch>(value,
11376  FIRST_SPEC_OBJECT_TYPE,
11377  LAST_SPEC_OBJECT_TYPE);
11378  return ast_context()->ReturnControl(result, call->id());
11379 }
11380 
11381 
11382 void HOptimizedGraphBuilder::GenerateIsFunction(CallRuntime* call) {
11383  DCHECK(call->arguments()->length() == 1);
11384  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11385  HValue* value = Pop();
11386  HHasInstanceTypeAndBranch* result =
11387  New<HHasInstanceTypeAndBranch>(value, JS_FUNCTION_TYPE);
11388  return ast_context()->ReturnControl(result, call->id());
11389 }
11390 
11391 
11392 void HOptimizedGraphBuilder::GenerateIsMinusZero(CallRuntime* call) {
11393  DCHECK(call->arguments()->length() == 1);
11394  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11395  HValue* value = Pop();
11396  HCompareMinusZeroAndBranch* result = New<HCompareMinusZeroAndBranch>(value);
11397  return ast_context()->ReturnControl(result, call->id());
11398 }
11399 
11400 
11401 void HOptimizedGraphBuilder::GenerateHasCachedArrayIndex(CallRuntime* call) {
11402  DCHECK(call->arguments()->length() == 1);
11403  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11404  HValue* value = Pop();
11405  HHasCachedArrayIndexAndBranch* result =
11406  New<HHasCachedArrayIndexAndBranch>(value);
11407  return ast_context()->ReturnControl(result, call->id());
11408 }
11409 
11410 
11411 void HOptimizedGraphBuilder::GenerateIsArray(CallRuntime* call) {
11412  DCHECK(call->arguments()->length() == 1);
11413  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11414  HValue* value = Pop();
11415  HHasInstanceTypeAndBranch* result =
11416  New<HHasInstanceTypeAndBranch>(value, JS_ARRAY_TYPE);
11417  return ast_context()->ReturnControl(result, call->id());
11418 }
11419 
11420 
11421 void HOptimizedGraphBuilder::GenerateIsRegExp(CallRuntime* call) {
11422  DCHECK(call->arguments()->length() == 1);
11423  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11424  HValue* value = Pop();
11425  HHasInstanceTypeAndBranch* result =
11426  New<HHasInstanceTypeAndBranch>(value, JS_REGEXP_TYPE);
11427  return ast_context()->ReturnControl(result, call->id());
11428 }
11429 
11430 
11431 void HOptimizedGraphBuilder::GenerateIsObject(CallRuntime* call) {
11432  DCHECK(call->arguments()->length() == 1);
11433  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11434  HValue* value = Pop();
11435  HIsObjectAndBranch* result = New<HIsObjectAndBranch>(value);
11436  return ast_context()->ReturnControl(result, call->id());
11437 }
11438 
11439 
11440 void HOptimizedGraphBuilder::GenerateIsNonNegativeSmi(CallRuntime* call) {
11441  return Bailout(kInlinedRuntimeFunctionIsNonNegativeSmi);
11442 }
11443 
11444 
11445 void HOptimizedGraphBuilder::GenerateIsUndetectableObject(CallRuntime* call) {
11446  DCHECK(call->arguments()->length() == 1);
11447  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11448  HValue* value = Pop();
11449  HIsUndetectableAndBranch* result = New<HIsUndetectableAndBranch>(value);
11450  return ast_context()->ReturnControl(result, call->id());
11451 }
11452 
11453 
11454 void HOptimizedGraphBuilder::GenerateIsStringWrapperSafeForDefaultValueOf(
11455  CallRuntime* call) {
11456  return Bailout(kInlinedRuntimeFunctionIsStringWrapperSafeForDefaultValueOf);
11457 }
11458 
11459 
11460 // Support for construct call checks.
11461 void HOptimizedGraphBuilder::GenerateIsConstructCall(CallRuntime* call) {
11462  DCHECK(call->arguments()->length() == 0);
11463  if (function_state()->outer() != NULL) {
11464  // We are generating the graph for an inlined function.
11465  HValue* value = function_state()->inlining_kind() == CONSTRUCT_CALL_RETURN
11466  ? graph()->GetConstantTrue()
11467  : graph()->GetConstantFalse();
11468  return ast_context()->ReturnValue(value);
11469  } else {
11470  return ast_context()->ReturnControl(New<HIsConstructCallAndBranch>(),
11471  call->id());
11472  }
11473 }
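// Note (informal): when this intrinsic is reached inside an inlined function
// the inlining kind is known statically, so the answer folds to the constant
// true/false computed above; only the outermost, non-inlined frame emits the
// dynamic HIsConstructCallAndBranch check.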
11474 
11475 
11476 // Support for arguments.length and arguments[?].
11477 void HOptimizedGraphBuilder::GenerateArgumentsLength(CallRuntime* call) {
11478  DCHECK(call->arguments()->length() == 0);
11479  HInstruction* result = NULL;
11480  if (function_state()->outer() == NULL) {
11481  HInstruction* elements = Add<HArgumentsElements>(false);
11482  result = New<HArgumentsLength>(elements);
11483  } else {
11484  // Number of arguments without receiver.
11485  int argument_count = environment()->
11486  arguments_environment()->parameter_count() - 1;
11487  result = New<HConstant>(argument_count);
11488  }
11489  return ast_context()->ReturnInstruction(result, call->id());
11490 }
11491 
11492 
11493 void HOptimizedGraphBuilder::GenerateArguments(CallRuntime* call) {
11494  DCHECK(call->arguments()->length() == 1);
11495  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11496  HValue* index = Pop();
11497  HInstruction* result = NULL;
11498  if (function_state()->outer() == NULL) {
11499  HInstruction* elements = Add<HArgumentsElements>(false);
11500  HInstruction* length = Add<HArgumentsLength>(elements);
11501  HInstruction* checked_index = Add<HBoundsCheck>(index, length);
11502  result = New<HAccessArgumentsAt>(elements, length, checked_index);
11503  } else {
11504  EnsureArgumentsArePushedForAccess();
11505 
11506  // Number of arguments without receiver.
11507  HInstruction* elements = function_state()->arguments_elements();
11508  int argument_count = environment()->
11509  arguments_environment()->parameter_count() - 1;
11510  HInstruction* length = Add<HConstant>(argument_count);
11511  HInstruction* checked_key = Add<HBoundsCheck>(index, length);
11512  result = New<HAccessArgumentsAt>(elements, length, checked_key);
11513  }
11514  return ast_context()->ReturnInstruction(result, call->id());
11515 }
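// Rough shape of the HIR built above for the non-inlined case:
//   elements = ArgumentsElements
//   length   = ArgumentsLength(elements)
//   checked  = BoundsCheck(index, length)
//   result   = AccessArgumentsAt(elements, length, checked)
// In the inlined case the argument count is a compile-time constant taken from
// the arguments environment, so only the bounds check on the index remains.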
11516 
11517 
11518 // Support for accessing the class and value fields of an object.
11519 void HOptimizedGraphBuilder::GenerateClassOf(CallRuntime* call) {
11520  // The special form recognized by IsClassOfTest is handled before we get here
11521  // and does not cause a bailout.
11522  return Bailout(kInlinedRuntimeFunctionClassOf);
11523 }
11524 
11525 
11526 void HOptimizedGraphBuilder::GenerateValueOf(CallRuntime* call) {
11527  DCHECK(call->arguments()->length() == 1);
11528  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11529  HValue* object = Pop();
11530 
11531  IfBuilder if_objectisvalue(this);
11532  HValue* objectisvalue = if_objectisvalue.If<HHasInstanceTypeAndBranch>(
11533  object, JS_VALUE_TYPE);
11534  if_objectisvalue.Then();
11535  {
11536  // Return the actual value.
11537  Push(Add<HLoadNamedField>(
11538  object, objectisvalue,
11539  HObjectAccess::ForObservableJSObjectOffset(
11540  JSValue::kValueOffset)));
11541  Add<HSimulate>(call->id(), FIXED_SIMULATE);
11542  }
11543  if_objectisvalue.Else();
11544  {
11545  // If the object is not a value, return the object.
11546  Push(object);
11547  Add<HSimulate>(call->id(), FIXED_SIMULATE);
11548  }
11549  if_objectisvalue.End();
11550  return ast_context()->ReturnValue(Pop());
11551 }
11552 
11553 
11554 void HOptimizedGraphBuilder::GenerateDateField(CallRuntime* call) {
11555  DCHECK(call->arguments()->length() == 2);
11556  DCHECK_NE(NULL, call->arguments()->at(1)->AsLiteral());
11557  Smi* index = Smi::cast(*(call->arguments()->at(1)->AsLiteral()->value()));
11558  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11559  HValue* date = Pop();
11560  HDateField* result = New<HDateField>(date, index);
11561  return ast_context()->ReturnInstruction(result, call->id());
11562 }
11563 
11564 
11565 void HOptimizedGraphBuilder::GenerateOneByteSeqStringSetChar(
11566  CallRuntime* call) {
11567  DCHECK(call->arguments()->length() == 3);
11568  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11569  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11570  CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
11571  HValue* string = Pop();
11572  HValue* value = Pop();
11573  HValue* index = Pop();
11574  Add<HSeqStringSetChar>(String::ONE_BYTE_ENCODING, string,
11575  index, value);
11576  Add<HSimulate>(call->id(), FIXED_SIMULATE);
11577  return ast_context()->ReturnValue(graph()->GetConstantUndefined());
11578 }
11579 
11580 
11581 void HOptimizedGraphBuilder::GenerateTwoByteSeqStringSetChar(
11582  CallRuntime* call) {
11583  DCHECK(call->arguments()->length() == 3);
11584  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11585  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11586  CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
11587  HValue* string = Pop();
11588  HValue* value = Pop();
11589  HValue* index = Pop();
11590  Add<HSeqStringSetChar>(String::TWO_BYTE_ENCODING, string,
11591  index, value);
11592  Add<HSimulate>(call->id(), FIXED_SIMULATE);
11593  return ast_context()->ReturnValue(graph()->GetConstantUndefined());
11594 }
11595 
11596 
11597 void HOptimizedGraphBuilder::GenerateSetValueOf(CallRuntime* call) {
11598  DCHECK(call->arguments()->length() == 2);
11599  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11600  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11601  HValue* value = Pop();
11602  HValue* object = Pop();
11603 
11604  // Check if object is a JSValue.
11605  IfBuilder if_objectisvalue(this);
11606  if_objectisvalue.If<HHasInstanceTypeAndBranch>(object, JS_VALUE_TYPE);
11607  if_objectisvalue.Then();
11608  {
11609  // Create in-object property store to kValueOffset.
11610  Add<HStoreNamedField>(object,
11611  HObjectAccess::ForObservableJSObjectOffset(JSValue::kValueOffset),
11612  value);
11613  if (!ast_context()->IsEffect()) {
11614  Push(value);
11615  }
11616  Add<HSimulate>(call->id(), FIXED_SIMULATE);
11617  }
11618  if_objectisvalue.Else();
11619  {
11620  // Nothing to do in this case.
11621  if (!ast_context()->IsEffect()) {
11622  Push(value);
11623  }
11624  Add<HSimulate>(call->id(), FIXED_SIMULATE);
11625  }
11626  if_objectisvalue.End();
11627  if (!ast_context()->IsEffect()) {
11628  Drop(1);
11629  }
11630  return ast_context()->ReturnValue(value);
11631 }
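// Note: the store above only happens when the receiver really is a JSValue
// wrapper; otherwise the intrinsic leaves the object untouched. In a non-effect
// context the value is pushed in both arms so that each simulate sees a
// consistent expression stack, and the duplicate is dropped after the join
// before the value itself is returned.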
11632 
11633 
11634 // Fast support for charCodeAt(n).
11635 void HOptimizedGraphBuilder::GenerateStringCharCodeAt(CallRuntime* call) {
11636  DCHECK(call->arguments()->length() == 2);
11637  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11638  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11639  HValue* index = Pop();
11640  HValue* string = Pop();
11641  HInstruction* result = BuildStringCharCodeAt(string, index);
11642  return ast_context()->ReturnInstruction(result, call->id());
11643 }
11644 
11645 
11646 // Fast support for String.fromCharCode(code), also used by charAt(n) and string[n].
11647 void HOptimizedGraphBuilder::GenerateStringCharFromCode(CallRuntime* call) {
11648  DCHECK(call->arguments()->length() == 1);
11649  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11650  HValue* char_code = Pop();
11651  HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
11652  return ast_context()->ReturnInstruction(result, call->id());
11653 }
11654 
11655 
11656 // Fast support for string.charAt(n) and string[n].
11657 void HOptimizedGraphBuilder::GenerateStringCharAt(CallRuntime* call) {
11658  DCHECK(call->arguments()->length() == 2);
11659  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11660  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11661  HValue* index = Pop();
11662  HValue* string = Pop();
11663  HInstruction* char_code = BuildStringCharCodeAt(string, index);
11664  AddInstruction(char_code);
11665  HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
11666  return ast_context()->ReturnInstruction(result, call->id());
11667 }
11668 
11669 
11670 // Fast support for object equality testing.
11671 void HOptimizedGraphBuilder::GenerateObjectEquals(CallRuntime* call) {
11672  DCHECK(call->arguments()->length() == 2);
11673  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11674  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11675  HValue* right = Pop();
11676  HValue* left = Pop();
11677  HCompareObjectEqAndBranch* result =
11678  New<HCompareObjectEqAndBranch>(left, right);
11679  return ast_context()->ReturnControl(result, call->id());
11680 }
11681 
11682 
11683 // Fast support for StringAdd.
11684 void HOptimizedGraphBuilder::GenerateStringAdd(CallRuntime* call) {
11685  DCHECK_EQ(2, call->arguments()->length());
11686  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11687  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11688  HValue* right = Pop();
11689  HValue* left = Pop();
11690  HInstruction* result = NewUncasted<HStringAdd>(left, right);
11691  return ast_context()->ReturnInstruction(result, call->id());
11692 }
11693 
11694 
11695 // Fast support for SubString.
11696 void HOptimizedGraphBuilder::GenerateSubString(CallRuntime* call) {
11697  DCHECK_EQ(3, call->arguments()->length());
11698  CHECK_ALIVE(VisitExpressions(call->arguments()));
11699  PushArgumentsFromEnvironment(call->arguments()->length());
11700  HCallStub* result = New<HCallStub>(CodeStub::SubString, 3);
11701  return ast_context()->ReturnInstruction(result, call->id());
11702 }
11703 
11704 
11705 // Fast support for StringCompare.
11706 void HOptimizedGraphBuilder::GenerateStringCompare(CallRuntime* call) {
11707  DCHECK_EQ(2, call->arguments()->length());
11708  CHECK_ALIVE(VisitExpressions(call->arguments()));
11709  PushArgumentsFromEnvironment(call->arguments()->length());
11710  HCallStub* result = New<HCallStub>(CodeStub::StringCompare, 2);
11711  return ast_context()->ReturnInstruction(result, call->id());
11712 }
11713 
11714 
11715 // Support for direct calls from JavaScript to native RegExp code.
11716 void HOptimizedGraphBuilder::GenerateRegExpExec(CallRuntime* call) {
11717  DCHECK_EQ(4, call->arguments()->length());
11718  CHECK_ALIVE(VisitExpressions(call->arguments()));
11719  PushArgumentsFromEnvironment(call->arguments()->length());
11720  HCallStub* result = New<HCallStub>(CodeStub::RegExpExec, 4);
11721  return ast_context()->ReturnInstruction(result, call->id());
11722 }
11723 
11724 
11725 void HOptimizedGraphBuilder::GenerateDoubleLo(CallRuntime* call) {
11726  DCHECK_EQ(1, call->arguments()->length());
11727  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11728  HValue* value = Pop();
11729  HInstruction* result = NewUncasted<HDoubleBits>(value, HDoubleBits::LOW);
11730  return ast_context()->ReturnInstruction(result, call->id());
11731 }
11732 
11733 
11734 void HOptimizedGraphBuilder::GenerateDoubleHi(CallRuntime* call) {
11735  DCHECK_EQ(1, call->arguments()->length());
11736  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11737  HValue* value = Pop();
11738  HInstruction* result = NewUncasted<HDoubleBits>(value, HDoubleBits::HIGH);
11739  return ast_context()->ReturnInstruction(result, call->id());
11740 }
11741 
11742 
11743 void HOptimizedGraphBuilder::GenerateConstructDouble(CallRuntime* call) {
11744  DCHECK_EQ(2, call->arguments()->length());
11745  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11746  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11747  HValue* lo = Pop();
11748  HValue* hi = Pop();
11749  HInstruction* result = NewUncasted<HConstructDouble>(hi, lo);
11750  return ast_context()->ReturnInstruction(result, call->id());
11751 }
11752 
11753 
11754 // Construct a RegExp exec result with two in-object properties.
11755 void HOptimizedGraphBuilder::GenerateRegExpConstructResult(CallRuntime* call) {
11756  DCHECK_EQ(3, call->arguments()->length());
11757  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11758  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11759  CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
11760  HValue* input = Pop();
11761  HValue* index = Pop();
11762  HValue* length = Pop();
11763  HValue* result = BuildRegExpConstructResult(length, index, input);
11764  return ast_context()->ReturnValue(result);
11765 }
11766 
11767 
11768 // Support for fast native caches.
11769 void HOptimizedGraphBuilder::GenerateGetFromCache(CallRuntime* call) {
11770  return Bailout(kInlinedRuntimeFunctionGetFromCache);
11771 }
11772 
11773 
11774 // Fast support for number to string.
11775 void HOptimizedGraphBuilder::GenerateNumberToString(CallRuntime* call) {
11776  DCHECK_EQ(1, call->arguments()->length());
11777  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11778  HValue* number = Pop();
11779  HValue* result = BuildNumberToString(number, Type::Any(zone()));
11780  return ast_context()->ReturnValue(result);
11781 }
11782 
11783 
11784 // Fast call for custom callbacks.
11785 void HOptimizedGraphBuilder::GenerateCallFunction(CallRuntime* call) {
11786  // Subtract one because the function to call is not itself an argument to the call.
11787  int arg_count = call->arguments()->length() - 1;
11788  DCHECK(arg_count >= 1); // There's always at least a receiver.
11789 
11790  CHECK_ALIVE(VisitExpressions(call->arguments()));
11791  // The function is the last argument.
11792  HValue* function = Pop();
11793  // Push the arguments to the stack.
11794  PushArgumentsFromEnvironment(arg_count);
11795 
11796  IfBuilder if_is_jsfunction(this);
11797  if_is_jsfunction.If<HHasInstanceTypeAndBranch>(function, JS_FUNCTION_TYPE);
11798 
11799  if_is_jsfunction.Then();
11800  {
11801  HInstruction* invoke_result =
11802  Add<HInvokeFunction>(function, arg_count);
11803  if (!ast_context()->IsEffect()) {
11804  Push(invoke_result);
11805  }
11806  Add<HSimulate>(call->id(), FIXED_SIMULATE);
11807  }
11808 
11809  if_is_jsfunction.Else();
11810  {
11811  HInstruction* call_result =
11812  Add<HCallFunction>(function, arg_count);
11813  if (!ast_context()->IsEffect()) {
11814  Push(call_result);
11815  }
11816  Add<HSimulate>(call->id(), FIXED_SIMULATE);
11817  }
11818  if_is_jsfunction.End();
11819 
11820  if (ast_context()->IsEffect()) {
11821  // EffectContext::ReturnValue ignores the value, so we can just pass
11822  // 'undefined' (as we do not have the call result anymore).
11823  return ast_context()->ReturnValue(graph()->GetConstantUndefined());
11824  } else {
11825  return ast_context()->ReturnValue(Pop());
11826  }
11827 }
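// Dispatch summary for the call built above: a callee that is a JSFunction
// goes through HInvokeFunction (direct invocation), anything else falls back
// to the generic HCallFunction path. Both arms add a simulate at the call id
// so deoptimization sees a consistent state; in a value context each arm
// pushes its result and exactly one value is popped after the join.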
11828 
11829 
11830 // Fast call to math functions.
11831 void HOptimizedGraphBuilder::GenerateMathPow(CallRuntime* call) {
11832  DCHECK_EQ(2, call->arguments()->length());
11833  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11834  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
11835  HValue* right = Pop();
11836  HValue* left = Pop();
11837  HInstruction* result = NewUncasted<HPower>(left, right);
11838  return ast_context()->ReturnInstruction(result, call->id());
11839 }
11840 
11841 
11842 void HOptimizedGraphBuilder::GenerateMathLogRT(CallRuntime* call) {
11843  DCHECK(call->arguments()->length() == 1);
11844  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11845  HValue* value = Pop();
11846  HInstruction* result = NewUncasted<HUnaryMathOperation>(value, kMathLog);
11847  return ast_context()->ReturnInstruction(result, call->id());
11848 }
11849 
11850 
11851 void HOptimizedGraphBuilder::GenerateMathSqrtRT(CallRuntime* call) {
11852  DCHECK(call->arguments()->length() == 1);
11853  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11854  HValue* value = Pop();
11855  HInstruction* result = NewUncasted<HUnaryMathOperation>(value, kMathSqrt);
11856  return ast_context()->ReturnInstruction(result, call->id());
11857 }
11858 
11859 
11860 void HOptimizedGraphBuilder::GenerateGetCachedArrayIndex(CallRuntime* call) {
11861  DCHECK(call->arguments()->length() == 1);
11862  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11863  HValue* value = Pop();
11864  HGetCachedArrayIndex* result = New<HGetCachedArrayIndex>(value);
11865  return ast_context()->ReturnInstruction(result, call->id());
11866 }
11867 
11868 
11869 void HOptimizedGraphBuilder::GenerateFastOneByteArrayJoin(CallRuntime* call) {
11870  return Bailout(kInlinedRuntimeFunctionFastOneByteArrayJoin);
11871 }
11872 
11873 
11874 // Support for generators.
11875 void HOptimizedGraphBuilder::GenerateGeneratorNext(CallRuntime* call) {
11876  return Bailout(kInlinedRuntimeFunctionGeneratorNext);
11877 }
11878 
11879 
11880 void HOptimizedGraphBuilder::GenerateGeneratorThrow(CallRuntime* call) {
11881  return Bailout(kInlinedRuntimeFunctionGeneratorThrow);
11882 }
11883 
11884 
11885 void HOptimizedGraphBuilder::GenerateDebugBreakInOptimizedCode(
11886  CallRuntime* call) {
11887  Add<HDebugBreak>();
11888  return ast_context()->ReturnValue(graph()->GetConstant0());
11889 }
11890 
11891 
11892 void HOptimizedGraphBuilder::GenerateDebugIsActive(CallRuntime* call) {
11893  DCHECK(call->arguments()->length() == 0);
11894  HValue* ref =
11895  Add<HConstant>(ExternalReference::debug_is_active_address(isolate()));
11896  HValue* value = Add<HLoadNamedField>(
11897  ref, static_cast<HValue*>(NULL), HObjectAccess::ForExternalUInteger8());
11898  return ast_context()->ReturnValue(value);
11899 }
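// Note: this reads the isolate's debug_is_active flag as an untagged uint8
// directly from its external address, so optimized code can poll whether the
// debugger is active without making a runtime call.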
11900 
11901 
11902 #undef CHECK_BAILOUT
11903 #undef CHECK_ALIVE
11904 
11905 
11906 HEnvironment::HEnvironment(HEnvironment* outer,
11907  Scope* scope,
11908  Handle<JSFunction> closure,
11909  Zone* zone)
11910  : closure_(closure),
11911  values_(0, zone),
11912  frame_type_(JS_FUNCTION),
11913  parameter_count_(0),
11914  specials_count_(1),
11915  local_count_(0),
11916  outer_(outer),
11917  entry_(NULL),
11918  pop_count_(0),
11919  push_count_(0),
11920  ast_id_(BailoutId::None()),
11921  zone_(zone) {
11922  Scope* declaration_scope = scope->DeclarationScope();
11923  Initialize(declaration_scope->num_parameters() + 1,
11924  declaration_scope->num_stack_slots(), 0);
11925 }
11926 
11927 
11928 HEnvironment::HEnvironment(Zone* zone, int parameter_count)
11929  : values_(0, zone),
11930  frame_type_(STUB),
11931  parameter_count_(parameter_count),
11932  specials_count_(1),
11933  local_count_(0),
11934  outer_(NULL),
11935  entry_(NULL),
11936  pop_count_(0),
11937  push_count_(0),
11938  ast_id_(BailoutId::None()),
11939  zone_(zone) {
11940  Initialize(parameter_count, 0, 0);
11941 }
11942 
11943 
11944 HEnvironment::HEnvironment(const HEnvironment* other, Zone* zone)
11945  : values_(0, zone),
11946  frame_type_(JS_FUNCTION),
11947  parameter_count_(0),
11948  specials_count_(0),
11949  local_count_(0),
11950  outer_(NULL),
11951  entry_(NULL),
11952  pop_count_(0),
11953  push_count_(0),
11954  ast_id_(other->ast_id()),
11955  zone_(zone) {
11956  Initialize(other);
11957 }
11958 
11959 
11960 HEnvironment::HEnvironment(HEnvironment* outer,
11961  Handle<JSFunction> closure,
11962  FrameType frame_type,
11963  int arguments,
11964  Zone* zone)
11965  : closure_(closure),
11966  values_(arguments, zone),
11967  frame_type_(frame_type),
11968  parameter_count_(arguments),
11969  specials_count_(0),
11970  local_count_(0),
11971  outer_(outer),
11972  entry_(NULL),
11973  pop_count_(0),
11974  push_count_(0),
11975  ast_id_(BailoutId::None()),
11976  zone_(zone) {
11977 }
11978 
11979 
11980 void HEnvironment::Initialize(int parameter_count,
11981  int local_count,
11982  int stack_height) {
11983  parameter_count_ = parameter_count;
11984  local_count_ = local_count;
11985 
11986  // Avoid reallocating the temporaries' backing store on the first Push.
11987  int total = parameter_count + specials_count_ + local_count + stack_height;
11988  values_.Initialize(total + 4, zone());
11989  for (int i = 0; i < total; ++i) values_.Add(NULL, zone());
11990 }
11991 
11992 
11993 void HEnvironment::Initialize(const HEnvironment* other) {
11994  closure_ = other->closure();
11995  values_.AddAll(other->values_, zone());
11996  assigned_variables_.Union(other->assigned_variables_, zone());
11997  frame_type_ = other->frame_type_;
11998  parameter_count_ = other->parameter_count_;
11999  local_count_ = other->local_count_;
12000  if (other->outer_ != NULL) outer_ = other->outer_->Copy(); // Deep copy.
12001  entry_ = other->entry_;
12002  pop_count_ = other->pop_count_;
12003  push_count_ = other->push_count_;
12004  specials_count_ = other->specials_count_;
12005  ast_id_ = other->ast_id_;
12006 }
12007 
12008 
12009 void HEnvironment::AddIncomingEdge(HBasicBlock* block, HEnvironment* other) {
12010  DCHECK(!block->IsLoopHeader());
12011  DCHECK(values_.length() == other->values_.length());
12012 
12013  int length = values_.length();
12014  for (int i = 0; i < length; ++i) {
12015  HValue* value = values_[i];
12016  if (value != NULL && value->IsPhi() && value->block() == block) {
12017  // There is already a phi for the i'th value.
12018  HPhi* phi = HPhi::cast(value);
12019  // Assert index is correct and that we haven't missed an incoming edge.
12020  DCHECK(phi->merged_index() == i || !phi->HasMergedIndex());
12021  DCHECK(phi->OperandCount() == block->predecessors()->length());
12022  phi->AddInput(other->values_[i]);
12023  } else if (values_[i] != other->values_[i]) {
12024  // There is a fresh value on the incoming edge, so a phi is needed.
12025  DCHECK(values_[i] != NULL && other->values_[i] != NULL);
12026  HPhi* phi = block->AddNewPhi(i);
12027  HValue* old_value = values_[i];
12028  for (int j = 0; j < block->predecessors()->length(); j++) {
12029  phi->AddInput(old_value);
12030  }
12031  phi->AddInput(other->values_[i]);
12032  this->values_[i] = phi;
12033  }
12034  }
12035 }
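// Worked example (informal): suppose slot i holds v1 here and v2 on the new
// incoming edge of a block that already has two predecessors. A fresh phi is
// created with inputs (v1, v1, v2), one copy of the old value per existing
// predecessor plus the new edge's value, and it replaces values_[i].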
12036 
12037 
12038 void HEnvironment::Bind(int index, HValue* value) {
12039  DCHECK(value != NULL);
12040  assigned_variables_.Add(index, zone());
12041  values_[index] = value;
12042 }
12043 
12044 
12045 bool HEnvironment::HasExpressionAt(int index) const {
12046  return index >= parameter_count_ + specials_count_ + local_count_;
12047 }
12048 
12049 
12050 bool HEnvironment::ExpressionStackIsEmpty() const {
12051  DCHECK(length() >= first_expression_index());
12052  return length() == first_expression_index();
12053 }
12054 
12055 
12056 void HEnvironment::SetExpressionStackAt(int index_from_top, HValue* value) {
12057  int count = index_from_top + 1;
12058  int index = values_.length() - count;
12059  DCHECK(HasExpressionAt(index));
12060  // The push count must include at least the element in question or else
12061  // the new value will not be included in this environment's history.
12062  if (push_count_ < count) {
12063  // This has the same effect as popping and then re-pushing 'count' elements.
12064  pop_count_ += (count - push_count_);
12065  push_count_ = count;
12066  }
12067  values_[index] = value;
12068 }
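// Accounting example (informal): with push_count_ == 1 and index_from_top == 2,
// count is 3, so pop_count_ grows by 2 and push_count_ becomes 3; the overwrite
// is recorded as if the top three values had been popped and re-pushed, which
// keeps the environment's history consistent for deoptimization.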
12069 
12070 
12071 void HEnvironment::Drop(int count) {
12072  for (int i = 0; i < count; ++i) {
12073  Pop();
12074  }
12075 }
12076 
12077 
12078 HEnvironment* HEnvironment::Copy() const {
12079  return new(zone()) HEnvironment(this, zone());
12080 }
12081 
12082 
12083 HEnvironment* HEnvironment::CopyWithoutHistory() const {
12084  HEnvironment* result = Copy();
12085  result->ClearHistory();
12086  return result;
12087 }
12088 
12089 
12090 HEnvironment* HEnvironment::CopyAsLoopHeader(HBasicBlock* loop_header) const {
12091  HEnvironment* new_env = Copy();
12092  for (int i = 0; i < values_.length(); ++i) {
12093  HPhi* phi = loop_header->AddNewPhi(i);
12094  phi->AddInput(values_[i]);
12095  new_env->values_[i] = phi;
12096  }
12097  new_env->ClearHistory();
12098  return new_env;
12099 }
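// Note: every environment slot gets a phi in the loop header, seeded here with
// the single value flowing in from the preheader; the inputs arriving around
// the back edge are filled in later, once the loop body has been built.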
12100 
12101 
12102 HEnvironment* HEnvironment::CreateStubEnvironment(HEnvironment* outer,
12103  Handle<JSFunction> target,
12104  FrameType frame_type,
12105  int arguments) const {
12106  HEnvironment* new_env =
12107  new(zone()) HEnvironment(outer, target, frame_type,
12108  arguments + 1, zone());
12109  for (int i = 0; i <= arguments; ++i) { // Include receiver.
12110  new_env->Push(ExpressionStackAt(arguments - i));
12111  }
12112  new_env->ClearHistory();
12113  return new_env;
12114 }
12115 
12116 
12117 HEnvironment* HEnvironment::CopyForInlining(
12118  Handle<JSFunction> target,
12119  int arguments,
12120  FunctionLiteral* function,
12121  HConstant* undefined,
12122  InliningKind inlining_kind) const {
12123  DCHECK(frame_type() == JS_FUNCTION);
12124 
12125  // Outer environment is a copy of this one without the arguments.
12126  int arity = function->scope()->num_parameters();
12127 
12128  HEnvironment* outer = Copy();
12129  outer->Drop(arguments + 1); // Including receiver.
12130  outer->ClearHistory();
12131 
12132  if (inlining_kind == CONSTRUCT_CALL_RETURN) {
12133  // Create an artificial constructor stub environment. The receiver should
12134  // actually be the constructor function, but we pass the newly allocated
12135  // object instead; DoComputeConstructStubFrame() relies on that.
12136  outer = CreateStubEnvironment(outer, target, JS_CONSTRUCT, arguments);
12137  } else if (inlining_kind == GETTER_CALL_RETURN) {
12138  // We need an additional StackFrame::INTERNAL frame for restoring the
12139  // correct context.
12140  outer = CreateStubEnvironment(outer, target, JS_GETTER, arguments);
12141  } else if (inlining_kind == SETTER_CALL_RETURN) {
12142  // We need an additional StackFrame::INTERNAL frame for temporarily saving
12143  // the argument of the setter, see StoreStubCompiler::CompileStoreViaSetter.
12144  outer = CreateStubEnvironment(outer, target, JS_SETTER, arguments);
12145  }
12146 
12147  if (arity != arguments) {
12148  // Create artificial arguments adaptation environment.
12149  outer = CreateStubEnvironment(outer, target, ARGUMENTS_ADAPTOR, arguments);
12150  }
12151 
12152  HEnvironment* inner =
12153  new(zone()) HEnvironment(outer, function->scope(), target, zone());
12154  // Get the argument values from the original environment.
12155  for (int i = 0; i <= arity; ++i) { // Include receiver.
12156  HValue* push = (i <= arguments) ?
12157  ExpressionStackAt(arguments - i) : undefined;
12158  inner->SetValueAt(i, push);
12159  }
12160  inner->SetValueAt(arity + 1, context());
12161  for (int i = arity + 2; i < inner->length(); ++i) {
12162  inner->SetValueAt(i, undefined);
12163  }
12164 
12165  inner->set_ast_id(BailoutId::FunctionEntry());
12166  return inner;
12167 }
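// Layout example (informal): inlining a function with arity 2 at a call site
// that passes one argument gives an inner environment of
//   [receiver, arg0, undefined, context, undefined, ...]
// i.e. slots 0..arity hold the receiver and the parameters (padded with
// undefined when the call site passes fewer arguments than the arity),
// slot arity + 1 holds the context, and the remaining locals start out
// as undefined.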
12168 
12169 
12170 OStream& operator<<(OStream& os, const HEnvironment& env) {
12171  for (int i = 0; i < env.length(); i++) {
12172  if (i == 0) os << "parameters\n";
12173  if (i == env.parameter_count()) os << "specials\n";
12174  if (i == env.parameter_count() + env.specials_count()) os << "locals\n";
12175  if (i == env.parameter_count() + env.specials_count() + env.local_count()) {
12176  os << "expressions\n";
12177  }
12178  HValue* val = env.values()->at(i);
12179  os << i << ": ";
12180  if (val != NULL) {
12181  os << val;
12182  } else {
12183  os << "NULL";
12184  }
12185  os << "\n";
12186  }
12187  return os << "\n";
12188 }
12189 
12190 
12191 void HTracer::TraceCompilation(CompilationInfo* info) {
12192  Tag tag(this, "compilation");
12193  if (info->IsOptimizing()) {
12194  Handle<String> name = info->function()->debug_name();
12195  PrintStringProperty("name", name->ToCString().get());
12196  PrintIndent();
12197  trace_.Add("method \"%s:%d\"\n",
12198  name->ToCString().get(),
12199  info->optimization_id());
12200  } else {
12201  CodeStub::Major major_key = info->code_stub()->MajorKey();
12202  PrintStringProperty("name", CodeStub::MajorName(major_key, false));
12203  PrintStringProperty("method", "stub");
12204  }
12205  PrintLongProperty("date",
12206  static_cast<int64_t>(base::OS::TimeCurrentMillis()));
12207 }
12208 
12209 
12210 void HTracer::TraceLithium(const char* name, LChunk* chunk) {
12211  DCHECK(!chunk->isolate()->concurrent_recompilation_enabled());
12212  AllowHandleDereference allow_deref;
12213  AllowDeferredHandleDereference allow_deferred_deref;
12214  Trace(name, chunk->graph(), chunk);
12215 }
12216 
12217 
12218 void HTracer::TraceHydrogen(const char* name, HGraph* graph) {
12219  DCHECK(!graph->isolate()->concurrent_recompilation_enabled());
12220  AllowHandleDereference allow_deref;
12221  AllowDeferredHandleDereference allow_deferred_deref;
12222  Trace(name, graph, NULL);
12223 }
12224 
12225 
12226 void HTracer::Trace(const char* name, HGraph* graph, LChunk* chunk) {
12227  Tag tag(this, "cfg");
12228  PrintStringProperty("name", name);
12229  const ZoneList<HBasicBlock*>* blocks = graph->blocks();
12230  for (int i = 0; i < blocks->length(); i++) {
12231  HBasicBlock* current = blocks->at(i);
12232  Tag block_tag(this, "block");
12233  PrintBlockProperty("name", current->block_id());
12234  PrintIntProperty("from_bci", -1);
12235  PrintIntProperty("to_bci", -1);
12236 
12237  if (!current->predecessors()->is_empty()) {
12238  PrintIndent();
12239  trace_.Add("predecessors");
12240  for (int j = 0; j < current->predecessors()->length(); ++j) {
12241  trace_.Add(" \"B%d\"", current->predecessors()->at(j)->block_id());
12242  }
12243  trace_.Add("\n");
12244  } else {
12245  PrintEmptyProperty("predecessors");
12246  }
12247 
12248  if (current->end()->SuccessorCount() == 0) {
12249  PrintEmptyProperty("successors");
12250  } else {
12251  PrintIndent();
12252  trace_.Add("successors");
12253  for (HSuccessorIterator it(current->end()); !it.Done(); it.Advance()) {
12254  trace_.Add(" \"B%d\"", it.Current()->block_id());
12255  }
12256  trace_.Add("\n");
12257  }
12258 
12259  PrintEmptyProperty("xhandlers");
12260 
12261  {
12262  PrintIndent();
12263  trace_.Add("flags");
12264  if (current->IsLoopSuccessorDominator()) {
12265  trace_.Add(" \"dom-loop-succ\"");
12266  }
12267  if (current->IsUnreachable()) {
12268  trace_.Add(" \"dead\"");
12269  }
12270  if (current->is_osr_entry()) {
12271  trace_.Add(" \"osr\"");
12272  }
12273  trace_.Add("\n");
12274  }
12275 
12276  if (current->dominator() != NULL) {
12277  PrintBlockProperty("dominator", current->dominator()->block_id());
12278  }
12279 
12280  PrintIntProperty("loop_depth", current->LoopNestingDepth());
12281 
12282  if (chunk != NULL) {
12283  int first_index = current->first_instruction_index();
12284  int last_index = current->last_instruction_index();
12285  PrintIntProperty(
12286  "first_lir_id",
12287  LifetimePosition::FromInstructionIndex(first_index).Value());
12288  PrintIntProperty(
12289  "last_lir_id",
12290  LifetimePosition::FromInstructionIndex(last_index).Value());
12291  }
12292 
12293  {
12294  Tag states_tag(this, "states");
12295  Tag locals_tag(this, "locals");
12296  int total = current->phis()->length();
12297  PrintIntProperty("size", current->phis()->length());
12298  PrintStringProperty("method", "None");
12299  for (int j = 0; j < total; ++j) {
12300  HPhi* phi = current->phis()->at(j);
12301  PrintIndent();
12302  OStringStream os;
12303  os << phi->merged_index() << " " << NameOf(phi) << " " << *phi << "\n";
12304  trace_.Add(os.c_str());
12305  }
12306  }
12307 
12308  {
12309  Tag HIR_tag(this, "HIR");
12310  for (HInstructionIterator it(current); !it.Done(); it.Advance()) {
12311  HInstruction* instruction = it.Current();
12312  int uses = instruction->UseCount();
12313  PrintIndent();
12314  OStringStream os;
12315  os << "0 " << uses << " " << NameOf(instruction) << " " << *instruction;
12316  if (FLAG_hydrogen_track_positions &&
12317  instruction->has_position() &&
12318  instruction->position().raw() != 0) {
12319  const HSourcePosition pos = instruction->position();
12320  os << " pos:";
12321  if (pos.inlining_id() != 0) os << pos.inlining_id() << "_";
12322  os << pos.position();
12323  }
12324  os << " <|@\n";
12325  trace_.Add(os.c_str());
12326  }
12327  }
12328 
12329 
12330  if (chunk != NULL) {
12331  Tag LIR_tag(this, "LIR");
12332  int first_index = current->first_instruction_index();
12333  int last_index = current->last_instruction_index();
12334  if (first_index != -1 && last_index != -1) {
12335  const ZoneList<LInstruction*>* instructions = chunk->instructions();
12336  for (int i = first_index; i <= last_index; ++i) {
12337  LInstruction* linstr = instructions->at(i);
12338  if (linstr != NULL) {
12339  PrintIndent();
12340  trace_.Add("%d ",
12341  LifetimePosition::FromInstructionIndex(i).Value());
12342  linstr->PrintTo(&trace_);
12343  OStringStream os;
12344  os << " [hir:" << NameOf(linstr->hydrogen_value()) << "] <|@\n";
12345  trace_.Add(os.c_str());
12346  }
12347  }
12348  }
12349  }
12350  }
12351 }
12352 
12353 
12354 void HTracer::TraceLiveRanges(const char* name, LAllocator* allocator) {
12355  Tag tag(this, "intervals");
12356  PrintStringProperty("name", name);
12357 
12358  const Vector<LiveRange*>* fixed_d = allocator->fixed_double_live_ranges();
12359  for (int i = 0; i < fixed_d->length(); ++i) {
12360  TraceLiveRange(fixed_d->at(i), "fixed", allocator->zone());
12361  }
12362 
12363  const Vector<LiveRange*>* fixed = allocator->fixed_live_ranges();
12364  for (int i = 0; i < fixed->length(); ++i) {
12365  TraceLiveRange(fixed->at(i), "fixed", allocator->zone());
12366  }
12367 
12368  const ZoneList<LiveRange*>* live_ranges = allocator->live_ranges();
12369  for (int i = 0; i < live_ranges->length(); ++i) {
12370  TraceLiveRange(live_ranges->at(i), "object", allocator->zone());
12371  }
12372 }
12373 
12374 
12375 void HTracer::TraceLiveRange(LiveRange* range, const char* type,
12376  Zone* zone) {
12377  if (range != NULL && !range->IsEmpty()) {
12378  PrintIndent();
12379  trace_.Add("%d %s", range->id(), type);
12380  if (range->HasRegisterAssigned()) {
12381  LOperand* op = range->CreateAssignedOperand(zone);
12382  int assigned_reg = op->index();
12383  if (op->IsDoubleRegister()) {
12384  trace_.Add(" \"%s\"",
12385  DoubleRegister::AllocationIndexToString(assigned_reg));
12386  } else {
12387  DCHECK(op->IsRegister());
12388  trace_.Add(" \"%s\"", Register::AllocationIndexToString(assigned_reg));
12389  }
12390  } else if (range->IsSpilled()) {
12391  LOperand* op = range->TopLevel()->GetSpillOperand();
12392  if (op->IsDoubleStackSlot()) {
12393  trace_.Add(" \"double_stack:%d\"", op->index());
12394  } else {
12395  DCHECK(op->IsStackSlot());
12396  trace_.Add(" \"stack:%d\"", op->index());
12397  }
12398  }
12399  int parent_index = -1;
12400  if (range->IsChild()) {
12401  parent_index = range->parent()->id();
12402  } else {
12403  parent_index = range->id();
12404  }
12405  LOperand* op = range->FirstHint();
12406  int hint_index = -1;
12407  if (op != NULL && op->IsUnallocated()) {
12408  hint_index = LUnallocated::cast(op)->virtual_register();
12409  }
12410  trace_.Add(" %d %d", parent_index, hint_index);
12411  UseInterval* cur_interval = range->first_interval();
12412  while (cur_interval != NULL && range->Covers(cur_interval->start())) {
12413  trace_.Add(" [%d, %d[",
12414  cur_interval->start().Value(),
12415  cur_interval->end().Value());
12416  cur_interval = cur_interval->next();
12417  }
12418 
12419  UsePosition* current_pos = range->first_pos();
12420  while (current_pos != NULL) {
12421  if (current_pos->RegisterIsBeneficial() || FLAG_trace_all_uses) {
12422  trace_.Add(" %d M", current_pos->pos().Value());
12423  }
12424  current_pos = current_pos->next();
12425  }
12426 
12427  trace_.Add(" \"\"\n");
12428  }
12429 }
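// Output sketch: each line in the "intervals" trace reads roughly as
//   <range id> <type> ["<register>" | "stack:<slot>"] <parent id> <hint id>
// followed by one half-open "[start, end[" pair per use interval and by the
// positions marked "M" where having a register would be beneficial.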
12430 
12431 
12432 void HTracer::FlushToFile() {
12433  AppendChars(filename_.start(), trace_.ToCString().get(), trace_.length(),
12434  false);
12435  trace_.Reset();
12436 }
12437 
12438 
12439 void HStatistics::Initialize(CompilationInfo* info) {
12440  if (info->shared_info().is_null()) return;
12441  source_size_ += info->shared_info()->SourceSize();
12442 }
12443 
12444 
12445 void HStatistics::Print(const char* stats_name) {
12446  PrintF(
12447  "\n"
12448  "----------------------------------------"
12449  "----------------------------------------\n"
12450  "--- %s timing results:\n"
12451  "----------------------------------------"
12452  "----------------------------------------\n",
12453  stats_name);
12454  base::TimeDelta sum;
12455  for (int i = 0; i < times_.length(); ++i) {
12456  sum += times_[i];
12457  }
12458 
12459  for (int i = 0; i < names_.length(); ++i) {
12460  PrintF("%33s", names_[i]);
12461  double ms = times_[i].InMillisecondsF();
12462  double percent = times_[i].PercentOf(sum);
12463  PrintF(" %8.3f ms / %4.1f %% ", ms, percent);
12464 
12465  unsigned size = sizes_[i];
12466  double size_percent = static_cast<double>(size) * 100 / total_size_;
12467  PrintF(" %9u bytes / %4.1f %%\n", size, size_percent);
12468  }
12469 
12470  PrintF(
12471  "----------------------------------------"
12472  "----------------------------------------\n");
12473  base::TimeDelta total = create_graph_ + optimize_graph_ + generate_code_;
12474  PrintF("%33s %8.3f ms / %4.1f %% \n", "Create graph",
12475  create_graph_.InMillisecondsF(), create_graph_.PercentOf(total));
12476  PrintF("%33s %8.3f ms / %4.1f %% \n", "Optimize graph",
12477  optimize_graph_.InMillisecondsF(), optimize_graph_.PercentOf(total));
12478  PrintF("%33s %8.3f ms / %4.1f %% \n", "Generate and install code",
12479  generate_code_.InMillisecondsF(), generate_code_.PercentOf(total));
12480  PrintF(
12481  "----------------------------------------"
12482  "----------------------------------------\n");
12483  PrintF("%33s %8.3f ms %9u bytes\n", "Total",
12484  total.InMillisecondsF(), total_size_);
12485  PrintF("%33s (%.1f times slower than full code gen)\n", "",
12486  total.TimesOf(full_code_gen_));
12487 
12488  double source_size_in_kb = static_cast<double>(source_size_) / 1024;
12489  double normalized_time = source_size_in_kb > 0
12490  ? total.InMillisecondsF() / source_size_in_kb
12491  : 0;
12492  double normalized_size_in_kb = source_size_in_kb > 0
12493  ? total_size_ / 1024 / source_size_in_kb
12494  : 0;
12495  PrintF("%33s %8.3f ms %7.3f kB allocated\n",
12496  "Average per kB source", normalized_time, normalized_size_in_kb);
12497 }
12498 
12499 
12500 void HStatistics::SaveTiming(const char* name, base::TimeDelta time,
12501  unsigned size) {
12502  total_size_ += size;
12503  for (int i = 0; i < names_.length(); ++i) {
12504  if (strcmp(names_[i], name) == 0) {
12505  times_[i] += time;
12506  sizes_[i] += size;
12507  return;
12508  }
12509  }
12510  names_.Add(name);
12511  times_.Add(time);
12512  sizes_.Add(size);
12513 }
12514 
12515 
12516 HPhase::~HPhase() {
12517  if (ShouldProduceTraceOutput()) {
12518  isolate()->GetHTracer()->TraceHydrogen(name(), graph_);
12519  }
12520 
12521 #ifdef DEBUG
12522  graph_->Verify(false); // No full verify.
12523 #endif
12524 }
12525 
12526 } } // namespace v8::internal
Definition: hydrogen.cc:5804
void VisitForEffect(Expression *expr)
Definition: hydrogen.cc:4218
void BuildInlinedCallArray(Expression *expression, int argument_count, Handle< AllocationSite > site)
Definition: hydrogen.cc:9161
void VisitForValue(Expression *expr, ArgumentsAllowedFlag flag=ARGUMENTS_NOT_ALLOWED)
Definition: hydrogen.cc:4224
void PushLoad(Property *property, HValue *object, HValue *key)
Definition: hydrogen.cc:7327
static const int kMaxFastLiteralDepth
Definition: hydrogen.h:2157
void VisitTypeof(UnaryOperation *expr)
Definition: hydrogen.cc:9872
void HandlePropertyAssignment(Assignment *expr)
Definition: hydrogen.cc:6463
void BuildEmitElements(Handle< JSObject > boilerplate_object, Handle< FixedArrayBase > elements, HValue *object_elements, AllocationSiteUsageContext *site_context)
Definition: hydrogen.cc:11137
void Bind(Variable *var, HValue *value)
Definition: hydrogen.h:2232
HBasicBlock * JoinContinue(IterationStatement *statement, HBasicBlock *exit_block, HBasicBlock *continue_block)
Definition: hydrogen.cc:3358
void BuildEmitObjectHeader(Handle< JSObject > boilerplate_object, HInstruction *object)
Definition: hydrogen.cc:11011
void Bailout(BailoutReason reason)
Definition: hydrogen.cc:4212
void BuildStore(Expression *expression, Property *prop, BailoutId ast_id, BailoutId return_id, bool is_uninitialized=false)
Definition: hydrogen.cc:6422
void AddCheckPrototypeMaps(Handle< JSObject > holder, Handle< Map > receiver_map)
Definition: hydrogen.cc:7421
HBasicBlock * BuildOsrLoopEntry(IterationStatement *statement)
Definition: hydrogen-osr.cc:17
bool HasOsrEntryAt(IterationStatement *statement)
Definition: hydrogen-osr.cc:12
static HSourcePosition Unknown()
static HValue * cast(HValue *value)
bool Equals(HValue *other)
bool HasObservableSideEffects() const
void set_type(HType new_type)
HBasicBlock * block() const
void ChangeRepresentation(Representation r)
bool EqualsInteger32Constant(int32_t value)
void ClearDependsOnFlag(GVNFlag f)
void CopyFlag(Flag f, HValue *other)
virtual Handle< Map > GetMonomorphicJSObjectMap()
bool CheckFlag(Flag f) const
static Handle< T > cast(Handle< S > that)
Definition: handles.h:116
bool is_null() const
Definition: handles.h:124
static Handle< T > null()
Definition: handles.h:123
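Handle<T> sketch, hedged: an illustration of cast/is_null/null; the Handle<Object> named obj and its IsJSFunction() check are assumed to exist in the caller.
  Handle<JSFunction> fn = Handle<JSFunction>::null();   // empty handle
  if (!obj.is_null() && obj->IsJSFunction()) {
    fn = Handle<JSFunction>::cast(obj);                 // checked downcast from Handle<Object>
  }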
static const int kSize
Definition: objects.h:1521
static const int kMapOffset
Definition: objects.h:1427
bool InNewSpace(Object *object)
Definition: heap-inl.h:322
static JSFunction * GetRootConstructor(TypeClass *type, Context *native_context)
Definition: ic-inl.h:162
Handle< Context > native_context()
Definition: isolate.cc:1339
Builtins * builtins()
Definition: isolate.h:947
Counters * counters()
Definition: isolate.h:857
Factory * factory()
Definition: isolate.h:982
bool has_pending_exception()
Definition: isolate.h:581
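A minimal sketch of the Isolate accessors listed above; the Isolate* named isolate is assumed.
  Factory* factory = isolate->factory();
  Handle<Context> native = isolate->native_context();
  if (isolate->has_pending_exception()) {
    // abort whatever is being built; the caller propagates the exception
  }
  USE(factory);
  USE(native);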
virtual BailoutId ContinueId() const =0
Statement * body() const
Definition: ast.h:732
virtual BailoutId StackCheckId() const =0
static bool IsReadOnlyLengthDescriptor(Handle< Map > jsarray_map)
Definition: objects.cc:12803
static int OffsetOfFunctionWithId(Builtins::JavaScript id)
Definition: objects.h:7520
static Handle< Object > GetDataProperty(Handle< JSObject > object, Handle< Name > key)
Definition: objects.cc:140
static bool TryMigrateInstance(Handle< JSObject > instance)
Definition: objects.cc:3740
static const int kPropertiesOffset
Definition: objects.h:2193
static MUST_USE_RESULT MaybeHandle< JSObject > DeepWalk(Handle< JSObject > object, AllocationSiteCreationContext *site_context)
Definition: objects.cc:5581
static const int kElementsOffset
Definition: objects.h:2194
static const int kValueOffset
Definition: objects.h:7546
static LUnallocated * cast(LOperand *op)
Definition: lithium.h:138
int virtual_register() const
Definition: lithium.h:243
static LifetimePosition FromInstructionIndex(int index)
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
Definition: list-inl.h:17
T & at(int i) const
Definition: list.h:69
bool Contains(const T &elm) const
Definition: list-inl.h:174
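List/ZoneList sketch, assuming a Zone* named zone is available; Add, Contains and at are the accessors listed above.
  ZoneList<int> ids(4, zone);      // initial capacity 4, allocated in 'zone'
  ids.Add(42, zone);
  if (ids.Contains(42)) {
    int first = ids.at(0);         // element access, bounds-checked in debug builds
    USE(first);
  }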
static Handle< Map > AsElementsKind(Handle< Map > map, ElementsKind kind)
Definition: objects.cc:3374
static void AddDependentCompilationInfo(Handle< Map > map, DependentCode::DependencyGroup group, CompilationInfo *info)
Definition: objects.cc:11345
static bool IsValidElementsTransition(ElementsKind from_kind, ElementsKind to_kind)
Definition: objects.cc:12775
PostorderProcessor * Backtrack(Zone *zone, ZoneList< HBasicBlock * > *order)
Definition: hydrogen.cc:3726
HLoopInformation * loop()
Definition: hydrogen.cc:3591
PostorderProcessor * SetupSuccessorsOfLoopMember(HBasicBlock *block, HLoopInformation *loop, HBasicBlock *loop_header)
Definition: hydrogen.cc:3669
PostorderProcessor * SetupLoopMembers(Zone *zone, HBasicBlock *block, HLoopInformation *loop, HBasicBlock *loop_header)
Definition: hydrogen.cc:3657
PostorderProcessor * PerformNonBacktrackingStep(Zone *zone, ZoneList< HBasicBlock * > *order)
Definition: hydrogen.cc:3741
PostorderProcessor * child_
Definition: hydrogen.cc:3815
HBasicBlock * AdvanceSuccessors()
Definition: hydrogen.cc:3788
PostorderProcessor * Pop(Zone *zone, ZoneList< HBasicBlock * > *order)
Definition: hydrogen.cc:3700
PostorderProcessor * PerformStep(Zone *zone, ZoneList< HBasicBlock * > *order)
Definition: hydrogen.cc:3600
PostorderProcessor * Push(Zone *zone)
Definition: hydrogen.cc:3682
HSuccessorIterator successor_iterator
Definition: hydrogen.cc:3821
static PostorderProcessor * CreateEntryProcessor(Zone *zone, HBasicBlock *block)
Definition: hydrogen.cc:3594
PostorderProcessor * father_
Definition: hydrogen.cc:3814
PostorderProcessor * parent()
Definition: hydrogen.cc:3587
HBasicBlock * AdvanceLoopMembers()
Definition: hydrogen.cc:3803
PostorderProcessor(PostorderProcessor *father)
Definition: hydrogen.cc:3612
PostorderProcessor * child()
Definition: hydrogen.cc:3589
PostorderProcessor * SetupSuccessors(Zone *zone, HBasicBlock *block, HBasicBlock *loop_header)
Definition: hydrogen.cc:3625
void ClosePostorder(ZoneList< HBasicBlock * > *order, Zone *zone)
Definition: hydrogen.cc:3689
static void AddDependentCompilationInfo(Handle< PropertyCell > cell, CompilationInfo *info)
Definition: objects.cc:16378
A class to uniformly access the prototype of any Object and walk its prototype chain.
Definition: prototype.h:25
bool IsAtEnd(WhereToEnd where_to_end=END_AT_NULL) const
Definition: prototype.h:99
Object * GetCurrent() const
Definition: prototype.h:62
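PrototypeIterator sketch: the Isolate* named isolate and the Handle<JSObject> named receiver are assumed, and Advance() is assumed from the iterator interface (only IsAtEnd and GetCurrent appear above).
  for (PrototypeIterator iter(isolate, receiver); !iter.IsAtEnd(); iter.Advance()) {
    Object* proto = iter.GetCurrent();
    // inspect 'proto', e.g. bail out if it is not a JSObject
    USE(proto);
  }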
static Representation FromType(Type *type)
static Representation Double()
static Representation Smi()
static Representation Integer32()
static Representation Tagged()
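Representation sketch: the Type* named type is assumed to come from type feedback, and IsSmi() is assumed from the Representation interface.
  Representation r =
      (type != NULL) ? Representation::FromType(type) : Representation::Tagged();
  if (r.IsSmi()) {
    r = Representation::Integer32();  // widen before integer arithmetic, for example
  }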
static const Function * FunctionForId(FunctionId id)
Definition: runtime.cc:9312
static void ArrayIdToTypeAndSize(int array_id, ExternalArrayType *type, ElementsKind *external_elements_kind, ElementsKind *fixed_elements_kind, size_t *element_size)
static MUST_USE_RESULT MaybeHandle< Object > CreateArrayLiteralBoilerplate(Isolate *isolate, Handle< FixedArray > literals, Handle< FixedArray > elements)
Definition: runtime.cc:248
int num_heap_slots() const
Definition: scopes.h:352
bool is_global_scope() const
Definition: scopes.h:267
static bool Analyze(CompilationInfo *info)
Definition: scopes.cc:260
int num_parameters() const
Definition: scopes.h:321
VariableDeclaration * function() const
Definition: scopes.h:309
int ContextChainLength(Scope *scope)
Definition: scopes.cc:715
bool calls_eval() const
Definition: scopes.h:280
bool HasIllegalRedeclaration() const
Definition: scopes.h:198
bool is_eval_scope() const
Definition: scopes.h:264
bool is_function_scope() const
Definition: scopes.h:265
ZoneList< Declaration * > * declarations()
Definition: scopes.h:327
Scope * DeclarationScope()
Definition: scopes.cc:737
Variable * arguments() const
Definition: scopes.h:324
Handle< ScopeInfo > GetScopeInfo()
Definition: scopes.cc:746
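A sketch tying the Scope queries above together; the Scope* named scope is assumed to be the scope being compiled.
  if (scope->is_function_scope() && !scope->calls_eval() &&
      !scope->HasIllegalRedeclaration()) {
    int params = scope->num_parameters();
    int heap_slots = scope->num_heap_slots();  // > 0 means the function needs its own context
    USE(params);
    USE(heap_slots);
  }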
static const int kDontAdaptArgumentsSentinel
Definition: objects.h:6888
static const int kMaxValue
Definition: objects.h:1272
int value() const
Definition: objects-inl.h:1316
static Smi * FromInt(int value)
Definition: objects-inl.h:1321
static bool IsValid(intptr_t value)
Definition: objects-inl.h:1334
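Smi sketch showing the round-trip through FromInt and value with a range check first.
  intptr_t raw = 100000;
  if (Smi::IsValid(raw)) {           // fits in the Smi range, i.e. <= Smi::kMaxValue
    Smi* tagged = Smi::FromInt(static_cast<int>(raw));
    DCHECK_EQ(static_cast<int>(raw), tagged->value());
  }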
virtual bool IsJump() const
Definition: ast.h:260
static Handle< String > Flatten(Handle< String > string, PretenureFlag pretenure=NOT_TENURED)
Definition: objects-inl.h:3354
bool Equals(String *other)
Definition: objects-inl.h:3336
static bool IsEqualityOp(Value op)
Definition: token.h:210
i::Handle< T > Current()
Definition: types.cc:906
Iterator< i::Map > Classes()
Definition: types.h:460
bool Is(TypeImpl *that)
Definition: types.h:390
ClassType * AsClass()
Definition: types.h:442
static TypeHandle Intersect(TypeHandle type1, TypeHandle type2, Region *reg)
Definition: types.cc:607
static TypeHandle Union(TypeHandle type1, TypeHandle type2, Region *reg)
Definition: types.cc:737
bool Maybe(TypeImpl *that)
Definition: types.cc:504
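Type combinator sketch: left_type, right_type and the graph zone() are assumed, and the Type::Number/Type::String bitset factories are assumed from types.h.
  Type* combined = Type::Union(left_type, right_type, zone());
  if (combined->Is(Type::Number(zone()))) {
    // both operands are known to be numeric
  } else if (combined->Maybe(Type::String(zone()))) {
    // a string operand is still possible, so string handling must stay in the graph
  }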
Handle< String > name() const
Definition: variables.h:71
bool binding_needs_init() const
Definition: variables.h:103
VariableMode mode() const
Definition: variables.h:73
bool IsStackAllocated() const
Definition: variables.h:96
Location location() const
Definition: variables.h:124
bool is_this() const
Definition: variables.h:107
Scope * scope() const
Definition: variables.h:69
bool IsContextSlot() const
Definition: variables.h:97
bool IsUnallocated() const
Definition: variables.h:93
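Variable sketch: the location predicates above are usually consulted in this order; the Variable* named var is assumed.
  if (var->IsStackAllocated()) {
    // parameter or stack local: read it straight from the environment
  } else if (var->IsContextSlot()) {
    // context-allocated: walk the context chain starting from var->scope()
  } else if (var->IsUnallocated()) {
    // global or dynamically looked up by var->name()
  }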
Mangled flag-definition cross-references (Doxygen expansions of DEFINE_BOOL/DEFINE_STRING macros in src/flag-definitions.h, e.g. harmony_object_literals, enable_always_align_csp, raw_context_file); the concatenated help strings are not reproduced here.
#define COMMA
#define STRING_TYPE(NAME, size, name, Name)
#define DEFINE_GET_CONSTANT(Name, name, type, htype, boolean_value)
Definition: hydrogen.cc:690
#define INLINE_FUNCTION_GENERATOR_ADDRESS(Name, argc, ressize)
Definition: hydrogen.cc:9386
#define CHECK_ALIVE(call)
Definition: hydrogen.cc:4198
#define CHECK_BAILOUT(call)
Definition: hydrogen.cc:4191
#define CHECK_ALIVE_OR_RETURN(call, value)
Definition: hydrogen.cc:4205
#define DEFINE_IS_CONSTANT(Name, name)
Definition: hydrogen.cc:719
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)
#define ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, value)
Definition: isolate.h:123
#define UNREACHABLE()
Definition: logging.h:30
#define CHECK(condition)
Definition: logging.h:36
#define DCHECK_NE(v1, v2)
Definition: logging.h:207
#define DCHECK(condition)
Definition: logging.h:205
#define DCHECK_EQ(v1, v2)
Definition: logging.h:206
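A hedged sketch of the checking macros above: CHECK fires in all builds, the DCHECK variants only in debug builds.
  int Midpoint(int lo, int hi) {
    DCHECK(lo <= hi);               // debug-only precondition
    int mid = lo + (hi - lo) / 2;
    CHECK(lo <= mid && mid <= hi);  // verified in release builds as well
    return mid;
  }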
#define STORE(Type)
#define LOAD(Type)
void USE(T)
Definition: macros.h:322
#define STATIC_ASSERT(test)
Definition: macros.h:311
#define arraysize(array)
Definition: macros.h:86
int int32_t
Definition: unicode.cc:24
static int Push(SpecialRPOStackFrame *stack, int depth, BasicBlock *child, int unvisited)
Definition: scheduler.cc:773
static void Trace(const char *msg,...)
Definition: scheduler.cc:21
Matcher< Node * > IsLoad(const Matcher< LoadRepresentation > &rep_matcher, const Matcher< Node * > &base_matcher, const Matcher< Node * > &index_matcher, const Matcher< Node * > &effect_matcher)
const int kPointerSize
Definition: globals.h:129
bool IsFastHoleyElementsKind(ElementsKind kind)
const uint32_t kStringEncodingMask
Definition: objects.h:555
@ STRING_ADD_CHECK_NONE
Definition: code-stubs.h:1212
@ TRACK_ALLOCATION_SITE
Definition: objects.h:8085
@ DONT_TRACK_ALLOCATION_SITE
Definition: objects.h:8084
@ kSeqStringTag
Definition: objects.h:563
bool IsExternalArrayElementsKind(ElementsKind kind)
Definition: elements-kind.h:95
PerThreadAssertScopeDebugOnly< HEAP_ALLOCATION_ASSERT, false > DisallowHeapAllocation
Definition: assert-scope.h:110
@ ARGUMENTS_ADAPTOR
Definition: hydrogen.h:546
static bool IsFastLiteral(Handle< JSObject > boilerplate, int max_depth, int *max_properties)
Definition: hydrogen.cc:5475
TypeImpl< ZoneTypeConfig > Type
static LifetimePosition Min(LifetimePosition a, LifetimePosition b)
static bool IsClassOfTest(CompareOperation *expr)
Definition: hydrogen.cc:10478
bool IsLexicalVariableMode(VariableMode mode)
Definition: globals.h:710
bool IsFastSmiOrObjectElementsKind(ElementsKind kind)
static bool AreStringTypes(SmallMapList *types)
Definition: hydrogen.cc:6414
bool IsDeclaredVariableMode(VariableMode mode)
Definition: globals.h:705
bool IsMoreGeneralElementsKindTransition(ElementsKind from_kind, ElementsKind to_kind)
const int kDoubleSize
Definition: globals.h:127
ArgumentsAllowedFlag
Definition: hydrogen.h:751
@ ARGUMENTS_ALLOWED
Definition: hydrogen.h:753
@ ARGUMENTS_NOT_ALLOWED
Definition: hydrogen.h:752
static bool IsAllocationInlineable(Handle< JSFunction > constructor)
Definition: hydrogen.cc:9209
KeyedAccessStoreMode
Definition: objects.h:153
@ STORE_AND_GROW_NO_TRANSITION
Definition: objects.h:161
@ STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS
Definition: objects.h:168
@ STORE_NO_TRANSITION_HANDLE_COW
Definition: objects.h:169
@ STANDARD_STORE
Definition: objects.h:154
bool IsHoleyElementsKind(ElementsKind kind)
OStream & endl(OStream &os)
Definition: ostreams.cc:112
static bool CanInlinePropertyAccess(Type *type)
Definition: hydrogen.cc:5462
PerThreadAssertScopeDebugOnly< HANDLE_DEREFERENCE_ASSERT, true > AllowHandleDereference
Definition: assert-scope.h:122
@ JS_REGEXP_TYPE
Definition: objects.h:748
@ JS_VALUE_TYPE
Definition: objects.h:728
@ FIXED_DOUBLE_ARRAY_TYPE
Definition: objects.h:692
@ JS_ARRAY_TYPE
Definition: objects.h:738
@ FIXED_ARRAY_TYPE
Definition: objects.h:717
@ JS_OBJECT_TYPE
Definition: objects.h:731
@ CONS_ONE_BYTE_STRING_TYPE
Definition: objects.h:636
@ FIRST_NONSTRING_TYPE
Definition: objects.h:758
@ FIRST_SPEC_OBJECT_TYPE
Definition: objects.h:781
@ LAST_SPEC_OBJECT_TYPE
Definition: objects.h:782
@ MUTABLE_HEAP_NUMBER_TYPE
Definition: objects.h:670
@ LAST_UNIQUE_NAME_TYPE
Definition: objects.h:757
@ LAST_JS_OBJECT_TYPE
Definition: objects.h:776
@ JS_FUNCTION_TYPE
Definition: objects.h:749
@ JS_GLOBAL_OBJECT_TYPE
Definition: objects.h:735
@ CONS_STRING_TYPE
Definition: objects.h:635
@ JS_BUILTINS_OBJECT_TYPE
Definition: objects.h:736
@ UINT8_CLAMPED_ELEMENTS
Definition: elements-kind.h:52
@ FAST_HOLEY_DOUBLE_ELEMENTS
Definition: elements-kind.h:27
@ SLOPPY_ARGUMENTS_ELEMENTS
Definition: elements-kind.h:31
@ EXTERNAL_INT8_ELEMENTS
Definition: elements-kind.h:33
@ FAST_HOLEY_SMI_ELEMENTS
Definition: elements-kind.h:17
@ EXTERNAL_UINT32_ELEMENTS
Definition: elements-kind.h:38
@ EXTERNAL_UINT8_CLAMPED_ELEMENTS
Definition: elements-kind.h:41
bool IsSimpleMapChangeTransition(ElementsKind from_kind, ElementsKind to_kind)
bool IsFastDoubleElementsKind(ElementsKind kind)
Handle< T > handle(T *t, Isolate *isolate)
Definition: handles.h:146
const uint32_t kOneByteStringTag
Definition: objects.h:557
int ElementsKindToShiftSize(ElementsKind elements_kind)
int AppendChars(const char *filename, const char *str, int size, bool verbose)
Definition: utils.cc:273
const intptr_t kObjectAlignmentMask
Definition: globals.h:227
bool CanBeZero(HValue *right)
Definition: hydrogen.cc:10159
static bool IsGrowStoreMode(KeyedAccessStoreMode store_mode)
Definition: objects.h:228
const intptr_t kObjectAlignment
Definition: globals.h:226
bool IsFastPackedElementsKind(ElementsKind kind)
bool operator<(const Handle< Map > &lhs, const Handle< Map > &rhs)
Definition: handles.h:159
OStream & operator<<(OStream &os, const BasicBlockProfiler &p)
@ CONST_LEGACY
Definition: globals.h:671
ElementsKind GetHoleyElementsKind(ElementsKind packed_kind)
AllocationSiteOverrideMode
Definition: code-stubs.h:716
@ DISABLE_ALLOCATION_SITES
Definition: code-stubs.h:718
const uint32_t kStringRepresentationMask
Definition: objects.h:561
static const int kNotInlinable
Definition: hydrogen.cc:7709
bool IsFastElementsKind(ElementsKind kind)
NilValue
Definition: v8.h:97
@ kNullValue
Definition: v8.h:97
@ kUndefinedValue
Definition: v8.h:97
void PrintF(const char *format,...)
Definition: utils.cc:80
bool IsDictionaryElementsKind(ElementsKind kind)
Definition: elements-kind.h:85
const uint32_t kOneByteDataHintMask
Definition: objects.h:584
static bool ComputeReceiverTypes(Expression *expr, HValue *receiver, SmallMapList **t, Zone *zone)
Definition: hydrogen.cc:6397
kFeedbackVectorOffset flag
Definition: objects-inl.h:5418
static bool NeedsWrappingFor(Type *type, Handle< JSFunction > target)
Definition: hydrogen.cc:6165
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
const uint32_t kIsNotInternalizedMask
Definition: objects.h:549
const uint32_t kOneByteDataHintTag
Definition: objects.h:585
bool IsFixedTypedArrayElementsKind(ElementsKind kind)
static bool ShiftAmountsAllowReplaceByRotate(HValue *sa, HValue *const32_minus_sa)
Definition: hydrogen.cc:10114
static Handle< Map > TypedArrayMap(Isolate *isolate, ExternalArrayType array_type, ElementsKind target_kind)
Definition: hydrogen.cc:9470
static bool IsLiteralCompareBool(Isolate *isolate, HValue *left, Token::Value op, HValue *right)
Definition: hydrogen.cc:10651
bool IsFastSmiElementsKind(ElementsKind kind)
@ NO_CALL_FUNCTION_FLAGS
Definition: globals.h:469
@ WRAP_AND_CALL
Definition: globals.h:473
@ CALL_AS_METHOD
Definition: globals.h:470
ElementsKind GetInitialFastElementsKind()
Definition: elements-kind.h:78
bool IsFastObjectElementsKind(ElementsKind kind)
PerThreadAssertScopeDebugOnly< DEFERRED_HANDLE_DEREFERENCE_ASSERT, true > AllowDeferredHandleDereference
Definition: assert-scope.h:130
Debugger support for the V8 JavaScript engine.
Definition: accessors.cc:20
@ None
Definition: v8.h:2211
@ Break
Definition: v8-debug.h:17
Handle< Boolean > False(Isolate *isolate)
Definition: v8.h:6863
ExternalArrayType
Definition: v8.h:2217
@ kExternalInt8Array
Definition: v8.h:2218
Handle< Boolean > True(Isolate *isolate)
Definition: v8.h:6854
Handle< Primitive > Null(Isolate *isolate)
Definition: v8.h:6845
static Handle< Value > Throw(Isolate *isolate, const char *message)
Definition: d8.cc:72
Handle< Primitive > Undefined(Isolate *isolate)
Definition: v8.h:6836
#define TYPED_ARRAYS(V)
Definition: objects.h:4433
#define VOID
#define IN
@ NONE
#define INLINE_OPTIMIZED_FUNCTION_LIST(F)
Definition: runtime.h:708
#define INLINE_FUNCTION_LIST(F)
Definition: runtime.h:659
A simple Maybe type, representing an object which may or may not have a value.
Definition: v8.h:890
T value
Definition: v8.h:896
bool has_value
Definition: v8.h:895
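Maybe<T> sketch: a hypothetical helper that returns an absent value when the index is out of range (NthPrime and the prime table are illustrative only).
  Maybe<int> NthPrime(int n) {
    static const int primes[] = {2, 3, 5, 7, 11};
    if (n < 0 || n >= static_cast<int>(arraysize(primes))) return Maybe<int>();
    return Maybe<int>(primes[n]);
  }
  // Caller checks has_value before reading value.
  Maybe<int> p = NthPrime(3);
  if (p.has_value) PrintF("prime: %d\n", p.value);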
static const char * AllocationIndexToString(int index)
static const char * AllocationIndexToString(int index)
Tag
Definition: types.h:643
#define STATIC_CHAR_VECTOR(x)
Definition: vector.h:154