lithium-allocator.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/v8.h"
6 
7 #include "src/hydrogen.h"
8 #include "src/lithium-inl.h"
9 #include "src/lithium-allocator-inl.h"
10 #include "src/string-stream.h"
11 
12 namespace v8 {
13 namespace internal {
14 
15 static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
16  return a.Value() < b.Value() ? a : b;
17 }
18 
19 
20 static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) {
21  return a.Value() > b.Value() ? a : b;
22 }
23 
24 
25 UsePosition::UsePosition(LifetimePosition pos,
26  LOperand* operand,
27  LOperand* hint)
28  : operand_(operand),
29  hint_(hint),
30  pos_(pos),
31  next_(NULL),
32  requires_reg_(false),
33  register_beneficial_(true) {
34  if (operand_ != NULL && operand_->IsUnallocated()) {
35  LUnallocated* unalloc = LUnallocated::cast(operand_);
36  requires_reg_ = unalloc->HasRegisterPolicy() ||
37  unalloc->HasDoubleRegisterPolicy();
38  register_beneficial_ = !unalloc->HasAnyPolicy();
39  }
40  DCHECK(pos_.IsValid());
41 }
42 
43 
44 bool UsePosition::HasHint() const {
45  return hint_ != NULL && !hint_->IsUnallocated();
46 }
47 
48 
49 bool UsePosition::RequiresRegister() const {
50  return requires_reg_;
51 }
52 
53 
54 bool UsePosition::RegisterIsBeneficial() const {
55  return register_beneficial_;
56 }
57 
58 
59 void UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
60  DCHECK(Contains(pos) && pos.Value() != start().Value());
61  UseInterval* after = new(zone) UseInterval(pos, end_);
62  after->next_ = next_;
63  next_ = after;
64  end_ = pos;
65 }
66 
67 
68 #ifdef DEBUG
69 
70 
71 void LiveRange::Verify() const {
72  UsePosition* cur = first_pos_;
73  while (cur != NULL) {
74  DCHECK(Start().Value() <= cur->pos().Value() &&
75  cur->pos().Value() <= End().Value());
76  cur = cur->next();
77  }
78 }
79 
80 
81 bool LiveRange::HasOverlap(UseInterval* target) const {
82  UseInterval* current_interval = first_interval_;
83  while (current_interval != NULL) {
84  // Intervals overlap if the start of one is contained in the other.
85  if (current_interval->Contains(target->start()) ||
86  target->Contains(current_interval->start())) {
87  return true;
88  }
89  current_interval = current_interval->next();
90  }
91  return false;
92 }
93 
94 
95 #endif
96 
97 
98 LiveRange::LiveRange(int id, Zone* zone)
99  : id_(id),
100  spilled_(false),
101  kind_(UNALLOCATED_REGISTERS),
102  assigned_register_(kInvalidAssignment),
103  last_interval_(NULL),
104  first_interval_(NULL),
105  first_pos_(NULL),
106  parent_(NULL),
107  next_(NULL),
108  current_interval_(NULL),
109  last_processed_use_(NULL),
110  current_hint_operand_(NULL),
111  spill_operand_(new (zone) LOperand()),
112  spill_start_index_(kMaxInt) {}
113 
114 
115 void LiveRange::set_assigned_register(int reg, Zone* zone) {
116  DCHECK(!HasRegisterAssigned() && !IsSpilled());
117  assigned_register_ = reg;
118  ConvertOperands(zone);
119 }
120 
121 
122 void LiveRange::MakeSpilled(Zone* zone) {
123  DCHECK(!IsSpilled());
124  DCHECK(TopLevel()->HasAllocatedSpillOperand());
125  spilled_ = true;
126  assigned_register_ = kInvalidAssignment;
127  ConvertOperands(zone);
128 }
129 
130 
131 bool LiveRange::HasAllocatedSpillOperand() const {
132  DCHECK(spill_operand_ != NULL);
133  return !spill_operand_->IsIgnored();
134 }
135 
136 
137 void LiveRange::SetSpillOperand(LOperand* operand) {
138  DCHECK(!operand->IsUnallocated());
139  DCHECK(spill_operand_ != NULL);
140  DCHECK(spill_operand_->IsIgnored());
141  spill_operand_->ConvertTo(operand->kind(), operand->index());
142 }
143 
144 
145 UsePosition* LiveRange::NextUsePosition(LifetimePosition start) {
146  UsePosition* use_pos = last_processed_use_;
147  if (use_pos == NULL) use_pos = first_pos();
148  while (use_pos != NULL && use_pos->pos().Value() < start.Value()) {
149  use_pos = use_pos->next();
150  }
151  last_processed_use_ = use_pos;
152  return use_pos;
153 }
154 
155 
156 UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial(
157  LifetimePosition start) {
158  UsePosition* pos = NextUsePosition(start);
159  while (pos != NULL && !pos->RegisterIsBeneficial()) {
160  pos = pos->next();
161  }
162  return pos;
163 }
164 
165 
166 UsePosition* LiveRange::PreviousUsePositionRegisterIsBeneficial(
167  LifetimePosition start) {
168  UsePosition* pos = first_pos();
169  UsePosition* prev = NULL;
170  while (pos != NULL && pos->pos().Value() < start.Value()) {
171  if (pos->RegisterIsBeneficial()) prev = pos;
172  pos = pos->next();
173  }
174  return prev;
175 }
176 
177 
178 UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) {
179  UsePosition* pos = NextUsePosition(start);
180  while (pos != NULL && !pos->RequiresRegister()) {
181  pos = pos->next();
182  }
183  return pos;
184 }
185 
186 
187 bool LiveRange::CanBeSpilled(LifetimePosition pos) {
188  // We cannot spill a live range that has a use requiring a register
189  // at the current or the immediate next position.
190  UsePosition* use_pos = NextRegisterPosition(pos);
191  if (use_pos == NULL) return true;
192  return
193  use_pos->pos().Value() > pos.NextInstruction().InstructionEnd().Value();
194 }
195 
196 
197 LOperand* LiveRange::CreateAssignedOperand(Zone* zone) {
198  LOperand* op = NULL;
199  if (HasRegisterAssigned()) {
200  DCHECK(!IsSpilled());
201  switch (Kind()) {
202  case GENERAL_REGISTERS:
203  op = LRegister::Create(assigned_register(), zone);
204  break;
205  case DOUBLE_REGISTERS:
206  op = LDoubleRegister::Create(assigned_register(), zone);
207  break;
208  default:
209  UNREACHABLE();
210  }
211  } else if (IsSpilled()) {
212  DCHECK(!HasRegisterAssigned());
213  op = TopLevel()->GetSpillOperand();
214  DCHECK(!op->IsUnallocated());
215  } else {
216  LUnallocated* unalloc = new(zone) LUnallocated(LUnallocated::NONE);
217  unalloc->set_virtual_register(id_);
218  op = unalloc;
219  }
220  return op;
221 }
222 
223 
224 UseInterval* LiveRange::FirstSearchIntervalForPosition(
225  LifetimePosition position) const {
226  if (current_interval_ == NULL) return first_interval_;
227  if (current_interval_->start().Value() > position.Value()) {
228  current_interval_ = NULL;
229  return first_interval_;
230  }
231  return current_interval_;
232 }
233 
234 
235 void LiveRange::AdvanceLastProcessedMarker(
236  UseInterval* to_start_of, LifetimePosition but_not_past) const {
237  if (to_start_of == NULL) return;
238  if (to_start_of->start().Value() > but_not_past.Value()) return;
239  LifetimePosition start =
240  current_interval_ == NULL ? LifetimePosition::Invalid()
241  : current_interval_->start();
242  if (to_start_of->start().Value() > start.Value()) {
243  current_interval_ = to_start_of;
244  }
245 }
246 
247 
248 void LiveRange::SplitAt(LifetimePosition position,
249  LiveRange* result,
250  Zone* zone) {
251  DCHECK(Start().Value() < position.Value());
252  DCHECK(result->IsEmpty());
253  // Find the last interval that ends before the position. If the
254  // position is contained in one of the intervals in the chain, we
255  // split that interval and use the first part.
256  UseInterval* current = FirstSearchIntervalForPosition(position);
257 
258  // If the split position coincides with the beginning of a use interval
259  // we need to split use positions in a special way.
260  bool split_at_start = false;
261 
262  if (current->start().Value() == position.Value()) {
263  // When splitting at start we need to locate the previous use interval.
264  current = first_interval_;
265  }
266 
267  while (current != NULL) {
268  if (current->Contains(position)) {
269  current->SplitAt(position, zone);
270  break;
271  }
272  UseInterval* next = current->next();
273  if (next->start().Value() >= position.Value()) {
274  split_at_start = (next->start().Value() == position.Value());
275  break;
276  }
277  current = next;
278  }
279 
280  // Partition original use intervals to the two live ranges.
281  UseInterval* before = current;
282  UseInterval* after = before->next();
283  result->last_interval_ = (last_interval_ == before)
284  ? after // Only interval in the range after split.
285  : last_interval_; // Last interval of the original range.
286  result->first_interval_ = after;
287  last_interval_ = before;
288 
289  // Find the last use position before the split and the first use
290  // position after it.
291  UsePosition* use_after = first_pos_;
292  UsePosition* use_before = NULL;
293  if (split_at_start) {
294  // The split position coincides with the beginning of a use interval (the
295  // end of a lifetime hole). A use at this position should be attributed to
296  // the split child because the split child owns the use interval covering it.
297  while (use_after != NULL && use_after->pos().Value() < position.Value()) {
298  use_before = use_after;
299  use_after = use_after->next();
300  }
301  } else {
302  while (use_after != NULL && use_after->pos().Value() <= position.Value()) {
303  use_before = use_after;
304  use_after = use_after->next();
305  }
306  }
307 
308  // Partition original use positions to the two live ranges.
309  if (use_before != NULL) {
310  use_before->next_ = NULL;
311  } else {
312  first_pos_ = NULL;
313  }
314  result->first_pos_ = use_after;
315 
316  // Discard cached iteration state. It might be pointing
317  // to the use that no longer belongs to this live range.
318  last_processed_use_ = NULL;
319  current_interval_ = NULL;
320 
321  // Link the new live range in the chain before any of the other
322  // ranges linked from the range before the split.
323  result->parent_ = (parent_ == NULL) ? this : parent_;
324  result->kind_ = result->parent_->kind_;
325  result->next_ = next_;
326  next_ = result;
327 
328 #ifdef DEBUG
329  Verify();
330  result->Verify();
331 #endif
332 }
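To make the interval partitioning above concrete, here is a small standalone sketch; it is not part of lithium-allocator.cc, and Interval/SplitChainAt are simplified stand-ins for UseInterval and the first half of LiveRange::SplitAt, shown only to illustrate how a sorted chain of half-open intervals is cut at a position.

#include <cstdio>

// Simplified stand-in for UseInterval; illustrative only.
struct Interval {
  int start, end;            // half-open [start, end)
  Interval* next;
  bool Contains(int pos) const { return start <= pos && pos < end; }
};

// Cuts the sorted chain 'first' at 'pos'. An interval straddling 'pos' is cut
// in two (the upper half is placed in '*spare'). Returns the chain starting at
// or after 'pos'; 'first' keeps the part strictly before it.
// Precondition (as in LiveRange::SplitAt): 'pos' lies after the chain's start.
Interval* SplitChainAt(Interval* first, int pos, Interval* spare) {
  Interval* current = first;
  while (current != nullptr) {
    if (current->Contains(pos) && current->start != pos) {
      *spare = Interval{pos, current->end, current->next};
      current->end = pos;
      current->next = spare;
      break;
    }
    if (current->next == nullptr || current->next->start >= pos) break;
    current = current->next;
  }
  Interval* after = current->next;
  current->next = nullptr;   // 'current' is now the last interval before 'pos'.
  return after;
}

int main() {
  Interval c = {14, 20, nullptr}, b = {6, 10, &c}, a = {0, 4, &b}, spare;
  Interval* after = SplitChainAt(&a, 8, &spare);
  // Before: [0,4) [6,10) [14,20)   After split at 8: [0,4) [6,8) | [8,10) [14,20)
  for (Interval* i = &a; i != nullptr; i = i->next) std::printf("[%d,%d) ", i->start, i->end);
  std::printf("| ");
  for (Interval* i = after; i != nullptr; i = i->next) std::printf("[%d,%d) ", i->start, i->end);
  std::printf("\n");
}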
333 
334 
335 // This implements an ordering on live ranges so that they are ordered by their
336 // start positions. This is needed for the correctness of the register
337 // allocation algorithm. If two live ranges start at the same offset then there
338 // is a tie breaker based on where the value is first used. This part of the
339 // ordering is merely a heuristic.
340 bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
341  LifetimePosition start = Start();
342  LifetimePosition other_start = other->Start();
343  if (start.Value() == other_start.Value()) {
344  UsePosition* pos = first_pos();
345  if (pos == NULL) return false;
346  UsePosition* other_pos = other->first_pos();
347  if (other_pos == NULL) return true;
348  return pos->pos().Value() < other_pos->pos().Value();
349  }
350  return start.Value() < other_start.Value();
351 }
352 
353 
354 void LiveRange::ShortenTo(LifetimePosition start) {
355  LAllocator::TraceAlloc("Shorten live range %d to [%d\n", id_, start.Value());
356  DCHECK(first_interval_ != NULL);
357  DCHECK(first_interval_->start().Value() <= start.Value());
358  DCHECK(start.Value() < first_interval_->end().Value());
359  first_interval_->set_start(start);
360 }
361 
362 
363 void LiveRange::EnsureInterval(LifetimePosition start,
364  LifetimePosition end,
365  Zone* zone) {
366  LAllocator::TraceAlloc("Ensure live range %d in interval [%d %d[\n",
367  id_,
368  start.Value(),
369  end.Value());
370  LifetimePosition new_end = end;
371  while (first_interval_ != NULL &&
372  first_interval_->start().Value() <= end.Value()) {
373  if (first_interval_->end().Value() > end.Value()) {
374  new_end = first_interval_->end();
375  }
376  first_interval_ = first_interval_->next();
377  }
378 
379  UseInterval* new_interval = new(zone) UseInterval(start, new_end);
380  new_interval->next_ = first_interval_;
381  first_interval_ = new_interval;
382  if (new_interval->next() == NULL) {
383  last_interval_ = new_interval;
384  }
385 }
386 
387 
388 void LiveRange::AddUseInterval(LifetimePosition start,
389  LifetimePosition end,
390  Zone* zone) {
391  LAllocator::TraceAlloc("Add to live range %d interval [%d %d[\n",
392  id_,
393  start.Value(),
394  end.Value());
395  if (first_interval_ == NULL) {
396  UseInterval* interval = new(zone) UseInterval(start, end);
397  first_interval_ = interval;
398  last_interval_ = interval;
399  } else {
400  if (end.Value() == first_interval_->start().Value()) {
401  first_interval_->set_start(start);
402  } else if (end.Value() < first_interval_->start().Value()) {
403  UseInterval* interval = new(zone) UseInterval(start, end);
404  interval->set_next(first_interval_);
405  first_interval_ = interval;
406  } else {
407  // The order of instruction processing (see ProcessInstructions) guarantees
408  // that each new use interval either precedes or intersects with the
409  // last added interval.
410  DCHECK(start.Value() < first_interval_->end().Value());
411  first_interval_->set_start(Min(start, first_interval_->start()));
412  first_interval_->set_end(Max(end, first_interval_->end()));
413  }
414  }
415 }
416 
417 
418 void LiveRange::AddUsePosition(LifetimePosition pos,
419  LOperand* operand,
420  LOperand* hint,
421  Zone* zone) {
422  LAllocator::TraceAlloc("Add to live range %d use position %d\n",
423  id_,
424  pos.Value());
425  UsePosition* use_pos = new(zone) UsePosition(pos, operand, hint);
426  UsePosition* prev_hint = NULL;
427  UsePosition* prev = NULL;
428  UsePosition* current = first_pos_;
429  while (current != NULL && current->pos().Value() < pos.Value()) {
430  prev_hint = current->HasHint() ? current : prev_hint;
431  prev = current;
432  current = current->next();
433  }
434 
435  if (prev == NULL) {
436  use_pos->set_next(first_pos_);
437  first_pos_ = use_pos;
438  } else {
439  use_pos->next_ = prev->next_;
440  prev->next_ = use_pos;
441  }
442 
443  if (prev_hint == NULL && use_pos->HasHint()) {
444  current_hint_operand_ = hint;
445  }
446 }
447 
448 
449 void LiveRange::ConvertOperands(Zone* zone) {
450  LOperand* op = CreateAssignedOperand(zone);
451  UsePosition* use_pos = first_pos();
452  while (use_pos != NULL) {
453  DCHECK(Start().Value() <= use_pos->pos().Value() &&
454  use_pos->pos().Value() <= End().Value());
455 
456  if (use_pos->HasOperand()) {
457  DCHECK(op->IsRegister() || op->IsDoubleRegister() ||
458  !use_pos->RequiresRegister());
459  use_pos->operand()->ConvertTo(op->kind(), op->index());
460  }
461  use_pos = use_pos->next();
462  }
463 }
464 
465 
466 bool LiveRange::CanCover(LifetimePosition position) const {
467  if (IsEmpty()) return false;
468  return Start().Value() <= position.Value() &&
469  position.Value() < End().Value();
470 }
471 
472 
473 bool LiveRange::Covers(LifetimePosition position) {
474  if (!CanCover(position)) return false;
475  UseInterval* start_search = FirstSearchIntervalForPosition(position);
476  for (UseInterval* interval = start_search;
477  interval != NULL;
478  interval = interval->next()) {
479  DCHECK(interval->next() == NULL ||
480  interval->next()->start().Value() >= interval->start().Value());
481  AdvanceLastProcessedMarker(interval, position);
482  if (interval->Contains(position)) return true;
483  if (interval->start().Value() > position.Value()) return false;
484  }
485  return false;
486 }
487 
488 
489 LifetimePosition LiveRange::FirstIntersection(LiveRange* other) {
490  UseInterval* b = other->first_interval();
491  if (b == NULL) return LifetimePosition::Invalid();
492  LifetimePosition advance_last_processed_up_to = b->start();
493  UseInterval* a = FirstSearchIntervalForPosition(b->start());
494  while (a != NULL && b != NULL) {
495  if (a->start().Value() > other->End().Value()) break;
496  if (b->start().Value() > End().Value()) break;
497  LifetimePosition cur_intersection = a->Intersect(b);
498  if (cur_intersection.IsValid()) {
499  return cur_intersection;
500  }
501  if (a->start().Value() < b->start().Value()) {
502  a = a->next();
503  if (a == NULL || a->start().Value() > other->End().Value()) break;
504  AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
505  } else {
506  b = b->next();
507  }
508  }
509  return LifetimePosition::Invalid();
510 }
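The two-pointer walk in FirstIntersection can be illustrated with a compact standalone sketch; it is not V8 code, and Span/FirstIntersection below are simplified stand-ins operating on plain vectors instead of UseInterval chains.

#include <algorithm>
#include <vector>

struct Span { int start, end; };   // half-open [start, end)

// Returns the first position where two chains, each sorted by start, overlap,
// or -1 when they never do; mirrors the a/b advance in the code above.
int FirstIntersection(const std::vector<Span>& a, const std::vector<Span>& b) {
  size_t i = 0, j = 0;
  while (i < a.size() && j < b.size()) {
    int lo = std::max(a[i].start, b[j].start);
    int hi = std::min(a[i].end, b[j].end);
    if (lo < hi) return lo;                        // overlap found
    if (a[i].start < b[j].start) ++i; else ++j;    // advance the earlier chain
  }
  return -1;
}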
511 
512 
513 LAllocator::LAllocator(int num_values, HGraph* graph)
514  : zone_(graph->isolate()),
515  chunk_(NULL),
516  live_in_sets_(graph->blocks()->length(), zone()),
517  live_ranges_(num_values * 2, zone()),
518  fixed_live_ranges_(NULL),
519  fixed_double_live_ranges_(NULL),
520  unhandled_live_ranges_(num_values * 2, zone()),
521  active_live_ranges_(8, zone()),
522  inactive_live_ranges_(8, zone()),
523  reusable_slots_(8, zone()),
524  next_virtual_register_(num_values),
525  first_artificial_register_(num_values),
526  mode_(UNALLOCATED_REGISTERS),
527  num_registers_(-1),
528  graph_(graph),
529  has_osr_entry_(false),
530  allocation_ok_(true) {}
531 
532 
533 void LAllocator::InitializeLivenessAnalysis() {
534  // Initialize the live_in sets for each block to NULL.
535  int block_count = graph_->blocks()->length();
536  live_in_sets_.Initialize(block_count, zone());
537  live_in_sets_.AddBlock(NULL, block_count, zone());
538 }
539 
540 
541 BitVector* LAllocator::ComputeLiveOut(HBasicBlock* block) {
542  // Compute live out for the given block, except not including backward
543  // successor edges.
544  BitVector* live_out = new(zone()) BitVector(next_virtual_register_, zone());
545 
546  // Process all successor blocks.
547  for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) {
548  // Add values live on entry to the successor. Note the successor's
549  // live_in will not be computed yet for backwards edges.
550  HBasicBlock* successor = it.Current();
551  BitVector* live_in = live_in_sets_[successor->block_id()];
552  if (live_in != NULL) live_out->Union(*live_in);
553 
554  // All phi input operands corresponding to this successor edge are live
555  // out from this block.
556  int index = successor->PredecessorIndexOf(block);
557  const ZoneList<HPhi*>* phis = successor->phis();
558  for (int i = 0; i < phis->length(); ++i) {
559  HPhi* phi = phis->at(i);
560  if (!phi->OperandAt(index)->IsConstant()) {
561  live_out->Add(phi->OperandAt(index)->id());
562  }
563  }
564  }
565 
566  return live_out;
567 }
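The dataflow step performed by ComputeLiveOut can be sketched independently of the Hydrogen classes. The sketch below is an assumption-laden simplification, not V8 code: std::bitset stands in for BitVector, and Block/Phi stand in for HBasicBlock/HPhi.

#include <bitset>
#include <vector>

const int kMaxVregs = 64;
typedef std::bitset<kMaxVregs> LiveSet;

struct Phi { std::vector<int> operand_vregs; };   // one input vreg per predecessor
struct Block {
  std::vector<int> predecessors;                  // indices into 'blocks'
  std::vector<int> successors;
  std::vector<Phi> phis;
  LiveSet live_in;                                // still empty on backward edges
};

int PredecessorIndexOf(const Block& b, int pred) {
  for (int i = 0; i < static_cast<int>(b.predecessors.size()); ++i)
    if (b.predecessors[i] == pred) return i;
  return -1;
}

// live_out(B) = union over successors S of live_in(S), plus the phi operands
// in S that flow in along the edge B -> S (selected by the predecessor index).
LiveSet ComputeLiveOut(const std::vector<Block>& blocks, int block_index) {
  LiveSet live_out;
  for (int s : blocks[block_index].successors) {
    const Block& succ = blocks[s];
    live_out |= succ.live_in;
    int index = PredecessorIndexOf(succ, block_index);
    if (index < 0) continue;                      // not expected in a well-formed CFG
    for (const Phi& phi : succ.phis) live_out.set(phi.operand_vregs[index]);
  }
  return live_out;
}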
568 
569 
570 void LAllocator::AddInitialIntervals(HBasicBlock* block,
571  BitVector* live_out) {
572  // Add an interval that includes the entire block to the live range for
573  // each live_out value.
574  LifetimePosition start = LifetimePosition::FromInstructionIndex(
575  block->first_instruction_index());
576  LifetimePosition end = LifetimePosition::FromInstructionIndex(
577  block->last_instruction_index()).NextInstruction();
578  BitVector::Iterator iterator(live_out);
579  while (!iterator.Done()) {
580  int operand_index = iterator.Current();
581  LiveRange* range = LiveRangeFor(operand_index);
582  range->AddUseInterval(start, end, zone());
583  iterator.Advance();
584  }
585 }
586 
587 
588 int LAllocator::FixedDoubleLiveRangeID(int index) {
589  return -index - 1 - Register::kMaxNumAllocatableRegisters;
590 }
591 
592 
593 LOperand* LAllocator::AllocateFixed(LUnallocated* operand,
594  int pos,
595  bool is_tagged) {
596  TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register());
597  DCHECK(operand->HasFixedPolicy());
598  if (operand->HasFixedSlotPolicy()) {
599  operand->ConvertTo(LOperand::STACK_SLOT, operand->fixed_slot_index());
600  } else if (operand->HasFixedRegisterPolicy()) {
601  int reg_index = operand->fixed_register_index();
602  operand->ConvertTo(LOperand::REGISTER, reg_index);
603  } else if (operand->HasFixedDoubleRegisterPolicy()) {
604  int reg_index = operand->fixed_register_index();
605  operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index);
606  } else {
607  UNREACHABLE();
608  }
609  if (is_tagged) {
610  TraceAlloc("Fixed reg is tagged at %d\n", pos);
611  LInstruction* instr = InstructionAt(pos);
612  if (instr->HasPointerMap()) {
613  instr->pointer_map()->RecordPointer(operand, chunk()->zone());
614  }
615  }
616  return operand;
617 }
618 
619 
620 LiveRange* LAllocator::FixedLiveRangeFor(int index) {
621  DCHECK(index < Register::kMaxNumAllocatableRegisters);
622  LiveRange* result = fixed_live_ranges_[index];
623  if (result == NULL) {
624  result = new(zone()) LiveRange(FixedLiveRangeID(index), chunk()->zone());
625  DCHECK(result->IsFixed());
626  result->kind_ = GENERAL_REGISTERS;
627  SetLiveRangeAssignedRegister(result, index);
628  fixed_live_ranges_[index] = result;
629  }
630  return result;
631 }
632 
633 
634 LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) {
635  DCHECK(index < DoubleRegister::NumAllocatableRegisters());
636  LiveRange* result = fixed_double_live_ranges_[index];
637  if (result == NULL) {
638  result = new(zone()) LiveRange(FixedDoubleLiveRangeID(index),
639  chunk()->zone());
640  DCHECK(result->IsFixed());
641  result->kind_ = DOUBLE_REGISTERS;
642  SetLiveRangeAssignedRegister(result, index);
643  fixed_double_live_ranges_[index] = result;
644  }
645  return result;
646 }
647 
648 
649 LiveRange* LAllocator::LiveRangeFor(int index) {
650  if (index >= live_ranges_.length()) {
651  live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1, zone());
652  }
653  LiveRange* result = live_ranges_[index];
654  if (result == NULL) {
655  result = new(zone()) LiveRange(index, chunk()->zone());
656  live_ranges_[index] = result;
657  }
658  return result;
659 }
660 
661 
662 LGap* LAllocator::GetLastGap(HBasicBlock* block) {
663  int last_instruction = block->last_instruction_index();
664  int index = chunk_->NearestGapPos(last_instruction);
665  return GapAt(index);
666 }
667 
668 
669 HPhi* LAllocator::LookupPhi(LOperand* operand) const {
670  if (!operand->IsUnallocated()) return NULL;
671  int index = LUnallocated::cast(operand)->virtual_register();
672  HValue* instr = graph_->LookupValue(index);
673  if (instr != NULL && instr->IsPhi()) {
674  return HPhi::cast(instr);
675  }
676  return NULL;
677 }
678 
679 
680 LiveRange* LAllocator::LiveRangeFor(LOperand* operand) {
681  if (operand->IsUnallocated()) {
682  return LiveRangeFor(LUnallocated::cast(operand)->virtual_register());
683  } else if (operand->IsRegister()) {
684  return FixedLiveRangeFor(operand->index());
685  } else if (operand->IsDoubleRegister()) {
686  return FixedDoubleLiveRangeFor(operand->index());
687  } else {
688  return NULL;
689  }
690 }
691 
692 
693 void LAllocator::Define(LifetimePosition position,
694  LOperand* operand,
695  LOperand* hint) {
696  LiveRange* range = LiveRangeFor(operand);
697  if (range == NULL) return;
698 
699  if (range->IsEmpty() || range->Start().Value() > position.Value()) {
700  // Can happen if there is a definition without use.
701  range->AddUseInterval(position, position.NextInstruction(), zone());
702  range->AddUsePosition(position.NextInstruction(), NULL, NULL, zone());
703  } else {
704  range->ShortenTo(position);
705  }
706 
707  if (operand->IsUnallocated()) {
708  LUnallocated* unalloc_operand = LUnallocated::cast(operand);
709  range->AddUsePosition(position, unalloc_operand, hint, zone());
710  }
711 }
712 
713 
714 void LAllocator::Use(LifetimePosition block_start,
715  LifetimePosition position,
716  LOperand* operand,
717  LOperand* hint) {
718  LiveRange* range = LiveRangeFor(operand);
719  if (range == NULL) return;
720  if (operand->IsUnallocated()) {
721  LUnallocated* unalloc_operand = LUnallocated::cast(operand);
722  range->AddUsePosition(position, unalloc_operand, hint, zone());
723  }
724  range->AddUseInterval(block_start, position, zone());
725 }
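Define and Use together build live ranges while each block is walked backwards: a use provisionally extends the range back to the block start, and the definition, seen later in the walk, shortens it to the defining position. The following single-interval sketch is an illustration only, not V8 code; OneInterval is a hypothetical stand-in for a LiveRange restricted to one block.

#include <cstdio>

// Tracks one half-open [start, end) interval for one virtual register; the
// real LiveRange keeps a chain of intervals plus a list of use positions.
struct OneInterval {
  int start = -1, end = -1;              // -1, -1 = empty
  void Use(int block_start, int pos) {   // cf. LAllocator::Use
    if (start < 0) end = pos;            // first use seen while walking backwards
    start = block_start;                 // provisionally live from block entry
  }
  void Define(int pos) {                 // cf. LAllocator::Define
    if (start < 0) { start = pos; end = pos + 1; }  // definition without a use
    else start = pos;                                // cf. LiveRange::ShortenTo
  }
};

int main() {
  OneInterval v;
  // Walking a block [0, 10) backwards: a use at position 7, then its def at 3.
  v.Use(0, 7);      // range provisionally [0, 7)
  v.Define(3);      // shortened to [3, 7)
  std::printf("[%d, %d)\n", v.start, v.end);
}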
726 
727 
728 void LAllocator::AddConstraintsGapMove(int index,
729  LOperand* from,
730  LOperand* to) {
731  LGap* gap = GapAt(index);
732  LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
733  chunk()->zone());
734  if (from->IsUnallocated()) {
735  const ZoneList<LMoveOperands>* move_operands = move->move_operands();
736  for (int i = 0; i < move_operands->length(); ++i) {
737  LMoveOperands cur = move_operands->at(i);
738  LOperand* cur_to = cur.destination();
739  if (cur_to->IsUnallocated()) {
740  if (LUnallocated::cast(cur_to)->virtual_register() ==
741  LUnallocated::cast(from)->virtual_register()) {
742  move->AddMove(cur.source(), to, chunk()->zone());
743  return;
744  }
745  }
746  }
747  }
748  move->AddMove(from, to, chunk()->zone());
749 }
750 
751 
752 void LAllocator::MeetRegisterConstraints(HBasicBlock* block) {
753  int start = block->first_instruction_index();
754  int end = block->last_instruction_index();
755  if (start == -1) return;
756  for (int i = start; i <= end; ++i) {
757  if (IsGapAt(i)) {
758  LInstruction* instr = NULL;
759  LInstruction* prev_instr = NULL;
760  if (i < end) instr = InstructionAt(i + 1);
761  if (i > start) prev_instr = InstructionAt(i - 1);
762  MeetConstraintsBetween(prev_instr, instr, i);
763  if (!AllocationOk()) return;
764  }
765  }
766 }
767 
768 
769 void LAllocator::MeetConstraintsBetween(LInstruction* first,
770  LInstruction* second,
771  int gap_index) {
772  // Handle fixed temporaries.
773  if (first != NULL) {
774  for (TempIterator it(first); !it.Done(); it.Advance()) {
775  LUnallocated* temp = LUnallocated::cast(it.Current());
776  if (temp->HasFixedPolicy()) {
777  AllocateFixed(temp, gap_index - 1, false);
778  }
779  }
780  }
781 
782  // Handle fixed output operand.
783  if (first != NULL && first->Output() != NULL) {
784  LUnallocated* first_output = LUnallocated::cast(first->Output());
785  LiveRange* range = LiveRangeFor(first_output->virtual_register());
786  bool assigned = false;
787  if (first_output->HasFixedPolicy()) {
788  LUnallocated* output_copy = first_output->CopyUnconstrained(
789  chunk()->zone());
790  bool is_tagged = HasTaggedValue(first_output->virtual_register());
791  AllocateFixed(first_output, gap_index, is_tagged);
792 
793  // This value is produced on the stack, we never need to spill it.
794  if (first_output->IsStackSlot()) {
795  range->SetSpillOperand(first_output);
796  range->SetSpillStartIndex(gap_index - 1);
797  assigned = true;
798  }
799  chunk_->AddGapMove(gap_index, first_output, output_copy);
800  }
801 
802  if (!assigned) {
803  range->SetSpillStartIndex(gap_index);
804 
805  // This move to spill operand is not a real use. Liveness analysis
806  // and splitting of live ranges do not account for it.
807  // Thus it should be inserted to a lifetime position corresponding to
808  // the instruction end.
809  LGap* gap = GapAt(gap_index);
810  LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE,
811  chunk()->zone());
812  move->AddMove(first_output, range->GetSpillOperand(),
813  chunk()->zone());
814  }
815  }
816 
817  // Handle fixed input operands of second instruction.
818  if (second != NULL) {
819  for (UseIterator it(second); !it.Done(); it.Advance()) {
820  LUnallocated* cur_input = LUnallocated::cast(it.Current());
821  if (cur_input->HasFixedPolicy()) {
822  LUnallocated* input_copy = cur_input->CopyUnconstrained(
823  chunk()->zone());
824  bool is_tagged = HasTaggedValue(cur_input->virtual_register());
825  AllocateFixed(cur_input, gap_index + 1, is_tagged);
826  AddConstraintsGapMove(gap_index, input_copy, cur_input);
827  } else if (cur_input->HasWritableRegisterPolicy()) {
828  // The live range of writable input registers always goes until the end
829  // of the instruction.
830  DCHECK(!cur_input->IsUsedAtStart());
831 
832  LUnallocated* input_copy = cur_input->CopyUnconstrained(
833  chunk()->zone());
834  int vreg = GetVirtualRegister();
835  if (!AllocationOk()) return;
836  cur_input->set_virtual_register(vreg);
837 
838  if (RequiredRegisterKind(input_copy->virtual_register()) ==
839  DOUBLE_REGISTERS) {
840  double_artificial_registers_.Add(
841  cur_input->virtual_register() - first_artificial_register_,
842  zone());
843  }
844 
845  AddConstraintsGapMove(gap_index, input_copy, cur_input);
846  }
847  }
848  }
849 
850  // Handle "output same as input" for second instruction.
851  if (second != NULL && second->Output() != NULL) {
852  LUnallocated* second_output = LUnallocated::cast(second->Output());
853  if (second_output->HasSameAsInputPolicy()) {
854  LUnallocated* cur_input = LUnallocated::cast(second->FirstInput());
855  int output_vreg = second_output->virtual_register();
856  int input_vreg = cur_input->virtual_register();
857 
858  LUnallocated* input_copy = cur_input->CopyUnconstrained(
859  chunk()->zone());
860  cur_input->set_virtual_register(second_output->virtual_register());
861  AddConstraintsGapMove(gap_index, input_copy, cur_input);
862 
863  if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) {
864  int index = gap_index + 1;
865  LInstruction* instr = InstructionAt(index);
866  if (instr->HasPointerMap()) {
867  instr->pointer_map()->RecordPointer(input_copy, chunk()->zone());
868  }
869  } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
870  // The input is assumed to immediately have a tagged representation,
871  // before the pointer map can be used. I.e. the pointer map at the
872  // instruction will include the output operand (whose value at the
873  // beginning of the instruction is equal to the input operand). If
874  // this is not desired, then the pointer map at this instruction needs
875  // to be adjusted manually.
876  }
877  }
878  }
879 }
880 
881 
882 void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
883  int block_start = block->first_instruction_index();
884  int index = block->last_instruction_index();
885 
886  LifetimePosition block_start_position =
887  LifetimePosition::FromInstructionIndex(block_start);
888 
889  while (index >= block_start) {
890  LifetimePosition curr_position =
891  LifetimePosition::FromInstructionIndex(index);
892 
893  if (IsGapAt(index)) {
894  // We have a gap at this position.
895  LGap* gap = GapAt(index);
896  LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
897  chunk()->zone());
898  const ZoneList<LMoveOperands>* move_operands = move->move_operands();
899  for (int i = 0; i < move_operands->length(); ++i) {
900  LMoveOperands* cur = &move_operands->at(i);
901  if (cur->IsIgnored()) continue;
902  LOperand* from = cur->source();
903  LOperand* to = cur->destination();
904  HPhi* phi = LookupPhi(to);
905  LOperand* hint = to;
906  if (phi != NULL) {
907  // This is a phi resolving move.
908  if (!phi->block()->IsLoopHeader()) {
909  hint = LiveRangeFor(phi->id())->current_hint_operand();
910  }
911  } else {
912  if (to->IsUnallocated()) {
913  if (live->Contains(LUnallocated::cast(to)->virtual_register())) {
914  Define(curr_position, to, from);
915  live->Remove(LUnallocated::cast(to)->virtual_register());
916  } else {
917  cur->Eliminate();
918  continue;
919  }
920  } else {
921  Define(curr_position, to, from);
922  }
923  }
924  Use(block_start_position, curr_position, from, hint);
925  if (from->IsUnallocated()) {
926  live->Add(LUnallocated::cast(from)->virtual_register());
927  }
928  }
929  } else {
930  DCHECK(!IsGapAt(index));
931  LInstruction* instr = InstructionAt(index);
932 
933  if (instr != NULL) {
934  LOperand* output = instr->Output();
935  if (output != NULL) {
936  if (output->IsUnallocated()) {
937  live->Remove(LUnallocated::cast(output)->virtual_register());
938  }
939  Define(curr_position, output, NULL);
940  }
941 
942  if (instr->ClobbersRegisters()) {
943  for (int i = 0; i < Register::kMaxNumAllocatableRegisters; ++i) {
944  if (output == NULL || !output->IsRegister() ||
945  output->index() != i) {
946  LiveRange* range = FixedLiveRangeFor(i);
947  range->AddUseInterval(curr_position,
948  curr_position.InstructionEnd(),
949  zone());
950  }
951  }
952  }
953 
954  if (instr->ClobbersDoubleRegisters(isolate())) {
955  for (int i = 0; i < DoubleRegister::NumAllocatableRegisters(); ++i) {
956  if (output == NULL || !output->IsDoubleRegister() ||
957  output->index() != i) {
958  LiveRange* range = FixedDoubleLiveRangeFor(i);
959  range->AddUseInterval(curr_position,
960  curr_position.InstructionEnd(),
961  zone());
962  }
963  }
964  }
965 
966  for (UseIterator it(instr); !it.Done(); it.Advance()) {
967  LOperand* input = it.Current();
968 
969  LifetimePosition use_pos;
970  if (input->IsUnallocated() &&
971  LUnallocated::cast(input)->IsUsedAtStart()) {
972  use_pos = curr_position;
973  } else {
974  use_pos = curr_position.InstructionEnd();
975  }
976 
977  Use(block_start_position, use_pos, input, NULL);
978  if (input->IsUnallocated()) {
979  live->Add(LUnallocated::cast(input)->virtual_register());
980  }
981  }
982 
983  for (TempIterator it(instr); !it.Done(); it.Advance()) {
984  LOperand* temp = it.Current();
985  if (instr->ClobbersTemps()) {
986  if (temp->IsRegister()) continue;
987  if (temp->IsUnallocated()) {
988  LUnallocated* temp_unalloc = LUnallocated::cast(temp);
989  if (temp_unalloc->HasFixedPolicy()) {
990  continue;
991  }
992  }
993  }
994  Use(block_start_position, curr_position.InstructionEnd(), temp, NULL);
995  Define(curr_position, temp, NULL);
996 
997  if (temp->IsUnallocated()) {
998  LUnallocated* temp_unalloc = LUnallocated::cast(temp);
999  if (temp_unalloc->HasDoubleRegisterPolicy()) {
1000  double_artificial_registers_.Add(
1001  temp_unalloc->virtual_register() - first_artificial_register_,
1002  zone());
1003  }
1004  }
1005  }
1006  }
1007  }
1008 
1009  index = index - 1;
1010  }
1011 }
1012 
1013 
1014 void LAllocator::ResolvePhis(HBasicBlock* block) {
1015  const ZoneList<HPhi*>* phis = block->phis();
1016  for (int i = 0; i < phis->length(); ++i) {
1017  HPhi* phi = phis->at(i);
1018  LUnallocated* phi_operand =
1019  new (chunk()->zone()) LUnallocated(LUnallocated::NONE);
1020  phi_operand->set_virtual_register(phi->id());
1021  for (int j = 0; j < phi->OperandCount(); ++j) {
1022  HValue* op = phi->OperandAt(j);
1023  LOperand* operand = NULL;
1024  if (op->IsConstant() && op->EmitAtUses()) {
1025  HConstant* constant = HConstant::cast(op);
1026  operand = chunk_->DefineConstantOperand(constant);
1027  } else {
1028  DCHECK(!op->EmitAtUses());
1029  LUnallocated* unalloc =
1030  new(chunk()->zone()) LUnallocated(LUnallocated::ANY);
1031  unalloc->set_virtual_register(op->id());
1032  operand = unalloc;
1033  }
1034  HBasicBlock* cur_block = block->predecessors()->at(j);
1035  // The gap move must be added without the special processing done in
1036  // AddConstraintsGapMove.
1037  chunk_->AddGapMove(cur_block->last_instruction_index() - 1,
1038  operand,
1039  phi_operand);
1040 
1041  // We are going to insert a move before the branch instruction.
1042  // Some branch instructions (e.g. loops' back edges)
1043  // can potentially cause a GC so they have a pointer map.
1044  // By inserting a move we essentially create a copy of a
1045  // value which is invisible to PopulatePointerMaps(), because we store
1046  // it into a location different from the operand of a live range
1047  // covering a branch instruction.
1048  // Thus we need to manually record a pointer.
1049  LInstruction* branch =
1050  InstructionAt(cur_block->last_instruction_index());
1051  if (branch->HasPointerMap()) {
1052  if (phi->representation().IsTagged() && !phi->type().IsSmi()) {
1053  branch->pointer_map()->RecordPointer(phi_operand, chunk()->zone());
1054  } else if (!phi->representation().IsDouble()) {
1055  branch->pointer_map()->RecordUntagged(phi_operand, chunk()->zone());
1056  }
1057  }
1058  }
1059 
1060  LiveRange* live_range = LiveRangeFor(phi->id());
1061  LLabel* label = chunk_->GetLabel(phi->block()->block_id());
1062  label->GetOrCreateParallelMove(LGap::START, chunk()->zone())->
1063  AddMove(phi_operand, live_range->GetSpillOperand(), chunk()->zone());
1064  live_range->SetSpillStartIndex(phi->block()->first_instruction_index());
1065  }
1066 }
1067 
1068 
1069 bool LAllocator::Allocate(LChunk* chunk) {
1070  DCHECK(chunk_ == NULL);
1071  chunk_ = static_cast<LPlatformChunk*>(chunk);
1072  assigned_registers_ =
1073  new(chunk->zone()) BitVector(Register::NumAllocatableRegisters(),
1074  chunk->zone());
1075  assigned_double_registers_ =
1076  new(chunk->zone()) BitVector(DoubleRegister::NumAllocatableRegisters(),
1077  chunk->zone());
1078  MeetRegisterConstraints();
1079  if (!AllocationOk()) return false;
1080  ResolvePhis();
1081  BuildLiveRanges();
1082  AllocateGeneralRegisters();
1083  if (!AllocationOk()) return false;
1084  AllocateDoubleRegisters();
1085  if (!AllocationOk()) return false;
1086  PopulatePointerMaps();
1087  ConnectRanges();
1088  ResolveControlFlow();
1089  return true;
1090 }
1091 
1092 
1093 void LAllocator::MeetRegisterConstraints() {
1094  LAllocatorPhase phase("L_Register constraints", this);
1095  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
1096  for (int i = 0; i < blocks->length(); ++i) {
1097  HBasicBlock* block = blocks->at(i);
1098  MeetRegisterConstraints(block);
1099  if (!AllocationOk()) return;
1100  }
1101 }
1102 
1103 
1104 void LAllocator::ResolvePhis() {
1105  LAllocatorPhase phase("L_Resolve phis", this);
1106 
1107  // Process the blocks in reverse order.
1108  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
1109  for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
1110  HBasicBlock* block = blocks->at(block_id);
1111  ResolvePhis(block);
1112  }
1113 }
1114 
1115 
1116 void LAllocator::ResolveControlFlow(LiveRange* range,
1117  HBasicBlock* block,
1118  HBasicBlock* pred) {
1119  LifetimePosition pred_end =
1120  LifetimePosition::FromInstructionIndex(pred->last_instruction_index());
1121  LifetimePosition cur_start =
1122  LifetimePosition::FromInstructionIndex(block->first_instruction_index());
1123  LiveRange* pred_cover = NULL;
1124  LiveRange* cur_cover = NULL;
1125  LiveRange* cur_range = range;
1126  while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) {
1127  if (cur_range->CanCover(cur_start)) {
1128  DCHECK(cur_cover == NULL);
1129  cur_cover = cur_range;
1130  }
1131  if (cur_range->CanCover(pred_end)) {
1132  DCHECK(pred_cover == NULL);
1133  pred_cover = cur_range;
1134  }
1135  cur_range = cur_range->next();
1136  }
1137 
1138  if (cur_cover->IsSpilled()) return;
1139  DCHECK(pred_cover != NULL && cur_cover != NULL);
1140  if (pred_cover != cur_cover) {
1141  LOperand* pred_op = pred_cover->CreateAssignedOperand(chunk()->zone());
1142  LOperand* cur_op = cur_cover->CreateAssignedOperand(chunk()->zone());
1143  if (!pred_op->Equals(cur_op)) {
1144  LGap* gap = NULL;
1145  if (block->predecessors()->length() == 1) {
1146  gap = GapAt(block->first_instruction_index());
1147  } else {
1148  DCHECK(pred->end()->SecondSuccessor() == NULL);
1149  gap = GetLastGap(pred);
1150 
1151  // We are going to insert a move before the branch instruction.
1152  // Some branch instructions (e.g. loops' back edges)
1153  // can potentially cause a GC so they have a pointer map.
1154  // By inserting a move we essentially create a copy of a
1155  // value which is invisible to PopulatePointerMaps(), because we store
1156  // it into a location different from the operand of a live range
1157  // covering a branch instruction.
1158  // Thus we need to manually record a pointer.
1159  LInstruction* branch = InstructionAt(pred->last_instruction_index());
1160  if (branch->HasPointerMap()) {
1161  if (HasTaggedValue(range->id())) {
1162  branch->pointer_map()->RecordPointer(cur_op, chunk()->zone());
1163  } else if (!cur_op->IsDoubleStackSlot() &&
1164  !cur_op->IsDoubleRegister()) {
1165  branch->pointer_map()->RemovePointer(cur_op);
1166  }
1167  }
1168  }
1169  gap->GetOrCreateParallelMove(
1170  LGap::START, chunk()->zone())->AddMove(pred_op, cur_op,
1171  chunk()->zone());
1172  }
1173  }
1174 }
1175 
1176 
1177 LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) {
1178  int index = pos.InstructionIndex();
1179  if (IsGapAt(index)) {
1180  LGap* gap = GapAt(index);
1181  return gap->GetOrCreateParallelMove(
1182  pos.IsInstructionStart() ? LGap::START : LGap::END, chunk()->zone());
1183  }
1184  int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
1185  return GapAt(gap_pos)->GetOrCreateParallelMove(
1186  (gap_pos < index) ? LGap::AFTER : LGap::BEFORE, chunk()->zone());
1187 }
1188 
1189 
1190 HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) {
1191  LGap* gap = GapAt(chunk_->NearestGapPos(pos.InstructionIndex()));
1192  return gap->block();
1193 }
1194 
1195 
1196 void LAllocator::ConnectRanges() {
1197  LAllocatorPhase phase("L_Connect ranges", this);
1198  for (int i = 0; i < live_ranges()->length(); ++i) {
1199  LiveRange* first_range = live_ranges()->at(i);
1200  if (first_range == NULL || first_range->parent() != NULL) continue;
1201 
1202  LiveRange* second_range = first_range->next();
1203  while (second_range != NULL) {
1204  LifetimePosition pos = second_range->Start();
1205 
1206  if (!second_range->IsSpilled()) {
1207  // Add gap move if the two live ranges touch and there is no block
1208  // boundary.
1209  if (first_range->End().Value() == pos.Value()) {
1210  bool should_insert = true;
1211  if (IsBlockBoundary(pos)) {
1212  should_insert = CanEagerlyResolveControlFlow(GetBlock(pos));
1213  }
1214  if (should_insert) {
1215  LParallelMove* move = GetConnectingParallelMove(pos);
1216  LOperand* prev_operand = first_range->CreateAssignedOperand(
1217  chunk()->zone());
1218  LOperand* cur_operand = second_range->CreateAssignedOperand(
1219  chunk()->zone());
1220  move->AddMove(prev_operand, cur_operand,
1221  chunk()->zone());
1222  }
1223  }
1224  }
1225 
1226  first_range = second_range;
1227  second_range = second_range->next();
1228  }
1229  }
1230 }
1231 
1232 
1233 bool LAllocator::CanEagerlyResolveControlFlow(HBasicBlock* block) const {
1234  if (block->predecessors()->length() != 1) return false;
1235  return block->predecessors()->first()->block_id() == block->block_id() - 1;
1236 }
1237 
1238 
1239 void LAllocator::ResolveControlFlow() {
1240  LAllocatorPhase phase("L_Resolve control flow", this);
1241  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
1242  for (int block_id = 1; block_id < blocks->length(); ++block_id) {
1243  HBasicBlock* block = blocks->at(block_id);
1244  if (CanEagerlyResolveControlFlow(block)) continue;
1245  BitVector* live = live_in_sets_[block->block_id()];
1246  BitVector::Iterator iterator(live);
1247  while (!iterator.Done()) {
1248  int operand_index = iterator.Current();
1249  for (int i = 0; i < block->predecessors()->length(); ++i) {
1250  HBasicBlock* cur = block->predecessors()->at(i);
1251  LiveRange* cur_range = LiveRangeFor(operand_index);
1252  ResolveControlFlow(cur_range, block, cur);
1253  }
1254  iterator.Advance();
1255  }
1256  }
1257 }
1258 
1259 
1260 void LAllocator::BuildLiveRanges() {
1261  LAllocatorPhase phase("L_Build live ranges", this);
1262  InitializeLivenessAnalysis();
1263  // Process the blocks in reverse order.
1264  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
1265  for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
1266  HBasicBlock* block = blocks->at(block_id);
1267  BitVector* live = ComputeLiveOut(block);
1268  // Initially consider all live_out values live for the entire block. We
1269  // will shorten these intervals if necessary.
1270  AddInitialIntervals(block, live);
1271 
1272  // Process the instructions in reverse order, generating and killing
1273  // live values.
1274  ProcessInstructions(block, live);
1275  // All phi output operands are killed by this block.
1276  const ZoneList<HPhi*>* phis = block->phis();
1277  for (int i = 0; i < phis->length(); ++i) {
1278  // The live range interval already ends at the first instruction of the
1279  // block.
1280  HPhi* phi = phis->at(i);
1281  live->Remove(phi->id());
1282 
1283  LOperand* hint = NULL;
1284  LOperand* phi_operand = NULL;
1285  LGap* gap = GetLastGap(phi->block()->predecessors()->at(0));
1286  LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
1287  chunk()->zone());
1288  for (int j = 0; j < move->move_operands()->length(); ++j) {
1289  LOperand* to = move->move_operands()->at(j).destination();
1290  if (to->IsUnallocated() &&
1291  LUnallocated::cast(to)->virtual_register() == phi->id()) {
1292  hint = move->move_operands()->at(j).source();
1293  phi_operand = to;
1294  break;
1295  }
1296  }
1297  DCHECK(hint != NULL);
1298 
1299  LifetimePosition block_start = LifetimePosition::FromInstructionIndex(
1300  block->first_instruction_index());
1301  Define(block_start, phi_operand, hint);
1302  }
1303 
1304  // Now live is live_in for this block except not including values live
1305  // out on backward successor edges.
1306  live_in_sets_[block_id] = live;
1307 
1308  // If this block is a loop header go back and patch up the necessary
1309  // predecessor blocks.
1310  if (block->IsLoopHeader()) {
1311  // TODO(kmillikin): Need to be able to get the last block of the loop
1312  // in the loop information. Add a live range stretching from the first
1313  // loop instruction to the last for each value live on entry to the
1314  // header.
1315  HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge();
1316  BitVector::Iterator iterator(live);
1317  LifetimePosition start = LifetimePosition::FromInstructionIndex(
1318  block->first_instruction_index());
1319  LifetimePosition end = LifetimePosition::FromInstructionIndex(
1320  back_edge->last_instruction_index()).NextInstruction();
1321  while (!iterator.Done()) {
1322  int operand_index = iterator.Current();
1323  LiveRange* range = LiveRangeFor(operand_index);
1324  range->EnsureInterval(start, end, zone());
1325  iterator.Advance();
1326  }
1327 
1328  for (int i = block->block_id() + 1; i <= back_edge->block_id(); ++i) {
1329  live_in_sets_[i]->Union(*live);
1330  }
1331  }
1332 
1333 #ifdef DEBUG
1334  if (block_id == 0) {
1335  BitVector::Iterator iterator(live);
1336  bool found = false;
1337  while (!iterator.Done()) {
1338  found = true;
1339  int operand_index = iterator.Current();
1340  if (chunk_->info()->IsStub()) {
1341  CodeStub::Major major_key = chunk_->info()->code_stub()->MajorKey();
1342  PrintF("Function: %s\n", CodeStub::MajorName(major_key, false));
1343  } else {
1344  DCHECK(chunk_->info()->IsOptimizing());
1345  AllowHandleDereference allow_deref;
1346  PrintF("Function: %s\n",
1347  chunk_->info()->function()->debug_name()->ToCString().get());
1348  }
1349  PrintF("Value %d used before first definition!\n", operand_index);
1350  LiveRange* range = LiveRangeFor(operand_index);
1351  PrintF("First use is at %d\n", range->first_pos()->pos().Value());
1352  iterator.Advance();
1353  }
1354  DCHECK(!found);
1355  }
1356 #endif
1357  }
1358 
1359  for (int i = 0; i < live_ranges_.length(); ++i) {
1360  if (live_ranges_[i] != NULL) {
1361  live_ranges_[i]->kind_ = RequiredRegisterKind(live_ranges_[i]->id());
1362  }
1363  }
1364 }
1365 
1366 
1367 bool LAllocator::SafePointsAreInOrder() const {
1368  const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
1369  int safe_point = 0;
1370  for (int i = 0; i < pointer_maps->length(); ++i) {
1371  LPointerMap* map = pointer_maps->at(i);
1372  if (safe_point > map->lithium_position()) return false;
1373  safe_point = map->lithium_position();
1374  }
1375  return true;
1376 }
1377 
1378 
1379 void LAllocator::PopulatePointerMaps() {
1380  LAllocatorPhase phase("L_Populate pointer maps", this);
1381  const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
1382 
1383  DCHECK(SafePointsAreInOrder());
1384 
1385  // Iterate over all safe point positions and record a pointer
1386  // for all spilled live ranges at this point.
1387  int first_safe_point_index = 0;
1388  int last_range_start = 0;
1389  for (int range_idx = 0; range_idx < live_ranges()->length(); ++range_idx) {
1390  LiveRange* range = live_ranges()->at(range_idx);
1391  if (range == NULL) continue;
1392  // Iterate over the first parts of multi-part live ranges.
1393  if (range->parent() != NULL) continue;
1394  // Skip non-pointer values.
1395  if (!HasTaggedValue(range->id())) continue;
1396  // Skip empty live ranges.
1397  if (range->IsEmpty()) continue;
1398 
1399  // Find the extent of the range and its children.
1400  int start = range->Start().InstructionIndex();
1401  int end = 0;
1402  for (LiveRange* cur = range; cur != NULL; cur = cur->next()) {
1403  LifetimePosition this_end = cur->End();
1404  if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex();
1405  DCHECK(cur->Start().InstructionIndex() >= start);
1406  }
1407 
1408  // Most of the ranges are in order, but not all. Keep an eye on when
1409  // they step backwards and reset the first_safe_point_index so we don't
1410  // miss any safe points.
1411  if (start < last_range_start) {
1412  first_safe_point_index = 0;
1413  }
1414  last_range_start = start;
1415 
1416  // Step across all the safe points that are before the start of this range,
1417  // recording how far we step in order to save doing this for the next range.
1418  while (first_safe_point_index < pointer_maps->length()) {
1419  LPointerMap* map = pointer_maps->at(first_safe_point_index);
1420  int safe_point = map->lithium_position();
1421  if (safe_point >= start) break;
1422  first_safe_point_index++;
1423  }
1424 
1425  // Step through the safe points to see whether they are in the range.
1426  for (int safe_point_index = first_safe_point_index;
1427  safe_point_index < pointer_maps->length();
1428  ++safe_point_index) {
1429  LPointerMap* map = pointer_maps->at(safe_point_index);
1430  int safe_point = map->lithium_position();
1431 
1432  // The safe points are sorted so we can stop searching here.
1433  if (safe_point - 1 > end) break;
1434 
1435  // Advance to the next active range that covers the current
1436  // safe point position.
1437  LifetimePosition safe_point_pos =
1438  LifetimePosition::FromInstructionIndex(safe_point);
1439  LiveRange* cur = range;
1440  while (cur != NULL && !cur->Covers(safe_point_pos)) {
1441  cur = cur->next();
1442  }
1443  if (cur == NULL) continue;
1444 
1445  // Check if the live range is spilled and the safe point is after
1446  // the spill position.
1447  if (range->HasAllocatedSpillOperand() &&
1448  safe_point >= range->spill_start_index()) {
1449  TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n",
1450  range->id(), range->spill_start_index(), safe_point);
1451  map->RecordPointer(range->GetSpillOperand(), chunk()->zone());
1452  }
1453 
1454  if (!cur->IsSpilled()) {
1455  TraceAlloc("Pointer in register for range %d (start at %d) "
1456  "at safe point %d\n",
1457  cur->id(), cur->Start().Value(), safe_point);
1458  LOperand* operand = cur->CreateAssignedOperand(chunk()->zone());
1459  DCHECK(!operand->IsStackSlot());
1460  map->RecordPointer(operand, chunk()->zone());
1461  }
1462  }
1463  }
1464 }
1465 
1466 
1467 void LAllocator::AllocateGeneralRegisters() {
1468  LAllocatorPhase phase("L_Allocate general registers", this);
1469  num_registers_ = Register::NumAllocatableRegisters();
1470  mode_ = GENERAL_REGISTERS;
1471  AllocateRegisters();
1472 }
1473 
1474 
1475 void LAllocator::AllocateDoubleRegisters() {
1476  LAllocatorPhase phase("L_Allocate double registers", this);
1477  num_registers_ = DoubleRegister::NumAllocatableRegisters();
1478  mode_ = DOUBLE_REGISTERS;
1479  AllocateRegisters();
1480 }
1481 
1482 
1483 void LAllocator::AllocateRegisters() {
1484  DCHECK(unhandled_live_ranges_.is_empty());
1485 
1486  for (int i = 0; i < live_ranges_.length(); ++i) {
1487  if (live_ranges_[i] != NULL) {
1488  if (live_ranges_[i]->Kind() == mode_) {
1489  AddToUnhandledUnsorted(live_ranges_[i]);
1490  }
1491  }
1492  }
1493  SortUnhandled();
1494  DCHECK(UnhandledIsSorted());
1495 
1496  DCHECK(reusable_slots_.is_empty());
1497  DCHECK(active_live_ranges_.is_empty());
1498  DCHECK(inactive_live_ranges_.is_empty());
1499 
1500  if (mode_ == DOUBLE_REGISTERS) {
1501  for (int i = 0; i < DoubleRegister::NumAllocatableRegisters(); ++i) {
1502  LiveRange* current = fixed_double_live_ranges_.at(i);
1503  if (current != NULL) {
1504  AddToInactive(current);
1505  }
1506  }
1507  } else {
1508  DCHECK(mode_ == GENERAL_REGISTERS);
1509  for (int i = 0; i < fixed_live_ranges_.length(); ++i) {
1510  LiveRange* current = fixed_live_ranges_.at(i);
1511  if (current != NULL) {
1512  AddToInactive(current);
1513  }
1514  }
1515  }
1516 
1517  while (!unhandled_live_ranges_.is_empty()) {
1518  DCHECK(UnhandledIsSorted());
1519  LiveRange* current = unhandled_live_ranges_.RemoveLast();
1520  DCHECK(UnhandledIsSorted());
1521  LifetimePosition position = current->Start();
1522 #ifdef DEBUG
1523  allocation_finger_ = position;
1524 #endif
1525  TraceAlloc("Processing interval %d start=%d\n",
1526  current->id(),
1527  position.Value());
1528 
1529  if (current->HasAllocatedSpillOperand()) {
1530  TraceAlloc("Live range %d already has a spill operand\n", current->id());
1531  LifetimePosition next_pos = position;
1532  if (IsGapAt(next_pos.InstructionIndex())) {
1533  next_pos = next_pos.NextInstruction();
1534  }
1535  UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos);
1536  // If the range already has a spill operand and it doesn't need a
1537  // register immediately, split it and spill the first part of the range.
1538  if (pos == NULL) {
1539  Spill(current);
1540  continue;
1541  } else if (pos->pos().Value() >
1542  current->Start().NextInstruction().Value()) {
1543  // Do not spill live range eagerly if use position that can benefit from
1544  // the register is too close to the start of live range.
1545  SpillBetween(current, current->Start(), pos->pos());
1546  if (!AllocationOk()) return;
1547  DCHECK(UnhandledIsSorted());
1548  continue;
1549  }
1550  }
1551 
1552  for (int i = 0; i < active_live_ranges_.length(); ++i) {
1553  LiveRange* cur_active = active_live_ranges_.at(i);
1554  if (cur_active->End().Value() <= position.Value()) {
1555  ActiveToHandled(cur_active);
1556  --i; // The live range was removed from the list of active live ranges.
1557  } else if (!cur_active->Covers(position)) {
1558  ActiveToInactive(cur_active);
1559  --i; // The live range was removed from the list of active live ranges.
1560  }
1561  }
1562 
1563  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
1564  LiveRange* cur_inactive = inactive_live_ranges_.at(i);
1565  if (cur_inactive->End().Value() <= position.Value()) {
1566  InactiveToHandled(cur_inactive);
1567  --i; // Live range was removed from the list of inactive live ranges.
1568  } else if (cur_inactive->Covers(position)) {
1569  InactiveToActive(cur_inactive);
1570  --i; // Live range was removed from the list of inactive live ranges.
1571  }
1572  }
1573 
1574  DCHECK(!current->HasRegisterAssigned() && !current->IsSpilled());
1575 
1576  bool result = TryAllocateFreeReg(current);
1577  if (!AllocationOk()) return;
1578 
1579  if (!result) AllocateBlockedReg(current);
1580  if (!AllocationOk()) return;
1581 
1582  if (current->HasRegisterAssigned()) {
1583  AddToActive(current);
1584  }
1585  }
1586 
1587  reusable_slots_.Rewind(0);
1588  active_live_ranges_.Rewind(0);
1589  inactive_live_ranges_.Rewind(0);
1590 }
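The overall shape of the loop above (pop the next unhandled range, retire finished active ranges, then try to hand out a register) is the classic linear-scan skeleton. The sketch below is a deliberately reduced illustration, not V8 code: SimpleRange and LinearScan are hypothetical, and the spill/split paths of AllocateBlockedReg are collapsed into a comment.

#include <algorithm>
#include <cstdio>
#include <vector>

struct SimpleRange { int start, end, reg; };   // [start, end); reg < 0 = unassigned

const int kNumRegs = 4;

void LinearScan(std::vector<SimpleRange*>& unhandled) {
  // Process ranges in order of increasing start position.
  std::sort(unhandled.begin(), unhandled.end(),
            [](const SimpleRange* a, const SimpleRange* b) { return a->start < b->start; });
  std::vector<SimpleRange*> active;
  for (SimpleRange* current : unhandled) {
    // Retire active ranges that ended at or before the current position.
    active.erase(std::remove_if(active.begin(), active.end(),
                                [&](SimpleRange* r) { return r->end <= current->start; }),
                 active.end());
    bool used[kNumRegs] = {false};
    for (SimpleRange* r : active) used[r->reg] = true;
    for (int reg = 0; reg < kNumRegs; ++reg) {
      if (!used[reg]) { current->reg = reg; active.push_back(current); break; }
    }
    // If no register is free, the real allocator spills or splits a range here.
  }
}

int main() {
  SimpleRange r0 = {0, 6, -1}, r1 = {2, 4, -1}, r2 = {3, 9, -1}, r3 = {5, 7, -1};
  std::vector<SimpleRange*> ranges = {&r0, &r1, &r2, &r3};
  LinearScan(ranges);
  for (SimpleRange* r : ranges) std::printf("[%d,%d) -> r%d\n", r->start, r->end, r->reg);
}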
1591 
1592 
1593 const char* LAllocator::RegisterName(int allocation_index) {
1594  if (mode_ == GENERAL_REGISTERS) {
1595  return Register::AllocationIndexToString(allocation_index);
1596  } else {
1597  return DoubleRegister::AllocationIndexToString(allocation_index);
1598  }
1599 }
1600 
1601 
1602 void LAllocator::TraceAlloc(const char* msg, ...) {
1603  if (FLAG_trace_alloc) {
1604  va_list arguments;
1605  va_start(arguments, msg);
1606  base::OS::VPrint(msg, arguments);
1607  va_end(arguments);
1608  }
1609 }
1610 
1611 
1612 bool LAllocator::HasTaggedValue(int virtual_register) const {
1613  HValue* value = graph_->LookupValue(virtual_register);
1614  if (value == NULL) return false;
1615  return value->representation().IsTagged() && !value->type().IsSmi();
1616 }
1617 
1618 
1619 RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const {
1620  if (virtual_register < first_artificial_register_) {
1621  HValue* value = graph_->LookupValue(virtual_register);
1622  if (value != NULL && value->representation().IsDouble()) {
1623  return DOUBLE_REGISTERS;
1624  }
1625  } else if (double_artificial_registers_.Contains(
1626  virtual_register - first_artificial_register_)) {
1627  return DOUBLE_REGISTERS;
1628  }
1629 
1630  return GENERAL_REGISTERS;
1631 }
1632 
1633 
1634 void LAllocator::AddToActive(LiveRange* range) {
1635  TraceAlloc("Add live range %d to active\n", range->id());
1636  active_live_ranges_.Add(range, zone());
1637 }
1638 
1639 
1640 void LAllocator::AddToInactive(LiveRange* range) {
1641  TraceAlloc("Add live range %d to inactive\n", range->id());
1642  inactive_live_ranges_.Add(range, zone());
1643 }
1644 
1645 
1646 void LAllocator::AddToUnhandledSorted(LiveRange* range) {
1647  if (range == NULL || range->IsEmpty()) return;
1648  DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled());
1649  DCHECK(allocation_finger_.Value() <= range->Start().Value());
1650  for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) {
1651  LiveRange* cur_range = unhandled_live_ranges_.at(i);
1652  if (range->ShouldBeAllocatedBefore(cur_range)) {
1653  TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1);
1654  unhandled_live_ranges_.InsertAt(i + 1, range, zone());
1655  DCHECK(UnhandledIsSorted());
1656  return;
1657  }
1658  }
1659  TraceAlloc("Add live range %d to unhandled at start\n", range->id());
1660  unhandled_live_ranges_.InsertAt(0, range, zone());
1661  DCHECK(UnhandledIsSorted());
1662 }
1663 
1664 
1665 void LAllocator::AddToUnhandledUnsorted(LiveRange* range) {
1666  if (range == NULL || range->IsEmpty()) return;
1667  DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled());
1668  TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id());
1669  unhandled_live_ranges_.Add(range, zone());
1670 }
1671 
1672 
1673 static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) {
1674  DCHECK(!(*a)->ShouldBeAllocatedBefore(*b) ||
1675  !(*b)->ShouldBeAllocatedBefore(*a));
1676  if ((*a)->ShouldBeAllocatedBefore(*b)) return 1;
1677  if ((*b)->ShouldBeAllocatedBefore(*a)) return -1;
1678  return (*a)->id() - (*b)->id();
1679 }
1680 
1681 
1682 // Sort the unhandled live ranges so that the ranges to be processed first are
1683 // at the end of the array list. This is convenient for the register allocation
1684 // algorithm because it is efficient to remove elements from the end.
1685 void LAllocator::SortUnhandled() {
1686  TraceAlloc("Sort unhandled\n");
1687  unhandled_live_ranges_.Sort(&UnhandledSortHelper);
1688 }
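The effect of SortUnhandled plus RemoveLast is easiest to see in isolation: ranges are ordered so that the one to be processed first ends up at the back of the array. The sketch below is illustrative only, not V8 code; Range and SortsBefore are hypothetical stand-ins mirroring UnhandledSortHelper.

#include <algorithm>
#include <cstdio>
#include <vector>

struct Range { int id; int start; int first_use; };

// Mirrors UnhandledSortHelper: an earlier start (and, as a tie breaker, an
// earlier first use) sorts toward the end of the array.
bool SortsBefore(const Range& a, const Range& b) {
  if (a.start != b.start) return a.start > b.start;
  if (a.first_use != b.first_use) return a.first_use > b.first_use;
  return a.id < b.id;
}

int main() {
  std::vector<Range> unhandled = {{0, 8, 9}, {1, 2, 5}, {2, 2, 3}, {3, 6, 7}};
  std::sort(unhandled.begin(), unhandled.end(), SortsBefore);
  while (!unhandled.empty()) {
    Range next = unhandled.back();   // analogous to unhandled_live_ranges_.RemoveLast()
    unhandled.pop_back();
    std::printf("process range %d (start %d)\n", next.id, next.start);
  }
  // Prints ranges in order of increasing start: 2, 1, 3, 0.
}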
1689 
1690 
1691 bool LAllocator::UnhandledIsSorted() {
1692  int len = unhandled_live_ranges_.length();
1693  for (int i = 1; i < len; i++) {
1694  LiveRange* a = unhandled_live_ranges_.at(i - 1);
1695  LiveRange* b = unhandled_live_ranges_.at(i);
1696  if (a->Start().Value() < b->Start().Value()) return false;
1697  }
1698  return true;
1699 }
1700 
1701 
1702 void LAllocator::FreeSpillSlot(LiveRange* range) {
1703  // Check that we are the last range.
1704  if (range->next() != NULL) return;
1705 
1706  if (!range->TopLevel()->HasAllocatedSpillOperand()) return;
1707 
1708  int index = range->TopLevel()->GetSpillOperand()->index();
1709  if (index >= 0) {
1710  reusable_slots_.Add(range, zone());
1711  }
1712 }
1713 
1714 
1715 LOperand* LAllocator::TryReuseSpillSlot(LiveRange* range) {
1716  if (reusable_slots_.is_empty()) return NULL;
1717  if (reusable_slots_.first()->End().Value() >
1718  range->TopLevel()->Start().Value()) {
1719  return NULL;
1720  }
1721  LOperand* result = reusable_slots_.first()->TopLevel()->GetSpillOperand();
1722  reusable_slots_.Remove(0);
1723  return result;
1724 }
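// Illustrative example (hypothetical positions): if the first reusable slot
// belongs to a range ending at position 40 and the new top-level range starts
// at 36, the check above fails (40 > 36) and nothing is reused; if the new
// range starts at 44 instead, the old slot's lifetime is over and it is
// handed out again.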
1725 
1726 
1727 void LAllocator::ActiveToHandled(LiveRange* range) {
1728  DCHECK(active_live_ranges_.Contains(range));
1729  active_live_ranges_.RemoveElement(range);
1730  TraceAlloc("Moving live range %d from active to handled\n", range->id());
1731  FreeSpillSlot(range);
1732 }
1733 
1734 
1735 void LAllocator::ActiveToInactive(LiveRange* range) {
1736  DCHECK(active_live_ranges_.Contains(range));
1737  active_live_ranges_.RemoveElement(range);
1738  inactive_live_ranges_.Add(range, zone());
1739  TraceAlloc("Moving live range %d from active to inactive\n", range->id());
1740 }
1741 
1742 
1743 void LAllocator::InactiveToHandled(LiveRange* range) {
1744  DCHECK(inactive_live_ranges_.Contains(range));
1745  inactive_live_ranges_.RemoveElement(range);
1746  TraceAlloc("Moving live range %d from inactive to handled\n", range->id());
1747  FreeSpillSlot(range);
1748 }
1749 
1750 
1751 void LAllocator::InactiveToActive(LiveRange* range) {
1752  DCHECK(inactive_live_ranges_.Contains(range));
1753  inactive_live_ranges_.RemoveElement(range);
1754  active_live_ranges_.Add(range, zone());
1755  TraceAlloc("Moving live range %d from inactive to active\n", range->id());
1756 }
1757 
1758 
1759 // TryAllocateFreeReg and AllocateBlockedReg assume this
1760 // when allocating local arrays.
1761 STATIC_ASSERT(DoubleRegister::kMaxNumAllocatableRegisters >=
1762  Register::kMaxNumAllocatableRegisters);
1763 
1764 
1765 bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
1766  LifetimePosition free_until_pos[DoubleRegister::kMaxNumAllocatableRegisters];
1767 
1768  for (int i = 0; i < num_registers_; i++) {
1769  free_until_pos[i] = LifetimePosition::MaxPosition();
1770  }
1771 
1772  for (int i = 0; i < active_live_ranges_.length(); ++i) {
1773  LiveRange* cur_active = active_live_ranges_.at(i);
1774  free_until_pos[cur_active->assigned_register()] =
1775  LifetimePosition::FromInstructionIndex(0);
1776  }
1777 
1778  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
1779  LiveRange* cur_inactive = inactive_live_ranges_.at(i);
1780  DCHECK(cur_inactive->End().Value() > current->Start().Value());
1781  LifetimePosition next_intersection =
1782  cur_inactive->FirstIntersection(current);
1783  if (!next_intersection.IsValid()) continue;
1784  int cur_reg = cur_inactive->assigned_register();
1785  free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
1786  }
1787 
1788  LOperand* hint = current->FirstHint();
1789  if (hint != NULL && (hint->IsRegister() || hint->IsDoubleRegister())) {
1790  int register_index = hint->index();
1791  TraceAlloc(
1792  "Found reg hint %s (free until [%d) for live range %d (end %d[).\n",
1793  RegisterName(register_index),
1794  free_until_pos[register_index].Value(),
1795  current->id(),
1796  current->End().Value());
1797 
1798  // The desired register is free until the end of the current live range.
1799  if (free_until_pos[register_index].Value() >= current->End().Value()) {
1800  TraceAlloc("Assigning preferred reg %s to live range %d\n",
1801  RegisterName(register_index),
1802  current->id());
1803  SetLiveRangeAssignedRegister(current, register_index);
1804  return true;
1805  }
1806  }
1807 
1808  // Find the register which stays free for the longest time.
1809  int reg = 0;
1810  for (int i = 1; i < RegisterCount(); ++i) {
1811  if (free_until_pos[i].Value() > free_until_pos[reg].Value()) {
1812  reg = i;
1813  }
1814  }
1815 
1816  LifetimePosition pos = free_until_pos[reg];
1817 
1818  if (pos.Value() <= current->Start().Value()) {
1819  // All registers are blocked.
1820  return false;
1821  }
1822 
1823  if (pos.Value() < current->End().Value()) {
1824  // Register reg is available at the range start but becomes blocked before
1825  // the range end. Split current at the position where it becomes blocked.
1826  LiveRange* tail = SplitRangeAt(current, pos);
1827  if (!AllocationOk()) return false;
1828  AddToUnhandledSorted(tail);
1829  }
1830 
1831 
1832  // Register reg is available at the range start and is free until
1833  // the range end.
1834  DCHECK(pos.Value() >= current->End().Value());
1835  TraceAlloc("Assigning free reg %s to live range %d\n",
1836  RegisterName(reg),
1837  current->id());
1838  SetLiveRangeAssignedRegister(current, reg);
1839 
1840  return true;
1841 }
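// Worked example (hypothetical, two allocatable registers r0/r1): for a
// current range covering [10, 30), an active range assigned to r0 forces
// free_until_pos[r0] = 0, while an inactive range assigned to r1 that first
// intersects current at 20 gives free_until_pos[r1] = 20. r1 wins, but since
// 20 < 30 the current range is split at 20: [10, 20) gets r1 and the tail
// [20, 30) is pushed back onto the unhandled list.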
1842 
1843 
1844 void LAllocator::AllocateBlockedReg(LiveRange* current) {
1845  UsePosition* register_use = current->NextRegisterPosition(current->Start());
1846  if (register_use == NULL) {
1847  // There is no use in the current live range that requires a register.
1848  // We can just spill it.
1849  Spill(current);
1850  return;
1851  }
1852 
1853 
1854  LifetimePosition use_pos[DoubleRegister::kMaxNumAllocatableRegisters];
1855  LifetimePosition block_pos[DoubleRegister::kMaxNumAllocatableRegisters];
1856 
1857  for (int i = 0; i < num_registers_; i++) {
1858  use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
1859  }
1860 
1861  for (int i = 0; i < active_live_ranges_.length(); ++i) {
1862  LiveRange* range = active_live_ranges_[i];
1863  int cur_reg = range->assigned_register();
1864  if (range->IsFixed() || !range->CanBeSpilled(current->Start())) {
1865  block_pos[cur_reg] = use_pos[cur_reg] =
1866  LifetimePosition::FromInstructionIndex(0);
1867  } else {
1868  UsePosition* next_use = range->NextUsePositionRegisterIsBeneficial(
1869  current->Start());
1870  if (next_use == NULL) {
1871  use_pos[cur_reg] = range->End();
1872  } else {
1873  use_pos[cur_reg] = next_use->pos();
1874  }
1875  }
1876  }
1877 
1878  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
1879  LiveRange* range = inactive_live_ranges_.at(i);
1880  DCHECK(range->End().Value() > current->Start().Value());
1881  LifetimePosition next_intersection = range->FirstIntersection(current);
1882  if (!next_intersection.IsValid()) continue;
1883  int cur_reg = range->assigned_register();
1884  if (range->IsFixed()) {
1885  block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
1886  use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
1887  } else {
1888  use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
1889  }
1890  }
1891 
1892  int reg = 0;
1893  for (int i = 1; i < RegisterCount(); ++i) {
1894  if (use_pos[i].Value() > use_pos[reg].Value()) {
1895  reg = i;
1896  }
1897  }
1898 
1899  LifetimePosition pos = use_pos[reg];
1900 
1901  if (pos.Value() < register_use->pos().Value()) {
1902  // All registers are blocked before the first use that requires a register.
1903  // Spill the starting part of the live range up to that use.
1904  SpillBetween(current, current->Start(), register_use->pos());
1905  return;
1906  }
1907 
1908  if (block_pos[reg].Value() < current->End().Value()) {
1909  // Register becomes blocked before the current range end. Split before that
1910  // position.
1911  LiveRange* tail = SplitBetween(current,
1912  current->Start(),
1913  block_pos[reg].InstructionStart());
1914  if (!AllocationOk()) return;
1915  AddToUnhandledSorted(tail);
1916  }
1917 
1918  // Register reg is not blocked for the whole range.
1919  DCHECK(block_pos[reg].Value() >= current->End().Value());
1920  TraceAlloc("Assigning blocked reg %s to live range %d\n",
1921  RegisterName(reg),
1922  current->id());
1923  SetLiveRangeAssignedRegister(current, reg);
1924 
1925  // This register was not free. Thus we need to find and spill
1926  // parts of active and inactive live ranges that use the same register
1927  // at the same lifetime positions as current.
1928  SplitAndSpillIntersecting(current);
1929 }
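// Worked example (hypothetical positions, two registers): suppose the current
// range starts at 10, its first register-requiring use is at 14, and
// use_pos = {12, 13}. Both candidates are needed again before 14, so the head
// of the current range is spilled up to position 14 (SpillBetween). If
// instead use_pos = {12, 40} with block_pos[r1] = 25 and the current range
// ending at 30, r1 is chosen, the current range is split before 25, r1 is
// assigned to the first part, and the conflicting ranges holding r1 are
// split and spilled by SplitAndSpillIntersecting.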
1930 
1931 
1932 LifetimePosition LAllocator::FindOptimalSpillingPos(LiveRange* range,
1933  LifetimePosition pos) {
1934  HBasicBlock* block = GetBlock(pos.InstructionStart());
1935  HBasicBlock* loop_header =
1936  block->IsLoopHeader() ? block : block->parent_loop_header();
1937 
1938  if (loop_header == NULL) return pos;
1939 
1940  UsePosition* prev_use =
1941  range->PreviousUsePositionRegisterIsBeneficial(pos);
1942 
1943  while (loop_header != NULL) {
1944  // We are going to spill the live range inside the loop.
1945  // If possible, try to move the spilling position backwards to the loop
1946  // header. This will reduce the number of memory moves on the back edge.
1947  LifetimePosition loop_start = LifetimePosition::FromInstructionIndex(
1948  loop_header->first_instruction_index());
1949 
1950  if (range->Covers(loop_start)) {
1951  if (prev_use == NULL || prev_use->pos().Value() < loop_start.Value()) {
1952  // No register beneficial use inside the loop before the pos.
1953  pos = loop_start;
1954  }
1955  }
1956 
1957  // Try hoisting out to an outer loop.
1958  loop_header = loop_header->parent_loop_header();
1959  }
1960 
1961  return pos;
1962 }
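// Illustrative example (hypothetical): when a spill would land at position 57
// inside a loop whose header starts at position 40, and the range covers 40
// with no register-beneficial use between the header and the spill point, the
// spill is hoisted to 40 so the store happens once before the loop instead of
// on every back edge. The walk continues outwards, so the spill can bubble up
// to the outermost covering loop header.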
1963 
1964 
1965 void LAllocator::SplitAndSpillIntersecting(LiveRange* current) {
1966  DCHECK(current->HasRegisterAssigned());
1967  int reg = current->assigned_register();
1968  LifetimePosition split_pos = current->Start();
1969  for (int i = 0; i < active_live_ranges_.length(); ++i) {
1970  LiveRange* range = active_live_ranges_[i];
1971  if (range->assigned_register() == reg) {
1972  UsePosition* next_pos = range->NextRegisterPosition(current->Start());
1973  LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos);
1974  if (next_pos == NULL) {
1975  SpillAfter(range, spill_pos);
1976  } else {
1977  // When spilling between spill_pos and next_pos, ensure that the range
1978  // remains spilled at least until the start of the current live range.
1979  // This guarantees that we will not introduce new unhandled ranges that
1980  // start before the current range, as this violates the allocation
1981  // invariant and would lead to an inconsistent state of active and
1982  // inactive live ranges: ranges are allocated in order of their start
1983  // positions, and ranges are retired from active/inactive when the start
1984  // of the current live range is larger than their end.
1985  SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos());
1986  }
1987  if (!AllocationOk()) return;
1988  ActiveToHandled(range);
1989  --i;
1990  }
1991  }
1992 
1993  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
1994  LiveRange* range = inactive_live_ranges_[i];
1995  DCHECK(range->End().Value() > current->Start().Value());
1996  if (range->assigned_register() == reg && !range->IsFixed()) {
1997  LifetimePosition next_intersection = range->FirstIntersection(current);
1998  if (next_intersection.IsValid()) {
1999  UsePosition* next_pos = range->NextRegisterPosition(current->Start());
2000  if (next_pos == NULL) {
2001  SpillAfter(range, split_pos);
2002  } else {
2003  next_intersection = Min(next_intersection, next_pos->pos());
2004  SpillBetween(range, split_pos, next_intersection);
2005  }
2006  if (!AllocationOk()) return;
2007  InactiveToHandled(range);
2008  --i;
2009  }
2010  }
2011  }
2012 }
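// Note on the invariant above (illustrative): the current range starts at
// split_pos == current->Start(). Spilling a conflicting active range only
// from FindOptimalSpillingPos() onwards, while keeping it spilled at least
// until current->Start() (SpillBetweenUntil), guarantees that any piece put
// back on the unhandled list starts at or after the current range, so the
// linear-scan ordering of the active, inactive and unhandled sets is
// preserved.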
2013 
2014 
2015 bool LAllocator::IsBlockBoundary(LifetimePosition pos) {
2016  return pos.IsInstructionStart() &&
2017  InstructionAt(pos.InstructionIndex())->IsLabel();
2018 }
2019 
2020 
2021 LiveRange* LAllocator::SplitRangeAt(LiveRange* range, LifetimePosition pos) {
2022  DCHECK(!range->IsFixed());
2023  TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());
2024 
2025  if (pos.Value() <= range->Start().Value()) return range;
2026 
2027  // We can't properly connect live ranges if the split occurred at the end
2028  // of a control instruction.
2029  DCHECK(pos.IsInstructionStart() ||
2030  !chunk_->instructions()->at(pos.InstructionIndex())->IsControl());
2031 
2032  int vreg = GetVirtualRegister();
2033  if (!AllocationOk()) return NULL;
2034  LiveRange* result = LiveRangeFor(vreg);
2035  range->SplitAt(pos, result, zone());
2036  return result;
2037 }
2038 
2039 
2040 LiveRange* LAllocator::SplitBetween(LiveRange* range,
2041  LifetimePosition start,
2042  LifetimePosition end) {
2043  DCHECK(!range->IsFixed());
2044  TraceAlloc("Splitting live range %d in position between [%d, %d]\n",
2045  range->id(),
2046  start.Value(),
2047  end.Value());
2048 
2049  LifetimePosition split_pos = FindOptimalSplitPos(start, end);
2050  DCHECK(split_pos.Value() >= start.Value());
2051  return SplitRangeAt(range, split_pos);
2052 }
2053 
2054 
2055 LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start,
2056  LifetimePosition end) {
2057  int start_instr = start.InstructionIndex();
2058  int end_instr = end.InstructionIndex();
2059  DCHECK(start_instr <= end_instr);
2060 
2061  // We have no choice
2062  if (start_instr == end_instr) return end;
2063 
2064  HBasicBlock* start_block = GetBlock(start);
2065  HBasicBlock* end_block = GetBlock(end);
2066 
2067  if (end_block == start_block) {
2068  // The interval is split in the same basic block. Split at the latest
2069  // possible position.
2070  return end;
2071  }
2072 
2073  HBasicBlock* block = end_block;
2074  // Find the header of the outermost loop.
2075  while (block->parent_loop_header() != NULL &&
2076  block->parent_loop_header()->block_id() > start_block->block_id()) {
2077  block = block->parent_loop_header();
2078  }
2079 
2080  // We did not find any suitable outer loop. Split at the latest possible
2081  // position unless end_block is a loop header itself.
2082  if (block == end_block && !end_block->IsLoopHeader()) return end;
2083 
2084  return LifetimePosition::FromInstructionIndex(
2085  block->first_instruction_index());
2086 }
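// Illustrative example (hypothetical block layout): splitting between an
// instruction in block B1 (outside a loop) and one in block B7 (inside a loop
// headed by B4, whose block id is greater than B1's) walks from B7 up to B4
// and returns the position of B4's first instruction, so the split (and the
// resulting gap move) lands at the loop entry rather than inside the loop
// body.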
2087 
2088 
2089 void LAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
2090  LiveRange* second_part = SplitRangeAt(range, pos);
2091  if (!AllocationOk()) return;
2092  Spill(second_part);
2093 }
2094 
2095 
2096 void LAllocator::SpillBetween(LiveRange* range,
2097  LifetimePosition start,
2098  LifetimePosition end) {
2099  SpillBetweenUntil(range, start, start, end);
2100 }
2101 
2102 
2103 void LAllocator::SpillBetweenUntil(LiveRange* range,
2104  LifetimePosition start,
2105  LifetimePosition until,
2106  LifetimePosition end) {
2107  CHECK(start.Value() < end.Value());
2108  LiveRange* second_part = SplitRangeAt(range, start);
2109  if (!AllocationOk()) return;
2110 
2111  if (second_part->Start().Value() < end.Value()) {
2112  // The split result intersects with [start, end[.
2113  // Split it at a position between ]start+1, end[, spill the middle part,
2114  // and put the rest back on the unhandled list.
2115  LiveRange* third_part = SplitBetween(
2116  second_part,
2117  Max(second_part->Start().InstructionEnd(), until),
2118  end.PrevInstruction().InstructionEnd());
2119  if (!AllocationOk()) return;
2120 
2121  DCHECK(third_part != second_part);
2122 
2123  Spill(second_part);
2124  AddToUnhandledSorted(third_part);
2125  } else {
2126  // The split result does not intersect with [start, end[.
2127  // Nothing to spill. Just put it back on the unhandled list as a whole.
2128  AddToUnhandledSorted(second_part);
2129  }
2130 }
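// Worked example (hypothetical positions): SpillBetween(range, 10, 50) splits
// the range at 10; since that piece still reaches into [10, 50), it is split
// once more at an optimal position before 50, the middle piece is spilled,
// and the part from the second split onwards goes back on the unhandled list
// so it can still receive a register later. If the piece split off at 10
// already starts at or after 50, nothing is spilled and it is re-queued
// whole.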
2131 
2132 
2133 void LAllocator::Spill(LiveRange* range) {
2134  DCHECK(!range->IsSpilled());
2135  TraceAlloc("Spilling live range %d\n", range->id());
2136  LiveRange* first = range->TopLevel();
2137 
2138  if (!first->HasAllocatedSpillOperand()) {
2139  LOperand* op = TryReuseSpillSlot(range);
2140  if (op == NULL) op = chunk_->GetNextSpillSlot(range->Kind());
2141  first->SetSpillOperand(op);
2142  }
2143  range->MakeSpilled(chunk()->zone());
2144 }
2145 
2146 
2147 int LAllocator::RegisterCount() const {
2148  return num_registers_;
2149 }
2150 
2151 
2152 #ifdef DEBUG
2153 
2154 
2155 void LAllocator::Verify() const {
2156  for (int i = 0; i < live_ranges()->length(); ++i) {
2157  LiveRange* current = live_ranges()->at(i);
2158  if (current != NULL) current->Verify();
2159  }
2160 }
2161 
2162 
2163 #endif
2164 
2165 
2166 LAllocatorPhase::LAllocatorPhase(const char* name, LAllocator* allocator)
2167  : CompilationPhase(name, allocator->graph()->info()),
2168  allocator_(allocator) {
2169  if (FLAG_hydrogen_stats) {
2170  allocator_zone_start_allocation_size_ =
2171  allocator->zone()->allocation_size();
2172  }
2173 }
2174 
2175 
2176 LAllocatorPhase::~LAllocatorPhase() {
2177  if (FLAG_hydrogen_stats) {
2178  unsigned size = allocator_->zone()->allocation_size() -
2179  allocator_zone_start_allocation_size_;
2180  isolate()->GetHStatistics()->SaveTiming(name(), base::TimeDelta(), size);
2181  }
2182 
2183  if (ShouldProduceTraceOutput()) {
2184  isolate()->GetHTracer()->TraceLithium(name(), allocator_->chunk());
2185  isolate()->GetHTracer()->TraceLiveRanges(name(), allocator_);
2186  }
2187 
2188 #ifdef DEBUG
2189  if (allocator_ != NULL) allocator_->Verify();
2190 #endif
2191 }
2192 
2193 
2194 } } // namespace v8::internal