V8 Project
lithium-allocator.h
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_LITHIUM_ALLOCATOR_H_
#define V8_LITHIUM_ALLOCATOR_H_

#include "src/v8.h"

#include "src/allocation.h"
#include "src/lithium.h"
#include "src/zone.h"

namespace v8 {
namespace internal {

// Forward declarations.
class HBasicBlock;
class HGraph;
class HPhi;
class HTracer;
class HValue;
class BitVector;
class StringStream;

class LPlatformChunk;
class LOperand;
class LUnallocated;
class LGap;
class LParallelMove;
class LPointerMap;


// This class represents a single point of a LOperand's lifetime.
// For each lithium instruction there are exactly two lifetime positions:
// the beginning and the end of the instruction. Lifetime positions for
// different lithium instructions are disjoint.
class LifetimePosition {
 public:
  // Return the lifetime position that corresponds to the beginning of
  // the instruction with the given index.
  static LifetimePosition FromInstructionIndex(int index) {
    return LifetimePosition(index * kStep);
  }

  // Returns a numeric representation of this lifetime position.
  int Value() const {
    return value_;
  }

  // Returns the index of the instruction to which this lifetime position
  // corresponds.
  int InstructionIndex() const {
    DCHECK(IsValid());
    return value_ / kStep;
  }

  // Returns true if this lifetime position corresponds to the instruction
  // start.
  bool IsInstructionStart() const {
    return (value_ & (kStep - 1)) == 0;
  }

  // Returns the lifetime position for the start of the instruction which
  // corresponds to this lifetime position.
  LifetimePosition InstructionStart() const {
    DCHECK(IsValid());
    return LifetimePosition(value_ & ~(kStep - 1));
  }

  // Returns the lifetime position for the end of the instruction which
  // corresponds to this lifetime position.
  LifetimePosition InstructionEnd() const {
    DCHECK(IsValid());
    return LifetimePosition(InstructionStart().Value() + kStep / 2);
  }

  // Returns the lifetime position for the beginning of the next instruction.
  LifetimePosition NextInstruction() const {
    DCHECK(IsValid());
    return LifetimePosition(InstructionStart().Value() + kStep);
  }

  // Returns the lifetime position for the beginning of the previous
  // instruction.
  LifetimePosition PrevInstruction() const {
    DCHECK(IsValid());
    DCHECK(value_ > 1);
    return LifetimePosition(InstructionStart().Value() - kStep);
  }

  // Constructs the lifetime position which does not correspond to any
  // instruction.
  LifetimePosition() : value_(-1) {}

  // Returns true if this lifetime position corresponds to some
  // instruction.
  bool IsValid() const { return value_ != -1; }

  static inline LifetimePosition Invalid() { return LifetimePosition(); }

  static inline LifetimePosition MaxPosition() {
    // We have to use this kind of getter instead of a static member due to
    // a crash bug in GDB.
    return LifetimePosition(kMaxInt);
  }

 private:
  static const int kStep = 2;

  // Code relies on kStep being a power of two.
  STATIC_ASSERT(IS_POWER_OF_TWO(kStep));

  explicit LifetimePosition(int value) : value_(value) { }

  int value_;
};
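
// Illustrative sketch (not part of the original header): with kStep == 2,
// a LifetimePosition packs an instruction index and a start/end half into
// one integer.
//
//   LifetimePosition p = LifetimePosition::FromInstructionIndex(3);
//   p.Value();                    // 6  == 3 * kStep
//   p.IsInstructionStart();       // true, the low bit is clear
//   p.InstructionEnd().Value();   // 7  == start + kStep / 2
//   p.NextInstruction().Value();  // 8  == beginning of instruction 4
//   p.PrevInstruction().Value();  // 4  == beginning of instruction 2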


// Representation of the non-empty interval [start, end[.
class UseInterval: public ZoneObject {
 public:
  UseInterval(LifetimePosition start, LifetimePosition end)
      : start_(start), end_(end), next_(NULL) {
    DCHECK(start.Value() < end.Value());
  }

  LifetimePosition start() const { return start_; }
  LifetimePosition end() const { return end_; }
  UseInterval* next() const { return next_; }

  // Split this interval at the given position without affecting the
  // live range that owns it. The interval must contain the position.
  void SplitAt(LifetimePosition pos, Zone* zone);

  // If this interval intersects with other, return the smallest position
  // that belongs to both of them.
  LifetimePosition Intersect(const UseInterval* other) const {
    if (other->start().Value() < start_.Value()) return other->Intersect(this);
    if (other->start().Value() < end_.Value()) return other->start();
    return LifetimePosition::Invalid();
  }

  bool Contains(LifetimePosition point) const {
    return start_.Value() <= point.Value() && point.Value() < end_.Value();
  }

 private:
  void set_start(LifetimePosition start) { start_ = start; }
  void set_next(UseInterval* next) { next_ = next; }

  LifetimePosition start_;
  LifetimePosition end_;
  UseInterval* next_;

  friend class LiveRange;  // Assigns to start_.
};
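
// Illustrative sketch (not part of the original header): intervals are
// half-open, so [2, 6[ and [4, 10[ first overlap at position 4, and an
// interval's end position is not contained in it.
//
//   UseInterval a(LifetimePosition::FromInstructionIndex(1),
//                 LifetimePosition::FromInstructionIndex(3));  // [2, 6[
//   UseInterval b(LifetimePosition::FromInstructionIndex(2),
//                 LifetimePosition::FromInstructionIndex(5));  // [4, 10[
//   a.Intersect(&b).Value();                                   // 4
//   a.Contains(LifetimePosition::FromInstructionIndex(3));     // false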

// Representation of a use position.
class UsePosition: public ZoneObject {
 public:
  UsePosition(LifetimePosition pos, LOperand* operand, LOperand* hint);

  LOperand* operand() const { return operand_; }
  bool HasOperand() const { return operand_ != NULL; }

  LOperand* hint() const { return hint_; }
  bool HasHint() const;
  bool RequiresRegister() const;
  bool RegisterIsBeneficial() const;

  LifetimePosition pos() const { return pos_; }
  UsePosition* next() const { return next_; }

 private:
  void set_next(UsePosition* next) { next_ = next; }

  LOperand* const operand_;
  LOperand* const hint_;
  LifetimePosition const pos_;
  UsePosition* next_;
  bool requires_reg_;
  bool register_beneficial_;

  friend class LiveRange;
};

// Representation of SSA values' live ranges as a collection of (continuous)
// intervals over the instruction ordering.
class LiveRange: public ZoneObject {
 public:
  static const int kInvalidAssignment = 0x7fffffff;

  LiveRange(int id, Zone* zone);

  UseInterval* first_interval() const { return first_interval_; }
  UsePosition* first_pos() const { return first_pos_; }
  LiveRange* parent() const { return parent_; }
  LiveRange* TopLevel() { return (parent_ == NULL) ? this : parent_; }
  LiveRange* next() const { return next_; }
  bool IsChild() const { return parent() != NULL; }
  int id() const { return id_; }
  bool IsFixed() const { return id_ < 0; }
  bool IsEmpty() const { return first_interval() == NULL; }
  LOperand* CreateAssignedOperand(Zone* zone);
  int assigned_register() const { return assigned_register_; }
  int spill_start_index() const { return spill_start_index_; }
  void set_assigned_register(int reg, Zone* zone);
  void MakeSpilled(Zone* zone);

  // Returns the use position in this live range that follows both start
  // and the last processed use position.
  // Modifies the internal state of the live range!
  UsePosition* NextUsePosition(LifetimePosition start);

  // Returns the use position for which a register is required in this live
  // range and which follows both start and the last processed use position.
  // Modifies the internal state of the live range!
  UsePosition* NextRegisterPosition(LifetimePosition start);

  // Returns the use position for which a register is beneficial in this live
  // range and which follows both start and the last processed use position.
  // Modifies the internal state of the live range!
  UsePosition* NextUsePositionRegisterIsBeneficial(LifetimePosition start);

  // Returns the use position for which a register is beneficial in this live
  // range and which precedes start.
  UsePosition* PreviousUsePositionRegisterIsBeneficial(LifetimePosition start);

  // Can this live range be spilled at this position?
  bool CanBeSpilled(LifetimePosition pos);

  // Split this live range at the given position, which must follow the start
  // of the range.
  // All uses following the given position will be moved from this
  // live range to the result live range.
  void SplitAt(LifetimePosition position, LiveRange* result, Zone* zone);

  RegisterKind Kind() const { return kind_; }
  bool HasRegisterAssigned() const {
    return assigned_register_ != kInvalidAssignment;
  }
  bool IsSpilled() const { return spilled_; }

  LOperand* current_hint_operand() const {
    DCHECK(current_hint_operand_ == FirstHint());
    return current_hint_operand_;
  }
  LOperand* FirstHint() const {
    UsePosition* pos = first_pos_;
    while (pos != NULL && !pos->HasHint()) pos = pos->next();
    if (pos != NULL) return pos->hint();
    return NULL;
  }

  LifetimePosition Start() const {
    DCHECK(!IsEmpty());
    return first_interval()->start();
  }

  LifetimePosition End() const {
    DCHECK(!IsEmpty());
    return last_interval_->end();
  }

  bool HasAllocatedSpillOperand() const;
  LOperand* GetSpillOperand() const { return spill_operand_; }
  void SetSpillOperand(LOperand* operand);

  void SetSpillStartIndex(int start) {
    spill_start_index_ = Min(start, spill_start_index_);
  }

  bool ShouldBeAllocatedBefore(const LiveRange* other) const;
  bool CanCover(LifetimePosition position) const;
  bool Covers(LifetimePosition position);
  LifetimePosition FirstIntersection(LiveRange* other);

  // Add a new interval or a new use position to this live range.
  void EnsureInterval(LifetimePosition start,
                      LifetimePosition end,
                      Zone* zone);
  void AddUseInterval(LifetimePosition start,
                      LifetimePosition end,
                      Zone* zone);
  void AddUsePosition(LifetimePosition pos,
                      LOperand* operand,
                      LOperand* hint,
                      Zone* zone);

  // Shorten the most recently added interval by setting a new start.
  void ShortenTo(LifetimePosition start);

#ifdef DEBUG
  // True if target overlaps an existing interval.
  bool HasOverlap(UseInterval* target) const;
  void Verify() const;
#endif

 private:
  void ConvertOperands(Zone* zone);
  UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
  void AdvanceLastProcessedMarker(UseInterval* to_start_of,
                                  LifetimePosition but_not_past) const;

  int id_;
  bool spilled_;
  RegisterKind kind_;
  int assigned_register_;
  UseInterval* last_interval_;
  UseInterval* first_interval_;
  UsePosition* first_pos_;
  LiveRange* parent_;
  LiveRange* next_;
  // This is used as a cache, it doesn't affect correctness.
  mutable UseInterval* current_interval_;
  UsePosition* last_processed_use_;
  // This is used as a cache, it's invalid outside of BuildLiveRanges.
  LOperand* current_hint_operand_;
  LOperand* spill_operand_;
  int spill_start_index_;

  friend class LAllocator;  // Assigns to kind_.
};
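
// Illustrative sketch (not part of the original header) of how splitting
// behaves: ranges are normally created and split by LAllocator, with the
// child taking over all uses from the split position onwards. |range|, |id|
// and |zone| below are hypothetical values for illustration.
//
//   // Assume |range| covers [2, 14[.
//   LiveRange* child = new (zone) LiveRange(id, zone);
//   range->SplitAt(LifetimePosition::FromInstructionIndex(4), child, zone);
//   // range now covers [2, 8[; child covers [8, 14[, child->IsChild() is
//   // true and child->TopLevel() returns range's top-level parent.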


class LAllocator BASE_EMBEDDED {
 public:
  LAllocator(int first_virtual_register, HGraph* graph);

  static void TraceAlloc(const char* msg, ...);

  // Checks whether the value of a given virtual register is tagged.
  bool HasTaggedValue(int virtual_register) const;

  // Returns the register kind required by the given virtual register.
  RegisterKind RequiredRegisterKind(int virtual_register) const;

  bool Allocate(LChunk* chunk);

  const ZoneList<LiveRange*>* live_ranges() const { return &live_ranges_; }
  const Vector<LiveRange*>* fixed_live_ranges() const {
    return &fixed_live_ranges_;
  }
  const Vector<LiveRange*>* fixed_double_live_ranges() const {
    return &fixed_double_live_ranges_;
  }

  LPlatformChunk* chunk() const { return chunk_; }
  HGraph* graph() const { return graph_; }
  Isolate* isolate() const { return graph_->isolate(); }
  Zone* zone() { return &zone_; }

  int GetVirtualRegister() {
    if (next_virtual_register_ >= LUnallocated::kMaxVirtualRegisters) {
      allocation_ok_ = false;
      // Maintain the invariant that we return something below the maximum.
      return 0;
    }
    return next_virtual_register_++;
  }

  bool AllocationOk() { return allocation_ok_; }

  void MarkAsOsrEntry() {
    // There can be only one.
    DCHECK(!has_osr_entry_);
    // Simply set a flag to find and process the instruction later.
    has_osr_entry_ = true;
  }

#ifdef DEBUG
  void Verify() const;
#endif

  BitVector* assigned_registers() {
    return assigned_registers_;
  }
  BitVector* assigned_double_registers() {
    return assigned_double_registers_;
  }

 private:
  void MeetRegisterConstraints();
  void ResolvePhis();
  void BuildLiveRanges();
  void AllocateGeneralRegisters();
  void AllocateDoubleRegisters();
  void ConnectRanges();
  void ResolveControlFlow();
  void PopulatePointerMaps();
  void AllocateRegisters();
  bool CanEagerlyResolveControlFlow(HBasicBlock* block) const;
  inline bool SafePointsAreInOrder() const;

  // Liveness analysis support.
  void InitializeLivenessAnalysis();
  BitVector* ComputeLiveOut(HBasicBlock* block);
  void AddInitialIntervals(HBasicBlock* block, BitVector* live_out);
  void ProcessInstructions(HBasicBlock* block, BitVector* live);
  void MeetRegisterConstraints(HBasicBlock* block);
  void MeetConstraintsBetween(LInstruction* first,
                              LInstruction* second,
                              int gap_index);
  void ResolvePhis(HBasicBlock* block);

  // Helper methods for building intervals.
  LOperand* AllocateFixed(LUnallocated* operand, int pos, bool is_tagged);
  LiveRange* LiveRangeFor(LOperand* operand);
  void Define(LifetimePosition position, LOperand* operand, LOperand* hint);
  void Use(LifetimePosition block_start,
           LifetimePosition position,
           LOperand* operand,
           LOperand* hint);
  void AddConstraintsGapMove(int index, LOperand* from, LOperand* to);

  // Helper methods for updating the live range lists.
  void AddToActive(LiveRange* range);
  void AddToInactive(LiveRange* range);
  void AddToUnhandledSorted(LiveRange* range);
  void AddToUnhandledUnsorted(LiveRange* range);
  void SortUnhandled();
  bool UnhandledIsSorted();
  void ActiveToHandled(LiveRange* range);
  void ActiveToInactive(LiveRange* range);
  void InactiveToHandled(LiveRange* range);
  void InactiveToActive(LiveRange* range);
  void FreeSpillSlot(LiveRange* range);
  LOperand* TryReuseSpillSlot(LiveRange* range);

  // Helper methods for allocating registers.
  bool TryAllocateFreeReg(LiveRange* range);
  void AllocateBlockedReg(LiveRange* range);

  // Live range splitting helpers.

  // Split the given range at the given position.
  // If the range starts at or after the given position then the
  // original range is returned.
  // Otherwise returns the live range that starts at pos and contains
  // all uses from the original range that follow pos. Uses at pos will
  // still be owned by the original range after splitting.
  LiveRange* SplitRangeAt(LiveRange* range, LifetimePosition pos);

  // Split the given range at a position from the interval [start, end].
  LiveRange* SplitBetween(LiveRange* range,
                          LifetimePosition start,
                          LifetimePosition end);

  // Find a lifetime position in the interval [start, end] which
  // is optimal for splitting: it is either the header of the outermost
  // loop covered by this interval or the latest possible position.
  LifetimePosition FindOptimalSplitPos(LifetimePosition start,
                                       LifetimePosition end);

  // Spill the given live range after position pos.
  void SpillAfter(LiveRange* range, LifetimePosition pos);

  // Spill the given live range after position [start] and up to position
  // [end].
  void SpillBetween(LiveRange* range,
                    LifetimePosition start,
                    LifetimePosition end);

  // Spill the given live range after position [start] and up to position
  // [end]. The range is guaranteed to be spilled at least until position
  // [until].
  void SpillBetweenUntil(LiveRange* range,
                         LifetimePosition start,
                         LifetimePosition until,
                         LifetimePosition end);

  void SplitAndSpillIntersecting(LiveRange* range);

  // If we are trying to spill a range inside a loop, try to
  // hoist the spill position out to the point just before the loop.
  LifetimePosition FindOptimalSpillingPos(LiveRange* range,
                                          LifetimePosition pos);

  void Spill(LiveRange* range);
  bool IsBlockBoundary(LifetimePosition pos);

  // Helper methods for resolving control flow.
  void ResolveControlFlow(LiveRange* range,
                          HBasicBlock* block,
                          HBasicBlock* pred);

  inline void SetLiveRangeAssignedRegister(LiveRange* range, int reg);

  // Return the parallel move that should be used to connect ranges split at
  // the given position.
  LParallelMove* GetConnectingParallelMove(LifetimePosition pos);

  // Return the block which contains the given lifetime position.
  HBasicBlock* GetBlock(LifetimePosition pos);

  // Helper methods for the fixed registers.
  int RegisterCount() const;
  static int FixedLiveRangeID(int index) { return -index - 1; }
  static int FixedDoubleLiveRangeID(int index);
  LiveRange* FixedLiveRangeFor(int index);
  LiveRange* FixedDoubleLiveRangeFor(int index);
  LiveRange* LiveRangeFor(int index);
  HPhi* LookupPhi(LOperand* operand) const;
  LGap* GetLastGap(HBasicBlock* block);

  const char* RegisterName(int allocation_index);

  inline bool IsGapAt(int index);

  inline LInstruction* InstructionAt(int index);

  inline LGap* GapAt(int index);

  Zone zone_;

  LPlatformChunk* chunk_;

  // During liveness analysis keep a mapping from block id to live_in sets
  // for blocks already analyzed.
  ZoneList<BitVector*> live_in_sets_;

  // Liveness analysis results.
  ZoneList<LiveRange*> live_ranges_;

  // Lists of live ranges.
  EmbeddedVector<LiveRange*, Register::kMaxNumAllocatableRegisters>
      fixed_live_ranges_;
  EmbeddedVector<LiveRange*, DoubleRegister::kMaxNumAllocatableRegisters>
      fixed_double_live_ranges_;
  ZoneList<LiveRange*> unhandled_live_ranges_;
  ZoneList<LiveRange*> active_live_ranges_;
  ZoneList<LiveRange*> inactive_live_ranges_;
  ZoneList<LiveRange*> reusable_slots_;

  // Next virtual register number to be assigned to temporaries.
  int next_virtual_register_;
  int first_artificial_register_;
  GrowableBitVector double_artificial_registers_;

  RegisterKind mode_;
  int num_registers_;

  BitVector* assigned_registers_;
  BitVector* assigned_double_registers_;

  HGraph* graph_;

  bool has_osr_entry_;

  // Indicates success or failure during register allocation.
  bool allocation_ok_;

#ifdef DEBUG
  LifetimePosition allocation_finger_;
#endif

  DISALLOW_COPY_AND_ASSIGN(LAllocator);
};
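
// Illustrative sketch (not part of the original header): the allocator is
// driven from chunk building, roughly as below. The surrounding code is an
// assumption for illustration; Allocate() returns false when allocation
// fails, e.g. when GetVirtualRegister() exhausts
// LUnallocated::kMaxVirtualRegisters, and the caller then falls back to
// unoptimized code.
//
//   LAllocator allocator(graph->GetMaximumValueID(), graph);
//   LChunk* chunk = ...;  // built by the platform's LChunkBuilder
//   if (!allocator.Allocate(chunk)) {
//     return NULL;  // bail out of optimized compilation
//   }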


class LAllocatorPhase : public CompilationPhase {
 public:
  LAllocatorPhase(const char* name, LAllocator* allocator);
  ~LAllocatorPhase();

 private:
  LAllocator* allocator_;
  unsigned allocator_zone_start_allocation_size_;

  DISALLOW_COPY_AND_ASSIGN(LAllocatorPhase);
};


} }  // namespace v8::internal

#endif  // V8_LITHIUM_ALLOCATOR_H_