void HRangeAnalysisPhase::TraceRange(const char* msg, ...) {
  if (FLAG_trace_range) {
    va_list arguments;
    va_start(arguments, msg);
    base::OS::VPrint(msg, arguments);
    va_end(arguments);
  }
}
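
// Usage note (added): TraceRange() only prints when the --trace-range flag is
// set, so the instrumentation calls below are cheap in normal runs. With the
// flag on, the phase emits lines such as
//   Analyzing block B4
//   Updated range of 13 set to [0,255]
// built from the format strings passed to it.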

void HRangeAnalysisPhase::Run() {
  HBasicBlock* block(graph()->entry_block());
  ZoneList<Pending> stack(graph()->blocks()->length(), zone());
  while (block != NULL) {
    TraceRange("Analyzing block B%d\n", block->block_id());

    // Infer range based on control flow.
    if (block->predecessors()->length() == 1) {
      HBasicBlock* pred = block->predecessors()->first();
      if (pred->end()->IsCompareNumericAndBranch()) {
        InferControlFlowRange(HCompareNumericAndBranch::cast(pred->end()),
                              block);
      }
    }

    // Process phi instructions.
    for (int i = 0; i < block->phis()->length(); ++i) {
      HPhi* phi = block->phis()->at(i);
      InferRange(phi);
    }

    // Go through all instructions of the current block.
    for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
      HValue* value = it.Current();
      InferRange(value);

      // Compute the bailout-on-minus-zero flag.
      if (value->IsChange()) {
        HChange* instr = HChange::cast(value);
        // If these assertions fail, we are missing information on how the
        // conversion interacts with minus zero.
        DCHECK(instr->to().IsTagged() ||
               instr->to().IsDouble() ||
               instr->to().IsSmiOrInteger32());
        DCHECK(instr->from().IsTagged() ||
               instr->from().IsDouble() ||
               instr->from().IsSmiOrInteger32());
      } else if (value->IsCompareMinusZeroAndBranch()) {
        HCompareMinusZeroAndBranch* instr =
            HCompareMinusZeroAndBranch::cast(value);
        if (instr->value()->representation().IsSmiOrInteger32()) {
          PropagateMinusZeroChecks(instr->value());
        }
      }
    }

    // Continue analysis in all dominated blocks.
    const ZoneList<HBasicBlock*>* dominated_blocks(block->dominated_blocks());
    if (!dominated_blocks->is_empty()) {
      // Continue with the first dominated block, and push the remaining
      // blocks on the stack (in reverse order).
      int last_changed_range = changed_ranges_.length();
      for (int i = dominated_blocks->length() - 1; i > 0; --i) {
        stack.Add(Pending(dominated_blocks->at(i), last_changed_range),
                  zone());
      }
      block = dominated_blocks->at(0);
    } else if (!stack.is_empty()) {
      // Pop the next pending block from the stack.
      Pending pending = stack.RemoveLast();
      RollBackTo(pending.last_changed_range());
      block = pending.block();
    } else {
      // All blocks done.
      block = NULL;
    }
  }

  // The ranges are not valid anymore due to SSI vs. SSA!
  for (int i = 0; i < graph()->blocks()->length(); ++i) {
    HBasicBlock* block = graph()->blocks()->at(i);
    for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
      HInstruction* instr = it.Current();
      if (instr->HasRange()) instr->PoisonRange();
    }
  }
}
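
// Run() calls InferControlFlowRange() above, but the extraction dropped its
// body. A sketch reconstructed from the call site and the Token helpers it
// relies on (an approximation of the original, not a verbatim copy):
void HRangeAnalysisPhase::InferControlFlowRange(HCompareNumericAndBranch* test,
                                                HBasicBlock* dest) {
  DCHECK((test->FirstSuccessor() == dest) ==
         (test->SecondSuccessor() != dest));
  if (test->representation().IsSmiOrInteger32()) {
    Token::Value op = test->token();
    if (test->SecondSuccessor() == dest) {
      // On the false successor the comparison failed, so the negated
      // operator holds along this path.
      op = Token::NegateCompareOp(op);
    }
    // "left op right" also tells us "right inverted_op left".
    Token::Value inverted_op = Token::ReverseCompareOp(op);
    UpdateControlFlowRange(op, test->left(), test->right());
    UpdateControlFlowRange(inverted_op, test->right(), test->left());
  }
}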

// We know that value [op] other. Use this information to update the range on
// value.
void HRangeAnalysisPhase::UpdateControlFlowRange(Token::Value op,
                                                 HValue* value,
                                                 HValue* other) {
  Range temp_range;
  Range* range = other->range() != NULL ? other->range() : &temp_range;
  Range* new_range = NULL;

  TraceRange("Control flow range infer %d %s %d\n",
             value->id(),
             Token::Name(op),
             other->id());

  if (op == Token::EQ || op == Token::EQ_STRICT) {
    // The same range has to apply for value.
    new_range = range->Copy(graph()->zone());
  } else if (op == Token::LT || op == Token::LTE) {
    new_range = range->CopyClearLower(graph()->zone());
    if (op == Token::LT) {
      new_range->AddConstant(-1);
    }
  } else if (op == Token::GT || op == Token::GTE) {
    new_range = range->CopyClearUpper(graph()->zone());
    if (op == Token::GT) {
      new_range->AddConstant(1);
    }
  }

  if (new_range != NULL && !new_range->IsMostGeneric()) {
    AddRange(value, new_range);
  }
}
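
// Worked example (added note): if the dominating branch established
// value < other and other's range is [0, 10], CopyClearLower() drops the
// lower bound to kMinInt, giving [kMinInt, 10], and AddConstant(-1) shifts
// both bounds (saturating at the int32 limits) to [kMinInt, 9], i.e.
// value <= 9. For value <= other the AddConstant step is skipped.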

void HRangeAnalysisPhase::InferRange(HValue* value) {
  DCHECK(!value->HasRange());
  if (!value->representation().IsNone()) {
    value->ComputeInitialRange(graph()->zone());
    Range* range = value->range();
    TraceRange("Initial inferred range of %d (%s) set to [%d,%d]\n",
               value->id(),
               value->Mnemonic(),
               range->lower(),
               range->upper());
  }
}

void HRangeAnalysisPhase::AddRange(HValue* value, Range* range) {
  Range* original_range = value->range();
  value->AddNewRange(range, graph()->zone());
  changed_ranges_.Add(value, zone());
  Range* new_range = value->range();
  TraceRange("Updated range of %d set to [%d,%d]\n",
             value->id(),
             new_range->lower(),
             new_range->upper());
  if (original_range != NULL) {
    TraceRange("Original range was [%d,%d]\n",
               original_range->lower(),
               original_range->upper());
  }
  TraceRange("New information was [%d,%d]\n",
             range->lower(),
             range->upper());
}
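
// Design note (added): a range refined from a branch only holds in the blocks
// that branch dominates. AddRange() therefore records every changed value in
// changed_ranges_, so that RollBackTo() can strip the refinements again when
// Run() leaves the corresponding subtree of the dominator tree. This is also
// why Run() poisons all ranges at the end: the intermediate results are
// SSI-style facts that are unsound once the walk is over.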

void HRangeAnalysisPhase::PropagateMinusZeroChecks(HValue* value) {
  DCHECK(worklist_.is_empty());
  DCHECK(in_worklist_.IsEmpty());

  AddToWorklist(value);
  while (!worklist_.is_empty()) {
    value = worklist_.RemoveLast();

    if (value->IsPhi()) {
      // For phis, we must propagate the check to all of its inputs.
      HPhi* phi = HPhi::cast(value);
      for (int i = 0; i < phi->OperandCount(); ++i) {
        AddToWorklist(phi->OperandAt(i));
      }
    } else if (value->IsUnaryMathOperation()) {
      HUnaryMathOperation* instr = HUnaryMathOperation::cast(value);
      if (instr->representation().IsSmiOrInteger32() &&
          !instr->value()->representation().Equals(instr->representation())) {
        if (instr->value()->range() == NULL ||
            instr->value()->range()->CanBeMinusZero()) {
          instr->SetFlag(HValue::kBailoutOnMinusZero);
        }
      }
      if (instr->RequiredInputRepresentation(0).IsSmiOrInteger32() &&
          instr->representation().Equals(
              instr->RequiredInputRepresentation(0))) {
        AddToWorklist(instr->value());
      }
    } else if (value->IsChange()) {
      HChange* instr = HChange::cast(value);
      if (!instr->from().IsSmiOrInteger32() &&
          !instr->CanTruncateToInt32() &&
          (instr->value()->range() == NULL ||
           instr->value()->range()->CanBeMinusZero())) {
        instr->SetFlag(HValue::kBailoutOnMinusZero);
      }
    } else if (value->IsForceRepresentation()) {
      HForceRepresentation* instr = HForceRepresentation::cast(value);
      AddToWorklist(instr->value());
    } else if (value->IsMod()) {
      HMod* instr = HMod::cast(value);
      if (instr->range() == NULL || instr->range()->CanBeMinusZero()) {
        instr->SetFlag(HValue::kBailoutOnMinusZero);
        AddToWorklist(instr->left());
      }
    } else if (value->IsDiv() || value->IsMul()) {
      HBinaryOperation* instr = HBinaryOperation::cast(value);
      if (instr->range() == NULL || instr->range()->CanBeMinusZero()) {
        instr->SetFlag(HValue::kBailoutOnMinusZero);
      }
      AddToWorklist(instr->right());
      AddToWorklist(instr->left());
    } else if (value->IsMathFloorOfDiv()) {
      HMathFloorOfDiv* instr = HMathFloorOfDiv::cast(value);
      instr->SetFlag(HValue::kBailoutOnMinusZero);
    } else if (value->IsAdd() || value->IsSub()) {
      HBinaryOperation* instr = HBinaryOperation::cast(value);
      if (instr->range() == NULL || instr->range()->CanBeMinusZero()) {
        // Propagate to the left argument. If the left argument cannot be -0,
        // then the result of the add/sub operation cannot be either.
        AddToWorklist(instr->left());
      }
    } else if (value->IsMathMinMax()) {
      HMathMinMax* instr = HMathMinMax::cast(value);
      AddToWorklist(instr->right());
      AddToWorklist(instr->left());
    }
  }

  in_worklist_.Clear();
  DCHECK(in_worklist_.IsEmpty());
  DCHECK(worklist_.is_empty());
}
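
// Why minus zero matters (added note): in Smi/int32 representation -0 and 0
// collapse to the same value, but JavaScript can observe the difference
// (e.g. 1 / -0 is -Infinity while 1 / 0 is +Infinity). An instruction whose
// int32 result might stand in for -0 therefore gets kBailoutOnMinusZero and
// deoptimizes when that case actually occurs.
//
// AddToWorklist() is defined in the class declaration; a sketch matching the
// worklist_/in_worklist_ usage above (an approximation, not a verbatim copy):
void HRangeAnalysisPhase::AddToWorklist(HValue* value) {
  // De-duplicate by value id so every instruction is processed at most once.
  if (in_worklist_.Contains(value->id())) return;
  in_worklist_.Add(value->id());
  worklist_.Add(value, zone());
}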