static BoundsCheckKey* Create(Zone* zone,
                              HBoundsCheck* check,
                              int32_t* offset) {
  if (!check->index()->representation().IsSmiOrInteger32()) return NULL;

  HValue* index_base = NULL;
  HConstant* constant = NULL;
  bool is_sub = false;

  if (check->index()->IsAdd()) {
    HAdd* index = HAdd::cast(check->index());
    if (index->left()->IsConstant()) {
      constant = HConstant::cast(index->left());
      index_base = index->right();
    } else if (index->right()->IsConstant()) {
      constant = HConstant::cast(index->right());
      index_base = index->left();
    }
  } else if (check->index()->IsSub()) {
    HSub* index = HSub::cast(check->index());
    is_sub = true;
    if (index->right()->IsConstant()) {
      constant = HConstant::cast(index->right());
      index_base = index->left();
    }
  } else if (check->index()->IsConstant()) {
    index_base = check->block()->graph()->GetConstant0();
    constant = HConstant::cast(check->index());
  }

  if (constant != NULL && constant->HasInteger32Value()) {
    *offset = is_sub ? - constant->Integer32Value()
                     : constant->Integer32Value();
  } else {
    *offset = 0;
    index_base = check->index();
  }

  return new(zone) BoundsCheckKey(index_base, check->length());
}
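A minimal standalone sketch of the same normalization, useful for seeing what Create() extracts: an index of the form base + c, base - c, or a bare constant maps to a (base, signed offset) pair, so a[i], a[i + 3] and a[i - 2] all share one key with offsets 0, +3 and -2. The IndexExpr and NormalizedOffset names below are hypothetical, not part of V8.

// Illustration only (not from hydrogen-bce.cc); all names are hypothetical.
#include <cstdint>
#include <cstdio>

struct IndexExpr {
  const char* base;   // symbolic index base, e.g. "i"
  int32_t constant;   // constant operand, if any
  bool has_constant;  // whether a constant operand is present
  bool is_sub;        // true for base - constant, false for base + constant
};

// Mirrors the normalization above: a subtraction flips the sign of the
// offset, and an index with no constant part gets offset 0.
static int32_t NormalizedOffset(const IndexExpr& e) {
  if (!e.has_constant) return 0;
  return e.is_sub ? -e.constant : e.constant;
}

int main() {
  const IndexExpr exprs[] = {
      {"i", 0, false, false},  // a[i]     -> offset  0
      {"i", 3, true, false},   // a[i + 3] -> offset +3
      {"i", 2, true, true},    // a[i - 2] -> offset -2
  };
  for (const IndexExpr& e : exprs) {
    std::printf("base=%s offset=%d\n", e.base, NormalizedOffset(e));
  }
  return 0;
}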
void CoverCheck(HBoundsCheck* new_check, int32_t new_offset) {
  DCHECK(new_check->index()->representation().IsSmiOrInteger32());
  bool keep_new_check = false;

  if (new_offset > upper_offset_) {
    upper_offset_ = new_offset;
    if (HasSingleCheck()) {
      keep_new_check = true;
      upper_check_ = new_check;
    } else {
      TightenCheck(upper_check_, new_check, new_offset);
      UpdateUpperOffsets(upper_check_, upper_offset_);
    }
  } else if (new_offset < lower_offset_) {
    lower_offset_ = new_offset;
    if (HasSingleCheck()) {
      keep_new_check = true;
      lower_check_ = new_check;
    } else {
      TightenCheck(lower_check_, new_check, new_offset);
      UpdateLowerOffsets(lower_check_, lower_offset_);
    }
  }

  if (!keep_new_check) {
    if (FLAG_trace_bce) {
      base::OS::Print("Eliminating check #%d after tightening\n",
                      new_check->id());
    }
    new_check->block()->graph()->isolate()->counters()->
        bounds_checks_eliminated()->Increment();
    new_check->DeleteAndReplaceWith(new_check->ActualValue());
  } else {
    HBoundsCheck* first_check = new_check == lower_check_ ? upper_check_
                                                          : lower_check_;
    if (FLAG_trace_bce) {
      base::OS::Print("Moving second check #%d after first check #%d\n",
                      new_check->id(), first_check->id());
    }
    // The length is guaranteed to be live at first_check.
    DCHECK(new_check->length() == first_check->length());
    HInstruction* old_position = new_check->next();
    new_check->Unlink();
    new_check->InsertAfter(first_check);
    MoveIndexIfNecessary(new_check->index(), new_check, old_position);
  }
}
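The covering logic can be pictured on plain integers: per (index base, length) key the pass tracks an offset interval [lower, upper]; a new check whose offset lies inside the interval is redundant, and one outside widens the interval (in the real pass by tightening an existing HBoundsCheck rather than keeping both). A hedged sketch with hypothetical names:

// Illustration only (not from hydrogen-bce.cc); all names are hypothetical.
#include <cstdint>
#include <cstdio>

struct CoveredRange {
  int32_t lower = 0;  // both bounds start at the first check's offset
  int32_t upper = 0;

  // Returns true if a check at `offset` is already covered (and can be
  // eliminated); otherwise widens the covered interval and returns false.
  bool CoverOrEliminate(int32_t offset) {
    if (offset >= lower && offset <= upper) return true;
    if (offset > upper) upper = offset;
    if (offset < lower) lower = offset;
    return false;
  }
};

int main() {
  CoveredRange r;                               // first check a[i]: [0, 0]
  std::printf("%d\n", r.CoverOrEliminate(3));   // a[i + 3]: widens to [0, 3]  -> 0
  std::printf("%d\n", r.CoverOrEliminate(1));   // a[i + 1]: already covered   -> 1
  std::printf("%d\n", r.CoverOrEliminate(-2));  // a[i - 2]: widens to [-2, 3] -> 0
  return 0;
}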
// BoundsCheckBbData constructor (full parameter list):
BoundsCheckBbData(BoundsCheckKey* key, int32_t lower_offset, int32_t upper_offset,
                  HBasicBlock* bb, HBoundsCheck* lower_check,
                  HBoundsCheck* upper_check, BoundsCheckBbData* next_in_bb,
                  BoundsCheckBbData* father_in_dt);
void MoveIndexIfNecessary(HValue* index_raw,
                          HBoundsCheck* insert_before,
                          HInstruction* end_of_scan_range) {
  // index_raw can be HAdd(index_base, offset), HSub(index_base, offset),
  // HConstant(offset) or index_base directly; in the last case there is
  // nothing to move.
  if (index_raw->IsAdd() || index_raw->IsSub()) {
    HArithmeticBinaryOperation* index =
        HArithmeticBinaryOperation::cast(index_raw);
    HValue* left_input = index->left();
    HValue* right_input = index->right();
    bool must_move_index = false;
    bool must_move_left_input = false;
    bool must_move_right_input = false;
    // Scan backwards from the old position to the new one; anything defined
    // in between would end up below its use and must be moved as well.
    for (HInstruction* cursor = end_of_scan_range; cursor != insert_before;) {
      if (cursor == left_input) must_move_left_input = true;
      if (cursor == right_input) must_move_right_input = true;
      if (cursor == index) must_move_index = true;
      if (cursor->previous() == NULL) {
        cursor = cursor->block()->dominator()->end();
      } else {
        cursor = cursor->previous();
      }
    }
    if (must_move_index) {
      index->Unlink();
      index->InsertBefore(insert_before);
    }
    // Only constants ever need to be moved here: mergeable checks share the
    // same index base, so the non-constant operand is already defined early
    // enough.
    if (must_move_left_input) {
      HConstant::cast(left_input)->Unlink();
      HConstant::cast(left_input)->InsertBefore(index);
    }
    if (must_move_right_input) {
      HConstant::cast(right_input)->Unlink();
      HConstant::cast(right_input)->InsertBefore(index);
    }
  } else if (index_raw->IsConstant()) {
    HConstant* index = HConstant::cast(index_raw);
    bool must_move = false;
    for (HInstruction* cursor = end_of_scan_range; cursor != insert_before;) {
      if (cursor == index) must_move = true;
      if (cursor->previous() == NULL) {
        cursor = cursor->block()->dominator()->end();
      } else {
        cursor = cursor->previous();
      }
    }
    if (must_move) {
      index->Unlink();
      index->InsertBefore(insert_before);
    }
  }
}
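The backward scan above walks the instruction list from the old position toward the new one, hopping to the end of the dominating block when it reaches a block head; any operand found in that range is defined too late and has to be moved too. A toy version with hypothetical ToyBlock and ToyInstr types:

// Illustration only (not from hydrogen-bce.cc); all names are hypothetical.
#include <cstdio>

struct ToyBlock;

struct ToyInstr {
  const char* name;
  ToyInstr* previous = nullptr;
  ToyBlock* block = nullptr;
};

struct ToyBlock {
  ToyBlock* dominator = nullptr;
  ToyInstr* last = nullptr;  // end() of the block
};

// Scans backwards from end_of_scan_range (stopping at insert_before) and
// reports whether candidate is encountered in between.
static bool FoundBetween(ToyInstr* end_of_scan_range,
                         ToyInstr* insert_before,
                         ToyInstr* candidate) {
  bool found = false;
  for (ToyInstr* cursor = end_of_scan_range; cursor != insert_before;) {
    if (cursor == candidate) found = true;
    cursor = (cursor->previous == nullptr) ? cursor->block->dominator->last
                                           : cursor->previous;
  }
  return found;
}

int main() {
  ToyBlock dom, block;
  block.dominator = &dom;

  ToyInstr a{"a"}, b{"b"}, c{"c"}, d{"d"};
  // Dominating block: a -> b (b is the block's last instruction).
  a.block = &dom; b.block = &dom; b.previous = &a; dom.last = &b;
  // Dominated block: c -> d.
  c.block = &block; d.block = &block; d.previous = &c; block.last = &d;

  // Scan backwards from d, stopping at a: visits d, c, then hops to b.
  std::printf("%d\n", FoundBetween(&d, &a, &b));  // 1: b lies in the range
  std::printf("%d\n", FoundBetween(&d, &a, &a));  // 0: a is the stop point
  return 0;
}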
void TightenCheck(HBoundsCheck* original_check,
                  HBoundsCheck* tighter_check,
                  int32_t new_offset) {
  DCHECK(original_check->length() == tighter_check->length());
  MoveIndexIfNecessary(tighter_check->index(), original_check, tighter_check);
  original_check->ReplaceAllUsesWith(original_check->index());
  original_check->SetOperandAt(0, tighter_check->index());
  if (FLAG_trace_bce) {
    base::OS::Print("Tightened check #%d with offset %d from #%d\n",
                    original_check->id(), new_offset, tighter_check->id());
  }
}
void BoundsCheckTable::Insert(BoundsCheckKey* key,
                              BoundsCheckBbData* data,
                              Zone* zone) {
  Lookup(key, key->Hash(), true, ZoneAllocationPolicy(zone))->value = data;
}


void BoundsCheckTable::Delete(BoundsCheckKey* key) {
  Remove(key, key->Hash());
}
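The table is keyed by the (index base, length) pair, compared by identity and hashed from the two values' hash codes (see BoundsCheckKeyMatch and Hashcode in the declarations below). A rough std::unordered_map equivalent with hypothetical ToyValue and ToyData types:

// Illustration only (not from hydrogen-bce.cc); all names are hypothetical.
#include <cstdint>
#include <unordered_map>

struct ToyValue { intptr_t hashcode; };
struct ToyData {};

struct ToyKey {
  const ToyValue* index_base;
  const ToyValue* length;
  bool operator==(const ToyKey& other) const {
    return index_base == other.index_base && length == other.length;
  }
};

struct ToyKeyHash {
  size_t operator()(const ToyKey& key) const {
    return static_cast<size_t>(key.index_base->hashcode ^ key.length->hashcode);
  }
};

using ToyBoundsCheckTable = std::unordered_map<ToyKey, ToyData*, ToyKeyHash>;

int main() {
  ToyValue index_base{0x1234}, length{0x5678};
  ToyData data;
  ToyBoundsCheckTable table;
  ToyKey key{&index_base, &length};
  table[key] = &data;  // Insert
  table.erase(key);    // Delete
  return 0;
}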
void HBoundsCheckEliminationPhase::EliminateRedundantBoundsChecks(
    HBasicBlock* entry) {
  // Iterative depth-first walk over the dominator tree with an explicit stack.
  HBoundsCheckEliminationState* stack =
      zone()->NewArray<HBoundsCheckEliminationState>(graph()->blocks()->length());
  stack[0].block_ = entry;
  stack[0].bb_data_list_ = PreProcessBlock(entry);
  stack[0].index_ = 0;
  int stack_depth = 1;

  while (stack_depth > 0) {
    int current = stack_depth - 1;
    HBoundsCheckEliminationState* state = &stack[current];
    const ZoneList<HBasicBlock*>* children = state->block_->dominated_blocks();

    if (state->index_ < children->length()) {
      // Push the next dominated block and pre-process it.
      HBasicBlock* child = children->at(state->index_++);
      int next = stack_depth++;
      stack[next].block_ = child;
      stack[next].bb_data_list_ = PreProcessBlock(child);
      stack[next].index_ = 0;
    } else {
      // All dominated blocks visited: post-process and pop.
      PostProcessBlock(state->block_, state->bb_data_list_);
      stack_depth--;
    }
  }
}
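The traversal is a depth-first walk of the dominator tree made iterative with an explicit stack; each state records the block, its bounds-check data, and how many dominated children have been visited so far. A self-contained sketch with hypothetical names, showing where the pre- and post-processing hooks run:

// Illustration only (not from hydrogen-bce.cc); all names are hypothetical.
#include <cstdio>
#include <vector>

struct ToyState {
  int block;
  size_t child_index;
};

static void Traverse(const std::vector<std::vector<int>>& dominated, int entry) {
  std::vector<ToyState> stack;
  std::printf("pre  #%d\n", entry);            // PreProcessBlock(entry)
  stack.push_back({entry, 0});

  while (!stack.empty()) {
    ToyState& state = stack.back();
    const std::vector<int>& children = dominated[state.block];
    if (state.child_index < children.size()) {
      int child = children[state.child_index++];
      std::printf("pre  #%d\n", child);        // PreProcessBlock(child)
      stack.push_back({child, 0});
    } else {
      std::printf("post #%d\n", state.block);  // PostProcessBlock(block)
      stack.pop_back();
    }
  }
}

int main() {
  // Block 0 dominates 1 and 2; block 1 dominates 3.
  std::vector<std::vector<int>> dominated = {{1, 2}, {3}, {}, {}};
  Traverse(dominated, 0);
  return 0;
}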
BoundsCheckBbData* HBoundsCheckEliminationPhase::PreProcessBlock(
    HBasicBlock* bb) {
  BoundsCheckBbData* bb_data_list = NULL;

  for (HInstructionIterator it(bb); !it.Done(); it.Advance()) {
    HInstruction* i = it.Current();
    if (!i->IsBoundsCheck()) continue;

    HBoundsCheck* check = HBoundsCheck::cast(i);
    int32_t offset = 0;
    BoundsCheckKey* key = BoundsCheckKey::Create(zone(), check, &offset);
    if (key == NULL) continue;

    BoundsCheckBbData** data_p = table_.LookupOrInsert(key, zone());
    BoundsCheckBbData* data = *data_p;
    if (data == NULL) {
      // First check for this key: start a fresh covered range [offset, offset].
      bb_data_list = new(zone()) BoundsCheckBbData(key, offset, offset, bb,
                                                   check, check,
                                                   bb_data_list, NULL);
      *data_p = bb_data_list;
      if (FLAG_trace_bce) {
        base::OS::Print("Fresh bounds check data for block #%d: [%d]\n",
                        bb->block_id(), offset);
      }
    } else if (data->OffsetIsCovered(offset)) {
      bb->graph()->isolate()->counters()->
          bounds_checks_eliminated()->Increment();
      if (FLAG_trace_bce) {
        base::OS::Print("Eliminating bounds check #%d, offset %d is covered\n",
                        check->id(), offset);
      }
      check->DeleteAndReplaceWith(check->ActualValue());
    } else if (data->BasicBlock() == bb) {
      data->CoverCheck(check, offset);
    } else if (graph()->use_optimistic_licm() ||
               bb->IsLoopSuccessorDominator()) {
      int32_t new_lower_offset = offset < data->LowerOffset()
          ? offset : data->LowerOffset();
      int32_t new_upper_offset = offset > data->UpperOffset()
          ? offset : data->UpperOffset();
      bb_data_list = new(zone()) BoundsCheckBbData(key, new_lower_offset,
                                                   new_upper_offset, bb,
                                                   data->LowerCheck(),
                                                   data->UpperCheck(),
                                                   bb_data_list, data);
      if (FLAG_trace_bce) {
        base::OS::Print("Updated bounds check data for block #%d: [%d - %d]\n",
                        bb->block_id(), new_lower_offset, new_upper_offset);
      }
      table_.Insert(key, bb_data_list, zone());
    }
  }

  return bb_data_list;
}
void HBoundsCheckEliminationPhase::PostProcessBlock(
    HBasicBlock* bb, BoundsCheckBbData* data) {
  // Undo this block's table updates: restore the dominating block's data
  // where there was one, otherwise drop the key the block introduced.
  while (data != NULL) {
    if (data->FatherInDominatorTree()) {
      table_.Insert(data->Key(), data->FatherInDominatorTree(), zone());
    } else {
      table_.Delete(data->Key());
    }
    data = data->NextInBasicBlock();
  }
}
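PostProcessBlock gives the table dominator-tree scoping: every entry a block installs is undone when the traversal leaves that block, either by restoring the dominating block's data (the father_in_dt_ link) or by deleting the key the block introduced. A sketch of that save/restore discipline on a plain map, with hypothetical names:

// Illustration only (not from hydrogen-bce.cc); all names are hypothetical.
#include <cassert>
#include <string>
#include <unordered_map>

struct ToyEntry { int lower; int upper; };
using ToyTable = std::unordered_map<std::string, ToyEntry>;

struct ToySavedEntry {
  std::string key;
  bool had_father;
  ToyEntry father;  // only valid if had_father
};

// On block entry: install a new entry and remember what it replaced.
static ToySavedEntry InstallEntry(ToyTable& table, const std::string& key,
                                  ToyEntry entry) {
  ToySavedEntry saved{key, false, {}};
  auto it = table.find(key);
  if (it != table.end()) {
    saved.had_father = true;
    saved.father = it->second;
  }
  table[key] = entry;
  return saved;
}

// On block exit: restore the dominating block's entry or drop the key.
static void RestoreEntry(ToyTable& table, const ToySavedEntry& saved) {
  if (saved.had_father) {
    table[saved.key] = saved.father;
  } else {
    table.erase(saved.key);
  }
}

int main() {
  ToyTable table;
  ToySavedEntry outer = InstallEntry(table, "i:len", {0, 3});
  ToySavedEntry inner = InstallEntry(table, "i:len", {-2, 5});
  RestoreEntry(table, inner);
  assert(table.at("i:len").upper == 3);  // dominating block's data is back
  RestoreEntry(table, outer);
  assert(table.find("i:len") == table.end());
  return 0;
}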
// Declarations referenced above, grouped by class:

// class BoundsCheckKey
BoundsCheckKey(HValue* index_base, HValue* length);
static BoundsCheckKey* Create(Zone* zone, HBoundsCheck* check, int32_t* offset);
HValue* IndexBase() const;
DISALLOW_COPY_AND_ASSIGN(BoundsCheckKey);

// class BoundsCheckBbData
BoundsCheckBbData(BoundsCheckKey* key, int32_t lower_offset, int32_t upper_offset,
                  HBasicBlock* bb, HBoundsCheck* lower_check,
                  HBoundsCheck* upper_check, BoundsCheckBbData* next_in_bb,
                  BoundsCheckBbData* father_in_dt);
BoundsCheckKey* Key() const;
int32_t LowerOffset() const;
int32_t UpperOffset() const;
HBasicBlock* BasicBlock() const;
HBoundsCheck* LowerCheck() const;
HBoundsCheck* UpperCheck() const;
BoundsCheckBbData* NextInBasicBlock() const;
BoundsCheckBbData* FatherInDominatorTree() const;
bool OffsetIsCovered(int32_t offset) const;
void CoverCheck(HBoundsCheck* new_check, int32_t new_offset);
void TightenCheck(HBoundsCheck* original_check, HBoundsCheck* tighter_check,
                  int32_t new_offset);
void UpdateLowerOffsets(HBoundsCheck* check, int32_t offset);
void UpdateUpperOffsets(HBoundsCheck* check, int32_t offset);
void MoveIndexIfNecessary(HValue* index_raw, HBoundsCheck* insert_before,
                          HInstruction* end_of_scan_range);
HBoundsCheck* lower_check_;
HBoundsCheck* upper_check_;
HBasicBlock* basic_block_;
BoundsCheckBbData* next_in_bb_;
BoundsCheckBbData* father_in_dt_;
DISALLOW_COPY_AND_ASSIGN(BoundsCheckBbData);

// class BoundsCheckTable (ZoneHashMap-based)
BoundsCheckTable(Zone* zone);
Entry* Lookup(void* key, uint32_t hash, bool insert,
              ZoneAllocationPolicy allocator = ZoneAllocationPolicy());
void* Remove(void* key, uint32_t hash);

// class HBoundsCheckEliminationPhase and its traversal state
void EliminateRedundantBoundsChecks(HBasicBlock* bb);
BoundsCheckBbData* PreProcessBlock(HBasicBlock* bb);
void PostProcessBlock(HBasicBlock* bb, BoundsCheckBbData* data);
BoundsCheckBbData* bb_data_list_;  // per-block state used by the DFS stack

// Other referenced helpers (HValue / HInstruction / base::OS)
static HValue* cast(HValue* value);
HBasicBlock* block() const;
HInstruction* next() const;
void InsertBefore(HInstruction* next);
virtual intptr_t Hashcode();
static void Print(const char* format, ...);
#define DCHECK(condition)
static bool BoundsCheckKeyMatch(void *key1, void *key2)