13 #if V8_TARGET_ARCH_IA32
15 #elif V8_TARGET_ARCH_X64
17 #elif V8_TARGET_ARCH_ARM64
19 #elif V8_TARGET_ARCH_ARM
21 #elif V8_TARGET_ARCH_MIPS
23 #elif V8_TARGET_ARCH_MIPS64
25 #elif V8_TARGET_ARCH_X87
28 #error Unsupported target architecture.
36 #define DEFINE_COMPILE(type) \
37 LInstruction* H##type::CompileToLithium(LChunkBuilder* builder) { \
38 return builder->Do##type(this); \
46 return block()->isolate();
79 for (HUseIterator it(
uses()); !it.Done(); it.Advance()) {
82 if (rep.
IsNone())
continue;
83 if (FLAG_trace_representation) {
84 PrintF(
"#%d %s is used by #%d %s as %s%s\n",
88 use_count[rep.
kind()] += 1;
90 if (
IsPhi()) HPhi::cast(
this)->AddIndirectUsesTo(&use_count[0]);
107 const char* reason) {
111 if (FLAG_trace_representation) {
112 PrintF(
"Changing #%d %s representation %s -> %s based on %s\n",
122 for (HUseIterator it(
uses()); !it.Done(); it.Advance()) {
153 return static_cast<int32_t>(result);
161 int64_t result =
static_cast<int64_t
>(a) +
static_cast<int64_t
>(b);
170 int64_t result =
static_cast<int64_t
>(a) -
static_cast<int64_t
>(b);
179 int64_t result =
static_cast<int64_t
>(a) *
static_cast<int64_t
>(b);
185 if (lower_ == upper_)
return lower_;
188 while (res < upper_) {
189 res = (res << 1) | 1;
197 void Range::AddConstant(
int32_t value) {
198 if (value == 0)
return;
199 bool may_overflow =
false;
209 void Range::Intersect(Range* other) {
210 upper_ =
Min(upper_, other->upper_);
211 lower_ =
Max(lower_, other->lower_);
212 bool b = CanBeMinusZero() && other->CanBeMinusZero();
213 set_can_be_minus_zero(b);
217 void Range::Union(Range* other) {
218 upper_ =
Max(upper_, other->upper_);
219 lower_ =
Min(lower_, other->lower_);
220 bool b = CanBeMinusZero() || other->CanBeMinusZero();
221 set_can_be_minus_zero(b);
225 void Range::CombinedMax(Range* other) {
226 upper_ =
Max(upper_, other->upper_);
227 lower_ =
Max(lower_, other->lower_);
228 set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
232 void Range::CombinedMin(Range* other) {
233 upper_ =
Min(upper_, other->upper_);
234 lower_ =
Min(lower_, other->lower_);
235 set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
239 void Range::Sar(
int32_t value) {
241 lower_ = lower_ >> bits;
242 upper_ = upper_ >> bits;
243 set_can_be_minus_zero(
false);
247 void Range::Shl(
int32_t value) {
249 int old_lower = lower_;
250 int old_upper = upper_;
251 lower_ = lower_ << bits;
252 upper_ = upper_ << bits;
253 if (old_lower != lower_ >> bits || old_upper != upper_ >> bits) {
257 set_can_be_minus_zero(
false);
261 bool Range::AddAndCheckOverflow(
const Representation& r, Range* other) {
262 bool may_overflow =
false;
273 bool Range::SubAndCheckOverflow(
const Representation& r, Range* other) {
274 bool may_overflow =
false;
285 void Range::KeepOrder() {
286 if (lower_ > upper_) {
295 void Range::Verify()
const {
301 bool Range::MulAndCheckOverflow(
const Representation& r, Range* other) {
302 bool may_overflow =
false;
317 return block()->block_id() > other->block_id();
331 for (HUseIterator it(
uses()); !it.Done(); it.Advance()) {
332 if (it.value()->IsSimulate())
continue;
333 if (!it.value()->CheckFlag(f))
return false;
340 for (HUseIterator it(
uses()); !it.Done(); it.Advance()) {
341 if (it.value()->IsSimulate())
continue;
342 if (!it.value()->CheckFlag(f)) {
352 bool return_value =
false;
353 for (HUseIterator it(
uses()); !it.Done(); it.Advance()) {
354 if (it.value()->IsSimulate())
continue;
355 if (!it.value()->CheckFlag(f))
return false;
362 HUseIterator::HUseIterator(
HUseListNode* head) : next_(head) {
367 void HUseIterator::Advance() {
369 if (current_ !=
NULL) {
370 next_ = current_->tail();
371 value_ = current_->value();
372 index_ = current_->index();
379 for (HUseIterator it(
uses()); !it.Done(); it.Advance()) ++count;
387 while (current !=
NULL) {
388 if (current->
value() == value && current->
index() == index) {
389 if (previous ==
NULL) {
398 current = current->
tail();
403 if (current !=
NULL) {
418 if (!other->
type_.Equals(
type_))
return false;
431 intptr_t result =
opcode();
433 for (
int i = 0;
i < count; ++
i) {
434 result = result * 19 +
OperandAt(
i)->
id() + (result >> 7);
442 #define MAKE_CASE(type) case k##type: return #type;
445 case kPhi:
return "Phi";
452 return FLAG_unreachable_code_elimination &&
453 !(
block()->IsReachable() ||
455 IsControlInstruction() ||
456 IsArgumentsObject() ||
457 IsCapturedObject() ||
465 return IsConstant() && HConstant::cast(
this)->HasInteger32Value();
470 return HConstant::cast(
this)->Integer32Value();
513 if (operand ==
NULL)
continue;
526 id_ =
block->graph()->GetNextValueID(
this);
536 !t.
value->
type().Equals(HType::Tagged()))
538 return os <<
" type:" << t.
value->
type();
544 if (changes_flags.
IsEmpty())
return os;
549 bool add_comma =
false;
550 #define PRINT_DO(Type) \
551 if (changes_flags.Contains(k##Type)) { \
552 if (add_comma) os << ","; \
579 if (old_value == new_value)
return;
582 if (old_value !=
NULL) {
583 removed = old_value->
RemoveUse(
this, index);
586 if (new_value !=
NULL) {
587 if (removed ==
NULL) {
624 }
else if (FLAG_hydrogen_track_positions) {
627 return os <<
"<0:" << p.
raw() <<
">";
643 if (
i > 0) os <<
" ";
652 DCHECK(!IsControlInstruction());
669 DCHECK(!IsControlInstruction());
691 if (
block->IsStartBlock() &&
block->IsFinished() && !IsConstant()) {
713 block->set_last(
this);
728 if (instr == other) {
737 void HInstruction::Verify() {
742 if (other_operand ==
NULL)
continue;
744 if (cur_block == other_block) {
745 if (!other_operand->
IsPhi()) {
747 while (cur !=
NULL) {
748 if (cur == other_operand)
break;
752 DCHECK(cur == other_operand);
757 DCHECK(other_block->Dominates(cur_block));
773 for (HUseIterator
use =
uses(); !
use.Done();
use.Advance()) {
774 if (
use.value()->IsInstruction()) {
785 case HValue::kAbnormalExit:
786 case HValue::kAccessArgumentsAt:
787 case HValue::kAllocate:
788 case HValue::kArgumentsElements:
789 case HValue::kArgumentsLength:
790 case HValue::kArgumentsObject:
791 case HValue::kBlockEntry:
792 case HValue::kBoundsCheckBaseIndexInformation:
793 case HValue::kCallFunction:
794 case HValue::kCallNew:
795 case HValue::kCallNewArray:
796 case HValue::kCallStub:
797 case HValue::kCallWithDescriptor:
798 case HValue::kCapturedObject:
799 case HValue::kClassOfTestAndBranch:
800 case HValue::kCompareGeneric:
801 case HValue::kCompareHoleAndBranch:
802 case HValue::kCompareMap:
803 case HValue::kCompareMinusZeroAndBranch:
804 case HValue::kCompareNumericAndBranch:
805 case HValue::kCompareObjectEqAndBranch:
806 case HValue::kConstant:
807 case HValue::kConstructDouble:
808 case HValue::kContext:
809 case HValue::kDebugBreak:
810 case HValue::kDeclareGlobals:
811 case HValue::kDoubleBits:
812 case HValue::kDummyUse:
813 case HValue::kEnterInlined:
814 case HValue::kEnvironmentMarker:
815 case HValue::kForceRepresentation:
816 case HValue::kGetCachedArrayIndex:
818 case HValue::kHasCachedArrayIndexAndBranch:
819 case HValue::kHasInstanceTypeAndBranch:
820 case HValue::kInnerAllocatedObject:
821 case HValue::kInstanceOf:
822 case HValue::kInstanceOfKnownGlobal:
823 case HValue::kIsConstructCallAndBranch:
824 case HValue::kIsObjectAndBranch:
825 case HValue::kIsSmiAndBranch:
826 case HValue::kIsStringAndBranch:
827 case HValue::kIsUndetectableAndBranch:
828 case HValue::kLeaveInlined:
829 case HValue::kLoadFieldByIndex:
830 case HValue::kLoadGlobalGeneric:
831 case HValue::kLoadNamedField:
832 case HValue::kLoadNamedGeneric:
833 case HValue::kLoadRoot:
834 case HValue::kMapEnumLength:
835 case HValue::kMathMinMax:
836 case HValue::kParameter:
838 case HValue::kPushArguments:
839 case HValue::kRegExpLiteral:
840 case HValue::kReturn:
841 case HValue::kSeqStringGetChar:
842 case HValue::kStoreCodeEntry:
843 case HValue::kStoreFrameContext:
844 case HValue::kStoreKeyed:
845 case HValue::kStoreNamedField:
846 case HValue::kStoreNamedGeneric:
847 case HValue::kStringCharCodeAt:
848 case HValue::kStringCharFromCode:
849 case HValue::kTailCallThroughMegamorphicCache:
850 case HValue::kThisFunction:
851 case HValue::kTypeofIsAndBranch:
852 case HValue::kUnknownOSRValue:
853 case HValue::kUseConst:
857 case HValue::kAllocateBlockContext:
858 case HValue::kApplyArguments:
859 case HValue::kBitwise:
860 case HValue::kBoundsCheck:
861 case HValue::kBranch:
862 case HValue::kCallJSFunction:
863 case HValue::kCallRuntime:
864 case HValue::kChange:
865 case HValue::kCheckHeapObject:
866 case HValue::kCheckInstanceType:
867 case HValue::kCheckMapValue:
868 case HValue::kCheckMaps:
869 case HValue::kCheckSmi:
870 case HValue::kCheckValue:
871 case HValue::kClampToUint8:
872 case HValue::kDateField:
873 case HValue::kDeoptimize:
875 case HValue::kForInCacheArray:
876 case HValue::kForInPrepareMap:
877 case HValue::kFunctionLiteral:
878 case HValue::kInvokeFunction:
879 case HValue::kLoadContextSlot:
880 case HValue::kLoadFunctionPrototype:
881 case HValue::kLoadGlobalCell:
882 case HValue::kLoadKeyed:
883 case HValue::kLoadKeyedGeneric:
884 case HValue::kMathFloorOfDiv:
887 case HValue::kOsrEntry:
891 case HValue::kSeqStringSetChar:
894 case HValue::kSimulate:
895 case HValue::kStackCheck:
896 case HValue::kStoreContextSlot:
897 case HValue::kStoreGlobalCell:
898 case HValue::kStoreKeyedGeneric:
899 case HValue::kStringAdd:
900 case HValue::kStringCompareAndBranch:
902 case HValue::kToFastProperties:
903 case HValue::kTransitionElementsKind:
904 case HValue::kTrapAllocationMemento:
905 case HValue::kTypeof:
906 case HValue::kUnaryMathOperation:
907 case HValue::kWrapReceiver:
919 OStream& HDummyUse::PrintDataTo(OStream& os)
const {
920 return os << NameOf(value());
924 OStream& HEnvironmentMarker::PrintDataTo(OStream& os)
const {
925 return os << (kind() == BIND ?
"bind" :
"lookup") <<
" var[" << index()
936 return os <<
NameOf(
function()) <<
" #" << argument_count();
940 HCallJSFunction* HCallJSFunction::New(
945 bool pass_argument_count) {
946 bool has_stack_check =
false;
947 if (function->IsConstant()) {
948 HConstant* fun_const = HConstant::cast(
function);
951 has_stack_check = !jsfun.is_null() &&
952 (jsfun->code()->kind() == Code::FUNCTION ||
953 jsfun->code()->kind() == Code::OPTIMIZED_FUNCTION);
956 return new(zone) HCallJSFunction(
957 function, argument_count, pass_argument_count,
968 void HBoundsCheck::ApplyIndexChange() {
969 if (skip_check())
return;
971 DecompositionResult decomposition;
972 bool index_is_decomposable = index()->TryDecompose(&decomposition);
973 if (index_is_decomposable) {
974 DCHECK(decomposition.base() == base());
975 if (decomposition.offset() == offset() &&
976 decomposition.scale() == scale())
return;
981 ReplaceAllUsesWith(index());
983 HValue* current_index = decomposition.base();
984 int actual_offset = decomposition.offset() + offset();
985 int actual_scale = decomposition.scale() + scale();
987 Zone* zone = block()->graph()->zone();
988 HValue* context = block()->graph()->GetInvalidContext();
989 if (actual_offset != 0) {
990 HConstant* add_offset = HConstant::New(zone, context, actual_offset);
991 add_offset->InsertBefore(
this);
992 HInstruction* add = HAdd::New(zone, context,
993 current_index, add_offset);
994 add->InsertBefore(
this);
995 add->AssumeRepresentation(index()->representation());
996 add->ClearFlag(kCanOverflow);
1000 if (actual_scale != 0) {
1001 HConstant* sar_scale = HConstant::New(zone, context, actual_scale);
1002 sar_scale->InsertBefore(
this);
1003 HInstruction* sar = HSar::New(zone, context,
1004 current_index, sar_scale);
1005 sar->InsertBefore(
this);
1006 sar->AssumeRepresentation(index()->representation());
1007 current_index = sar;
1010 SetOperandAt(0, current_index);
1018 OStream& HBoundsCheck::PrintDataTo(OStream& os)
const {
1019 os << NameOf(index()) <<
" " << NameOf(length());
1020 if (base() !=
NULL && (offset() != 0 || scale() != 0)) {
1022 if (base() != index()) {
1023 os << NameOf(index());
1027 os <<
" + " << offset() <<
") >> " << scale() <<
")";
1029 if (skip_check()) os <<
" [DISABLED]";
1034 void HBoundsCheck::InferRepresentation(HInferRepresentationPhase* h_infer) {
1035 DCHECK(CheckFlag(kFlexibleRepresentation));
1036 HValue* actual_index = index()->ActualValue();
1037 HValue* actual_length = length()->ActualValue();
1038 Representation index_rep = actual_index->representation();
1039 Representation length_rep = actual_length->representation();
1040 if (index_rep.IsTagged() && actual_index->type().IsSmi()) {
1043 if (length_rep.IsTagged() && actual_length->type().IsSmi()) {
1046 Representation r = index_rep.generalize(length_rep);
1050 UpdateRepresentation(r, h_infer,
"boundscheck");
1054 Range* HBoundsCheck::InferRange(Zone* zone) {
1055 Representation r = representation();
1056 if (r.IsSmiOrInteger32() && length()->HasRange()) {
1057 int upper = length()->range()->upper() - (allow_equality() ? 0 : 1);
1060 Range* result =
new(zone) Range(lower, upper);
1061 if (index()->HasRange()) {
1062 result->Intersect(index()->range());
1066 if (r.IsSmi()) result->ClampToSmi();
// Prints the decomposed index information of a bounds check.
// NOTE(review): both the "base:" and the "check:" fields print
// NameOf(base_index()) — the second looks like it may have been meant to
// reference the bounds check itself; confirm before changing.
1073 OStream& HBoundsCheckBaseIndexInformation::PrintDataTo(
1074 OStream& os)
const {
1076 return os <<
"base: " << NameOf(base_index())
1077 <<
", check: " << NameOf(base_index());
1081 OStream& HCallWithDescriptor::PrintDataTo(OStream& os)
const {
1082 for (
int i = 0;
i < OperandCount();
i++) {
1083 os << NameOf(OperandAt(
i)) <<
" ";
1085 return os <<
"#" << argument_count();
1089 OStream& HCallNewArray::PrintDataTo(OStream& os)
const {
1095 OStream& HCallRuntime::PrintDataTo(OStream& os)
const {
1096 os <<
name()->ToCString().get() <<
" ";
1097 if (save_doubles() ==
kSaveFPRegs) os <<
"[save doubles] ";
1098 return os <<
"#" << argument_count();
1102 OStream& HClassOfTestAndBranch::PrintDataTo(OStream& os)
const {
1103 return os <<
"class_of_test(" << NameOf(value()) <<
", \""
1104 << class_name()->ToCString().get() <<
"\")";
1108 OStream& HWrapReceiver::PrintDataTo(OStream& os)
const {
1109 return os << NameOf(receiver()) <<
" " << NameOf(
function());
1113 OStream& HAccessArgumentsAt::PrintDataTo(OStream& os)
const {
1114 return os << NameOf(arguments()) <<
"[" << NameOf(index()) <<
"], length "
1115 << NameOf(length());
1126 bool first_block =
true;
1127 for (HSuccessorIterator it(
this); !it.Done(); it.Advance()) {
1128 if (!first_block) os <<
", ";
1129 os << *it.Current();
1130 first_block =
false;
1143 return os <<
NameOf(value()) <<
" (pop " <<
NameOf(parameter_count())
1148 Representation HBranch::observed_input_representation(
int index) {
1149 static const ToBooleanStub::Types tagged_types(
1154 if (expected_input_types_.ContainsAnyOf(tagged_types)) {
1173 bool HBranch::KnownSuccessorBlock(HBasicBlock** block) {
1174 HValue* value = this->value();
1175 if (value->EmitAtUses()) {
1176 DCHECK(value->IsConstant());
1177 DCHECK(!value->representation().IsDouble());
1178 *block = HConstant::cast(value)->BooleanValue()
1180 : SecondSuccessor();
1188 OStream& HBranch::PrintDataTo(OStream& os)
const {
1190 << expected_input_types();
1194 OStream& HCompareMap::PrintDataTo(OStream& os)
const {
1195 os << NameOf(value()) <<
" (" << *
map().handle() <<
")";
1197 if (known_successor_index() == 0) {
1199 }
else if (known_successor_index() == 1) {
1206 const char* HUnaryMathOperation::OpName()
const {
1233 Range* HUnaryMathOperation::InferRange(Zone* zone) {
1234 Representation r = representation();
1235 if (op() == kMathClz32)
return new(zone) Range(0, 32);
1236 if (r.IsSmiOrInteger32() && value()->HasRange()) {
1237 if (op() == kMathAbs) {
1238 int upper = value()->range()->upper();
1239 int lower = value()->range()->lower();
1240 bool spans_zero = value()->range()->CanBeZero();
1246 new(zone) Range(spans_zero ? 0 :
Min(abs_lower, abs_upper),
1247 Max(abs_lower, abs_upper));
1250 if (r.IsSmi()) result->ClampToSmi();
1258 OStream& HUnaryMathOperation::PrintDataTo(OStream& os)
const {
1259 return os << OpName() <<
" " << NameOf(value());
1268 OStream& HHasInstanceTypeAndBranch::PrintDataTo(
OStream& os)
const {
1272 if (to_ ==
LAST_TYPE) os <<
" spec_object";
1290 OStream& HTypeofIsAndBranch::PrintDataTo(OStream& os)
const {
1291 os << NameOf(value()) <<
" == " << type_literal()->ToCString().get();
1298 if (constant->HasNumberValue())
return heap->number_string();
1299 if (constant->IsUndetectable())
return heap->undefined_string();
1300 if (constant->HasStringValue())
return heap->string_string();
1301 switch (constant->GetInstanceType()) {
1306 return heap->boolean_string();
1309 return heap->object_string();
1312 return heap->undefined_string();
1315 return heap->symbol_string();
1318 return heap->function_string();
1320 return heap->object_string();
1325 bool HTypeofIsAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
1326 if (FLAG_fold_constants && value()->IsConstant()) {
1327 HConstant* constant = HConstant::cast(value());
1329 bool same_type = type_literal_.IsKnownGlobal(type_string);
1330 *block = same_type ? FirstSuccessor() : SecondSuccessor();
1332 }
else if (value()->representation().IsSpecialization()) {
1334 type_literal_.IsKnownGlobal(isolate()->heap()->number_string());
1335 *block = number_type ? FirstSuccessor() : SecondSuccessor();
1343 OStream& HCheckMapValue::PrintDataTo(OStream& os)
const {
1344 return os << NameOf(value()) <<
" " << NameOf(
map());
1349 if (
map()->IsConstant()) {
1350 HConstant* c_map = HConstant::cast(
map());
1351 return HCheckMaps::CreateAndInsertAfter(
1352 block()->graph()->zone(), value(), c_map->MapValue(),
1353 c_map->HasStableMapValue(),
this);
1359 OStream& HForInPrepareMap::PrintDataTo(OStream& os)
const {
1360 return os << NameOf(enumerable());
1364 OStream& HForInCacheArray::PrintDataTo(OStream& os)
const {
1365 return os << NameOf(enumerable()) <<
" " << NameOf(
map()) <<
"[" << idx_
1370 OStream& HLoadFieldByIndex::PrintDataTo(OStream& os)
const {
1371 return os << NameOf(
object()) <<
" " << NameOf(index());
1383 if (!instr->IsBitwise())
return false;
1384 HBitwise* b = HBitwise::cast(instr);
1385 return (b->op() == Token::BIT_XOR) &&
1399 if (!representation().IsSmiOrInteger32())
return this;
1401 int32_t nop_constant = (op() == Token::BIT_AND) ? -1 : 0;
1402 if (left()->EqualsInteger32Constant(nop_constant) &&
1403 !right()->CheckFlag(kUint32)) {
1406 if (right()->EqualsInteger32Constant(nop_constant) &&
1407 !left()->CheckFlag(kUint32)) {
1419 Representation HAdd::RepresentationFromInputs() {
1420 Representation left_rep = left()->representation();
1421 if (left_rep.IsExternal()) {
1428 Representation HAdd::RequiredInputRepresentation(
int index) {
1430 Representation left_rep = left()->representation();
1431 if (left_rep.IsExternal()) {
1448 !left()->representation().IsDouble()) {
1452 !left()->representation().IsDouble()) {
1472 bool HMul::MulMinusOne() {
1473 if (left()->EqualsInteger32Constant(-1) ||
1474 right()->EqualsInteger32Constant(-1)) {
1494 return (from().
Equals(
to())) ? value() : this;
1499 if (HasNoUses())
return NULL;
1500 if (receiver()->type().IsJSObject()) {
1507 OStream& HTypeof::PrintDataTo(OStream& os)
const {
1508 return os << NameOf(value());
1512 HInstruction* HForceRepresentation::New(Zone* zone, HValue* context,
1513 HValue* value, Representation representation) {
1514 if (FLAG_fold_constants && value->IsConstant()) {
1515 HConstant* c = HConstant::cast(value);
1516 c = c->CopyToRepresentation(representation, zone);
1517 if (c !=
NULL)
return c;
1519 return new(zone) HForceRepresentation(value, representation);
1523 OStream& HForceRepresentation::PrintDataTo(OStream& os)
const {
1524 return os << representation().Mnemonic() <<
" " << NameOf(value());
1528 OStream& HChange::PrintDataTo(OStream& os)
const {
1530 os <<
" " << from().Mnemonic() <<
" to " <<
to().Mnemonic();
1532 if (CanTruncateToSmi()) os <<
" truncating-smi";
1533 if (CanTruncateToInt32()) os <<
" truncating-int32";
1534 if (CheckFlag(kBailoutOnMinusZero)) os <<
" -0?";
1535 if (CheckFlag(kAllowUndefinedAsNaN)) os <<
" allow-undefined-as-nan";
1541 if (op() == kMathRound || op() == kMathFloor) {
1542 HValue* val = value();
1543 if (val->IsChange()) val = HChange::cast(val)->value();
1544 if (val->representation().IsSmiOrInteger32()) {
1545 if (val->representation().Equals(representation()))
return val;
1546 return Prepend(
new(block()->zone()) HChange(
1547 val, representation(),
false,
false));
1550 if (op() == kMathFloor && value()->IsDiv() && value()->HasOneUse()) {
1551 HDiv* hdiv = HDiv::cast(value());
1553 HValue* left = hdiv->left();
1554 if (left->representation().IsInteger32()) {
1556 }
else if (left->IsChange() && HChange::cast(left)->from().IsInteger32()) {
1558 left = HChange::cast(left)->value();
1559 }
else if (hdiv->observed_input_representation(1).IsSmiOrInteger32()) {
1560 left = Prepend(
new(block()->zone()) HChange(
1566 HValue* right = hdiv->right();
1567 if (right->IsInteger32Constant()) {
1568 right = Prepend(HConstant::cast(right)->CopyToRepresentation(
1570 }
else if (right->representation().IsInteger32()) {
1572 }
else if (right->IsChange() &&
1573 HChange::cast(right)->from().IsInteger32()) {
1575 right = HChange::cast(right)->value();
1576 }
else if (hdiv->observed_input_representation(2).IsSmiOrInteger32()) {
1577 right = Prepend(
new(block()->zone()) HChange(
1583 return Prepend(HMathFloorOfDiv::New(
1584 block()->zone(), context(), left, right));
1591 if ((check_ == IS_SPEC_OBJECT && value()->type().IsJSObject()) ||
1592 (check_ == IS_JS_ARRAY && value()->type().IsJSArray()) ||
1593 (check_ == IS_STRING && value()->type().IsString())) {
1597 if (check_ == IS_INTERNALIZED_STRING && value()->IsConstant()) {
1598 if (HConstant::cast(value())->HasInternalizedStringValue()) {
1606 void HCheckInstanceType::GetCheckInterval(
InstanceType* first,
1608 DCHECK(is_interval_check());
1610 case IS_SPEC_OBJECT:
1623 void HCheckInstanceType::GetCheckMaskAndTag(uint8_t* mask, uint8_t* tag) {
1624 DCHECK(!is_interval_check());
1630 case IS_INTERNALIZED_STRING:
1640 OStream& HCheckMaps::PrintDataTo(OStream& os)
const {
1641 os << NameOf(value()) <<
" [" << *maps()->at(0).handle();
1642 for (
int i = 1;
i < maps()->size(); ++
i) {
1643 os <<
"," << *maps()->at(
i).handle();
1646 if (IsStabilityCheck()) os <<
"(stability-check)";
1652 if (!IsStabilityCheck() && maps_are_stable() && value()->IsConstant()) {
1653 HConstant* c_value = HConstant::cast(value());
1654 if (c_value->HasObjectMap()) {
1655 for (
int i = 0;
i < maps()->size(); ++
i) {
1656 if (c_value->ObjectMap() == maps()->at(
i)) {
1657 if (maps()->size() > 1) {
1658 set_maps(
new(block()->graph()->zone()) UniqueSet<Map>(
1659 maps()->at(
i), block()->graph()->zone()));
1661 MarkAsStabilityCheck();
1671 OStream& HCheckValue::PrintDataTo(OStream& os)
const {
1672 return os << NameOf(value()) <<
" " << Brief(*
object().
handle());
1677 return (value()->IsConstant() &&
1678 HConstant::cast(value())->EqualsUnique(object_)) ?
NULL :
this;
1682 const char* HCheckInstanceType::GetCheckName()
const {
1684 case IS_SPEC_OBJECT:
return "object";
1685 case IS_JS_ARRAY:
return "array";
1686 case IS_STRING:
return "string";
1687 case IS_INTERNALIZED_STRING:
return "internalized_string";
1694 OStream& HCheckInstanceType::PrintDataTo(OStream& os)
const {
1695 os << GetCheckName() <<
" ";
1700 OStream& HCallStub::PrintDataTo(OStream& os)
const {
1701 os << CodeStub::MajorName(major_key_,
false) <<
" ";
1706 OStream& HTailCallThroughMegamorphicCache::PrintDataTo(
1707 OStream& os)
const {
1708 for (
int i = 0;
i < OperandCount();
i++) {
1709 os << NameOf(OperandAt(
i)) <<
" ";
1711 return os <<
"flags: " <<
flags();
1715 OStream& HUnknownOSRValue::PrintDataTo(OStream& os)
const {
1716 const char* type =
"expression";
1717 if (environment_->is_local_index(index_)) type =
"local";
1718 if (environment_->is_special_index(index_)) type =
"special";
1719 if (environment_->is_parameter_index(index_)) type =
"parameter";
1720 return os << type <<
" @ " << index_;
1724 OStream& HInstanceOf::PrintDataTo(OStream& os)
const {
1725 return os << NameOf(left()) <<
" " << NameOf(right()) <<
" "
1726 << NameOf(context());
1734 result->set_can_be_minus_zero(
false);
1736 result =
new(zone) Range();
1745 Range* HChange::InferRange(
Zone* zone) {
1746 Range* input_range = value()->range();
1750 input_range !=
NULL &&
1751 input_range->IsInSmiRange()))) {
1752 set_type(HType::Smi());
1753 ClearChangesFlag(kNewSpacePromotion);
1755 if (
to().IsSmiOrTagged() &&
1756 input_range !=
NULL &&
1757 input_range->IsInSmiRange() &&
1760 input_range->upper() !=
kMaxInt)) {
1763 ClearFlag(kCanOverflow);
1765 Range* result = (input_range !=
NULL)
1766 ? input_range->Copy(zone)
1768 result->set_can_be_minus_zero(!
to().IsSmiOrInteger32() ||
1769 !(CheckFlag(kAllUsesTruncatingToInt32) ||
1770 CheckFlag(kAllUsesTruncatingToSmi)));
1771 if (
to().IsSmi()) result->ClampToSmi();
1776 Range* HConstant::InferRange(Zone* zone) {
1777 if (has_int32_value_) {
1778 Range* result =
new(zone) Range(int32_value_, int32_value_);
1779 result->set_can_be_minus_zero(
false);
1786 HSourcePosition HPhi::position()
const {
1787 return block()->first()->position();
1791 Range* HPhi::InferRange(Zone* zone) {
1792 Representation r = representation();
1793 if (r.IsSmiOrInteger32()) {
1794 if (block()->IsLoopHeader()) {
1795 Range* range = r.IsSmi()
1800 Range* range = OperandAt(0)->range()->Copy(zone);
1801 for (
int i = 1;
i < OperandCount(); ++
i) {
1802 range->Union(OperandAt(
i)->range());
1812 Range* HAdd::InferRange(Zone* zone) {
1813 Representation r = representation();
1814 if (r.IsSmiOrInteger32()) {
1815 Range* a = left()->range();
1816 Range* b = right()->range();
1817 Range* res = a->Copy(zone);
1818 if (!res->AddAndCheckOverflow(r, b) ||
1819 (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
1820 (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
1821 ClearFlag(kCanOverflow);
1823 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
1824 !CheckFlag(kAllUsesTruncatingToInt32) &&
1825 a->CanBeMinusZero() && b->CanBeMinusZero());
1833 Range* HSub::InferRange(Zone* zone) {
1834 Representation r = representation();
1835 if (r.IsSmiOrInteger32()) {
1836 Range* a = left()->range();
1837 Range* b = right()->range();
1838 Range* res = a->Copy(zone);
1839 if (!res->SubAndCheckOverflow(r, b) ||
1840 (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
1841 (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
1842 ClearFlag(kCanOverflow);
1844 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
1845 !CheckFlag(kAllUsesTruncatingToInt32) &&
1846 a->CanBeMinusZero() && b->CanBeZero());
1854 Range* HMul::InferRange(Zone* zone) {
1855 Representation r = representation();
1856 if (r.IsSmiOrInteger32()) {
1857 Range* a = left()->range();
1858 Range* b = right()->range();
1859 Range* res = a->Copy(zone);
1860 if (!res->MulAndCheckOverflow(r, b) ||
1861 (((r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
1862 (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) &&
1867 ClearFlag(kCanOverflow);
1869 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
1870 !CheckFlag(kAllUsesTruncatingToInt32) &&
1871 ((a->CanBeZero() && b->CanBeNegative()) ||
1872 (a->CanBeNegative() && b->CanBeZero())));
1880 Range* HDiv::InferRange(Zone* zone) {
1882 Range* a = left()->range();
1883 Range* b = right()->range();
1884 Range* result =
new(zone) Range();
1885 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
1886 (a->CanBeMinusZero() ||
1887 (a->CanBeZero() && b->CanBeNegative())));
1888 if (!a->Includes(
kMinInt) || !b->Includes(-1)) {
1889 ClearFlag(kCanOverflow);
1892 if (!b->CanBeZero()) {
1893 ClearFlag(kCanBeDivByZero);
1902 Range* HMathFloorOfDiv::InferRange(Zone* zone) {
1904 Range* a = left()->range();
1905 Range* b = right()->range();
1906 Range* result =
new(zone) Range();
1907 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
1908 (a->CanBeMinusZero() ||
1909 (a->CanBeZero() && b->CanBeNegative())));
1911 ClearFlag(kLeftCanBeMinInt);
1914 if (!a->CanBeNegative()) {
1918 if (!a->CanBePositive()) {
1922 if (!a->Includes(
kMinInt) || !b->Includes(-1)) {
1923 ClearFlag(kCanOverflow);
1926 if (!b->CanBeZero()) {
1927 ClearFlag(kCanBeDivByZero);
1941 Range* HMod::InferRange(Zone* zone) {
1943 Range* a = left()->range();
1944 Range* b = right()->range();
1950 bool left_can_be_negative = a->CanBeMinusZero() || a->CanBeNegative();
1951 Range* result =
new(zone) Range(left_can_be_negative ? -positive_bound : 0,
1952 a->CanBePositive() ? positive_bound : 0);
1954 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
1955 left_can_be_negative);
1957 if (!a->CanBeNegative()) {
1961 if (!a->Includes(
kMinInt) || !b->Includes(-1)) {
1965 if (!b->CanBeZero()) {
1975 InductionVariableData* InductionVariableData::ExaminePhi(HPhi* phi) {
1976 if (phi->block()->loop_information() ==
NULL)
return NULL;
1977 if (phi->OperandCount() != 2)
return NULL;
1980 candidate_increment = ComputeIncrement(phi, phi->OperandAt(0));
1981 if (candidate_increment != 0) {
1982 return new(phi->block()->graph()->zone())
1983 InductionVariableData(phi, phi->OperandAt(1), candidate_increment);
1986 candidate_increment = ComputeIncrement(phi, phi->OperandAt(1));
1987 if (candidate_increment != 0) {
1988 return new(phi->block()->graph()->zone())
1989 InductionVariableData(phi, phi->OperandAt(0), candidate_increment);
2004 void InductionVariableData::DecomposeBitwise(
2006 BitwiseDecompositionResult* result) {
2007 HValue* base = IgnoreOsrValue(value);
2008 result->base = value;
2010 if (!base->representation().IsInteger32())
return;
2012 if (base->IsBitwise()) {
2013 bool allow_offset =
false;
2016 HBitwise* bitwise = HBitwise::cast(base);
2017 if (bitwise->right()->IsInteger32Constant()) {
2018 mask = bitwise->right()->GetInteger32Constant();
2019 base = bitwise->left();
2020 }
else if (bitwise->left()->IsInteger32Constant()) {
2021 mask = bitwise->left()->GetInteger32Constant();
2022 base = bitwise->right();
2026 if (bitwise->op() == Token::BIT_AND) {
2027 result->and_mask = mask;
2028 allow_offset =
true;
2029 }
else if (bitwise->op() == Token::BIT_OR) {
2030 result->or_mask = mask;
2035 result->context = bitwise->context();
2038 if (base->IsAdd()) {
2039 HAdd* add = HAdd::cast(base);
2040 if (add->right()->IsInteger32Constant()) {
2042 }
else if (add->left()->IsInteger32Constant()) {
2043 base = add->right();
2045 }
else if (base->IsSub()) {
2046 HSub* sub = HSub::cast(base);
2047 if (sub->right()->IsInteger32Constant()) {
2053 result->base = base;
2058 void InductionVariableData::AddCheck(HBoundsCheck* check,
2061 if (limit_validity() != check->block() &&
2062 !limit_validity()->Dominates(check->block()))
return;
2063 if (!phi()->block()->current_loop()->IsNestedInThisLoop(
2064 check->block()->current_loop()))
return;
2066 ChecksRelatedToLength* length_checks = checks();
2067 while (length_checks !=
NULL) {
2068 if (length_checks->length() == check->length())
break;
2069 length_checks = length_checks->next();
2071 if (length_checks ==
NULL) {
2072 length_checks =
new(check->block()->zone())
2073 ChecksRelatedToLength(check->length(), checks());
2074 checks_ = length_checks;
2077 length_checks->AddCheck(check, upper_limit);
2081 void InductionVariableData::ChecksRelatedToLength::CloseCurrentBlock() {
2082 if (checks() !=
NULL) {
2083 InductionVariableCheck* c = checks();
2084 HBasicBlock* current_block = c->check()->block();
2085 while (c !=
NULL && c->check()->block() == current_block) {
2086 c->set_upper_limit(current_upper_limit_);
2093 void InductionVariableData::ChecksRelatedToLength::UseNewIndexInCurrentBlock(
2099 HValue* previous_index = first_check_in_block()->index();
2102 Zone* zone = index_base->block()->graph()->zone();
2103 set_added_constant(HConstant::New(zone, context, mask));
2104 if (added_index() !=
NULL) {
2105 added_constant()->InsertBefore(added_index());
2107 added_constant()->InsertBefore(first_check_in_block());
2110 if (added_index() ==
NULL) {
2111 first_check_in_block()->ReplaceAllUsesWith(first_check_in_block()->index());
2112 HInstruction* new_index = HBitwise::New(zone, context, token, index_base,
2114 DCHECK(new_index->IsBitwise());
2115 new_index->ClearAllSideEffects();
2117 set_added_index(HBitwise::cast(new_index));
2118 added_index()->InsertBefore(first_check_in_block());
2120 DCHECK(added_index()->op() == token);
2122 added_index()->SetOperandAt(1, index_base);
2123 added_index()->SetOperandAt(2, added_constant());
2124 first_check_in_block()->SetOperandAt(0, added_index());
2125 if (previous_index->HasNoUses()) {
2126 previous_index->DeleteAndReplaceWith(
NULL);
2130 void InductionVariableData::ChecksRelatedToLength::AddCheck(
2131 HBoundsCheck* check,
2133 BitwiseDecompositionResult decomposition;
2134 InductionVariableData::DecomposeBitwise(check->index(), &decomposition);
2136 if (first_check_in_block() ==
NULL ||
2137 first_check_in_block()->block() != check->block()) {
2138 CloseCurrentBlock();
2140 first_check_in_block_ = check;
2141 set_added_index(
NULL);
2142 set_added_constant(
NULL);
2143 current_and_mask_in_block_ = decomposition.and_mask;
2144 current_or_mask_in_block_ = decomposition.or_mask;
2145 current_upper_limit_ = upper_limit;
2147 InductionVariableCheck* new_check =
new(check->block()->graph()->zone())
2148 InductionVariableCheck(check, checks_, upper_limit);
2149 checks_ = new_check;
2153 if (upper_limit > current_upper_limit()) {
2154 current_upper_limit_ = upper_limit;
2157 if (decomposition.and_mask != 0 &&
2158 current_or_mask_in_block() == 0) {
2159 if (current_and_mask_in_block() == 0 ||
2160 decomposition.and_mask > current_and_mask_in_block()) {
2161 UseNewIndexInCurrentBlock(Token::BIT_AND,
2162 decomposition.and_mask,
2164 decomposition.context);
2165 current_and_mask_in_block_ = decomposition.and_mask;
2167 check->set_skip_check();
2169 if (current_and_mask_in_block() == 0) {
2170 if (decomposition.or_mask > current_or_mask_in_block()) {
2171 UseNewIndexInCurrentBlock(Token::BIT_OR,
2172 decomposition.or_mask,
2174 decomposition.context);
2175 current_or_mask_in_block_ = decomposition.or_mask;
2177 check->set_skip_check();
2180 if (!check->skip_check()) {
2181 InductionVariableCheck* new_check =
new(check->block()->graph()->zone())
2182 InductionVariableCheck(check, checks_, upper_limit);
2183 checks_ = new_check;
2197 int32_t InductionVariableData::ComputeIncrement(HPhi* phi,
2198 HValue* phi_operand) {
2199 if (!phi_operand->representation().IsInteger32())
return 0;
2201 if (phi_operand->IsAdd()) {
2202 HAdd* operation = HAdd::cast(phi_operand);
2203 if (operation->left() == phi &&
2204 operation->right()->IsInteger32Constant()) {
2205 return operation->right()->GetInteger32Constant();
2206 }
else if (operation->right() == phi &&
2207 operation->left()->IsInteger32Constant()) {
2208 return operation->left()->GetInteger32Constant();
2210 }
else if (phi_operand->IsSub()) {
2211 HSub* operation = HSub::cast(phi_operand);
2212 if (operation->left() == phi &&
2213 operation->right()->IsInteger32Constant()) {
2214 return -operation->right()->GetInteger32Constant();
2228 void InductionVariableData::UpdateAdditionalLimit(
2229 InductionVariableLimitUpdate* update) {
2230 DCHECK(update->updated_variable ==
this);
2231 if (update->limit_is_upper) {
2232 swap(&additional_upper_limit_, &update->limit);
2233 swap(&additional_upper_limit_is_included_, &update->limit_is_included);
2235 swap(&additional_lower_limit_, &update->limit);
2236 swap(&additional_lower_limit_is_included_, &update->limit_is_included);
2241 int32_t InductionVariableData::ComputeUpperLimit(
int32_t and_mask,
2244 const int32_t MAX_LIMIT = 1 << 30;
2248 if (limit() !=
NULL &&
2249 limit()->IsInteger32Constant()) {
2250 int32_t limit_value = limit()->GetInteger32Constant();
2251 if (!limit_included()) {
2254 if (limit_value < result) result = limit_value;
2257 if (additional_upper_limit() !=
NULL &&
2258 additional_upper_limit()->IsInteger32Constant()) {
2259 int32_t limit_value = additional_upper_limit()->GetInteger32Constant();
2260 if (!additional_upper_limit_is_included()) {
2263 if (limit_value < result) result = limit_value;
2266 if (and_mask > 0 && and_mask < MAX_LIMIT) {
2267 if (and_mask < result) result = and_mask;
2274 return result >= MAX_LIMIT ? kNoLimit : result;
2278 HValue* InductionVariableData::IgnoreOsrValue(HValue* v) {
2279 if (!v->IsPhi())
return v;
2280 HPhi* phi = HPhi::cast(v);
2281 if (phi->OperandCount() != 2)
return v;
2282 if (phi->OperandAt(0)->block()->is_osr_entry()) {
2283 return phi->OperandAt(1);
2284 }
else if (phi->OperandAt(1)->block()->is_osr_entry()) {
2285 return phi->OperandAt(0);
2292 InductionVariableData* InductionVariableData::GetInductionVariableData(
2294 v = IgnoreOsrValue(v);
2296 return HPhi::cast(v)->induction_variable_data();
2314 bool InductionVariableData::CheckIfBranchIsLoopGuard(
2316 HBasicBlock* current_branch,
2317 HBasicBlock* other_branch) {
2318 if (!phi()->block()->current_loop()->IsNestedInThisLoop(
2319 current_branch->current_loop())) {
2323 if (phi()->block()->current_loop()->IsNestedInThisLoop(
2324 other_branch->current_loop())) {
2328 if (increment() > 0 && (token == Token::LT || token == Token::LTE)) {
2331 if (increment() < 0 && (token == Token::GT || token == Token::GTE)) {
2342 void InductionVariableData::ComputeLimitFromPredecessorBlock(
2344 LimitFromPredecessorBlock* result) {
2345 if (block->predecessors()->length() != 1)
return;
2346 HBasicBlock* predecessor = block->predecessors()->at(0);
2347 HInstruction* end = predecessor->last();
2349 if (!end->IsCompareNumericAndBranch())
return;
2355 HBasicBlock* other_target;
2356 if (block == branch->SuccessorAt(0)) {
2357 other_target = branch->SuccessorAt(1);
2359 other_target = branch->SuccessorAt(0);
2361 DCHECK(block == branch->SuccessorAt(1));
2364 InductionVariableData* data;
2366 data = GetInductionVariableData(branch->left());
2367 HValue* limit = branch->right();
2369 data = GetInductionVariableData(branch->right());
2371 limit = branch->left();
2375 result->variable = data;
2376 result->token = token;
2377 result->limit = limit;
2378 result->other_target = other_target;
2392 bool InductionVariableData::ComputeInductionVariableLimit(
2394 InductionVariableLimitUpdate* additional_limit) {
2395 LimitFromPredecessorBlock limit;
2396 ComputeLimitFromPredecessorBlock(block, &limit);
2397 if (!limit.LimitIsValid())
return false;
2399 if (limit.variable->CheckIfBranchIsLoopGuard(limit.token,
2401 limit.other_target)) {
2402 limit.variable->limit_ = limit.limit;
2403 limit.variable->limit_included_ = limit.LimitIsIncluded();
2404 limit.variable->limit_validity_ = block;
2405 limit.variable->induction_exit_block_ = block->predecessors()->at(0);
2406 limit.variable->induction_exit_target_ = limit.other_target;
2409 additional_limit->updated_variable = limit.variable;
2410 additional_limit->limit = limit.limit;
2411 additional_limit->limit_is_upper = limit.LimitIsUpper();
2412 additional_limit->limit_is_included = limit.LimitIsIncluded();
2418 Range* HMathMinMax::InferRange(Zone* zone) {
2419 if (representation().IsSmiOrInteger32()) {
2420 Range* a = left()->range();
2421 Range* b = right()->range();
2422 Range* res = a->Copy(zone);
2423 if (operation_ == kMathMax) {
2424 res->CombinedMax(b);
2426 DCHECK(operation_ == kMathMin);
2427 res->CombinedMin(b);
2436 void HPushArguments::AddInput(HValue* value) {
2437 inputs_.Add(
NULL, value->block()->zone());
2438 SetOperandAt(OperandCount() - 1, value);
2442 OStream& HPhi::PrintTo(OStream& os)
const {
2444 for (
int i = 0;
i < OperandCount(); ++
i) {
2445 os <<
" " << NameOf(OperandAt(
i)) <<
" ";
2447 return os <<
" uses:" << UseCount() <<
"_"
2448 << smi_non_phi_uses() + smi_indirect_uses() <<
"s_"
2449 << int32_non_phi_uses() + int32_indirect_uses() <<
"i_"
2450 << double_non_phi_uses() + double_indirect_uses() <<
"d_"
2451 << tagged_non_phi_uses() + tagged_indirect_uses() <<
"t"
2456 void HPhi::AddInput(HValue* value) {
2457 inputs_.Add(
NULL, value->block()->zone());
2458 SetOperandAt(OperandCount() - 1, value);
2460 if (!CheckFlag(kIsArguments) && value->CheckFlag(kIsArguments)) {
2461 SetFlag(kIsArguments);
2466 bool HPhi::HasRealUses() {
2467 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
2468 if (!it.value()->IsPhi())
return true;
2474 HValue* HPhi::GetRedundantReplacement() {
2475 HValue* candidate =
NULL;
2476 int count = OperandCount();
2478 while (position < count && candidate ==
NULL) {
2479 HValue* current = OperandAt(position++);
2480 if (current !=
this) candidate = current;
2482 while (position < count) {
2483 HValue* current = OperandAt(position++);
2484 if (current !=
this && current != candidate)
return NULL;
2486 DCHECK(candidate !=
this);
2491 void HPhi::DeleteFromGraph() {
2493 block()->RemovePhi(
this);
2498 void HPhi::InitRealUses(
int phi_id) {
2504 SetFlag(kTruncatingToSmi);
2505 SetFlag(kTruncatingToInt32);
2506 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
2507 HValue* value = it.value();
2508 if (!value->IsPhi()) {
2509 Representation rep = value->observed_input_representation(it.index());
2510 non_phi_uses_[rep.kind()] += 1;
2511 if (FLAG_trace_representation) {
2512 PrintF(
"#%d Phi is used by real #%d %s as %s\n",
2513 id(), value->id(), value->Mnemonic(), rep.Mnemonic());
2515 if (!value->IsSimulate()) {
2516 if (!value->CheckFlag(kTruncatingToSmi)) {
2517 ClearFlag(kTruncatingToSmi);
2519 if (!value->CheckFlag(kTruncatingToInt32)) {
2520 ClearFlag(kTruncatingToInt32);
2528 void HPhi::AddNonPhiUsesFrom(HPhi* other) {
2529 if (FLAG_trace_representation) {
2530 PrintF(
"adding to #%d Phi uses of #%d Phi: s%d i%d d%d t%d\n",
2539 indirect_uses_[
i] += other->non_phi_uses_[
i];
2544 void HPhi::AddIndirectUsesTo(
int* dest) {
2546 dest[
i] += indirect_uses_[
i];
2551 void HSimulate::MergeWith(ZoneList<HSimulate*>* list) {
2552 while (!list->is_empty()) {
2553 HSimulate* from = list->RemoveLast();
2554 ZoneList<HValue*>* from_values = &from->values_;
2555 for (
int i = 0;
i < from_values->length(); ++
i) {
2556 if (from->HasAssignedIndexAt(
i)) {
2557 int index = from->GetAssignedIndexAt(
i);
2558 if (HasValueForIndex(index))
continue;
2559 AddAssignedValue(index, from_values->at(
i));
2561 if (pop_count_ > 0) {
2564 AddPushedValue(from_values->at(
i));
2568 pop_count_ += from->pop_count_;
2569 from->DeleteAndReplaceWith(
NULL);
2574 OStream& HSimulate::PrintDataTo(OStream& os)
const {
2575 os <<
"id=" << ast_id().ToInt();
2576 if (pop_count_ > 0) os <<
" pop " << pop_count_;
2577 if (values_.length() > 0) {
2578 if (pop_count_ > 0) os <<
" /";
2579 for (
int i = values_.length() - 1;
i >= 0; --
i) {
2580 if (HasAssignedIndexAt(
i)) {
2581 os <<
" var[" << GetAssignedIndexAt(
i) <<
"] = ";
2585 os << NameOf(values_[
i]);
2586 if (
i > 0) os <<
",";
2593 void HSimulate::ReplayEnvironment(HEnvironment* env) {
2594 if (done_with_replay_)
return;
2596 env->set_ast_id(ast_id());
2597 env->Drop(pop_count());
2598 for (
int i = values()->length() - 1;
i >= 0; --
i) {
2599 HValue* value = values()->at(
i);
2600 if (HasAssignedIndexAt(
i)) {
2601 env->Bind(GetAssignedIndexAt(
i), value);
2606 done_with_replay_ =
true;
2611 HCapturedObject* other) {
2612 for (
int i = 0;
i < values->length(); ++
i) {
2614 if (value->IsCapturedObject()) {
2615 if (HCapturedObject::cast(value)->capture_id() == other->capture_id()) {
2616 values->
at(
i) = other;
2627 void HCapturedObject::ReplayEnvironment(HEnvironment* env) {
2629 while (env !=
NULL) {
2636 OStream& HCapturedObject::PrintDataTo(OStream& os)
const {
2637 os <<
"#" << capture_id() <<
" ";
2642 void HEnterInlined::RegisterReturnTarget(HBasicBlock* return_target,
2644 DCHECK(return_target->IsInlineReturnTarget());
2645 return_targets_.Add(return_target, zone);
2649 OStream& HEnterInlined::PrintDataTo(OStream& os)
const {
2650 return os <<
function()->debug_name()->ToCString().get()
2651 <<
", id=" <<
function()->
id().ToInt();
2656 double roundtrip_value =
static_cast<double>(
static_cast<int32_t>(value));
2657 return bit_cast<int64_t>(roundtrip_value) == bit_cast<int64_t>(value);
2662 : HTemplateInstruction<0>(HType::FromValue(object)),
2663 object_(Unique<
Object>::CreateUninitialized(object)),
2664 object_map_(
Handle<Map>::null()),
2665 has_stable_map_value_(
false),
2666 has_smi_value_(
false),
2667 has_int32_value_(
false),
2668 has_double_value_(
false),
2669 has_external_reference_value_(
false),
2670 is_not_in_new_space_(
true),
2671 boolean_value_(object->BooleanValue()),
2672 is_undetectable_(
false),
2673 instance_type_(kUnknownInstanceType) {
2674 if (object->IsHeapObject()) {
2676 Isolate* isolate = heap_object->GetIsolate();
2678 is_not_in_new_space_ = !isolate->heap()->InNewSpace(*
object);
2679 instance_type_ =
map->instance_type();
2680 is_undetectable_ =
map->is_undetectable();
2682 has_stable_map_value_ = (instance_type_ ==
MAP_TYPE &&
2685 if (object->IsNumber()) {
2686 double n =
object->Number();
2689 has_smi_value_ = has_int32_value_ &&
Smi::IsValid(int32_value_);
2691 has_double_value_ =
true;
2699 HConstant::HConstant(Unique<Object>
object,
2700 Unique<Map> object_map,
2701 bool has_stable_map_value,
2704 bool is_not_in_new_space,
2706 bool is_undetectable,
2708 : HTemplateInstruction<0>(type),
2710 object_map_(object_map),
2711 has_stable_map_value_(has_stable_map_value),
2712 has_smi_value_(
false),
2713 has_int32_value_(
false),
2714 has_double_value_(
false),
2715 has_external_reference_value_(
false),
2716 is_not_in_new_space_(is_not_in_new_space),
2717 boolean_value_(boolean_value),
2718 is_undetectable_(is_undetectable),
2719 instance_type_(instance_type) {
2721 DCHECK(!type.IsTaggedNumber() || type.IsNone());
2726 HConstant::HConstant(
int32_t integer_value,
2728 bool is_not_in_new_space,
2729 Unique<Object>
object)
2731 object_map_(Handle<Map>::null()),
2732 has_stable_map_value_(
false),
2733 has_smi_value_(Smi::IsValid(integer_value)),
2734 has_int32_value_(
true),
2735 has_double_value_(
true),
2736 has_external_reference_value_(
false),
2737 is_not_in_new_space_(is_not_in_new_space),
2738 boolean_value_(integer_value != 0),
2739 is_undetectable_(
false),
2740 int32_value_(integer_value),
2741 double_value_(
FastI2D(integer_value)),
2742 instance_type_(kUnknownInstanceType) {
2745 bool could_be_heapobject = r.IsTagged() && !
object.handle().is_null();
2746 bool is_smi = has_smi_value_ && !could_be_heapobject;
2747 set_type(is_smi ? HType::Smi() : HType::TaggedNumber());
2752 HConstant::HConstant(
double double_value,
2754 bool is_not_in_new_space,
2755 Unique<Object>
object)
2757 object_map_(Handle<Map>::null()),
2758 has_stable_map_value_(
false),
2760 has_double_value_(
true),
2761 has_external_reference_value_(
false),
2762 is_not_in_new_space_(is_not_in_new_space),
2763 boolean_value_(double_value != 0 && !std::
isnan(double_value)),
2764 is_undetectable_(
false),
2766 double_value_(double_value),
2767 instance_type_(kUnknownInstanceType) {
2768 has_smi_value_ = has_int32_value_ &&
Smi::IsValid(int32_value_);
2771 bool could_be_heapobject = r.IsTagged() && !
object.handle().is_null();
2772 bool is_smi = has_smi_value_ && !could_be_heapobject;
2773 set_type(is_smi ? HType::Smi() : HType::TaggedNumber());
2778 HConstant::HConstant(ExternalReference reference)
2779 : HTemplateInstruction<0>(HType::Any()),
2781 object_map_(Handle<Map>::null()),
2782 has_stable_map_value_(
false),
2783 has_smi_value_(
false),
2784 has_int32_value_(
false),
2785 has_double_value_(
false),
2786 has_external_reference_value_(
true),
2787 is_not_in_new_space_(
true),
2788 boolean_value_(
true),
2789 is_undetectable_(
false),
2790 external_reference_value_(reference),
2791 instance_type_(kUnknownInstanceType) {
2796 void HConstant::Initialize(Representation r) {
2799 r = Representation::Smi();
2800 }
else if (has_int32_value_) {
2801 r = Representation::Integer32();
2802 }
else if (has_double_value_) {
2803 r = Representation::Double();
2804 }
else if (has_external_reference_value_) {
2805 r = Representation::External();
2807 Handle<Object>
object = object_.handle();
2808 if (object->IsJSObject()) {
2810 Handle<JSObject> js_object = Handle<JSObject>::cast(
object);
2811 if (js_object->map()->is_deprecated()) {
2812 JSObject::TryMigrateInstance(js_object);
2815 r = Representation::Tagged();
2823 object_ = Unique<Object>(Handle<Object>::null());
2825 set_representation(r);
2830 bool HConstant::ImmortalImmovable()
const {
2831 if (has_int32_value_) {
2834 if (has_double_value_) {
2835 if (IsSpecialDouble()) {
2840 if (has_external_reference_value_) {
2844 DCHECK(!object_.handle().is_null());
2845 Heap* heap = isolate()->heap();
2846 DCHECK(!object_.IsKnownGlobal(heap->minus_zero_value()));
2847 DCHECK(!object_.IsKnownGlobal(heap->nan_value()));
2849 #define IMMORTAL_IMMOVABLE_ROOT(name) \
2850 object_.IsKnownGlobal(heap->name()) ||
2852 #undef IMMORTAL_IMMOVABLE_ROOT
2853 #define INTERNALIZED_STRING(name, value) \
2854 object_.IsKnownGlobal(heap->name()) ||
2856 #undef INTERNALIZED_STRING
2857 #define STRING_TYPE(NAME, size, name, Name) \
2858 object_.IsKnownGlobal(heap->name##_map()) ||
2865 bool HConstant::EmitAtUses() {
2867 if (block()->graph()->has_osr() &&
2868 block()->graph()->IsStandardConstant(
this)) {
2872 if (HasNoUses())
return true;
2873 if (IsCell())
return false;
2874 if (representation().IsDouble())
return false;
2875 if (representation().IsExternal())
return false;
2880 HConstant* HConstant::CopyToRepresentation(Representation r, Zone* zone)
const {
2881 if (r.IsSmi() && !has_smi_value_)
return NULL;
2882 if (r.IsInteger32() && !has_int32_value_)
return NULL;
2883 if (r.IsDouble() && !has_double_value_)
return NULL;
2884 if (r.IsExternal() && !has_external_reference_value_)
return NULL;
2885 if (has_int32_value_) {
2886 return new(zone) HConstant(int32_value_, r, is_not_in_new_space_, object_);
2888 if (has_double_value_) {
2889 return new(zone) HConstant(double_value_, r, is_not_in_new_space_, object_);
2891 if (has_external_reference_value_) {
2892 return new(zone) HConstant(external_reference_value_);
2894 DCHECK(!object_.handle().is_null());
2895 return new(zone) HConstant(object_,
2897 has_stable_map_value_,
2900 is_not_in_new_space_,
2907 Maybe<HConstant*> HConstant::CopyToTruncatedInt32(Zone* zone) {
2908 HConstant* res =
NULL;
2909 if (has_int32_value_) {
2910 res =
new(zone) HConstant(int32_value_,
2911 Representation::Integer32(),
2912 is_not_in_new_space_,
2914 }
else if (has_double_value_) {
2916 Representation::Integer32(),
2917 is_not_in_new_space_,
2920 return Maybe<HConstant*>(res !=
NULL, res);
2924 Maybe<HConstant*> HConstant::CopyToTruncatedNumber(Zone* zone) {
2925 HConstant* res =
NULL;
2927 if (
handle->IsBoolean()) {
2928 res =
handle->BooleanValue() ?
2929 new(zone) HConstant(1) :
new(zone) HConstant(0);
2930 }
else if (
handle->IsUndefined()) {
2932 }
else if (
handle->IsNull()) {
2933 res =
new(zone) HConstant(0);
2935 return Maybe<HConstant*>(res !=
NULL, res);
2939 OStream& HConstant::PrintDataTo(OStream& os)
const {
2940 if (has_int32_value_) {
2941 os << int32_value_ <<
" ";
2942 }
else if (has_double_value_) {
2943 os << double_value_ <<
" ";
2944 }
else if (has_external_reference_value_) {
2945 os << reinterpret_cast<void*>(external_reference_value_.address()) <<
" ";
2948 Handle<Object> h =
const_cast<HConstant*
>(
this)->
handle(Isolate::Current());
2949 os << Brief(*h) <<
" ";
2950 if (HasStableMapValue()) os <<
"[stable-map] ";
2951 if (HasObjectMap()) os <<
"[map " << *ObjectMap().handle() <<
"] ";
2953 if (!is_not_in_new_space_) os <<
"[new space] ";
2960 if (CheckFlag(kCanOverflow)) os <<
" !";
2961 if (CheckFlag(kBailoutOnMinusZero)) os <<
" -0?";
2967 DCHECK(CheckFlag(kFlexibleRepresentation));
2969 UpdateRepresentation(new_rep, h_infer,
"inputs");
2971 if (representation().IsSmi() && HasNonSmiUse()) {
2972 UpdateRepresentation(
2973 Representation::Integer32(), h_infer,
"use requirements");
2976 if (observed_output_representation_.IsNone()) {
2977 new_rep = RepresentationFromUses();
2978 UpdateRepresentation(new_rep, h_infer,
"uses");
2980 new_rep = RepresentationFromOutput();
2981 UpdateRepresentation(new_rep, h_infer,
"output");
2990 for (
int i = 1;
i <= 2; ++
i) {
2991 rep = rep.
generalize(observed_input_representation(
i));
3004 bool HBinaryOperation::IgnoreObservedOutputRepresentation(
3006 return ((current_rep.
IsInteger32() && CheckUsesForFlag(kTruncatingToInt32)) ||
3007 (current_rep.
IsSmi() && CheckUsesForFlag(kTruncatingToSmi))) &&
3009 (!this->IsMul() || HMul::cast(
this)->MulMinusOne());
3018 if (observed_output_representation_.is_more_general_than(rep) &&
3019 !IgnoreObservedOutputRepresentation(rep)) {
3020 return observed_output_representation_;
3027 set_observed_input_representation(1, r);
3028 set_observed_input_representation(2, r);
3029 HValue::AssumeRepresentation(r);
3034 DCHECK(CheckFlag(kFlexibleRepresentation));
3036 UpdateRepresentation(new_rep, h_infer,
"inputs");
3041 Range* HBitwise::InferRange(Zone* zone) {
3042 if (op() == Token::BIT_XOR) {
3043 if (left()->HasRange() && right()->HasRange()) {
3050 int64_t left_upper = left()->range()->upper();
3051 int64_t left_lower = left()->range()->lower();
3052 int64_t right_upper = right()->range()->upper();
3053 int64_t right_lower = right()->range()->lower();
3055 if (left_upper < 0) left_upper = ~left_upper;
3056 if (left_lower < 0) left_lower = ~left_lower;
3057 if (right_upper < 0) right_upper = ~right_upper;
3058 if (right_lower < 0) right_lower = ~right_lower;
3062 left_upper | left_lower | right_upper | right_lower));
3066 int32_t min = (left()->range()->CanBeNegative() ||
3067 right()->range()->CanBeNegative())
3068 ?
static_cast<int32_t>(-limit) : 0;
3069 return new(zone) Range(
min,
static_cast<int32_t>(limit - 1));
3071 Range* result = HValue::InferRange(zone);
3072 result->set_can_be_minus_zero(
false);
3077 ? left()->range()->Mask()
3080 ? right()->range()->Mask()
3082 int32_t result_mask = (op() == Token::BIT_AND)
3083 ? left_mask & right_mask
3084 : left_mask | right_mask;
3085 if (result_mask >= 0)
return new(zone) Range(0, result_mask);
3087 Range* result = HValue::InferRange(zone);
3088 result->set_can_be_minus_zero(
false);
3093 Range* HSar::InferRange(Zone* zone) {
3094 if (right()->IsConstant()) {
3095 HConstant* c = HConstant::cast(right());
3096 if (c->HasInteger32Value()) {
3097 Range* result = (left()->range() !=
NULL)
3098 ? left()->range()->Copy(zone)
3099 :
new(zone) Range();
3100 result->Sar(c->Integer32Value());
3104 return HValue::InferRange(zone);
3108 Range* HShr::InferRange(Zone* zone) {
3109 if (right()->IsConstant()) {
3110 HConstant* c = HConstant::cast(right());
3111 if (c->HasInteger32Value()) {
3112 int shift_count = c->Integer32Value() & 0x1f;
3113 if (left()->range()->CanBeNegative()) {
3115 return (shift_count >= 1)
3116 ?
new(zone) Range(0,
3117 static_cast<uint32_t>(0xffffffff) >> shift_count)
3118 :
new(zone) Range();
3121 Range* result = (left()->range() !=
NULL)
3122 ? left()->range()->Copy(zone)
3123 :
new(zone) Range();
3124 result->Sar(c->Integer32Value());
3129 return HValue::InferRange(zone);
3133 Range* HShl::InferRange(Zone* zone) {
3134 if (right()->IsConstant()) {
3135 HConstant* c = HConstant::cast(right());
3136 if (c->HasInteger32Value()) {
3137 Range* result = (left()->range() !=
NULL)
3138 ? left()->range()->Copy(zone)
3139 :
new(zone) Range();
3140 result->Shl(c->Integer32Value());
3144 return HValue::InferRange(zone);
3148 Range* HLoadNamedField::InferRange(Zone* zone) {
3149 if (access().representation().IsInteger8()) {
3152 if (access().representation().IsUInteger8()) {
3155 if (access().representation().IsInteger16()) {
3158 if (access().representation().IsUInteger16()) {
3161 if (access().IsStringLength()) {
3162 return new(zone) Range(0, String::kMaxLength);
3164 return HValue::InferRange(zone);
3168 Range* HLoadKeyed::InferRange(Zone* zone) {
3169 switch (elements_kind()) {
3180 return HValue::InferRange(zone);
3185 OStream& HCompareGeneric::PrintDataTo(OStream& os)
const {
3186 os << Token::Name(token()) <<
" ";
3187 return HBinaryOperation::PrintDataTo(os);
3192 os << Token::Name(token()) <<
" ";
3193 return HControlInstruction::PrintDataTo(os);
3198 os << Token::Name(token()) <<
" " <<
NameOf(left()) <<
" " <<
NameOf(right());
3199 return HControlInstruction::PrintDataTo(os);
3205 return HControlInstruction::PrintDataTo(os);
3209 bool HCompareObjectEqAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3210 if (known_successor_index() != kNoKnownSuccessorIndex) {
3211 *block = SuccessorAt(known_successor_index());
3214 if (FLAG_fold_constants && left()->IsConstant() && right()->IsConstant()) {
3215 *block = HConstant::cast(left())->DataEquals(HConstant::cast(right()))
3216 ? FirstSuccessor() : SecondSuccessor();
3225 if (constant->HasNumberValue())
return false;
3226 if (constant->GetUnique().IsKnownGlobal(isolate->
heap()->null_value())) {
3229 if (constant->IsUndetectable())
return false;
3236 bool HIsObjectAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3237 if (FLAG_fold_constants && value()->IsConstant()) {
3239 ? FirstSuccessor() : SecondSuccessor();
3247 bool HIsStringAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3248 if (known_successor_index() != kNoKnownSuccessorIndex) {
3249 *block = SuccessorAt(known_successor_index());
3252 if (FLAG_fold_constants && value()->IsConstant()) {
3253 *block = HConstant::cast(value())->HasStringValue()
3254 ? FirstSuccessor() : SecondSuccessor();
3257 if (value()->type().IsString()) {
3258 *block = FirstSuccessor();
3261 if (value()->type().IsSmi() ||
3262 value()->type().IsNull() ||
3263 value()->type().IsBoolean() ||
3264 value()->type().IsUndefined() ||
3265 value()->type().IsJSObject()) {
3266 *block = SecondSuccessor();
3274 bool HIsUndetectableAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3275 if (FLAG_fold_constants && value()->IsConstant()) {
3276 *block = HConstant::cast(value())->IsUndetectable()
3277 ? FirstSuccessor() : SecondSuccessor();
3285 bool HHasInstanceTypeAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3286 if (FLAG_fold_constants && value()->IsConstant()) {
3287 InstanceType type = HConstant::cast(value())->GetInstanceType();
3288 *block = (from_ <= type) && (type <= to_)
3289 ? FirstSuccessor() : SecondSuccessor();
3297 void HCompareHoleAndBranch::InferRepresentation(
3298 HInferRepresentationPhase* h_infer) {
3299 ChangeRepresentation(value()->representation());
3303 bool HCompareNumericAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3304 if (left() == right() &&
3305 left()->representation().IsSmiOrInteger32()) {
3307 token() == Token::EQ_STRICT ||
3308 token() == Token::LTE ||
3309 token() == Token::GTE)
3310 ? FirstSuccessor() : SecondSuccessor();
3318 bool HCompareMinusZeroAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3319 if (FLAG_fold_constants && value()->IsConstant()) {
3320 HConstant* constant = HConstant::cast(value());
3321 if (constant->HasDoubleValue()) {
3323 ? FirstSuccessor() : SecondSuccessor();
3327 if (value()->representation().IsSmiOrInteger32()) {
3329 *block = SecondSuccessor();
3337 void HCompareMinusZeroAndBranch::InferRepresentation(
3338 HInferRepresentationPhase* h_infer) {
3339 ChangeRepresentation(value()->representation());
3343 OStream& HGoto::PrintDataTo(OStream& os)
const {
3344 return os << *SuccessorAt(0);
3348 void HCompareNumericAndBranch::InferRepresentation(
3353 Representation observed_right = observed_input_representation(1);
3362 rep = Representation::Double();
3379 if (Token::IsOrderedRelationalCompareOp(token_)) {
3380 SetFlag(kAllowUndefinedAsNaN);
3383 ChangeRepresentation(rep);
3388 return os << index();
3392 OStream& HLoadNamedField::PrintDataTo(OStream& os)
const {
3393 os << NameOf(
object()) << access_;
3395 if (maps() !=
NULL) {
3396 os <<
" [" << *maps()->at(0).handle();
3397 for (
int i = 1;
i < maps()->size(); ++
i) {
3398 os <<
"," << *maps()->at(
i).handle();
3403 if (HasDependency()) os <<
" " << NameOf(dependency());
3408 OStream& HLoadNamedGeneric::PrintDataTo(OStream& os)
const {
3409 Handle<String> n = Handle<String>::cast(
name());
3410 return os << NameOf(
object()) <<
"." << n->ToCString().get();
3414 OStream& HLoadKeyed::PrintDataTo(OStream& os)
const {
3415 if (!is_external()) {
3416 os << NameOf(elements());
3423 os <<
"[" << NameOf(key());
3424 if (IsDehoisted()) os <<
" + " << base_offset();
3427 if (HasDependency()) os <<
" " << NameOf(dependency());
3428 if (RequiresHoleCheck()) os <<
" check_hole";
3433 bool HLoadKeyed::TryIncreaseBaseOffset(
uint32_t increase_by_value) {
3438 uint32_t base_offset = BaseOffsetField::decode(bit_field_);
3440 addition_result += increase_by_value;
3441 if (!addition_result.
IsValid())
return false;
3443 if (!BaseOffsetField::is_valid(base_offset))
return false;
3444 bit_field_ = BaseOffsetField::update(bit_field_, base_offset);
3449 bool HLoadKeyed::UsesMustHandleHole()
const {
3460 return AllUsesCanTreatHoleAsNaN();
3470 if (!representation().IsTagged()) {
3474 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
3475 HValue*
use = it.value();
3476 if (!
use->IsChange())
return false;
3483 bool HLoadKeyed::AllUsesCanTreatHoleAsNaN()
const {
3485 CheckUsesForFlag(HValue::kAllowUndefinedAsNaN);
3489 bool HLoadKeyed::RequiresHoleCheck()
const {
3498 return !UsesMustHandleHole();
3502 OStream& HLoadKeyedGeneric::PrintDataTo(OStream& os)
const {
3503 return os << NameOf(
object()) <<
"[" << NameOf(key()) <<
"]";
3511 if (key()->IsLoadKeyed()) {
3512 HLoadKeyed* key_load = HLoadKeyed::cast(key());
3513 if (key_load->elements()->IsForInCacheArray()) {
3514 HForInCacheArray* names_cache =
3515 HForInCacheArray::cast(key_load->elements());
3517 if (names_cache->enumerable() ==
object()) {
3518 HForInCacheArray* index_cache =
3519 names_cache->index_cache();
3520 HCheckMapValue* map_check =
3521 HCheckMapValue::New(block()->graph()->zone(),
3522 block()->graph()->GetInvalidContext(),
3524 names_cache->map());
3525 HInstruction* index = HLoadKeyed::New(
3526 block()->graph()->zone(),
3527 block()->graph()->GetInvalidContext(),
3531 key_load->elements_kind());
3532 map_check->InsertBefore(
this);
3533 index->InsertBefore(
this);
3534 return Prepend(
new(block()->zone()) HLoadFieldByIndex(
3544 OStream& HStoreNamedGeneric::PrintDataTo(OStream& os)
const {
3545 Handle<String> n = Handle<String>::cast(
name());
3546 return os << NameOf(
object()) <<
"." << n->ToCString().get() <<
" = "
3551 OStream& HStoreNamedField::PrintDataTo(OStream& os)
const {
3552 os << NameOf(
object()) << access_ <<
" = " << NameOf(value());
3553 if (NeedsWriteBarrier()) os <<
" (write-barrier)";
3554 if (has_transition()) os <<
" (transition map " << *transition_map() <<
")";
3559 OStream& HStoreKeyed::PrintDataTo(OStream& os)
const {
3560 if (!is_external()) {
3561 os << NameOf(elements());
3568 os <<
"[" << NameOf(key());
3569 if (IsDehoisted()) os <<
" + " << base_offset();
3570 return os <<
"] = " << NameOf(value());
3574 OStream& HStoreKeyedGeneric::PrintDataTo(OStream& os)
const {
3575 return os << NameOf(
object()) <<
"[" << NameOf(key())
3576 <<
"] = " << NameOf(value());
3580 OStream& HTransitionElementsKind::PrintDataTo(OStream& os)
const {
3581 os << NameOf(
object());
3582 ElementsKind from_kind = original_map().handle()->elements_kind();
3583 ElementsKind to_kind = transitioned_map().handle()->elements_kind();
3584 os <<
" " << *original_map().handle() <<
" ["
3585 << ElementsAccessor::ForKind(from_kind)->name() <<
"] -> "
3586 << *transitioned_map().handle() <<
" ["
3587 << ElementsAccessor::ForKind(to_kind)->name() <<
"]";
3593 OStream& HLoadGlobalCell::PrintDataTo(OStream& os)
const {
3594 os <<
"[" << *cell().handle() <<
"]";
3595 if (details_.IsConfigurable()) os <<
" (configurable)";
3596 if (details_.IsReadOnly()) os <<
" (read-only)";
3601 bool HLoadGlobalCell::RequiresHoleCheck()
const {
3602 if (!details_.IsConfigurable())
return false;
3603 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
3604 HValue*
use = it.value();
3605 if (!
use->IsChange())
return true;
3611 OStream& HLoadGlobalGeneric::PrintDataTo(OStream& os)
const {
3612 return os <<
name()->ToCString().get() <<
" ";
3616 OStream& HInnerAllocatedObject::PrintDataTo(OStream& os)
const {
3617 os << NameOf(base_object()) <<
" offset ";
3618 return offset()->PrintTo(os);
3622 OStream& HStoreGlobalCell::PrintDataTo(OStream& os)
const {
3623 os <<
"[" << *cell().handle() <<
"] = " << NameOf(value());
3624 if (details_.IsConfigurable()) os <<
" (configurable)";
3625 if (details_.IsReadOnly()) os <<
" (read-only)";
3630 OStream& HLoadContextSlot::PrintDataTo(OStream& os)
const {
3631 return os << NameOf(value()) <<
"[" << slot_index() <<
"]";
3635 OStream& HStoreContextSlot::PrintDataTo(OStream& os)
const {
3636 return os << NameOf(context()) <<
"[" << slot_index()
3637 <<
"] = " << NameOf(value());
3644 HType HValue::CalculateInferredType() {
3649 HType HPhi::CalculateInferredType() {
3650 if (OperandCount() == 0)
return HType::Tagged();
3651 HType result = OperandAt(0)->type();
3652 for (
int i = 1;
i < OperandCount(); ++
i) {
3653 HType current = OperandAt(
i)->type();
3654 result = result.Combine(current);
3660 HType HChange::CalculateInferredType() {
3661 if (from().IsDouble() &&
to().IsTagged())
return HType::HeapNumber();
3666 Representation HUnaryMathOperation::RepresentationFromInputs() {
3667 if (SupportsFlexibleFloorAndRound() &&
3668 (op_ == kMathFloor || op_ == kMathRound)) {
3673 Representation rep = representation();
3676 Representation input_rep = value()->representation();
3677 if (!input_rep.IsTagged()) {
3678 rep = rep.generalize(input_rep);
3684 bool HAllocate::HandleSideEffectDominator(
GVNFlag side_effect,
3685 HValue* dominator) {
3686 DCHECK(side_effect == kNewSpacePromotion);
3687 Zone* zone = block()->zone();
3688 if (!FLAG_use_allocation_folding)
return false;
3691 if (!dominator->IsAllocate()) {
3692 if (FLAG_trace_allocation_folding) {
3693 PrintF(
"#%d (%s) cannot fold into #%d (%s)\n",
3694 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3700 if (FLAG_use_local_allocation_folding && dominator->block() != block()) {
3701 if (FLAG_trace_allocation_folding) {
3702 PrintF(
"#%d (%s) cannot fold into #%d (%s), crosses basic blocks\n",
3703 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3708 HAllocate* dominator_allocate = HAllocate::cast(dominator);
3709 HValue* dominator_size = dominator_allocate->size();
3710 HValue* current_size =
size();
3713 if (!dominator_size->IsInteger32Constant()) {
3714 if (FLAG_trace_allocation_folding) {
3715 PrintF(
"#%d (%s) cannot fold into #%d (%s), "
3716 "dynamic allocation size in dominator\n",
3717 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3722 dominator_allocate = GetFoldableDominator(dominator_allocate);
3723 if (dominator_allocate ==
NULL) {
3727 if (!has_size_upper_bound()) {
3728 if (FLAG_trace_allocation_folding) {
3729 PrintF(
"#%d (%s) cannot fold into #%d (%s), "
3730 "can't estimate total allocation size\n",
3731 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3736 if (!current_size->IsInteger32Constant()) {
3739 DCHECK(current_size->IsInstruction());
3741 HInstruction* current_instr = HInstruction::cast(current_size);
3742 if (!current_instr->Dominates(dominator_allocate)) {
3743 if (FLAG_trace_allocation_folding) {
3744 PrintF(
"#%d (%s) cannot fold into #%d (%s), dynamic size "
3745 "value does not dominate target allocation\n",
3746 id(), Mnemonic(), dominator_allocate->id(),
3747 dominator_allocate->Mnemonic());
3753 DCHECK((IsNewSpaceAllocation() &&
3754 dominator_allocate->IsNewSpaceAllocation()) ||
3755 (IsOldDataSpaceAllocation() &&
3756 dominator_allocate->IsOldDataSpaceAllocation()) ||
3757 (IsOldPointerSpaceAllocation() &&
3758 dominator_allocate->IsOldPointerSpaceAllocation()));
3761 dominator_size = dominator_allocate->size();
3762 int32_t original_object_size =
3763 HConstant::cast(dominator_size)->GetInteger32Constant();
3764 int32_t dominator_size_constant = original_object_size;
3766 if (MustAllocateDoubleAligned()) {
3772 int32_t current_size_max_value = size_upper_bound()->GetInteger32Constant();
3773 int32_t new_dominator_size = dominator_size_constant + current_size_max_value;
3777 if (new_dominator_size > Page::kMaxRegularHeapObjectSize -
kPointerSize) {
3778 if (FLAG_trace_allocation_folding) {
3779 PrintF(
"#%d (%s) cannot fold into #%d (%s) due to size: %d\n",
3780 id(), Mnemonic(), dominator_allocate->id(),
3781 dominator_allocate->Mnemonic(), new_dominator_size);
3786 HInstruction* new_dominator_size_value;
3788 if (current_size->IsInteger32Constant()) {
3789 new_dominator_size_value =
3790 HConstant::CreateAndInsertBefore(zone,
3794 dominator_allocate);
3796 HValue* new_dominator_size_constant =
3797 HConstant::CreateAndInsertBefore(zone,
3799 dominator_size_constant,
3800 Representation::Integer32(),
3801 dominator_allocate);
3804 current_size->ChangeRepresentation(Representation::Integer32());
3806 new_dominator_size_value = HAdd::New(zone, context(),
3807 new_dominator_size_constant, current_size);
3808 new_dominator_size_value->ClearFlag(HValue::kCanOverflow);
3809 new_dominator_size_value->ChangeRepresentation(Representation::Integer32());
3811 new_dominator_size_value->InsertBefore(dominator_allocate);
3814 dominator_allocate->UpdateSize(new_dominator_size_value);
3816 if (MustAllocateDoubleAligned()) {
3817 if (!dominator_allocate->MustAllocateDoubleAligned()) {
3818 dominator_allocate->MakeDoubleAligned();
3822 bool keep_new_space_iterable = FLAG_log_gc || FLAG_heap_stats;
3824 keep_new_space_iterable = keep_new_space_iterable || FLAG_verify_heap;
3827 if (keep_new_space_iterable && dominator_allocate->IsNewSpaceAllocation()) {
3828 dominator_allocate->MakePrefillWithFiller();
3832 dominator_allocate->ClearNextMapWord(original_object_size);
3835 dominator_allocate->UpdateClearNextMapWord(MustClearNextMapWord());
3838 HInstruction* inner_offset = HConstant::CreateAndInsertBefore(
3841 dominator_size_constant,
3845 HInstruction* dominated_allocate_instr =
3846 HInnerAllocatedObject::New(zone,
3851 dominated_allocate_instr->InsertBefore(
this);
3852 DeleteAndReplaceWith(dominated_allocate_instr);
3853 if (FLAG_trace_allocation_folding) {
3854 PrintF(
"#%d (%s) folded into #%d (%s)\n",
3855 id(), Mnemonic(), dominator_allocate->id(),
3856 dominator_allocate->Mnemonic());
3862 HAllocate* HAllocate::GetFoldableDominator(HAllocate* dominator) {
3863 if (!IsFoldable(dominator)) {
3865 if (IsNewSpaceAllocation() || dominator->IsNewSpaceAllocation()) {
3866 if (FLAG_trace_allocation_folding) {
3867 PrintF(
"#%d (%s) cannot fold into #%d (%s), new space hoisting\n",
3868 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3873 HAllocate* dominator_dominator = dominator->dominating_allocate_;
3878 if (dominator_dominator ==
NULL) {
3879 dominating_allocate_ = dominator;
3880 if (FLAG_trace_allocation_folding) {
3881 PrintF(
"#%d (%s) cannot fold into #%d (%s), different spaces\n",
3882 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3892 if (block()->block_id() != dominator_dominator->block()->block_id()) {
3893 if (FLAG_trace_allocation_folding) {
3894 PrintF(
"#%d (%s) cannot fold into #%d (%s), different basic blocks\n",
3895 id(), Mnemonic(), dominator_dominator->id(),
3896 dominator_dominator->Mnemonic());
3901 DCHECK((IsOldDataSpaceAllocation() &&
3902 dominator_dominator->IsOldDataSpaceAllocation()) ||
3903 (IsOldPointerSpaceAllocation() &&
3904 dominator_dominator->IsOldPointerSpaceAllocation()));
3906 int32_t current_size = HConstant::cast(
size())->GetInteger32Constant();
3907 HStoreNamedField* dominator_free_space_size =
3908 dominator->filler_free_space_size_;
3909 if (dominator_free_space_size !=
NULL) {
3912 dominator->UpdateFreeSpaceFiller(current_size);
3916 dominator->CreateFreeSpaceFiller(current_size);
3920 return dominator_dominator;
3926 void HAllocate::UpdateFreeSpaceFiller(
int32_t free_space_size) {
3928 Zone* zone = block()->zone();
3932 HConstant* new_free_space_size = HConstant::CreateAndInsertBefore(
3935 filler_free_space_size_->value()->GetInteger32Constant() +
3937 Representation::Smi(),
3938 filler_free_space_size_);
3939 filler_free_space_size_->UpdateValue(new_free_space_size);
3943 void HAllocate::CreateFreeSpaceFiller(
int32_t free_space_size) {
3945 Zone* zone = block()->zone();
3946 HInstruction* free_space_instr =
3947 HInnerAllocatedObject::New(zone, context(), dominating_allocate_,
3948 dominating_allocate_->size(), type());
3949 free_space_instr->InsertBefore(
this);
3950 HConstant* filler_map = HConstant::CreateAndInsertAfter(
3951 zone, Unique<Map>::CreateImmovable(
3952 isolate()->factory()->free_space_map()),
true, free_space_instr);
3953 HInstruction* store_map = HStoreNamedField::New(zone, context(),
3954 free_space_instr, HObjectAccess::ForMap(), filler_map);
3955 store_map->SetFlag(HValue::kHasNoObservableSideEffects);
3956 store_map->InsertAfter(filler_map);
3961 HConstant* filler_size = HConstant::CreateAndInsertAfter(
3962 zone, context(), free_space_size, Representation::Smi(), store_map);
3964 HObjectAccess access =
3965 HObjectAccess::ForMapAndOffset(isolate()->factory()->free_space_map(),
3966 FreeSpace::kSizeOffset,
3967 Representation::Smi());
3968 HStoreNamedField* store_size = HStoreNamedField::New(zone, context(),
3969 free_space_instr, access, filler_size);
3970 store_size->SetFlag(HValue::kHasNoObservableSideEffects);
3971 store_size->InsertAfter(filler_size);
3972 filler_free_space_size_ = store_size;
3976 void HAllocate::ClearNextMapWord(
int offset) {
3977 if (MustClearNextMapWord()) {
3978 Zone* zone = block()->zone();
3979 HObjectAccess access =
3980 HObjectAccess::ForObservableJSObjectOffset(offset);
3981 HStoreNamedField* clear_next_map =
3982 HStoreNamedField::New(zone, context(),
this, access,
3983 block()->graph()->GetConstant0());
3984 clear_next_map->ClearAllSideEffects();
3985 clear_next_map->InsertAfter(
this);
3990 OStream& HAllocate::PrintDataTo(OStream& os)
const {
3991 os << NameOf(
size()) <<
" (";
3992 if (IsNewSpaceAllocation()) os <<
"N";
3993 if (IsOldPointerSpaceAllocation()) os <<
"P";
3994 if (IsOldDataSpaceAllocation()) os <<
"D";
3995 if (MustAllocateDoubleAligned()) os <<
"A";
3996 if (MustPrefillWithFiller()) os <<
"F";
4001 bool HStoreKeyed::TryIncreaseBaseOffset(
uint32_t increase_by_value) {
4007 addition_result += increase_by_value;
4008 if (!addition_result.
IsValid())
return false;
4014 bool HStoreKeyed::NeedsCanonicalization() {
4022 if (value()->IsConstant()) {
4026 if (value()->IsLoadKeyed()) {
4028 HLoadKeyed::cast(value())->elements_kind());
4031 if (value()->IsChange()) {
4032 if (HChange::cast(value())->from().IsSmiOrInteger32()) {
4035 if (HChange::cast(value())->value()->type().IsSmi()) {
4043 #define H_CONSTANT_INT(val) \
4044 HConstant::New(zone, context, static_cast<int32_t>(val))
4045 #define H_CONSTANT_DOUBLE(val) \
4046 HConstant::New(zone, context, static_cast<double>(val))
4048 #define DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HInstr, op) \
4049 HInstruction* HInstr::New( \
4050 Zone* zone, HValue* context, HValue* left, HValue* right) { \
4051 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { \
4052 HConstant* c_left = HConstant::cast(left); \
4053 HConstant* c_right = HConstant::cast(right); \
4054 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \
4055 double double_res = c_left->DoubleValue() op c_right->DoubleValue(); \
4056 if (IsInt32Double(double_res)) { \
4057 return H_CONSTANT_INT(double_res); \
4059 return H_CONSTANT_DOUBLE(double_res); \
4062 return new(zone) HInstr(context, left, right); \
4070 #undef DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR
4073 HInstruction* HStringAdd::New(Zone* zone,
4079 Handle<AllocationSite> allocation_site) {
4080 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4081 HConstant* c_right = HConstant::cast(right);
4082 HConstant* c_left = HConstant::cast(left);
4083 if (c_left->HasStringValue() && c_right->HasStringValue()) {
4084 Handle<String> left_string = c_left->StringValue();
4085 Handle<String> right_string = c_right->StringValue();
4087 if (left_string->length() + right_string->length() < String::kMaxLength) {
4088 MaybeHandle<String> concat = zone->isolate()->factory()->NewConsString(
4089 c_left->StringValue(), c_right->StringValue());
4090 return HConstant::New(zone, context, concat.ToHandleChecked());
4094 return new(zone) HStringAdd(
4095 context, left, right, pretenure_flag,
flags, allocation_site);
4099 OStream& HStringAdd::PrintDataTo(OStream& os)
const {
4105 os <<
"_CheckRight";
4107 HBinaryOperation::PrintDataTo(os);
4111 else if (pretenure_flag() ==
TENURED)
4117 HInstruction* HStringCharFromCode::New(
4118 Zone* zone, HValue* context, HValue* char_code) {
4119 if (FLAG_fold_constants && char_code->IsConstant()) {
4120 HConstant* c_code = HConstant::cast(char_code);
4121 Isolate* isolate = zone->isolate();
4122 if (c_code->HasNumberValue()) {
4123 if (std::isfinite(c_code->DoubleValue())) {
4124 uint32_t code = c_code->NumberValueAsInteger32() & 0xffff;
4125 return HConstant::New(zone, context,
4126 isolate->factory()->LookupSingleCharacterStringFromCode(code));
4128 return HConstant::New(zone, context, isolate->factory()->empty_string());
4131 return new(zone) HStringCharFromCode(context, char_code);
4135 HInstruction* HUnaryMathOperation::New(
4138 if (!FLAG_fold_constants)
break;
4139 if (!value->IsConstant())
break;
4140 HConstant* constant = HConstant::cast(value);
4141 if (!constant->HasNumberValue())
break;
4142 double d = constant->DoubleValue();
4143 if (std::isnan(d)) {
4146 if (std::isinf(d)) {
4198 return new(zone) HUnaryMathOperation(context, value, op);
4202 Representation HUnaryMathOperation::RepresentationFromUses() {
4203 if (op_ != kMathFloor && op_ != kMathRound) {
4204 return HValue::RepresentationFromUses();
4209 bool use_double =
false;
4211 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
4212 HValue*
use = it.value();
4213 int use_index = it.index();
4214 Representation rep_observed =
use->observed_input_representation(use_index);
4215 Representation rep_required =
use->RequiredInputRepresentation(use_index);
4216 use_double |= (rep_observed.IsDouble() || rep_required.IsDouble());
4217 if (use_double && !FLAG_trace_representation) {
4221 if (FLAG_trace_representation) {
4222 if (!rep_required.IsDouble() || rep_observed.IsDouble()) {
4223 PrintF(
"#%d %s is used by #%d %s as %s%s\n",
4224 id(), Mnemonic(),
use->id(),
4225 use->Mnemonic(), rep_observed.Mnemonic(),
4226 (
use->CheckFlag(kTruncatingToInt32) ?
"-trunc" :
""));
4228 PrintF(
"#%d %s is required by #%d %s as %s%s\n",
4229 id(), Mnemonic(),
use->id(),
4230 use->Mnemonic(), rep_required.Mnemonic(),
4231 (
use->CheckFlag(kTruncatingToInt32) ?
"-trunc" :
""));
4235 return use_double ? Representation::Double() : Representation::Integer32();
4239 HInstruction* HPower::New(Zone* zone,
4243 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4244 HConstant* c_left = HConstant::cast(left);
4245 HConstant* c_right = HConstant::cast(right);
4246 if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
4248 c_right->DoubleValue());
4253 return new(zone) HPower(left, right);
4257 HInstruction* HMathMinMax::New(
4258 Zone* zone, HValue* context, HValue* left, HValue* right, Operation op) {
4259 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4260 HConstant* c_left = HConstant::cast(left);
4261 HConstant* c_right = HConstant::cast(right);
4262 if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
4263 double d_left = c_left->DoubleValue();
4264 double d_right = c_right->DoubleValue();
4265 if (op == kMathMin) {
4268 if (d_left == d_right) {
4276 if (d_left == d_right) {
4286 return new(zone) HMathMinMax(context, left, right, op);
4290 HInstruction* HMod::New(Zone* zone,
4294 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4295 HConstant* c_left = HConstant::cast(left);
4296 HConstant* c_right = HConstant::cast(right);
4297 if (c_left->HasInteger32Value() && c_right->HasInteger32Value()) {
4298 int32_t dividend = c_left->Integer32Value();
4299 int32_t divisor = c_right->Integer32Value();
4300 if (dividend ==
kMinInt && divisor == -1) {
4304 int32_t res = dividend % divisor;
4305 if ((res == 0) && (dividend < 0)) {
4312 return new(zone) HMod(context, left, right);
4316 HInstruction* HDiv::New(
4317 Zone* zone, HValue* context, HValue* left, HValue* right) {
4319 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4320 HConstant* c_left = HConstant::cast(left);
4321 HConstant* c_right = HConstant::cast(right);
4322 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
4323 if (c_right->DoubleValue() != 0) {
4324 double double_res = c_left->DoubleValue() / c_right->DoubleValue();
4330 int sign = Double(c_left->DoubleValue()).Sign() *
4331 Double(c_right->DoubleValue()).Sign();
4336 return new(zone) HDiv(context, left, right);
4340 HInstruction* HBitwise::New(
4341 Zone* zone, HValue* context, Token::Value op, HValue* left, HValue* right) {
4342 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4343 HConstant* c_left = HConstant::cast(left);
4344 HConstant* c_right = HConstant::cast(right);
4345 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
4347 int32_t v_left = c_left->NumberValueAsInteger32();
4348 int32_t v_right = c_right->NumberValueAsInteger32();
4350 case Token::BIT_XOR:
4351 result = v_left ^ v_right;
4353 case Token::BIT_AND:
4354 result = v_left & v_right;
4357 result = v_left | v_right;
4366 return new(zone) HBitwise(context, op, left, right);
4370 #define DEFINE_NEW_H_BITWISE_INSTR(HInstr, result) \
4371 HInstruction* HInstr::New( \
4372 Zone* zone, HValue* context, HValue* left, HValue* right) { \
4373 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { \
4374 HConstant* c_left = HConstant::cast(left); \
4375 HConstant* c_right = HConstant::cast(right); \
4376 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \
4377 return H_CONSTANT_INT(result); \
4380 return new(zone) HInstr(context, left, right); \
4385 c_left->NumberValueAsInteger32() >> (c_right->NumberValueAsInteger32() & 0x1f))
4389 #undef DEFINE_NEW_H_BITWISE_INSTR
4394 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4395 HConstant* c_left = HConstant::cast(left);
4396 HConstant* c_right = HConstant::cast(right);
4397 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
4398 int32_t left_val = c_left->NumberValueAsInteger32();
4399 int32_t right_val = c_right->NumberValueAsInteger32() & 0x1f;
4400 if ((right_val == 0) && (left_val < 0)) {
4406 return new(zone) HShr(context, left, right);
4410 HInstruction* HSeqStringGetChar::New(Zone* zone,
4415 if (FLAG_fold_constants && string->IsConstant() && index->IsConstant()) {
4416 HConstant* c_string = HConstant::cast(
string);
4417 HConstant* c_index = HConstant::cast(index);
4418 if (c_string->HasStringValue() && c_index->HasInteger32Value()) {
4420 int32_t i = c_index->Integer32Value();
4426 return new(zone) HSeqStringGetChar(encoding,
string, index);
4430 #undef H_CONSTANT_INT
4431 #undef H_CONSTANT_DOUBLE
4434 OStream& HBitwise::PrintDataTo(OStream& os)
const {
4435 os << Token::Name(op_) <<
" ";
4436 return HBitwiseBinaryOperation::PrintDataTo(os);
4440 void HPhi::SimplifyConstantInputs() {
4443 if (!CheckUsesForFlag(kTruncatingToInt32))
return;
4444 for (
int i = 0;
i < OperandCount(); ++
i) {
4445 if (!OperandAt(
i)->IsConstant())
return;
4447 HGraph* graph = block()->graph();
4448 for (
int i = 0;
i < OperandCount(); ++
i) {
4449 HConstant* operand = HConstant::cast(OperandAt(
i));
4450 if (operand->HasInteger32Value()) {
4452 }
else if (operand->HasDoubleValue()) {
4453 HConstant* integer_input =
4454 HConstant::New(graph->zone(), graph->GetInvalidContext(),
4456 integer_input->InsertAfter(operand);
4457 SetOperandAt(
i, integer_input);
4458 }
else if (operand->HasBooleanValue()) {
4459 SetOperandAt(
i, operand->BooleanValue() ? graph->GetConstant1()
4460 : graph->GetConstant0());
4461 }
else if (operand->ImmortalImmovable()) {
4462 SetOperandAt(
i, graph->GetConstant0());
4466 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
4467 HValue*
use = it.value();
4468 if (
use->IsBinaryOperation()) {
4469 HBinaryOperation::cast(
use)->set_observed_input_representation(
4470 it.index(), Representation::Smi());
4476 void HPhi::InferRepresentation(HInferRepresentationPhase* h_infer) {
4477 DCHECK(CheckFlag(kFlexibleRepresentation));
4478 Representation new_rep = RepresentationFromInputs();
4479 UpdateRepresentation(new_rep, h_infer,
"inputs");
4480 new_rep = RepresentationFromUses();
4481 UpdateRepresentation(new_rep, h_infer,
"uses");
4482 new_rep = RepresentationFromUseRequirements();
4483 UpdateRepresentation(new_rep, h_infer,
"use requirements");
4487 Representation HPhi::RepresentationFromInputs() {
4489 for (
int i = 0;
i < OperandCount(); ++
i) {
4490 r = r.generalize(OperandAt(
i)->KnownOptimalRepresentation());
4500 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
4502 if (it.value()->block()->IsUnreachable())
continue;
4506 it.value()->RequiredInputRepresentation(it.index());
4513 rep = Representation::Integer32();
4522 bool HValue::HasNonSmiUse() {
4523 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
4526 it.value()->RequiredInputRepresentation(it.index());
4540 void HPhi::Verify() {
4541 DCHECK(OperandCount() == block()->predecessors()->length());
4542 for (
int i = 0;
i < OperandCount(); ++
i) {
4544 HBasicBlock* defining_block = value->
block();
4545 HBasicBlock* predecessor_block = block()->predecessors()->at(
i);
4546 DCHECK(defining_block == predecessor_block ||
4547 defining_block->Dominates(predecessor_block));
4552 void HSimulate::Verify() {
4553 HInstruction::Verify();
4554 DCHECK(HasAstId() || next()->IsEnterInlined());
4558 void HCheckHeapObject::Verify() {
4559 HInstruction::Verify();
4564 void HCheckValue::Verify() {
4565 HInstruction::Verify();
4572 HObjectAccess HObjectAccess::ForFixedArrayHeader(
int offset) {
4574 DCHECK(offset < FixedArray::kHeaderSize);
4575 if (offset == FixedArray::kLengthOffset)
return ForFixedArrayLength();
4576 return HObjectAccess(kInobject, offset);
4580 HObjectAccess HObjectAccess::ForMapAndOffset(Handle<Map>
map,
int offset,
4581 Representation representation) {
4583 Portion portion = kInobject;
4585 if (offset == JSObject::kElementsOffset) {
4586 portion = kElementsPointer;
4587 }
else if (offset == JSObject::kMapOffset) {
4590 bool existing_inobject_property =
true;
4591 if (!
map.is_null()) {
4592 existing_inobject_property = (offset <
4595 return HObjectAccess(portion, offset, representation, Handle<String>::null(),
4596 false, existing_inobject_property);
4600 HObjectAccess HObjectAccess::ForAllocationSiteOffset(
int offset) {
4602 case AllocationSite::kTransitionInfoOffset:
4603 return HObjectAccess(kInobject, offset, Representation::Tagged());
4604 case AllocationSite::kNestedSiteOffset:
4605 return HObjectAccess(kInobject, offset, Representation::Tagged());
4606 case AllocationSite::kPretenureDataOffset:
4607 return HObjectAccess(kInobject, offset, Representation::Smi());
4608 case AllocationSite::kPretenureCreateCountOffset:
4609 return HObjectAccess(kInobject, offset, Representation::Smi());
4610 case AllocationSite::kDependentCodeOffset:
4611 return HObjectAccess(kInobject, offset, Representation::Tagged());
4612 case AllocationSite::kWeakNextOffset:
4613 return HObjectAccess(kInobject, offset, Representation::Tagged());
4617 return HObjectAccess(kInobject, offset);
4621 HObjectAccess HObjectAccess::ForContextSlot(
int index) {
4623 Portion portion = kInobject;
4624 int offset = Context::kHeaderSize + index *
kPointerSize;
4626 return HObjectAccess(portion, offset, Representation::Tagged());
4630 HObjectAccess HObjectAccess::ForJSArrayOffset(
int offset) {
4632 Portion portion = kInobject;
4634 if (offset == JSObject::kElementsOffset) {
4635 portion = kElementsPointer;
4636 }
else if (offset == JSArray::kLengthOffset) {
4637 portion = kArrayLengths;
4638 }
else if (offset == JSObject::kMapOffset) {
4641 return HObjectAccess(portion, offset);
4645 HObjectAccess HObjectAccess::ForBackingStoreOffset(
int offset,
4646 Representation representation) {
4648 return HObjectAccess(kBackingStore, offset, representation,
4649 Handle<String>::null(),
false,
false);
4653 HObjectAccess HObjectAccess::ForField(Handle<Map>
map,
int index,
4654 Representation representation,
4655 Handle<String>
name) {
4660 return HObjectAccess(kInobject, offset, representation,
name,
false,
true);
4663 int offset = (index *
kPointerSize) + FixedArray::kHeaderSize;
4664 return HObjectAccess(kBackingStore, offset, representation,
name,
4670 HObjectAccess HObjectAccess::ForCellPayload(Isolate* isolate) {
4671 return HObjectAccess(kInobject, Cell::kValueOffset, Representation::Tagged(),
4672 isolate->factory()->cell_value_string());
4678 if (access_type ==
STORE) {
4680 instr->SetDependsOnFlag(::v8::internal::kNewSpacePromotion);
4681 instr->SetFlag(HValue::kTrackSideEffectDominators);
4684 instr->SetFlag(HValue::kUseGVN);
4685 instr->SetDependsOnFlag(::v8::internal::kMaps);
4688 switch (portion()) {
4690 if (access_type ==
STORE) {
4691 instr->SetChangesFlag(::v8::internal::kArrayLengths);
4693 instr->SetDependsOnFlag(::v8::internal::kArrayLengths);
4696 case kStringLengths:
4697 if (access_type ==
STORE) {
4698 instr->SetChangesFlag(::v8::internal::kStringLengths);
4700 instr->SetDependsOnFlag(::v8::internal::kStringLengths);
4704 if (access_type ==
STORE) {
4705 instr->SetChangesFlag(::v8::internal::kInobjectFields);
4707 instr->SetDependsOnFlag(::v8::internal::kInobjectFields);
4711 if (access_type ==
STORE) {
4712 instr->SetChangesFlag(::v8::internal::kDoubleFields);
4714 instr->SetDependsOnFlag(::v8::internal::kDoubleFields);
4718 if (access_type ==
STORE) {
4719 instr->SetChangesFlag(::v8::internal::kBackingStoreFields);
4721 instr->SetDependsOnFlag(::v8::internal::kBackingStoreFields);
4724 case kElementsPointer:
4725 if (access_type ==
STORE) {
4726 instr->SetChangesFlag(::v8::internal::kElementsPointer);
4728 instr->SetDependsOnFlag(::v8::internal::kElementsPointer);
4732 if (access_type ==
STORE) {
4733 instr->SetChangesFlag(::v8::internal::kMaps);
4735 instr->SetDependsOnFlag(::v8::internal::kMaps);
4738 case kExternalMemory:
4739 if (access_type ==
STORE) {
4740 instr->SetChangesFlag(::v8::internal::kExternalMemory);
4742 instr->SetDependsOnFlag(::v8::internal::kExternalMemory);
4752 switch (access.portion()) {
4753 case HObjectAccess::kArrayLengths:
4754 case HObjectAccess::kStringLengths:
4757 case HObjectAccess::kElementsPointer:
4760 case HObjectAccess::kMaps:
4763 case HObjectAccess::kDouble:
4764 case HObjectAccess::kInobject:
4765 if (!access.name().is_null()) {
4766 os << Handle<String>::cast(access.name())->ToCString().get();
4768 os <<
"[in-object]";
4770 case HObjectAccess::kBackingStore:
4771 if (!access.name().is_null()) {
4772 os << Handle<String>::cast(access.name())->ToCString().get();
4774 os <<
"[backing-store]";
4776 case HObjectAccess::kExternalMemory:
4777 os <<
"[external-memory]";
4781 return os <<
"@" << access.offset();
An object reference managed by the v8 garbage collector.
Isolate represents an isolated instance of the V8 engine.
A JavaScript object (ECMA-262, 4.3.3)
A JavaScript string value (ECMA-262, 4.3.17).
static double nan_value()
virtual OStream & PrintDataTo(OStream &os) const
virtual OStream & PrintDataTo(OStream &os) const OVERRIDE
virtual Representation RequiredInputRepresentation(int index) OVERRIDE
virtual Representation RepresentationFromInputs() OVERRIDE
virtual int argument_count() const
virtual OStream & PrintDataTo(OStream &os) const OVERRIDE
void AddToWorklist(HValue *current)
virtual HSourcePosition position() const OVERRIDE
void set_position(HSourcePosition position)
HInstruction * previous() const
bool has_position() const
void InsertAfter(HInstruction *previous)
HInstruction * next() const
bool Dominates(HInstruction *other)
virtual OStream & PrintTo(OStream &os) const OVERRIDE
virtual OStream & PrintDataTo(OStream &os) const
void InsertBefore(HInstruction *next)
virtual OStream & PrintDataTo(OStream &os) const OVERRIDE
virtual OStream & PrintDataTo(OStream &os) const OVERRIDE
virtual OStream & PrintDataTo(OStream &os) const OVERRIDE
void set_tail(HUseListNode *list)
static HValue * cast(HValue *value)
virtual void InternalSetOperandAt(int index, HValue *value)=0
bool Equals(HValue *other)
virtual void InferRepresentation(HInferRepresentationPhase *h_infer)
void RemoveLastAddedRange()
virtual Opcode opcode() const =0
virtual bool DataEquals(HValue *other)
void ReplaceAllUsesWith(HValue *other)
virtual void DeleteFromGraph()=0
bool HasObservableSideEffects() const
virtual int OperandCount() const =0
bool CheckUsesForFlag(Flag f) const
void AddNewRange(Range *r, Zone *zone)
bool IsDefinedAfter(HBasicBlock *other) const
HBasicBlock * block() const
int32_t GetInteger32Constant()
GVNFlagSet ChangesFlags() const
void ChangeRepresentation(Representation r)
bool EqualsInteger32Constant(int32_t value)
bool HasMonomorphicJSObjectType()
virtual HType CalculateInferredType()
virtual void AssumeRepresentation(Representation r)
virtual Representation RepresentationFromInputs()
const char * Mnemonic() const
bool IsInteger32Constant()
bool UpdateInferredType()
Isolate * isolate() const
void AddDependantsToWorklist(HInferRepresentationPhase *h_infer)
static const int kNoNumber
virtual Handle< Map > GetMonomorphicJSObjectMap()
virtual intptr_t Hashcode()
void RegisterUse(int index, HValue *new_value)
virtual Range * InferRange(Zone *zone)
virtual OStream & PrintTo(OStream &os) const =0
Representation representation() const
void SetOperandAt(int index, HValue *value)
bool HasAtLeastOneUseWithFlagAndNoneWithout(Flag f) const
bool CheckFlag(Flag f) const
HUseIterator uses() const
void SetBlock(HBasicBlock *block)
@ kHasNoObservableSideEffects
@ kFlexibleRepresentation
@ kAllUsesTruncatingToInt32
static GVNFlagSet AllSideEffectsFlagSet()
virtual HValue * OperandAt(int index) const =0
void DeleteAndReplaceWith(HValue *other)
virtual Representation RepresentationFromUses()
bool CanReplaceWithDummyUses()
HUseListNode * RemoveUse(HValue *value, int index)
virtual void UpdateRepresentation(Representation new_rep, HInferRepresentationPhase *h_infer, const char *reason)
void ComputeInitialRange(Zone *zone)
static Handle< T > cast(Handle< S > that)
bool is_more_general_than(const Representation &other) const
static Representation Double()
const char * Mnemonic() const
bool IsSpecialization() const
static Representation External()
static Representation Smi()
bool IsSmiOrInteger32() const
Representation generalize(Representation other)
static Representation Integer32()
bool Equals(const Representation &other) const
static Representation Tagged()
static Representation None()
static const int kMaxValue
static const int kMinValue
static bool IsValid(intptr_t value)
static bool IsInequalityOp(Value op)
static Value NegateCompareOp(Value op)
static bool IsArithmeticCompareOp(Value op)
static Value ReverseCompareOp(Value op)
bool IsKnownGlobal(void *global) const
static Unique< T > CreateImmovable(Handle< T > handle)
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf map
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be expose gc extension under the specified name show built in functions in stack traces use random jit cookie to mask large constants minimum length for automatic enable preparsing CPU profiler sampling interval in microseconds trace out of bounds accesses to external arrays default size of stack region v8 is allowed to maximum length of function source code printed in a stack trace min size of a semi the new space consists of two semi spaces print one trace line following each garbage collection do not print 
trace line after scavenger collection print cumulative GC statistics in only print modified registers Trace simulator debug messages Implied by trace sim abort randomize hashes to avoid predictable hash Fixed seed to use to hash property Print the time it takes to deserialize the snapshot A filename with extra code to be included in the A file to write the raw snapshot bytes to(mksnapshot only)") DEFINE_STRING(raw_context_file
enable harmony numeric enable harmony object literal extensions Optimize object size
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be expose gc extension under the specified name show built in functions in stack traces use random jit cookie to mask large constants minimum length for automatic enable preparsing CPU profiler sampling interval in microseconds trace out of bounds accesses to external arrays default size of stack region v8 is allowed to use(in kBytes)") DEFINE_INT(max_stack_trace_source_length
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be expose gc extension under the specified name show built in functions in stack traces use random jit cookie to mask large constants minimum length for automatic enable preparsing CPU profiler sampling interval in microseconds trace out of bounds accesses to external arrays default size of stack region v8 is allowed to maximum length of function source code printed in a stack trace min size of a semi the new space consists of two semi spaces print one trace line following each garbage collection do not print 
trace line after scavenger collection print cumulative GC statistics in name
enable harmony numeric enable harmony object literal extensions true
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be NULL
#define IMMORTAL_IMMOVABLE_ROOT_LIST(V)
#define INTERNALIZED_STRING_LIST(V)
#define INTERNALIZED_STRING(name, value)
#define IMMORTAL_IMMOVABLE_ROOT(name)
#define DEFINE_COMPILE(type)
#define H_CONSTANT_DOUBLE(val)
#define DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HInstr, op)
#define STRING_TYPE(NAME, size, name, Name)
#define DEFINE_NEW_H_BITWISE_INSTR(HInstr, result)
#define H_CONSTANT_INT(val)
#define HYDROGEN_CONCRETE_INSTRUCTION_LIST(V)
#define GVN_TRACKED_FLAG_LIST(V)
#define GVN_UNTRACKED_FLAG_LIST(V)
#define DCHECK_LE(v1, v2)
#define DCHECK(condition)
#define DCHECK_LT(v1, v2)
#define DCHECK_EQ(v1, v2)
uint32_t CountLeadingZeros32(uint32_t value)
bool Equals(Node *a, Node *b)
MachineType TypeOf(MachineType machine_type)
double fast_exp(double input)
uint32_t DoubleToUint32(double x)
static int32_t AddWithoutOverflow(Representation r, int32_t a, int32_t b, bool *overflow)
static int32_t SubWithoutOverflow(Representation r, int32_t a, int32_t b, bool *overflow)
bool IsExternalArrayElementsKind(ElementsKind kind)
static int min(int a, int b)
static LifetimePosition Min(LifetimePosition a, LifetimePosition b)
double fast_sqrt(double input)
int MostSignificantBit(uint32_t x)
static void ReplayEnvironmentNested(const ZoneList< HValue * > *values, HCapturedObject *other)
static bool MatchDoubleNegation(HValue *instr, HValue **arg)
double power_helper(double x, double y)
static int32_t ConvertAndSetOverflow(Representation r, int64_t result, bool *overflow)
static bool IsInt32Double(double value)
kSerializedDataOffset Object
const uint32_t kStringTag
@ LAST_NONCALLABLE_SPEC_OBJECT_TYPE
@ FIRST_NONCALLABLE_SPEC_OBJECT_TYPE
@ EXTERNAL_UINT16_ELEMENTS
@ LAST_EXTERNAL_ARRAY_ELEMENTS_KIND
@ EXTERNAL_INT16_ELEMENTS
@ EXTERNAL_UINT8_ELEMENTS
@ EXTERNAL_UINT8_CLAMPED_ELEMENTS
@ FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND
bool IsSimpleMapChangeTransition(ElementsKind from_kind, ElementsKind to_kind)
const char * ElementsKindToString(ElementsKind kind)
bool IsFastDoubleElementsKind(ElementsKind kind)
Handle< T > handle(T *t, Isolate *isolate)
OStream & operator<<(OStream &os, const HObjectAccess &access)
bool IsFastPackedElementsKind(ElementsKind kind)
OStream & operator<<(OStream &os, const BasicBlockProfiler &p)
static bool IsIdentityOperation(HValue *arg1, HValue *arg2, int32_t identity)
c_left NumberValueAsInteger32()<<(c_right -> NumberValueAsInteger32() &0x1f)) HInstruction *HShr::New(Zone *zone, HValue *context, HValue *left, HValue *right)
static LifetimePosition Max(LifetimePosition a, LifetimePosition b)
static bool MatchNegationViaXor(HValue *instr, HValue **negated)
bool ConstantIsObject(HConstant *constant, Isolate *isolate)
static bool SmiValuesAre32Bits()
static int32_t MulWithoutOverflow(const Representation &r, int32_t a, int32_t b, bool *overflow)
void PrintF(const char *format,...)
int32_t DoubleToInt32(double x)
double power_double_double(double x, double y)
static bool IsInteger32(double value)
static bool SmiValuesAre31Bits()
unibrow::Mapping< unibrow::Ecma262Canonicalize > Canonicalize
static int32_t AbsMinus1(int32_t a)
const uint32_t kInternalizedTag
static bool MatchLeftIsOnes(HValue *l, HValue *r, HValue **negated)
const uint32_t kIsNotInternalizedMask
static bool IsMinusZero(double value)
bool IsExternalFloatOrDoubleElementsKind(ElementsKind kind)
const uint32_t kIsNotStringMask
const intptr_t kDoubleAlignmentMask
static String * TypeOfString(HConstant *constant, Isolate *isolate)
Debugger support for the V8 JavaScript engine.
#define STRING_TYPE_LIST(V)