24 if (!graph->Optimize(&bailout_reason)) {
118 if (FLAG_trace_hydrogen_stubs) {
119 const char* name = CodeStub::MajorName(stub()->MajorKey(), false);
120 PrintF("-----------------------------------------------------------\n");
121 PrintF("Compiling stub %s using hydrogen\n", name);
126 HEnvironment* start_environment = graph()->start_environment();
134 for (int i = 0; i < param_count; ++i) {
136 HParameter* param = Add<HParameter>(i,
137 HParameter::REGISTER_PARAMETER, r);
138 start_environment->Bind(i, param);
141 param->set_type(HType::Smi());
142 stack_parameter_count = param;
148 if (!runtime_stack_params) {
149 stack_parameter_count = graph()->GetConstantMinus1();
154 start_environment->BindContext(context_);
158 NoObservableSideEffectsScope no_effects(this);
166 if (!stack_parameter_count->IsConstant() &&
169 stack_pop_count = AddUncasted<HAdd>(stack_parameter_count, constant_one);
175 stack_pop_count = Add<HConstant>(count);
180 HReturn* hreturn_instruction = New<HReturn>(return_value,
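// Graph prologue/epilogue: each stub parameter is bound into the start HEnvironment, the stack parameter count is tracked, and the stub exits through an HReturn together with the computed stack_pop_count.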
188 template <class Stub>
210 HValue* undefined = graph()->GetConstantUndefined();
211 IfBuilder builder(this);
214 builder.ElseDeopt("Forced deopt to runtime");
223 ExternalReference miss) {
224 Factory* factory = isolate()->factory();
231 isolate()->counters()->code_stubs()->Increment();
255 template <class Stub>
257 Isolate* isolate = stub->isolate();
265 return stub->GenerateLightweightMissCode(descriptor.miss_handler());
267 base::ElapsedTimer timer;
268 if (FLAG_profile_hydrogen_code_stub_compilation) {
276 if (FLAG_profile_hydrogen_code_stub_compilation) {
278 os << "[Lazy compilation of " << stub << " took "
279 << timer.Elapsed().InMillisecondsF() << " ms]" << endl;
287 HValue* value = GetParameter(0);
290 IfBuilder if_number(this);
291 if_number.If<HIsSmiAndBranch>(value);
292 if_number.OrIf<HCompareMap>(value, isolate()->factory()->heap_number_map());
301 HValue* function = AddLoadJSBuiltin(Builtins::TO_NUMBER);
302 Add<HPushArguments>(value);
303 Push(Add<HInvokeFunction>(function, 1));
318 info()->MarkAsSavesCallerDoubles();
319 HValue* number = GetParameter(NumberToStringStub::kNumber);
320 return BuildNumberToString(number, Type::Number(zone()));
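// ToNumber returns Smi and heap-number inputs unchanged and otherwise invokes the TO_NUMBER builtin; NumberToString defers to BuildNumberToString.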
331 Factory* factory = isolate()->factory();
332 HValue* undefined = graph()->GetConstantUndefined();
337 info()->MarkMustNotHaveEagerFrame();
339 HInstruction* allocation_site = Add<HLoadKeyed>(GetParameter(0),
343 IfBuilder checker(this);
348 HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
351 allocation_site, static_cast<HValue*>(NULL), access);
352 HValue* elements = AddLoadElements(boilerplate);
353 HValue* capacity = AddLoadFixedArrayLength(elements);
354 IfBuilder zero_capacity(this);
357 zero_capacity.Then();
358 Push(BuildCloneShallowArrayEmpty(boilerplate,
361 zero_capacity.Else();
362 IfBuilder if_fixed_cow(this);
363 if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
365 Push(BuildCloneShallowArrayCow(boilerplate,
370 IfBuilder if_fixed(this);
371 if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
373 Push(BuildCloneShallowArrayNonEmpty(boilerplate,
379 Push(BuildCloneShallowArrayNonEmpty(boilerplate,
387 checker.ElseDeopt("Uninitialized boilerplate literals");
390 return environment()->Pop();
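// FastCloneShallowArray: the boilerplate is read from the allocation site and cloned via the empty, copy-on-write, or non-empty path depending on its elements; an uninitialized boilerplate deopts.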
401 HValue* undefined = graph()->GetConstantUndefined();
403 HInstruction* allocation_site = Add<HLoadKeyed>(GetParameter(0),
408 IfBuilder checker(this);
413 HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
416 allocation_site, static_cast<HValue*>(NULL), access);
419 int object_size = size;
420 if (FLAG_allocation_site_pretenuring) {
424 HValue* boilerplate_map = Add<HLoadNamedField>(
426 HObjectAccess::ForMap());
427 HValue* boilerplate_size = Add<HLoadNamedField>(
429 HObjectAccess::ForMapInstanceSize());
437 HInstruction* object = Add<HAllocate>(size_in_bytes, HType::JSObject(),
441 HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(i);
442 Add<HStoreNamedField>(
443 object, access, Add<HLoadNamedField>(
444 boilerplate, static_cast<HValue*>(NULL), access));
447 DCHECK(FLAG_allocation_site_pretenuring || (size == object_size));
448 if (FLAG_allocation_site_pretenuring) {
449 BuildCreateAllocationMemento(
450 object, Add<HConstant>(object_size), allocation_site);
453 environment()->Push(object);
454 checker.ElseDeopt("Uninitialized boilerplate in fast clone");
457 return environment()->Pop();
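// FastCloneShallowObject: the boilerplate's fields are copied one offset at a time into a fresh allocation; with allocation-site pretenuring enabled an allocation memento is appended.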
473 Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
474 AddStoreMapConstant(object, allocation_site_map);
478 Add<HStoreNamedField>(object,
479 HObjectAccess::ForAllocationSiteOffset(
481 initial_elements_kind);
484 Add<HStoreNamedField>(object,
485 HObjectAccess::ForAllocationSiteOffset(
487 graph()->GetConstant0());
490 Add<HStoreNamedField>(object,
491 HObjectAccess::ForAllocationSiteOffset(
493 graph()->GetConstant0());
496 Add<HStoreNamedField>(object,
497 HObjectAccess::ForAllocationSiteOffset(
499 graph()->GetConstant0());
502 HConstant* empty_fixed_array =
503 Add<HConstant>(isolate()->factory()->empty_fixed_array());
504 Add<HStoreNamedField>(
506 HObjectAccess::ForAllocationSiteOffset(
511 HValue* site_list = Add<HConstant>(
512 ExternalReference::allocation_sites_list_address(isolate()));
513 HValue* site = Add<HLoadNamedField>(
515 HObjectAccess::ForAllocationSiteList());
521 Add<HStoreNamedField>(
525 Add<HStoreNamedField>(site_list, HObjectAccess::ForAllocationSiteList(),
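// CreateAllocationSite: the fresh site gets its map and an initial elements kind, its remaining fields are zero-initialized, and it is linked onto the isolate's allocation-sites list.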
543 HInstruction* load = BuildUncheckedMonomorphicElementAccess(
546 casted_stub()->is_js_array(), casted_stub()->elements_kind(), LOAD,
558 HValue* object, FieldIndex index) {
562 int offset = index.offset();
563 HObjectAccess access = index.is_inobject()
564 ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
565 : HObjectAccess::ForBackingStoreOffset(offset, representation);
566 if (index.is_double()) {
568 object = Add<HLoadNamedField>(
572 access = HObjectAccess::ForHeapNumberValue();
574 return Add<HLoadNamedField>(object, static_cast<HValue*>(NULL), access);
580 return BuildLoadNamedField(GetParameter(0), casted_stub()->index());
592 HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset(
595 Add<HLoadNamedField>(map, static_cast<HValue*>(NULL), descriptors_access);
596 HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset(
598 return Add<HLoadNamedField>(descriptors, static_cast<HValue*>(NULL),
611 Add<HCheckMaps>(backing_store, isolate()->factory()->fixed_array_map());
612 HValue* backing_store_length =
613 Add<HLoadNamedField>(backing_store, static_cast<HValue*>(NULL),
614 HObjectAccess::ForFixedArrayLength());
615 IfBuilder in_unmapped_range(this);
618 in_unmapped_range.Then();
620 result = Add<HLoadKeyed>(backing_store, key, static_cast<HValue*>(NULL),
623 in_unmapped_range.ElseDeopt("Outside of range");
624 in_unmapped_range.End();
661 IfBuilder positive_smi(this);
664 positive_smi.ThenDeopt("key is negative");
667 HValue* constant_two = Add<HConstant>(2);
670 Add<HLoadNamedField>(elements, static_cast<HValue*>(NULL),
671 HObjectAccess::ForFixedArrayLength());
672 HValue* adjusted_length = AddUncasted<HSub>(elements_length, constant_two);
673 IfBuilder in_range(this);
677 HValue* index = AddUncasted<HAdd>(key, constant_two);
679 Add<HLoadKeyed>(elements, index, static_cast<HValue*>(NULL),
682 IfBuilder is_valid(this);
684 graph()->GetConstantHole());
691 Add<HLoadKeyed>(elements, graph()->GetConstant0(),
695 Add<HLoadKeyed>(the_context, mapped_index, static_cast<HValue*>(NULL),
697 environment()->Push(result);
701 HValue* result = UnmappedCase(elements, key);
702 environment()->Push(result);
708 HValue* result = UnmappedCase(elements, key);
709 environment()->Push(result);
713 return environment()->Pop();
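// KeyedLoadSloppyArguments: keys inside the mapped parameter range are loaded through the stored context; everything else goes through UnmappedCase on the arguments backing store, which deopts on out-of-range indices.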
726 int offset = index.offset();
727 HObjectAccess access =
729 ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
730 : HObjectAccess::ForBackingStoreOffset(offset, representation);
734 object = Add<HLoadNamedField>(
738 access = HObjectAccess::ForHeapNumberValue();
749 BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(),
750 casted_stub()->representation());
751 return GetParameter(2);
760 HValue* string = BuildLoadNamedField(GetParameter(0),
762 return BuildLoadNamedField(string,
774 BuildUncheckedMonomorphicElementAccess(
779 casted_stub()->store_mode());
781 return GetParameter(2);
792 info()->MarkAsSavesCallerDoubles();
794 BuildTransitionElementsKind(GetParameter(0),
796 casted_stub()->from_kind(),
797 casted_stub()->to_kind(),
798 casted_stub()->is_js_array());
800 return GetParameter(0);
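// TransitionElementsKind: the receiver's elements are rewritten from the stub's from_kind to its to_kind and the receiver itself is returned.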
814 JSArrayBuilder array_builder(this, kind, alloc_site, constructor,
817 switch (argument_class) {
822 result = array_builder.AllocateEmptyArray();
840 JSArrayBuilder array_builder(this, kind, constructor);
843 switch (argument_class) {
848 result = array_builder.AllocateEmptyArray();
862 JSArrayBuilder* array_builder) {
867 HInstruction* elements = Add<HArgumentsElements>(false);
869 elements, constant_one, constant_zero);
882 HConstant* max_alloc_length =
884 HValue* checked_length = Add<HBoundsCheck>(length, max_alloc_length);
895 ? JSArrayBuilder::FILL_WITH_HOLE
896 : JSArrayBuilder::DONT_FILL_WITH_HOLE;
897 HValue* new_object = array_builder->AllocateArray(checked_length,
901 HValue* elements = array_builder->GetElementsLocation();
905 LoopBuilder builder(this,
907 LoopBuilder::kPostIncrement);
909 HValue* key = builder.BeginBody(start, checked_length, Token::LT);
910 HInstruction* argument_elements = Add<HArgumentsElements>(false);
912 argument_elements, checked_length, key);
914 Add<HStoreKeyed>(elements, key, argument, kind);
924 return BuildArrayConstructor(kind, override_mode, NONE);
938 return BuildArrayConstructor(kind, override_mode, SINGLE);
951 return BuildArrayConstructor(kind, override_mode, MULTIPLE);
964 return BuildInternalArrayConstructor(kind, NONE);
977 return BuildInternalArrayConstructor(kind, SINGLE);
990 return BuildInternalArrayConstructor(kind, MULTIPLE);
1001 Isolate* isolate = graph()->isolate();
1003 HIfContinuation continuation;
1006 BuildCompareNil(GetParameter(0), type, &continuation);
1007 IfBuilder if_nil(this, &continuation);
1009 if (continuation.IsFalseReachable()) {
1011 if_nil.Return(graph()->GetConstant0());
1014 return continuation.IsTrueReachable()
1015 ? graph()->GetConstant1()
1016 : graph()->GetConstantUndefined();
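// CompareNil: a nil-comparison continuation is built for the parameter; a reachable false branch returns the constant 0, a reachable true branch returns the constant 1, and an unreachable true branch yields undefined.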
1027 BinaryOpICState state = casted_stub()->state();
1032 Type* left_type = state.GetLeftType(zone());
1033 Type* right_type = state.GetRightType(zone());
1034 Type* result_type = state.GetResultType(zone());
1037 (state.HasSideEffects() || !result_type->Is(Type::None())));
1042 (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
1043 !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
1046 if (left_type->Maybe(Type::String())) {
1047 IfBuilder if_leftisstring(this);
1048 if_leftisstring.If<HIsStringAndBranch>(left);
1049 if_leftisstring.Then();
1051 Push(BuildBinaryOperation(
1052 state.op(), left, right,
1053 Type::String(zone()), right_type,
1054 result_type, state.fixed_right_arg(),
1057 if_leftisstring.Else();
1059 Push(BuildBinaryOperation(
1060 state.op(), left, right,
1061 left_type, right_type, result_type,
1062 state.fixed_right_arg(), allocation_mode));
1064 if_leftisstring.End();
1067 IfBuilder if_rightisstring(this);
1068 if_rightisstring.If<HIsStringAndBranch>(right);
1069 if_rightisstring.Then();
1071 Push(BuildBinaryOperation(
1072 state.op(), left, right,
1073 left_type, Type::String(zone()),
1074 result_type, state.fixed_right_arg(),
1077 if_rightisstring.Else();
1079 Push(BuildBinaryOperation(
1080 state.op(), left, right,
1081 left_type, right_type, result_type,
1082 state.fixed_right_arg(), allocation_mode));
1084 if_rightisstring.End();
1088 result = BuildBinaryOperation(
1089 state.op(), left, right,
1090 left_type, right_type, result_type,
1091 state.fixed_right_arg(), allocation_mode);
1096 if (!state.HasSideEffects()) {
1097 result = EnforceNumberType(result, result_type);
1102 if (state.CanReuseDoubleBox()) {
1104 IfBuilder if_heap_number(this);
1106 if_heap_number.Then();
1107 Add<HStoreNamedField>(operand, HObjectAccess::ForHeapNumberValue(), result);
1109 if_heap_number.Else();
1111 if_heap_number.End();
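// BinaryOpIC: operands that may (but need not) be strings get dedicated string/non-string branches; otherwise the generic BuildBinaryOperation is emitted, the result is number-typed when the operation has no side effects, and a reusable heap-number box is written in place when possible.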
1126 BinaryOpICState state = casted_stub()->state();
1128 HValue* allocation_site = GetParameter(
1129 BinaryOpWithAllocationSiteStub::kAllocationSite);
1130 HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft);
1131 HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight);
1133 Type* left_type = state.GetLeftType(zone());
1134 Type* right_type = state.GetRightType(zone());
1135 Type* result_type = state.GetResultType(zone());
1136 HAllocationMode allocation_mode(allocation_site);
1138 return BuildBinaryOperation(state.op(), left, right,
1139 left_type, right_type, result_type,
1140 state.fixed_right_arg(), allocation_mode);
1144 Handle<Code> BinaryOpWithAllocationSiteStub::GenerateCode() {
1151 StringAddStub* stub = casted_stub();
1155 HValue* left = GetParameter(StringAddStub::kLeft);
1156 HValue* right = GetParameter(StringAddStub::kRight);
1160 left = BuildCheckString(left);
1163 right = BuildCheckString(right);
1166 return BuildStringAdd(left, right, HAllocationMode(pretenure_flag));
1181 switch (stub->mode()) {
1183 true_value = graph()->GetConstant1();
1184 false_value = graph()->GetConstant0();
1187 true_value = graph()->GetConstantTrue();
1188 false_value = graph()->GetConstantFalse();
1191 true_value = graph()->GetConstantFalse();
1192 false_value = graph()->GetConstantTrue();
1196 IfBuilder if_true(this);
1197 if_true.If<HBranch>(GetParameter(0), stub->types());
1199 if_true.Return(true_value);
1216 isolate()->factory()->NewPropertyCell(placeholder_value);
1223 Handle<Map> placeholder_map = isolate()->factory()->meta_map();
1224 HValue* global = Add<HConstant>(
1226 Add<HCheckMaps>(global, placeholder_map);
1229 HValue* cell = Add<HConstant>(placeholder_cell);
1230 HObjectAccess access(HObjectAccess::ForCellPayload(isolate()));
1231 HValue* cell_contents = Add<HLoadNamedField>(
1235 IfBuilder builder(this);
1238 builder.ElseDeopt("Unexpected cell contents in constant global store");
1244 IfBuilder builder(this);
1245 HValue* hole_value = graph()->GetConstantHole();
1248 builder.Deopt("Unexpected cell contents in global store");
1250 Add<HStoreNamedField>(cell, access, value);
1270 if (FLAG_trace_elements_transitions) {
1274 info()->MarkAsSavesCallerDoubles();
1276 BuildTransitionElementsKind(object, map,
1277 casted_stub()->from_kind(),
1278 casted_stub()->to_kind(),
1279 casted_stub()->is_jsarray());
1281 BuildUncheckedMonomorphicElementAccess(object, key, value,
1282 casted_stub()->is_jsarray(),
1283 casted_stub()->to_kind(),
1285 casted_stub()->store_mode());
1324 Add<HStoreCodeEntry>(js_function, code_object);
1325 Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
1329 HValue* optimized_functions_list = Add<HLoadNamedField>(
1332 Add<HStoreNamedField>(js_function,
1333 HObjectAccess::ForNextFunctionLinkPointer(),
1334 optimized_functions_list);
1337 Add<HStoreNamedField>(native_context,
1347 Add<HStoreNamedField>(js_function,
1348 HObjectAccess::ForNextFunctionLinkPointer(),
1349 graph()->GetConstantUndefined());
1350 HValue* code_object = Add<HLoadNamedField>(
1351 shared_info, static_cast<HValue*>(NULL), HObjectAccess::ForCodeOffset());
1352 Add<HStoreCodeEntry>(js_function, code_object);
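// Code installation: the optimized path (lines 1324-1337) also links the function into the native context's optimized-functions list, while the plain path (1347-1352) clears the next-function link and installs the shared info's code entry.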
1363 HValue* field_slot = iterator;
1364 if (field_offset > 0) {
1365 HValue* field_offset_value = Add<HConstant>(field_offset);
1366 field_slot = AddUncasted<HAdd>(iterator, field_offset_value);
1368 HInstruction* field_entry = Add<HLoadKeyed>(optimized_map, field_slot,
1377 HValue* native_context) {
1379 IfBuilder is_optimized(this);
1382 HObjectAccess::ForOptimizedCodeMap());
1383 HValue* null_constant = Add<HConstant>(0);
1385 is_optimized.Then();
1389 is_optimized.Else();
1395 HValue* first_entry_index =
1397 IfBuilder already_in(this);
1399 optimized_map, first_entry_index);
1408 HValue* shared_function_entry_length =
1410 LoopBuilder loop_builder(this,
1412 LoopBuilder::kPostDecrement,
1413 shared_function_entry_length);
1414 HValue* array_length = Add<HLoadNamedField>(
1416 HObjectAccess::ForFixedArrayLength());
1417 HValue* start_pos = AddUncasted<HSub>(array_length,
1418 shared_function_entry_length);
1419 HValue* slot_iterator = loop_builder.BeginBody(start_pos,
1423 IfBuilder done_check(this);
1429 loop_builder.Break();
1431 loop_builder.EndBody();
1435 IfBuilder no_optimized_code_check(this);
1437 slot_iterator, first_entry_index, Token::EQ);
1438 no_optimized_code_check.Then();
1450 Counters* counters = isolate()->counters();
1451 Factory* factory = isolate()->factory();
1453 Add<HConstant>(factory->empty_fixed_array());
1454 HValue* shared_info = GetParameter(0);
1456 AddIncrementCounter(counters->fast_new_closure_total());
1464 casted_stub()->kind());
1468 HInstruction* native_context = BuildGetNativeContext();
1471 HObjectAccess::ForContextSlot(map_index));
1472 Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value);
1475 Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(),
1477 Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(),
1479 Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
1481 Add<HStoreNamedField>(js_function, HObjectAccess::ForPrototypeOrInitialMap(),
1482 graph()->GetConstantHole());
1483 Add<HStoreNamedField>(js_function,
1484 HObjectAccess::ForSharedFunctionInfoPointer(),
1486 Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(),
1492 if (FLAG_cache_optimized_code) {
1493 BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context);
1495 BuildInstallCode(js_function, shared_info);
1512 HParameter* function = GetParameter(FastNewContextStub::kFunction);
1515 HAllocate* function_context = Add<HAllocate>(
1520 AddStoreMapConstant(function_context,
1521 isolate()->factory()->function_context_map());
1522 Add<HStoreNamedField>(function_context,
1523 HObjectAccess::ForFixedArrayLength(),
1524 Add<HConstant>(length));
1527 Add<HStoreNamedField>(function_context,
1530 Add<HStoreNamedField>(function_context,
1533 Add<HStoreNamedField>(function_context,
1535 graph()->GetConstant0());
1538 HValue* global_object = Add<HLoadNamedField>(
1541 Add<HStoreNamedField>(function_context,
1542 HObjectAccess::ForContextSlot(
1548 Add<HStoreNamedField>(function_context,
1549 HObjectAccess::ForContextSlot(i),
1550 graph()->GetConstantUndefined());
1553 return function_context;
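// FastNewContext: a function context is allocated, its map and length are set, the fixed header slots (including the global object) are filled in, and every remaining context slot is initialized to undefined.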
1567 Add<HCheckSmi>(key);
1569 HValue* elements = AddLoadElements(receiver);
1571 HValue* hash = BuildElementIndexHash(key);
1573 return BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash);
1585 HValue* length = GetParameter(RegExpConstructResultStub::kLength);
1586 HValue* index = GetParameter(RegExpConstructResultStub::kIndex);
1587 HValue* input = GetParameter(RegExpConstructResultStub::kInput);
1589 info()->MarkMustNotHaveEagerFrame();
1591 return BuildRegExpConstructResult(length, index, input);
1595 Handle<Code> RegExpConstructResultStub::GenerateCode() {
1610 void BuildElementsKindLimitCheck(HGraphBuilder::IfBuilder* if_builder,
1614 void BuildFastElementLoad(HGraphBuilder::IfBuilder* if_builder,
1621 void BuildExternalElementLoad(HGraphBuilder::IfBuilder* if_builder,
1635 HGraphBuilder::IfBuilder* if_builder, HValue* bit_field2,
1638 HValue* kind_limit = Add<HConstant>(
1647 HGraphBuilder::IfBuilder* if_builder, HValue* receiver, HValue* key,
1651 BuildElementsKindLimitCheck(if_builder, bit_field2, kind);
1653 IfBuilder js_array_check(this);
1656 js_array_check.Then();
1661 js_array_check.Else();
1666 js_array_check.End();
1671 HGraphBuilder::IfBuilder* if_builder, HValue* receiver, HValue* key,
1675 BuildElementsKindLimitCheck(if_builder, bit_field2, kind);
1694 IfBuilder index_name_split(this, &index_name_split_continuation);
1695 index_name_split.Then();
1705 HObjectAccess::ForMap());
1709 HObjectAccess::ForMapInstanceType());
1711 HValue* bit_field2 = Add<HLoadNamedField>(map,
1713 HObjectAccess::ForMapBitField2());
1715 IfBuilder kind_if(this);
1716 BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1721 BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1738 BuildElementsKindLimitCheck(&kind_if, bit_field2,
1741 Add<HDeoptimize>("non-strict elements in KeyedLoadGenericStub",
1746 BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1750 BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1754 BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1758 BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1762 BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1766 BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1770 BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1774 BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1778 BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
1781 kind_if.ElseDeopt("ElementsKind unhandled in KeyedLoadGenericStub");
1785 index_name_split.Else();
1794 HIfContinuation continuation;
1796 IfBuilder if_dict_properties(this, &continuation);
1797 if_dict_properties.Then();
1802 HValue* properties = Add<HLoadNamedField>(
1804 HObjectAccess::ForPropertiesPointer());
1807 Add<HLoadNamedField>(key, static_cast<HValue*>(NULL),
1808 HObjectAccess::ForNameHashField());
1818 if_dict_properties.Else();
1823 ExternalReference cache_keys_ref =
1824 ExternalReference::keyed_lookup_cache_keys(isolate());
1825 HValue* cache_keys = Add<HConstant>(cache_keys_ref);
1828 HObjectAccess::ForMap());
1829 HValue* base_index = AddUncasted<HMul>(hash, Add<HConstant>(2));
1832 HIfContinuation inline_or_runtime_continuation(
1838 IfBuilder* lookup_if = &lookup_ifs[probe];
1839 lookup_if->Initialize(this);
1841 HValue* map_index = AddUncasted<HAdd>(
1845 HValue* key_index = AddUncasted<HAdd>(
1850 Add<HLoadKeyed>(cache_keys, map_index, static_cast<HValue*>(NULL),
1855 Add<HLoadKeyed>(cache_keys, key_index, static_cast<HValue*>(NULL),
1860 ExternalReference cache_field_offsets_ref =
1861 ExternalReference::keyed_lookup_cache_field_offsets(isolate());
1862 HValue* cache_field_offsets =
1863 Add<HConstant>(cache_field_offsets_ref);
1864 HValue* index = AddUncasted<HAdd>(hash, Add<HConstant>(probe));
1866 HValue* property_index = Add<HLoadKeyed>(
1867 cache_field_offsets, index, static_cast<HValue*>(NULL),
1869 Push(property_index);
1874 lookup_ifs[i].JoinContinuation(&inline_or_runtime_continuation);
1878 IfBuilder inline_or_runtime(this, &inline_or_runtime_continuation);
1879 inline_or_runtime.Then();
1882 Push(Add<HLoadFieldByIndex>(receiver, Pop()));
1884 inline_or_runtime.Else();
1887 Add<HPushArguments>(receiver, key);
1888 Push(Add<HCallRuntime>(
1889 isolate()->factory()->empty_string(),
1892 inline_or_runtime.End();
1894 if_dict_properties.End();
1896 index_name_split.End();
1945 Add<HTailCallThroughMegamorphicCache>(receiver, name, flags);
1948 return graph()->GetConstant0();
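// KeyedLoadGeneric: index keys are dispatched on the receiver's elements kind (fast, dictionary and the external kinds); name keys probe dictionary properties or the keyed lookup cache before falling back to the runtime. The megamorphic load stub at the end tail-calls through the megamorphic stub cache.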