#if V8_TARGET_ARCH_ARM64

#define __ ACCESS_MASM(masm_)
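// JumpPatchSite records the location of an inlined smi check so that the IC
// system can rewrite it later (see PatchInlinedSmiCode in ic-arm64.cc).
// Before patching, the check is an always-taken or never-taken branch on xzr;
// patching turns it into a real test on the recorded register.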
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm), reg_(NoReg) {
    info_emitted_ = false;
    if (patch_site_.is_bound()) {
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    // The check must be exactly one instruction so it can be patched later.
    InstructionAccurateScope scope(masm_, 1);
    __ bind(&patch_site_);
    __ tbz(xzr, 0, target);  // Always taken before patched.
  void EmitJumpIfSmi(Register reg, Label* target) {
    InstructionAccurateScope scope(masm_, 1);
    __ bind(&patch_site_);
    __ tbnz(xzr, 0, target);  // Never taken before patched.
  void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) {
    UseScratchRegisterScope temps(masm_);
    Register temp = temps.AcquireX();
    // The smi tag bit of the OR is set iff either operand is not a smi.
    __ Orr(temp, reg1, reg2);
    EmitJumpIfNotSmi(temp, target);
  void EmitPatchInfo() {
    Assembler::BlockPoolsScope scope(masm_);
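    // Pools are blocked here so that the offset between the patch site and
    // the emitted patch info stays fixed.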
 private:
  MacroAssembler* masm_;
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ = isolate()->factory()->NewFixedArray(
      function()->handler_count(), TENURED);
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
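  // The profiling counter cell starts at FLAG_interrupt_budget; back edges
  // and returns decrement it, and the InterruptCheck builtin runs once it
  // goes negative.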
  Comment cmnt(masm_, "[ Function compiled by full code generator");
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ Debug("stop-at", __LINE__, BREAK);
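  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).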
  if (info->strict_mode() == SLOPPY && !info->is_native()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kXRegSize;
    __ Peek(x10, receiver_offset);
    __ JumpIfNotRoot(x10, Heap::kUndefinedValueRootIndex, &ok);
    __ Poke(x10, receiver_offset);
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  __ Prologue(info->IsCodePreAgingActive());
  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!info->function()->is_generator() || locals_count == 0);
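    // Locals are initialized to undefined. For large counts the pushes are
    // batched, kMaxPushes slots per loop iteration plus the remainder, which
    // bounds the generated code size.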
    if (locals_count > 0) {
      if (locals_count >= 128) {
        // Emit a stack check before pushing a large number of locals.
        __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
      }
      __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
      if (FLAG_optimize_for_size) {
        __ PushMultipleTimes(x10, locals_count);
      } else {
        const int kMaxPushes = 32;
        if (locals_count >= kMaxPushes) {
          int loop_iterations = locals_count / kMaxPushes;
          __ Mov(x3, loop_iterations);
          Label loop_header;
          __ Bind(&loop_header);
          __ PushMultipleTimes(x10, kMaxPushes);
          __ Subs(x3, x3, 1);
          __ B(ne, &loop_header);
        }
        int remaining = locals_count % kMaxPushes;
        __ PushMultipleTimes(x10, remaining);
  bool function_in_register_x1 = true;
  if (heap_slots > 0) {
    // Argument to NewContext is the function, which is still in x1.
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ Mov(x10, Operand(info->scope()->GetScopeInfo()));
      __ CallRuntime(Runtime::kNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), heap_slots);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    // The calls above clobber x1.
    function_in_register_x1 = false;
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        if (need_write_barrier) {
          __ RecordWriteContextSlot(
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, &done);
          __ Abort(kExpectedNewSpaceObject);
  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_x1) {
    int num_parameters = info->scope()->num_parameters();
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);
    SetVar(arguments, x0, x1, x2);
  __ CallRuntime(Runtime::kTraceEnter, 0);

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);
  } else {
    { Comment cmnt(masm_, "[ Declarations");
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VisitVariableDeclaration(function);
    { Comment cmnt(masm_, "[ Stack check");
      __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
      PredictableCodeSizeScope predictable(masm_,
                                           Assembler::kCallSizeWithRelocation);
    { Comment cmnt(masm_, "[ Body");
      VisitStatements(function()->body());
  { Comment cmnt(masm_, "[ return <undefined>;");
    // Emit a 'return undefined' in case control fell off the end of the body.
    __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
  int reset_value = FLAG_interrupt_budget;
  if (info_->is_debug()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockPoolsScope block_const_pool(masm_);

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
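  // The profiling counter is decremented by a weight proportional to the
  // code size executed per iteration, so loops with larger bodies exhaust
  // the interrupt budget in fewer iterations.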
    Comment cmnt(masm_, "[ Return sequence");
    __ CallRuntime(Runtime::kTraceExit, 1);
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    weight = Min(kMaxBackEdgeWeight,
                 Max(1, distance / kCodeSizeMultiplier));
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
  const Register& current_sp = __ StackPointer();
  // Nothing ensures 16 bytes alignment here.
  DCHECK(!current_sp.Is(csp));
  __ mov(current_sp, fp);
  __ add(current_sp, current_sp, ip0);
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());

  DCHECK(var->IsStackAllocated() || var->IsContextSlot());

  DCHECK(var->IsStackAllocated() || var->IsContextSlot());

  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ B(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ B(true_label_);
  } else {
    codegen()->DoTest(this);
void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ B(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ B(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ B(false_label_);
    } else {
      if (true_label_ != fall_through_) __ B(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ B(false_label_);
    } else {
      if (true_label_ != fall_through_) __ B(true_label_);
    }
  } else {
    codegen()->DoTest(this);
void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {

void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count, Register reg) const {

void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  if (count > 1) __ Drop(count - 1);

void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ Bind(materialize_true);

void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  __ Bind(materialize_true);
  __ Bind(materialize_false);

void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  __ Bind(materialize_true);
  __ LoadRoot(x10, Heap::kTrueValueRootIndex);
  __ Bind(materialize_false);
  __ LoadRoot(x10, Heap::kFalseValueRootIndex);

void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(x10, value_root_index);
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) {
  } else {
    if (false_label_ != fall_through_) {
void FullCodeGenerator::DoTest(Expression* condition, Label* if_true,
                               Label* if_false, Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
void FullCodeGenerator::Split(Condition cond, Label* if_true,
                              Label* if_false, Label* fall_through) {
  if (if_false == fall_through) {
    __ B(cond, if_true);
  } else if (if_true == fall_through) {
    DCHECK(if_false != fall_through);
  if (var->IsParameter()) {

  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);

  __ Ldr(dest, location);
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  __ Str(src, location);
  // Emit a write barrier if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0, location.offset(), src, scratch1,
                              kLRHasBeenSaved, kDontSaveFPRegs);
                                                  bool should_normalize,
  if (should_normalize) {

  if (should_normalize) {
    __ CompareRoot(x0, Heap::kTrueValueRootIndex);
    __ CompareRoot(x1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(x1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);

      Comment cmnt(masm_, "[ VariableDeclaration");
      __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ Mov(x2, Operand(variable->name()));
      __ LoadRoot(x0, Heap::kTheHoleValueRootIndex);
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack overflow exception.
      if (function.is_null()) return SetStackOverflow();
      Comment cmnt(masm_, "[ Function Declaration");

      Comment cmnt(masm_, "[ Function Declaration");
      __ RecordWriteContextSlot(cp,

      Comment cmnt(masm_, "[ Function Declaration");
      __ Mov(x2, Operand(variable->name()));
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  DCHECK(variable->interface()->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  __ RecordWriteContextSlot(cp,

  // Traverse the body.
  Visit(declaration->module());
void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
      Comment cmnt(masm_, "[ ImportDeclaration");
void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {

  Register flags = xzr;
  __ CallRuntime(Runtime::kDeclareGlobals, 3);

  __ CallRuntime(Runtime::kDeclareModules, 1);
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  ASM_LOCATION("FullCodeGenerator::VisitSwitchStatement");
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();
    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }
    Comment cmnt(masm_, "[ Case comparison");
    __ Bind(&next_test);

    // Perform the comparison as if via '==='.
    JumpPatchSite patch_site(masm_);
    Label slow_case;
    patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
    __ B(ne, &next_test);
    __ B(clause->body_target());
    __ Bind(&slow_case);

    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();
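    // While both operands stay smis, the comparison above runs inline; the
    // recorded patch site lets the CompareIC rewrite the smi check once it
    // observes other operand types.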
    __ JumpIfNotRoot(x0, Heap::kTrueValueRootIndex, &next_test);
    __ B(clause->body_target());
    __ Cbnz(x0, &next_test);
    __ B(clause->body_target());

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ Bind(&next_test);
  if (default_clause == NULL) {
    __ B(nested_statement.break_label());
  } else {
    __ B(default_clause->body_target());
  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ Bind(clause->body_target());
    VisitStatements(clause->statements());
  }

  __ Bind(nested_statement.break_label());
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  ASM_LOCATION("FullCodeGenerator::VisitForInStatement");
  Comment cmnt(masm_, "[ ForInStatement");
  int slot = stmt->ForInFeedbackSlot();

  ForIn loop_statement(this, stmt);
  // If the object is null or undefined, skip over the loop (see ECMA-262,
  // section 12.6.4).
  __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, &exit);
  Register null_value = x15;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ Cmp(x0, null_value);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(x0, &convert);
  __ Bind(&done_convert);
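  // Fast path: if the receiver (and its prototype chain) has a valid enum
  // cache, we can iterate the cached property names without calling into the
  // runtime; CheckEnumCache jumps to &call_runtime otherwise.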
  __ CheckEnumCache(x0, null_value, x10, x11, x12, x13, &call_runtime);
  __ Bind(&call_runtime);
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  Label fixed_array, no_descriptors;
  __ JumpIfNotRoot(x2, Heap::kMetaMapRootIndex, &fixed_array);

  // We got a map in register x0. Get the enumeration cache from it.
  __ Bind(&use_cache);
  __ EnumLengthUntagged(x1, x0);
  __ Cbz(x1, &no_descriptors);
  __ LoadInstanceDescriptors(x0, x2);
  // Stack layout for the loop: map, enumeration cache, cache length, and a
  // zero index.
  __ Push(x0, x2, x1, xzr);
  __ Bind(&no_descriptors);
  __ Bind(&fixed_array);
  __ Push(x1, x0, x2, xzr);

  // Load the current count to x0 and the length to x1.
  __ PeekPair(x0, x1, 0);
  __ B(hs, loop_statement.break_label());

  __ B(eq, &update_each);

  __ Cbz(x2, &update_each);

  __ Cbz(x0, loop_statement.continue_label());
  __ Bind(&update_each);
  { EffectContext context(this);

  Visit(stmt->body());

  __ Bind(loop_statement.continue_label());

  __ Bind(loop_statement.break_label());
void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");

  Iteration loop_statement(this, stmt);

  __ Bind(loop_statement.continue_label());

  // result = iterator.next()
  Label result_not_done;
      loop_statement.break_label(),
  __ Bind(&result_not_done);

  Visit(stmt->body());

  __ B(loop_statement.continue_label());

  __ Bind(loop_statement.break_label());
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->strict_mode(), info->kind());
    __ Mov(x2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ Mov(x11, Operand(info));
    __ LoadRoot(x10, pretenure ? Heap::kTrueValueRootIndex
                               : Heap::kFalseValueRootIndex);
    __ CallRuntime(Runtime::kNewClosure, 3);
void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  Comment cmnt(masm_, "[ SuperReference ");

  Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());

  __ Mov(x10, Operand(isolate()->factory()->undefined_value()));
  __ CallRuntime(Runtime::kThrowNonMethodError, 0);
  Register current = cp;
  Register next = x10;
  Register temp = x11;

  if (s->num_heap_slots() > 0) {
    if (s->calls_sloppy_eval()) {
      // Check that extension is NULL.
      __ Cbnz(temp, slow);
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();

  if (s->is_eval_scope()) {
    __ Mov(next, current);
    __ JumpIfRoot(temp, Heap::kNativeContextMapRootIndex, &fast);
    __ Cbnz(temp, slow);
  if (FLAG_vector_ics) {
  DCHECK(var->IsContextSlot());
  Register next = x10;
  Register temp = x11;

  if (s->num_heap_slots() > 0) {
    if (s->calls_sloppy_eval()) {
      __ Cbnz(temp, slow);

  // Check that last extension is NULL.
  __ Cbnz(temp, slow);
  Variable* var = proxy->var();
  Variable* local = var->local_if_not_shadowed();
  if (local->mode() == LET || local->mode() == CONST ||
      local->mode() == CONST_LEGACY) {
    __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, done);
    if (local->mode() == CONST_LEGACY) {
      __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
    } else {  // LET || CONST
      __ Mov(x0, Operand(var->name()));
      __ CallRuntime(Runtime::kThrowReferenceError, 1);
  Variable* var = proxy->var();
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "Global variable");
      if (FLAG_vector_ics) {

      Comment cmnt(masm_, var->IsContextSlot()
                              ? "Context variable"
                              : "Stack variable");
      if (var->binding_needs_init()) {
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, &done);
          if (var->mode() == LET || var->mode() == CONST) {
            __ Mov(x0, Operand(var->name()));
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
          } else {
            __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
      Comment cmnt(masm_, "Lookup variable");
      __ Mov(x1, Operand(var->name()));
      __ CallRuntime(Runtime::kLoadLookupSlot, 2);
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");

  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ JumpIfNotRoot(x5, Heap::kUndefinedValueRootIndex, &materialized);

  __ Mov(x2, Operand(expr->pattern()));
  __ Mov(x1, Operand(expr->flags()));
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);

  __ Bind(&materialized);
  Label allocated, runtime_allocate;

  __ Bind(&runtime_allocate);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);

  __ Bind(&allocated);
  if (expression == NULL) {
    __ LoadRoot(x10, Heap::kNullValueRootIndex);
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  expr->BuildConstantProperties(isolate());
  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ Mov(x1, Operand(constant_properties));
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  int properties_count = constant_properties->length() / 2;
  const int max_cloned_properties =
      FastCloneShallowObjectStub::kMaximumClonedProperties;
  if (expr->may_store_doubles() || expr->depth() > 1 ||
      masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements ||
      properties_count > max_cloned_properties) {
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    FastCloneShallowObjectStub stub(isolate(), properties_count);
  bool result_saved = false;

  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
      case ObjectLiteral::Property::COMPUTED:
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kSetProperty, 4);
      case ObjectLiteral::Property::PROTOTYPE:
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kSetPrototype, 2);
      case ObjectLiteral::Property::GETTER:
        accessor_table.lookup(key)->second->getter = value;
        break;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);

  if (expr->has_function()) {
    __ CallRuntime(Runtime::kToFastProperties, 1);
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  expr->BuildConstantElements(isolate());
  int flags = (expr->depth() == 1) ? ArrayLiteral::kShallowElements
                                   : ArrayLiteral::kNoFlags;

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  Handle<FixedArray> constant_elements = expr->constant_elements();
  DCHECK_EQ(2, constant_elements->length());
  ElementsKind constant_elements_kind =
      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));
  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {

  __ Mov(x1, Operand(constant_elements));
  __ CallRuntime(Runtime::kCreateArrayLiteral, 4);

  FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
  bool result_saved = false;

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    if (!result_saved) {
      result_saved = true;
    }

    StoreArrayLiteralElementStub stub(isolate());
void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpression());

  Comment cmnt(masm_, "[ Assignment");

  enum LhsKind {
    VARIABLE,
    NAMED_PROPERTY,
    KEYED_PROPERTY,
    NAMED_SUPER_PROPERTY
  };
  Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())
        ? (property->IsSuperAccess() ? NAMED_SUPER_PROPERTY
                                     : NAMED_PROPERTY)
        : KEYED_PROPERTY;
  switch (assign_type) {
      if (expr->is_compound()) {

    case NAMED_SUPER_PROPERTY:
      if (expr->is_compound()) {
        const Register scratch = x10;

      if (expr->is_compound()) {

  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
        case NAMED_SUPER_PROPERTY:

    AccumulatorValueContext context(this);

  switch (assign_type) {
    case NAMED_SUPER_PROPERTY:
  Literal* key = prop->key()->AsLiteral();
  DCHECK(!prop->IsSuperAccess());
  if (FLAG_vector_ics) {

  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  DCHECK(prop->IsSuperAccess());
  __ CallRuntime(Runtime::kLoadFromSuper, 3);

  Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
  if (FLAG_vector_ics) {
  CallIC(ic, prop->PropertyFeedbackId());
void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              OverwriteMode mode,
                                              Expression* left_expr,
                                              Expression* right_expr) {
  Label done, both_smis, stub_call;

  Register left = x1;
  Register right = x0;
  Register result = x0;

  // Perform a combined smi check on both operands.
  __ Orr(x10, left, right);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(x10, &both_smis);

  __ Bind(&stub_call);

  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
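  // If the combined smi check fails, we land in &stub_call and the generic
  // BinaryOpIC handles the operation; the patch site lets the IC disable
  // the inline smi path once non-smi operands have been seen.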
  __ Bind(&both_smis);
  switch (op) {
    case Token::SAR:
      __ Asr(result, left, right);
      break;
    case Token::SHL:
      __ Lsl(result, left, right);
      break;
    case Token::SHR:
      __ Lsr(x10, left, right);
      break;
    case Token::ADD:
      __ Adds(x10, left, right);
      __ B(vs, &stub_call);  // On overflow, fall back to the stub.
      __ Mov(result, x10);
      break;
    case Token::SUB:
      __ Subs(x10, left, right);
      __ B(vs, &stub_call);
      __ Mov(result, x10);
      break;
    case Token::MUL: {
      Label not_minus_zero, done;
      __ Smulh(x10, left, right);
      __ Cbnz(x10, &not_minus_zero);
      // The product is zero: if the operand signs differ the result would
      // be -0, which cannot be represented as a smi, so bail to the stub.
      __ Eor(x11, left, right);
      __ Mov(result, x10);
      __ Bind(&not_minus_zero);
      __ B(lt, &stub_call);
      __ SmiTag(result, x10);
      break;
    }
    case Token::BIT_OR:
      __ Orr(result, left, right);
      break;
    case Token::BIT_AND:
      __ And(result, left, right);
      break;
    case Token::BIT_XOR:
      __ Eor(result, left, right);
      break;
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
  JumpPatchSite patch_site(masm_);  // Unbound, signals no inlined smi code.
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  DCHECK(expr->IsValidReferenceExpression());

  Property* prop = expr->AsProperty();
  assign_type = (prop->key()->IsPropertyName())
      ? NAMED_PROPERTY
      : KEYED_PROPERTY;

  switch (assign_type) {
      Variable* var = expr->AsVariableProxy()->var();
             Operand(prop->key()->AsLiteral()->value()));
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(
  ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment");
  if (var->IsUnallocated()) {
    // Global var, const, or let.
  } else if (op == Token::INIT_CONST_LEGACY) {
    // Const initializers need a write barrier.
    DCHECK(!var->IsParameter());  // No const parameters.
    if (var->IsLookupSlot()) {
      __ Mov(x1, Operand(var->name()));
      __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
    } else {
      DCHECK(var->IsStackLocal() || var->IsContextSlot());
      Label skip;
      __ Ldr(x10, location);
      __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &skip);
    }
  } else if (var->mode() == LET && op != Token::INIT_LET) {
    // Non-initializing assignment to let variable needs a write barrier.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label assign;
    __ Ldr(x10, location);
    __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &assign);
    __ Mov(x10, Operand(var->name()));
    __ CallRuntime(Runtime::kThrowReferenceError, 1);
  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
    if (var->IsLookupSlot()) {
      __ Mov(x11, Operand(var->name()));
      __ CallRuntime(Runtime::kStoreLookupSlot, 4);
    } else {
      DCHECK(var->IsStackAllocated() || var->IsContextSlot());
      if (FLAG_debug_code && op == Token::INIT_LET) {
        __ Ldr(x10, location);
        __ CompareRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Check(eq, kLetBindingReInitialization);
      }
  ASM_LOCATION("FullCodeGenerator::EmitNamedPropertyAssignment");

  Property* prop = expr->target()->AsProperty();
  DCHECK(prop->key()->IsLiteral());
         Operand(prop->key()->AsLiteral()->value()));
  Property* prop = expr->target()->AsProperty();
  Literal* key = prop->key()->AsLiteral();
  __ CallRuntime((strict_mode() == STRICT ? Runtime::kStoreToSuper_Strict
                                          : Runtime::kStoreToSuper_Sloppy),
                 4);
  ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment");

  Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
  CallIC(ic, expr->AssignmentFeedbackId());
void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    if (!expr->IsSuperAccess()) {
void FullCodeGenerator::CallIC(Handle<Code> code, TypeFeedbackId ast_id) {
  Expression* callee = expr->expression();

  // Get the target function.
  CallICState::CallType call_type =
      callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;

  if (call_type == CallICState::FUNCTION) {
    { StackValueContext context(this);
    // Push undefined as receiver. This is patched in the method prologue if
    // it is a sloppy mode method.
    __ Push(isolate()->factory()->undefined_value());
  } else {
    // Load the function from the receiver.
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());

  const Register scratch = x10;
  SuperReference* super_ref = callee->AsProperty()->obj()->AsSuperReference();
  __ CallRuntime(Runtime::kLoadFromSuper, 3);

  EmitCall(expr, CallICState::METHOD);
  Expression* callee = expr->expression();

  DCHECK(callee->IsProperty());

  EmitCall(expr, CallICState::METHOD);
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }
  Handle<Code> ic = CallIC::initialize_stub(
      isolate(), arg_count, call_type);

  context()->DropAndPlug(1, x0);
  ASM_LOCATION("FullCodeGenerator::EmitResolvePossiblyDirectEval");
  if (arg_count > 0) {
  } else {
    __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
  }
  __ Push(x10, x11, x12, x13);
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
void FullCodeGenerator::VisitCall(Call* expr) {
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function. Avoid early returns.
  expr->return_is_recorded_ = false;

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  Call::CallType call_type = expr->GetCallType(isolate());
  if (call_type == Call::POSSIBLY_EVAL_CALL) {
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();

    { PreservePositionScope pos_scope(masm()->positions_recorder());
      __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
      for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));
      }

    context()->DropAndPlug(1, x0);
else if (call_type == Call::GLOBAL_CALL) {
2577 }
else if (call_type == Call::LOOKUP_SLOT_CALL) {
2579 VariableProxy* proxy = callee->AsVariableProxy();
2582 { PreservePositionScope
scope(
masm()->positions_recorder());
2591 __ Mov(x10, Operand(proxy->name()));
2593 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
2599 if (done.is_linked()) {
2606 __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
2614 }
else if (call_type == Call::PROPERTY_CALL) {
2615 Property*
property = callee->AsProperty();
2616 bool is_named_call =
property->key()->IsPropertyName();
2618 if (property->IsSuperAccess() && is_named_call) {
2622 PreservePositionScope
scope(
masm()->positions_recorder());
2625 if (is_named_call) {
    DCHECK(call_type == Call::OTHER_CALL);

    { PreservePositionScope scope(masm()->positions_recorder());
    __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);

  DCHECK(expr->return_is_recorded_);
void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");

  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Load function and argument count into x1 and x0.
  __ Mov(x0, arg_count);

  if (FLAG_pretenuring_call_new) {
    DCHECK(expr->AllocationSiteFeedbackSlot() ==
           expr->CallNewFeedbackSlot() + 1);
void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ TestAndSplit(x0, kSmiTagMask, if_true, if_false, fall_through);
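  // TestAndSplit branches to if_true when all the tested bits are clear:
  // a value is a smi exactly when its tag bit (kSmiTagMask) is zero.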
  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ TestAndSplit(x0, kSmiTagMask | sign_mask, if_true, if_false,
                  fall_through);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
  Split(le, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  Split(ge, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitIsUndetectableObject");
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  Split(ne, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  Label materialize_true, materialize_false, skip_lookup;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);
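  // The object is safe for the default valueOf if none of its own
  // descriptors is a 'valueOf' accessor and its prototype is the unmodified
  // String prototype from the native context.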
  Register object = x0;
  __ AssertNotSmi(object);

  Register bitfield2 = x11;

  Register props = x12;
  Register props_map = x12;
  Register hash_table_map = x13;
  __ LoadRoot(hash_table_map, Heap::kHashTableMapRootIndex);
  __ Cmp(props_map, hash_table_map);

  Register descriptors = x12;
  Register descriptors_length = x13;
  __ NumberOfOwnDescriptors(descriptors_length, map);
  __ Cbz(descriptors_length, &done);

  __ LoadInstanceDescriptors(map, descriptors);

  Register descriptors_end = x14;
  __ Mul(descriptors_length, descriptors_length, x15);
  __ Add(descriptors, descriptors,
  __ Add(descriptors_end, descriptors,

  Register valueof_string = x1;
  __ Mov(valueof_string, Operand(isolate()->factory()->value_of_string()));
  __ Cmp(x15, valueof_string);
  __ Cmp(descriptors, descriptors_end);

  __ Bind(&skip_lookup);

  Register prototype = x1;
  Register global_idx = x2;
  Register native_context = x2;
  Register string_proto = x3;
  Register proto_map = x4;
  __ JumpIfSmi(prototype, if_false);
  __ Ldr(native_context,
  __ Ldr(string_proto,
  __ Cmp(proto_map, string_proto);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Split(vs, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Label check_frame_marker;
  __ B(ne, &check_frame_marker);

  __ Bind(&check_frame_marker);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  __ JumpIfSmi(x0, &null);

  __ B(eq, &function);

  __ B(eq, &function);

      &non_function_constructor);

  __ LoadRoot(x0, Heap::kFunction_stringRootIndex);

  __ Bind(&non_function_constructor);
  __ LoadRoot(x0, Heap::kObject_stringRootIndex);

  __ LoadRoot(x0, Heap::kNullValueRootIndex);
void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  SubStringStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 3);

void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  RegExpExecStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 4);

void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  __ JumpIfSmi(x0, &done);
void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));

  Label runtime, done, not_date_object;
  Register object = x0;
  Register result = x0;
  Register stamp_addr = x10;
  Register stamp_cache = x11;

  __ JumpIfSmi(object, &not_date_object);
  __ JumpIfNotObjectType(object, x10, x10, JS_DATE_TYPE, &not_date_object);

  if (index->value() == 0) {
    ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
    __ Cmp(stamp_addr, stamp_cache);
  __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);

  __ Bind(&not_date_object);
  __ CallRuntime(Runtime::kThrowNotDateError, 0);
void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();

  Register string = x0;
  Register index = x1;
  Register value = x2;
  Register scratch = x10;

  __ Pop(value, index);

  if (FLAG_debug_code) {
    __ AssertSmi(value, kNonSmiValue);
    __ AssertSmi(index, kNonSmiIndex);
    __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,

void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();

  Register string = x0;
  Register index = x1;
  Register value = x2;
  Register scratch = x10;

  __ Pop(value, index);

  if (FLAG_debug_code) {
    __ AssertSmi(value, kNonSmiValue);
    __ AssertSmi(index, kNonSmiIndex);
    __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
  // Load the arguments on the stack and call the MathPow stub.
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  __ JumpIfSmi(x1, &done);

  __ RecordWriteField(
void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();

  NumberToStringStub stub(isolate());

void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  Register result = x1;

  StringCharFromCodeGenerator generator(code, result);
  generator.GenerateFast(masm_);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);
void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  Register object = x1;
  Register index = x0;
  Register result = x3;

  Label need_conversion;
  Label index_out_of_range;

  StringCharCodeAtGenerator generator(object,
                                      &index_out_of_range,
  generator.GenerateFast(masm_);

  __ Bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);

  __ Bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);
void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  Register object = x1;
  Register index = x0;
  Register result = x0;

  Label need_conversion;
  Label index_out_of_range;

  StringCharAtGenerator generator(object,
                                  &index_out_of_range,
  generator.GenerateFast(masm_);

  __ Bind(&index_out_of_range);
  // When the index is out of range, return the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);

  __ Bind(&need_conversion);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);
void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();

void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();

  StringCompareStub stub(isolate());
void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() >= 2);

  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }

  Label runtime, done;
  // Check for non-function argument, including proxy.
  __ JumpIfSmi(x0, &runtime);

  ParameterCount count(arg_count);

  __ CallRuntime(Runtime::kCall, args->length());
void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  RegExpConstructResultStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 3);

void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->native_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort(kAttemptToUseUndefinedCache);
    __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);

  Register cache = x1;

  __ CmovX(x0, x3, eq);

  __ CallRuntime(Runtime::kGetFromCache, 2);
void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  __ AssertString(x0);

  __ IndexFromHash(x10, x0);
void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitFastOneByteArrayJoin");

  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  Register array = x0;
  Register result = x0;
  Register elements = x1;
  Register element = x2;
  Register separator = x3;
  Register array_length = x4;
  Register result_pos = x5;
  Register string_length = x10;
  Register elements_end = x11;
  Register string = x12;
  Register scratch1 = x13;
  Register scratch2 = x14;
  Register scratch3 = x7;
  Register separator_length = x15;

  Label bailout, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      empty_separator_loop, one_char_separator_loop,
      one_char_separator_loop_entry, long_separator_loop;
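  // Fast path for one-byte Array.prototype.join: sum the element lengths,
  // allocate the result string once, then copy elements and separators with
  // CopyBytes. Anything unexpected (smis, non-flat or two-byte strings,
  // length overflow) jumps to &bailout and falls back to the runtime.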
  __ JumpIfSmi(array, &bailout);

  __ CheckFastElements(map, scratch1, &bailout);

  __ Ldrsw(array_length,
  __ Cbnz(array_length, &non_trivial_array);
  __ LoadRoot(result, Heap::kempty_stringRootIndex);

  __ Bind(&non_trivial_array);

  __ Mov(string_length, 0);
  if (FLAG_debug_code) {
    __ Cmp(array_length, 0);
    __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
  }

  __ JumpIfSmi(string, &bailout);
  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
  __ Adds(string_length, string_length, scratch1);
  __ Cmp(element, elements_end);

  __ Cmp(array_length, 1);
  __ B(ne, &not_size_one_array);
  __ Bind(&not_size_one_array);

  __ JumpIfSmi(separator, &bailout);
  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);

  __ Ldrsw(separator_length,
  __ Sub(string_length, string_length, separator_length);
  __ Umaddl(string_length, array_length.W(), separator_length.W(),

  __ AllocateOneByteString(result, string_length, scratch1, scratch2, scratch3,

  __ Cmp(separator_length, 1);
  __ B(eq, &one_char_separator);
  __ B(gt, &long_separator);
  __ Bind(&empty_separator_loop);

  __ Ldrsw(string_length,
  __ CopyBytes(result_pos, string, string_length, scratch1);
  __ Cmp(element, elements_end);
  __ B(lt, &empty_separator_loop);
  __ Bind(&one_char_separator);

  __ B(&one_char_separator_loop_entry);

  __ Bind(&one_char_separator_loop);

  __ Bind(&one_char_separator_loop_entry);

  __ Ldrsw(string_length,
  __ CopyBytes(result_pos, string, string_length, scratch1);
  __ Cmp(element, elements_end);
  __ B(lt, &one_char_separator_loop);
  __ Bind(&long_separator_loop);

  __ Ldrsw(string_length,
  __ CopyBytes(result_pos, string, string_length, scratch1);

  __ Bind(&long_separator);

  __ Ldrsw(string_length,
  __ CopyBytes(result_pos, string, string_length, scratch1);
  __ Cmp(element, elements_end);
  __ B(lt, &long_separator_loop);

  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ Mov(x10, debug_is_active);
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  if (expr->function() != NULL &&
      expr->function()->intrinsic_type == Runtime::INLINE) {
    Comment cmnt(masm_, "[ InlineRuntimeCall");

  Comment cmnt(masm_, "[ CallRunTime");
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  if (expr->is_jsruntime()) {
    Handle<String> name = expr->name();
    if (FLAG_vector_ics) {

    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    context()->DropAndPlug(1, x0);
  } else {
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Call the C runtime function.
    __ CallRuntime(expr->function(), arg_count);
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
      } else if (proxy != NULL) {
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        Variable* var = proxy->var();
        if (var->IsUnallocated()) {
          __ Mov(x11, Operand(var->name()));
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ Mov(x2, Operand(var->name()));
          __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      context()->Plug(Heap::kUndefinedValueRootIndex);
    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");

      } else if (context()->IsTest()) {
        // The labels are swapped for the recursive call.
                        test->false_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());

        Label materialize_true, materialize_false, done;

        __ Bind(&materialize_true);

        __ Bind(&materialize_false);

        if (context()->IsStackValue()) {
    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
        __ CallRuntime(Runtime::kTypeof, 1);
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpression());

  Comment cmnt(masm_, "[ CountOperation");

  Property* prop = expr->expression()->AsProperty();

  if (prop->IsSuperAccess()) {
    VisitSuperReference(prop->obj()->AsSuperReference());

  DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
  AccumulatorValueContext context(this);

  if (expr->is_postfix() && !context()->IsEffect()) {
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  patch_site.EmitJumpIfNotSmi(x0, &slow);

  if (expr->is_postfix()) {
    switch (assign_type) {

  ToNumberStub convert_stub(isolate());
  __ CallStub(&convert_stub);

  if (expr->is_postfix()) {
    switch (assign_type) {

  __ Bind(&stub_call);
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);

             Operand(prop->key()->AsLiteral()->value()));
      if (expr->is_postfix()) {

      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
      CallIC(ic, expr->CountStoreFeedbackId());
      if (expr->is_postfix()) {
  VariableProxy* proxy = expr->AsVariableProxy();
  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "Global variable");
    if (FLAG_vector_ics) {
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    __ Mov(x0, Operand(proxy->name()));
    __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof");
  Comment cmnt(masm_, "[ EmitLiteralCompareTypeof");
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof number_string");
    __ JumpIfSmi(x0, if_true);
    __ CompareRoot(x0, Heap::kHeapNumberMapRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof string_string");
    __ JumpIfSmi(x0, if_false);
  } else if (String::Equals(check, factory->symbol_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof symbol_string");
    __ JumpIfSmi(x0, if_false);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof boolean_string");
    __ JumpIfRoot(x0, Heap::kTrueValueRootIndex, if_true);
    __ CompareRoot(x0, Heap::kFalseValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    ASM_LOCATION(
        "FullCodeGenerator::EmitLiteralCompareTypeof undefined_string");
    __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, if_true);
    __ JumpIfSmi(x0, if_false);
  } else if (String::Equals(check, factory->function_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof function_string");
    __ JumpIfSmi(x0, if_false);
  } else if (String::Equals(check, factory->object_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof object_string");
    __ JumpIfSmi(x0, if_false);
    __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
  } else {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof other");
    if (if_false != fall_through) __ B(if_false);
  }
  context()->Plug(if_true, if_false);
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");

  Label materialize_true;
  Label materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

      __ CompareRoot(x0, Heap::kTrueValueRootIndex);
      Split(eq, if_true, if_false, fall_through);
    case Token::INSTANCEOF: {
      __ CompareAndSplit(x0, 0, eq, if_true, if_false, fall_through);

      JumpPatchSite patch_site(masm_);
      patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
      __ Bind(&slow_case);

      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      __ CompareAndSplit(x0, 0, cond, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareNil");
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ CompareRoot(x0, nil_value);
    Split(eq, if_true, if_false, fall_through);
  } else {
    CallIC(ic, expr->CompareOperationFeedbackId());
    __ CompareAndSplit(x0, 0, ne, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");

  switch (expr->yield_kind()) {
    case Yield::kSuspend:

    case Yield::kInitial: {
      Label suspend, continuation, post_runtime, resume;

      __ Bind(&continuation);

      __ Cmp(__ StackPointer(), x1);
      __ B(eq, &post_runtime);
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);

      __ Bind(&post_runtime);
    case Yield::kFinal: {

    case Yield::kDelegating: {
      Label l_catch, l_try, l_suspend, l_continuation, l_resume;
      Label l_next, l_call, l_loop;

      __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);

      __ LoadRoot(load_name, Heap::kthrow_stringRootIndex);
      __ Push(load_name, x3, x0);

      __ PushTryHandler(StackHandler::CATCH, expr->index());
      __ Bind(&l_continuation);

      __ Bind(&l_suspend);
      const int generator_object_depth = kPointerSize + handler_size;
      __ Peek(x0, generator_object_depth);

      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);

      __ LoadRoot(load_name, Heap::knext_stringRootIndex);
      __ Push(load_name, x3, x0);
      if (FLAG_vector_ics) {
      Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();

      __ Move(load_receiver, x0);

      __ LoadRoot(load_name, Heap::kdone_stringRootIndex);
      if (FLAG_vector_ics) {

      __ Pop(load_receiver);
      __ LoadRoot(load_name, Heap::kvalue_stringRootIndex);
      if (FLAG_vector_ics) {

      context()->DropAndPlug(2, x0);
  ASM_LOCATION("FullCodeGenerator::EmitGeneratorResume");
  Register value_reg = x0;
  Register generator_object = x1;
  Register the_hole = x2;
  Register operand_stack_size = w3;
  Register function = x4;

  __ Pop(generator_object);

  Label wrong_state, closed_state, done;
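  // Resuming rebuilds the generator's interrupted frame: the function is
  // re-entered, holes are pushed for the frame slots, and control jumps to
  // the stored continuation offset. A non-empty operand stack or a closed
  // generator takes the slow paths below.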
  __ LoadRoot(the_hole, Heap::kTheHoleValueRootIndex);
  __ PushMultipleTimes(the_hole, w10);

  __ Bl(&resume_frame);

  __ Bind(&resume_frame);

  __ Ldr(operand_stack_size,

  __ Cbnz(operand_stack_size, &slow_resume);

  __ Add(x10, x10, x11);

  __ Bind(&slow_resume);

  __ PushMultipleTimes(the_hole, operand_stack_size);

  __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
4729 __ Bind(&closed_state);
4732 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
4739 __ CallRuntime(Runtime::kThrow, 1);
4744 __ Bind(&wrong_state);
4745 __ Push(generator_object);
4746 __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
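// Annotation (not part of the original source): resume takes a fast path
// when the suspended generator's operand stack is empty: the Add above
// computes code entry plus the saved continuation offset, and control jumps
// straight back into the generator's unoptimized code. Otherwise the pushed
// holes reserve the operand-stack slots and Runtime::kResumeJSGeneratorObject
// restores their contents before resuming. A closed generator yields a
// completed iterator result on next() or rethrows the sent value, and
// resuming a running generator raises a generator-state error.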
void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label gc_required;
  Label allocated;

  Handle<Map> map(isolate()->native_context()->iterator_result_map());
  // ...
  Register result = x0;
  __ Allocate(map->instance_size(), result, x10, x11, &gc_required,
              TAG_OBJECT);
  // ...
  __ Bind(&gc_required);
  // ...
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  // ...
  __ Bind(&allocated);
  Register map_reg = x1;
  Register result_value = x2;
  Register boolean_done = x3;
  Register empty_fixed_array = x4;
  Register untagged_result = x5;
  __ Mov(map_reg, Operand(map));
  __ Pop(result_value);
  __ Mov(boolean_done, Operand(isolate()->factory()->ToBoolean(done)));
  __ Mov(empty_fixed_array,
         Operand(isolate()->factory()->empty_fixed_array()));
  // ...
  __ ObjectUntag(untagged_result, result);
  // ...
  __ Stp(empty_fixed_array, empty_fixed_array,
         MemOperand(untagged_result, JSObject::kPropertiesOffset));
  __ Stp(result_value, boolean_done,
         MemOperand(untagged_result,
                    JSGeneratorObject::kResultValuePropertyOffset));
  // ...
}
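// Annotation (not part of the original source): the paired Stp stores depend
// on the field layout being contiguous; the full source guards this with
// STATIC_ASSERTs that JSObject::kElementsOffset immediately follows
// kPropertiesOffset and that kResultDonePropertyOffset immediately follows
// kResultValuePropertyOffset. Untagging the result pointer first lets plain
// MemOperands address the fields without per-store tag adjustment.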
void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
    // ...
  } else if (declaration_scope->is_eval_scope()) {
    // ...
  } else {
    DCHECK(declaration_scope->is_function_scope());
    // ...
  }
}
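// Annotation (not part of the original source): this helper pushes the
// closure that a freshly allocated context should use. Global and module
// code pass a smi sentinel and let the runtime substitute the canonical
// empty function, eval code reuses the calling context's closure, and
// function code pushes the JSFunction from the current frame.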
void FullCodeGenerator::EnterFinallyBlock() {
  // ...
  // Store pending message while executing the finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Mov(x10, pending_message_obj);
  // ...
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ Mov(x11, has_pending_message);
  // ...
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ Mov(x10, pending_message_script);
  // ...
}
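// Annotation (not part of the original source): the values loaded through
// x10/x11 above are pushed so that the isolate's pending-message state (the
// message object, the has-message flag, and the script) survives the finally
// block even if code inside the block clobbers it; ExitFinallyBlock below
// pops the three slots and writes them back.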
void FullCodeGenerator::ExitFinallyBlock() {
  // ...
  __ Pop(x10, x11, x12);
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ Mov(x13, pending_message_script);
  // ...
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ Mov(x13, has_pending_message);
  // ...
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Mov(x13, pending_message_obj);
  // ...
}
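// Annotation (not part of the original source): x13 serves as a scratch
// address register for each restore in turn; the popped script, has-message
// flag, and message object are stored back through it to the corresponding
// isolate-level slots, mirroring the pushes in EnterFinallyBlock.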
void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  // Turn the jump into a nop.
  Address branch_address = pc - 3 * kInstructionSize;
  PatchingAssembler patcher(branch_address, 1);
  // ...
  switch (target_state) {
    // ...
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      // ...
  }

  // Replace the call address.
  Instruction* load = Instruction::Cast(pc)->preceding(2);
  Address interrupt_address_pointer =
      reinterpret_cast<Address>(load) + load->ImmPCOffset();
  DCHECK((Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
                                         ->builtins()
                                         ->OnStackReplacement()
                                         ->entry())) ||
         (Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
                                         ->builtins()
                                         ->InterruptCheck()
                                         ->entry())) ||
         (Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
                                         ->builtins()
                                         ->OsrAfterStackCheck()
                                         ->entry())) ||
         (Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
                                         ->builtins()
                                         ->OnStackReplacement()
                                         ->entry())));
  Memory::uint64_at(interrupt_address_pointer) =
      reinterpret_cast<uint64_t>(replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, reinterpret_cast<Address>(load), replacement_code);
}
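// Annotation (not part of the original source): the back-edge site in
// unoptimized code looks roughly like this (sketch reconstructed from the
// patching logic above):
//
//   <decrement profiling counter>
//   b.pl ok            ; INTERRUPT state: skip the call while counter >= 0
//   ldr  x16, <pc-relative 64-bit literal: stub entry address>
//   blr  x16
//   ok:
//
// Patching toggles the b.pl to a marker NOP for the OSR states and rewrites
// the 64-bit literal the ldr loads, so the current state remains fully
// decodable from those same bytes.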
BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  // ...
  Instruction* load = Instruction::Cast(pc)->preceding(2);
  uint64_t entry = Memory::uint64_at(reinterpret_cast<Address>(load) +
                                     load->ImmPCOffset());
  if (entry == reinterpret_cast<uint64_t>(
                   isolate->builtins()->OnStackReplacement()->entry())) {
    return ON_STACK_REPLACEMENT;
  } else if (entry == reinterpret_cast<uint64_t>(
                   isolate->builtins()->OsrAfterStackCheck()->entry())) {
    return OSR_AFTER_STACK_CHECK;
  }
  // ...
  return INTERRUPT;
}
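// Annotation (not part of the original source): the decoder mirrors PatchAt.
// If the instruction before the back-edge site has been patched to the marker
// NOP, the 64-bit literal referenced by the adjacent ldr distinguishes plain
// on-stack replacement from the OSR-after-stack-check variant; otherwise the
// conditional branch is still in place and the state is INTERRUPT.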
#undef __
#define __ ACCESS_MASM(masm())

FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth, int* context_length) {
  // ...
  __ Drop(*stack_depth);
  if (*context_length > 0) {
    // ...
  }
  // ...
  __ Bl(finally_entry_);
  // ...
  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}
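// Annotation (not part of the original source): Exit() runs while unwinding
// nested statements (e.g. a break, continue, or return crossing a
// try...finally). It drops the statement's stack slots, branches-and-links
// into the finally body so control returns here afterwards, and zeroes the
// accumulated depth counters because everything below the handler has
// already been unwound.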