// NOTE(review): this SOURCE is a sampled excerpt of a larger file -- the
// numbers embedded at the start of each line are the ORIGINAL line numbers,
// and they skip, so interior statements (braces, else-branches, the DCHECKs
// between 67 and 74, etc.) are missing from view. Comments below annotate
// only what the visible lines establish.
26 #define __ ACCESS_MASM(masm_)
// JumpPatchSite: records a code position (patch_site_) whose emitted
// cmp instruction is later rewritten by the IC patching machinery.
// Constructor remembers the assembler; in debug builds info_emitted_
// tracks whether EmitPatchInfo() has run (checked in the dtor at 44).
37 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
39 info_emitted_ =
false;
// Destructor invariant: patch info was emitted iff the site was bound.
44 DCHECK(patch_site_.is_bound() == info_emitted_);
// Binds the patch site and emits a placeholder cmp(reg, reg); the jump
// condition itself is on a line not visible here -- presumably a
// conditional branch to |target| follows (TODO confirm in full file).
49 void EmitJumpIfNotSmi(Register reg, Label* target) {
50 DCHECK(!patch_site_.is_bound() && !info_emitted_);
52 __ bind(&patch_site_);
53 __ cmp(reg, Operand(reg));
// Same placeholder shape as EmitJumpIfNotSmi; only the (unseen) branch
// condition differs between the two.
59 void EmitJumpIfSmi(Register reg, Label* target) {
60 DCHECK(!patch_site_.is_bound() && !info_emitted_);
62 __ bind(&patch_site_);
63 __ cmp(reg, Operand(reg));
// Encodes the distance back to the patch site into a cmp instruction:
// the register code carries delta / kOff12Mask and the raw 12-bit
// immediate carries delta % kOff12Mask, so the patcher can recover the
// full offset. |reg|'s declaration (orig. line ~72) is not visible here.
67 void EmitPatchInfo() {
70 if (patch_site_.is_bound()) {
71 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
73 reg.set_code(delta_to_patch_site /
kOff12Mask);
74 __ cmp_raw_immediate(reg, delta_to_patch_site %
kOff12Mask);
84 MacroAssembler* masm_;
108 CompilationInfo* info =
info_;
110 isolate()->factory()->NewFixedArray(
function()->handler_count(),
TENURED);
113 Handle<Smi>(
Smi::FromInt(FLAG_interrupt_budget), isolate()));
115 Comment cmnt(
masm_,
"[ function compiled by full code generator");
120 if (strlen(FLAG_stop_at) > 0 &&
121 info->function()->name()->IsUtf8EqualTo(
CStrVector(FLAG_stop_at))) {
129 if (info->strict_mode() ==
SLOPPY && !info->is_native()) {
131 int receiver_offset = info->scope()->num_parameters() *
kPointerSize;
133 __ CompareRoot(
r2, Heap::kUndefinedValueRootIndex);
147 FrameScope frame_scope(
masm_, StackFrame::MANUAL);
150 __ Prologue(info->IsCodePreAgingActive());
153 { Comment cmnt(
masm_,
"[ Allocate locals");
154 int locals_count = info->scope()->num_stack_slots();
156 DCHECK(!info->function()->is_generator() || locals_count == 0);
157 if (locals_count > 0) {
158 if (locals_count >= 128) {
161 __ LoadRoot(
r2, Heap::kRealStackLimitRootIndex);
167 __ LoadRoot(
r9, Heap::kUndefinedValueRootIndex);
168 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
169 if (locals_count >= kMaxPushes) {
170 int loop_iterations = locals_count / kMaxPushes;
171 __ mov(
r2, Operand(loop_iterations));
173 __ bind(&loop_header);
175 for (
int i = 0;
i < kMaxPushes;
i++) {
180 __ b(&loop_header,
ne);
182 int remaining = locals_count % kMaxPushes;
184 for (
int i = 0;
i < remaining;
i++) {
190 bool function_in_register =
true;
194 if (heap_slots > 0) {
196 Comment cmnt(
masm_,
"[ Allocate context");
197 bool need_write_barrier =
true;
198 if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
200 __ Push(info->scope()->GetScopeInfo());
201 __ CallRuntime(Runtime::kNewGlobalContext, 2);
202 }
else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
203 FastNewContextStub stub(isolate(), heap_slots);
206 need_write_barrier =
false;
209 __ CallRuntime(Runtime::kNewFunctionContext, 1);
211 function_in_register =
false;
217 int num_parameters = info->scope()->num_parameters();
218 for (
int i = 0;
i < num_parameters;
i++) {
220 if (var->IsContextSlot()) {
230 if (need_write_barrier) {
231 __ RecordWriteContextSlot(
233 }
else if (FLAG_debug_code) {
235 __ JumpIfInNewSpace(
cp,
r0, &done);
236 __ Abort(kExpectedNewSpaceObject);
244 if (arguments !=
NULL) {
246 Comment cmnt(
masm_,
"[ Allocate arguments object");
247 if (!function_in_register) {
254 int num_parameters = info->scope()->num_parameters();
273 ArgumentsAccessStub stub(isolate(), type);
280 __ CallRuntime(Runtime::kTraceEnter, 0);
285 if (
scope()->HasIllegalRedeclaration()) {
286 Comment cmnt(
masm_,
"[ Declarations");
291 { Comment cmnt(
masm_,
"[ Declarations");
294 if (
scope()->is_function_scope() &&
scope()->
function() !=
NULL) {
299 VisitVariableDeclaration(
function);
304 { Comment cmnt(
masm_,
"[ Stack check");
307 __ LoadRoot(
ip, Heap::kStackLimitRootIndex);
310 Handle<Code> stack_check = isolate()->builtins()->StackCheck();
311 PredictableCodeSizeScope predictable(
masm_,
317 { Comment cmnt(
masm_,
"[ Body");
319 VisitStatements(
function()->body());
326 { Comment cmnt(
masm_,
"[ return <undefined>;");
327 __ LoadRoot(
r0, Heap::kUndefinedValueRootIndex);
350 #ifdef CAN_USE_ARMV7_INSTRUCTIONS
359 PredictableCodeSizeScope predictable_code_size_scope(
360 masm_, kProfileCounterResetSequenceLength);
363 int reset_value = FLAG_interrupt_budget;
366 reset_value = FLAG_interrupt_budget >> 4;
372 int expected_instr_count =
384 Label* back_edge_target) {
385 Comment cmnt(
masm_,
"[ Back edge bookkeeping");
390 DCHECK(back_edge_target->is_bound());
393 Max(1, distance / kCodeSizeMultiplier));
415 Comment cmnt(
masm_,
"[ Return sequence");
424 __ CallRuntime(Runtime::kTraceExit, 1);
429 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
433 Max(1, distance / kCodeSizeMultiplier));
439 __ Call(isolate()->builtins()->InterruptCheck(),
447 Label check_exit_codesize;
448 __ bind(&check_exit_codesize);
456 PredictableCodeSizeScope predictable(
masm_, -1);
458 int no_frame_start =
__ LeaveFrame(StackFrame::JAVA_SCRIPT);
459 { ConstantPoolUnavailableScope constant_pool_unavailable(
masm_);
460 __ add(
sp,
sp, Operand(sp_delta));
477 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
482 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
488 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
495 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
498 codegen()->PrepareForBailoutBeforeSplit(condition(),
false,
NULL,
NULL);
499 codegen()->DoTest(
this);
521 codegen()->PrepareForBailoutBeforeSplit(condition(),
525 if (index == Heap::kUndefinedValueRootIndex ||
526 index == Heap::kNullValueRootIndex ||
527 index == Heap::kFalseValueRootIndex) {
528 if (false_label_ != fall_through_)
__ b(false_label_);
529 }
else if (index == Heap::kTrueValueRootIndex) {
530 if (true_label_ != fall_through_)
__ b(true_label_);
533 codegen()->DoTest(
this);
543 Handle<Object> lit)
const {
556 codegen()->PrepareForBailoutBeforeSplit(condition(),
560 DCHECK(!lit->IsUndetectableObject());
561 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
562 if (false_label_ != fall_through_)
__ b(false_label_);
563 }
else if (lit->IsTrue() || lit->IsJSObject()) {
564 if (true_label_ != fall_through_)
__ b(true_label_);
565 }
else if (lit->IsString()) {
566 if (String::cast(*lit)->length() == 0) {
567 if (false_label_ != fall_through_)
__ b(false_label_);
569 if (true_label_ != fall_through_)
__ b(true_label_);
571 }
else if (lit->IsSmi()) {
572 if (Smi::cast(*lit)->value() == 0) {
573 if (false_label_ != fall_through_)
__ b(false_label_);
575 if (true_label_ != fall_through_)
__ b(true_label_);
580 codegen()->DoTest(
this);
586 Register reg)
const {
594 Register reg)
const {
602 Register reg)
const {
604 if (count > 1)
__ Drop(count - 1);
610 Register reg)
const {
615 codegen()->PrepareForBailoutBeforeSplit(condition(),
false,
NULL,
NULL);
616 codegen()->DoTest(
this);
621 Label* materialize_false)
const {
622 DCHECK(materialize_true == materialize_false);
623 __ bind(materialize_true);
628 Label* materialize_true,
629 Label* materialize_false)
const {
631 __ bind(materialize_true);
634 __ bind(materialize_false);
641 Label* materialize_true,
642 Label* materialize_false)
const {
644 __ bind(materialize_true);
645 __ LoadRoot(
ip, Heap::kTrueValueRootIndex);
647 __ bind(materialize_false);
648 __ LoadRoot(
ip, Heap::kFalseValueRootIndex);
655 Label* materialize_false)
const {
656 DCHECK(materialize_true == true_label_);
657 DCHECK(materialize_false == false_label_);
667 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
674 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
675 __ LoadRoot(
ip, value_root_index);
681 codegen()->PrepareForBailoutBeforeSplit(condition(),
686 if (true_label_ != fall_through_)
__ b(true_label_);
688 if (false_label_ != fall_through_)
__ b(false_label_);
696 Label* fall_through) {
698 CallIC(ic, condition->test_id());
700 Split(
ne, if_true, if_false, fall_through);
707 Label* fall_through) {
708 if (if_false == fall_through) {
710 }
else if (if_true == fall_through) {
720 DCHECK(var->IsStackAllocated());
724 if (var->IsParameter()) {
734 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
735 if (var->IsContextSlot()) {
737 __ LoadContext(scratch, context_chain_length);
748 __ ldr(dest, location);
756 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
757 DCHECK(!scratch0.is(src));
758 DCHECK(!scratch0.is(scratch1));
759 DCHECK(!scratch1.is(src));
761 __ str(src, location);
764 if (var->IsContextSlot()) {
765 __ RecordWriteContextSlot(scratch0,
776 bool should_normalize,
785 if (should_normalize)
__ b(&skip);
787 if (should_normalize) {
788 __ LoadRoot(
ip, Heap::kTrueValueRootIndex);
803 __ CompareRoot(
r1, Heap::kWithContextMapRootIndex);
804 __ Check(
ne, kDeclarationInWithContext);
805 __ CompareRoot(
r1, Heap::kCatchContextMapRootIndex);
806 __ Check(
ne, kDeclarationInCatchContext);
// Emits code (or records global-declaration data) for a VariableDeclaration,
// dispatching on where the variable is allocated. The switch's case labels
// are on lines missing from this excerpt; from the visible bodies the arms
// appear to be, in order: unallocated/global (added to globals_ for a later
// DeclareGlobals call), stack local, context slot, and lookup slot --
// TODO confirm against the full file.
811 void FullCodeGenerator::VisitVariableDeclaration(
812 VariableDeclaration* declaration) {
816 VariableProxy* proxy = declaration->proxy();
818 Variable* variable = proxy->var();
820 switch (variable->location()) {
// Global: record name plus initial value (the-hole if the binding needs
// initialization, undefined otherwise) into the globals_ list.
822 globals_->Add(variable->name(), zone());
823 globals_->Add(variable->binding_needs_init()
824 ? isolate()->factory()->the_hole_value()
825 : isolate()->factory()->undefined_value(),
// Stack-allocated: store the-hole into the slot (hole marks an
// uninitialized binding).
832 Comment cmnt(
masm_,
"[ VariableDeclaration");
833 __ LoadRoot(
ip, Heap::kTheHoleValueRootIndex);
// Context-allocated: same the-hole initialization, into a context slot.
840 Comment cmnt(
masm_,
"[ VariableDeclaration");
842 __ LoadRoot(
ip, Heap::kTheHoleValueRootIndex);
// Lookup slot: push name and the-hole (plus unseen attribute args) and
// declare dynamically through the runtime.
850 Comment cmnt(
masm_,
"[ VariableDeclaration");
851 __ mov(
r2, Operand(variable->name()));
862 __ LoadRoot(
r0, Heap::kTheHoleValueRootIndex);
868 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
// Emits code for a FunctionDeclaration; parallels VisitVariableDeclaration
// above, but the declared value is a closure built from the function's
// SharedFunctionInfo rather than the-hole. Case labels are missing from
// this excerpt -- arm order presumably mirrors the variable case
// (global, stack, context, lookup); TODO confirm.
875 void FullCodeGenerator::VisitFunctionDeclaration(
876 FunctionDeclaration* declaration) {
877 VariableProxy* proxy = declaration->proxy();
878 Variable* variable = proxy->var();
879 switch (variable->location()) {
// Global: record the name and a compiled SharedFunctionInfo in globals_.
// Compilation can fail on stack overflow, which aborts codegen.
881 globals_->Add(variable->name(), zone());
882 Handle<SharedFunctionInfo>
function =
885 if (
function.is_null())
return SetStackOverflow();
892 Comment cmnt(
masm_,
"[ FunctionDeclaration");
// Context slot: the store into the context must be followed by a write
// barrier since the closure is a heap object.
899 Comment cmnt(
masm_,
"[ FunctionDeclaration");
905 __ RecordWriteContextSlot(
cp,
// Lookup slot: push the name (plus unseen args) and declare through the
// runtime, same entry point as the variable case.
918 Comment cmnt(
masm_,
"[ FunctionDeclaration");
919 __ mov(
r2, Operand(variable->name()));
924 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
931 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
932 Variable* variable = declaration->proxy()->var();
934 DCHECK(variable->interface()->IsFrozen());
936 Comment cmnt(
masm_,
"[ ModuleDeclaration");
947 __ RecordWriteContextSlot(
cp,
958 Visit(declaration->module());
962 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
963 VariableProxy* proxy = declaration->proxy();
964 Variable* variable = proxy->var();
965 switch (variable->location()) {
971 Comment cmnt(
masm_,
"[ ImportDeclaration");
985 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
996 __ CallRuntime(Runtime::kDeclareGlobals, 3);
1004 __ CallRuntime(Runtime::kDeclareModules, 1);
// Lowers a JS switch statement in two passes: first a chain of strict-equality
// case comparisons (each falling through to &next_test on mismatch), then the
// case bodies, bound in source order so matched cases fall through naturally.
// Many interior lines (tag evaluation, smi fast-path compare, several braces)
// are missing from this excerpt.
1009 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
1010 Comment cmnt(
masm_,
"[ SwitchStatement");
1011 Breakable nested_statement(
this, stmt);
1018 ZoneList<CaseClause*>* clauses = stmt->cases();
1019 CaseClause* default_clause =
NULL;
// Pass 1: emit the comparison chain. The default clause is remembered
// and skipped here; it is only jumped to after all tests fail.
1023 for (
int i = 0;
i < clauses->length();
i++) {
1024 CaseClause* clause = clauses->at(
i);
1025 clause->body_target()->Unuse();
1028 if (clause->is_default()) {
1029 default_clause = clause;
1033 Comment cmnt(
masm_,
"[ Case comparison");
1034 __ bind(&next_test);
// Inline smi fast path: a patchable site guards a direct compare; non-smi
// operands fall to the CompareIC slow case below.
1043 JumpPatchSite patch_site(
masm_);
1044 if (inline_smi_code) {
1047 patch_site.EmitJumpIfNotSmi(
r2, &slow_case);
1050 __ b(
ne, &next_test);
1052 __ b(clause->body_target());
1053 __ bind(&slow_case);
// Slow case: strict-equality CompareIC; patch info ties the IC back to
// the inlined smi site.
1059 CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
1060 CallIC(ic, clause->CompareId());
1061 patch_site.EmitPatchInfo();
1066 __ LoadRoot(
ip, Heap::kTrueValueRootIndex);
1068 __ b(
ne, &next_test);
1070 __ jmp(clause->body_target());
// IC result convention: r0 == 0 means "equal" here.
1073 __ cmp(
r0, Operand::Zero());
1074 __ b(
ne, &next_test);
1076 __ b(clause->body_target());
// All tests failed: go to the default clause if any, else break out.
1081 __ bind(&next_test);
1083 if (default_clause ==
NULL) {
1084 __ b(nested_statement.break_label());
1086 __ b(default_clause->body_target());
// Pass 2: emit the case bodies in order; body_target labels bound here.
1090 for (
int i = 0;
i < clauses->length();
i++) {
1091 Comment cmnt(
masm_,
"[ Case body");
1092 CaseClause* clause = clauses->at(
i);
1093 __ bind(clause->body_target());
1095 VisitStatements(clause->statements());
1098 __ bind(nested_statement.break_label());
1103 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1104 Comment cmnt(
masm_,
"[ ForInStatement");
1105 int slot = stmt->ForInFeedbackSlot();
1109 ForIn loop_statement(
this, stmt);
1115 __ LoadRoot(
ip, Heap::kUndefinedValueRootIndex);
1118 Register null_value =
r5;
1119 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1120 __ cmp(
r0, null_value);
1126 Label convert, done_convert;
1127 __ JumpIfSmi(
r0, &convert);
1129 __ b(
ge, &done_convert);
1133 __ bind(&done_convert);
1140 __ b(
le, &call_runtime);
1146 __ CheckEnumCache(null_value, &call_runtime);
1155 __ bind(&call_runtime);
1157 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1164 __ LoadRoot(
ip, Heap::kMetaMapRootIndex);
1166 __ b(
ne, &fixed_array);
1169 Label no_descriptors;
1170 __ bind(&use_cache);
1174 __ b(
eq, &no_descriptors);
1176 __ LoadInstanceDescriptors(
r0,
r2);
1187 __ bind(&no_descriptors);
1193 __ bind(&fixed_array);
1203 __ b(
gt, &non_proxy);
1205 __ bind(&non_proxy);
1217 __ b(
hs, loop_statement.break_label());
1222 __ ldr(
r3, MemOperand::PointerAddressFromSmiKey(
r2,
r0));
1234 __ b(
eq, &update_each);
1239 __ b(
eq, &update_each);
1248 __ b(
eq, loop_statement.continue_label());
1252 __ bind(&update_each);
1255 { EffectContext
context(
this);
1260 Visit(stmt->body());
1264 __ bind(loop_statement.continue_label());
1273 __ bind(loop_statement.break_label());
1283 void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
1284 Comment cmnt(
masm_,
"[ ForOfStatement");
1287 Iteration loop_statement(
this, stmt);
1294 __ bind(loop_statement.continue_label());
1300 Label result_not_done;
1302 loop_statement.break_label(),
1305 __ bind(&result_not_done);
1311 Visit(stmt->body());
1316 __ jmp(loop_statement.continue_label());
1320 __ bind(loop_statement.break_label());
1333 if (!FLAG_always_opt &&
1334 !FLAG_prepare_always_opt &&
1336 scope()->is_function_scope() &&
1337 info->num_literals() == 0) {
1338 FastNewClosureStub stub(isolate(), info->strict_mode(), info->kind());
1339 __ mov(
r2, Operand(info));
1342 __ mov(
r0, Operand(info));
1343 __ LoadRoot(
r1, pretenure ? Heap::kTrueValueRootIndex
1344 : Heap::kFalseValueRootIndex);
1346 __ CallRuntime(Runtime::kNewClosure, 3);
1352 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1353 Comment cmnt(
masm_,
"[ VariableProxy");
1359 Comment cnmt(
masm_,
"[ SuperReference ");
1364 Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
1369 __ cmp(
r0, Operand(isolate()->factory()->undefined_value()));
1372 __ CallRuntime(Runtime::kThrowNonMethodError, 0);
1380 Register current =
cp;
1386 if (s->num_heap_slots() > 0) {
1387 if (s->calls_sloppy_eval()) {
1400 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope())
break;
1401 s = s->outer_scope();
1404 if (s->is_eval_scope()) {
1406 if (!current.is(next)) {
1407 __ Move(next, current);
1412 __ LoadRoot(
ip, Heap::kNativeContextMapRootIndex);
1427 if (FLAG_vector_ics) {
1441 DCHECK(var->IsContextSlot());
1447 if (s->num_heap_slots() > 0) {
1448 if (s->calls_sloppy_eval()) {
1480 Variable* var = proxy->var();
1485 Variable* local = var->local_if_not_shadowed();
1487 if (local->mode() ==
LET || local->mode() ==
CONST ||
1489 __ CompareRoot(
r0, Heap::kTheHoleValueRootIndex);
1491 __ LoadRoot(
r0, Heap::kUndefinedValueRootIndex,
eq);
1494 __ mov(
r0, Operand(var->name()));
1496 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1507 Variable* var = proxy->var();
1511 switch (var->location()) {
1513 Comment cmnt(
masm_,
"[ Global variable");
1516 if (FLAG_vector_ics) {
1528 Comment cmnt(
masm_, var->IsContextSlot() ?
"[ Context variable"
1529 :
"[ Stack variable");
1530 if (var->binding_needs_init()) {
1554 bool skip_init_check;
1555 if (var->scope()->DeclarationScope() !=
scope()->DeclarationScope()) {
1556 skip_init_check =
false;
1562 var->initializer_position() < proxy->position();
1565 if (!skip_init_check) {
1568 __ CompareRoot(
r0, Heap::kTheHoleValueRootIndex);
1569 if (var->mode() ==
LET || var->mode() ==
CONST) {
1574 __ mov(
r0, Operand(var->name()));
1576 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1581 __ LoadRoot(
r0, Heap::kUndefinedValueRootIndex,
eq);
1592 Comment cmnt(
masm_,
"[ Lookup variable");
1598 __ mov(
r1, Operand(var->name()));
1600 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
1608 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1609 Comment cmnt(
masm_,
"[ RegExpLiteral");
1620 int literal_offset =
1623 __ LoadRoot(
ip, Heap::kUndefinedValueRootIndex);
1625 __ b(
ne, &materialized);
1630 __ mov(
r2, Operand(expr->pattern()));
1631 __ mov(
r1, Operand(expr->flags()));
1633 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1636 __ bind(&materialized);
1638 Label allocated, runtime_allocate;
1642 __ bind(&runtime_allocate);
1645 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1648 __ bind(&allocated);
1659 if (expression ==
NULL) {
1660 __ LoadRoot(
r1, Heap::kNullValueRootIndex);
1668 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1669 Comment cmnt(
masm_,
"[ ObjectLiteral");
1671 expr->BuildConstantProperties(isolate());
1672 Handle<FixedArray> constant_properties = expr->constant_properties();
1676 __ mov(
r1, Operand(constant_properties));
1677 int flags = expr->fast_elements()
1678 ? ObjectLiteral::kFastElements
1679 : ObjectLiteral::kNoFlags;
1680 flags |= expr->has_function()
1681 ? ObjectLiteral::kHasFunction
1682 : ObjectLiteral::kNoFlags;
1684 int properties_count = constant_properties->length() / 2;
1685 if (expr->may_store_doubles() || expr->depth() > 1 ||
1686 masm()->serializer_enabled() ||
flags != ObjectLiteral::kFastElements ||
1689 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1691 FastCloneShallowObjectStub stub(isolate(), properties_count);
1697 bool result_saved =
false;
1702 expr->CalculateEmitStore(zone());
1704 AccessorTable accessor_table(zone());
1705 for (
int i = 0;
i < expr->properties()->length();
i++) {
1706 ObjectLiteral::Property*
property = expr->properties()->at(
i);
1707 if (property->IsCompileTimeValue())
continue;
1709 Literal* key =
property->key();
1710 Expression* value =
property->value();
1711 if (!result_saved) {
1713 result_saved =
true;
1715 switch (property->kind()) {
1718 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1721 case ObjectLiteral::Property::COMPUTED:
1722 if (key->value()->IsInternalizedString()) {
1723 if (property->emit_store()) {
1740 if (property->emit_store()) {
1743 __ CallRuntime(Runtime::kSetProperty, 4);
1748 case ObjectLiteral::Property::PROTOTYPE:
1753 if (property->emit_store()) {
1754 __ CallRuntime(Runtime::kSetPrototype, 2);
1760 case ObjectLiteral::Property::GETTER:
1761 accessor_table.lookup(key)->second->getter = value;
1763 case ObjectLiteral::Property::SETTER:
1764 accessor_table.lookup(key)->second->setter = value;
1771 for (AccessorTable::Iterator it = accessor_table.begin();
1772 it != accessor_table.end();
1781 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1784 if (expr->has_function()) {
1788 __ CallRuntime(Runtime::kToFastProperties, 1);
1799 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1800 Comment cmnt(
masm_,
"[ ArrayLiteral");
1802 expr->BuildConstantElements(isolate());
1803 int flags = expr->depth() == 1
1804 ? ArrayLiteral::kShallowElements
1805 : ArrayLiteral::kNoFlags;
1807 ZoneList<Expression*>* subexprs = expr->values();
1808 int length = subexprs->length();
1809 Handle<FixedArray> constant_elements = expr->constant_elements();
1810 DCHECK_EQ(2, constant_elements->length());
1812 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1814 Handle<FixedArrayBase> constant_elements_values(
1815 FixedArrayBase::cast(constant_elements->get(1)));
1818 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1827 __ mov(
r1, Operand(constant_elements));
1831 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1833 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1837 bool result_saved =
false;
1841 for (
int i = 0;
i < length;
i++) {
1842 Expression* subexpr = subexprs->at(
i);
1847 if (!result_saved) {
1850 result_saved =
true;
1865 StoreArrayLiteralElementStub stub(isolate());
1881 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1882 DCHECK(expr->target()->IsValidReferenceExpression());
1884 Comment cmnt(
masm_,
"[ Assignment");
1892 NAMED_SUPER_PROPERTY
1895 Property*
property = expr->target()->AsProperty();
1896 if (property !=
NULL) {
1897 assign_type = (
property->key()->IsPropertyName())
1898 ? (property->IsSuperAccess() ? NAMED_SUPER_PROPERTY
1904 switch (assign_type) {
1909 if (expr->is_compound()) {
1917 case NAMED_SUPER_PROPERTY:
1921 if (expr->is_compound()) {
1922 const Register scratch =
r1;
1929 if (expr->is_compound()) {
1944 if (expr->is_compound()) {
1945 { AccumulatorValueContext
context(
this);
1946 switch (assign_type) {
1955 case NAMED_SUPER_PROPERTY:
1974 AccumulatorValueContext
context(
this);
1995 switch (assign_type) {
2005 case NAMED_SUPER_PROPERTY:
2015 void FullCodeGenerator::VisitYield(Yield* expr) {
2016 Comment cmnt(
masm_,
"[ Yield");
2021 switch (expr->yield_kind()) {
2022 case Yield::kSuspend:
2027 case Yield::kInitial: {
2028 Label suspend, continuation, post_runtime, resume;
2032 __ bind(&continuation);
2046 __ b(
eq, &post_runtime);
2048 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2050 __ bind(&post_runtime);
2059 case Yield::kFinal: {
2071 case Yield::kDelegating: {
2078 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2079 Label l_next, l_call, l_loop;
2084 __ LoadRoot(
r0, Heap::kUndefinedValueRootIndex);
2090 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex);
2100 __ PushTryHandler(StackHandler::CATCH, expr->index());
2104 __ bind(&l_continuation);
2106 __ bind(&l_suspend);
2107 const int generator_object_depth =
kPointerSize + handler_size;
2117 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2127 __ LoadRoot(load_name, Heap::knext_stringRootIndex);
2135 if (FLAG_vector_ics) {
2139 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2151 __ Move(load_receiver,
r0);
2153 __ push(load_receiver);
2154 __ LoadRoot(load_name, Heap::kdone_stringRootIndex);
2155 if (FLAG_vector_ics) {
2162 __ cmp(
r0, Operand(0));
2166 __ pop(load_receiver);
2167 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex);
2168 if (FLAG_vector_ics) {
2192 Label wrong_state, closed_state, done;
2197 __ b(
eq, &closed_state);
2198 __ b(
lt, &wrong_state);
2212 __ LoadRoot(
r2, Heap::kTheHoleValueRootIndex);
2213 Label push_argument_holes, push_frame;
2214 __ bind(&push_argument_holes);
2216 __ b(
mi, &push_frame);
2218 __ jmp(&push_argument_holes);
2223 __ bind(&push_frame);
2224 __ bl(&resume_frame);
2226 __ bind(&resume_frame);
2232 __ PushFixedFrame(
r4);
2245 __ cmp(
r3, Operand(0));
2246 __ b(
ne, &slow_resume);
2249 { ConstantPoolUnavailableScope constant_pool_unavailable(
masm_);
2250 if (FLAG_enable_ool_constant_pool) {
2263 __ bind(&slow_resume);
2268 Label push_operand_holes, call_resume;
2269 __ bind(&push_operand_holes);
2271 __ b(
mi, &call_resume);
2273 __ b(&push_operand_holes);
2274 __ bind(&call_resume);
2278 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2280 __ stop(
"not-reached");
2283 __ bind(&closed_state);
2286 __ LoadRoot(
r2, Heap::kUndefinedValueRootIndex);
2293 __ CallRuntime(Runtime::kThrow, 1);
2298 __ bind(&wrong_state);
2300 __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
2311 Handle<Map>
map(isolate()->native_context()->iterator_result_map());
2316 __ bind(&gc_required);
2318 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2322 __ bind(&allocated);
2325 __ mov(
r3, Operand(isolate()->factory()->ToBoolean(done)));
2326 __ mov(
r4, Operand(isolate()->factory()->empty_fixed_array()));
2345 Literal* key = prop->key()->AsLiteral();
2346 DCHECK(!prop->IsSuperAccess());
2349 if (FLAG_vector_ics) {
2362 Literal* key = prop->key()->AsLiteral();
2363 DCHECK(!key->value()->IsSmi());
2364 DCHECK(prop->IsSuperAccess());
2367 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2373 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2374 if (FLAG_vector_ics) {
2379 CallIC(ic, prop->PropertyFeedbackId());
2387 Expression* left_expr,
2388 Expression* right_expr) {
2389 Label done, smi_case, stub_call;
2391 Register scratch1 =
r2;
2392 Register scratch2 =
r3;
2396 Register right =
r0;
2400 __ orr(scratch1, left, Operand(right));
2402 JumpPatchSite patch_site(
masm_);
2403 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2405 __ bind(&stub_call);
2406 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op,
mode).code();
2407 CallIC(code, expr->BinaryOperationFeedbackId());
2408 patch_site.EmitPatchInfo();
2416 __ GetLeastBitsFromSmi(scratch1, right, 5);
2417 __ mov(right, Operand(left,
ASR, scratch1));
2421 __ SmiUntag(scratch1, left);
2422 __ GetLeastBitsFromSmi(scratch2, right, 5);
2423 __ mov(scratch1, Operand(scratch1,
LSL, scratch2));
2424 __ TrySmiTag(right, scratch1, &stub_call);
2428 __ SmiUntag(scratch1, left);
2429 __ GetLeastBitsFromSmi(scratch2, right, 5);
2430 __ mov(scratch1, Operand(scratch1,
LSR, scratch2));
2431 __ tst(scratch1, Operand(0xc0000000));
2432 __ b(
ne, &stub_call);
2433 __ SmiTag(right, scratch1);
2437 __ add(scratch1, left, Operand(right),
SetCC);
2438 __ b(
vs, &stub_call);
2439 __ mov(right, scratch1);
2442 __ sub(scratch1, left, Operand(right),
SetCC);
2443 __ b(
vs, &stub_call);
2444 __ mov(right, scratch1);
2447 __ SmiUntag(
ip, right);
2448 __ smull(scratch1, scratch2, left,
ip);
2449 __ mov(
ip, Operand(scratch1,
ASR, 31));
2450 __ cmp(
ip, Operand(scratch2));
2451 __ b(
ne, &stub_call);
2452 __ cmp(scratch1, Operand::Zero());
2455 __ add(scratch2, right, Operand(left),
SetCC);
2457 __ b(
mi, &stub_call);
2461 __ orr(right, left, Operand(right));
2463 case Token::BIT_AND:
2464 __ and_(right, left, Operand(right));
2466 case Token::BIT_XOR:
2467 __ eor(right, left, Operand(right));
2482 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op,
mode).code();
2483 JumpPatchSite patch_site(
masm_);
2484 CallIC(code, expr->BinaryOperationFeedbackId());
2485 patch_site.EmitPatchInfo();
2491 DCHECK(expr->IsValidReferenceExpression());
2497 Property* prop = expr->AsProperty();
2499 assign_type = (prop->key()->IsPropertyName())
2504 switch (assign_type) {
2506 Variable* var = expr->AsVariableProxy()->var();
2517 Operand(prop->key()->AsLiteral()->value()));
2529 CodeFactory::KeyedStoreIC(isolate(),
strict_mode()).code();
2541 if (var->IsContextSlot()) {
2545 __ RecordWriteContextSlot(
2552 if (var->IsUnallocated()) {
2558 }
else if (op == Token::INIT_CONST_LEGACY) {
2560 DCHECK(!var->IsParameter());
2561 if (var->IsLookupSlot()) {
2563 __ mov(
r0, Operand(var->name()));
2565 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2567 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2570 __ ldr(
r2, location);
2571 __ CompareRoot(
r2, Heap::kTheHoleValueRootIndex);
2577 }
else if (var->mode() ==
LET && op != Token::INIT_LET) {
2579 DCHECK(!var->IsLookupSlot());
2580 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2583 __ ldr(
r3, location);
2584 __ CompareRoot(
r3, Heap::kTheHoleValueRootIndex);
2586 __ mov(
r3, Operand(var->name()));
2588 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2593 }
else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2594 if (var->IsLookupSlot()) {
2597 __ mov(
r1, Operand(var->name()));
2600 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2604 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2608 __ ldr(
r2, location);
2609 __ CompareRoot(
r2, Heap::kTheHoleValueRootIndex);
2610 __ Check(
eq, kLetBindingReInitialization);
2621 Property* prop = expr->target()->AsProperty();
2623 DCHECK(prop->key()->IsLiteral());
2628 Operand(prop->key()->AsLiteral()->value()));
2641 Property* prop = expr->target()->AsProperty();
2643 Literal* key = prop->key()->AsLiteral();
2649 : Runtime::kStoreToSuper_Sloppy),
2663 Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(),
strict_mode()).code();
2664 CallIC(ic, expr->AssignmentFeedbackId());
2671 void FullCodeGenerator::VisitProperty(Property* expr) {
2672 Comment cmnt(
masm_,
"[ Property");
2673 Expression* key = expr->key();
2675 if (key->IsPropertyName()) {
2676 if (!expr->IsSuperAccess()) {
2700 TypeFeedbackId ast_id) {
2711 Expression* callee = expr->expression();
2713 CallICState::CallType call_type =
2714 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2717 if (call_type == CallICState::FUNCTION) {
2718 { StackValueContext
context(
this);
2724 __ Push(isolate()->factory()->undefined_value());
2727 DCHECK(callee->IsProperty());
2728 DCHECK(!callee->AsProperty()->IsSuperAccess());
2743 Expression* callee = expr->expression();
2744 DCHECK(callee->IsProperty());
2745 Property* prop = callee->AsProperty();
2746 DCHECK(prop->IsSuperAccess());
2749 Literal* key = prop->key()->AsLiteral();
2750 DCHECK(!key->value()->IsSmi());
2752 const Register scratch =
r1;
2753 SuperReference* super_ref = prop->obj()->AsSuperReference();
2769 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2777 EmitCall(expr, CallICState::METHOD);
2787 Expression* callee = expr->expression();
2790 DCHECK(callee->IsProperty());
2801 EmitCall(expr, CallICState::METHOD);
2807 ZoneList<Expression*>* args = expr->arguments();
2808 int arg_count = args->length();
2809 { PreservePositionScope
scope(
masm()->positions_recorder());
2810 for (
int i = 0;
i < arg_count;
i++) {
2818 isolate(), arg_count, call_type);
2834 if (arg_count > 0) {
2837 __ LoadRoot(
r4, Heap::kUndefinedValueRootIndex);
2852 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2856 void FullCodeGenerator::VisitCall(Call* expr) {
2860 expr->return_is_recorded_ =
false;
2863 Comment cmnt(
masm_,
"[ Call");
2864 Expression* callee = expr->expression();
2865 Call::CallType call_type = expr->GetCallType(isolate());
2867 if (call_type == Call::POSSIBLY_EVAL_CALL) {
2872 ZoneList<Expression*>* args = expr->arguments();
2873 int arg_count = args->length();
2875 { PreservePositionScope pos_scope(
masm()->positions_recorder());
2877 __ LoadRoot(
r2, Heap::kUndefinedValueRootIndex);
2881 for (
int i = 0;
i < arg_count;
i++) {
2906 }
else if (call_type == Call::GLOBAL_CALL) {
2909 }
else if (call_type == Call::LOOKUP_SLOT_CALL) {
2911 VariableProxy* proxy = callee->AsVariableProxy();
2914 { PreservePositionScope
scope(
masm()->positions_recorder());
2924 __ mov(
r2, Operand(proxy->name()));
2926 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
2932 if (done.is_linked()) {
2940 __ LoadRoot(
r1, Heap::kUndefinedValueRootIndex);
2948 }
else if (call_type == Call::PROPERTY_CALL) {
2949 Property*
property = callee->AsProperty();
2950 bool is_named_call =
property->key()->IsPropertyName();
2952 if (property->IsSuperAccess() && is_named_call) {
2956 PreservePositionScope
scope(
masm()->positions_recorder());
2959 if (is_named_call) {
2966 DCHECK(call_type == Call::OTHER_CALL);
2968 { PreservePositionScope
scope(
masm()->positions_recorder());
2971 __ LoadRoot(
r1, Heap::kUndefinedValueRootIndex);
2979 DCHECK(expr->return_is_recorded_);
// Generates code for a `new` expression: evaluates the constructor and
// arguments, then calls the construct stub with the argument count in r0.
// NOTE(review): interior lines are elided in this excerpt.
2984 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2985 Comment cmnt(
masm_,
"[ CallNew");
2996 ZoneList<Expression*>* args = expr->arguments();
2997 int arg_count = args->length();
// Push all constructor arguments.
2998 for (
int i = 0;
i < arg_count;
i++) {
// r0 <- argument count, as expected by the construct stub.
3007 __ mov(
r0, Operand(arg_count));
// With pretenuring, the allocation-site feedback slot must directly
// follow the CallNew feedback slot (asserted here).
3011 if (FLAG_pretenuring_call_new) {
3013 DCHECK(expr->AllocationSiteFeedbackSlot() ==
3014 expr->CallNewFeedbackSlot() + 1);
// Inlined %_IsSmi(x): tests whether the single argument is a Smi and
// plugs the boolean result into the current expression context.
3027 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3028 ZoneList<Expression*>* args = expr->arguments();
3029 DCHECK(args->length() == 1);
3033 Label materialize_true, materialize_false;
3034 Label* if_true =
NULL;
3035 Label* if_false =
NULL;
3036 Label* fall_through =
NULL;
3037 context()->PrepareTest(&materialize_true, &materialize_false,
3038 &if_true, &if_false, &fall_through);
// Branch on the Smi tag test (test instruction elided in this excerpt).
3042 Split(
eq, if_true, if_false, fall_through);
3044 context()->Plug(if_true, if_false);
// Inlined %_IsNonNegativeSmi(x): true iff the argument is a Smi with the
// sign bit clear.
3048 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3049 ZoneList<Expression*>* args = expr->arguments();
3050 DCHECK(args->length() == 1);
3054 Label materialize_true, materialize_false;
3055 Label* if_true =
NULL;
3056 Label* if_false =
NULL;
3057 Label* fall_through =
NULL;
3058 context()->PrepareTest(&materialize_true, &materialize_false,
3059 &if_true, &if_false, &fall_through);
// Tests both the Smi tag and the sign bit of r0 in one instruction.
3062 __ NonNegativeSmiTst(
r0);
3063 Split(
eq, if_true, if_false, fall_through);
3065 context()->Plug(if_true, if_false);
// Inlined %_IsObject(x): Smis are not objects; null is; otherwise the
// instance type is range-checked (comparison lines elided in excerpt).
3069 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3070 ZoneList<Expression*>* args = expr->arguments();
3071 DCHECK(args->length() == 1);
3075 Label materialize_true, materialize_false;
3076 Label* if_true =
NULL;
3077 Label* if_false =
NULL;
3078 Label* fall_through =
NULL;
3079 context()->PrepareTest(&materialize_true, &materialize_false,
3080 &if_true, &if_false, &fall_through);
3082 __ JumpIfSmi(
r0, if_false);
// ip <- null, for the null comparison.
3083 __ LoadRoot(
ip, Heap::kNullValueRootIndex);
// `le` on the instance-type range check selects object types.
3096 Split(
le, if_true, if_false, fall_through);
3098 context()->Plug(if_true, if_false);
// Inlined %_IsSpecObject(x): true iff the argument is a heap object whose
// instance type is at or above the first spec-object type (ge compare).
3102 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3103 ZoneList<Expression*>* args = expr->arguments();
3104 DCHECK(args->length() == 1);
3108 Label materialize_true, materialize_false;
3109 Label* if_true =
NULL;
3110 Label* if_false =
NULL;
3111 Label* fall_through =
NULL;
3112 context()->PrepareTest(&materialize_true, &materialize_false,
3113 &if_true, &if_false, &fall_through);
3115 __ JumpIfSmi(
r0, if_false);
3118 Split(
ge, if_true, if_false, fall_through);
3120 context()->Plug(if_true, if_false);
// Inlined %_IsUndetectableObject(x): true iff the argument is a heap
// object with the undetectable map bit set (bit test elided; ne branch).
3124 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3125 ZoneList<Expression*>* args = expr->arguments();
3126 DCHECK(args->length() == 1);
3130 Label materialize_true, materialize_false;
3131 Label* if_true =
NULL;
3132 Label* if_false =
NULL;
3133 Label* fall_through =
NULL;
3134 context()->PrepareTest(&materialize_true, &materialize_false,
3135 &if_true, &if_false, &fall_through);
3137 __ JumpIfSmi(
r0, if_false);
3142 Split(
ne, if_true, if_false, fall_through);
3144 context()->Plug(if_true, if_false);
// Inlined check that a String wrapper can use the default valueOf: walks
// the wrapper's map descriptors looking for a "valueOf" property and
// verifies the prototype chain. NOTE(review): many interior lines are
// elided in this excerpt; comments cover the visible steps only.
3148 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3149 CallRuntime* expr) {
3150 ZoneList<Expression*>* args = expr->arguments();
3151 DCHECK(args->length() == 1);
3155 Label materialize_true, materialize_false, skip_lookup;
3156 Label* if_true =
NULL;
3157 Label* if_false =
NULL;
3158 Label* fall_through =
NULL;
3159 context()->PrepareTest(&materialize_true, &materialize_false,
3160 &if_true, &if_false, &fall_through);
// The argument must already be a heap object at this point.
3162 __ AssertNotSmi(
r0);
// If the map's "string wrapper safe" style bit check fails, skip the
// expensive descriptor lookup.
3167 __ b(
ne, &skip_lookup);
// A hash-table (dictionary) properties map cannot be fast-cased.
3172 __ LoadRoot(
ip, Heap::kHashTableMapRootIndex);
3179 Label entry, loop, done;
// r3 <- number of own descriptors from map r1.
3182 __ NumberOfOwnDescriptors(
r3,
r1);
// Zero descriptors: nothing to scan.
3183 __ cmp(
r3, Operand::Zero());
// r4 <- descriptor array of map r1.
3186 __ LoadInstanceDescriptors(
r1,
r4);
// ip <- the interned "valueOf" string to compare descriptor keys against.
3201 __ mov(
ip, Operand(isolate()->factory()->value_of_string()));
3219 __ bind(&skip_lookup);
// Prototype must be a heap object as well.
3224 __ JumpIfSmi(
r2, if_false);
3231 Split(
eq, if_true, if_false, fall_through);
3233 context()->Plug(if_true, if_false);
// Inlined %_IsFunction(x): true iff the argument is a heap object with
// JS_FUNCTION_TYPE (type compare elided in excerpt; eq branch).
3237 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3238 ZoneList<Expression*>* args = expr->arguments();
3239 DCHECK(args->length() == 1);
3243 Label materialize_true, materialize_false;
3244 Label* if_true =
NULL;
3245 Label* if_false =
NULL;
3246 Label* fall_through =
NULL;
3247 context()->PrepareTest(&materialize_true, &materialize_false,
3248 &if_true, &if_false, &fall_through);
3250 __ JumpIfSmi(
r0, if_false);
3253 Split(
eq, if_true, if_false, fall_through);
3255 context()->Plug(if_true, if_false);
// Inlined %_IsMinusZero(x): true iff the argument is a heap number whose
// bit pattern is exactly -0.0 (high word 0x80000000, low word 0).
3259 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3260 ZoneList<Expression*>* args = expr->arguments();
3261 DCHECK(args->length() == 1);
3265 Label materialize_true, materialize_false;
3266 Label* if_true =
NULL;
3267 Label* if_false =
NULL;
3268 Label* fall_through =
NULL;
3269 context()->PrepareTest(&materialize_true, &materialize_false,
3270 &if_true, &if_false, &fall_through);
// Compare the exponent/sign word against the -0.0 pattern.
3275 __ cmp(
r2, Operand(0x80000000));
// Conditionally (only if the high word matched) compare the mantissa
// low word against zero.
3276 __ cmp(
r1, Operand(0x00000000),
eq);
3279 Split(
eq, if_true, if_false, fall_through);
3281 context()->Plug(if_true, if_false);
// Inlined %_IsArray(x): true iff the argument is a heap object of
// JS_ARRAY_TYPE (type compare elided in excerpt; eq branch).
3285 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3286 ZoneList<Expression*>* args = expr->arguments();
3287 DCHECK(args->length() == 1);
3291 Label materialize_true, materialize_false;
3292 Label* if_true =
NULL;
3293 Label* if_false =
NULL;
3294 Label* fall_through =
NULL;
3295 context()->PrepareTest(&materialize_true, &materialize_false,
3296 &if_true, &if_false, &fall_through);
3298 __ JumpIfSmi(
r0, if_false);
3301 Split(
eq, if_true, if_false, fall_through);
3303 context()->Plug(if_true, if_false);
// Inlined %_IsRegExp(x): true iff the argument is a heap object of
// JS_REGEXP_TYPE (type compare elided in excerpt; eq branch).
3307 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3308 ZoneList<Expression*>* args = expr->arguments();
3309 DCHECK(args->length() == 1);
3313 Label materialize_true, materialize_false;
3314 Label* if_true =
NULL;
3315 Label* if_false =
NULL;
3316 Label* fall_through =
NULL;
3317 context()->PrepareTest(&materialize_true, &materialize_false,
3318 &if_true, &if_false, &fall_through);
3320 __ JumpIfSmi(
r0, if_false);
3323 Split(
eq, if_true, if_false, fall_through);
3325 context()->Plug(if_true, if_false);
// Inlined %_IsConstructCall(): takes no arguments; inspects the caller
// frame to decide whether the current function was invoked with `new`
// (frame walk elided in excerpt; eq branch on the frame-type compare).
3330 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3331 DCHECK(expr->arguments()->length() == 0);
3333 Label materialize_true, materialize_false;
3334 Label* if_true =
NULL;
3335 Label* if_false =
NULL;
3336 Label* fall_through =
NULL;
3337 context()->PrepareTest(&materialize_true, &materialize_false,
3338 &if_true, &if_false, &fall_through);
3352 Split(
eq, if_true, if_false, fall_through);
3354 context()->Plug(if_true, if_false);
// Inlined %_ObjectEquals(a, b): true iff the two arguments are the same
// object (pointer-identity compare, elided in excerpt; eq branch).
3358 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3359 ZoneList<Expression*>* args = expr->arguments();
3360 DCHECK(args->length() == 2);
3366 Label materialize_true, materialize_false;
3367 Label* if_true =
NULL;
3368 Label* if_false =
NULL;
3369 Label* fall_through =
NULL;
3370 context()->PrepareTest(&materialize_true, &materialize_false,
3371 &if_true, &if_false, &fall_through);
3376 Split(
eq, if_true, if_false, fall_through);
3378 context()->Plug(if_true, if_false);
// Inlined %_Arguments(index): loads the indexed actual argument
// (argument-access stub call elided in this excerpt).
3382 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3383 ZoneList<Expression*>* args = expr->arguments();
3384 DCHECK(args->length() == 1);
// Inlined %_ArgumentsLength(): produces the actual argument count
// (body elided in this excerpt).
3397 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3398 DCHECK(expr->arguments()->length() == 0);
// Inlined %_ClassOf(x): computes the class-name string of the argument.
// Smis and null-likes yield "null"-path handling; functions yield
// "Function"; objects without a function constructor yield "Object".
// NOTE(review): the instance-type compares are elided in this excerpt.
3416 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3417 ZoneList<Expression*>* args = expr->arguments();
3418 DCHECK(args->length() == 1);
3419 Label done,
null,
function, non_function_constructor;
// Smis have no class — take the null path.
3424 __ JumpIfSmi(
r0, &
null);
// Function-typed objects branch straight to the "Function" answer.
3436 __ b(
eq, &
function);
3441 __ b(
eq, &
function);
// Constructor that is not a JSFunction -> generic "Object" answer.
3448 __ b(
ne, &non_function_constructor);
// r0 <- "Function".
3458 __ LoadRoot(
r0, Heap::kFunction_stringRootIndex);
3462 __ bind(&non_function_constructor);
// r0 <- "Object".
3463 __ LoadRoot(
r0, Heap::kObject_stringRootIndex);
// Null path: r0 <- null.
3468 __ LoadRoot(
r0, Heap::kNullValueRootIndex);
// Inlined %_SubString(string, from, to): delegates to SubStringStub
// (argument evaluation and stub call elided in this excerpt).
3477 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3479 SubStringStub stub(isolate());
3480 ZoneList<Expression*>* args = expr->arguments();
3481 DCHECK(args->length() == 3);
// Inlined %_RegExpExec(regexp, subject, index, lastMatchInfo): delegates
// to RegExpExecStub (argument evaluation and stub call elided).
3490 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3492 RegExpExecStub stub(isolate());
3493 ZoneList<Expression*>* args = expr->arguments();
3494 DCHECK(args->length() == 4);
// Inlined %_ValueOf(x): Smis are their own value; for wrapper objects the
// boxed value is loaded (load elided in this excerpt).
3504 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3505 ZoneList<Expression*>* args = expr->arguments();
3506 DCHECK(args->length() == 1);
// A Smi is already the result.
3511 __ JumpIfSmi(
r0, &done);
// Inlined %_DateField(date, index): reads a field from a JSDate. Field 0
// (the time value) is read directly; other fields are only valid while
// the date cache stamp matches, otherwise a C function recomputes them.
// Throws kThrowNotDateError if the receiver is not a JSDate.
3521 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3522 ZoneList<Expression*>* args = expr->arguments();
3523 DCHECK(args->length() == 2);
// The field index must be a literal Smi.
3525 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3529 Label runtime, done, not_date_object;
3530 Register
object =
r0;
3531 Register result =
r0;
3532 Register scratch0 =
r9;
3533 Register scratch1 =
r1;
// Reject non-heap-objects and non-JSDate instance types.
3535 __ JumpIfSmi(
object, &not_date_object);
3537 __ b(
ne, &not_date_object);
// Index 0 is the raw time value — always safe to read directly.
3539 if (index->value() == 0) {
// Cached fields are valid only if the isolate's date cache stamp
// still matches the stamp stored in the JSDate object.
3544 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3545 __ mov(scratch1, Operand(stamp));
3548 __ cmp(scratch1, scratch0);
// Slow path: recompute the field via the C date-field function.
3555 __ PrepareCallCFunction(2, scratch1);
3556 __ mov(
r1, Operand(index));
3557 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3561 __ bind(&not_date_object);
3562 __ CallRuntime(Runtime::kThrowNotDateError, 0);
// Inlined %_OneByteSeqStringSetChar(string, index, value): stores a char
// into a sequential one-byte string in place. Debug builds verify that
// index and value are Smis and that the string/index/value combination
// is valid for a one-byte sequential string.
3568 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3569 ZoneList<Expression*>* args = expr->arguments();
3572 Register
string =
r0;
3573 Register index =
r1;
3574 Register value =
r2;
// index and value were pushed by the (elided) argument evaluation.
3579 __ Pop(index, value);
3581 if (FLAG_debug_code) {
3583 __ Check(
eq, kNonSmiValue);
3585 __ Check(
eq, kNonSmiIndex);
// Untag only for the range/type check, then retag to keep the
// register contents consistent for the store below.
3586 __ SmiUntag(index, index);
3588 __ EmitSeqStringSetCharCheck(
string, index, value, one_byte_seq_type);
3589 __ SmiTag(index, index);
3592 __ SmiUntag(value, value);
// Inlined %_TwoByteSeqStringSetChar(string, index, value): identical to
// the one-byte variant above, but checks/stores a two-byte sequential
// string element.
3601 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3602 ZoneList<Expression*>* args = expr->arguments();
3605 Register
string =
r0;
3606 Register index =
r1;
3607 Register value =
r2;
3612 __ Pop(index, value);
3614 if (FLAG_debug_code) {
3616 __ Check(
eq, kNonSmiValue);
3618 __ Check(
eq, kNonSmiIndex);
// Untag for the validity check, retag afterwards.
3619 __ SmiUntag(index, index);
3621 __ EmitSeqStringSetCharCheck(
string, index, value, two_byte_seq_type);
3622 __ SmiTag(index, index);
3625 __ SmiUntag(value, value);
// Inlined %_MathPow(base, exponent): delegates to the MathPow stub
// (argument evaluation and stub call elided in this excerpt).
3636 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3638 ZoneList<Expression*>* args = expr->arguments();
3639 DCHECK(args->length() == 2);
// Inlined %_SetValueOf(wrapper, value): stores the value into a wrapper
// object's value field and emits the write barrier. Smi receivers are
// skipped (nothing to store into).
3648 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3649 ZoneList<Expression*>* args = expr->arguments();
3650 DCHECK(args->length() == 2);
// Smi receiver: no wrapper field exists; result handling elided.
3657 __ JumpIfSmi(
r1, &done);
// Write barrier for the stored value (arguments elided in excerpt).
3668 __ RecordWriteField(
// Inlined %_NumberToString(n): delegates to NumberToStringStub
// (argument evaluation and stub call elided in this excerpt).
3676 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3677 ZoneList<Expression*>* args = expr->arguments();
3682 NumberToStringStub stub(isolate());
// Inlined %_StringCharFromCode(code): converts a char code to a
// one-character string via StringCharFromCodeGenerator; the fast path is
// emitted inline, the slow path uses a no-op runtime call helper.
3688 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3689 ZoneList<Expression*>* args = expr->arguments();
3690 DCHECK(args->length() == 1);
// code in r0, result in r1.
3694 StringCharFromCodeGenerator generator(
r0,
r1);
3695 generator.GenerateFast(
masm_);
3698 NopRuntimeCallHelper call_helper;
3699 generator.GenerateSlow(
masm_, call_helper);
// Inlined %_StringCharCodeAt(string, index): fast path via
// StringCharCodeAtGenerator; out-of-range indices yield NaN and a
// non-Smi index yields undefined (matching the generic behavior).
3706 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3707 ZoneList<Expression*>* args = expr->arguments();
3708 DCHECK(args->length() == 2);
3712 Register
object =
r1;
3713 Register index =
r0;
3714 Register result =
r3;
3718 Label need_conversion;
3719 Label index_out_of_range;
3721 StringCharCodeAtGenerator generator(
object,
3726 &index_out_of_range,
3728 generator.GenerateFast(
masm_);
// Out of range: charCodeAt returns NaN.
3731 __ bind(&index_out_of_range);
3734 __ LoadRoot(result, Heap::kNanValueRootIndex);
// Index needs conversion: produce undefined (no conversion inline).
3737 __ bind(&need_conversion);
3740 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3743 NopRuntimeCallHelper call_helper;
3744 generator.GenerateSlow(
masm_, call_helper);
// Inlined %_StringCharAt(string, index): like EmitStringCharCodeAt but
// yields a one-character string; out-of-range indices yield the empty
// string.
3751 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3752 ZoneList<Expression*>* args = expr->arguments();
3753 DCHECK(args->length() == 2);
3757 Register
object =
r1;
3758 Register index =
r0;
3759 Register scratch =
r3;
3760 Register result =
r0;
3764 Label need_conversion;
3765 Label index_out_of_range;
3767 StringCharAtGenerator generator(
object,
3773 &index_out_of_range,
3775 generator.GenerateFast(
masm_);
// Out of range: charAt returns the empty string.
3778 __ bind(&index_out_of_range);
3781 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3784 __ bind(&need_conversion);
3790 NopRuntimeCallHelper call_helper;
3791 generator.GenerateSlow(
masm_, call_helper);
// Inlined %_StringAdd(a, b): string concatenation (stub call elided in
// this excerpt).
3798 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3799 ZoneList<Expression*>* args = expr->arguments();
// Inlined %_StringCompare(a, b): delegates to StringCompareStub
// (argument evaluation and stub call elided in this excerpt).
3811 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3812 ZoneList<Expression*>* args = expr->arguments();
3817 StringCompareStub stub(isolate());
// Inlined %_CallFunction(receiver, ...args, function): pushes receiver
// and arguments, then invokes the function directly when it is a
// JSFunction, falling back to Runtime::kCall otherwise.
3823 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3824 ZoneList<Expression*>* args = expr->arguments();
3825 DCHECK(args->length() >= 2);
// All but the last two runtime-call args are the JS call arguments;
// the loop also pushes the receiver (hence arg_count + 1 iterations).
3827 int arg_count = args->length() - 2;
3828 for (
int i = 0;
i < arg_count + 1;
i++) {
3833 Label runtime, done;
// Non-JSFunction targets take the generic runtime path.
3835 __ JumpIfSmi(
r0, &runtime);
3841 ParameterCount count(arg_count);
3848 __ CallRuntime(Runtime::kCall, args->length());
// Inlined %_RegExpConstructResult(length, index, input): delegates to
// RegExpConstructResultStub (stub call elided in this excerpt).
3855 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3856 RegExpConstructResultStub stub(isolate());
3857 ZoneList<Expression*>* args = expr->arguments();
3858 DCHECK(args->length() == 3);
// Inlined %_GetFromCache(cacheId, key): probes the per-context JSFunction
// result cache; on a miss, falls back to Runtime::kGetFromCache. Aborts
// in debug mode if the cache id does not exist in the native context.
3869 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3870 ZoneList<Expression*>* args = expr->arguments();
// The cache id must be a literal Smi known at compile time.
3873 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3875 Handle<FixedArray> jsfunction_result_caches(
3876 isolate()->native_context()->jsfunction_result_caches());
// Unknown cache id: abort (debug) and produce undefined.
3877 if (jsfunction_result_caches->length() <= cache_id) {
3878 __ Abort(kAttemptToUseUndefinedCache);
3879 __ LoadRoot(
r0, Heap::kUndefinedValueRootIndex);
3887 Register cache =
r1;
3895 Label done, not_found;
// Probe miss: key does not match the cached entry.
3903 __ b(
ne, &not_found);
3908 __ bind(&not_found);
3911 __ CallRuntime(Runtime::kGetFromCache, 2);
// Inlined %_HasCachedArrayIndex(string): tests the string hash field's
// cached-array-index bit (bit test elided in excerpt; eq branch).
3918 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3919 ZoneList<Expression*>* args = expr->arguments();
3922 Label materialize_true, materialize_false;
3923 Label* if_true =
NULL;
3924 Label* if_false =
NULL;
3925 Label* fall_through =
NULL;
3926 context()->PrepareTest(&materialize_true, &materialize_false,
3927 &if_true, &if_false, &fall_through);
3932 Split(
eq, if_true, if_false, fall_through);
3934 context()->Plug(if_true, if_false);
// Inlined %_GetCachedArrayIndex(string): extracts the array index cached
// in the string's hash field into r0.
3938 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3939 ZoneList<Expression*>* args = expr->arguments();
3940 DCHECK(args->length() == 1);
// Debug check: the argument must be a string.
3943 __ AssertString(
r0);
3946 __ IndexFromHash(
r0,
r0);
// Inlined fast path for Array.prototype.join on one-byte strings:
// validates the array and separator, sums element string lengths
// (checking for Smi overflow), allocates the result string once, then
// copies elements with one of three specialized loops (empty separator,
// single-character separator, long separator). Any validation failure
// jumps to `bailout`, which falls back to the generic implementation.
// NOTE(review): many interior lines are elided in this excerpt; comments
// annotate only the visible instructions.
3952 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
3953 Label bailout, done, one_char_separator, long_separator, non_trivial_array,
3954 not_size_one_array, loop, empty_separator_loop, one_char_separator_loop,
3955 one_char_separator_loop_entry, long_separator_loop;
3956 ZoneList<Expression*>* args = expr->arguments();
3957 DCHECK(args->length() == 2);
// Register assignments; several start as no_reg and are bound later
// once their source register is free (e.g. result_pos reuses
// array_length below).
3962 Register array =
r0;
3963 Register elements =
no_reg;
3964 Register result =
no_reg;
3965 Register separator =
r1;
3966 Register array_length =
r2;
3967 Register result_pos =
no_reg;
3968 Register string_length =
r3;
3969 Register
string =
r4;
3970 Register element =
r5;
3971 Register elements_end =
r6;
3972 Register scratch =
r9;
// The receiver must be a heap object with fast elements.
3978 __ JumpIfSmi(array, &bailout);
3983 __ CheckFastElements(scratch, array_length, &bailout);
// Untag the length and set flags; a zero-length array returns "".
3987 __ SmiUntag(array_length,
SetCC);
3988 __ b(
ne, &non_trivial_array);
3989 __ LoadRoot(
r0, Heap::kempty_stringRootIndex);
3992 __ bind(&non_trivial_array);
// Accumulate total string length starting at zero.
4001 __ mov(string_length, Operand::Zero());
4014 __ cmp(array_length, Operand::Zero());
4015 __ Assert(
gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
// Per-element validation: each element must be a sequential
// one-byte string; otherwise bail out.
4019 __ JumpIfSmi(
string, &bailout);
4022 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch, scratch, &bailout);
// Add this element's length; SetCC so overflow can be detected.
4024 __ add(string_length, string_length, Operand(scratch),
SetCC);
4026 __ cmp(element, elements_end);
// Single-element arrays need no separator handling at all.
4030 __ cmp(array_length, Operand(1));
4031 __ b(
ne, &not_size_one_array);
4035 __ bind(&not_size_one_array);
// The separator must also be a sequential one-byte string.
4044 __ JumpIfSmi(separator, &bailout);
4047 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch, scratch, &bailout);
// Total length = element lengths + separator length * (n - 1).
// smull + the following checks guard against 32-bit overflow.
4053 __ sub(string_length, string_length, Operand(scratch));
4054 __ smull(scratch,
ip, array_length, scratch);
// High word of the product must be zero (no overflow)...
4057 __ cmp(
ip, Operand::Zero());
// ...and the low word must fit in a non-negative Smi.
4059 __ tst(scratch, Operand(0x80000000));
4061 __ add(string_length, string_length, Operand(scratch),
SetCC);
4063 __ SmiUntag(string_length);
// One allocation for the whole result string.
4076 __ AllocateOneByteString(result, string_length, scratch,
// array_length is no longer needed; reuse it as the write cursor.
4084 result_pos = array_length;
// Choose a copy loop by separator length: 0, 1, or many chars.
4093 __ b(
eq, &one_char_separator);
4094 __ b(
gt, &long_separator);
// Copy loop for the empty separator: elements only.
4097 __ bind(&empty_separator_loop);
4106 __ SmiUntag(string_length);
4110 __ CopyBytes(
string, result_pos, string_length, scratch);
4111 __ cmp(element, elements_end);
4112 __ b(
lt, &empty_separator_loop);
// Copy loop for a one-character separator: store the char, then the
// element bytes; first iteration skips the separator.
4117 __ bind(&one_char_separator);
4122 __ jmp(&one_char_separator_loop_entry);
4124 __ bind(&one_char_separator_loop);
4135 __ bind(&one_char_separator_loop_entry);
4138 __ SmiUntag(string_length);
4142 __ CopyBytes(
string, result_pos, string_length, scratch);
4143 __ cmp(element, elements_end);
4144 __ b(
lt, &one_char_separator_loop);
// Copy loop for a multi-character separator: copy separator bytes,
// then element bytes, entering at &long_separator for the first
// element (no leading separator).
4150 __ bind(&long_separator_loop);
4159 __ SmiUntag(string_length);
4163 __ CopyBytes(
string, result_pos, string_length, scratch);
4165 __ bind(&long_separator);
4168 __ SmiUntag(string_length);
4172 __ CopyBytes(
string, result_pos, string_length, scratch);
4173 __ cmp(element, elements_end);
4174 __ b(
lt, &long_separator_loop);
// Bailout path: produce undefined so the generic join runs instead.
4179 __ LoadRoot(
r0, Heap::kUndefinedValueRootIndex);
// Inlined %_DebugIsActive(): loads the isolate's debug-is-active flag
// through an external reference (the load/result plug is elided here).
4185 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4186 DCHECK(expr->arguments()->length() == 0);
4187 ExternalReference debug_is_active =
4188 ExternalReference::debug_is_active_address(isolate());
4189 __ mov(
ip, Operand(debug_is_active));
// Generates code for a runtime call expression (%Foo(...) / %_Foo(...)).
// Intrinsics with an inline implementation dispatch to the Emit* helpers;
// JS runtime functions load the function from the JS builtins object and
// call it; everything else calls into the C++ runtime directly.
// NOTE(review): interior lines are elided in this excerpt.
4196 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
// Inline-intrinsic path (e.g. %_IsSmi) — handled by EmitInlineRuntimeCall.
4197 if (expr->function() !=
NULL &&
4199 Comment cmnt(
masm_,
"[ InlineRuntimeCall");
4204 Comment cmnt(
masm_,
"[ CallRuntime");
4205 ZoneList<Expression*>* args = expr->arguments();
4206 int arg_count = args->length();
// JS runtime function: loaded via a (possibly vector-based) load IC.
4208 if (expr->is_jsruntime()) {
4217 if (FLAG_vector_ics) {
// Feedback slot index for the vector load IC.
4219 Operand(
Smi::FromInt(expr->CallRuntimeFeedbackSlot())));
// Push arguments for the JS builtin call.
4231 int arg_count = args->length();
4232 for (
int i = 0;
i < arg_count;
i++) {
// Non-JS path: push arguments and call the C++ runtime function.
4248 for (
int i = 0;
i < arg_count;
i++) {
4253 __ CallRuntime(expr->function(), arg_count);
// Generates code for unary operators: delete, void, !, and typeof.
// NOTE(review): interior lines are elided in this excerpt; comments
// describe only the visible branches.
4259 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4260 switch (expr->op()) {
// delete expr: behavior depends on what is being deleted.
4261 case Token::DELETE: {
4262 Comment cmnt(
masm_,
"[ UnaryOperation (DELETE)");
4263 Property*
property = expr->expression()->AsProperty();
4264 VariableProxy* proxy = expr->expression()->AsVariableProxy();
// delete obj.prop / obj[key]: evaluate and delete via builtin.
4266 if (property !=
NULL) {
// delete variable:
4273 }
else if (proxy !=
NULL) {
4274 Variable* var = proxy->var();
// Global variables: delete through the runtime/builtin with the
// variable name in r1.
4278 if (var->IsUnallocated()) {
4280 __ mov(
r1, Operand(var->name()));
// Stack/context-allocated variables are not deletable; result is
// false except for `this` (which is true).
4285 }
else if (var->IsStackAllocated() || var->IsContextSlot()) {
4288 context()->Plug(var->is_this());
// Lookup-slot variables: delegate deletion to the runtime.
4293 __ mov(
r2, Operand(var->name()));
4295 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
// void expr: evaluate for effect, result is undefined.
4308 Comment cmnt(
masm_,
"[ UnaryOperation (VOID)");
4310 context()->Plug(Heap::kUndefinedValueRootIndex);
// !expr: evaluated according to the surrounding context kind.
4315 Comment cmnt(
masm_,
"[ UnaryOperation (NOT)");
// In a test context the labels are simply swapped — no code needed.
4320 }
else if (
context()->IsTest()) {
4324 test->false_label(),
4326 test->fall_through());
4327 context()->Plug(test->true_label(), test->false_label());
// Value contexts materialize true/false explicitly (labels swapped
// relative to the operand's truth value).
4334 Label materialize_true, materialize_false, done;
4339 __ bind(&materialize_true);
4341 __ LoadRoot(
r0, Heap::kTrueValueRootIndex);
4344 __ bind(&materialize_false);
4346 __ LoadRoot(
r0, Heap::kFalseValueRootIndex);
// typeof expr: evaluate as a stack value, then call the runtime.
4353 case Token::TYPEOF: {
4354 Comment cmnt(
masm_,
"[ UnaryOperation (TYPEOF)");
4355 { StackValueContext
context(
this);
4358 __ CallRuntime(Runtime::kTypeof, 1);
// Generates code for ++/-- (prefix and postfix) on variables and
// properties: loads the current value, tries an inline Smi fast path
// patched via JumpPatchSite, falls back to ToNumber + the binary-op IC,
// then stores the result back through the appropriate store IC.
// NOTE(review): interior lines are elided in this excerpt.
4369 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4370 DCHECK(expr->expression()->IsValidReferenceExpression());
4372 Comment cmnt(
masm_,
"[ CountOperation");
4379 Property* prop = expr->expression()->AsProperty();
// super.x++ needs the home object set up first.
4385 if (prop->IsSuperAccess()) {
4387 VisitSuperReference(prop->obj()->AsSuperReference());
// Variable target: evaluate into the accumulator.
4394 DCHECK(expr->expression()->AsVariableProxy()->var() !=
NULL);
4395 AccumulatorValueContext
context(
this);
// Postfix in a value context must save the pre-increment value.
4399 if (expr->is_postfix() && !
context()->IsEffect()) {
4427 Label stub_call, done;
// Patch site lets the IC system rewrite the Smi check later.
4428 JumpPatchSite patch_site(
masm_);
// +1 for ++, -1 for --.
4430 int count_value = expr->op() == Token::INC ? 1 : -1;
// Non-Smi operands take the slow (ToNumber + IC) path.
4433 patch_site.EmitJumpIfNotSmi(
r0, &slow);
4436 if (expr->is_postfix()) {
4441 switch (assign_type) {
// Slow path: convert the operand to a number first.
4462 ToNumberStub convert_stub(isolate());
4463 __ CallStub(&convert_stub);
4466 if (expr->is_postfix()) {
4471 switch (assign_type) {
// Generic path: binary-op IC performs the add/sub.
4486 __ bind(&stub_call);
4495 CallIC(code, expr->CountBinOpFeedbackId());
4496 patch_site.EmitPatchInfo();
// Store the result back according to the target kind.
4500 switch (assign_type) {
// Variable store; postfix-in-effect uses an effect context.
4502 if (expr->is_postfix()) {
4503 { EffectContext
context(
this);
// Named-property store: key is the literal property name.
4523 Operand(prop->key()->AsLiteral()->value()));
4527 if (expr->is_postfix()) {
// Keyed-property store via the keyed store IC.
4540 CodeFactory::KeyedStoreIC(isolate(),
strict_mode()).code();
4541 CallIC(ic, expr->CountStoreFeedbackId());
4543 if (expr->is_postfix()) {
4559 VariableProxy* proxy = expr->AsVariableProxy();
4560 if (proxy !=
NULL && proxy->var()->IsUnallocated()) {
4561 Comment cmnt(
masm_,
"[ Global variable");
4564 if (FLAG_vector_ics) {
4573 }
else if (proxy !=
NULL && proxy->var()->IsLookupSlot()) {
4574 Comment cmnt(
masm_,
"[ Lookup slot");
4582 __ mov(
r0, Operand(proxy->name()));
4584 __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
4597 Expression* sub_expr,
4598 Handle<String> check) {
4599 Label materialize_true, materialize_false;
4600 Label* if_true =
NULL;
4601 Label* if_false =
NULL;
4602 Label* fall_through =
NULL;
4603 context()->PrepareTest(&materialize_true, &materialize_false,
4604 &if_true, &if_false, &fall_through);
4606 { AccumulatorValueContext
context(
this);
4611 Factory* factory = isolate()->factory();
4613 __ JumpIfSmi(
r0, if_true);
4615 __ LoadRoot(
ip, Heap::kHeapNumberMapRootIndex);
4617 Split(
eq, if_true, if_false, fall_through);
4619 __ JumpIfSmi(
r0, if_false);
4625 Split(
eq, if_true, if_false, fall_through);
4627 __ JumpIfSmi(
r0, if_false);
4629 Split(
eq, if_true, if_false, fall_through);
4631 __ CompareRoot(
r0, Heap::kTrueValueRootIndex);
4633 __ CompareRoot(
r0, Heap::kFalseValueRootIndex);
4634 Split(
eq, if_true, if_false, fall_through);
4636 __ CompareRoot(
r0, Heap::kUndefinedValueRootIndex);
4638 __ JumpIfSmi(
r0, if_false);
4643 Split(
ne, if_true, if_false, fall_through);
4646 __ JumpIfSmi(
r0, if_false);
4651 Split(
eq, if_true, if_false, fall_through);
4653 __ JumpIfSmi(
r0, if_false);
4654 __ CompareRoot(
r0, Heap::kNullValueRootIndex);
4664 Split(
eq, if_true, if_false, fall_through);
4666 if (if_false != fall_through)
__ jmp(if_false);
4668 context()->Plug(if_true, if_false);
4672 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4673 Comment cmnt(
masm_,
"[ CompareOperation");
4682 Label materialize_true, materialize_false;
4683 Label* if_true =
NULL;
4684 Label* if_false =
NULL;
4685 Label* fall_through =
NULL;
4686 context()->PrepareTest(&materialize_true, &materialize_false,
4687 &if_true, &if_false, &fall_through);
4696 __ LoadRoot(
ip, Heap::kTrueValueRootIndex);
4698 Split(
eq, if_true, if_false, fall_through);
4701 case Token::INSTANCEOF: {
4708 Split(
eq, if_true, if_false, fall_through);
4718 JumpPatchSite patch_site(
masm_);
4719 if (inline_smi_code) {
4722 patch_site.EmitJumpIfNotSmi(
r2, &slow_case);
4725 __ bind(&slow_case);
4730 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
4731 CallIC(ic, expr->CompareOperationFeedbackId());
4732 patch_site.EmitPatchInfo();
4734 __ cmp(
r0, Operand::Zero());
4735 Split(cond, if_true, if_false, fall_through);
4741 context()->Plug(if_true, if_false);
4746 Expression* sub_expr,
4748 Label materialize_true, materialize_false;
4749 Label* if_true =
NULL;
4750 Label* if_false =
NULL;
4751 Label* fall_through =
NULL;
4752 context()->PrepareTest(&materialize_true, &materialize_false,
4753 &if_true, &if_false, &fall_through);
4757 if (expr->op() == Token::EQ_STRICT) {
4759 Heap::kNullValueRootIndex :
4760 Heap::kUndefinedValueRootIndex;
4761 __ LoadRoot(
r1, nil_value);
4763 Split(
eq, if_true, if_false, fall_through);
4766 CallIC(ic, expr->CompareOperationFeedbackId());
4767 __ cmp(
r0, Operand(0));
4768 Split(
ne, if_true, if_false, fall_through);
4770 context()->Plug(if_true, if_false);
4774 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4803 if (declaration_scope->is_global_scope() ||
4804 declaration_scope->is_module_scope()) {
4810 }
else if (declaration_scope->is_eval_scope()) {
4816 DCHECK(declaration_scope->is_function_scope());
4838 ExternalReference pending_message_obj =
4839 ExternalReference::address_of_pending_message_obj(isolate());
4840 __ mov(
ip, Operand(pending_message_obj));
4844 ExternalReference has_pending_message =
4845 ExternalReference::address_of_has_pending_message(isolate());
4846 __ mov(
ip, Operand(has_pending_message));
4852 ExternalReference pending_message_script =
4853 ExternalReference::address_of_pending_message_script(isolate());
4854 __ mov(
ip, Operand(pending_message_script));
4864 ExternalReference pending_message_script =
4865 ExternalReference::address_of_pending_message_script(isolate());
4866 __ mov(
ip, Operand(pending_message_script));
4871 ExternalReference has_pending_message =
4872 ExternalReference::address_of_has_pending_message(isolate());
4873 __ mov(
ip, Operand(has_pending_message));
4878 ExternalReference pending_message_obj =
4879 ExternalReference::address_of_pending_message_obj(isolate());
4880 __ mov(
ip, Operand(pending_message_obj));
4895 #define __ ACCESS_MASM(masm())
4899 int* context_length) {
4906 __ Drop(*stack_depth);
4907 if (*context_length > 0) {
4913 __ bl(finally_entry_);
4916 *context_length = 0;
4926 if (!FLAG_enable_ool_constant_pool) {
4963 return load_address;
4969 BackEdgeState target_state,
4970 Code* replacement_code) {
4971 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(
pc);
4973 CodePatcher patcher(branch_address, 1);
4974 switch (target_state) {
4998 kProfileCounterResetSequenceLength;
4999 patcher.masm()->b(branch_offset,
pl);
5002 case ON_STACK_REPLACEMENT:
5003 case OSR_AFTER_STACK_CHECK:
5020 patcher.masm()->nop();
5026 replacement_code->entry());
5028 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5029 unoptimized_code, pc_immediate_load_address, replacement_code);
5035 Code* unoptimized_code,
5039 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(
pc);
5042 pc_immediate_load_address, unoptimized_code);
5045 DCHECK(interrupt_address ==
5046 isolate->builtins()->InterruptCheck()->entry());
5052 if (interrupt_address ==
5053 isolate->builtins()->OnStackReplacement()->entry()) {
5054 return ON_STACK_REPLACEMENT;
5057 DCHECK(interrupt_address ==
5058 isolate->builtins()->OsrAfterStackCheck()->entry());
5059 return OSR_AFTER_STACK_CHECK;
static const int kLengthOffset
static const int kInstrSize
static Address target_address_at(Address pc, ConstantPoolArray *constant_pool)
static bool IsNop(Instr instr, int type=NON_MARKING_NOP)
int InstructionsGeneratedSince(Label *label)
void CheckConstPool(bool force_emit, bool require_jump)
friend class BlockConstPoolScope
static bool IsMovImmed(Instr instr)
static bool IsBranch(Instr instr)
static bool IsMovW(Instr instr)
static bool IsMovT(Instr instr)
static const int kJSReturnSequenceInstructions
static void set_target_address_at(Address pc, ConstantPoolArray *constant_pool, Address target, ICacheFlushMode icache_flush_mode=FLUSH_ICACHE_IF_NEEDED)
static bool IsOrrImmed(Instr instr)
static bool IsLdrPpRegOffset(Instr instr)
static bool IsLdrPcImmediateOffset(Instr instr)
static bool IsBlxIp(Instr instr)
static bool IsLdrPpImmediateOffset(Instr instr)
int SizeOfCodeGeneratedSince(Label *label)
static void PatchAt(Code *unoptimized_code, Address pc, BackEdgeState target_state, Code *replacement_code)
static BackEdgeState GetBackEdgeState(Isolate *isolate, Code *unoptimized_code, Address pc_after)
static BailoutId Declarations()
static BailoutId FunctionEntry()
static Handle< Code > initialize_stub(Isolate *isolate, int argc, CallICState::CallType call_type)
static const int kValueOffset
static bool RecordPositions(MacroAssembler *masm, int pos, bool right_here=false)
static const int kConstantPoolOffset
static const int kHeaderSize
static Condition ComputeCondition(Token::Value op)
static Handle< Code > GetUninitialized(Isolate *isolate, NilValue nil)
bool IsOptimizable() const
bool ShouldSelfOptimize()
void AddNoFrameRange(int from, int to)
static bool IsCompileTimeValue(Expression *expression)
static Handle< SharedFunctionInfo > BuildFunctionInfo(FunctionLiteral *node, Handle< Script > script, CompilationInfo *outer)
@ STRING_FUNCTION_PROTOTYPE_MAP_INDEX
@ JSFUNCTION_RESULT_CACHES_INDEX
static int SlotOffset(int index)
static bool IsSupported(CpuFeature f)
static const int kDescriptorSize
static const int kEnumCacheBridgeCacheOffset
static const int kEnumCacheOffset
static const int kFirstOffset
static const int kMaximumClonedProperties
static const int kLengthOffset
static const int kHeaderSize
static int OffsetOfElementAt(int index)
virtual void DropAndPlug(int count, Register reg) const
virtual void Plug(bool flag) const
virtual void DropAndPlug(int count, Register reg) const
virtual void Plug(bool flag) const
virtual void DropAndPlug(int count, Register reg) const
virtual void Plug(bool flag) const
virtual void DropAndPlug(int count, Register reg) const
static const TestContext * cast(const ExpressionContext *context)
virtual void Plug(bool flag) const
virtual NestedStatement * Exit(int *stack_depth, int *context_length)
static const int kMaxBackEdgeWeight
void EmitAccessor(Expression *expression)
void Split(Condition cc, Label *if_true, Label *if_false, Label *fall_through)
void EmitBinaryOp(BinaryOperation *expr, Token::Value op, OverwriteMode mode)
void EmitLiteralCompareTypeof(Expression *expr, Expression *sub_expr, Handle< String > check)
void VisitDeclarations(ZoneList< Declaration * > *declarations)
void PrepareForBailoutForId(BailoutId id, State state)
void EmitNewClosure(Handle< SharedFunctionInfo > info, bool pretenure)
void GetVar(Register destination, Variable *var)
static Register context_register()
ZoneList< Handle< Object > > * globals_
void VisitForControl(Expression *expr, Label *if_true, Label *if_false, Label *fall_through)
void PushFunctionArgumentForContextAllocation()
void CallLoadIC(ContextualMode mode, TypeFeedbackId id=TypeFeedbackId::None())
bool generate_debug_code_
void RecordBackEdge(BailoutId osr_ast_id)
MemOperand StackOperand(Variable *var)
void EmitVariableLoad(VariableProxy *proxy)
friend class NestedStatement
void EmitReturnSequence()
void SetVar(Variable *var, Register source, Register scratch0, Register scratch1)
MemOperand ContextSlotOperandCheckExtensions(Variable *var, Label *slow)
void EmitKeyedPropertyAssignment(Assignment *expr)
void DeclareGlobals(Handle< FixedArray > pairs)
void EmitResolvePossiblyDirectEval(int arg_count)
void VisitForStackValue(Expression *expr)
void EmitKeyedCallWithLoadIC(Call *expr, Expression *key)
void EmitProfilingCounterReset()
void EmitKeyedPropertyLoad(Property *expr)
void EmitDebugCheckDeclarationContext(Variable *variable)
void EmitUnwindBeforeReturn()
FunctionLiteral * function()
void EmitNamedSuperPropertyLoad(Property *expr)
Handle< Cell > profiling_counter_
bool TryLiteralCompare(CompareOperation *compare)
void increment_loop_depth()
void SetStatementPosition(Statement *stmt)
Handle< FixedArray > FeedbackVector()
void StoreToFrameField(int frame_offset, Register value)
void LoadContextField(Register dst, int context_index)
const ExpressionContext * context()
void EmitNamedPropertyLoad(Property *expr)
void EmitBackEdgeBookkeeping(IterationStatement *stmt, Label *back_edge_target)
void DoTest(Expression *condition, Label *if_true, Label *if_false, Label *fall_through)
void VisitForAccumulatorValue(Expression *expr)
void PrepareForBailout(Expression *node, State state)
void CallStoreIC(TypeFeedbackId id=TypeFeedbackId::None())
MemOperand VarOperand(Variable *var, Register scratch)
void DeclareModules(Handle< FixedArray > descriptions)
void EmitGeneratorResume(Expression *generator, Expression *value, JSGeneratorObject::ResumeMode resume_mode)
void VisitForEffect(Expression *expr)
void EmitAssignment(Expression *expr)
void EmitCall(Call *expr, CallICState::CallType=CallICState::FUNCTION)
void SetFunctionPosition(FunctionLiteral *fun)
int DeclareGlobalsFlags()
void EmitLoadHomeObject(SuperReference *expr)
void EmitStoreToStackLocalOrContextSlot(Variable *var, MemOperand location)
void decrement_loop_depth()
void EmitInlineSmiBinaryOp(BinaryOperation *expr, Token::Value op, OverwriteMode mode, Expression *left, Expression *right)
void EmitLiteralCompareNil(CompareOperation *expr, Expression *sub_expr, NilValue nil)
void EmitVariableAssignment(Variable *var, Token::Value op)
void CallIC(Handle< Code > code, TypeFeedbackId id=TypeFeedbackId::None())
void EmitCreateIteratorResult(bool done)
void EmitLoadGlobalCheckExtensions(VariableProxy *proxy, TypeofState typeof_state, Label *slow)
void EmitCallWithLoadIC(Call *expr)
void EnsureSlotContainsAllocationSite(int slot)
void PrepareForBailoutBeforeSplit(Expression *expr, bool should_normalize, Label *if_true, Label *if_false)
Handle< Script > script()
void EmitInlineRuntimeCall(CallRuntime *expr)
void SetSourcePosition(int pos)
void EmitSuperCallWithLoadIC(Call *expr)
void EmitNamedSuperPropertyAssignment(Assignment *expr)
void EmitNamedPropertyAssignment(Assignment *expr)
Handle< FixedArray > handler_table_
void RecordJSReturnSite(Call *call)
static Register result_register()
void VisitForTypeofValue(Expression *expr)
void EmitDynamicLookupFastCase(VariableProxy *proxy, TypeofState typeof_state, Label *slow, Label *done)
bool ShouldInlineSmiCase(Token::Value op)
Handle< FixedArray > handler_table()
void EmitProfilingCounterDecrement(int delta)
void VisitInDuplicateContext(Expression *expr)
static const int kBuiltinsOffset
static const int kNativeContextOffset
static const int kGlobalProxyOffset
static const int kMapOffset
static const int kLengthOffset
static const int kValueOffset
static const int kCacheStampOffset
static const int kFingerOffset
static const int kSharedFunctionInfoOffset
static const int kLiteralsOffset
static const int kCodeEntryOffset
static const int kResultDonePropertyOffset
static const int kFunctionOffset
static const int kGeneratorClosed
static const int kResultValuePropertyOffset
static const int kGeneratorExecuting
static const int kOperandStackOffset
static const int kReceiverOffset
static const int kContextOffset
static const int kContinuationOffset
static const int kInitialMaxFastElementArray
static const int kPropertiesOffset
static const int kElementsOffset
static const int kInObjectFieldCount
static const int kValueOffset
static const int kLocal0Offset
static const int kFunctionOffset
static const Register ReceiverRegister()
static const Register NameRegister()
static int CallSize(Register target, Condition cond=al)
Handle< Object > CodeObject()
static const int kIsUndetectable
static const int kBitFieldOffset
static const int kStringWrapperSafeForDefaultValueOf
static const int kInstanceTypeOffset
static const int kBitField2Offset
static const int kConstructorOffset
static const int kPrototypeOffset
static int32_t & int32_at(Address addr)
static const int kHashFieldOffset
static void MaybeCallEntryHook(MacroAssembler *masm)
static const int kNoPosition
Scope * outer_scope() const
int num_parameters() const
VariableDeclaration * function() const
int ContextChainLength(Scope *scope)
void VisitIllegalRedeclaration(AstVisitor *visitor)
Scope * DeclarationScope()
Variable * arguments() const
Variable * parameter(int index) const
static const int kHeaderSize
static const int kFormalParameterCountOffset
static const int kInstanceClassNameOffset
static Smi * FromInt(int value)
static bool IsValid(intptr_t value)
static const int kContextOffset
static const int kFixedFrameSizeFromFp
static const int kContextOffset
static const int kExpressionsOffset
static const int kCallerSPOffset
static const int kMarkerOffset
static const int kCallerFPOffset
static const Register ReceiverRegister()
static const Register NameRegister()
static const Register ValueRegister()
static const unsigned int kContainsCachedArrayIndexMask
static const int kLengthOffset
bool Equals(String *other)
static Handle< Code > GetUninitialized(Isolate *isolate)
static TypeFeedbackId None()
static Handle< Object > MegamorphicSentinel(Isolate *isolate)
static const Register SlotRegister()
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf map
enable harmony numeric enable harmony object literal extensions Optimize object size
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long mode(MIPS only)") DEFINE_BOOL(enable_always_align_csp
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction pairs(ARM only)") DEFINE_BOOL(enable_unaligned_accesses
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be NULL
#define POINTER_SIZE_ALIGN(value)
#define DCHECK_NE(v1, v2)
#define DCHECK(condition)
#define DCHECK_EQ(v1, v2)
static int Push(SpecialRPOStackFrame *stack, int depth, BasicBlock *child, int unvisited)
Vector< const char > CStrVector(const char *data)
MemOperand ContextOperand(Register context, int index)
@ DONT_TRACK_ALLOCATION_SITE
kFeedbackVectorOffset kHiddenPrototypeBit kReadOnlyPrototypeBit kDoNotCacheBit kIsTopLevelBit kAllowLazyCompilationWithoutContext has_duplicate_parameters
static LifetimePosition Min(LifetimePosition a, LifetimePosition b)
bool IsDeclaredVariableMode(VariableMode mode)
const uint32_t kTwoByteStringTag
const LowDwVfpRegister d0
const int kPointerSizeLog2
@ LAST_NONCALLABLE_SPEC_OBJECT_TYPE
@ NUM_OF_CALLABLE_SPEC_OBJECT_TYPES
@ FIRST_NONCALLABLE_SPEC_OBJECT_TYPE
const uint32_t kOneByteStringTag
MemOperand FieldMemOperand(Register object, int offset)
bool IsImmutableVariableMode(VariableMode mode)
Condition NegateCondition(Condition cond)
static LifetimePosition Max(LifetimePosition a, LifetimePosition b)
@ NEVER_INLINE_TARGET_ADDRESS
kFeedbackVectorOffset flag
MemOperand GlobalObjectOperand()
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
const intptr_t kSmiTagMask
@ RECORD_CONSTRUCTOR_TARGET
void CopyBytes(uint8_t *target, uint8_t *source)
bool IsFastObjectElementsKind(ElementsKind kind)
Debugger support for the V8 JavaScript engine.