#if V8_TARGET_ARCH_MIPS

#define __ ACCESS_MASM(masm_)

explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
  info_emitted_ = false;

DCHECK(patch_site_.is_bound() == info_emitted_);

void EmitJumpIfNotSmi(Register reg, Label* target) {
  DCHECK(!patch_site_.is_bound() && !info_emitted_);
  __ bind(&patch_site_);
  __ BranchShort(target, eq, at, Operand(zero_reg));

void EmitJumpIfSmi(Register reg, Label* target) {
  DCHECK(!patch_site_.is_bound() && !info_emitted_);
  __ bind(&patch_site_);
  __ BranchShort(target, ne, at, Operand(zero_reg));

void EmitPatchInfo() {
  if (patch_site_.is_bound()) {
    int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
    __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
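    // Note: the andi above writes to zero_reg, so it is a no-op at run time.
    // It is emitted purely as patch metadata: the register code and the low
    // bits of the immediate encode the distance back to the patchable branch,
    // which the IC patching machinery decodes to find and rewrite the inlined
    // smi check.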

MacroAssembler* masm_;

CompilationInfo* info = info_;
    isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
    Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));

Comment cmnt(masm_, "[ function compiled by full code generator");

if (strlen(FLAG_stop_at) > 0 &&
    info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {

if (info->strict_mode() == SLOPPY && !info->is_native()) {
  int receiver_offset = info->scope()->num_parameters() * kPointerSize;
  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
  __ Branch(&ok, ne, a2, Operand(at));
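  // Sloppy-mode functions receive the global proxy in place of an undefined
  // receiver; the branch above skips the fix-up when the caller already
  // passed a real receiver object.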

FrameScope frame_scope(masm_, StackFrame::MANUAL);

__ Prologue(info->IsCodePreAgingActive());

{ Comment cmnt(masm_, "[ Allocate locals");
  int locals_count = info->scope()->num_stack_slots();
  DCHECK(!info->function()->is_generator() || locals_count == 0);
  if (locals_count > 0) {
    if (locals_count >= 128) {
      __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
      __ Branch(&ok, hs, t5, Operand(a2));
    __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
    int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
    if (locals_count >= kMaxPushes) {
      int loop_iterations = locals_count / kMaxPushes;
      __ li(a2, Operand(loop_iterations));
      __ bind(&loop_header);
      for (int i = 0; i < kMaxPushes; i++) {
      __ Subu(a2, a2, Operand(1));
      __ Branch(&loop_header, ne, a2, Operand(zero_reg));
    int remaining = locals_count % kMaxPushes;
    for (int i = 0; i < remaining; i++) {
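    // Locals are initialized to undefined in batches of kMaxPushes (smaller
    // when optimizing for size): the counted loop above handles the full
    // batches and this unrolled loop pushes the remainder.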

bool function_in_register = true;

if (heap_slots > 0) {
  Comment cmnt(masm_, "[ Allocate context");
  bool need_write_barrier = true;
  if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
    __ Push(info->scope()->GetScopeInfo());
    __ CallRuntime(Runtime::kNewGlobalContext, 2);
  } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
    FastNewContextStub stub(isolate(), heap_slots);
    need_write_barrier = false;
    __ CallRuntime(Runtime::kNewFunctionContext, 1);
  function_in_register = false;
  int num_parameters = info->scope()->num_parameters();
  for (int i = 0; i < num_parameters; i++) {
    if (var->IsContextSlot()) {
      if (need_write_barrier) {
        __ RecordWriteContextSlot(
      } else if (FLAG_debug_code) {
        __ JumpIfInNewSpace(cp, a0, &done);
        __ Abort(kExpectedNewSpaceObject);
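        // A context allocated by FastNewContextStub lives in new space, so
        // copying parameters into it needs no write barrier
        // (need_write_barrier is false); debug builds assert that assumption
        // with the check above instead.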

if (arguments != NULL) {
  Comment cmnt(masm_, "[ Allocate arguments object");
  if (!function_in_register) {
  int num_parameters = info->scope()->num_parameters();
  ArgumentsAccessStub stub(isolate(), type);
  SetVar(arguments, v0, a1, a2);

__ CallRuntime(Runtime::kTraceEnter, 0);

if (scope()->HasIllegalRedeclaration()) {
  Comment cmnt(masm_, "[ Declarations");

{ Comment cmnt(masm_, "[ Declarations");
  if (scope()->is_function_scope() && scope()->function() != NULL) {
    VisitVariableDeclaration(function);

{ Comment cmnt(masm_, "[ Stack check");
  __ LoadRoot(at, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(at));
  Handle<Code> stack_check = isolate()->builtins()->StackCheck();
  PredictableCodeSizeScope predictable(masm_,
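  // PredictableCodeSizeScope pins the instruction count of the upcoming call
  // sequence; this appears to be so that the patching code used for
  // interrupt/OSR handling can rely on a fixed layout (an assumption based on
  // the surrounding context).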

{ Comment cmnt(masm_, "[ Body");
  VisitStatements(function()->body());

{ Comment cmnt(masm_, "[ return <undefined>;");
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);

__ mov(v0, zero_reg);

int reset_value = FLAG_interrupt_budget;
reset_value = FLAG_interrupt_budget >> 4;

    Label* back_edge_target) {
Comment cmnt(masm_, "[ Back edge bookkeeping");
DCHECK(back_edge_target->is_bound());
    Max(1, distance / kCodeSizeMultiplier));
__ slt(at, a3, zero_reg);
__ beq(at, zero_reg, &ok);
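// a3 holds the profiling counter after a weight scaled by the back-edge
// distance has been subtracted; once it goes negative, the InterruptCheck
// builtin runs, which is what eventually triggers on-stack replacement.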

Comment cmnt(masm_, "[ Return sequence");
__ CallRuntime(Runtime::kTraceExit, 1);
weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    Max(1, distance / kCodeSizeMultiplier));
__ Branch(&ok, ge, a3, Operand(zero_reg));
__ Call(isolate()->builtins()->InterruptCheck(),

Label check_exit_codesize;

DCHECK(var->IsStackAllocated() || var->IsContextSlot());
DCHECK(var->IsStackAllocated() || var->IsContextSlot());
DCHECK(var->IsStackAllocated() || var->IsContextSlot());

codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
codegen()->DoTest(this);

codegen()->PrepareForBailoutBeforeSplit(condition(),
if (index == Heap::kUndefinedValueRootIndex ||
    index == Heap::kNullValueRootIndex ||
    index == Heap::kFalseValueRootIndex) {
  if (false_label_ != fall_through_) __ Branch(false_label_);
} else if (index == Heap::kTrueValueRootIndex) {
  if (true_label_ != fall_through_) __ Branch(true_label_);
codegen()->DoTest(this);

    Handle<Object> lit) const {
codegen()->PrepareForBailoutBeforeSplit(condition(),
DCHECK(!lit->IsUndetectableObject());
if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
  if (false_label_ != fall_through_) __ Branch(false_label_);
} else if (lit->IsTrue() || lit->IsJSObject()) {
  if (true_label_ != fall_through_) __ Branch(true_label_);
} else if (lit->IsString()) {
  if (String::cast(*lit)->length() == 0) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
    if (true_label_ != fall_through_) __ Branch(true_label_);
} else if (lit->IsSmi()) {
  if (Smi::cast(*lit)->value() == 0) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
    if (true_label_ != fall_through_) __ Branch(true_label_);
codegen()->DoTest(this);

    Register reg) const {

    Register reg) const {

    Register reg) const {
if (count > 1) __ Drop(count - 1);

    Register reg) const {
codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
codegen()->DoTest(this);

    Label* materialize_false) const {
DCHECK(materialize_true == materialize_false);
__ bind(materialize_true);

    Label* materialize_true,
    Label* materialize_false) const {
__ bind(materialize_true);
__ bind(materialize_false);

    Label* materialize_true,
    Label* materialize_false) const {
__ bind(materialize_true);
__ LoadRoot(at, Heap::kTrueValueRootIndex);
__ bind(materialize_false);
__ LoadRoot(at, Heap::kFalseValueRootIndex);

    Label* materialize_false) const {
DCHECK(materialize_true == true_label_);
DCHECK(materialize_false == false_label_);

    flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
    flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
__ LoadRoot(at, value_root_index);

codegen()->PrepareForBailoutBeforeSplit(condition(),
if (true_label_ != fall_through_) __ Branch(true_label_);
if (false_label_ != fall_through_) __ Branch(false_label_);

    Label* fall_through) {
CallIC(ic, condition->test_id());
__ mov(at, zero_reg);
Split(ne, v0, Operand(at), if_true, if_false, fall_through);

    Label* fall_through) {
if (if_false == fall_through) {
  __ Branch(if_true, cc, lhs, rhs);
} else if (if_true == fall_through) {
  __ Branch(if_false, NegateCondition(cc), lhs, rhs);
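// Split emits at most one branch by exploiting fall-through: when one of the
// two targets immediately follows the emitted code, only the branch to the
// other target is needed (with the condition negated when if_true is the
// fall-through).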

DCHECK(var->IsStackAllocated());
if (var->IsParameter()) {

DCHECK(var->IsContextSlot() || var->IsStackAllocated());
if (var->IsContextSlot()) {
  __ LoadContext(scratch, context_chain_length);
__ lw(dest, location);

DCHECK(var->IsContextSlot() || var->IsStackAllocated());
DCHECK(!scratch0.is(src));
DCHECK(!scratch0.is(scratch1));
DCHECK(!scratch1.is(src));
__ sw(src, location);
if (var->IsContextSlot()) {
  __ RecordWriteContextSlot(scratch0,

    bool should_normalize,
if (should_normalize) __ Branch(&skip);
if (should_normalize) {
  __ LoadRoot(t0, Heap::kTrueValueRootIndex);
  Split(eq, a0, Operand(t0), if_true, if_false, NULL);

__ LoadRoot(t0, Heap::kWithContextMapRootIndex);
__ Check(ne, kDeclarationInWithContext,
__ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
__ Check(ne, kDeclarationInCatchContext,

void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ li(a2, Operand(variable->name()));
      __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
      __ mov(a0, zero_reg);
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
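      // Bindings that need initialization (let/const) are declared holding
      // the hole value; reading the hole later is what produces the
      // temporal-dead-zone ReferenceError.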

void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
      if (function.is_null()) return SetStackOverflow();
      Comment cmnt(masm_, "[ FunctionDeclaration");
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ RecordWriteContextSlot(cp,
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ li(a2, Operand(variable->name()));
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);

void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  DCHECK(variable->interface()->IsFrozen());
  Comment cmnt(masm_, "[ ModuleDeclaration");
  __ RecordWriteContextSlot(cp,
  Visit(declaration->module());

void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
      Comment cmnt(masm_, "[ ImportDeclaration");

void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {

__ CallRuntime(Runtime::kDeclareGlobals, 3);
__ CallRuntime(Runtime::kDeclareModules, 1);

void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;

  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();
    if (clause->is_default()) {
      default_clause = clause;

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);

    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      patch_site.EmitJumpIfNotSmi(a2, &slow_case);
      __ Branch(&next_test, ne, a1, Operand(a0));
      __ Branch(clause->body_target());
      __ bind(&slow_case);

        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();
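    // Each case label is compared with strict equality: the inline smi
    // comparison above handles the common case, and the CompareIC stub
    // (reached via slow_case) handles everything else. The patch site lets
    // the IC toggle the inline check as type feedback evolves.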
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(&next_test, ne, v0, Operand(at));
    __ Branch(clause->body_target());

    __ Branch(&next_test, ne, v0, Operand(zero_reg));
    __ Branch(clause->body_target());

  __ bind(&next_test);
  if (default_clause == NULL) {
    __ Branch(nested_statement.break_label());
    __ Branch(default_clause->body_target());

  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    VisitStatements(clause->statements());

  __ bind(nested_statement.break_label());

void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  int slot = stmt->ForInFeedbackSlot();
  ForIn loop_statement(this, stmt);

  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(at));
  Register null_value = t1;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(null_value));

  Label convert, done_convert;
  __ JumpIfSmi(a0, &convert);
  __ GetObjectType(a0, a1, a1);
  __ bind(&done_convert);

  __ GetObjectType(a0, a1, a1);
  __ CheckEnumCache(null_value, &call_runtime);
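  // CheckEnumCache verifies that the object and its entire prototype chain
  // (up to null) have valid enum caches and no enumerable elements; only then
  // can the cached enumeration keys be used directly below.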
  __ Branch(&use_cache);

  __ bind(&call_runtime);
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  __ LoadRoot(at, Heap::kMetaMapRootIndex);
  __ Branch(&fixed_array, ne, a2, Operand(at));

  Label no_descriptors;
  __ bind(&use_cache);
  __ EnumLength(a1, v0);
  __ LoadInstanceDescriptors(v0, a2);

  __ bind(&no_descriptors);

  __ bind(&fixed_array);
  __ GetObjectType(a2, a3, a3);
  __ bind(&non_proxy);

  __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));
  __ addu(t0, a2, t0);

  __ Branch(&update_each, eq, t0, Operand(a2));
  __ Branch(&update_each, eq, a2, Operand(zero_reg));
  __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg));

  __ bind(&update_each);
  { EffectContext context(this);
    Visit(stmt->body());

  __ bind(loop_statement.continue_label());
  __ bind(loop_statement.break_label());

void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");
  Iteration loop_statement(this, stmt);

  __ bind(loop_statement.continue_label());

  Label result_not_done;
      loop_statement.break_label(),
  __ bind(&result_not_done);
  Visit(stmt->body());
  __ jmp(loop_statement.continue_label());
  __ bind(loop_statement.break_label());

if (!FLAG_always_opt && !FLAG_prepare_always_opt &&
    scope()->is_function_scope() && info->num_literals() == 0) {
  FastNewClosureStub stub(isolate(), info->strict_mode(), info->kind());
  __ li(a2, Operand(info));
  __ li(a0, Operand(info));
  __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
                            : Heap::kFalseValueRootIndex);
  __ CallRuntime(Runtime::kNewClosure, 3);

void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");

Comment cmnt(masm_, "[ SuperReference ");

Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());

__ Branch(&done, ne, v0, Operand(isolate()->factory()->undefined_value()));
__ CallRuntime(Runtime::kThrowNonMethodError, 0);

Register current = cp;

if (s->num_heap_slots() > 0) {
  if (s->calls_sloppy_eval()) {
    __ Branch(slow, ne, temp, Operand(zero_reg));
  if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
  s = s->outer_scope();

if (s->is_eval_scope()) {
  if (!current.is(next)) {
    __ Move(next, current);
  __ LoadRoot(t0, Heap::kNativeContextMapRootIndex);
  __ Branch(&fast, eq, temp, Operand(t0));
  __ Branch(slow, ne, temp, Operand(zero_reg));
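// The loop above walks out through the context chain, jumping to the slow
// path as soon as a context might hold an extension object (introduced by a
// sloppy eval or a with statement), since such bindings cannot be resolved
// statically.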

if (FLAG_vector_ics) {

DCHECK(var->IsContextSlot());

if (s->num_heap_slots() > 0) {
  if (s->calls_sloppy_eval()) {
    __ Branch(slow, ne, temp, Operand(zero_reg));
  __ Branch(slow, ne, temp, Operand(zero_reg));

Variable* var = proxy->var();
Variable* local = var->local_if_not_shadowed();
if (local->mode() == LET || local->mode() == CONST ||
  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
  __ subu(at, v0, at);
  __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
  __ Movz(v0, a0, at);
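  // Branchless hole check: the subu leaves zero in at exactly when v0 holds
  // the hole, and Movz then substitutes undefined for the hole without a
  // branch.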
  __ Branch(done, ne, at, Operand(zero_reg));
  __ li(a0, Operand(var->name()));
  __ CallRuntime(Runtime::kThrowReferenceError, 1);

Variable* var = proxy->var();
switch (var->location()) {
    Comment cmnt(masm_, "[ Global variable");
    if (FLAG_vector_ics) {

    Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                             : "[ Stack variable");
    if (var->binding_needs_init()) {
      bool skip_init_check;
      if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
        skip_init_check = false;
            var->initializer_position() < proxy->position();

      if (!skip_init_check) {
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ subu(at, v0, at);
        if (var->mode() == LET || var->mode() == CONST) {
          __ Branch(&done, ne, at, Operand(zero_reg));
          __ li(a0, Operand(var->name()));
          __ CallRuntime(Runtime::kThrowReferenceError, 1);
        __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
        __ Movz(v0, a0, at);
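        // let/const bindings throw a ReferenceError when read while still
        // holding the hole; legacy (sloppy) const instead silently reads as
        // undefined, which is what the Movz above implements.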

    Comment cmnt(masm_, "[ Lookup variable");
    __ li(a1, Operand(var->name()));
    __ CallRuntime(Runtime::kLoadLookupSlot, 2);

void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  int literal_offset =
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&materialized, ne, t1, Operand(at));

  __ li(a2, Operand(expr->pattern()));
  __ li(a1, Operand(expr->flags()));
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);

  __ bind(&materialized);
  Label allocated, runtime_allocate;

  __ bind(&runtime_allocate);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);

  __ bind(&allocated);

if (expression == NULL) {
  __ LoadRoot(a1, Heap::kNullValueRootIndex);

void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  expr->BuildConstantProperties(isolate());
  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ li(a1, Operand(constant_properties));
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  int properties_count = constant_properties->length() / 2;
  if (expr->may_store_doubles() || expr->depth() > 1 ||
      masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements ||
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
    FastCloneShallowObjectStub stub(isolate(), properties_count);

  bool result_saved = false;

  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      result_saved = true;
    switch (property->kind()) {
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
      case ObjectLiteral::Property::COMPUTED:
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kSetProperty, 4);
      case ObjectLiteral::Property::PROTOTYPE:
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kSetPrototype, 2);
      case ObjectLiteral::Property::GETTER:
        accessor_table.lookup(key)->second->getter = value;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
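    // Getters and setters are collected into accessor_table first so that a
    // property with both halves is defined with a single
    // DefineAccessorPropertyUnchecked call in the loop below.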

  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);

  if (expr->has_function()) {
    __ CallRuntime(Runtime::kToFastProperties, 1);

void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  expr->BuildConstantElements(isolate());
  int flags = expr->depth() == 1
      ? ArrayLiteral::kShallowElements
      : ArrayLiteral::kNoFlags;

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  Handle<FixedArray> constant_elements = expr->constant_elements();
  DCHECK_EQ(2, constant_elements->length());
      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
  bool has_fast_elements =
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));

  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {

  __ li(a1, Operand(constant_elements));
    __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);

  bool result_saved = false;

  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    if (!result_saved) {
      result_saved = true;
    StoreArrayLiteralElementStub stub(isolate());

void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpression());

  Comment cmnt(masm_, "[ Assignment");

  Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())

  switch (assign_type) {
      if (expr->is_compound()) {
      if (expr->is_compound()) {

  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {

    AccumulatorValueContext context(this);

  switch (assign_type) {

void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");

  switch (expr->yield_kind()) {
    case Yield::kSuspend:
    case Yield::kInitial: {
      Label suspend, continuation, post_runtime, resume;
      __ bind(&continuation);
      __ Branch(&post_runtime, eq, sp, Operand(a1));
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
      __ bind(&post_runtime);

    case Yield::kFinal: {

    case Yield::kDelegating: {
      Label l_catch, l_try, l_suspend, l_continuation, l_resume;
      Label l_next, l_call;
      __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
      __ LoadRoot(load_name, Heap::kthrow_stringRootIndex);
      __ Push(load_name, a3, a0);
      __ PushTryHandler(StackHandler::CATCH, expr->index());
      __ bind(&l_continuation);
      __ bind(&l_suspend);
      const int generator_object_depth = kPointerSize + handler_size;
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
      __ LoadRoot(load_name, Heap::knext_stringRootIndex);
      __ Push(load_name, a3, a0);
      if (FLAG_vector_ics) {
      Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
      __ Move(load_receiver, v0);
      __ push(load_receiver);
      __ LoadRoot(load_name, Heap::kdone_stringRootIndex);
      if (FLAG_vector_ics) {
      __ Branch(&l_try, eq, v0, Operand(zero_reg));
      __ pop(load_receiver);
      __ LoadRoot(load_name, Heap::kvalue_stringRootIndex);
      if (FLAG_vector_ics) {
      context()->DropAndPlug(2, v0);

Label wrong_state, closed_state, done;
__ Branch(&closed_state, eq, a3, Operand(zero_reg));
__ Branch(&wrong_state, lt, a3, Operand(zero_reg));

__ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
Label push_argument_holes, push_frame;
__ bind(&push_argument_holes);
__ Branch(&push_frame, lt, a3, Operand(zero_reg));
__ jmp(&push_argument_holes);

__ bind(&push_frame);
__ Call(&resume_frame);
__ bind(&resume_frame);
__ Branch(&slow_resume, ne, a3, Operand(zero_reg));
__ Addu(a3, a3, Operand(a2));
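// Fast resume path (as reconstructed from context): a2 holds the untagged
// continuation offset and a3 the code entry, so the Addu above computes the
// address at which the suspended generator left off before jumping straight
// back into the function body.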
__ bind(&slow_resume);

Label push_operand_holes, call_resume;
__ bind(&push_operand_holes);
__ Subu(a3, a3, Operand(1));
__ Branch(&call_resume, lt, a3, Operand(zero_reg));
__ Branch(&push_operand_holes);
__ bind(&call_resume);
__ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
__ stop("not-reached");

__ bind(&closed_state);
__ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
__ CallRuntime(Runtime::kThrow, 1);

__ bind(&wrong_state);
__ CallRuntime(Runtime::kThrowGeneratorStateError, 1);

Handle<Map> map(isolate()->native_context()->iterator_result_map());

__ Allocate(map->instance_size(), v0, a2, a3, &gc_required, TAG_OBJECT);

__ bind(&gc_required);
__ CallRuntime(Runtime::kAllocateInNewSpace, 1);

__ bind(&allocated);
__ li(a1, Operand(map));
__ li(a3, Operand(isolate()->factory()->ToBoolean(done)));
__ li(t0, Operand(isolate()->factory()->empty_fixed_array()));

Literal* key = prop->key()->AsLiteral();
if (FLAG_vector_ics) {

Literal* key = prop->key()->AsLiteral();
DCHECK(!key->value()->IsSmi());
DCHECK(prop->IsSuperAccess());

SuperReference* super_ref = prop->obj()->AsSuperReference();
__ CallRuntime(Runtime::kLoadFromSuper, 3);

Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
if (FLAG_vector_ics) {
CallIC(ic, prop->PropertyFeedbackId());

    Expression* left_expr, Expression* right_expr) {
  Label done, smi_case, stub_call;

  Register scratch1 = a2;
  Register scratch2 = a3;
  Register right = a0;

  __ Or(scratch1, left, Operand(right));
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(scratch1, &smi_case);

  __ bind(&stub_call);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();

      __ GetLeastBitsFromSmi(scratch1, right, 5);
      __ srav(right, left, scratch1);

      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ sllv(scratch1, scratch1, scratch2);
      __ Addu(scratch2, scratch1, Operand(0x40000000));
      __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
      __ SmiTag(v0, scratch1);
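      // Smi range check for SHL: adding 0x40000000 above makes scratch2
      // negative exactly when the shifted value lies outside [-2^30, 2^30),
      // i.e. when it cannot be re-tagged as a smi, so such results fall back
      // to the stub.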

      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ srlv(scratch1, scratch1, scratch2);
      __ And(scratch2, scratch1, 0xc0000000);
      __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
      __ SmiTag(v0, scratch1);

      __ AdduAndCheckForOverflow(v0, left, right, scratch1);
      __ BranchOnOverflow(&stub_call, scratch1);
      __ SubuAndCheckForOverflow(v0, left, right, scratch1);
      __ BranchOnOverflow(&stub_call, scratch1);

      __ SmiUntag(scratch1, right);
      __ Mul(scratch2, v0, left, scratch1);
      __ sra(scratch1, v0, 31);
      __ Branch(&stub_call, ne, scratch1, Operand(scratch2));
      __ Branch(&done, ne, v0, Operand(zero_reg));
      __ Addu(scratch2, right, left);
      __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
      __ mov(v0, zero_reg);
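      // MUL overflow and -0 handling: the product's high word must equal the
      // sign-extension of the low word for the result to fit in 32 bits, and
      // a zero product is only a valid smi 0 if neither operand was negative
      // (otherwise the correct result is -0, which is not a smi).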

      __ Or(v0, left, Operand(right));
    case Token::BIT_AND:
      __ And(v0, left, Operand(right));
    case Token::BIT_XOR:
      __ Xor(v0, left, Operand(right));

Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
JumpPatchSite patch_site(masm_);
CallIC(code, expr->BinaryOperationFeedbackId());
patch_site.EmitPatchInfo();

DCHECK(expr->IsValidReferenceExpression());

Property* prop = expr->AsProperty();
assign_type = (prop->key()->IsPropertyName())

switch (assign_type) {
    Variable* var = expr->AsVariableProxy()->var();
        Operand(prop->key()->AsLiteral()->value()));
        CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();

if (var->IsContextSlot()) {
  __ RecordWriteContextSlot(

if (var->IsUnallocated()) {
} else if (op == Token::INIT_CONST_LEGACY) {
  DCHECK(!var->IsParameter());
  if (var->IsLookupSlot()) {
    __ li(a0, Operand(var->name()));
    __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    __ lw(a2, location);
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
    __ Branch(&skip, ne, a2, Operand(at));
} else if (var->mode() == LET && op != Token::INIT_LET) {
  DCHECK(!var->IsLookupSlot());
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  __ lw(a3, location);
  __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
  __ Branch(&assign, ne, a3, Operand(t0));
  __ li(a3, Operand(var->name()));
  __ CallRuntime(Runtime::kThrowReferenceError, 1);
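  // Assigning to a let binding that still holds the hole means the
  // declaration has not executed yet, so a ReferenceError is thrown instead
  // of storing the value (the temporal dead zone for assignments).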
} else if (!var->is_const_mode() || op == Token::INIT_CONST) {
  if (var->IsLookupSlot()) {
    __ li(a1, Operand(var->name()));
    __ CallRuntime(Runtime::kStoreLookupSlot, 4);
    DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
    __ lw(a2, location);
    __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
    __ Check(eq, kLetBindingReInitialization, a2, Operand(t0));

Property* prop = expr->target()->AsProperty();
DCHECK(prop->key()->IsLiteral());
    Operand(prop->key()->AsLiteral()->value()));
Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
CallIC(ic, expr->AssignmentFeedbackId());

void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    if (!expr->IsSuperAccess()) {

    TypeFeedbackId id) {

Expression* callee = expr->expression();
CallICState::CallType call_type =
    callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;

if (call_type == CallICState::FUNCTION) {
  { StackValueContext context(this);
  __ Push(isolate()->factory()->undefined_value());
  DCHECK(callee->IsProperty());
  DCHECK(!callee->AsProperty()->IsSuperAccess());

Expression* callee = expr->expression();
DCHECK(callee->IsProperty());
Property* prop = callee->AsProperty();
DCHECK(prop->IsSuperAccess());

Literal* key = prop->key()->AsLiteral();
DCHECK(!key->value()->IsSmi());

const Register scratch = a1;
SuperReference* super_ref = prop->obj()->AsSuperReference();
__ CallRuntime(Runtime::kLoadFromSuper, 3);
EmitCall(expr, CallICState::METHOD);

Expression* callee = expr->expression();
DCHECK(callee->IsProperty());
EmitCall(expr, CallICState::METHOD);

ZoneList<Expression*>* args = expr->arguments();
int arg_count = args->length();
{ PreservePositionScope scope(masm()->positions_recorder());
  for (int i = 0; i < arg_count; i++) {
    isolate(), arg_count, call_type);
context()->DropAndPlug(1, v0);

if (arg_count > 0) {
__ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
__ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);

void FullCodeGenerator::VisitCall(Call* expr) {
  expr->return_is_recorded_ = false;

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  Call::CallType call_type = expr->GetCallType(isolate());

  if (call_type == Call::POSSIBLY_EVAL_CALL) {
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();
    { PreservePositionScope pos_scope(masm()->positions_recorder());
      __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
      for (int i = 0; i < arg_count; i++) {
    context()->DropAndPlug(1, v0);
  } else if (call_type == Call::GLOBAL_CALL) {
  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
    VariableProxy* proxy = callee->AsVariableProxy();
    { PreservePositionScope scope(masm()->positions_recorder());
      __ li(a2, Operand(proxy->name()));
      __ CallRuntime(Runtime::kLoadLookupSlot, 2);
    if (done.is_linked()) {
      __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
  } else if (call_type == Call::PROPERTY_CALL) {
    Property* property = callee->AsProperty();
    bool is_named_call = property->key()->IsPropertyName();
    if (property->IsSuperAccess() && is_named_call) {
      PreservePositionScope scope(masm()->positions_recorder());
      if (is_named_call) {
    DCHECK(call_type == Call::OTHER_CALL);
    { PreservePositionScope scope(masm()->positions_recorder());
    __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);

  DCHECK(expr->return_is_recorded_);

void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");

  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {

  __ li(a0, Operand(arg_count));

  if (FLAG_pretenuring_call_new) {
    DCHECK(expr->AllocationSiteFeedbackSlot() ==
           expr->CallNewFeedbackSlot() + 1);

void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Split(eq, t0, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ NonNegativeSmiTst(v0, at);
  Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ LoadRoot(at, Heap::kNullValueRootIndex);
  __ Branch(if_true, eq, v0, Operand(at));
  __ Branch(if_false, ne, at, Operand(zero_reg));
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  Label materialize_true, materialize_false, skip_lookup;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ AssertNotSmi(v0);
  __ Branch(&skip_lookup, ne, t0, Operand(zero_reg));
  __ LoadRoot(t0, Heap::kHashTableMapRootIndex);
  __ Branch(if_false, eq, a2, Operand(t0));

  Label entry, loop, done;
  __ NumberOfOwnDescriptors(a3, a1);
  __ Branch(&done, eq, a3, Operand(zero_reg));
  __ LoadInstanceDescriptors(a1, t0);
  __ Addu(a2, a2, t1);
  __ li(t2, Operand(isolate()->factory()->value_of_string()));
  __ Branch(if_false, eq, a3, Operand(t2));
  __ Branch(&loop, ne, t0, Operand(a2));

  __ bind(&skip_lookup);
  __ JumpIfSmi(a2, if_false);
  Split(eq, a2, Operand(a3), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a2);
  __ Branch(if_false);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
  __ li(t0, 0x80000000);
  __ Branch(&not_nan, ne, a2, Operand(t0));
  __ mov(t0, zero_reg);
  Split(eq, a2, Operand(t0), if_true, if_false, fall_through);
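  // Only -0 has high (sign/exponent) word 0x80000000 together with a zero
  // low word. When the high word matches, the comparands are swapped so the
  // final Split compares the mantissa word against zero; otherwise the
  // comparison fails and the result is false.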
  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Label check_frame_marker;
  __ Branch(&check_frame_marker, ne,
  __ bind(&check_frame_marker);
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Split(eq, v0, Operand(a1), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  __ Branch(&exit, ne, a3,

void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  __ JumpIfSmi(v0, &null);
  __ GetObjectType(v0, v0, a1);
  __ GetObjectType(v0, a1, a1);
  __ LoadRoot(v0, Heap::kFunction_stringRootIndex);

  __ bind(&non_function_constructor);
  __ LoadRoot(v0, Heap::kObject_stringRootIndex);
  __ LoadRoot(v0, Heap::kNullValueRootIndex);

void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
  SubStringStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 3);

void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
  RegExpExecStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 4);

void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  __ JumpIfSmi(v0, &done);
  __ GetObjectType(v0, a1, a1);

void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));

  Label runtime, done, not_date_object;
  Register object = v0;
  Register result = v0;
  Register scratch0 = t5;
  Register scratch1 = a1;

  __ JumpIfSmi(object, &not_date_object);
  __ GetObjectType(object, scratch1, scratch1);

  if (index->value() == 0) {
    ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
    __ li(scratch1, Operand(stamp));
    __ Branch(&runtime, ne, scratch1, Operand(scratch0));
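    // Cached date fields are only valid while the isolate's date cache stamp
    // is unchanged; a stale stamp falls through to the runtime path, which
    // recomputes the field via the C++ date cache.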
  __ PrepareCallCFunction(2, scratch1);
  __ li(a1, Operand(index));
  __ Move(a0, object);
  __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);

  __ bind(&not_date_object);
  __ CallRuntime(Runtime::kThrowNotDateError, 0);

void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();

  Register string = v0;
  Register index = a1;
  Register value = a2;

  __ Pop(index, value);

  if (FLAG_debug_code) {
    __ SmiTst(value, at);
    __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
    __ SmiTst(index, at);
    __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
    __ SmiUntag(index, index);
    Register scratch = t5;
    __ EmitSeqStringSetCharCheck(
        string, index, value, scratch, one_byte_seq_type);
    __ SmiTag(index, index);

  __ SmiUntag(value, value);
  __ Addu(at, at, index);

void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();

  Register string = v0;
  Register index = a1;
  Register value = a2;

  __ Pop(index, value);

  if (FLAG_debug_code) {
    __ SmiTst(value, at);
    __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
    __ SmiTst(index, at);
    __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
    __ SmiUntag(index, index);
    Register scratch = t5;
    __ EmitSeqStringSetCharCheck(
        string, index, value, scratch, two_byte_seq_type);
    __ SmiTag(index, index);

  __ SmiUntag(value, value);
  __ Addu(at, at, index);

void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  __ JumpIfSmi(a1, &done);
  __ GetObjectType(a1, a2, a2);
  __ RecordWriteField(

void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  NumberToStringStub stub(isolate());

void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  StringCharFromCodeGenerator generator(v0, a1);
  generator.GenerateFast(masm_);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  Register object = a1;
  Register index = a0;
  Register result = v0;

  Label need_conversion;
  Label index_out_of_range;
  StringCharCodeAtGenerator generator(object,
                                      &index_out_of_range,
  generator.GenerateFast(masm_);

  __ bind(&index_out_of_range);
  __ LoadRoot(result, Heap::kNanValueRootIndex);

  __ bind(&need_conversion);
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  Register object = a1;
  Register index = a0;
  Register scratch = a3;
  Register result = v0;

  Label need_conversion;
  Label index_out_of_range;
  StringCharAtGenerator generator(object,
                                  &index_out_of_range,
  generator.GenerateFast(masm_);

  __ bind(&index_out_of_range);
  __ LoadRoot(result, Heap::kempty_stringRootIndex);

  __ bind(&need_conversion);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();

void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  StringCompareStub stub(isolate());

void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() >= 2);

  int arg_count = args->length() - 2;
  for (int i = 0; i < arg_count + 1; i++) {

  Label runtime, done;
  __ JumpIfSmi(v0, &runtime);
  __ GetObjectType(v0, a1, a1);

  ParameterCount count(arg_count);
  __ CallRuntime(Runtime::kCall, args->length());

void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  RegExpConstructResultStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 3);

void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->native_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort(kAttemptToUseUndefinedCache);
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);

  Register cache = a1;

  Label done, not_found;
  __ addu(a3, a3, at);
  __ Branch(&not_found, ne, key, Operand(a2));

  __ bind(&not_found);
  __ CallRuntime(Runtime::kGetFromCache, 2);

void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);

void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  __ AssertString(v0);
  __ IndexFromHash(v0, v0);

void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
  Label bailout, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      empty_separator_loop, one_char_separator_loop,
      one_char_separator_loop_entry, long_separator_loop;
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  Register array = v0;
  Register elements = no_reg;
  Register result = no_reg;
  Register separator = a1;
  Register array_length = a2;
  Register result_pos = no_reg;
  Register string_length = a3;
  Register string = t0;
  Register element = t1;
  Register elements_end = t2;
  Register scratch1 = t3;
  Register scratch2 = t5;
  Register scratch3 = t4;

  __ JumpIfSmi(array, &bailout);
  __ GetObjectType(array, scratch1, scratch2);
  __ CheckFastElements(scratch1, scratch2, &bailout);

  __ SmiUntag(array_length);
  __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
  __ LoadRoot(v0, Heap::kempty_stringRootIndex);

  __ bind(&non_trivial_array);
  __ mov(string_length, zero_reg);
  __ Addu(elements_end, element, elements_end);
  __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin, array_length,
  __ JumpIfSmi(string, &bailout);
  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
  __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
  __ BranchOnOverflow(&bailout, scratch3);
  __ Branch(&loop, lt, element, Operand(elements_end));

  __ Branch(&not_size_one_array, ne, array_length, Operand(1));
  __ bind(&not_size_one_array);

  __ JumpIfSmi(separator, &bailout);
  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);

  __ Subu(string_length, string_length, Operand(scratch1));
  __ Mul(scratch3, scratch2, array_length, scratch1);
  __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
  __ And(scratch3, scratch2, Operand(0x80000000));
  __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
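  // Overflow checks for the total separator length: Mul produces a 64-bit
  // product (high word in scratch3, low word in scratch2); the join bails
  // out unless the high word is zero and the sign bit of the low word is
  // clear, i.e. the product fits in a non-negative int32.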
  __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
  __ BranchOnOverflow(&bailout, scratch3);
  __ SmiUntag(string_length);

  __ AllocateOneByteString(result, string_length, scratch1, scratch2,
                           elements_end, &bailout);
  __ Addu(elements_end, element, elements_end);
  result_pos = array_length;

  __ Branch(&one_char_separator, eq, scratch1, Operand(at));
  __ Branch(&long_separator, gt, scratch1, Operand(at));
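  // Three copy loops follow, specialized on separator length: an empty
  // separator, a single one-byte character (stored directly), and the
  // general case that copies the separator string between elements.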
  __ bind(&empty_separator_loop);
  __ SmiUntag(string_length);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));

  __ bind(&one_char_separator);
  __ jmp(&one_char_separator_loop_entry);

  __ bind(&one_char_separator_loop);
  __ Addu(result_pos, result_pos, 1);

  __ bind(&one_char_separator_loop_entry);
  __ SmiUntag(string_length);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));

  __ bind(&long_separator_loop);
  __ SmiUntag(string_length);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  __ bind(&long_separator);
  __ SmiUntag(string_length);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  __ Branch(&long_separator_loop, lt, element, Operand(elements_end));

  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);

void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ li(at, Operand(debug_is_active));

void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  if (expr->function() != NULL &&
    Comment cmnt(masm_, "[ InlineRuntimeCall");

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  if (expr->is_jsruntime()) {
    if (FLAG_vector_ics) {
          Operand(Smi::FromInt(expr->CallRuntimeFeedbackSlot())));

    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
    context()->DropAndPlug(1, v0);

    for (int i = 0; i < arg_count; i++) {
    __ CallRuntime(expr->function(), arg_count);

void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        if (var->IsUnallocated()) {
          __ li(a1, Operand(var->name()));
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          context()->Plug(var->is_this());
          __ li(a2, Operand(var->name()));
          __ CallRuntime(Runtime::kDeleteLookupSlot, 2);

      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      context()->Plug(Heap::kUndefinedValueRootIndex);

      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      } else if (context()->IsTest()) {
                        test->false_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
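        // In a test context, logical NOT costs nothing at run time: the
        // operand is evaluated with the true and false labels swapped, which
        // is why Plug receives them in reversed order here.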

        Label materialize_true, materialize_false, done;
        __ bind(&materialize_true);
        __ LoadRoot(v0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ bind(&materialize_false);
        __ LoadRoot(v0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
      __ CallRuntime(Runtime::kTypeof, 1);

void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpression());

  Comment cmnt(masm_, "[ CountOperation");

  Property* prop = expr->expression()->AsProperty();

    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);

  if (expr->is_postfix() && !context()->IsEffect()) {

  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
    patch_site.EmitJumpIfNotSmi(v0, &slow);

    if (expr->is_postfix()) {
      switch (assign_type) {

    Register scratch1 = a1;
    Register scratch2 = t0;
    __ AdduAndCheckForOverflow(v0, v0, scratch1, scratch2);
    __ BranchOnNoOverflow(&done, scratch2);
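    // Fast path for ++/--: scratch1 is expected to hold the tagged
    // Smi::FromInt(count_value) (its load is elided in this excerpt), so a
    // single tagged add with an overflow check covers the common smi case
    // and falls through to the stub on overflow.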

  ToNumberStub convert_stub(isolate());
  __ CallStub(&convert_stub);

  if (expr->is_postfix()) {
    switch (assign_type) {

  __ bind(&stub_call);
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();

  switch (assign_type) {
      if (expr->is_postfix()) {
        { EffectContext context(this);
          Operand(prop->key()->AsLiteral()->value()));
      if (expr->is_postfix()) {
          CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
      CallIC(ic, expr->CountStoreFeedbackId());
      if (expr->is_postfix()) {

VariableProxy* proxy = expr->AsVariableProxy();
if (proxy != NULL && proxy->var()->IsUnallocated()) {
  Comment cmnt(masm_, "[ Global variable");
  if (FLAG_vector_ics) {
} else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
  Comment cmnt(masm_, "[ Lookup slot");
  __ li(a0, Operand(proxy->name()));
  __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    __ JumpIfSmi(v0, if_true);
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(v0, if_false);
    // Check for undetectable objects => false.
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_false, ge, a1, Operand(FIRST_NONSTRING_TYPE));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(eq, a1, Operand(zero_reg),
          if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(v0, if_false);
    __ GetObjectType(v0, v0, a1);
    Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    __ LoadRoot(at, Heap::kFalseValueRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    __ JumpIfSmi(v0, if_false);
    // Check for undetectable objects => true.
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(v0, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE));
    Split(eq, a1, Operand(JS_FUNCTION_PROXY_TYPE),
          if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(v0, if_false);
    __ LoadRoot(at, Heap::kNullValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    // Check for JS objects => true.
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
    __ lbu(a1, FieldMemOperand(v0, Map::kInstanceTypeOffset));
    __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
    // Check for undetectable objects => false.
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}
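// Each `typeof x == "<string>"` comparison above is a hand-inlined test
// against the value's map or instance type rather than a generic runtime
// call, and a literal string that matches none of the known typeof results
// folds directly to false.  Note the deliberate asymmetry around
// Map::kIsUndetectable: undetectable objects answer "undefined", not
// "object" or "string".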
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      __ LoadRoot(t0, Heap::kTrueValueRootIndex);
      Split(eq, v0, Operand(t0), if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      // The stub returns 0 for true.
      Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = CompareIC::ComputeCondition(op);
      __ mov(a0, result_register());
      __ pop(a1);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ Or(a2, a0, Operand(a1));
        patch_site.EmitJumpIfNotSmi(a2, &slow_case);
        Split(cc, a1, Operand(a0), if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
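      // The CompareIC returns its result in v0 with a sign that relates the
      // operands the way a subtraction would, so the same condition cc used
      // for the inline smi comparison above can be reused here to test v0
      // against zero.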
      break;
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  __ mov(a0, result_register());
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ LoadRoot(a1, nil_value);
    Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    Split(ne, v0, Operand(zero_reg), if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}
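// For strict equality against null or undefined it suffices to compare the
// value against the corresponding root object.  Loose equality (x == null)
// must also accept undefined and undetectable objects, so it dispatches
// through a CompareNilIC instead of an inline check.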
void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ lw(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(v0);
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
    // Global code: pass a smi sentinel and let the runtime look up the
    // canonical empty function that serves as the closure of such contexts.
    __ li(at, Operand(Smi::FromInt(0)));
  } else if (declaration_scope->is_eval_scope()) {
    // Eval code shares the closure of the calling context; fetch it.
    __ lw(at, ContextOperand(cp, Context::CLOSURE_INDEX));
  } else {
    DCHECK(declaration_scope->is_function_scope());
    __ lw(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  __ push(at);
}
void FullCodeGenerator::EnterFinallyBlock() {
  DCHECK(!result_register().is(a1));
  // Store result register while executing finally block.
  __ push(result_register());
  // Cook return address in link register to stack (smi encoded Code* delta).
  __ Subu(a1, ra, Operand(masm_->CodeObject()));
  __ Addu(a1, a1, Operand(a1));  // Convert to smi.
  __ push(a1);

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ lw(a1, MemOperand(at));
  __ push(a1);

  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ li(at, Operand(has_pending_message));
  __ lw(a1, MemOperand(at));
  __ SmiTag(a1);
  __ push(a1);

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ li(at, Operand(pending_message_script));
  __ lw(a1, MemOperand(at));
  __ push(a1);
}


void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(a1));
  // Restore pending message from stack, in reverse order.
  __ pop(a1);
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ li(at, Operand(pending_message_script));
  __ sw(a1, MemOperand(at));

  __ pop(a1);
  __ SmiUntag(a1);
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ li(at, Operand(has_pending_message));
  __ sw(a1, MemOperand(at));

  __ pop(a1);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ sw(a1, MemOperand(at));

  // Uncook the return address and jump back to the caller.
  // ...
}
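// EnterFinallyBlock and ExitFinallyBlock mirror each other: the prologue
// saves the result register, the smi-cooked return address, and the
// isolate's pending-message state on the stack, and the epilogue pops the
// same values in reverse order, so a message pending before the finally
// block survives it unclobbered.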
#undef __
#define __ ACCESS_MASM(masm())

FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth, int* context_length) {
  // The macros used here must preserve the result register.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ lw(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
    __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ PopTryHandler();
  __ Call(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}
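// Exiting across a try...finally first drops the statement's stack slots
// down to the handler, restores the context registers if any contexts were
// entered in between, and then calls (rather than jumps to) finally_entry_,
// so the finally code can return here before unwinding continues outward.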
void BackEdgeTable::PatchAt(Code* unoptimized_code, Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address branch_address = pc - 6 * kInstrSize;
  CodePatcher patcher(branch_address, 1);

  // The back edge sequence on MIPS is:
  //   slt  at, a3, zero_reg   (count-based interrupt check)
  //   beq  at, zero_reg, ok
  //   lui/ori t9, <stub address>
  //   jalr t9
  //   nop
  //   ok:                     <- pc points here
  switch (target_state) {
    case INTERRUPT:
      // Restore the interrupt check: at = (counter < 0).
      patcher.masm()->slt(at, a3, zero_reg);
      break;
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      // Force at = 1 so the beq is never taken and the stub always runs.
      patcher.masm()->addiu(at, zero_reg, 1);
      break;
  }

  Address pc_immediate_load_address = pc - 4 * kInstrSize;
  // Replace the stub address in the lui/ori pair with the entry of the
  // replacement code.
  Assembler::set_target_address_at(pc_immediate_load_address,
                                   unoptimized_code,
                                   replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate, Code* unoptimized_code, Address pc) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address branch_address = pc - 6 * kInstrSize;
  Address pc_immediate_load_address = pc - 4 * kInstrSize;

  if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
    DCHECK(Assembler::target_address_at(pc_immediate_load_address) ==
           isolate->builtins()->InterruptCheck()->entry());
    return INTERRUPT;
  }
  if (Assembler::target_address_at(pc_immediate_load_address) ==
      isolate->builtins()->OnStackReplacement()->entry()) {
    return ON_STACK_REPLACEMENT;
  }
  DCHECK(Assembler::target_address_at(pc_immediate_load_address) ==
         isolate->builtins()->OsrAfterStackCheck()->entry());
  return OSR_AFTER_STACK_CHECK;
}
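// Taken together, PatchAt and GetBackEdgeState use the first instruction of
// the back edge sequence as a one-instruction state flag: `slt` means the
// interrupt check is live (the beq skips the stub while the counter is
// non-negative), while `addiu at, zero_reg, 1` forces the beq to fall
// through into the stub unconditionally.  The two OSR states are then told
// apart by which builtin entry is loaded by the lui/ori pair.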