full-codegen-mips64.cc
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_MIPS64

// Note on Mips implementation:
//
// The result_register() for mips is the 'v0' register, which is defined
// by the ABI to contain function return values. However, the first
// parameter to a function is defined to be 'a0'. So there are many
// places where we have to move a previous result in v0 to a0 for the
// next call: mov(a0, v0). This is not needed on the other architectures.
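//
// For example (an editorial sketch, not in the original file), passing the
// value just produced in v0 on to a hypothetical one-argument runtime call
// looks like:
//
//   __ mov(a0, v0);                              // result -> first argument
//   __ CallRuntime(Runtime::kSomeRuntimeFn, 1);  // kSomeRuntimeFn is made up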

#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug.h"
#include "src/full-codegen.h"
#include "src/ic/ic.h"
#include "src/isolate-inl.h"
#include "src/parser.h"
#include "src/scopes.h"

#include "src/mips64/code-stubs-mips64.h"
#include "src/mips64/macro-assembler-mips64.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)

// A patch site is a location in the code which it is possible to patch. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is an andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy
// (the raw 16 bit immediate value is used) is the delta from the pc to the
// first instruction of the patchable code.
// The marker instruction is effectively a NOP (dest is zero_reg) and will
// never be emitted by normal code.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this ensure that a jump is always generated to skip
  // the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Always taken before patched.
    __ BranchShort(target, eq, at, Operand(zero_reg));
  }

  // When initially emitting this ensure that a jump is never generated to skip
  // the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Never taken before patched.
    __ BranchShort(target, ne, at, Operand(zero_reg));
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
      __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
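
// A worked example of the marker encoding above (an editorial sketch, not in
// the original file): with kImm16Mask == 0xffff, a delta of, say, 70000
// instructions would be recorded as
//
//   int delta = 70000;
//   int reg_code = delta / kImm16Mask;  // 1, encoded as register rx
//   int imm16 = delta % kImm16Mask;     // 4465, encoded as immediate #yyyy
//   DCHECK_EQ(delta, reg_code * kImm16Mask + imm16);  // decoding recovers it
//
// so the patcher can recover the distance back to the patchable code from the
// single andi marker instruction.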


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o a1: the JS function object being called (i.e. ourselves)
//   o cp: our context
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-mips.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);

  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (info->strict_mode() == SLOPPY && !info->is_native()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ ld(at, MemOperand(sp, receiver_offset));
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ Branch(&ok, ne, a2, Operand(at));

    __ ld(a2, GlobalObjectOperand());
    __ ld(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset));

    __ sd(a2, MemOperand(sp, receiver_offset));
    __ bind(&ok);
  }
  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);
  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!info->function()->is_generator() || locals_count == 0);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ Dsubu(t1, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
        __ Branch(&ok, hs, t1, Operand(a2));
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ bind(&ok);
      }
      __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ li(a2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        __ Dsubu(sp, sp, Operand(kMaxPushes * kPointerSize));
        for (int i = 0; i < kMaxPushes; i++) {
          __ sd(t1, MemOperand(sp, i * kPointerSize));
        }
        // Continue loop if not done.
        __ Dsubu(a2, a2, Operand(1));
        __ Branch(&loop_header, ne, a2, Operand(zero_reg));
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      __ Dsubu(sp, sp, Operand(remaining * kPointerSize));
      for (int i = 0; i < remaining; i++) {
        __ sd(t1, MemOperand(sp, i * kPointerSize));
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    // Argument to NewContext is the function, which is still in a1.
    bool need_write_barrier = true;
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ push(a1);
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), heap_slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ push(a1);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in v0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, v0);
    __ sd(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ld(a0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ sd(a0, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(
              cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, a0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }
  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mov(a3, a1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ Daddu(a2, fp,
             Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ li(a1, Operand(Smi::FromInt(num_parameters)));
    __ Push(a3, a2, a1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (strict_mode() == STRICT) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, v0, a1, a2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }
  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        DCHECK(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_LEGACY);
        DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }
    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ LoadRoot(at, Heap::kStackLimitRootIndex);
      __ Branch(&ok, hs, sp, Operand(at));
      Handle<Code> stack_check = isolate()->builtins()->StackCheck();
      PredictableCodeSizeScope predictable(masm_,
          masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
      __ Call(stack_check, RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);

      VisitStatements(function()->body());

      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();
}


void FullCodeGenerator::ClearAccumulator() {
  DCHECK(Smi::FromInt(0) == 0);
  __ mov(v0, zero_reg);
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ li(a2, Operand(profiling_counter_));
  __ ld(a3, FieldMemOperand(a2, Cell::kValueOffset));
  __ Dsubu(a3, a3, Operand(Smi::FromInt(delta)));
  __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->is_debug()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ li(a2, Operand(profiling_counter_));
  __ li(a3, Operand(Smi::FromInt(reset_value)));
  __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
  // to make sure it is constant. Branch may emit a skip-or-jump sequence
  // instead of the normal Branch. It seems that the "skip" part of that
  // sequence is about as long as this Branch would be so it is safe to ignore
  // that.
  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;
  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ slt(at, a3, zero_reg);
  __ beq(at, zero_reg, &ok);
  // Call will emit a li t9 first, so it is safe to use the delay slot.
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
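
// For intuition (an editorial note, not in the original file): assuming the
// generic kMaxBackEdgeWeight of 127 and, say, kCodeSizeMultiplier == 149,
// a back edge 1490 bytes away from its target decrements the counter by
//
//   int weight = Min(127, Max(1, 1490 / 149));  // == 10
//
// so larger loop bodies drain the interrupt budget faster and reach the
// InterruptCheck builtin (and a possible optimization decision) sooner.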


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ Branch(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in v0.
      __ push(v0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ Branch(&ok, ge, a3, Operand(zero_reg));
    __ push(v0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ pop(v0);
    EmitProfilingCounterReset();
    __ bind(&ok);

#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
      // Here we use masm_-> instead of the __ macro to avoid the code coverage
      // tool from instrumenting as we rely on the code size here.
      int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
      __ RecordJSReturn();
      masm_->mov(sp, fp);
      int no_frame_start = masm_->pc_offset();
      masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
      masm_->Daddu(sp, sp, Operand(sp_delta));
      masm_->Jump(ra);
      info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
    }

#ifdef DEBUG
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    DCHECK(Assembler::kJSReturnSequenceInstructions <=
           masm_->InstructionsGeneratedSince(&check_exit_codesize));
#endif
  }
}


void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ li(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ li(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ li(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ sd(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  // Push the value as the following branch can clobber at in long branch mode.
  __ push(at);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(at, Heap::kFalseValueRootIndex);
  __ push(at);
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(at, value_root_index);
  __ push(at);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  __ mov(a0, result_register());
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ mov(at, zero_reg);
  Split(ne, v0, Operand(at), if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Register lhs,
                              const Operand& rhs,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ Branch(if_true, cc, lhs, rhs);
  } else if (if_true == fall_through) {
    __ Branch(if_false, NegateCondition(cc), lhs, rhs);
  } else {
    __ Branch(if_true, cc, lhs, rhs);
    __ Branch(if_false);
  }
}
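
// An editorial note (not in the original file): Split avoids the second,
// unconditional branch whenever one of the targets is the fall-through
// position. For instance, when the false target falls through,
//
//   Split(eq, v0, Operand(at), if_true, if_false, if_false);
//
// emits a single "Branch(if_true, eq, v0, Operand(at))" and simply falls
// into the false code, and the symmetric case negates the condition instead.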


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}
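
// A worked instance of the offset computation above (editorial, not in the
// original file): with kPointerSize == 8 on mips64, a parameter with
// var->index() == 0 in a two-parameter function yields
//
//   offset = -0 * 8 + (2 + 1) * 8;  // 24, i.e. MemOperand(fp, 24)
//
// landing in the caller-pushed argument area above the saved fp/ra pair,
// while locals resolve through kLocal0Offset to offsets below fp.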


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ ld(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ sd(src, location);
  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kRAHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ Branch(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(a4, Heap::kTrueValueRootIndex);
    Split(eq, a0, Operand(a4), if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ ld(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ LoadRoot(a4, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext,
        a1, Operand(a4));
    __ LoadRoot(a4, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext,
        a1, Operand(a4));
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
        __ sd(a4, StackOperand(variable));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ sd(at, ContextOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ li(a2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ li(a1, Operand(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, a2, a1, a0);
      } else {
        DCHECK(Smi::FromInt(0) == 0);
        __ mov(a0, zero_reg);  // Smi::FromInt(0) indicates no initial value.
        __ Push(cp, a2, a1, a0);
      }
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ sd(result_register(), StackOperand(variable));
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ sd(result_register(), ContextOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                a2,
                                kRAHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ li(a2, Operand(variable->name()));
      __ li(a1, Operand(Smi::FromInt(NONE)));
      __ Push(cp, a2, a1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  DCHECK(variable->location() == Variable::CONTEXT);
  DCHECK(variable->interface()->IsFrozen());
  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(a1, scope_->ContextChainLength(scope_->GlobalScope()));
  __ ld(a1, ContextOperand(a1, variable->interface()->Index()));
  __ ld(a1, ContextOperand(a1, Context::EXTENSION_INDEX));

  // Assign it.
  __ sd(a1, ContextOperand(cp, variable->index()));
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(cp,
                            Context::SlotOffset(variable->index()),
                            a1,
                            a3,
                            kRAHasBeenSaved,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  // The context is the first argument.
  __ li(a1, Operand(pairs));
  __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(cp, a1, a0);
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());
    __ mov(a0, result_register());  // CompareStub requires args in a0, a1.

    // Perform the comparison as if via '==='.
    __ ld(a1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ or_(a2, a1, a0);
      patch_site.EmitJumpIfNotSmi(a2, &slow_case);

      __ Branch(&next_test, ne, a1, Operand(a0));
      __ Drop(1);  // Switch value is no longer needed.
      __ Branch(clause->body_target());

      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ Branch(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(&next_test, ne, v0, Operand(at));
    __ Drop(1);
    __ Branch(clause->body_target());
    __ bind(&skip);

    __ Branch(&next_test, ne, v0, Operand(zero_reg));
    __ Drop(1);  // Switch value is no longer needed.
    __ Branch(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ Branch(nested_statement.break_label());
  } else {
    __ Branch(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
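
// Note on the inline smi fast path above (an editorial sketch, not in the
// original file): smis carry a 0 tag in their low bit, so or-ing the two
// operands and testing the tag of the result checks both values at once:
//
//   __ or_(a2, a1, a0);                      // low bit of a2 is 1 iff either
//   patch_site.EmitJumpIfNotSmi(a2, &slow);  // operand is a non-smi
//
// Only when both operands are smis can the raw register comparison
// Branch(&next_test, ne, a1, Operand(a0)) decide '===' directly.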


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  int slot = stmt->ForInFeedbackSlot();
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ mov(a0, result_register());  // Result as param to InvokeBuiltin below.
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(at));
  Register null_value = a5;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(null_value));
  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
  __ mov(a0, v0);
  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(a0, &convert);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
  __ bind(&convert);
  __ push(a0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ mov(a0, v0);
  __ bind(&done_convert);
  __ push(a0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE));

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ ld(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ Branch(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(a0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ ld(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kMetaMapRootIndex);
  __ Branch(&fixed_array, ne, a2, Operand(at));

  // We got a map in register v0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(a1, v0);
  __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));

  __ LoadInstanceDescriptors(v0, a2);
  __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
  __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ li(a0, Operand(Smi::FromInt(0)));
  // Push map, enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(v0, a2, a1, a0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register v0. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  __ li(a1, FeedbackVector());
  __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ sd(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(slot)));

  __ li(a1, Operand(Smi::FromInt(1)));  // Smi indicates slow check
  __ ld(a2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ GetObjectType(a2, a3, a3);
  __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE));
  __ li(a1, Operand(Smi::FromInt(0)));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ Push(a1, v0);  // Smi and array
  __ ld(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
  __ li(a0, Operand(Smi::FromInt(0)));
  __ Push(a1, a0);  // Fixed array length (as smi) and initial index.

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  // Load the current count to a0, load the length to a1.
  __ ld(a0, MemOperand(sp, 0 * kPointerSize));
  __ ld(a1, MemOperand(sp, 1 * kPointerSize));
  __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));

  // Get the current entry of the array into register a3.
  __ ld(a2, MemOperand(sp, 2 * kPointerSize));
  __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ SmiScale(a4, a0, kPointerSizeLog2);
  __ daddu(a4, a2, a4);  // Array base + scaled (smi) index.
  __ ld(a3, MemOperand(a4));  // Current entry.

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register a2.
  __ ld(a2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ ld(a1, MemOperand(sp, 4 * kPointerSize));
  __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ Branch(&update_each, eq, a4, Operand(a2));

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  DCHECK_EQ(Smi::FromInt(0), 0);
  __ Branch(&update_each, eq, a2, Operand(zero_reg));

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(a1, a3);  // Enumerable and current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ mov(a3, result_register());
  __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg));

  // Update the 'each' property or variable from the possibly filtered
  // entry in register a3.
  __ bind(&update_each);
  __ mov(result_register(), a3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for the going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(a0);
  __ Daddu(a0, a0, Operand(Smi::FromInt(1)));
  __ push(a0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ Branch(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}
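
// Stack layout during the for-in loop above (an editorial summary, not in
// the original file), reading upward from sp:
//
//   sp[0 * kPointerSize]: current index (smi)
//   sp[1 * kPointerSize]: enum cache / fixed array length (smi)
//   sp[2 * kPointerSize]: enum cache array or fixed array of keys
//   sp[3 * kPointerSize]: expected map, or smi 1/0 for the slow/proxy case
//   sp[4 * kPointerSize]: the enumerable object itself
//
// which is why the loop reads sp+0 .. sp+4*kPointerSize and exits with
// Drop(5).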


void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");
  SetStatementPosition(stmt);

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // var iterator = iterable[Symbol.iterator]();
  VisitForEffect(stmt->assign_iterator());

  // Loop entry.
  __ bind(loop_statement.continue_label());

  // result = iterator.next()
  VisitForEffect(stmt->next_result());

  // if (result.done) break;
  Label result_not_done;
  VisitForControl(stmt->result_done(),
                  loop_statement.break_label(),
                  &result_not_done,
                  &result_not_done);
  __ bind(&result_not_done);

  // each = result.value
  VisitForEffect(stmt->assign_each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ jmp(loop_statement.continue_label());

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->strict_mode(), info->kind());
    __ li(a2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ li(a0, Operand(info));
    __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ Push(cp, a0, a1);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(v0);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}


void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
  Comment cmnt(masm_, "[ SuperReference ");

  __ ld(LoadDescriptor::ReceiverRegister(),
        MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));

  Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
  __ li(LoadDescriptor::NameRegister(), home_object_symbol);

  CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());

  Label done;
  __ Branch(&done, ne, v0, Operand(isolate()->factory()->undefined_value()));
  __ CallRuntime(Runtime::kThrowNonMethodError, 0);
  __ bind(&done);
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register current = cp;
  Register next = a1;
  Register temp = a2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ ld(temp, ContextOperand(current, Context::EXTENSION_INDEX));
        __ Branch(slow, ne, temp, Operand(zero_reg));
      }
      // Load next context in chain.
      __ ld(next, ContextOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ ld(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(a4, Heap::kNativeContextMapRootIndex);
    __ Branch(&fast, eq, temp, Operand(a4));
    // Check that extension is NULL.
    __ ld(temp, ContextOperand(next, Context::EXTENSION_INDEX));
    __ Branch(slow, ne, temp, Operand(zero_reg));
    // Load next context in chain.
    __ ld(next, ContextOperand(next, Context::PREVIOUS_INDEX));
    __ Branch(&loop);
    __ bind(&fast);
  }

  __ ld(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
  __ li(LoadDescriptor::NameRegister(), Operand(proxy->var()->name()));
  if (FLAG_vector_ics) {
    __ li(VectorLoadICDescriptor::SlotRegister(),
          Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
  }

  ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
      ? NOT_CONTEXTUAL
      : CONTEXTUAL;
  CallLoadIC(mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = a3;
  Register temp = a4;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ ld(temp, ContextOperand(context, Context::EXTENSION_INDEX));
        __ Branch(slow, ne, temp, Operand(zero_reg));
      }
      __ ld(next, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ ld(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  __ Branch(slow, ne, temp, Operand(zero_reg));

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
    __ Branch(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ ld(v0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
      __ dsubu(at, v0, at);  // Sub as compare: at == 0 on eq.
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
        __ Movz(v0, a0, at);  // Conditional move: return Undefined if TheHole.
      } else {  // LET || CONST
        __ Branch(done, ne, at, Operand(zero_reg));
        __ li(a0, Operand(var->name()));
        __ push(a0);
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ Branch(done);
  }
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      // Use inline caching. Variable name is passed in a2 and the global
      // object (receiver) in a0.
      __ ld(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
      __ li(LoadDescriptor::NameRegister(), Operand(var->name()));
      if (FLAG_vector_ics) {
        __ li(VectorLoadICDescriptor::SlotRegister(),
              Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
      }
      CallLoadIC(CONTEXTUAL);
      context()->Plug(v0);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        DCHECK(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
          DCHECK(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          GetVar(v0, var);
          __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
          __ dsubu(at, v0, at);  // Sub as compare: at == 0 on eq.
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            Label done;
            __ Branch(&done, ne, at, Operand(zero_reg));
            __ li(a0, Operand(var->name()));
            __ push(a0);
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
            __ bind(&done);
          } else {
            // Uninitialized const bindings outside of harmony mode are unholed.
            DCHECK(var->mode() == CONST_LEGACY);
            __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
            __ Movz(v0, a0, at);  // Conditional move: Undefined if TheHole.
          }
          context()->Plug(v0);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      __ li(a1, Operand(var->name()));
      __ Push(cp, a1);  // Context and name.
      __ CallRuntime(Runtime::kLoadLookupSlot, 2);
      __ bind(&done);
      context()->Plug(v0);
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // a5 = materialized value (RegExp literal)
  // a4 = JS function, literals array
  // a3 = literal index
  // a2 = RegExp pattern
  // a1 = RegExp flags
  // a0 = RegExp literal clone
  __ ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ld(a4, FieldMemOperand(a0, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ ld(a5, FieldMemOperand(a4, literal_offset));
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&materialized, ne, a5, Operand(at));

  // Create regexp literal using runtime function.
  // Result will be in v0.
  __ li(a3, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a2, Operand(expr->pattern()));
  __ li(a1, Operand(expr->flags()));
  __ Push(a4, a3, a2, a1);
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mov(a5, v0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ li(a0, Operand(Smi::FromInt(size)));
  __ Push(a5, a0);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(a5);

  __ bind(&allocated);

  // After this, registers are used as follows:
  // v0: Newly allocated regexp.
  // a5: Materialized regexp.
  // a2: temp.
  __ CopyFields(v0, a5, a2.bit(), size / kPointerSize);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(a1, Heap::kNullValueRootIndex);
    __ push(a1);
  } else {
    VisitForStackValue(expression);
  }
}
1648 
1649 
1650 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1651  Comment cmnt(masm_, "[ ObjectLiteral");
1652 
1653  expr->BuildConstantProperties(isolate());
1654  Handle<FixedArray> constant_properties = expr->constant_properties();
1657  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1658  __ li(a1, Operand(constant_properties));
1659  int flags = expr->fast_elements()
1660  ? ObjectLiteral::kFastElements
1661  : ObjectLiteral::kNoFlags;
1662  flags |= expr->has_function()
1663  ? ObjectLiteral::kHasFunction
1664  : ObjectLiteral::kNoFlags;
1665  __ li(a0, Operand(Smi::FromInt(flags)));
1666  int properties_count = constant_properties->length() / 2;
1667  if (expr->may_store_doubles() || expr->depth() > 1 ||
1668  masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements ||
1670  __ Push(a3, a2, a1, a0);
1671  __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1672  } else {
1673  FastCloneShallowObjectStub stub(isolate(), properties_count);
1674  __ CallStub(&stub);
1675  }
1676 
1677  // If result_saved is true the result is on top of the stack. If
1678  // result_saved is false the result is in v0.
1679  bool result_saved = false;
1680 
1681  // Mark all computed expressions that are bound to a key that
1682  // is shadowed by a later occurrence of the same key. For the
1683  // marked expressions, no store code is emitted.
1684  expr->CalculateEmitStore(zone());
1685 
1686  AccessorTable accessor_table(zone());
1687  for (int i = 0; i < expr->properties()->length(); i++) {
1688  ObjectLiteral::Property* property = expr->properties()->at(i);
1689  if (property->IsCompileTimeValue()) continue;
1690 
1691  Literal* key = property->key();
1692  Expression* value = property->value();
1693  if (!result_saved) {
1694  __ push(v0); // Save result on stack.
1695  result_saved = true;
1696  }
1697  switch (property->kind()) {
1699  UNREACHABLE();
1700  case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1701  DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1702  // Fall through.
1703  case ObjectLiteral::Property::COMPUTED:
1704  if (key->value()->IsInternalizedString()) {
1705  if (property->emit_store()) {
1706  VisitForAccumulatorValue(value);
1709  __ li(StoreDescriptor::NameRegister(), Operand(key->value()));
1711  CallStoreIC(key->LiteralFeedbackId());
1713  } else {
1714  VisitForEffect(value);
1715  }
1716  break;
1717  }
1718  // Duplicate receiver on stack.
1719  __ ld(a0, MemOperand(sp));
1720  __ push(a0);
1721  VisitForStackValue(key);
1722  VisitForStackValue(value);
1723  if (property->emit_store()) {
1724  __ li(a0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes.
1725  __ push(a0);
1726  __ CallRuntime(Runtime::kSetProperty, 4);
1727  } else {
1728  __ Drop(3);
1729  }
1730  break;
1731  case ObjectLiteral::Property::PROTOTYPE:
1732  // Duplicate receiver on stack.
1733  __ ld(a0, MemOperand(sp));
1734  __ push(a0);
1735  VisitForStackValue(value);
1736  if (property->emit_store()) {
1737  __ CallRuntime(Runtime::kSetPrototype, 2);
1738  } else {
1739  __ Drop(2);
1740  }
1741  break;
1742  case ObjectLiteral::Property::GETTER:
1743  accessor_table.lookup(key)->second->getter = value;
1744  break;
1745  case ObjectLiteral::Property::SETTER:
1746  accessor_table.lookup(key)->second->setter = value;
1747  break;
1748  }
1749  }
1750 
1751  // Emit code to define accessors, using only a single call to the runtime for
1752  // each pair of corresponding getters and setters.
1753  for (AccessorTable::Iterator it = accessor_table.begin();
1754  it != accessor_table.end();
1755  ++it) {
1756  __ ld(a0, MemOperand(sp)); // Duplicate receiver.
1757  __ push(a0);
1758  VisitForStackValue(it->first);
1759  EmitAccessor(it->second->getter);
1760  EmitAccessor(it->second->setter);
1761  __ li(a0, Operand(Smi::FromInt(NONE)));
1762  __ push(a0);
1763  __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1764  }
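// E.g. { get x() {}, set x(v) {} } is defined with a single runtime call
// covering both accessors, per the pairing comment above.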
1765 
1766  if (expr->has_function()) {
1767  DCHECK(result_saved);
1768  __ ld(a0, MemOperand(sp));
1769  __ push(a0);
1770  __ CallRuntime(Runtime::kToFastProperties, 1);
1771  }
1772 
1773  if (result_saved) {
1774  context()->PlugTOS();
1775  } else {
1776  context()->Plug(v0);
1777  }
1778 }
1779 
1780 
1781 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1782  Comment cmnt(masm_, "[ ArrayLiteral");
1783 
1784  expr->BuildConstantElements(isolate());
1785  int flags = expr->depth() == 1
1786  ? ArrayLiteral::kShallowElements
1787  : ArrayLiteral::kNoFlags;
1788 
1789  ZoneList<Expression*>* subexprs = expr->values();
1790  int length = subexprs->length();
1791 
1792  Handle<FixedArray> constant_elements = expr->constant_elements();
1793  DCHECK_EQ(2, constant_elements->length());
1794  ElementsKind constant_elements_kind =
1795  static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1796  bool has_fast_elements =
1797  IsFastObjectElementsKind(constant_elements_kind);
1798  Handle<FixedArrayBase> constant_elements_values(
1799  FixedArrayBase::cast(constant_elements->get(1)));
1800 
1801  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1802  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1803  // Allocation sites here only feed pretenuring and element-kind
1804  // transitions; with pretenuring off and already-fast elements there is
 // nothing left to transition to, so tracking can be turned off.
1805  allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1806  }
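// Illustrative example: with pretenuring enabled, or for a literal such as
// [1.5, 2.5] whose elements are not fast object elements, the allocation
// site is kept so transitions can still be recorded.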
1807 
1808  __ mov(a0, result_register());
1809  __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1810  __ ld(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
1811  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1812  __ li(a1, Operand(constant_elements));
1813  if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
1814  __ li(a0, Operand(Smi::FromInt(flags)));
1815  __ Push(a3, a2, a1, a0);
1816  __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1817  } else {
1818  FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1819  __ CallStub(&stub);
1820  }
1821 
1822  bool result_saved = false; // Is the result saved to the stack?
1823 
1824  // Emit code to evaluate all the non-constant subexpressions and to store
1825  // them into the newly cloned array.
1826  for (int i = 0; i < length; i++) {
1827  Expression* subexpr = subexprs->at(i);
1828  // If the subexpression is a literal or a simple materialized literal it
1829  // is already set in the cloned array.
1830  if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1831 
1832  if (!result_saved) {
1833  __ push(v0); // array literal
1834  __ Push(Smi::FromInt(expr->literal_index()));
1835  result_saved = true;
1836  }
1837 
1838  VisitForAccumulatorValue(subexpr);
1839 
1840  if (IsFastObjectElementsKind(constant_elements_kind)) {
1841  int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1842  __ ld(a6, MemOperand(sp, kPointerSize)); // Copy of array literal.
1843  __ ld(a1, FieldMemOperand(a6, JSObject::kElementsOffset));
1844  __ sd(result_register(), FieldMemOperand(a1, offset));
1845  // Update the write barrier for the array store.
1846  __ RecordWriteField(a1, offset, result_register(), a2,
1847  kRAHasBeenSaved, kDontSaveFPRegs,
1848  EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1849  } else {
1850  __ li(a3, Operand(Smi::FromInt(i)));
1851  __ mov(a0, result_register());
1852  StoreArrayLiteralElementStub stub(isolate());
1853  __ CallStub(&stub);
1854  }
1855 
1856  PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1857  }
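// Only non-constant subexpressions reach the loop above; e.g. in [0, 1, x]
// only the element x is stored into the cloned backing store.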
1858  if (result_saved) {
1859  __ Pop(); // literal index
1860  context()->PlugTOS();
1861  } else {
1862  context()->Plug(v0);
1863  }
1864 }
1865 
1866 
1867 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1868  DCHECK(expr->target()->IsValidReferenceExpression());
1869 
1870  Comment cmnt(masm_, "[ Assignment");
1871 
1872  // Left-hand side can only be a property, a global or a (parameter or local)
1873  // slot.
1874  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1875  LhsKind assign_type = VARIABLE;
1876  Property* property = expr->target()->AsProperty();
1877  if (property != NULL) {
1878  assign_type = (property->key()->IsPropertyName())
1879  ? NAMED_PROPERTY
1880  : KEYED_PROPERTY;
1881  }
1882 
1883  // Evaluate LHS expression.
1884  switch (assign_type) {
1885  case VARIABLE:
1886  // Nothing to do here.
1887  break;
1888  case NAMED_PROPERTY:
1889  if (expr->is_compound()) {
1890  // We need the receiver both on the stack and in the register.
1891  VisitForStackValue(property->obj());
1892  __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1893  } else {
1894  VisitForStackValue(property->obj());
1895  }
1896  break;
1897  case KEYED_PROPERTY:
1898  // We need the key and receiver on both the stack and in v0 and a1.
1899  if (expr->is_compound()) {
1900  VisitForStackValue(property->obj());
1901  VisitForStackValue(property->key());
1902  __ ld(LoadDescriptor::ReceiverRegister(),
1903  MemOperand(sp, 1 * kPointerSize));
1904  __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1905  } else {
1906  VisitForStackValue(property->obj());
1907  VisitForStackValue(property->key());
1908  }
1909  break;
1910  }
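// E.g. for a compound assignment like o[k] += 1, receiver and key remain on
// the stack for the later store while copies were loaded into the IC
// registers above.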
1911 
1912  // For compound assignments we need another deoptimization point after the
1913  // variable/property load.
1914  if (expr->is_compound()) {
1915  { AccumulatorValueContext context(this);
1916  switch (assign_type) {
1917  case VARIABLE:
1918  EmitVariableLoad(expr->target()->AsVariableProxy());
1919  PrepareForBailout(expr->target(), TOS_REG);
1920  break;
1921  case NAMED_PROPERTY:
1922  EmitNamedPropertyLoad(property);
1923  PrepareForBailoutForId(property->LoadId(), TOS_REG);
1924  break;
1925  case KEYED_PROPERTY:
1926  EmitKeyedPropertyLoad(property);
1927  PrepareForBailoutForId(property->LoadId(), TOS_REG);
1928  break;
1929  }
1930  }
1931 
1932  Token::Value op = expr->binary_op();
1933  __ push(v0); // Left operand goes on the stack.
1934  VisitForAccumulatorValue(expr->value());
1935 
1936  OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1937  ? OVERWRITE_RIGHT
1938  : NO_OVERWRITE;
1939  SetSourcePosition(expr->position() + 1);
1940  AccumulatorValueContext context(this);
1941  if (ShouldInlineSmiCase(op)) {
1942  EmitInlineSmiBinaryOp(expr->binary_operation(),
1943  op,
1944  mode,
1945  expr->target(),
1946  expr->value());
1947  } else {
1948  EmitBinaryOp(expr->binary_operation(), op, mode);
1949  }
1950 
1951  // Deoptimization point in case the binary operation may have side effects.
1952  PrepareForBailout(expr->binary_operation(), TOS_REG);
1953  } else {
1954  VisitForAccumulatorValue(expr->value());
1955  }
1956 
1957  // Record source position before possible IC call.
1958  SetSourcePosition(expr->position());
1959 
1960  // Store the value.
1961  switch (assign_type) {
1962  case VARIABLE:
1963  EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1964  expr->op());
1965  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1966  context()->Plug(v0);
1967  break;
1968  case NAMED_PROPERTY:
1969  EmitNamedPropertyAssignment(expr);
1970  break;
1971  case KEYED_PROPERTY:
1972  EmitKeyedPropertyAssignment(expr);
1973  break;
1974  }
1975 }
1976 
1977 
1978 void FullCodeGenerator::VisitYield(Yield* expr) {
1979  Comment cmnt(masm_, "[ Yield");
1980  // Evaluate yielded value first; the initial iterator definition depends on
1981  // this. It stays on the stack while we update the iterator.
1982  VisitForStackValue(expr->expression());
1983 
1984  switch (expr->yield_kind()) {
1985  case Yield::kSuspend:
1986  // Pop value from top-of-stack slot; box result into result register.
1987  EmitCreateIteratorResult(false);
1988  __ push(result_register());
1989  // Fall through.
1990  case Yield::kInitial: {
1991  Label suspend, continuation, post_runtime, resume;
1992 
1993  __ jmp(&suspend);
1994 
1995  __ bind(&continuation);
1996  __ jmp(&resume);
1997 
1998  __ bind(&suspend);
1999  VisitForAccumulatorValue(expr->generator_object());
2000  DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2001  __ li(a1, Operand(Smi::FromInt(continuation.pos())));
2002  __ sd(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
2003  __ sd(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
2004  __ mov(a1, cp);
2005  __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
2006  kRAHasBeenSaved, kDontSaveFPRegs);
2007  __ Daddu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2008  __ Branch(&post_runtime, eq, sp, Operand(a1));
2009  __ push(v0); // generator object
2010  __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2011  __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2012  __ bind(&post_runtime);
2013  __ pop(result_register());
2014  EmitReturnSequence();
2015 
2016  __ bind(&resume);
2017  context()->Plug(result_register());
2018  break;
2019  }
2020 
2021  case Yield::kFinal: {
2022  VisitForAccumulatorValue(expr->generator_object());
2023  __ li(a1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2024  __ sd(a1, FieldMemOperand(result_register(),
2025  JSGeneratorObject::kContinuationOffset));
2026  // Pop value from top-of-stack slot, box result into result register.
2027  EmitCreateIteratorResult(true);
2028  EmitUnwindBeforeReturn();
2029  EmitReturnSequence();
2030  break;
2031  }
2032 
2033  case Yield::kDelegating: {
2034  VisitForStackValue(expr->generator_object());
2035 
2036  // Initial stack layout is as follows:
2037  // [sp + 1 * kPointerSize] iter
2038  // [sp + 0 * kPointerSize] g
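// Roughly, the code below implements the loop
//   try { received = yield iter.next(received); }
//   catch (e) { received = yield iter['throw'](e); }
// (an illustrative sketch of the control flow, not the exact desugaring).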
2039 
2040  Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2041  Label l_next, l_call;
2042  Register load_receiver = LoadDescriptor::ReceiverRegister();
2043  Register load_name = LoadDescriptor::NameRegister();
2044  // Initial send value is undefined.
2045  __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
2046  __ Branch(&l_next);
2047 
2048  // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2049  __ bind(&l_catch);
2050  __ mov(a0, v0);
2051  handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2052  __ LoadRoot(a2, Heap::kthrow_stringRootIndex); // "throw"
2053  __ ld(a3, MemOperand(sp, 1 * kPointerSize)); // iter
2054  __ Push(a2, a3, a0); // "throw", iter, except
2055  __ jmp(&l_call);
2056 
2057  // try { received = %yield result }
2058  // Shuffle the received result above a try handler and yield it without
2059  // re-boxing.
2060  __ bind(&l_try);
2061  __ pop(a0); // result
2062  __ PushTryHandler(StackHandler::CATCH, expr->index());
2063  const int handler_size = StackHandlerConstants::kSize;
2064  __ push(a0); // result
2065  __ jmp(&l_suspend);
2066  __ bind(&l_continuation);
2067  __ mov(a0, v0);
2068  __ jmp(&l_resume);
2069  __ bind(&l_suspend);
2070  const int generator_object_depth = kPointerSize + handler_size;
2071  __ ld(a0, MemOperand(sp, generator_object_depth));
2072  __ push(a0); // g
2073  DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2074  __ li(a1, Operand(Smi::FromInt(l_continuation.pos())));
2075  __ sd(a1, FieldMemOperand(a0, JSGeneratorObject::kContinuationOffset));
2076  __ sd(cp, FieldMemOperand(a0, JSGeneratorObject::kContextOffset));
2077  __ mov(a1, cp);
2078  __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2,
2079  kRAHasBeenSaved, kDontSaveFPRegs);
2080  __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2081  __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2082  __ pop(v0);  // result
2083  EmitReturnSequence();
2084  __ mov(a0, v0);
2085  __ bind(&l_resume); // received in a0
2086  __ PopTryHandler();
2087 
2088  // receiver = iter; f = 'next'; arg = received;
2089  __ bind(&l_next);
2090  __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
2091  __ ld(a3, MemOperand(sp, 1 * kPointerSize)); // iter
2092  __ Push(load_name, a3, a0); // "next", iter, received
2093 
2094  // result = receiver[f](arg);
2095  __ bind(&l_call);
2096  __ ld(load_receiver, MemOperand(sp, kPointerSize));
2097  __ ld(load_name, MemOperand(sp, 2 * kPointerSize));
2098  if (FLAG_vector_ics) {
2099  __ li(VectorLoadICDescriptor::SlotRegister(),
2100  Operand(Smi::FromInt(expr->KeyedLoadFeedbackSlot())));
2101  }
2102  Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2103  CallIC(ic, TypeFeedbackId::None());
2104  __ mov(a0, v0);
2105  __ mov(a1, a0);
2106  __ sd(a1, MemOperand(sp, 2 * kPointerSize));
2107  CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2108  __ CallStub(&stub);
2109 
2110  __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2111  __ Drop(1); // The function is still on the stack; drop it.
2112 
2113  // if (!result.done) goto l_try;
2114  __ Move(load_receiver, v0);
2115 
2116  __ push(load_receiver); // save result
2117  __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2118  if (FLAG_vector_ics) {
2119  __ li(VectorLoadICDescriptor::SlotRegister(),
2120  Operand(Smi::FromInt(expr->DoneFeedbackSlot())));
2121  }
2122  CallLoadIC(NOT_CONTEXTUAL); // v0=result.done
2123  __ mov(a0, v0);
2124  Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2125  CallIC(bool_ic);
2126  __ Branch(&l_try, eq, v0, Operand(zero_reg));
2127 
2128  // result.value
2129  __ pop(load_receiver); // result
2130  __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2131  if (FLAG_vector_ics) {
2132  __ li(VectorLoadICDescriptor::SlotRegister(),
2133  Operand(Smi::FromInt(expr->ValueFeedbackSlot())));
2134  }
2135  CallLoadIC(NOT_CONTEXTUAL); // v0=result.value
2136  context()->DropAndPlug(2, v0); // drop iter and g
2137  break;
2138  }
2139  }
2140 }
2141 
2142 
2143 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2144  Expression *value,
2145  JSGeneratorObject::ResumeMode resume_mode) {
2146  // The value stays in a0, and is ultimately read by the resumed generator, as
2147  // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2148  // is read to throw the value when the resumed generator is already closed.
2149  // a1 will hold the generator object until the activation has been resumed.
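// resume_mode distinguishes gen.next(value) (JSGeneratorObject::NEXT) from
// gen.throw(value); the closed_state and wrong_state paths below handle the
// already-closed and still-running cases.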
2150  VisitForStackValue(generator);
2151  VisitForAccumulatorValue(value);
2152  __ pop(a1);
2153 
2154  // Check generator state.
2155  Label wrong_state, closed_state, done;
2156  __ ld(a3, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2157  STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
2158  STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
2159  __ Branch(&closed_state, eq, a3, Operand(zero_reg));
2160  __ Branch(&wrong_state, lt, a3, Operand(zero_reg));
2161 
2162  // Load suspended function and context.
2163  __ ld(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
2164  __ ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
2165 
2166  // Load receiver and store as the first argument.
2167  __ ld(a2, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
2168  __ push(a2);
2169 
2170  // Push holes for the rest of the arguments to the generator function.
2171  __ ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
2172  // The argument count is stored as int32_t on 64-bit platforms.
2173  // TODO(plind): Smi on 32-bit platforms.
2174  __ lw(a3,
2175  FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
2176  __ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
2177  Label push_argument_holes, push_frame;
2178  __ bind(&push_argument_holes);
2179  __ Dsubu(a3, a3, Operand(1));
2180  __ Branch(&push_frame, lt, a3, Operand(zero_reg));
2181  __ push(a2);
2182  __ jmp(&push_argument_holes);
2183 
2184  // Enter a new JavaScript frame, and initialize its slots as they were when
2185  // the generator was suspended.
2186  Label resume_frame;
2187  __ bind(&push_frame);
2188  __ Call(&resume_frame);
2189  __ jmp(&done);
2190  __ bind(&resume_frame);
2191  // ra = return address.
2192  // fp = caller's frame pointer.
2193  // cp = callee's context,
2194  // a4 = callee's JS function.
2195  __ Push(ra, fp, cp, a4);
2196  // Adjust FP to point to saved FP.
2197  __ Daddu(fp, sp, 2 * kPointerSize);
2198 
2199  // Load the operand stack size.
2200  __ ld(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
2201  __ ld(a3, FieldMemOperand(a3, FixedArray::kLengthOffset));
2202  __ SmiUntag(a3);
2203 
2204  // If we are sending a value and there is no operand stack, we can jump back
2205  // in directly.
2206  if (resume_mode == JSGeneratorObject::NEXT) {
2207  Label slow_resume;
2208  __ Branch(&slow_resume, ne, a3, Operand(zero_reg));
2209  __ ld(a3, FieldMemOperand(a4, JSFunction::kCodeEntryOffset));
2210  __ ld(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2211  __ SmiUntag(a2);
2212  __ Daddu(a3, a3, Operand(a2));
2213  __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2214  __ sd(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2215  __ Jump(a3);
2216  __ bind(&slow_resume);
2217  }
2218 
2219  // Otherwise, we push holes for the operand stack and call the runtime to fix
2220  // up the stack and the handlers.
2221  Label push_operand_holes, call_resume;
2222  __ bind(&push_operand_holes);
2223  __ Dsubu(a3, a3, Operand(1));
2224  __ Branch(&call_resume, lt, a3, Operand(zero_reg));
2225  __ push(a2);
2226  __ Branch(&push_operand_holes);
2227  __ bind(&call_resume);
2228  DCHECK(!result_register().is(a1));
2229  __ Push(a1, result_register());
2230  __ Push(Smi::FromInt(resume_mode));
2231  __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2232  // Not reached: the runtime call returns elsewhere.
2233  __ stop("not-reached");
2234 
2235  // Reach here when generator is closed.
2236  __ bind(&closed_state);
2237  if (resume_mode == JSGeneratorObject::NEXT) {
2238  // Return completed iterator result when generator is closed.
2239  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2240  __ push(a2);
2241  // Pop value from top-of-stack slot; box result into result register.
2242  EmitCreateIteratorResult(true);
2243  } else {
2244  // Throw the provided value.
2245  __ push(a0);
2246  __ CallRuntime(Runtime::kThrow, 1);
2247  }
2248  __ jmp(&done);
2249 
2250  // Throw error if we attempt to operate on a running generator.
2251  __ bind(&wrong_state);
2252  __ push(a1);
2253  __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
2254 
2255  __ bind(&done);
2256  context()->Plug(result_register());
2257 }
2258 
2259 
2260 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2261  Label gc_required;
2262  Label allocated;
2263 
2264  Handle<Map> map(isolate()->native_context()->iterator_result_map());
2265 
2266  __ Allocate(map->instance_size(), v0, a2, a3, &gc_required, TAG_OBJECT);
2267  __ jmp(&allocated);
2268 
2269  __ bind(&gc_required);
2270  __ Push(Smi::FromInt(map->instance_size()));
2271  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2272  __ ld(context_register(),
2273  MemOperand(fp, StandardFrameConstants::kContextOffset));
2274 
2275  __ bind(&allocated);
2276  __ li(a1, Operand(map));
2277  __ pop(a2);
2278  __ li(a3, Operand(isolate()->factory()->ToBoolean(done)));
2279  __ li(a4, Operand(isolate()->factory()->empty_fixed_array()));
2280  DCHECK_EQ(map->instance_size(), 5 * kPointerSize);
2281  __ sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2282  __ sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset));
2283  __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
2284  __ sd(a2,
2285  FieldMemOperand(v0, JSGeneratorObject::kResultValuePropertyOffset));
2286  __ sd(a3,
2287  FieldMemOperand(v0, JSGeneratorObject::kResultDonePropertyOffset));
2288 
2289  // Only the value field needs a write barrier, as the other values are in the
2290  // root set.
2291  __ RecordWriteField(v0, JSGeneratorObject::kResultValuePropertyOffset,
2292  a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
2293 }
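// The object built here has the iterator-protocol shape { value, done }.
// Only the value field may point into new space, hence the single write
// barrier above.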
2294 
2295 
2296 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2297  SetSourcePosition(prop->position());
2298  Literal* key = prop->key()->AsLiteral();
2299  DCHECK(!prop->IsSuperAccess());
2300 
2301  __ li(LoadDescriptor::NameRegister(), Operand(key->value()));
2302  if (FLAG_vector_ics) {
2303  __ li(VectorLoadICDescriptor::SlotRegister(),
2304  Operand(Smi::FromInt(prop->PropertyFeedbackSlot())));
2305  CallLoadIC(NOT_CONTEXTUAL);
2306  } else {
2307  CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2308  }
2309 }
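// This emits the load for a named access such as o.foo: the receiver is
// already in LoadDescriptor::ReceiverRegister() and the name is materialized
// above, with a feedback slot supplied when vector ICs are enabled.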
2310 
2311 
2312 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2313  SetSourcePosition(prop->position());
2314  Literal* key = prop->key()->AsLiteral();
2315  DCHECK(!key->value()->IsSmi());
2316  DCHECK(prop->IsSuperAccess());
2317 
2318  SuperReference* super_ref = prop->obj()->AsSuperReference();
2319  EmitLoadHomeObject(super_ref);
2320  __ Push(v0);
2321  VisitForStackValue(super_ref->this_var());
2322  __ Push(key->value());
2323  __ CallRuntime(Runtime::kLoadFromSuper, 3);
2324 }
2325 
2326 
2327 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2328  SetSourcePosition(prop->position());
2329  // Call keyed load IC. It has register arguments receiver and key.
2330  Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2331  if (FLAG_vector_ics) {
2332  __ li(VectorLoadICDescriptor::SlotRegister(),
2333  Operand(Smi::FromInt(prop->PropertyFeedbackSlot())));
2334  CallIC(ic);
2335  } else {
2336  CallIC(ic, prop->PropertyFeedbackId());
2337  }
2338 }
2339 
2340 
2341 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2342  Token::Value op,
2343  OverwriteMode mode,
2344  Expression* left_expr,
2345  Expression* right_expr) {
2346  Label done, smi_case, stub_call;
2347 
2348  Register scratch1 = a2;
2349  Register scratch2 = a3;
2350 
2351  // Get the arguments.
2352  Register left = a1;
2353  Register right = a0;
2354  __ pop(left);
2355  __ mov(a0, result_register());
2356 
2357  // Perform combined smi check on both operands.
2358  __ Or(scratch1, left, Operand(right));
2359  STATIC_ASSERT(kSmiTag == 0);
2360  JumpPatchSite patch_site(masm_);
2361  patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2362 
2363  __ bind(&stub_call);
2364  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2365  CallIC(code, expr->BinaryOperationFeedbackId());
2366  patch_site.EmitPatchInfo();
2367  __ jmp(&done);
2368 
2369  __ bind(&smi_case);
2370  // Smi case. This code works the same way as the smi-smi case in the type
2371  // recording binary operation stub.
2372  switch (op) {
2373  case Token::SAR:
2374  __ GetLeastBitsFromSmi(scratch1, right, 5);
2375  __ dsrav(right, left, scratch1);
2376  __ And(v0, right, Operand(0xffffffff00000000L));
2377  break;
2378  case Token::SHL: {
2379  __ SmiUntag(scratch1, left);
2380  __ GetLeastBitsFromSmi(scratch2, right, 5);
2381  __ dsllv(scratch1, scratch1, scratch2);
2382  __ SmiTag(v0, scratch1);
2383  break;
2384  }
2385  case Token::SHR: {
2386  __ SmiUntag(scratch1, left);
2387  __ GetLeastBitsFromSmi(scratch2, right, 5);
2388  __ dsrlv(scratch1, scratch1, scratch2);
2389  __ And(scratch2, scratch1, 0x80000000);
2390  __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
2391  __ SmiTag(v0, scratch1);
2392  break;
2393  }
2394  case Token::ADD:
2395  __ AdduAndCheckForOverflow(v0, left, right, scratch1);
2396  __ BranchOnOverflow(&stub_call, scratch1);
2397  break;
2398  case Token::SUB:
2399  __ SubuAndCheckForOverflow(v0, left, right, scratch1);
2400  __ BranchOnOverflow(&stub_call, scratch1);
2401  break;
2402  case Token::MUL: {
2403  __ Dmulh(v0, left, right);
2404  __ dsra32(scratch2, v0, 0);
2405  __ sra(scratch1, v0, 31);
2406  __ Branch(USE_DELAY_SLOT, &stub_call, ne, scratch2, Operand(scratch1));
2407  __ SmiTag(v0);
2408  __ Branch(USE_DELAY_SLOT, &done, ne, v0, Operand(zero_reg));
2409  __ Daddu(scratch2, right, left);
2410  __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
2411  DCHECK(Smi::FromInt(0) == 0);
2412  __ mov(v0, zero_reg);
2413  break;
2414  }
2415  case Token::BIT_OR:
2416  __ Or(v0, left, Operand(right));
2417  break;
2418  case Token::BIT_AND:
2419  __ And(v0, left, Operand(right));
2420  break;
2421  case Token::BIT_XOR:
2422  __ Xor(v0, left, Operand(right));
2423  break;
2424  default:
2425  UNREACHABLE();
2426  }
2427 
2428  __ bind(&done);
2429  context()->Plug(v0);
2430 }
2431 
2432 
2433 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2434  Token::Value op,
2435  OverwriteMode mode) {
2436  __ mov(a0, result_register());
2437  __ pop(a1);
2438  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2439  JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2440  CallIC(code, expr->BinaryOperationFeedbackId());
2441  patch_site.EmitPatchInfo();
2442  context()->Plug(v0);
2443 }
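// Unlike EmitInlineSmiBinaryOp, this generic path keeps the patch site
// unbound, which signals that no inlined smi code was emitted.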
2444 
2445 
2446 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2447  DCHECK(expr->IsValidReferenceExpression());
2448 
2449  // Left-hand side can only be a property, a global or a (parameter or local)
2450  // slot.
2451  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2452  LhsKind assign_type = VARIABLE;
2453  Property* prop = expr->AsProperty();
2454  if (prop != NULL) {
2455  assign_type = (prop->key()->IsPropertyName())
2456  ? NAMED_PROPERTY
2457  : KEYED_PROPERTY;
2458  }
2459 
2460  switch (assign_type) {
2461  case VARIABLE: {
2462  Variable* var = expr->AsVariableProxy()->var();
2463  EffectContext context(this);
2464  EmitVariableAssignment(var, Token::ASSIGN);
2465  break;
2466  }
2467  case NAMED_PROPERTY: {
2468  __ push(result_register()); // Preserve value.
2469  VisitForAccumulatorValue(prop->obj());
2470  __ mov(StoreDescriptor::ReceiverRegister(), result_register());
2471  __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2472  __ li(StoreDescriptor::NameRegister(),
2473  Operand(prop->key()->AsLiteral()->value()));
2474  CallStoreIC();
2475  break;
2476  }
2477  case KEYED_PROPERTY: {
2478  __ push(result_register()); // Preserve value.
2479  VisitForStackValue(prop->obj());
2480  VisitForAccumulatorValue(prop->key());
2481  __ Move(StoreDescriptor::NameRegister(), result_register());
2482  __ Pop(StoreDescriptor::ValueRegister(),
2483  StoreDescriptor::ReceiverRegister());
2484  Handle<Code> ic =
2485  CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2486  CallIC(ic);
2487  break;
2488  }
2489  }
2490  context()->Plug(v0);
2491 }
2492 
2493 
2494 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2495  Variable* var, MemOperand location) {
2496  __ sd(result_register(), location);
2497  if (var->IsContextSlot()) {
2498  // RecordWrite may destroy all its register arguments.
2499  __ Move(a3, result_register());
2500  int offset = Context::SlotOffset(var->index());
2501  __ RecordWriteContextSlot(
2502  a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
2503  }
2504 }
2505 
2506 
2507 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
2508  if (var->IsUnallocated()) {
2509  // Global var, const, or let.
2510  __ mov(StoreDescriptor::ValueRegister(), result_register());
2511  __ li(StoreDescriptor::NameRegister(), Operand(var->name()));
2512  __ ld(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2513  CallStoreIC();
2514  } else if (op == Token::INIT_CONST_LEGACY) {
2515  // Const initializers need a write barrier.
2516  DCHECK(!var->IsParameter()); // No const parameters.
2517  if (var->IsLookupSlot()) {
2518  __ li(a0, Operand(var->name()));
2519  __ Push(v0, cp, a0); // Context and name.
2520  __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2521  } else {
2522  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2523  Label skip;
2524  MemOperand location = VarOperand(var, a1);
2525  __ ld(a2, location);
2526  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2527  __ Branch(&skip, ne, a2, Operand(at));
2528  EmitStoreToStackLocalOrContextSlot(var, location);
2529  __ bind(&skip);
2530  }
2531 
2532  } else if (var->mode() == LET && op != Token::INIT_LET) {
2533  // Non-initializing assignment to let variable needs a write barrier.
2534  DCHECK(!var->IsLookupSlot());
2535  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2536  Label assign;
2537  MemOperand location = VarOperand(var, a1);
2538  __ ld(a3, location);
2539  __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
2540  __ Branch(&assign, ne, a3, Operand(a4));
2541  __ li(a3, Operand(var->name()));
2542  __ push(a3);
2543  __ CallRuntime(Runtime::kThrowReferenceError, 1);
2544  // Perform the assignment.
2545  __ bind(&assign);
2546  EmitStoreToStackLocalOrContextSlot(var, location);
2547 
2548  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2549  if (var->IsLookupSlot()) {
2550  // Assignment to var.
2551  __ li(a4, Operand(var->name()));
2552  __ li(a3, Operand(Smi::FromInt(strict_mode())));
2553  // sp[0]  : mode.
2554  // sp[8]  : name.
2555  // sp[16] : context.
2556  // sp[24] : value.
2557  __ Push(v0, cp, a4, a3);
2558  __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2559  } else {
2560  // Assignment to var or initializing assignment to let/const in harmony
2561  // mode.
2562  DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2563  MemOperand location = VarOperand(var, a1);
2564  if (generate_debug_code_ && op == Token::INIT_LET) {
2565  // Check for an uninitialized let binding.
2566  __ ld(a2, location);
2567  __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
2568  __ Check(eq, kLetBindingReInitialization, a2, Operand(a4));
2569  }
2570  EmitStoreToStackLocalOrContextSlot(var, location);
2571  }
2572  }
2573  // Non-initializing assignments to consts are ignored.
2574 }
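// E.g. an assignment to an uninitialized let binding takes the hole-check
// path above and throws a ReferenceError instead of storing.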
2575 
2576 
2577 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2578  // Assignment to a property, using a named store IC.
2579  Property* prop = expr->target()->AsProperty();
2580  DCHECK(prop != NULL);
2581  DCHECK(prop->key()->IsLiteral());
2582 
2583  // Record source code position before IC call.
2584  SetSourcePosition(expr->position());
2585  __ mov(StoreDescriptor::ValueRegister(), result_register());
2586  __ li(StoreDescriptor::NameRegister(),
2587  Operand(prop->key()->AsLiteral()->value()));
2588  __ pop(StoreDescriptor::ReceiverRegister());
2589  CallStoreIC(expr->AssignmentFeedbackId());
2590 
2591  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2592  context()->Plug(v0);
2593 }
2594 
2595 
2596 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2597  // Assignment to a property, using a keyed store IC.
2598 
2599  // Record source code position before IC call.
2600  SetSourcePosition(expr->position());
2601  // Call keyed store IC.
2602  // The arguments are:
2603  // - a0 is the value,
2604  // - a1 is the key,
2605  // - a2 is the receiver.
2606  __ mov(StoreDescriptor::ValueRegister(), result_register());
2607  __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
2608  DCHECK(StoreDescriptor::ValueRegister().is(a0));
2609 
2610  Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2611  CallIC(ic, expr->AssignmentFeedbackId());
2612 
2613  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2614  context()->Plug(v0);
2615 }
2616 
2617 
2618 void FullCodeGenerator::VisitProperty(Property* expr) {
2619  Comment cmnt(masm_, "[ Property");
2620  Expression* key = expr->key();
2621 
2622  if (key->IsPropertyName()) {
2623  if (!expr->IsSuperAccess()) {
2624  VisitForAccumulatorValue(expr->obj());
2625  __ Move(LoadDescriptor::ReceiverRegister(), v0);
2626  EmitNamedPropertyLoad(expr);
2627  } else {
2628  EmitNamedSuperPropertyLoad(expr);
2629  }
2630  PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2631  context()->Plug(v0);
2632  } else {
2633  VisitForStackValue(expr->obj());
2634  VisitForAccumulatorValue(expr->key());
2635  __ Move(LoadDescriptor::NameRegister(), v0);
2636  __ pop(LoadDescriptor::ReceiverRegister());
2637  EmitKeyedPropertyLoad(expr);
2638  context()->Plug(v0);
2639  }
2640 }
2641 
2642 
2643 void FullCodeGenerator::CallIC(Handle<Code> code,
2644  TypeFeedbackId id) {
2645  ic_total_count_++;
2646  __ Call(code, RelocInfo::CODE_TARGET, id);
2647 }
2648 
2649 
2650 // Code common for calls using the IC.
2651 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2652  Expression* callee = expr->expression();
2653 
2654  CallICState::CallType call_type =
2655  callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2656 
2657  // Get the target function.
2658  if (call_type == CallICState::FUNCTION) {
2659  { StackValueContext context(this);
2660  EmitVariableLoad(callee->AsVariableProxy());
2661  PrepareForBailout(callee, NO_REGISTERS);
2662  }
2663  // Push undefined as receiver. This is patched in the method prologue if it
2664  // is a sloppy mode method.
2665  __ Push(isolate()->factory()->undefined_value());
2666  } else {
2667  // Load the function from the receiver.
2668  DCHECK(callee->IsProperty());
2669  DCHECK(!callee->AsProperty()->IsSuperAccess());
2670  __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2671  EmitNamedPropertyLoad(callee->AsProperty());
2672  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2673  // Push the target function under the receiver.
2674  __ ld(at, MemOperand(sp, 0));
2675  __ push(at);
2676  __ sd(v0, MemOperand(sp, kPointerSize));
2677  }
2678 
2679  EmitCall(expr, call_type);
2680 }
2681 
2682 
2683 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2684  Expression* callee = expr->expression();
2685  DCHECK(callee->IsProperty());
2686  Property* prop = callee->AsProperty();
2687  DCHECK(prop->IsSuperAccess());
2688 
2689  SetSourcePosition(prop->position());
2690  Literal* key = prop->key()->AsLiteral();
2691  DCHECK(!key->value()->IsSmi());
2692  // Load the function from the receiver.
2693  const Register scratch = a1;
2694  SuperReference* super_ref = prop->obj()->AsSuperReference();
2695  EmitLoadHomeObject(super_ref);
2696  __ Push(v0);
2697  VisitForAccumulatorValue(super_ref->this_var());
2698  __ Push(v0);
2699  __ ld(scratch, MemOperand(sp, kPointerSize));
2700  __ Push(scratch, v0);
2701  __ Push(key->value());
2702 
2703  // Stack here:
2704  // - home_object
2705  // - this (receiver)
2706  // - home_object <-- LoadFromSuper will pop here and below.
2707  // - this (receiver)
2708  // - key
2709  __ CallRuntime(Runtime::kLoadFromSuper, 3);
2710 
2711  // Replace home_object with target function.
2712  __ sd(v0, MemOperand(sp, kPointerSize));
2713 
2714  // Stack here:
2715  // - target function
2716  // - this (receiver)
2717  EmitCall(expr, CallICState::METHOD);
2718 }
2719 
2720 
2721 // Code common for calls using the IC.
2722 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2723  Expression* key) {
2724  // Load the key.
2725  VisitForAccumulatorValue(key);
2726 
2727  Expression* callee = expr->expression();
2728 
2729  // Load the function from the receiver.
2730  DCHECK(callee->IsProperty());
2731  __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2732  __ Move(LoadDescriptor::NameRegister(), v0);
2733  EmitKeyedPropertyLoad(callee->AsProperty());
2734  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2735 
2736  // Push the target function under the receiver.
2737  __ ld(at, MemOperand(sp, 0));
2738  __ push(at);
2739  __ sd(v0, MemOperand(sp, kPointerSize));
2740 
2741  EmitCall(expr, CallICState::METHOD);
2742 }
2743 
2744 
2745 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2746  // Load the arguments.
2747  ZoneList<Expression*>* args = expr->arguments();
2748  int arg_count = args->length();
2749  { PreservePositionScope scope(masm()->positions_recorder());
2750  for (int i = 0; i < arg_count; i++) {
2751  VisitForStackValue(args->at(i));
2752  }
2753  }
2754 
2755  // Record source position of the IC call.
2756  SetSourcePosition(expr->position());
2757  Handle<Code> ic = CallIC::initialize_stub(
2758  isolate(), arg_count, call_type);
2759  __ li(a3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
2760  __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2761  // Don't assign a type feedback id to the IC, since type feedback is provided
2762  // by the vector above.
2763  CallIC(ic);
2764  RecordJSReturnSite(expr);
2765  // Restore context register.
2766  __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2767  context()->DropAndPlug(1, v0);
2768 }
2769 
2770 
2772  // a6: copy of the first argument or undefined if it doesn't exist.
2773  if (arg_count > 0) {
2774  __ ld(a6, MemOperand(sp, arg_count * kPointerSize));
2775  } else {
2776  __ LoadRoot(a6, Heap::kUndefinedValueRootIndex);
2777  }
2778 
2779  // a5: the receiver of the enclosing function.
2780  int receiver_offset = 2 + info_->scope()->num_parameters();
2781  __ ld(a5, MemOperand(fp, receiver_offset * kPointerSize));
2782 
2783  // a4: the strict mode.
2784  __ li(a4, Operand(Smi::FromInt(strict_mode())));
2785 
2786  // a1: the start position of the scope the call resides in.
2787  __ li(a1, Operand(Smi::FromInt(scope()->start_position())));
2788 
2789  // Do the runtime call.
2790  __ Push(a6, a5, a4, a1);
2791  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2792 }
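// This supports direct calls like eval('x + y'), where the callee and
// receiver must be resolved at runtime against the calling scope.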
2793 
2794 
2795 void FullCodeGenerator::VisitCall(Call* expr) {
2796 #ifdef DEBUG
2797  // We want to verify that RecordJSReturnSite gets called on all paths
2798  // through this function. Avoid early returns.
2799  expr->return_is_recorded_ = false;
2800 #endif
2801 
2802  Comment cmnt(masm_, "[ Call");
2803  Expression* callee = expr->expression();
2804  Call::CallType call_type = expr->GetCallType(isolate());
2805 
2806  if (call_type == Call::POSSIBLY_EVAL_CALL) {
2807  // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2808  // to resolve the function we need to call and the receiver of the
2809  // call. Then we call the resolved function using the given
2810  // arguments.
2811  ZoneList<Expression*>* args = expr->arguments();
2812  int arg_count = args->length();
2813 
2814  { PreservePositionScope pos_scope(masm()->positions_recorder());
2815  VisitForStackValue(callee);
2816  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2817  __ push(a2); // Reserved receiver slot.
2818 
2819  // Push the arguments.
2820  for (int i = 0; i < arg_count; i++) {
2821  VisitForStackValue(args->at(i));
2822  }
2823 
2824  // Push a copy of the function (found below the arguments) and
2825  // resolve eval.
2826  __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2827  __ push(a1);
2828  EmitResolvePossiblyDirectEval(arg_count);
2829 
2830  // The runtime call returns a pair of values in v0 (function) and
2831  // v1 (receiver). Touch up the stack with the right values.
2832  __ sd(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2833  __ sd(v1, MemOperand(sp, arg_count * kPointerSize));
2834  }
2835  // Record source position for debugger.
2836  SetSourcePosition(expr->position());
2837  CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
2838  __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2839  __ CallStub(&stub);
2840  RecordJSReturnSite(expr);
2841  // Restore context register.
2842  __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2843  context()->DropAndPlug(1, v0);
2844  } else if (call_type == Call::GLOBAL_CALL) {
2845  EmitCallWithLoadIC(expr);
2846  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2847  // Call to a lookup slot (dynamically introduced variable).
2848  VariableProxy* proxy = callee->AsVariableProxy();
2849  Label slow, done;
2850 
2851  { PreservePositionScope scope(masm()->positions_recorder());
2852  // Generate code for loading from variables potentially shadowed
2853  // by eval-introduced variables.
2854  EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
2855  }
2856 
2857  __ bind(&slow);
2858  // Call the runtime to find the function to call (returned in v0)
2859  // and the object holding it (returned in v1).
2860  DCHECK(!context_register().is(a2));
2861  __ li(a2, Operand(proxy->name()));
2862  __ Push(context_register(), a2);
2863  __ CallRuntime(Runtime::kLoadLookupSlot, 2);
2864  __ Push(v0, v1); // Function, receiver.
2865 
2866  // If fast case code has been generated, emit code to push the
2867  // function and receiver and have the slow path jump around this
2868  // code.
2869  if (done.is_linked()) {
2870  Label call;
2871  __ Branch(&call);
2872  __ bind(&done);
2873  // Push function.
2874  __ push(v0);
2875  // The receiver is implicitly the global receiver. Indicate this
2876  // by passing the hole to the call function stub.
2877  __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2878  __ push(a1);
2879  __ bind(&call);
2880  }
2881 
2882  // The receiver is either the global receiver or an object found
2883  // by LoadContextSlot.
2884  EmitCall(expr);
2885  } else if (call_type == Call::PROPERTY_CALL) {
2886  Property* property = callee->AsProperty();
2887  bool is_named_call = property->key()->IsPropertyName();
2888  // super.x() is handled in EmitCallWithLoadIC.
2889  if (property->IsSuperAccess() && is_named_call) {
2890  EmitSuperCallWithLoadIC(expr);
2891  } else {
2892  {
2893  PreservePositionScope scope(masm()->positions_recorder());
2894  VisitForStackValue(property->obj());
2895  }
2896  if (is_named_call) {
2897  EmitCallWithLoadIC(expr);
2898  } else {
2899  EmitKeyedCallWithLoadIC(expr, property->key());
2900  }
2901  }
2902  } else {
2903  DCHECK(call_type == Call::OTHER_CALL);
2904  // Call to an arbitrary expression not handled specially above.
2905  { PreservePositionScope scope(masm()->positions_recorder());
2906  VisitForStackValue(callee);
2907  }
2908  __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2909  __ push(a1);
2910  // Emit function call.
2911  EmitCall(expr);
2912  }
2913 
2914 #ifdef DEBUG
2915  // RecordJSReturnSite should have been called.
2916  DCHECK(expr->return_is_recorded_);
2917 #endif
2918 }
2919 
2920 
2921 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2922  Comment cmnt(masm_, "[ CallNew");
2923  // According to ECMA-262, section 11.2.2, page 44, the function
2924  // expression in new calls must be evaluated before the
2925  // arguments.
2926 
2927  // Push constructor on the stack. If it's not a function it's used as
2928  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2929  // ignored.
2930  VisitForStackValue(expr->expression());
2931 
2932  // Push the arguments ("left-to-right") on the stack.
2933  ZoneList<Expression*>* args = expr->arguments();
2934  int arg_count = args->length();
2935  for (int i = 0; i < arg_count; i++) {
2936  VisitForStackValue(args->at(i));
2937  }
2938  // Call the construct call builtin that handles allocation and
2939  // constructor invocation.
2940  SetSourcePosition(expr->position());
2941 
2942  // Load function and argument count into a1 and a0.
2943  __ li(a0, Operand(arg_count));
2944  __ ld(a1, MemOperand(sp, arg_count * kPointerSize));
2945 
2946  // Record call targets in unoptimized code.
2947  if (FLAG_pretenuring_call_new) {
2948  EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
2949  DCHECK(expr->AllocationSiteFeedbackSlot() ==
2950  expr->CallNewFeedbackSlot() + 1);
2951  }
2952 
2953  __ li(a2, FeedbackVector());
2954  __ li(a3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));
2955 
2956  CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
2957  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2958  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2959  context()->Plug(v0);
2960 }
2961 
2962 
2963 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2964  ZoneList<Expression*>* args = expr->arguments();
2965  DCHECK(args->length() == 1);
2966 
2967  VisitForAccumulatorValue(args->at(0));
2968 
2969  Label materialize_true, materialize_false;
2970  Label* if_true = NULL;
2971  Label* if_false = NULL;
2972  Label* fall_through = NULL;
2973  context()->PrepareTest(&materialize_true, &materialize_false,
2974  &if_true, &if_false, &fall_through);
2975 
2976  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2977  __ SmiTst(v0, a4);
2978  Split(eq, a4, Operand(zero_reg), if_true, if_false, fall_through);
2979 
2980  context()->Plug(if_true, if_false);
2981 }
2982 
2983 
2984 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2985  ZoneList<Expression*>* args = expr->arguments();
2986  DCHECK(args->length() == 1);
2987 
2988  VisitForAccumulatorValue(args->at(0));
2989 
2990  Label materialize_true, materialize_false;
2991  Label* if_true = NULL;
2992  Label* if_false = NULL;
2993  Label* fall_through = NULL;
2994  context()->PrepareTest(&materialize_true, &materialize_false,
2995  &if_true, &if_false, &fall_through);
2996 
2997  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2998  __ NonNegativeSmiTst(v0, at);
2999  Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through);
3000 
3001  context()->Plug(if_true, if_false);
3002 }
3003 
3004 
3005 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3006  ZoneList<Expression*>* args = expr->arguments();
3007  DCHECK(args->length() == 1);
3008 
3009  VisitForAccumulatorValue(args->at(0));
3010 
3011  Label materialize_true, materialize_false;
3012  Label* if_true = NULL;
3013  Label* if_false = NULL;
3014  Label* fall_through = NULL;
3015  context()->PrepareTest(&materialize_true, &materialize_false,
3016  &if_true, &if_false, &fall_through);
3017 
3018  __ JumpIfSmi(v0, if_false);
3019  __ LoadRoot(at, Heap::kNullValueRootIndex);
3020  __ Branch(if_true, eq, v0, Operand(at));
3021  __ ld(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
3022  // Undetectable objects behave like undefined when tested with typeof.
3023  __ lbu(a1, FieldMemOperand(a2, Map::kBitFieldOffset));
3024  __ And(at, a1, Operand(1 << Map::kIsUndetectable));
3025  __ Branch(if_false, ne, at, Operand(zero_reg));
3026  __ lbu(a1, FieldMemOperand(a2, Map::kInstanceTypeOffset));
3027  __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
3028  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3029  Split(le, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE),
3030  if_true, if_false, fall_through);
3031 
3032  context()->Plug(if_true, if_false);
3033 }
3034 
3035 
3036 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3037  ZoneList<Expression*>* args = expr->arguments();
3038  DCHECK(args->length() == 1);
3039 
3040  VisitForAccumulatorValue(args->at(0));
3041 
3042  Label materialize_true, materialize_false;
3043  Label* if_true = NULL;
3044  Label* if_false = NULL;
3045  Label* fall_through = NULL;
3046  context()->PrepareTest(&materialize_true, &materialize_false,
3047  &if_true, &if_false, &fall_through);
3048 
3049  __ JumpIfSmi(v0, if_false);
3050  __ GetObjectType(v0, a1, a1);
3051  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3052  Split(ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE),
3053  if_true, if_false, fall_through);
3054 
3055  context()->Plug(if_true, if_false);
3056 }
3057 
3058 
3059 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3060  ZoneList<Expression*>* args = expr->arguments();
3061  DCHECK(args->length() == 1);
3062 
3063  VisitForAccumulatorValue(args->at(0));
3064 
3065  Label materialize_true, materialize_false;
3066  Label* if_true = NULL;
3067  Label* if_false = NULL;
3068  Label* fall_through = NULL;
3069  context()->PrepareTest(&materialize_true, &materialize_false,
3070  &if_true, &if_false, &fall_through);
3071 
3072  __ JumpIfSmi(v0, if_false);
3073  __ ld(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
3074  __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
3075  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3076  __ And(at, a1, Operand(1 << Map::kIsUndetectable));
3077  Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through);
3078 
3079  context()->Plug(if_true, if_false);
3080 }
3081 
3082 
3083 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3084  CallRuntime* expr) {
3085  ZoneList<Expression*>* args = expr->arguments();
3086  DCHECK(args->length() == 1);
3087 
3088  VisitForAccumulatorValue(args->at(0));
3089 
3090  Label materialize_true, materialize_false, skip_lookup;
3091  Label* if_true = NULL;
3092  Label* if_false = NULL;
3093  Label* fall_through = NULL;
3094  context()->PrepareTest(&materialize_true, &materialize_false,
3095  &if_true, &if_false, &fall_through);
3096 
3097  __ AssertNotSmi(v0);
3098 
3099  __ ld(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
3100  __ lbu(a4, FieldMemOperand(a1, Map::kBitField2Offset));
3101  __ And(a4, a4, 1 << Map::kStringWrapperSafeForDefaultValueOf);
3102  __ Branch(&skip_lookup, ne, a4, Operand(zero_reg));
3103 
3104  // Check for fast case object. Generate false result for slow case object.
3105  __ ld(a2, FieldMemOperand(v0, JSObject::kPropertiesOffset));
3106  __ ld(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
3107  __ LoadRoot(a4, Heap::kHashTableMapRootIndex);
3108  __ Branch(if_false, eq, a2, Operand(a4));
3109 
3110  // Look for valueOf name in the descriptor array, and indicate false if
3111  // found. Since we omit an enumeration index check, if it is added via a
3112  // transition that shares its descriptor array, this is a false positive.
3113  Label entry, loop, done;
3114 
3115  // Skip loop if no descriptors are valid.
3116  __ NumberOfOwnDescriptors(a3, a1);
3117  __ Branch(&done, eq, a3, Operand(zero_reg));
3118 
3119  __ LoadInstanceDescriptors(a1, a4);
3120  // a4: descriptor array.
3121  // a3: valid entries in the descriptor array.
3122  STATIC_ASSERT(kSmiTag == 0);
3123  STATIC_ASSERT(kSmiTagSize == 1);
3124 // The following assert is not needed on 64-bit targets (kPointerSize == 8):
3125 // STATIC_ASSERT(kPointerSize == 4);
3126  __ li(at, Operand(DescriptorArray::kDescriptorSize));
3127  __ Dmul(a3, a3, at);
3128  // Calculate location of the first key name.
3129  __ Daddu(a4, a4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3130  // Calculate the end of the descriptor array.
3131  __ mov(a2, a4);
3132  __ dsll(a5, a3, kPointerSizeLog2);
3133  __ Daddu(a2, a2, a5);
3134 
3135  // Loop through all the keys in the descriptor array. If one of these is the
3136  // string "valueOf" the result is false.
3137  // The use of a6 to store the valueOf string assumes that it is not otherwise
3138  // used in the loop below.
3139  __ li(a6, Operand(isolate()->factory()->value_of_string()));
3140  __ jmp(&entry);
3141  __ bind(&loop);
3142  __ ld(a3, MemOperand(a4, 0));
3143  __ Branch(if_false, eq, a3, Operand(a6));
3144  __ Daddu(a4, a4, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3145  __ bind(&entry);
3146  __ Branch(&loop, ne, a4, Operand(a2));
3147 
3148  __ bind(&done);
3149 
3150  // Set the bit in the map to indicate that there is no local valueOf field.
3151  __ lbu(a2, FieldMemOperand(a1, Map::kBitField2Offset));
3152  __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3153  __ sb(a2, FieldMemOperand(a1, Map::kBitField2Offset));
3154 
3155  __ bind(&skip_lookup);
3156 
3157  // If a valueOf property is not found on the object check that its
3158  // prototype is the un-modified String prototype. If not result is false.
3159  __ ld(a2, FieldMemOperand(a1, Map::kPrototypeOffset));
3160  __ JumpIfSmi(a2, if_false);
3161  __ ld(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
3162  __ ld(a3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3163  __ ld(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset));
3164  __ ld(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3165  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3166  Split(eq, a2, Operand(a3), if_true, if_false, fall_through);
3167 
3168  context()->Plug(if_true, if_false);
3169 }
3170 
3171 
3172 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3173  ZoneList<Expression*>* args = expr->arguments();
3174  DCHECK(args->length() == 1);
3175 
3176  VisitForAccumulatorValue(args->at(0));
3177 
3178  Label materialize_true, materialize_false;
3179  Label* if_true = NULL;
3180  Label* if_false = NULL;
3181  Label* fall_through = NULL;
3182  context()->PrepareTest(&materialize_true, &materialize_false,
3183  &if_true, &if_false, &fall_through);
3184 
3185  __ JumpIfSmi(v0, if_false);
3186  __ GetObjectType(v0, a1, a2);
3187  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3188  __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE));
3189  __ Branch(if_false);
3190 
3191  context()->Plug(if_true, if_false);
3192 }
3193 
3194 
3195 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3196  ZoneList<Expression*>* args = expr->arguments();
3197  DCHECK(args->length() == 1);
3198 
3199  VisitForAccumulatorValue(args->at(0));
3200 
3201  Label materialize_true, materialize_false;
3202  Label* if_true = NULL;
3203  Label* if_false = NULL;
3204  Label* fall_through = NULL;
3205  context()->PrepareTest(&materialize_true, &materialize_false,
3206  &if_true, &if_false, &fall_through);
3207 
3208  __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3209  __ lwu(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3210  __ lwu(a1, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3211  __ li(a4, 0x80000000);
3212  Label not_nan;
3213  __ Branch(&not_nan, ne, a2, Operand(a4));
3214  __ mov(a4, zero_reg);
3215  __ mov(a2, a1);
3216  __ bind(&not_nan);
3217 
3218  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3219  Split(eq, a2, Operand(a4), if_true, if_false, fall_through);
3220 
3221  context()->Plug(if_true, if_false);
3222 }
3223 
3224 
3225 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3226  ZoneList<Expression*>* args = expr->arguments();
3227  DCHECK(args->length() == 1);
3228 
3229  VisitForAccumulatorValue(args->at(0));
3230 
3231  Label materialize_true, materialize_false;
3232  Label* if_true = NULL;
3233  Label* if_false = NULL;
3234  Label* fall_through = NULL;
3235  context()->PrepareTest(&materialize_true, &materialize_false,
3236  &if_true, &if_false, &fall_through);
3237 
3238  __ JumpIfSmi(v0, if_false);
3239  __ GetObjectType(v0, a1, a1);
3240  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3241  Split(eq, a1, Operand(JS_ARRAY_TYPE),
3242  if_true, if_false, fall_through);
3243 
3244  context()->Plug(if_true, if_false);
3245 }
3246 
3247 
3248 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3249  ZoneList<Expression*>* args = expr->arguments();
3250  DCHECK(args->length() == 1);
3251 
3252  VisitForAccumulatorValue(args->at(0));
3253 
3254  Label materialize_true, materialize_false;
3255  Label* if_true = NULL;
3256  Label* if_false = NULL;
3257  Label* fall_through = NULL;
3258  context()->PrepareTest(&materialize_true, &materialize_false,
3259  &if_true, &if_false, &fall_through);
3260 
3261  __ JumpIfSmi(v0, if_false);
3262  __ GetObjectType(v0, a1, a1);
3263  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3264  Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);
3265 
3266  context()->Plug(if_true, if_false);
3267 }
3268 
3269 
3270 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3271  DCHECK(expr->arguments()->length() == 0);
3272 
3273  Label materialize_true, materialize_false;
3274  Label* if_true = NULL;
3275  Label* if_false = NULL;
3276  Label* fall_through = NULL;
3277  context()->PrepareTest(&materialize_true, &materialize_false,
3278  &if_true, &if_false, &fall_through);
3279 
3280  // Get the frame pointer for the calling frame.
3281  __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3282 
3283  // Skip the arguments adaptor frame if it exists.
3284  Label check_frame_marker;
3285  __ ld(a1, MemOperand(a2, StandardFrameConstants::kContextOffset));
3286  __ Branch(&check_frame_marker, ne,
3287  a1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3288  __ ld(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
3289 
3290  // Check the marker in the calling frame.
3291  __ bind(&check_frame_marker);
3292  __ ld(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset));
3293  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3294  Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)),
3295  if_true, if_false, fall_through);
3296 
3297  context()->Plug(if_true, if_false);
3298 }
3299 
3300 
3301 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3302  ZoneList<Expression*>* args = expr->arguments();
3303  DCHECK(args->length() == 2);
3304 
3305  // Load the two objects into registers and perform the comparison.
3306  VisitForStackValue(args->at(0));
3307  VisitForAccumulatorValue(args->at(1));
3308 
3309  Label materialize_true, materialize_false;
3310  Label* if_true = NULL;
3311  Label* if_false = NULL;
3312  Label* fall_through = NULL;
3313  context()->PrepareTest(&materialize_true, &materialize_false,
3314  &if_true, &if_false, &fall_through);
3315 
3316  __ pop(a1);
3317  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3318  Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
3319 
3320  context()->Plug(if_true, if_false);
3321 }
3322 
3323 
3324 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3325  ZoneList<Expression*>* args = expr->arguments();
3326  DCHECK(args->length() == 1);
3327 
3328  // ArgumentsAccessStub expects the key in a1 and the formal
3329  // parameter count in a0.
3330  VisitForAccumulatorValue(args->at(0));
3331  __ mov(a1, v0);
3332  __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3333  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3334  __ CallStub(&stub);
3335  context()->Plug(v0);
3336 }
3337 
3338 
3339 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3340  DCHECK(expr->arguments()->length() == 0);
3341  Label exit;
3342  // Get the number of formal parameters.
3343  __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3344 
3345  // Check if the calling frame is an arguments adaptor frame.
3346  __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3347  __ ld(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
3348  __ Branch(&exit, ne, a3,
3349  Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3350 
3351  // Arguments adaptor case: Read the arguments length from the
3352  // adaptor frame.
3353  __ ld(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
3354 
3355  __ bind(&exit);
3356  context()->Plug(v0);
3357 }
3358 
3359 
3360 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3361  ZoneList<Expression*>* args = expr->arguments();
3362  DCHECK(args->length() == 1);
3363  Label done, null, function, non_function_constructor;
3364 
3365  VisitForAccumulatorValue(args->at(0));
3366 
3367  // If the object is a smi, we return null.
3368  __ JumpIfSmi(v0, &null);
3369 
3370  // Check that the object is a JS object but take special care of JS
3371  // functions to make sure they have 'Function' as their class.
3372  // Assume that there are only two callable types, and one of them is at
3373  // either end of the type range for JS object types. Saves extra comparisons.
3374  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3375  __ GetObjectType(v0, v0, a1); // Map is now in v0.
3376  __ Branch(&null, lt, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
3377 
3378  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3379  FIRST_SPEC_OBJECT_TYPE + 1);
3380  __ Branch(&function, eq, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
3381 
3382  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3383  LAST_SPEC_OBJECT_TYPE - 1);
3384  __ Branch(&function, eq, a1, Operand(LAST_SPEC_OBJECT_TYPE));
3385  // Assume that there is no larger type.
3386  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3387 
3388  // Check if the constructor in the map is a JS function.
3389  __ ld(v0, FieldMemOperand(v0, Map::kConstructorOffset));
3390  __ GetObjectType(v0, a1, a1);
3391  __ Branch(&non_function_constructor, ne, a1, Operand(JS_FUNCTION_TYPE));
3392 
3393  // v0 now contains the constructor function. Grab the
3394  // instance class name from there.
3395  __ ld(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
3396  __ ld(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
3397  __ Branch(&done);
3398 
3399  // Functions have class 'Function'.
3400  __ bind(&function);
3401  __ LoadRoot(v0, Heap::kFunction_stringRootIndex);
3402  __ jmp(&done);
3403 
3404  // Objects with a non-function constructor have class 'Object'.
3405  __ bind(&non_function_constructor);
3406  __ LoadRoot(v0, Heap::kObject_stringRootIndex);
3407  __ jmp(&done);
3408 
3409  // Non-JS objects have class null.
3410  __ bind(&null);
3411  __ LoadRoot(v0, Heap::kNullValueRootIndex);
3412 
3413  // All done.
3414  __ bind(&done);
3415 
3416  context()->Plug(v0);
3417 }
3418 
3419 
3420 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3421  // Load the arguments on the stack and call the stub.
3422  SubStringStub stub(isolate());
3423  ZoneList<Expression*>* args = expr->arguments();
3424  DCHECK(args->length() == 3);
3425  VisitForStackValue(args->at(0));
3426  VisitForStackValue(args->at(1));
3427  VisitForStackValue(args->at(2));
3428  __ CallStub(&stub);
3429  context()->Plug(v0);
3430 }
3431 
3432 
3433 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3434  // Load the arguments on the stack and call the stub.
3435  RegExpExecStub stub(isolate());
3436  ZoneList<Expression*>* args = expr->arguments();
3437  DCHECK(args->length() == 4);
3438  VisitForStackValue(args->at(0));
3439  VisitForStackValue(args->at(1));
3440  VisitForStackValue(args->at(2));
3441  VisitForStackValue(args->at(3));
3442  __ CallStub(&stub);
3443  context()->Plug(v0);
3444 }
3445 
3446 
3447 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3448  ZoneList<Expression*>* args = expr->arguments();
3449  DCHECK(args->length() == 1);
3450 
3451  VisitForAccumulatorValue(args->at(0)); // Load the object.
3452 
3453  Label done;
3454  // If the object is a smi return the object.
3455  __ JumpIfSmi(v0, &done);
3456  // If the object is not a value type, return the object.
3457  __ GetObjectType(v0, a1, a1);
3458  __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));
3459 
3460  __ ld(v0, FieldMemOperand(v0, JSValue::kValueOffset));
3461 
3462  __ bind(&done);
3463  context()->Plug(v0);
3464 }
3465 
3466 
3467 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3468  ZoneList<Expression*>* args = expr->arguments();
3469  DCHECK(args->length() == 2);
3470  DCHECK_NE(NULL, args->at(1)->AsLiteral());
3471  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3472 
3473  VisitForAccumulatorValue(args->at(0)); // Load the object.
3474 
3475  Label runtime, done, not_date_object;
3476  Register object = v0;
3477  Register result = v0;
3478  Register scratch0 = t1;
3479  Register scratch1 = a1;
3480 
3481  __ JumpIfSmi(object, &not_date_object);
3482  __ GetObjectType(object, scratch1, scratch1);
3483  __ Branch(&not_date_object, ne, scratch1, Operand(JS_DATE_TYPE));
3484 
3485  if (index->value() == 0) {
3486  __ ld(result, FieldMemOperand(object, JSDate::kValueOffset));
3487  __ jmp(&done);
3488  } else {
3489  if (index->value() < JSDate::kFirstUncachedField) {
3490  ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3491  __ li(scratch1, Operand(stamp));
3492  __ ld(scratch1, MemOperand(scratch1));
3493  __ ld(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3494  __ Branch(&runtime, ne, scratch1, Operand(scratch0));
3495  __ ld(result, FieldMemOperand(object, JSDate::kValueOffset +
3496  kPointerSize * index->value()));
3497  __ jmp(&done);
3498  }
3499  __ bind(&runtime);
3500  __ PrepareCallCFunction(2, scratch1);
3501  __ li(a1, Operand(index));
3502  __ Move(a0, object);
3503  __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3504  __ jmp(&done);
3505  }
3506 
3507  __ bind(&not_date_object);
3508  __ CallRuntime(Runtime::kThrowNotDateError, 0);
3509  __ bind(&done);
3510  context()->Plug(v0);
3511 }
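// Added note for this listing (not in the V8 source): fields below
// JSDate::kFirstUncachedField are served from a per-object cache that is only
// valid while the object's kCacheStampOffset word still matches the
// isolate-wide date cache stamp; a timezone/DST cache flush bumps the stamp,
// so stale objects fall through to the C call get_date_field_function, which
// recomputes and refills the cache. Roughly:
//
//   if (date.cache_stamp == isolate.date_cache_stamp)
//     return date.cached_field[index];    // fast path
//   return GetDateField(date, index);     // C call, refills cache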
3512 
3513 
3514 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3515  ZoneList<Expression*>* args = expr->arguments();
3516  DCHECK_EQ(3, args->length());
3517 
3518  Register string = v0;
3519  Register index = a1;
3520  Register value = a2;
3521 
3522  VisitForStackValue(args->at(0)); // index
3523  VisitForStackValue(args->at(1)); // value
3524  VisitForAccumulatorValue(args->at(2)); // string
3525  __ Pop(index, value);
3526 
3527  if (FLAG_debug_code) {
3528  __ SmiTst(value, at);
3529  __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3530  __ SmiTst(index, at);
3531  __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3532  __ SmiUntag(index, index);
3533  static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3534  Register scratch = t1;
3535  __ EmitSeqStringSetCharCheck(
3536  string, index, value, scratch, one_byte_seq_type);
3537  __ SmiTag(index, index);
3538  }
3539 
3540  __ SmiUntag(value, value);
3541  __ Daddu(at,
3542           string,
3543           Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3544  __ SmiUntag(index);
3545  __ Daddu(at, at, index);
3546  __ sb(value, MemOperand(at));
3547  context()->Plug(string);
3548 }
3549 
3550 
3551 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3552  ZoneList<Expression*>* args = expr->arguments();
3553  DCHECK_EQ(3, args->length());
3554 
3555  Register string = v0;
3556  Register index = a1;
3557  Register value = a2;
3558 
3559  VisitForStackValue(args->at(0)); // index
3560  VisitForStackValue(args->at(1)); // value
3561  VisitForAccumulatorValue(args->at(2)); // string
3562  __ Pop(index, value);
3563 
3564  if (FLAG_debug_code) {
3565  __ SmiTst(value, at);
3566  __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3567  __ SmiTst(index, at);
3568  __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3569  __ SmiUntag(index, index);
3570  static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3571  Register scratch = t1;
3572  __ EmitSeqStringSetCharCheck(
3573  string, index, value, scratch, two_byte_seq_type);
3574  __ SmiTag(index, index);
3575  }
3576 
3577  __ SmiUntag(value, value);
3578  __ Daddu(at,
3579           string,
3580           Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3581  __ dsra(index, index, 32 - 1);
3582  __ Daddu(at, at, index);
3583  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
3584  __ sh(value, MemOperand(at));
3585  context()->Plug(string);
3586 }
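// Added note for this listing (not in the V8 source): on MIPS64 a smi keeps
// its 32-bit payload in the upper word, so `dsra(index, index, 32 - 1)` above
// untags the index (arithmetic shift right by 32) while simultaneously
// scaling it by the two-byte character size (net left shift by 1). The
// one-byte variant instead untags fully with SmiUntag before its byte store.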
3587 
3588 
3589 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3590  // Load the arguments on the stack and call the runtime function.
3591  ZoneList<Expression*>* args = expr->arguments();
3592  DCHECK(args->length() == 2);
3593  VisitForStackValue(args->at(0));
3594  VisitForStackValue(args->at(1));
3595  MathPowStub stub(isolate(), MathPowStub::ON_STACK);
3596  __ CallStub(&stub);
3597  context()->Plug(v0);
3598 }
3599 
3600 
3601 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3602  ZoneList<Expression*>* args = expr->arguments();
3603  DCHECK(args->length() == 2);
3604 
3605  VisitForStackValue(args->at(0)); // Load the object.
3606  VisitForAccumulatorValue(args->at(1)); // Load the value.
3607  __ pop(a1); // v0 = value. a1 = object.
3608 
3609  Label done;
3610  // If the object is a smi, return the value.
3611  __ JumpIfSmi(a1, &done);
3612 
3613  // If the object is not a value type, return the value.
3614  __ GetObjectType(a1, a2, a2);
3615  __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE));
3616 
3617  // Store the value.
3618  __ sd(v0, FieldMemOperand(a1, JSValue::kValueOffset));
3619  // Update the write barrier. Save the value as it will be
3620  // overwritten by the write barrier code and is needed afterward.
3621  __ mov(a2, v0);
3622  __ RecordWriteField(
3623      a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
3624 
3625  __ bind(&done);
3626  context()->Plug(v0);
3627 }
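// Added note for this listing (not in the V8 source): the RecordWriteField
// call keeps the generational/incremental GC sound after storing a
// potentially-young value into the JSValue wrapper. The value is first copied
// to a2 because the write barrier may clobber its value register, while v0
// must still hold the result that gets plugged into the context.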
3628 
3629 
3630 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3631  ZoneList<Expression*>* args = expr->arguments();
3632  DCHECK_EQ(args->length(), 1);
3633 
3634  // Load the argument into a0 and call the stub.
3635  VisitForAccumulatorValue(args->at(0));
3636  __ mov(a0, result_register());
3637 
3638  NumberToStringStub stub(isolate());
3639  __ CallStub(&stub);
3640  context()->Plug(v0);
3641 }
3642 
3643 
3644 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3645  ZoneList<Expression*>* args = expr->arguments();
3646  DCHECK(args->length() == 1);
3647 
3648  VisitForAccumulatorValue(args->at(0));
3649 
3650  Label done;
3651  StringCharFromCodeGenerator generator(v0, a1);
3652  generator.GenerateFast(masm_);
3653  __ jmp(&done);
3654 
3655  NopRuntimeCallHelper call_helper;
3656  generator.GenerateSlow(masm_, call_helper);
3657 
3658  __ bind(&done);
3659  context()->Plug(a1);
3660 }
3661 
3662 
3663 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3664  ZoneList<Expression*>* args = expr->arguments();
3665  DCHECK(args->length() == 2);
3666 
3667  VisitForStackValue(args->at(0));
3668  VisitForAccumulatorValue(args->at(1));
3669  __ mov(a0, result_register());
3670 
3671  Register object = a1;
3672  Register index = a0;
3673  Register result = v0;
3674 
3675  __ pop(object);
3676 
3677  Label need_conversion;
3678  Label index_out_of_range;
3679  Label done;
3680  StringCharCodeAtGenerator generator(object,
3681  index,
3682  result,
3683  &need_conversion,
3684  &need_conversion,
3685  &index_out_of_range,
3686                                      STRING_INDEX_IS_NUMBER);
3687  generator.GenerateFast(masm_);
3688  __ jmp(&done);
3689 
3690  __ bind(&index_out_of_range);
3691  // When the index is out of range, the spec requires us to return
3692  // NaN.
3693  __ LoadRoot(result, Heap::kNanValueRootIndex);
3694  __ jmp(&done);
3695 
3696  __ bind(&need_conversion);
3697  // Load the undefined value into the result register, which will
3698  // trigger conversion.
3699  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3700  __ jmp(&done);
3701 
3702  NopRuntimeCallHelper call_helper;
3703  generator.GenerateSlow(masm_, call_helper);
3704 
3705  __ bind(&done);
3706  context()->Plug(result);
3707 }
3708 
3709 
3710 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3711  ZoneList<Expression*>* args = expr->arguments();
3712  DCHECK(args->length() == 2);
3713 
3714  VisitForStackValue(args->at(0));
3715  VisitForAccumulatorValue(args->at(1));
3716  __ mov(a0, result_register());
3717 
3718  Register object = a1;
3719  Register index = a0;
3720  Register scratch = a3;
3721  Register result = v0;
3722 
3723  __ pop(object);
3724 
3725  Label need_conversion;
3726  Label index_out_of_range;
3727  Label done;
3728  StringCharAtGenerator generator(object,
3729  index,
3730  scratch,
3731  result,
3732  &need_conversion,
3733  &need_conversion,
3734  &index_out_of_range,
3735                                  STRING_INDEX_IS_NUMBER);
3736  generator.GenerateFast(masm_);
3737  __ jmp(&done);
3738 
3739  __ bind(&index_out_of_range);
3740  // When the index is out of range, the spec requires us to return
3741  // the empty string.
3742  __ LoadRoot(result, Heap::kempty_stringRootIndex);
3743  __ jmp(&done);
3744 
3745  __ bind(&need_conversion);
3746  // Move smi zero into the result register, which will trigger
3747  // conversion.
3748  __ li(result, Operand(Smi::FromInt(0)));
3749  __ jmp(&done);
3750 
3751  NopRuntimeCallHelper call_helper;
3752  generator.GenerateSlow(masm_, call_helper);
3753 
3754  __ bind(&done);
3755  context()->Plug(result);
3756 }
3757 
3758 
3759 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3760  ZoneList<Expression*>* args = expr->arguments();
3761  DCHECK_EQ(2, args->length());
3762  VisitForStackValue(args->at(0));
3763  VisitForAccumulatorValue(args->at(1));
3764 
3765  __ pop(a1);
3766  __ mov(a0, result_register()); // StringAddStub requires args in a0, a1.
3767  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
3768  __ CallStub(&stub);
3769  context()->Plug(v0);
3770 }
3771 
3772 
3773 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3774  ZoneList<Expression*>* args = expr->arguments();
3775  DCHECK_EQ(2, args->length());
3776 
3777  VisitForStackValue(args->at(0));
3778  VisitForStackValue(args->at(1));
3779 
3780  StringCompareStub stub(isolate());
3781  __ CallStub(&stub);
3782  context()->Plug(v0);
3783 }
3784 
3785 
3786 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3787  ZoneList<Expression*>* args = expr->arguments();
3788  DCHECK(args->length() >= 2);
3789 
3790  int arg_count = args->length() - 2; // 2 ~ receiver and function.
3791  for (int i = 0; i < arg_count + 1; i++) {
3792  VisitForStackValue(args->at(i));
3793  }
3794  VisitForAccumulatorValue(args->last()); // Function.
3795 
3796  Label runtime, done;
3797  // Check for non-function argument (including proxy).
3798  __ JumpIfSmi(v0, &runtime);
3799  __ GetObjectType(v0, a1, a1);
3800  __ Branch(&runtime, ne, a1, Operand(JS_FUNCTION_TYPE));
3801 
3802  // InvokeFunction requires the function in a1. Move it in there.
3803  __ mov(a1, result_register());
3804  ParameterCount count(arg_count);
3805  __ InvokeFunction(a1, count, CALL_FUNCTION, NullCallWrapper());
3806  __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3807  __ jmp(&done);
3808 
3809  __ bind(&runtime);
3810  __ push(v0);
3811  __ CallRuntime(Runtime::kCall, args->length());
3812  __ bind(&done);
3813 
3814  context()->Plug(v0);
3815 }
3816 
3817 
3818 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3819  RegExpConstructResultStub stub(isolate());
3820  ZoneList<Expression*>* args = expr->arguments();
3821  DCHECK(args->length() == 3);
3822  VisitForStackValue(args->at(0));
3823  VisitForStackValue(args->at(1));
3824  VisitForAccumulatorValue(args->at(2));
3825  __ mov(a0, result_register());
3826  __ pop(a1);
3827  __ pop(a2);
3828  __ CallStub(&stub);
3829  context()->Plug(v0);
3830 }
3831 
3832 
3833 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3834  ZoneList<Expression*>* args = expr->arguments();
3835  DCHECK_EQ(2, args->length());
3836 
3837  DCHECK_NE(NULL, args->at(0)->AsLiteral());
3838  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3839 
3840  Handle<FixedArray> jsfunction_result_caches(
3841  isolate()->native_context()->jsfunction_result_caches());
3842  if (jsfunction_result_caches->length() <= cache_id) {
3843  __ Abort(kAttemptToUseUndefinedCache);
3844  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
3845  context()->Plug(v0);
3846  return;
3847  }
3848 
3849  VisitForAccumulatorValue(args->at(1));
3850 
3851  Register key = v0;
3852  Register cache = a1;
3853  __ ld(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3854  __ ld(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
3855  __ ld(cache,
3856         ContextOperand(
3857             cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3858  __ ld(cache,
3859         FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3860 
3861 
3862  Label done, not_found;
3863  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3864  __ ld(a2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
3865  // a2 now holds finger offset as a smi.
3866  __ Daddu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3867  // a3 now points to the start of fixed array elements.
3868  __ SmiScale(at, a2, kPointerSizeLog2);
3869  __ daddu(a3, a3, at);
3870  // a3 now points to key of indexed element of cache.
3871  __ ld(a2, MemOperand(a3));
3872  __ Branch(&not_found, ne, key, Operand(a2));
3873 
3874  __ ld(v0, MemOperand(a3, kPointerSize));
3875  __ Branch(&done);
3876 
3877  __ bind(&not_found);
3878  // Call runtime to perform the lookup.
3879  __ Push(cache, key);
3880  __ CallRuntime(Runtime::kGetFromCache, 2);
3881 
3882  __ bind(&done);
3883  context()->Plug(v0);
3884 }
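// Added sketch for this listing (not in the V8 source): a
// JSFunctionResultCache is a FixedArray laid out as a "finger" index followed
// by key/value pairs; the fast path above probes only the pair the finger
// points at, roughly:
//
//   i = cache.finger;                  // smi offset of the last hit
//   if (cache[i] == key) return cache[i + 1];
//   return %GetFromCache(cache, key);  // runtime probes/updates the cache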
3885 
3886 
3887 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3888  ZoneList<Expression*>* args = expr->arguments();
3889  VisitForAccumulatorValue(args->at(0));
3890 
3891  Label materialize_true, materialize_false;
3892  Label* if_true = NULL;
3893  Label* if_false = NULL;
3894  Label* fall_through = NULL;
3895  context()->PrepareTest(&materialize_true, &materialize_false,
3896  &if_true, &if_false, &fall_through);
3897 
3898  __ lwu(a0, FieldMemOperand(v0, String::kHashFieldOffset));
3899  __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));
3900 
3901  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3902  Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);
3903 
3904  context()->Plug(if_true, if_false);
3905 }
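// Added note for this listing (not in the V8 source): a string's 32-bit hash
// field doubles as a cache for a numeric array index when the string is a
// canonical index like "42". The masked value
// (hash & kContainsCachedArrayIndexMask) is zero exactly when such an index
// is cached, which is what the And/Split above tests; EmitGetCachedArrayIndex
// below then extracts the index itself via IndexFromHash.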
3906 
3907 
3908 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3909  ZoneList<Expression*>* args = expr->arguments();
3910  DCHECK(args->length() == 1);
3911  VisitForAccumulatorValue(args->at(0));
3912 
3913  __ AssertString(v0);
3914 
3915  __ lwu(v0, FieldMemOperand(v0, String::kHashFieldOffset));
3916  __ IndexFromHash(v0, v0);
3917 
3918  context()->Plug(v0);
3919 }
3920 
3921 
3922 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
3923  Label bailout, done, one_char_separator, long_separator,
3924  non_trivial_array, not_size_one_array, loop,
3925  empty_separator_loop, one_char_separator_loop,
3926  one_char_separator_loop_entry, long_separator_loop;
3927  ZoneList<Expression*>* args = expr->arguments();
3928  DCHECK(args->length() == 2);
3929  VisitForStackValue(args->at(1));
3930  VisitForAccumulatorValue(args->at(0));
3931 
3932  // All aliases of the same register have disjoint lifetimes.
3933  Register array = v0;
3934  Register elements = no_reg; // Will be v0.
3935  Register result = no_reg; // Will be v0.
3936  Register separator = a1;
3937  Register array_length = a2;
3938  Register result_pos = no_reg; // Will be a2.
3939  Register string_length = a3;
3940  Register string = a4;
3941  Register element = a5;
3942  Register elements_end = a6;
3943  Register scratch1 = a7;
3944  Register scratch2 = t1;
3945  Register scratch3 = t0;
3946 
3947  // Separator operand is on the stack.
3948  __ pop(separator);
3949 
3950  // Check that the array is a JSArray.
3951  __ JumpIfSmi(array, &bailout);
3952  __ GetObjectType(array, scratch1, scratch2);
3953  __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE));
3954 
3955  // Check that the array has fast elements.
3956  __ CheckFastElements(scratch1, scratch2, &bailout);
3957 
3958  // If the array has length zero, return the empty string.
3959  __ ld(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
3960  __ SmiUntag(array_length);
3961  __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
3962  __ LoadRoot(v0, Heap::kempty_stringRootIndex);
3963  __ Branch(&done);
3964 
3965  __ bind(&non_trivial_array);
3966 
3967  // Get the FixedArray containing array's elements.
3968  elements = array;
3969  __ ld(elements, FieldMemOperand(array, JSArray::kElementsOffset));
3970  array = no_reg; // End of array's live range.
3971 
3972  // Check that all array elements are sequential one-byte strings, and
3973  // accumulate the sum of their lengths, as a smi-encoded value.
3974  __ mov(string_length, zero_reg);
3975  __ Daddu(element,
3976  elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3977  __ dsll(elements_end, array_length, kPointerSizeLog2);
3978  __ Daddu(elements_end, element, elements_end);
3979  // Loop condition: while (element < elements_end).
3980  // Live values in registers:
3981  // elements: Fixed array of strings.
3982  // array_length: Length of the fixed array of strings (not smi)
3983  // separator: Separator string
3984  // string_length: Accumulated sum of string lengths (smi).
3985  // element: Current array element.
3986  // elements_end: Array end.
3987  if (generate_debug_code_) {
3988  __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin, array_length,
3989  Operand(zero_reg));
3990  }
3991  __ bind(&loop);
3992  __ ld(string, MemOperand(element));
3993  __ Daddu(element, element, kPointerSize);
3994  __ JumpIfSmi(string, &bailout);
3995  __ ld(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
3996  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3997  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
3998  __ ld(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
3999  __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
4000  __ BranchOnOverflow(&bailout, scratch3);
4001  __ Branch(&loop, lt, element, Operand(elements_end));
4002 
4003  // If array_length is 1, return elements[0], a string.
4004  __ Branch(&not_size_one_array, ne, array_length, Operand(1));
4005  __ ld(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
4006  __ Branch(&done);
4007 
4008  __ bind(&not_size_one_array);
4009 
4010  // Live values in registers:
4011  // separator: Separator string
4012  // array_length: Length of the array.
4013  // string_length: Sum of string lengths (smi).
4014  // elements: FixedArray of strings.
4015 
4016  // Check that the separator is a flat one-byte string.
4017  __ JumpIfSmi(separator, &bailout);
4018  __ ld(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
4019  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4020  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4021 
4022  // Add (separator length times array_length) - separator length to the
4023  // string_length to get the length of the result string. array_length is not
4024  // smi but the other values are, so the result is a smi.
4025  __ ld(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4026  __ Dsubu(string_length, string_length, Operand(scratch1));
4027  __ SmiUntag(scratch1);
4028  __ Dmul(scratch2, array_length, scratch1);
4029  // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
4030  // zero.
4031  __ dsra32(scratch1, scratch2, 0);
4032  __ Branch(&bailout, ne, scratch1, Operand(zero_reg));
4033  __ SmiUntag(string_length);
4034  __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
4035  __ BranchOnOverflow(&bailout, scratch3);
4036 
4037  // Get first element in the array to free up the elements register to be used
4038  // for the result.
4039  __ Daddu(element,
4040  elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4041  result = elements; // End of live range for elements.
4042  elements = no_reg;
4043  // Live values in registers:
4044  // element: First array element
4045  // separator: Separator string
4046  // string_length: Length of result string (not smi)
4047  // array_length: Length of the array.
4048  __ AllocateOneByteString(result, string_length, scratch1, scratch2,
4049  elements_end, &bailout);
4050  // Prepare for looping. Set up elements_end to end of the array. Set
4051  // result_pos to the position of the result where to write the first
4052  // character.
4053  __ dsll(elements_end, array_length, kPointerSizeLog2);
4054  __ Daddu(elements_end, element, elements_end);
4055  result_pos = array_length; // End of live range for array_length.
4056  array_length = no_reg;
4057  __ Daddu(result_pos,
4058           result,
4059           Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4060 
4061  // Check the length of the separator.
4062  __ ld(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4063  __ li(at, Operand(Smi::FromInt(1)));
4064  __ Branch(&one_char_separator, eq, scratch1, Operand(at));
4065  __ Branch(&long_separator, gt, scratch1, Operand(at));
4066 
4067  // Empty separator case.
4068  __ bind(&empty_separator_loop);
4069  // Live values in registers:
4070  // result_pos: the position to which we are currently copying characters.
4071  // element: Current array element.
4072  // elements_end: Array end.
4073 
4074  // Copy next array element to the result.
4075  __ ld(string, MemOperand(element));
4076  __ Daddu(element, element, kPointerSize);
4077  __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
4078  __ SmiUntag(string_length);
4079  __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4080  __ CopyBytes(string, result_pos, string_length, scratch1);
4081  // End while (element < elements_end).
4082  __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));
4083  DCHECK(result.is(v0));
4084  __ Branch(&done);
4085 
4086  // One-character separator case.
4087  __ bind(&one_char_separator);
4088  // Replace separator with its one-byte character value.
4089  __ lbu(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4090  // Jump into the loop after the code that copies the separator, so the first
4091  // element is not preceded by a separator.
4092  __ jmp(&one_char_separator_loop_entry);
4093 
4094  __ bind(&one_char_separator_loop);
4095  // Live values in registers:
4096  // result_pos: the position to which we are currently copying characters.
4097  // element: Current array element.
4098  // elements_end: Array end.
4099  // separator: Single separator one-byte char (in lower byte).
4100 
4101  // Copy the separator character to the result.
4102  __ sb(separator, MemOperand(result_pos));
4103  __ Daddu(result_pos, result_pos, 1);
4104 
4105  // Copy next array element to the result.
4106  __ bind(&one_char_separator_loop_entry);
4107  __ ld(string, MemOperand(element));
4108  __ Daddu(element, element, kPointerSize);
4109  __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
4110  __ SmiUntag(string_length);
4111  __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4112  __ CopyBytes(string, result_pos, string_length, scratch1);
4113  // End while (element < elements_end).
4114  __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));
4115  DCHECK(result.is(v0));
4116  __ Branch(&done);
4117 
4118  // Long separator case (separator is more than one character). Entry is at the
4119  // label long_separator below.
4120  __ bind(&long_separator_loop);
4121  // Live values in registers:
4122  // result_pos: the position to which we are currently copying characters.
4123  // element: Current array element.
4124  // elements_end: Array end.
4125  // separator: Separator string.
4126 
4127  // Copy the separator to the result.
4128  __ ld(string_length, FieldMemOperand(separator, String::kLengthOffset));
4129  __ SmiUntag(string_length);
4130  __ Daddu(string,
4131           separator,
4132           Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4133  __ CopyBytes(string, result_pos, string_length, scratch1);
4134 
4135  __ bind(&long_separator);
4136  __ ld(string, MemOperand(element));
4137  __ Daddu(element, element, kPointerSize);
4138  __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
4139  __ SmiUntag(string_length);
4140  __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4141  __ CopyBytes(string, result_pos, string_length, scratch1);
4142  // End while (element < elements_end).
4143  __ Branch(&long_separator_loop, lt, element, Operand(elements_end));
4144  DCHECK(result.is(v0));
4145  __ Branch(&done);
4146 
4147  __ bind(&bailout);
4148  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
4149  __ bind(&done);
4150  context()->Plug(v0);
4151 }
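// Added note for this listing (not in the V8 source): the join above first
// validates every element as a flat sequential one-byte string while summing
// the lengths (with smi overflow checks), allocates the result string once,
// and then specializes the copy loop on the separator length:
//
//   ""         -> empty_separator_loop    (back-to-back CopyBytes)
//   one char   -> one_char_separator_loop (a single sb per separator)
//   otherwise  -> long_separator_loop     (CopyBytes for each separator)
//
// Any non-flat input, length overflow, or allocation failure bails out to
// undefined, signalling the caller to take the generic join path.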
4152 
4153 
4154 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4155  DCHECK(expr->arguments()->length() == 0);
4156  ExternalReference debug_is_active =
4157  ExternalReference::debug_is_active_address(isolate());
4158  __ li(at, Operand(debug_is_active));
4159  __ lbu(v0, MemOperand(at));
4160  __ SmiTag(v0);
4161  context()->Plug(v0);
4162 }
4163 
4164 
4165 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4166  if (expr->function() != NULL &&
4167  expr->function()->intrinsic_type == Runtime::INLINE) {
4168  Comment cmnt(masm_, "[ InlineRuntimeCall");
4169  EmitInlineRuntimeCall(expr);
4170  return;
4171  }
4172 
4173  Comment cmnt(masm_, "[ CallRuntime");
4174  ZoneList<Expression*>* args = expr->arguments();
4175  int arg_count = args->length();
4176 
4177  if (expr->is_jsruntime()) {
4178  // Push the builtins object as the receiver.
4179  Register receiver = LoadDescriptor::ReceiverRegister();
4180  __ ld(receiver, GlobalObjectOperand());
4181  __ ld(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
4182  __ push(receiver);
4183 
4184  // Load the function from the receiver.
4185  __ li(LoadDescriptor::NameRegister(), Operand(expr->name()));
4186  if (FLAG_vector_ics) {
4187  __ li(VectorLoadICDescriptor::SlotRegister(),
4188  Operand(Smi::FromInt(expr->CallRuntimeFeedbackSlot())));
4189  CallLoadIC(NOT_CONTEXTUAL);
4190  } else {
4191  CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4192  }
4193 
4194  // Push the target function under the receiver.
4195  __ ld(at, MemOperand(sp, 0));
4196  __ push(at);
4197  __ sd(v0, MemOperand(sp, kPointerSize));
4198 
4199  // Push the arguments ("left-to-right").
4200  int arg_count = args->length();
4201  for (int i = 0; i < arg_count; i++) {
4202  VisitForStackValue(args->at(i));
4203  }
4204 
4205  // Record source position of the IC call.
4206  SetSourcePosition(expr->position());
4207  CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4208  __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
4209  __ CallStub(&stub);
4210 
4211  // Restore context register.
4212  __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4213 
4214  context()->DropAndPlug(1, v0);
4215  } else {
4216  // Push the arguments ("left-to-right").
4217  for (int i = 0; i < arg_count; i++) {
4218  VisitForStackValue(args->at(i));
4219  }
4220 
4221  // Call the C runtime function.
4222  __ CallRuntime(expr->function(), arg_count);
4223  context()->Plug(v0);
4224  }
4225 }
4226 
4227 
4228 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4229  switch (expr->op()) {
4230  case Token::DELETE: {
4231  Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4232  Property* property = expr->expression()->AsProperty();
4233  VariableProxy* proxy = expr->expression()->AsVariableProxy();
4234 
4235  if (property != NULL) {
4236  VisitForStackValue(property->obj());
4237  VisitForStackValue(property->key());
4238  __ li(a1, Operand(Smi::FromInt(strict_mode())));
4239  __ push(a1);
4240  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4241  context()->Plug(v0);
4242  } else if (proxy != NULL) {
4243  Variable* var = proxy->var();
4244  // Delete of an unqualified identifier is disallowed in strict mode
4245  // but "delete this" is allowed.
4246  DCHECK(strict_mode() == SLOPPY || var->is_this());
4247  if (var->IsUnallocated()) {
4248  __ ld(a2, GlobalObjectOperand());
4249  __ li(a1, Operand(var->name()));
4250  __ li(a0, Operand(Smi::FromInt(SLOPPY)));
4251  __ Push(a2, a1, a0);
4252  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4253  context()->Plug(v0);
4254  } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4255  // Result of deleting non-global, non-dynamic variables is false.
4256  // The subexpression does not have side effects.
4257  context()->Plug(var->is_this());
4258  } else {
4259  // Non-global variable. Call the runtime to try to delete from the
4260  // context where the variable was introduced.
4261  DCHECK(!context_register().is(a2));
4262  __ li(a2, Operand(var->name()));
4263  __ Push(context_register(), a2);
4264  __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4265  context()->Plug(v0);
4266  }
4267  } else {
4268  // Result of deleting non-property, non-variable reference is true.
4269  // The subexpression may have side effects.
4270  VisitForEffect(expr->expression());
4271  context()->Plug(true);
4272  }
4273  break;
4274  }
4275 
4276  case Token::VOID: {
4277  Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4278  VisitForEffect(expr->expression());
4279  context()->Plug(Heap::kUndefinedValueRootIndex);
4280  break;
4281  }
4282 
4283  case Token::NOT: {
4284  Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4285  if (context()->IsEffect()) {
4286  // Unary NOT has no side effects so it's only necessary to visit the
4287  // subexpression. Match the optimizing compiler by not branching.
4288  VisitForEffect(expr->expression());
4289  } else if (context()->IsTest()) {
4290  const TestContext* test = TestContext::cast(context());
4291  // The labels are swapped for the recursive call.
4292  VisitForControl(expr->expression(),
4293  test->false_label(),
4294  test->true_label(),
4295  test->fall_through());
4296  context()->Plug(test->true_label(), test->false_label());
4297  } else {
4298  // We handle value contexts explicitly rather than simply visiting
4299  // for control and plugging the control flow into the context,
4300  // because we need to prepare a pair of extra administrative AST ids
4301  // for the optimizing compiler.
4302  DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4303  Label materialize_true, materialize_false, done;
4304  VisitForControl(expr->expression(),
4305  &materialize_false,
4306  &materialize_true,
4307  &materialize_true);
4308  __ bind(&materialize_true);
4309  PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4310  __ LoadRoot(v0, Heap::kTrueValueRootIndex);
4311  if (context()->IsStackValue()) __ push(v0);
4312  __ jmp(&done);
4313  __ bind(&materialize_false);
4314  PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4315  __ LoadRoot(v0, Heap::kFalseValueRootIndex);
4316  if (context()->IsStackValue()) __ push(v0);
4317  __ bind(&done);
4318  }
4319  break;
4320  }
4321 
4322  case Token::TYPEOF: {
4323  Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4324  { StackValueContext context(this);
4325  VisitForTypeofValue(expr->expression());
4326  }
4327  __ CallRuntime(Runtime::kTypeof, 1);
4328  context()->Plug(v0);
4329  break;
4330  }
4331 
4332  default:
4333  UNREACHABLE();
4334  }
4335 }
4336 
4337 
4338 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4339  DCHECK(expr->expression()->IsValidReferenceExpression());
4340 
4341  Comment cmnt(masm_, "[ CountOperation");
4342  SetSourcePosition(expr->position());
4343 
4344  // Expression can only be a property, a global or a (parameter or local)
4345  // slot.
4346  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
4347  LhsKind assign_type = VARIABLE;
4348  Property* prop = expr->expression()->AsProperty();
4349  // In case of a property we use the uninitialized expression context
4350  // of the key to detect a named property.
4351  if (prop != NULL) {
4352  assign_type =
4353  (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4354  }
4355 
4356  // Evaluate expression and get value.
4357  if (assign_type == VARIABLE) {
4358  DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4359  AccumulatorValueContext context(this);
4360  EmitVariableLoad(expr->expression()->AsVariableProxy());
4361  } else {
4362  // Reserve space for result of postfix operation.
4363  if (expr->is_postfix() && !context()->IsEffect()) {
4364  __ li(at, Operand(Smi::FromInt(0)));
4365  __ push(at);
4366  }
4367  if (assign_type == NAMED_PROPERTY) {
4368  // Put the object both on the stack and in the register.
4369  VisitForStackValue(prop->obj());
4370  __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
4371  EmitNamedPropertyLoad(prop);
4372  } else {
4373  VisitForStackValue(prop->obj());
4374  VisitForStackValue(prop->key());
4375  __ ld(LoadDescriptor::ReceiverRegister(),
4376        MemOperand(sp, 1 * kPointerSize));
4377  __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
4378  EmitKeyedPropertyLoad(prop);
4379  }
4380  }
4381 
4382  // We need a second deoptimization point after loading the value
4383  // in case evaluating the property load may have a side effect.
4384  if (assign_type == VARIABLE) {
4385  PrepareForBailout(expr->expression(), TOS_REG);
4386  } else {
4387  PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4388  }
4389 
4390  // Inline smi case if we are in a loop.
4391  Label stub_call, done;
4392  JumpPatchSite patch_site(masm_);
4393 
4394  int count_value = expr->op() == Token::INC ? 1 : -1;
4395  __ mov(a0, v0);
4396  if (ShouldInlineSmiCase(expr->op())) {
4397  Label slow;
4398  patch_site.EmitJumpIfNotSmi(v0, &slow);
4399 
4400  // Save result for postfix expressions.
4401  if (expr->is_postfix()) {
4402  if (!context()->IsEffect()) {
4403  // Save the result on the stack. If we have a named or keyed property
4404  // we store the result under the receiver that is currently on top
4405  // of the stack.
4406  switch (assign_type) {
4407  case VARIABLE:
4408  __ push(v0);
4409  break;
4410  case NAMED_PROPERTY:
4411  __ sd(v0, MemOperand(sp, kPointerSize));
4412  break;
4413  case KEYED_PROPERTY:
4414  __ sd(v0, MemOperand(sp, 2 * kPointerSize));
4415  break;
4416  }
4417  }
4418  }
4419 
4420  Register scratch1 = a1;
4421  Register scratch2 = a4;
4422  __ li(scratch1, Operand(Smi::FromInt(count_value)));
4423  __ AdduAndCheckForOverflow(v0, v0, scratch1, scratch2);
4424  __ BranchOnNoOverflow(&done, scratch2);
4425  // Call stub. Undo operation first.
4426  __ Move(v0, a0);
4427  __ jmp(&stub_call);
4428  __ bind(&slow);
4429  }
4430  ToNumberStub convert_stub(isolate());
4431  __ CallStub(&convert_stub);
4432 
4433  // Save result for postfix expressions.
4434  if (expr->is_postfix()) {
4435  if (!context()->IsEffect()) {
4436  // Save the result on the stack. If we have a named or keyed property
4437  // we store the result under the receiver that is currently on top
4438  // of the stack.
4439  switch (assign_type) {
4440  case VARIABLE:
4441  __ push(v0);
4442  break;
4443  case NAMED_PROPERTY:
4444  __ sd(v0, MemOperand(sp, kPointerSize));
4445  break;
4446  case KEYED_PROPERTY:
4447  __ sd(v0, MemOperand(sp, 2 * kPointerSize));
4448  break;
4449  }
4450  }
4451  }
4452 
4453  __ bind(&stub_call);
4454  __ mov(a1, v0);
4455  __ li(a0, Operand(Smi::FromInt(count_value)));
4456 
4457  // Record position before stub call.
4458  SetSourcePosition(expr->position());
4459 
4460  Handle<Code> code =
4461  CodeFactory::BinaryOpIC(isolate(), Token::ADD, NO_OVERWRITE).code();
4462  CallIC(code, expr->CountBinOpFeedbackId());
4463  patch_site.EmitPatchInfo();
4464  __ bind(&done);
4465 
4466  // Store the value returned in v0.
4467  switch (assign_type) {
4468  case VARIABLE:
4469  if (expr->is_postfix()) {
4470  { EffectContext context(this);
4471  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4472  Token::ASSIGN);
4473  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4474  context.Plug(v0);
4475  }
4476  // For all contexts except EffectContext we have the result on
4477  // top of the stack.
4478  if (!context()->IsEffect()) {
4479  context()->PlugTOS();
4480  }
4481  } else {
4482  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4483  Token::ASSIGN);
4484  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4485  context()->Plug(v0);
4486  }
4487  break;
4488  case NAMED_PROPERTY: {
4489  __ mov(StoreDescriptor::ValueRegister(), result_register());
4490  __ li(StoreDescriptor::NameRegister(),
4491  Operand(prop->key()->AsLiteral()->value()));
4492  __ pop(StoreDescriptor::ReceiverRegister());
4493  CallStoreIC(expr->CountStoreFeedbackId());
4494  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4495  if (expr->is_postfix()) {
4496  if (!context()->IsEffect()) {
4497  context()->PlugTOS();
4498  }
4499  } else {
4500  context()->Plug(v0);
4501  }
4502  break;
4503  }
4504  case KEYED_PROPERTY: {
4505  __ mov(StoreDescriptor::ValueRegister(), result_register());
4506  __ Pop(StoreDescriptor::ReceiverRegister(),
4507  StoreDescriptor::NameRegister());
4508  Handle<Code> ic =
4509  CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
4510  CallIC(ic, expr->CountStoreFeedbackId());
4511  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4512  if (expr->is_postfix()) {
4513  if (!context()->IsEffect()) {
4514  context()->PlugTOS();
4515  }
4516  } else {
4517  context()->Plug(v0);
4518  }
4519  break;
4520  }
4521  }
4522 }
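// Added note for this listing (not in the V8 source): for x++/x-- the fast
// path above adds Smi::FromInt(+1/-1) with AdduAndCheckForOverflow; on
// overflow it restores the original value and falls back to the generic
// BinaryOpIC for Token::ADD, as if the source had been x = x + 1 (or - 1).
// The JumpPatchSite marker lets the IC later patch the inlined smi check.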
4523 
4524 
4525 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4526  DCHECK(!context()->IsEffect());
4527  DCHECK(!context()->IsTest());
4528  VariableProxy* proxy = expr->AsVariableProxy();
4529  if (proxy != NULL && proxy->var()->IsUnallocated()) {
4530  Comment cmnt(masm_, "[ Global variable");
4531  __ ld(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
4532  __ li(LoadDescriptor::NameRegister(), Operand(proxy->name()));
4533  if (FLAG_vector_ics) {
4534  __ li(VectorLoadICDescriptor::SlotRegister(),
4535  Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
4536  }
4537  // Use a regular load, not a contextual load, to avoid a reference
4538  // error.
4539  CallLoadIC(NOT_CONTEXTUAL);
4540  PrepareForBailout(expr, TOS_REG);
4541  context()->Plug(v0);
4542  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4543  Comment cmnt(masm_, "[ Lookup slot");
4544  Label done, slow;
4545 
4546  // Generate code for loading from variables potentially shadowed
4547  // by eval-introduced variables.
4548  EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);
4549 
4550  __ bind(&slow);
4551  __ li(a0, Operand(proxy->name()));
4552  __ Push(cp, a0);
4553  __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
4554  PrepareForBailout(expr, TOS_REG);
4555  __ bind(&done);
4556 
4557  context()->Plug(v0);
4558  } else {
4559  // This expression cannot throw a reference error at the top level.
4560  VisitInDuplicateContext(expr);
4561  }
4562 }
4563 
4564 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4565  Expression* sub_expr,
4566  Handle<String> check) {
4567  Label materialize_true, materialize_false;
4568  Label* if_true = NULL;
4569  Label* if_false = NULL;
4570  Label* fall_through = NULL;
4571  context()->PrepareTest(&materialize_true, &materialize_false,
4572  &if_true, &if_false, &fall_through);
4573 
4574  { AccumulatorValueContext context(this);
4575  VisitForTypeofValue(sub_expr);
4576  }
4577  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4578 
4579  Factory* factory = isolate()->factory();
4580  if (String::Equals(check, factory->number_string())) {
4581  __ JumpIfSmi(v0, if_true);
4582  __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
4583  __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
4584  Split(eq, v0, Operand(at), if_true, if_false, fall_through);
4585  } else if (String::Equals(check, factory->string_string())) {
4586  __ JumpIfSmi(v0, if_false);
4587  // Check for undetectable objects => false.
4588  __ GetObjectType(v0, v0, a1);
4589  __ Branch(if_false, ge, a1, Operand(FIRST_NONSTRING_TYPE));
4590  __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
4591  __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
4592  Split(eq, a1, Operand(zero_reg),
4593  if_true, if_false, fall_through);
4594  } else if (String::Equals(check, factory->symbol_string())) {
4595  __ JumpIfSmi(v0, if_false);
4596  __ GetObjectType(v0, v0, a1);
4597  Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
4598  } else if (String::Equals(check, factory->boolean_string())) {
4599  __ LoadRoot(at, Heap::kTrueValueRootIndex);
4600  __ Branch(if_true, eq, v0, Operand(at));
4601  __ LoadRoot(at, Heap::kFalseValueRootIndex);
4602  Split(eq, v0, Operand(at), if_true, if_false, fall_through);
4603  } else if (String::Equals(check, factory->undefined_string())) {
4604  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
4605  __ Branch(if_true, eq, v0, Operand(at));
4606  __ JumpIfSmi(v0, if_false);
4607  // Check for undetectable objects => true.
4608  __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
4609  __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
4610  __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
4611  Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
4612  } else if (String::Equals(check, factory->function_string())) {
4613  __ JumpIfSmi(v0, if_false);
4614  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4615  __ GetObjectType(v0, v0, a1);
4616  __ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE));
4617  Split(eq, a1, Operand(JS_FUNCTION_PROXY_TYPE),
4618  if_true, if_false, fall_through);
4619  } else if (String::Equals(check, factory->object_string())) {
4620  __ JumpIfSmi(v0, if_false);
4621  __ LoadRoot(at, Heap::kNullValueRootIndex);
4622  __ Branch(if_true, eq, v0, Operand(at));
4623  // Check for JS objects => true.
4624  __ GetObjectType(v0, v0, a1);
4625  __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
4626  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
4627  __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
4628  // Check for undetectable objects => false.
4629  __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
4630  __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
4631  Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
4632  } else {
4633  if (if_false != fall_through) __ jmp(if_false);
4634  }
4635  context()->Plug(if_true, if_false);
4636 }
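// Added note for this listing (not in the V8 source): this handles the common
// pattern `typeof x == "literal"` without materializing the typeof string.
// Each branch mirrors one clause of the typeof table; e.g. for "object":
// null => true, smi => false, a non-callable spec object that is not
// undetectable => true, everything else => false. An unknown literal falls
// through to the unconditional jump to if_false.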
4637 
4638 
4639 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4640  Comment cmnt(masm_, "[ CompareOperation");
4641  SetSourcePosition(expr->position());
4642 
4643  // First we try a fast inlined version of the compare when one of
4644  // the operands is a literal.
4645  if (TryLiteralCompare(expr)) return;
4646 
4647  // Always perform the comparison for its control flow. Pack the result
4648  // into the expression's context after the comparison is performed.
4649  Label materialize_true, materialize_false;
4650  Label* if_true = NULL;
4651  Label* if_false = NULL;
4652  Label* fall_through = NULL;
4653  context()->PrepareTest(&materialize_true, &materialize_false,
4654  &if_true, &if_false, &fall_through);
4655 
4656  Token::Value op = expr->op();
4657  VisitForStackValue(expr->left());
4658  switch (op) {
4659  case Token::IN:
4660  VisitForStackValue(expr->right());
4661  __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4662  PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4663  __ LoadRoot(a4, Heap::kTrueValueRootIndex);
4664  Split(eq, v0, Operand(a4), if_true, if_false, fall_through);
4665  break;
4666 
4667  case Token::INSTANCEOF: {
4668  VisitForStackValue(expr->right());
4669  InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
4670  __ CallStub(&stub);
4671  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4672  // The stub returns 0 for true.
4673  Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through);
4674  break;
4675  }
4676 
4677  default: {
4678  VisitForAccumulatorValue(expr->right());
4679  Condition cc = CompareIC::ComputeCondition(op);
4680  __ mov(a0, result_register());
4681  __ pop(a1);
4682 
4683  bool inline_smi_code = ShouldInlineSmiCase(op);
4684  JumpPatchSite patch_site(masm_);
4685  if (inline_smi_code) {
4686  Label slow_case;
4687  __ Or(a2, a0, Operand(a1));
4688  patch_site.EmitJumpIfNotSmi(a2, &slow_case);
4689  Split(cc, a1, Operand(a0), if_true, if_false, NULL);
4690  __ bind(&slow_case);
4691  }
4692  // Record position and call the compare IC.
4693  SetSourcePosition(expr->position());
4694  Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
4695  CallIC(ic, expr->CompareOperationFeedbackId());
4696  patch_site.EmitPatchInfo();
4697  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4698  Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
4699  }
4700  }
4701 
4702  // Convert the result of the comparison into one expected for this
4703  // expression's context.
4704  context()->Plug(if_true, if_false);
4705 }
4706 
4707 
4708 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4709  Expression* sub_expr,
4710  NilValue nil) {
4711  Label materialize_true, materialize_false;
4712  Label* if_true = NULL;
4713  Label* if_false = NULL;
4714  Label* fall_through = NULL;
4715  context()->PrepareTest(&materialize_true, &materialize_false,
4716  &if_true, &if_false, &fall_through);
4717 
4718  VisitForAccumulatorValue(sub_expr);
4719  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4720  __ mov(a0, result_register());
4721  if (expr->op() == Token::EQ_STRICT) {
4722  Heap::RootListIndex nil_value = nil == kNullValue ?
4723  Heap::kNullValueRootIndex :
4724  Heap::kUndefinedValueRootIndex;
4725  __ LoadRoot(a1, nil_value);
4726  Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
4727  } else {
4728  Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4729  CallIC(ic, expr->CompareOperationFeedbackId());
4730  Split(ne, v0, Operand(zero_reg), if_true, if_false, fall_through);
4731  }
4732  context()->Plug(if_true, if_false);
4733 }
4734 
4735 
4736 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4737  __ ld(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4738  context()->Plug(v0);
4739 }
4740 
4741 
4742 Register FullCodeGenerator::result_register() {
4743  return v0;
4744 }
4745 
4746 
4747 Register FullCodeGenerator::context_register() {
4748  return cp;
4749 }
4750 
4751 
4752 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4753  // DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4754  DCHECK(IsAligned(frame_offset, kPointerSize));
4755  // __ sw(value, MemOperand(fp, frame_offset));
4756  __ sd(value, MemOperand(fp, frame_offset));
4757 }
4758 
4759 
4760 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4761  __ ld(dst, ContextOperand(cp, context_index));
4762 }
4763 
4764 
4765 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4766  Scope* declaration_scope = scope()->DeclarationScope();
4767  if (declaration_scope->is_global_scope() ||
4768  declaration_scope->is_module_scope()) {
4769  // Contexts nested in the native context have a canonical empty function
4770  // as their closure, not the anonymous closure containing the global
4771  // code. Pass a smi sentinel and let the runtime look up the empty
4772  // function.
4773  __ li(at, Operand(Smi::FromInt(0)));
4774  } else if (declaration_scope->is_eval_scope()) {
4775  // Contexts created by a call to eval have the same closure as the
4776  // context calling eval, not the anonymous closure containing the eval
4777  // code. Fetch it from the context.
4778  __ ld(at, ContextOperand(cp, Context::CLOSURE_INDEX));
4779  } else {
4780  DCHECK(declaration_scope->is_function_scope());
4781  __ ld(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4782  }
4783  __ push(at);
4784 }
4785 
4786 
4787 // ----------------------------------------------------------------------------
4788 // Non-local control flow support.
4789 
4790 void FullCodeGenerator::EnterFinallyBlock() {
4791  DCHECK(!result_register().is(a1));
4792  // Store result register while executing finally block.
4793  __ push(result_register());
4794  // Cook return address in link register to stack (smi encoded Code* delta).
4795  __ Dsubu(a1, ra, Operand(masm_->CodeObject()));
4796  __ SmiTag(a1);
4797 
4798  // Store cooked return address while executing finally block.
4799  __ push(a1);
4800 
4801  // Store pending message while executing finally block.
4802  ExternalReference pending_message_obj =
4803  ExternalReference::address_of_pending_message_obj(isolate());
4804  __ li(at, Operand(pending_message_obj));
4805  __ ld(a1, MemOperand(at));
4806  __ push(a1);
4807 
4808  ExternalReference has_pending_message =
4809  ExternalReference::address_of_has_pending_message(isolate());
4810  __ li(at, Operand(has_pending_message));
4811  __ ld(a1, MemOperand(at));
4812  __ SmiTag(a1);
4813  __ push(a1);
4814 
4815  ExternalReference pending_message_script =
4816  ExternalReference::address_of_pending_message_script(isolate());
4817  __ li(at, Operand(pending_message_script));
4818  __ ld(a1, MemOperand(at));
4819  __ push(a1);
4820 }
4821 
4822 
4823 void FullCodeGenerator::ExitFinallyBlock() {
4824  DCHECK(!result_register().is(a1));
4825  // Restore pending message from stack.
4826  __ pop(a1);
4827  ExternalReference pending_message_script =
4828  ExternalReference::address_of_pending_message_script(isolate());
4829  __ li(at, Operand(pending_message_script));
4830  __ sd(a1, MemOperand(at));
4831 
4832  __ pop(a1);
4833  __ SmiUntag(a1);
4834  ExternalReference has_pending_message =
4835  ExternalReference::address_of_has_pending_message(isolate());
4836  __ li(at, Operand(has_pending_message));
4837  __ sd(a1, MemOperand(at));
4838 
4839  __ pop(a1);
4840  ExternalReference pending_message_obj =
4841  ExternalReference::address_of_pending_message_obj(isolate());
4842  __ li(at, Operand(pending_message_obj));
4843  __ sd(a1, MemOperand(at));
4844 
4845  // Restore result register from stack.
4846  __ pop(a1);
4847 
4848  // Uncook return address and return.
4849  __ pop(result_register());
4850 
4851  __ SmiUntag(a1);
4852  __ Daddu(at, a1, Operand(masm_->CodeObject()));
4853  __ Jump(at);
4854 }
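// Added note for this listing (not in the V8 source): EnterFinallyBlock
// "cooks" the return address by saving ra - CodeObject() as a smi, so the
// saved slot is a GC-safe offset rather than a raw code pointer that would go
// stale if the code object moves; ExitFinallyBlock re-adds CodeObject() to
// "uncook" it before jumping. The pending message state is saved and restored
// around the finally body so that try/finally stays transparent to it.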
4855 
4856 
4857 #undef __
4858 
4859 #define __ ACCESS_MASM(masm())
4860 
4861 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4862  int* stack_depth,
4863  int* context_length) {
4864  // The macros used here must preserve the result register.
4865 
4866  // Because the handler block contains the context of the finally
4867  // code, we can restore it directly from there for the finally code
4868  // rather than iteratively unwinding contexts via their previous
4869  // links.
4870  __ Drop(*stack_depth); // Down to the handler block.
4871  if (*context_length > 0) {
4872  // Restore the context to its dedicated register and the stack.
4873  __ ld(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
4874  __ sd(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4875  }
4876  __ PopTryHandler();
4877  __ Call(finally_entry_);
4878 
4879  *stack_depth = 0;
4880  *context_length = 0;
4881  return previous_;
4882 }
4883 
4884 
4885 #undef __
4886 
4887 
4888 void BackEdgeTable::PatchAt(Code* unoptimized_code,
4889  Address pc,
4890  BackEdgeState target_state,
4891  Code* replacement_code) {
4892  static const int kInstrSize = Assembler::kInstrSize;
4893  Address branch_address = pc - 8 * kInstrSize;
4894  CodePatcher patcher(branch_address, 1);
4895 
4896  switch (target_state) {
4897  case INTERRUPT:
4898  // slt at, a3, zero_reg (in case of count based interrupts)
4899  // beq at, zero_reg, ok
4900  // lui t9, <interrupt stub address> upper
4901  // ori t9, <interrupt stub address> u-middle
4902  // dsll t9, t9, 16
4903  // ori t9, <interrupt stub address> lower
4904  // jalr t9
4905  // nop
4906  // ok-label ----- pc_after points here
4907  patcher.masm()->slt(at, a3, zero_reg);
4908  break;
4909  case ON_STACK_REPLACEMENT:
4910  case OSR_AFTER_STACK_CHECK:
4911  // addiu at, zero_reg, 1
4912  // beq at, zero_reg, ok ;; Not changed
4913  // lui t9, <on-stack replacement address> upper
4914  // ori t9, <on-stack replacement address> middle
4915  // dsll t9, t9, 16
4916  // ori t9, <on-stack replacement address> lower
4917  // jalr t9 ;; Not changed
4918  // nop ;; Not changed
4919  // ok-label ----- pc_after points here
4920  patcher.masm()->daddiu(at, zero_reg, 1);
4921  break;
4922  }
4923  Address pc_immediate_load_address = pc - 6 * kInstrSize;
4924  // Replace the stack check address in the load-immediate (6-instr sequence)
4925  // with the entry address of the replacement code.
4926  Assembler::set_target_address_at(pc_immediate_load_address,
4927  replacement_code->entry());
4928 
4929  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4930  unoptimized_code, pc_immediate_load_address, replacement_code);
4931 }
4932 
4933 
4934 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4935  Isolate* isolate,
4936  Code* unoptimized_code,
4937  Address pc) {
4938  static const int kInstrSize = Assembler::kInstrSize;
4939  Address branch_address = pc - 8 * kInstrSize;
4940  Address pc_immediate_load_address = pc - 6 * kInstrSize;
4941 
4942  DCHECK(Assembler::IsBeq(Assembler::instr_at(pc - 7 * kInstrSize)));
4943  if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
4944  DCHECK(reinterpret_cast<uint64_t>(
4945  Assembler::target_address_at(pc_immediate_load_address)) ==
4946  reinterpret_cast<uint64_t>(
4947  isolate->builtins()->InterruptCheck()->entry()));
4948  return INTERRUPT;
4949  }
4950 
4951  DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));
4952 
4953  if (reinterpret_cast<uint64_t>(
4954  Assembler::target_address_at(pc_immediate_load_address)) ==
4955  reinterpret_cast<uint64_t>(
4956  isolate->builtins()->OnStackReplacement()->entry())) {
4957  return ON_STACK_REPLACEMENT;
4958  }
4959 
4960  DCHECK(reinterpret_cast<uint64_t>(
4961  Assembler::target_address_at(pc_immediate_load_address)) ==
4962  reinterpret_cast<uint64_t>(
4963  isolate->builtins()->OsrAfterStackCheck()->entry()));
4964  return OSR_AFTER_STACK_CHECK;
4965 }
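// Added sketch for this listing (not in the V8 source): decoding mirrors what
// PatchAt writes. The first instruction of the back-edge sequence
// discriminates the state, and the 6-instruction load-immediate supplies the
// call target:
//
//   slt at, a3, zero_reg   -> INTERRUPT (branch taken when counter < 0)
//   daddiu at, zero_reg, 1 -> OSR; the loaded target address then
//                             distinguishes ON_STACK_REPLACEMENT from
//                             OSR_AFTER_STACK_CHECK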
4966 
4967 
4968 } } // namespace v8::internal
4969 
4970 #endif // V8_TARGET_ARCH_MIPS64
Definition: objects.h:2194
static const int kSize
Definition: objects.h:7772
static const int kInObjectFieldCount
Definition: objects.h:7826
static const int kValueOffset
Definition: objects.h:7546
static const Register ReceiverRegister()
static const Register NameRegister()
void mov(Register rd, Register rt)
void MultiPop(RegList regs)
void Jump(Register target, Condition cond=al)
static int CallSize(Register target, Condition cond=al)
static const int kIsUndetectable
Definition: objects.h:6244
static const int kBitFieldOffset
Definition: objects.h:6228
static const int kStringWrapperSafeForDefaultValueOf
Definition: objects.h:6251
static const int kInstanceTypeOffset
Definition: objects.h:6229
static const int kBitField2Offset
Definition: objects.h:6233
static const int kConstructorOffset
Definition: objects.h:6191
static const int kPrototypeOffset
Definition: objects.h:6190
static const int kHashFieldOffset
Definition: objects.h:8486
static void MaybeCallEntryHook(MacroAssembler *masm)
static const int kNoPosition
Definition: assembler.h:317
Scope * outer_scope() const
Definition: scopes.h:333
int num_parameters() const
Definition: scopes.h:321
VariableDeclaration * function() const
Definition: scopes.h:309
int ContextChainLength(Scope *scope)
Definition: scopes.cc:715
void VisitIllegalRedeclaration(AstVisitor *visitor)
Definition: scopes.cc:539
Scope * DeclarationScope()
Definition: scopes.cc:737
Variable * arguments() const
Definition: scopes.h:324
Scope * GlobalScope()
Definition: scopes.cc:728
Variable * parameter(int index) const
Definition: scopes.h:316
static const int kHeaderSize
Definition: objects.h:8941
static const int kFormalParameterCountOffset
Definition: objects.h:6946
static const int kInstanceClassNameOffset
Definition: objects.h:6897
static Smi * FromInt(int value)
Definition: objects-inl.h:1321
static bool IsValid(intptr_t value)
Definition: objects-inl.h:1334
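Smi::FromInt encodes a small integer directly in a tagged word. A hedged sketch of the 64-bit scheme, assuming the 32-bit payload shift used on 64-bit targets (kSmiTag == 0 in the low bit distinguishes smis from heap object pointers):
  static Smi* FromInt(int value) {
    DCHECK(Smi::IsValid(value));
    // Payload lives in the upper 32 bits; the low tag bits stay zero.
    return reinterpret_cast<Smi*>(static_cast<intptr_t>(value) << 32);
  }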
static const int kContextOffset
Definition: frames.h:74
static const int kContextOffset
Definition: frames.h:162
static const int kExpressionsOffset
Definition: frames.h:160
static const int kCallerSPOffset
Definition: frames.h:167
static const int kMarkerOffset
Definition: frames.h:161
static const int kCallerFPOffset
Definition: frames.h:165
static const Register ReceiverRegister()
static const Register NameRegister()
static const Register ValueRegister()
static const unsigned int kContainsCachedArrayIndexMask
Definition: objects.h:8618
static const int kLengthOffset
Definition: objects.h:8802
bool Equals(String *other)
Definition: objects-inl.h:3336
static Handle< Code > GetUninitialized(Isolate *isolate)
Definition: code-stubs.h:2258
static TypeFeedbackId None()
Definition: utils.h:945
static Handle< Object > MegamorphicSentinel(Isolate *isolate)
TypeofState
Definition: codegen.h:46
#define __
DEFINE_BOOL flag definitions (src/flag-definitions.h; help strings mangled by Doxygen, omitted)
#define UNREACHABLE()
Definition: logging.h:30
#define DCHECK_NE(v1, v2)
Definition: logging.h:207
#define DCHECK(condition)
Definition: logging.h:205
#define DCHECK_EQ(v1, v2)
Definition: logging.h:206
@ CALL_FUNCTION
@ TAG_OBJECT
typedef int int32_t
Definition: unicode.cc:24
static int Push(SpecialRPOStackFrame *stack, int depth, BasicBlock *child, int unvisited)
Definition: scheduler.cc:773
Vector< const char > CStrVector(const char *data)
Definition: vector.h:158
const int kPointerSize
Definition: globals.h:129
MemOperand ContextOperand(Register context, int index)
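ContextOperand builds a memory operand for a context slot by reusing Context::SlotOffset (sketched above), so loads and stores can address the slot through the tagged context register:
  MemOperand ContextOperand(Register context, int index) {
    return MemOperand(context, Context::SlotOffset(index));
  }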
@ DO_SMI_CHECK
Definition: globals.h:641
@ STRING_ADD_CHECK_BOTH
Definition: code-stubs.h:1218
@ TRACK_ALLOCATION_SITE
Definition: objects.h:8085
@ DONT_TRACK_ALLOCATION_SITE
Definition: objects.h:8084
has_duplicate_parameters (accessor macro expansion mangled by Doxygen)
Definition: objects-inl.h:5448
@ kSeqStringTag
Definition: objects.h:563
const Register cp
static LifetimePosition Min(LifetimePosition a, LifetimePosition b)
bool IsDeclaredVariableMode(VariableMode mode)
Definition: globals.h:705
const uint32_t kTwoByteStringTag
Definition: objects.h:556
const int kSmiTagSize
Definition: v8.h:5743
const Register fp
const Register sp
const int kPointerSizeLog2
Definition: globals.h:147
@ LAST_NONCALLABLE_SPEC_OBJECT_TYPE
Definition: objects.h:785
@ JS_REGEXP_TYPE
Definition: objects.h:748
@ NUM_OF_CALLABLE_SPEC_OBJECT_TYPES
Definition: objects.h:788
@ JS_VALUE_TYPE
Definition: objects.h:728
@ JS_DATE_TYPE
Definition: objects.h:730
@ FIRST_NONCALLABLE_SPEC_OBJECT_TYPE
Definition: objects.h:784
@ FIRST_JS_PROXY_TYPE
Definition: objects.h:778
@ JS_ARRAY_TYPE
Definition: objects.h:738
@ FIRST_NONSTRING_TYPE
Definition: objects.h:758
@ FIRST_SPEC_OBJECT_TYPE
Definition: objects.h:781
@ LAST_SPEC_OBJECT_TYPE
Definition: objects.h:782
@ JS_FUNCTION_TYPE
Definition: objects.h:749
@ JS_FUNCTION_PROXY_TYPE
Definition: objects.h:726
@ LAST_JS_PROXY_TYPE
Definition: objects.h:779
const uint32_t kOneByteStringTag
Definition: objects.h:557
@ NO_OVERWRITE
Definition: ic-state.h:58
@ OVERWRITE_RIGHT
Definition: ic-state.h:58
MemOperand FieldMemOperand(Register object, int offset)
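FieldMemOperand is the standard helper for accessing object fields: field offsets are declared untagged, so the helper subtracts the heap object tag once. A minimal sketch, assuming the kHeapObjectTag constant listed in this index:
  MemOperand FieldMemOperand(Register object, int offset) {
    // 'object' holds a tagged pointer; fold the tag into the displacement.
    return MemOperand(object, offset - kHeapObjectTag);
  }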
bool IsImmutableVariableMode(VariableMode mode)
Definition: globals.h:715
const Register pc
@ DYNAMIC_GLOBAL
Definition: globals.h:689
@ DYNAMIC_LOCAL
Definition: globals.h:693
@ CONST_LEGACY
Definition: globals.h:671
Condition NegateCondition(Condition cond)
Definition: constants-arm.h:86
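NegateCondition exploits the fact that condition codes are encoded in complementary pairs (eq/ne, lt/ge, and so on). A sketch assuming that paired encoding, where negation is a single low-bit flip:
  Condition NegateCondition(Condition cond) {
    DCHECK(cond != al);  // 'always' has no complement.
    return static_cast<Condition>(cond ^ 1);
  }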
static LifetimePosition Max(LifetimePosition a, LifetimePosition b)
uint32_t RegList
Definition: frames.h:18
byte * Address
Definition: globals.h:101
NilValue
Definition: v8.h:97
@ kNullValue
Definition: v8.h:97
@ NOT_CONTEXTUAL
Definition: objects.h:174
const int kHeapObjectTag
Definition: v8.h:5737
const Register no_reg
flag (accessor macro expansion mangled by Doxygen)
Definition: objects-inl.h:5418
MemOperand GlobalObjectOperand()
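GlobalObjectOperand composes the two helpers above: the global object sits in a fixed slot of the current context, reachable through the dedicated context register cp. A sketch assuming the Context::GLOBAL_OBJECT_INDEX slot constant:
  MemOperand GlobalObjectOperand() {
    return ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX);
  }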
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
@ RECORD_CONSTRUCTOR_TARGET
Definition: globals.h:480
const int kSmiTag
Definition: v8.h:5742
bool IsAligned(T value, U alignment)
Definition: utils.h:123
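IsAligned is the usual power-of-two alignment test; a minimal sketch:
  template <typename T, typename U>
  inline bool IsAligned(T value, U alignment) {
    // Valid only for power-of-two alignments: an aligned value has no
    // bits set below the alignment.
    return (value & (alignment - 1)) == 0;
  }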
@ NO_CALL_FUNCTION_FLAGS
Definition: globals.h:469
@ CALL_AS_METHOD
Definition: globals.h:470
@ STRING_INDEX_IS_NUMBER
Definition: code-stubs.h:1590
void CopyBytes(uint8_t *target, uint8_t *source)
bool IsFastObjectElementsKind(ElementsKind kind)
Debugger support for the V8 JavaScript engine.
Definition: accessors.cc:20
#define VOID
#define IN
PropertyAttributes
@ NONE
@ READ_ONLY
static Register from_code(int code)