V8 Project
full-codegen-mips.cc
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_MIPS

// Note on Mips implementation:
//
// The result_register() for mips is the 'v0' register, which is defined
// by the ABI to contain function return values. However, the first
// parameter to a function is defined to be 'a0'. So there are many
// places where we have to move a previous result in v0 to a0 for the
// next call: mov(a0, v0). This is not needed on the other architectures.

#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug.h"
#include "src/full-codegen.h"
#include "src/ic/ic.h"
#include "src/isolate-inl.h"
#include "src/parser.h"
#include "src/scopes.h"

#include "src/mips/code-stubs-mips.h"
#include "src/mips/macro-assembler-mips.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)


// A patch site is a location in the code which it is possible to patch. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is an andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy
// (raw 16 bit immediate value is used) is the delta from the pc to the first
// instruction of the patchable code.
// The marker instruction is effectively a NOP (dest is zero_reg) and will
// never be emitted by normal code.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this code, ensure that a jump is always generated
  // to skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Always taken before patched.
    __ BranchShort(target, eq, at, Operand(zero_reg));
  }

  // When initially emitting this code, ensure that a jump is never generated
  // to skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Never taken before patched.
    __ BranchShort(target, ne, at, Operand(zero_reg));
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
      __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
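
// A rough worked example of the marker encoding above (illustration only,
// not emitted code): suppose EmitPatchInfo runs when the patch site is
// 0x1234 instructions back. With kImm16Mask == 0xffff,
//   reg   = Register::from_code(0x1234 / 0xffff)  // register code 0
//   imm16 = 0x1234 % 0xffff                       // 0x1234
// so the marker emitted is 'andi zero_reg, reg, 0x1234'. A patcher can
// recover the delta as reg.code() * 0xffff + imm16, and the marker itself
// is a NOP because its destination is zero_reg.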


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o a1: the JS function object being called (i.e. ourselves)
//   o cp: our context
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-mips.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);

  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (info->strict_mode() == SLOPPY && !info->is_native()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ lw(at, MemOperand(sp, receiver_offset));
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ Branch(&ok, ne, a2, Operand(at));

    __ lw(a2, GlobalObjectOperand());
    __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset));

    __ sw(a2, MemOperand(sp, receiver_offset));

    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!info->function()->is_generator() || locals_count == 0);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ Subu(t5, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
        __ Branch(&ok, hs, t5, Operand(a2));
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ bind(&ok);
      }
      __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ li(a2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        __ Subu(sp, sp, Operand(kMaxPushes * kPointerSize));
        for (int i = 0; i < kMaxPushes; i++) {
          __ sw(t5, MemOperand(sp, i * kPointerSize));
        }
        // Continue loop if not done.
        __ Subu(a2, a2, Operand(1));
        __ Branch(&loop_header, ne, a2, Operand(zero_reg));
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      __ Subu(sp, sp, Operand(remaining * kPointerSize));
      for (int i = 0; i < remaining; i++) {
        __ sw(t5, MemOperand(sp, i * kPointerSize));
      }
    }
  }
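
  // Illustration (assumed figures, not emitted code): with locals_count == 70
  // and kMaxPushes == 32, the batched loop above runs 70 / 32 == 2 iterations
  // of 32 stores each, and the tail emits the remaining 70 % 32 == 6 stores,
  // so all 70 stack slots are initialized with undefined.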

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    // Argument to NewContext is the function, which is still in a1.
    bool need_write_barrier = true;
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ push(a1);
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), heap_slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ push(a1);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in v0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, v0);
    __ sw(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                               (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ lw(a0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ sw(a0, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(
              cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, a0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mov(a3, a1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ Addu(a2, fp,
            Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ li(a1, Operand(Smi::FromInt(num_parameters)));
    __ Push(a3, a2, a1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (strict_mode() == STRICT) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, v0, a1, a2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        DCHECK(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_LEGACY);
        DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ LoadRoot(at, Heap::kStackLimitRootIndex);
      __ Branch(&ok, hs, sp, Operand(at));
      Handle<Code> stack_check = isolate()->builtins()->StackCheck();
      PredictableCodeSizeScope predictable(masm_,
          masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
      __ Call(stack_check, RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(function()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();
}


void FullCodeGenerator::ClearAccumulator() {
  DCHECK(Smi::FromInt(0) == 0);
  __ mov(v0, zero_reg);
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ li(a2, Operand(profiling_counter_));
  __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset));
  __ Subu(a3, a3, Operand(Smi::FromInt(delta)));
  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->is_debug()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ li(a2, Operand(profiling_counter_));
  __ li(a3, Operand(Smi::FromInt(reset_value)));
  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
  // to make sure it is constant. Branch may emit a skip-or-jump sequence
  // instead of the normal Branch. It seems that the "skip" part of that
  // sequence is about as long as this Branch would be so it is safe to ignore
  // that.
  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;
  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ slt(at, a3, zero_reg);
  __ beq(at, zero_reg, &ok);
  // Call will emit a li t9 first, so it is safe to use the delay slot.
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());
  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
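
// Rough example of the weight computation above (illustrative numbers,
// assuming kCodeSizeMultiplier == 149 for this architecture): a back edge
// 1490 bytes behind the current pc gives
//   weight == Min(kMaxBackEdgeWeight, Max(1, 1490 / 149)) == 10,
// so the profiling counter is decremented by 10 for that back edge, and the
// interrupt check fires once the counter goes negative.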


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ Branch(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in v0.
      __ push(v0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ Branch(&ok, ge, a3, Operand(zero_reg));
    __ push(v0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ pop(v0);
    EmitProfilingCounterReset();
    __ bind(&ok);

#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
      // Here we use masm_-> instead of the __ macro to avoid the code coverage
      // tool from instrumenting as we rely on the code size here.
      int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
      __ RecordJSReturn();
      masm_->mov(sp, fp);
      int no_frame_start = masm_->pc_offset();
      masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
      masm_->Addu(sp, sp, Operand(sp_delta));
      masm_->Jump(ra);
      info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
    }

#ifdef DEBUG
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    DCHECK(Assembler::kJSReturnSequenceInstructions <=
           masm_->InstructionsGeneratedSince(&check_exit_codesize));
#endif
  }
}
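
// Worked example of the teardown arithmetic above (illustration): for a
// function with two declared parameters, sp_delta == (2 + 1) * kPointerSize,
// i.e. 12 bytes on MIPS32, which removes the receiver and both arguments
// once fp and ra have been restored.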


void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ li(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ li(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ li(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ sw(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  // Push the value as the following branch can clobber at in long branch mode.
  __ push(at);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(at, Heap::kFalseValueRootIndex);
  __ push(at);
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(at, value_root_index);
  __ push(at);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  __ mov(a0, result_register());
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ mov(at, zero_reg);
  Split(ne, v0, Operand(at), if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Register lhs,
                              const Operand& rhs,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ Branch(if_true, cc, lhs, rhs);
  } else if (if_true == fall_through) {
    __ Branch(if_false, NegateCondition(cc), lhs, rhs);
  } else {
    __ Branch(if_true, cc, lhs, rhs);
    __ Branch(if_false);
  }
}
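
// Usage sketch (hypothetical labels): to jump to if_true when v0 equals at
// and simply fall through into the if_false code otherwise, callers pass
// if_false as fall_through:
//   Split(eq, v0, Operand(at), if_true, if_false, if_false);
// which reduces to the single 'Branch(if_true, eq, v0, Operand(at))'.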


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}
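
// Worked example (illustrative, MIPS32 with kPointerSize == 4): in a function
// with two parameters, the parameter with var->index() == 1 yields
// offset == -4 + (2 + 1) * 4 == 8, i.e. MemOperand(fp, 8) above the frame
// pointer, while a local with var->index() == 0 lands at
// MemOperand(fp, kLocal0Offset) below it.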


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ lw(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ sw(src, location);
  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kRAHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ Branch(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(t0, Heap::kTrueValueRootIndex);
    Split(eq, a0, Operand(t0), if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext,
        a1, Operand(t0));
    __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext,
        a1, Operand(t0));
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
        __ sw(t0, StackOperand(variable));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ sw(at, ContextOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ li(a2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ li(a1, Operand(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, a2, a1, a0);
      } else {
        DCHECK(Smi::FromInt(0) == 0);
        __ mov(a0, zero_reg);  // Smi::FromInt(0) indicates no initial value.
        __ Push(cp, a2, a1, a0);
      }
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), StackOperand(variable));
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), ContextOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                a2,
                                kRAHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ li(a2, Operand(variable->name()));
      __ li(a1, Operand(Smi::FromInt(NONE)));
      __ Push(cp, a2, a1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  DCHECK(variable->location() == Variable::CONTEXT);
  DCHECK(variable->interface()->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(a1, scope_->ContextChainLength(scope_->GlobalScope()));
  __ lw(a1, ContextOperand(a1, variable->interface()->Index()));
  __ lw(a1, ContextOperand(a1, Context::EXTENSION_INDEX));

  // Assign it.
  __ sw(a1, ContextOperand(cp, variable->index()));
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(cp,
                            Context::SlotOffset(variable->index()),
                            a1,
                            a3,
                            kRAHasBeenSaved,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  // The context is the first argument.
  __ li(a1, Operand(pairs));
  __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(cp, a1, a0);
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());
    __ mov(a0, result_register());  // CompareStub requires args in a0, a1.

    // Perform the comparison as if via '==='.
    __ lw(a1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ or_(a2, a1, a0);
      patch_site.EmitJumpIfNotSmi(a2, &slow_case);

      __ Branch(&next_test, ne, a1, Operand(a0));
      __ Drop(1);  // Switch value is no longer needed.
      __ Branch(clause->body_target());

      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ Branch(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(&next_test, ne, v0, Operand(at));
    __ Drop(1);
    __ Branch(clause->body_target());
    __ bind(&skip);

    __ Branch(&next_test, ne, v0, Operand(zero_reg));
    __ Drop(1);  // Switch value is no longer needed.
    __ Branch(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ Branch(nested_statement.break_label());
  } else {
    __ Branch(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  int slot = stmt->ForInFeedbackSlot();
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ mov(a0, result_register());  // Result as param to InvokeBuiltin below.
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(at));
  Register null_value = t1;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(null_value));
  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
  __ mov(a0, v0);
  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(a0, &convert);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
  __ bind(&convert);
  __ push(a0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ mov(a0, v0);
  __ bind(&done_convert);
  __ push(a0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE));

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ lw(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ Branch(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(a0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kMetaMapRootIndex);
  __ Branch(&fixed_array, ne, a2, Operand(at));

  // We got a map in register v0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(a1, v0);
  __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));

  __ LoadInstanceDescriptors(v0, a2);
  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ li(a0, Operand(Smi::FromInt(0)));
  // Push map, enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(v0, a2, a1, a0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register v0. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  __ li(a1, FeedbackVector());
  __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ sw(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(slot)));

  __ li(a1, Operand(Smi::FromInt(1)));  // Smi indicates slow check
  __ lw(a2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ GetObjectType(a2, a3, a3);
  __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE));
  __ li(a1, Operand(Smi::FromInt(0)));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ Push(a1, v0);  // Smi and array
  __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
  __ li(a0, Operand(Smi::FromInt(0)));
  __ Push(a1, a0);  // Fixed array length (as smi) and initial index.
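
  // At this point the for-in state occupies five stack slots (matching the
  // '__ Drop(5)' at loop exit):
  //   sp[0]: current index (smi 0 initially)
  //   sp[1]: array length (smi)
  //   sp[2]: fixed array of keys (or enum cache)
  //   sp[3]: map of the enumerable, or smi(1)/smi(0) for slow/proxy checks
  //   sp[4]: the enumerable object itself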

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  // Load the current count to a0, load the length to a1.
  __ lw(a0, MemOperand(sp, 0 * kPointerSize));
  __ lw(a1, MemOperand(sp, 1 * kPointerSize));
  __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));

  // Get the current entry of the array into register a3.
  __ lw(a2, MemOperand(sp, 2 * kPointerSize));
  __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
  __ addu(t0, a2, t0);  // Array base + scaled (smi) index.
  __ lw(a3, MemOperand(t0));  // Current entry.

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register a2.
  __ lw(a2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ lw(a1, MemOperand(sp, 4 * kPointerSize));
  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ Branch(&update_each, eq, t0, Operand(a2));

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  DCHECK_EQ(Smi::FromInt(0), 0);
  __ Branch(&update_each, eq, a2, Operand(zero_reg));

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(a1, a3);  // Enumerable and current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ mov(a3, result_register());
  __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg));

  // Update the 'each' property or variable from the possibly filtered
  // entry in register a3.
  __ bind(&update_each);
  __ mov(result_register(), a3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(a0);
  __ Addu(a0, a0, Operand(Smi::FromInt(1)));
  __ push(a0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ Branch(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");
  SetStatementPosition(stmt);

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // var iterator = iterable[Symbol.iterator]();
  VisitForEffect(stmt->assign_iterator());

  // Loop entry.
  __ bind(loop_statement.continue_label());

  // result = iterator.next()
  VisitForEffect(stmt->next_result());

  // if (result.done) break;
  Label result_not_done;
  VisitForControl(stmt->result_done(),
                  loop_statement.break_label(),
                  &result_not_done,
                  &result_not_done);
  __ bind(&result_not_done);

  // each = result.value
  VisitForEffect(stmt->assign_each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ jmp(loop_statement.continue_label());

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->strict_mode(), info->kind());
    __ li(a2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ li(a0, Operand(info));
    __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ Push(cp, a0, a1);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(v0);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}
1343 
1344 
1345 void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
1346  Comment cnmt(masm_, "[ SuperReference ");
1347 
1350 
1351  Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
1352  __ li(LoadDescriptor::NameRegister(), home_object_symbol);
1353 
1354  CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());
1355 
1356  Label done;
1357  __ Branch(&done, ne, v0, Operand(isolate()->factory()->undefined_value()));
1358  __ CallRuntime(Runtime::kThrowNonMethodError, 0);
1359  __ bind(&done);
1360 }


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register current = cp;
  Register next = a1;
  Register temp = a2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX));
        __ Branch(slow, ne, temp, Operand(zero_reg));
      }
      // Load next context in chain.
      __ lw(next, ContextOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ lw(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(t0, Heap::kNativeContextMapRootIndex);
    __ Branch(&fast, eq, temp, Operand(t0));
    // Check that extension is NULL.
    __ lw(temp, ContextOperand(next, Context::EXTENSION_INDEX));
    __ Branch(slow, ne, temp, Operand(zero_reg));
    // Load next context in chain.
    __ lw(next, ContextOperand(next, Context::PREVIOUS_INDEX));
    __ Branch(&loop);
    __ bind(&fast);
  }

  __ lw(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
  __ li(LoadDescriptor::NameRegister(), Operand(proxy->var()->name()));
  if (FLAG_vector_ics) {
    __ li(VectorLoadICDescriptor::SlotRegister(),
          Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
  }

  ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
      ? NOT_CONTEXTUAL
      : CONTEXTUAL;
  CallLoadIC(mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = a3;
  Register temp = t0;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
        __ Branch(slow, ne, temp, Operand(zero_reg));
      }
      __ lw(next, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  __ Branch(slow, ne, temp, Operand(zero_reg));

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
    __ Branch(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ lw(v0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
      __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
        __ Movz(v0, a0, at);  // Conditional move: return Undefined if TheHole.
      } else {  // LET || CONST
        __ Branch(done, ne, at, Operand(zero_reg));
        __ li(a0, Operand(var->name()));
        __ push(a0);
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ Branch(done);
  }
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      __ lw(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
      __ li(LoadDescriptor::NameRegister(), Operand(var->name()));
      if (FLAG_vector_ics) {
        __ li(VectorLoadICDescriptor::SlotRegister(),
              Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
      }
      CallLoadIC(CONTEXTUAL);
      context()->Plug(v0);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        DCHECK(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
          DCHECK(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          GetVar(v0, var);
          __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
          __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            Label done;
            __ Branch(&done, ne, at, Operand(zero_reg));
            __ li(a0, Operand(var->name()));
            __ push(a0);
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
            __ bind(&done);
          } else {
            // Uninitialized const bindings outside of harmony mode are unholed.
            DCHECK(var->mode() == CONST_LEGACY);
            __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
            __ Movz(v0, a0, at);  // Conditional move: Undefined if TheHole.
          }
          context()->Plug(v0);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      __ li(a1, Operand(var->name()));
      __ Push(cp, a1);  // Context and name.
      __ CallRuntime(Runtime::kLoadLookupSlot, 2);
      __ bind(&done);
      context()->Plug(v0);
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // t1 = materialized value (RegExp literal)
  // t0 = JS function, literals array
  // a3 = literal index
  // a2 = RegExp pattern
  // a1 = RegExp flags
  // a0 = RegExp literal clone
  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(t0, FieldMemOperand(a0, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ lw(t1, FieldMemOperand(t0, literal_offset));
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&materialized, ne, t1, Operand(at));

  // Create regexp literal using runtime function.
  // Result will be in v0.
  __ li(a3, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a2, Operand(expr->pattern()));
  __ li(a1, Operand(expr->flags()));
  __ Push(t0, a3, a2, a1);
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mov(t1, v0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ li(a0, Operand(Smi::FromInt(size)));
  __ Push(t1, a0);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(t1);

  __ bind(&allocated);

  // After this, registers are used as follows:
  // v0: Newly allocated regexp.
  // t1: Materialized regexp.
  // a2: temp.
  __ CopyFields(v0, t1, a2.bit(), size / kPointerSize);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(a1, Heap::kNullValueRootIndex);
    __ push(a1);
  } else {
    VisitForStackValue(expression);
  }
}
1651 
1652 
1653 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1654  Comment cmnt(masm_, "[ ObjectLiteral");
1655 
1656  expr->BuildConstantProperties(isolate());
1657  Handle<FixedArray> constant_properties = expr->constant_properties();
1660  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1661  __ li(a1, Operand(constant_properties));
1662  int flags = expr->fast_elements()
1663  ? ObjectLiteral::kFastElements
1664  : ObjectLiteral::kNoFlags;
1665  flags |= expr->has_function()
1666  ? ObjectLiteral::kHasFunction
1667  : ObjectLiteral::kNoFlags;
1668  __ li(a0, Operand(Smi::FromInt(flags)));
1669  int properties_count = constant_properties->length() / 2;
1670  if (expr->may_store_doubles() || expr->depth() > 1 ||
1671  masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements ||
1673  __ Push(a3, a2, a1, a0);
1674  __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1675  } else {
1676  FastCloneShallowObjectStub stub(isolate(), properties_count);
1677  __ CallStub(&stub);
1678  }
1679 
1680  // If result_saved is true the result is on top of the stack. If
1681  // result_saved is false the result is in v0.
1682  bool result_saved = false;
1683 
1684  // Mark all computed expressions that are bound to a key that
1685  // is shadowed by a later occurrence of the same key. For the
1686  // marked expressions, no store code is emitted.
1687  expr->CalculateEmitStore(zone());
1688 
1689  AccessorTable accessor_table(zone());
1690  for (int i = 0; i < expr->properties()->length(); i++) {
1691  ObjectLiteral::Property* property = expr->properties()->at(i);
1692  if (property->IsCompileTimeValue()) continue;
1693 
1694  Literal* key = property->key();
1695  Expression* value = property->value();
1696  if (!result_saved) {
1697  __ push(v0); // Save result on stack.
1698  result_saved = true;
1699  }
1700  switch (property->kind()) {
1702  UNREACHABLE();
1703  case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1704  DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1705  // Fall through.
1706  case ObjectLiteral::Property::COMPUTED:
1707  if (key->value()->IsInternalizedString()) {
1708  if (property->emit_store()) {
1709  VisitForAccumulatorValue(value);
1712  __ li(StoreDescriptor::NameRegister(), Operand(key->value()));
1714  CallStoreIC(key->LiteralFeedbackId());
1716  } else {
1717  VisitForEffect(value);
1718  }
1719  break;
1720  }
1721  // Duplicate receiver on stack.
1722  __ lw(a0, MemOperand(sp));
1723  __ push(a0);
1724  VisitForStackValue(key);
1725  VisitForStackValue(value);
1726  if (property->emit_store()) {
1727  __ li(a0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes.
1728  __ push(a0);
1729  __ CallRuntime(Runtime::kSetProperty, 4);
1730  } else {
1731  __ Drop(3);
1732  }
1733  break;
1734  case ObjectLiteral::Property::PROTOTYPE:
1735  // Duplicate receiver on stack.
1736  __ lw(a0, MemOperand(sp));
1737  __ push(a0);
1738  VisitForStackValue(value);
1739  if (property->emit_store()) {
1740  __ CallRuntime(Runtime::kSetPrototype, 2);
1741  } else {
1742  __ Drop(2);
1743  }
1744  break;
1745  case ObjectLiteral::Property::GETTER:
1746  accessor_table.lookup(key)->second->getter = value;
1747  break;
1748  case ObjectLiteral::Property::SETTER:
1749  accessor_table.lookup(key)->second->setter = value;
1750  break;
1751  }
1752  }
1753 
1754  // Emit code to define accessors, using only a single call to the runtime for
1755  // each pair of corresponding getters and setters.
1756  for (AccessorTable::Iterator it = accessor_table.begin();
1757  it != accessor_table.end();
1758  ++it) {
1759  __ lw(a0, MemOperand(sp)); // Duplicate receiver.
1760  __ push(a0);
1761  VisitForStackValue(it->first);
1762  EmitAccessor(it->second->getter);
1763  EmitAccessor(it->second->setter);
1764  __ li(a0, Operand(Smi::FromInt(NONE)));
1765  __ push(a0);
1766  __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1767  }
1768 
1769  if (expr->has_function()) {
1770  DCHECK(result_saved);
1771  __ lw(a0, MemOperand(sp));
1772  __ push(a0);
1773  __ CallRuntime(Runtime::kToFastProperties, 1);
1774  }
1775 
1776  if (result_saved) {
1777  context()->PlugTOS();
1778  } else {
1779  context()->Plug(v0);
1780  }
1781 }
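// --- Editor's sketch (added illustration, not part of the original file) ---
// The accessor table above pairs a getter and a setter that share a key, so
// each key costs exactly one Runtime::kDefineAccessorPropertyUnchecked call.
// A minimal C++ model of that pairing; all names below are invented.
struct AccessorEntrySketch {
  const void* getter;  // stands in for Expression*
  const void* setter;
};

static void MergeAccessorSketch(AccessorEntrySketch* entry,
                                const void* fn, bool is_getter) {
  // Mirrors the GETTER/SETTER cases above: both halves land in one entry.
  if (is_getter) {
    entry->getter = fn;
  } else {
    entry->setter = fn;
  }
}
// ---------------------------------------------------------------------------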
1782 
1783 
1784 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1785  Comment cmnt(masm_, "[ ArrayLiteral");
1786 
1787  expr->BuildConstantElements(isolate());
1788  int flags = expr->depth() == 1
1789  ? ArrayLiteral::kShallowElements
1790  : ArrayLiteral::kNoFlags;
1791 
1792  ZoneList<Expression*>* subexprs = expr->values();
1793  int length = subexprs->length();
1794 
1795  Handle<FixedArray> constant_elements = expr->constant_elements();
1796  DCHECK_EQ(2, constant_elements->length());
1797  ElementsKind constant_elements_kind =
1798  static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1799  bool has_fast_elements =
1800  IsFastObjectElementsKind(constant_elements_kind);
1801  Handle<FixedArrayBase> constant_elements_values(
1802  FixedArrayBase::cast(constant_elements->get(1)));
1803 
1804  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1805  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1806  // If the only customer of allocation sites is transitioning, then
1807  // we can turn it off if we don't have anywhere else to transition to.
1808  allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1809  }
1810 
1811  __ mov(a0, result_register());
1812  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1813  __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
1814  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1815  __ li(a1, Operand(constant_elements));
1816  if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
1817  __ li(a0, Operand(Smi::FromInt(flags)));
1818  __ Push(a3, a2, a1, a0);
1819  __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1820  } else {
1821  FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1822  __ CallStub(&stub);
1823  }
1824 
1825  bool result_saved = false; // Is the result saved to the stack?
1826 
1827  // Emit code to evaluate all the non-constant subexpressions and to store
1828  // them into the newly cloned array.
1829  for (int i = 0; i < length; i++) {
1830  Expression* subexpr = subexprs->at(i);
1831  // If the subexpression is a literal or a simple materialized literal it
1832  // is already set in the cloned array.
1833  if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1834 
1835  if (!result_saved) {
1836  __ push(v0); // array literal
1837  __ Push(Smi::FromInt(expr->literal_index()));
1838  result_saved = true;
1839  }
1840 
1841  VisitForAccumulatorValue(subexpr);
1842 
1843  if (IsFastObjectElementsKind(constant_elements_kind)) {
1844  int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1845  __ lw(t2, MemOperand(sp, kPointerSize)); // Copy of array literal.
1846  __ lw(a1, FieldMemOperand(t2, JSObject::kElementsOffset));
1847  __ sw(result_register(), FieldMemOperand(a1, offset));
1848  // Update the write barrier for the array store.
1849  __ RecordWriteField(a1, offset, result_register(), a2,
1850  kRAHasBeenSaved, kDontSaveFPRegs,
1851  EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1852  } else {
1853  __ li(a3, Operand(Smi::FromInt(i)));
1854  __ mov(a0, result_register());
1855  StoreArrayLiteralElementStub stub(isolate());
1856  __ CallStub(&stub);
1857  }
1858 
1859  PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1860  }
1861  if (result_saved) {
1862  __ Pop(); // literal index
1863  context()->PlugTOS();
1864  } else {
1865  context()->Plug(v0);
1866  }
1867 }
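// --- Editor's sketch (added illustration, not part of the original file) ---
// A hedged model of the fast/slow split above: shallow literals within the
// stub's limits clone via FastCloneShallowArrayStub, everything else goes to
// Runtime::kCreateArrayLiteral. The threshold value below is illustrative;
// the real bound is JSObject::kInitialMaxFastElementArray.
static bool ArrayLiteralNeedsRuntimeSketch(int depth, int length) {
  const int kIllustrativeMaxFastElements = 16;  // stand-in constant
  return depth > 1 || length > kIllustrativeMaxFastElements;
}
// ---------------------------------------------------------------------------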
1868 
1869 
1870 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1871  DCHECK(expr->target()->IsValidReferenceExpression());
1872 
1873  Comment cmnt(masm_, "[ Assignment");
1874 
1875  // Left-hand side can only be a property, a global or a (parameter or local)
1876  // slot.
1877  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1878  LhsKind assign_type = VARIABLE;
1879  Property* property = expr->target()->AsProperty();
1880  if (property != NULL) {
1881  assign_type = (property->key()->IsPropertyName())
1882  ? NAMED_PROPERTY
1883  : KEYED_PROPERTY;
1884  }
1885 
1886  // Evaluate LHS expression.
1887  switch (assign_type) {
1888  case VARIABLE:
1889  // Nothing to do here.
1890  break;
1891  case NAMED_PROPERTY:
1892  if (expr->is_compound()) {
1893  // We need the receiver both on the stack and in the register.
1894  VisitForStackValue(property->obj());
1895  __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1896  } else {
1897  VisitForStackValue(property->obj());
1898  }
1899  break;
1900  case KEYED_PROPERTY:
1901  // We need the key and receiver on both the stack and in v0 and a1.
1902  if (expr->is_compound()) {
1903  VisitForStackValue(property->obj());
1904  VisitForStackValue(property->key());
1905  __ lw(LoadDescriptor::ReceiverRegister(),
1906  MemOperand(sp, 1 * kPointerSize));
1907  __ lw(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1908  } else {
1909  VisitForStackValue(property->obj());
1910  VisitForStackValue(property->key());
1911  }
1912  break;
1913  }
1914 
1915  // For compound assignments we need another deoptimization point after the
1916  // variable/property load.
1917  if (expr->is_compound()) {
1918  { AccumulatorValueContext context(this);
1919  switch (assign_type) {
1920  case VARIABLE:
1921  EmitVariableLoad(expr->target()->AsVariableProxy());
1922  PrepareForBailout(expr->target(), TOS_REG);
1923  break;
1924  case NAMED_PROPERTY:
1925  EmitNamedPropertyLoad(property);
1926  PrepareForBailoutForId(property->LoadId(), TOS_REG);
1927  break;
1928  case KEYED_PROPERTY:
1929  EmitKeyedPropertyLoad(property);
1930  PrepareForBailoutForId(property->LoadId(), TOS_REG);
1931  break;
1932  }
1933  }
1934 
1935  Token::Value op = expr->binary_op();
1936  __ push(v0); // Left operand goes on the stack.
1937  VisitForAccumulatorValue(expr->value());
1938 
1939  OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1940  ? OVERWRITE_RIGHT
1941  : NO_OVERWRITE;
1942  SetSourcePosition(expr->position() + 1);
1943  AccumulatorValueContext context(this);
1944  if (ShouldInlineSmiCase(op)) {
1945  EmitInlineSmiBinaryOp(expr->binary_operation(),
1946  op,
1947  mode,
1948  expr->target(),
1949  expr->value());
1950  } else {
1951  EmitBinaryOp(expr->binary_operation(), op, mode);
1952  }
1953 
1954  // Deoptimization point in case the binary operation may have side effects.
1955  PrepareForBailout(expr->binary_operation(), TOS_REG);
1956  } else {
1957  VisitForAccumulatorValue(expr->value());
1958  }
1959 
1960  // Record source position before possible IC call.
1961  SetSourcePosition(expr->position());
1962 
1963  // Store the value.
1964  switch (assign_type) {
1965  case VARIABLE:
1966  EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1967  expr->op());
1968  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1969  context()->Plug(v0);
1970  break;
1971  case NAMED_PROPERTY:
1972  EmitNamedPropertyAssignment(expr);
1973  break;
1974  case KEYED_PROPERTY:
1975  EmitKeyedPropertyAssignment(expr);
1976  break;
1977  }
1978 }
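// --- Editor's sketch (added illustration, not part of the original file) ---
// Schematic of how VisitAssignment lowers a compound assignment such as
// `target += value`: load the target, evaluate the RHS, combine, store back
// through the same access path. Plain C++ stand-in, not emitted code.
static int CompoundAssignSketch(int loaded_target, int rhs) {
  int combined = loaded_target + rhs;  // EmitBinaryOp / EmitInlineSmiBinaryOp
  return combined;  // handed to EmitVariableAssignment or the store ICs
}
// ---------------------------------------------------------------------------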
1979 
1980 
1981 void FullCodeGenerator::VisitYield(Yield* expr) {
1982  Comment cmnt(masm_, "[ Yield");
1983  // Evaluate yielded value first; the initial iterator definition depends on
1984  // this. It stays on the stack while we update the iterator.
1985  VisitForStackValue(expr->expression());
1986 
1987  switch (expr->yield_kind()) {
1988  case Yield::kSuspend:
1989  // Pop value from top-of-stack slot; box result into result register.
1990  EmitCreateIteratorResult(false);
1991  __ push(result_register());
1992  // Fall through.
1993  case Yield::kInitial: {
1994  Label suspend, continuation, post_runtime, resume;
1995 
1996  __ jmp(&suspend);
1997 
1998  __ bind(&continuation);
1999  __ jmp(&resume);
2000 
2001  __ bind(&suspend);
2002  VisitForAccumulatorValue(expr->generator_object());
2003  DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2004  __ li(a1, Operand(Smi::FromInt(continuation.pos())));
2005  __ sw(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
2006  __ sw(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
2007  __ mov(a1, cp);
2008  __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
2009  kRAHasBeenSaved, kDontSaveFPRegs);
2010  __ Addu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2011  __ Branch(&post_runtime, eq, sp, Operand(a1));
2012  __ push(v0); // generator object
2013  __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2014  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2015  __ bind(&post_runtime);
2016  __ pop(result_register());
2017  EmitReturnSequence();
2018 
2019  __ bind(&resume);
2020  context()->Plug(result_register());
2021  break;
2022  }
2023 
2024  case Yield::kFinal: {
2025  VisitForAccumulatorValue(expr->generator_object());
2026  __ li(a1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2027  __ sw(a1, FieldMemOperand(result_register(),
2028  JSGeneratorObject::kContinuationOffset));
2029  // Pop value from top-of-stack slot, box result into result register.
2030  EmitCreateIteratorResult(true);
2031  EmitUnwindBeforeReturn();
2032  EmitReturnSequence();
2033  break;
2035 
2036  case Yield::kDelegating: {
2037  VisitForStackValue(expr->generator_object());
2038 
2039  // Initial stack layout is as follows:
2040  // [sp + 1 * kPointerSize] iter
2041  // [sp + 0 * kPointerSize] g
2042 
2043  Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2044  Label l_next, l_call;
2045  Register load_receiver = LoadDescriptor::ReceiverRegister();
2046  Register load_name = LoadDescriptor::NameRegister();
2047 
2048  // Initial send value is undefined.
2049  __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
2050  __ Branch(&l_next);
2051 
2052  // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2053  __ bind(&l_catch);
2054  __ mov(a0, v0);
2055  handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2056  __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
2057  __ lw(a3, MemOperand(sp, 1 * kPointerSize)); // iter
2058  __ Push(load_name, a3, a0); // "throw", iter, except
2059  __ jmp(&l_call);
2060 
2061  // try { received = %yield result }
2062  // Shuffle the received result above a try handler and yield it without
2063  // re-boxing.
2064  __ bind(&l_try);
2065  __ pop(a0); // result
2066  __ PushTryHandler(StackHandler::CATCH, expr->index());
2067  const int handler_size = StackHandlerConstants::kSize;
2068  __ push(a0); // result
2069  __ jmp(&l_suspend);
2070  __ bind(&l_continuation);
2071  __ mov(a0, v0);
2072  __ jmp(&l_resume);
2073  __ bind(&l_suspend);
2074  const int generator_object_depth = kPointerSize + handler_size;
2075  __ lw(a0, MemOperand(sp, generator_object_depth));
2076  __ push(a0); // g
2077  DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2078  __ li(a1, Operand(Smi::FromInt(l_continuation.pos())));
2079  __ sw(a1, FieldMemOperand(a0, JSGeneratorObject::kContinuationOffset));
2080  __ sw(cp, FieldMemOperand(a0, JSGeneratorObject::kContextOffset));
2081  __ mov(a1, cp);
2082  __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2,
2083  kRAHasBeenSaved, kDontSaveFPRegs);
2084  __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2085  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2086  __ pop(v0); // result
2087  EmitReturnSequence();
2088  __ mov(a0, v0);
2089  __ bind(&l_resume); // received in a0
2090  __ PopTryHandler();
2091 
2092  // receiver = iter; f = 'next'; arg = received;
2093  __ bind(&l_next);
2094 
2095  __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
2096  __ lw(a3, MemOperand(sp, 1 * kPointerSize)); // iter
2097  __ Push(load_name, a3, a0); // "next", iter, received
2098 
2099  // result = receiver[f](arg);
2100  __ bind(&l_call);
2101  __ lw(load_receiver, MemOperand(sp, kPointerSize));
2102  __ lw(load_name, MemOperand(sp, 2 * kPointerSize));
2103  if (FLAG_vector_ics) {
2104  __ li(VectorLoadICDescriptor::SlotRegister(),
2105  Operand(Smi::FromInt(expr->KeyedLoadFeedbackSlot())));
2106  }
2107  Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2108  CallIC(ic, TypeFeedbackId::None());
2109  __ mov(a0, v0);
2110  __ mov(a1, a0);
2111  __ sw(a1, MemOperand(sp, 2 * kPointerSize));
2112  CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2113  __ CallStub(&stub);
2114 
2115  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2116  __ Drop(1); // The function is still on the stack; drop it.
2117 
2118  // if (!result.done) goto l_try;
2119  __ Move(load_receiver, v0);
2120 
2121  __ push(load_receiver); // save result
2122  __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2123  if (FLAG_vector_ics) {
2124  __ li(VectorLoadICDescriptor::SlotRegister(),
2125  Operand(Smi::FromInt(expr->DoneFeedbackSlot())));
2126  }
2127  CallLoadIC(NOT_CONTEXTUAL); // v0=result.done
2128  __ mov(a0, v0);
2129  Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2130  CallIC(bool_ic);
2131  __ Branch(&l_try, eq, v0, Operand(zero_reg));
2132 
2133  // result.value
2134  __ pop(load_receiver); // result
2135  __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2136  if (FLAG_vector_ics) {
2137  __ li(VectorLoadICDescriptor::SlotRegister(),
2138  Operand(Smi::FromInt(expr->ValueFeedbackSlot())));
2139  }
2140  CallLoadIC(NOT_CONTEXTUAL); // v0=result.value
2141  context()->DropAndPlug(2, v0); // drop iter and g
2142  break;
2143  }
2144  }
2145 }
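// --- Editor's sketch (added illustration, not part of the original file) ---
// The suspend protocol above, modeled as plain data: before control returns
// to the caller, the generator object records where to continue and which
// context to restore. Field names below are invented.
struct GeneratorStateSketch {
  int continuation;     // label position, stored as a smi above
  void* context;        // saved cp, published with a write barrier
  void* operand_stack;  // spilled expression stack, fixed up on resume
};
// ---------------------------------------------------------------------------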
2146 
2147 
2148 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2149  Expression *value,
2150  JSGeneratorObject::ResumeMode resume_mode) {
2151  // The value stays in a0, and is ultimately read by the resumed generator, as
2152  // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2153  // is read to throw the value when the resumed generator is already closed.
2154  // a1 will hold the generator object until the activation has been resumed.
2155  VisitForStackValue(generator);
2156  VisitForAccumulatorValue(value);
2157  __ pop(a1);
2158 
2159  // Check generator state.
2160  Label wrong_state, closed_state, done;
2161  __ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2162  STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
2163  STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
2164  __ Branch(&closed_state, eq, a3, Operand(zero_reg));
2165  __ Branch(&wrong_state, lt, a3, Operand(zero_reg));
2166 
2167  // Load suspended function and context.
2168  __ lw(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
2169  __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
2170 
2171  // Load receiver and store as the first argument.
2172  __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
2173  __ push(a2);
2174 
2175  // Push holes for the rest of the arguments to the generator function.
2176  __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
2177  __ lw(a3,
2178  FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
2179  __ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
2180  Label push_argument_holes, push_frame;
2181  __ bind(&push_argument_holes);
2182  __ Subu(a3, a3, Operand(Smi::FromInt(1)));
2183  __ Branch(&push_frame, lt, a3, Operand(zero_reg));
2184  __ push(a2);
2185  __ jmp(&push_argument_holes);
2186 
2187  // Enter a new JavaScript frame, and initialize its slots as they were when
2188  // the generator was suspended.
2189  Label resume_frame;
2190  __ bind(&push_frame);
2191  __ Call(&resume_frame);
2192  __ jmp(&done);
2193  __ bind(&resume_frame);
2194  // ra = return address.
2195  // fp = caller's frame pointer.
2196  // cp = callee's context,
2197  // t0 = callee's JS function.
2198  __ Push(ra, fp, cp, t0);
2199  // Adjust FP to point to saved FP.
2200  __ Addu(fp, sp, 2 * kPointerSize);
2201 
2202  // Load the operand stack size.
2203  __ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
2204  __ lw(a3, FieldMemOperand(a3, FixedArray::kLengthOffset));
2205  __ SmiUntag(a3);
2206 
2207  // If we are sending a value and there is no operand stack, we can jump back
2208  // in directly.
2209  if (resume_mode == JSGeneratorObject::NEXT) {
2210  Label slow_resume;
2211  __ Branch(&slow_resume, ne, a3, Operand(zero_reg));
2212  __ lw(a3, FieldMemOperand(t0, JSFunction::kCodeEntryOffset));
2213  __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2214  __ SmiUntag(a2);
2215  __ Addu(a3, a3, Operand(a2));
2216  __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2217  __ sw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2218  __ Jump(a3);
2219  __ bind(&slow_resume);
2220  }
2221 
2222  // Otherwise, we push holes for the operand stack and call the runtime to fix
2223  // up the stack and the handlers.
2224  Label push_operand_holes, call_resume;
2225  __ bind(&push_operand_holes);
2226  __ Subu(a3, a3, Operand(1));
2227  __ Branch(&call_resume, lt, a3, Operand(zero_reg));
2228  __ push(a2);
2229  __ Branch(&push_operand_holes);
2230  __ bind(&call_resume);
2231  DCHECK(!result_register().is(a1));
2232  __ Push(a1, result_register());
2233  __ Push(Smi::FromInt(resume_mode));
2234  __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2235  // Not reached: the runtime call returns elsewhere.
2236  __ stop("not-reached");
2237 
2238  // Reach here when generator is closed.
2239  __ bind(&closed_state);
2240  if (resume_mode == JSGeneratorObject::NEXT) {
2241  // Return completed iterator result when generator is closed.
2242  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2243  __ push(a2);
2244  // Pop value from top-of-stack slot; box result into result register.
2245  EmitCreateIteratorResult(true);
2246  } else {
2247  // Throw the provided value.
2248  __ push(a0);
2249  __ CallRuntime(Runtime::kThrow, 1);
2250  }
2251  __ jmp(&done);
2252 
2253  // Throw error if we attempt to operate on a running generator.
2254  __ bind(&wrong_state);
2255  __ push(a1);
2256  __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
2257 
2258  __ bind(&done);
2259  context()->Plug(result_register());
2260 }
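// --- Editor's sketch (added illustration, not part of the original file) ---
// The push_argument_holes loop above in C++ form: one hole value is pushed
// per formal parameter, counting down from the parameter count read out of
// the SharedFunctionInfo.
static int CountArgumentHolesSketch(int formal_parameter_count) {
  int pushed = 0;
  for (int left = formal_parameter_count; left > 0; left--) {
    pushed++;  // corresponds to one __ push(a2) of the-hole
  }
  return pushed;
}
// ---------------------------------------------------------------------------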
2261 
2262 
2263  void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2264  Label gc_required;
2265  Label allocated;
2266 
2267  Handle<Map> map(isolate()->native_context()->iterator_result_map());
2268 
2269  __ Allocate(map->instance_size(), v0, a2, a3, &gc_required, TAG_OBJECT);
2270  __ jmp(&allocated);
2271 
2272  __ bind(&gc_required);
2273  __ Push(Smi::FromInt(map->instance_size()));
2274  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2275  __ lw(context_register(),
2276  MemOperand(fp, StandardFrameConstants::kContextOffset));
2277 
2278  __ bind(&allocated);
2279  __ li(a1, Operand(map));
2280  __ pop(a2);
2281  __ li(a3, Operand(isolate()->factory()->ToBoolean(done)));
2282  __ li(t0, Operand(isolate()->factory()->empty_fixed_array()));
2283  DCHECK_EQ(map->instance_size(), 5 * kPointerSize);
2284  __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2285  __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset));
2286  __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
2287  __ sw(a2,
2288  FieldMemOperand(v0, JSGeneratorObject::kResultValuePropertyOffset));
2289  __ sw(a3,
2290  FieldMemOperand(v0, JSGeneratorObject::kResultDonePropertyOffset));
2291 
2292  // Only the value field needs a write barrier, as the other values are in the
2293  // root set.
2294  __ RecordWriteField(v0, JSGeneratorObject::kResultValuePropertyOffset,
2295  a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
2296 }
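// --- Editor's sketch (added illustration, not part of the original file) ---
// The JS-visible shape built by EmitCreateIteratorResult, as a struct: a
// fresh object carrying exactly the two iterator-protocol properties.
struct IterResultSketch {
  void* value;  // the popped stack top (a2 above)
  bool done;    // the boolean materialized into a3 above
};
// ---------------------------------------------------------------------------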
2297 
2298 
2299 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2300  SetSourcePosition(prop->position());
2301  Literal* key = prop->key()->AsLiteral();
2302 
2303  __ li(LoadDescriptor::NameRegister(), Operand(key->value()));
2304  if (FLAG_vector_ics) {
2305  __ li(VectorLoadICDescriptor::SlotRegister(),
2306  Operand(Smi::FromInt(prop->PropertyFeedbackSlot())));
2307  CallLoadIC(NOT_CONTEXTUAL);
2308  } else {
2309  CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2310  }
2311 }
2312 
2313 
2314 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2315  SetSourcePosition(prop->position());
2316  Literal* key = prop->key()->AsLiteral();
2317  DCHECK(!key->value()->IsSmi());
2318  DCHECK(prop->IsSuperAccess());
2319 
2320  SuperReference* super_ref = prop->obj()->AsSuperReference();
2321  EmitLoadHomeObject(super_ref);
2322  __ Push(v0);
2323  VisitForStackValue(super_ref->this_var());
2324  __ Push(key->value());
2325  __ CallRuntime(Runtime::kLoadFromSuper, 3);
2326 }
2327 
2328 
2329 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2330  SetSourcePosition(prop->position());
2331  Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2332  if (FLAG_vector_ics) {
2333  __ li(VectorLoadICDescriptor::SlotRegister(),
2334  Operand(Smi::FromInt(prop->PropertyFeedbackSlot())));
2335  CallIC(ic);
2336  } else {
2337  CallIC(ic, prop->PropertyFeedbackId());
2338  }
2339 }
2340 
2341 
2342 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2343  Token::Value op,
2344  OverwriteMode mode,
2345  Expression* left_expr,
2346  Expression* right_expr) {
2347  Label done, smi_case, stub_call;
2348 
2349  Register scratch1 = a2;
2350  Register scratch2 = a3;
2351 
2352  // Get the arguments.
2353  Register left = a1;
2354  Register right = a0;
2355  __ pop(left);
2356  __ mov(a0, result_register());
2357 
2358  // Perform combined smi check on both operands.
2359  __ Or(scratch1, left, Operand(right));
2360  STATIC_ASSERT(kSmiTag == 0);
2361  JumpPatchSite patch_site(masm_);
2362  patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2363 
2364  __ bind(&stub_call);
2365  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2366  CallIC(code, expr->BinaryOperationFeedbackId());
2367  patch_site.EmitPatchInfo();
2368  __ jmp(&done);
2369 
2370  __ bind(&smi_case);
2371  // Smi case. This code works the same way as the smi-smi case in the type
2372  // recording binary operation stub.
2373  switch (op) {
2374  case Token::SAR:
2375  __ GetLeastBitsFromSmi(scratch1, right, 5);
2376  __ srav(right, left, scratch1);
2377  __ And(v0, right, Operand(~kSmiTagMask));
2378  break;
2379  case Token::SHL: {
2380  __ SmiUntag(scratch1, left);
2381  __ GetLeastBitsFromSmi(scratch2, right, 5);
2382  __ sllv(scratch1, scratch1, scratch2);
2383  __ Addu(scratch2, scratch1, Operand(0x40000000));
2384  __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
2385  __ SmiTag(v0, scratch1);
2386  break;
2387  }
2388  case Token::SHR: {
2389  __ SmiUntag(scratch1, left);
2390  __ GetLeastBitsFromSmi(scratch2, right, 5);
2391  __ srlv(scratch1, scratch1, scratch2);
2392  __ And(scratch2, scratch1, 0xc0000000);
2393  __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
2394  __ SmiTag(v0, scratch1);
2395  break;
2396  }
2397  case Token::ADD:
2398  __ AdduAndCheckForOverflow(v0, left, right, scratch1);
2399  __ BranchOnOverflow(&stub_call, scratch1);
2400  break;
2401  case Token::SUB:
2402  __ SubuAndCheckForOverflow(v0, left, right, scratch1);
2403  __ BranchOnOverflow(&stub_call, scratch1);
2404  break;
2405  case Token::MUL: {
2406  __ SmiUntag(scratch1, right);
2407  __ Mul(scratch2, v0, left, scratch1);
2408  __ sra(scratch1, v0, 31);
2409  __ Branch(&stub_call, ne, scratch1, Operand(scratch2));
2410  __ Branch(&done, ne, v0, Operand(zero_reg));
2411  __ Addu(scratch2, right, left);
2412  __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
2413  DCHECK(Smi::FromInt(0) == 0);
2414  __ mov(v0, zero_reg);
2415  break;
2416  }
2417  case Token::BIT_OR:
2418  __ Or(v0, left, Operand(right));
2419  break;
2420  case Token::BIT_AND:
2421  __ And(v0, left, Operand(right));
2422  break;
2423  case Token::BIT_XOR:
2424  __ Xor(v0, left, Operand(right));
2425  break;
2426  default:
2427  UNREACHABLE();
2428  }
2429 
2430  __ bind(&done);
2431  context()->Plug(v0);
2432 }
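// --- Editor's sketch (added illustration, not part of the original file) ---
// The 32-bit smi encoding the inline cases above rely on: a smi is the
// integer times two (tag bit 0 == 0). Addition and bitwise ops work on
// tagged values directly; shifts and multiplication must untag first.
static inline int SmiTagSketch(int value) { return value * 2; }
static inline int SmiUntagSketch(int smi) { return smi / 2; }

static int SmiAddSketch(int left_smi, int right_smi) {
  // Matches the Token::ADD case: 2a + 2b == 2(a + b), so only the overflow
  // check (elided here) is needed before using the result as-is.
  return left_smi + right_smi;
}
// ---------------------------------------------------------------------------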
2433 
2434 
2435 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2436  Token::Value op,
2437  OverwriteMode mode) {
2438  __ mov(a0, result_register());
2439  __ pop(a1);
2440  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2441  JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2442  CallIC(code, expr->BinaryOperationFeedbackId());
2443  patch_site.EmitPatchInfo();
2444  context()->Plug(v0);
2445 }
2446 
2447 
2448 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2449  DCHECK(expr->IsValidReferenceExpression());
2450 
2451  // Left-hand side can only be a property, a global or a (parameter or local)
2452  // slot.
2453  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2454  LhsKind assign_type = VARIABLE;
2455  Property* prop = expr->AsProperty();
2456  if (prop != NULL) {
2457  assign_type = (prop->key()->IsPropertyName())
2458  ? NAMED_PROPERTY
2459  : KEYED_PROPERTY;
2460  }
2461 
2462  switch (assign_type) {
2463  case VARIABLE: {
2464  Variable* var = expr->AsVariableProxy()->var();
2465  EffectContext context(this);
2466  EmitVariableAssignment(var, Token::ASSIGN);
2467  break;
2468  }
2469  case NAMED_PROPERTY: {
2470  __ push(result_register()); // Preserve value.
2471  VisitForAccumulatorValue(prop->obj());
2472  __ mov(StoreDescriptor::ReceiverRegister(), result_register());
2473  __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2474  __ li(StoreDescriptor::NameRegister(),
2475  Operand(prop->key()->AsLiteral()->value()));
2476  CallStoreIC();
2477  break;
2478  }
2479  case KEYED_PROPERTY: {
2480  __ push(result_register()); // Preserve value.
2481  VisitForStackValue(prop->obj());
2482  VisitForAccumulatorValue(prop->key());
2483  __ mov(StoreDescriptor::NameRegister(), result_register());
2484  __ Pop(StoreDescriptor::ValueRegister(),
2485  StoreDescriptor::ReceiverRegister());
2486  Handle<Code> ic =
2487  CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2488  CallIC(ic);
2489  break;
2490  }
2491  }
2492  context()->Plug(v0);
2493 }
2494 
2495 
2496  void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2497  Variable* var, MemOperand location) {
2498  __ sw(result_register(), location);
2499  if (var->IsContextSlot()) {
2500  // RecordWrite may destroy all its register arguments.
2501  __ Move(a3, result_register());
2502  int offset = Context::SlotOffset(var->index());
2503  __ RecordWriteContextSlot(
2504  a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
2505  }
2506 }
2507 
2508 
2509  void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
2510  if (var->IsUnallocated()) {
2511  // Global var, const, or let.
2512  __ mov(StoreDescriptor::ValueRegister(), result_register());
2513  __ li(StoreDescriptor::NameRegister(), Operand(var->name()));
2514  __ lw(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2515  CallStoreIC();
2516 
2517  } else if (op == Token::INIT_CONST_LEGACY) {
2518  // Const initializers need a write barrier.
2519  DCHECK(!var->IsParameter()); // No const parameters.
2520  if (var->IsLookupSlot()) {
2521  __ li(a0, Operand(var->name()));
2522  __ Push(v0, cp, a0); // Context and name.
2523  __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2524  } else {
2525  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2526  Label skip;
2527  MemOperand location = VarOperand(var, a1);
2528  __ lw(a2, location);
2529  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2530  __ Branch(&skip, ne, a2, Operand(at));
2531  EmitStoreToStackLocalOrContextSlot(var, location);
2532  __ bind(&skip);
2533  }
2534 
2535  } else if (var->mode() == LET && op != Token::INIT_LET) {
2536  // Non-initializing assignment to let variable needs a write barrier.
2537  DCHECK(!var->IsLookupSlot());
2538  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2539  Label assign;
2540  MemOperand location = VarOperand(var, a1);
2541  __ lw(a3, location);
2542  __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2543  __ Branch(&assign, ne, a3, Operand(t0));
2544  __ li(a3, Operand(var->name()));
2545  __ push(a3);
2546  __ CallRuntime(Runtime::kThrowReferenceError, 1);
2547  // Perform the assignment.
2548  __ bind(&assign);
2549  EmitStoreToStackLocalOrContextSlot(var, location);
2550 
2551  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2552  if (var->IsLookupSlot()) {
2553  // Assignment to var.
2554  __ li(a1, Operand(var->name()));
2555  __ li(a0, Operand(Smi::FromInt(strict_mode())));
2556  __ Push(v0, cp, a1, a0); // Value, context, name, strict mode.
2557  __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2558  } else {
2559  // Assignment to var or initializing assignment to let/const in harmony
2560  // mode.
2561  DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2562  MemOperand location = VarOperand(var, a1);
2563  if (generate_debug_code_ && op == Token::INIT_LET) {
2564  // Check for an uninitialized let binding.
2565  __ lw(a2, location);
2566  __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2567  __ Check(eq, kLetBindingReInitialization, a2, Operand(t0));
2568  }
2569  EmitStoreToStackLocalOrContextSlot(var, location);
2570  }
2571  }
2572  // Non-initializing assignments to consts are ignored.
2573 }
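// --- Editor's sketch (added illustration, not part of the original file) ---
// The hole check for `let` above, in plain C++: a non-initializing store to
// a binding that still holds the-hole must throw a ReferenceError instead
// of writing the value.
static bool LetAssignmentThrowsSketch(const void* slot_value,
                                      const void* the_hole_sentinel) {
  return slot_value == the_hole_sentinel;  // -> Runtime::kThrowReferenceError
}
// ---------------------------------------------------------------------------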
2574 
2575 
2576 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2577  // Assignment to a property, using a named store IC.
2578  Property* prop = expr->target()->AsProperty();
2579  DCHECK(prop != NULL);
2580  DCHECK(prop->key()->IsLiteral());
2581 
2582  // Record source code position before IC call.
2583  SetSourcePosition(expr->position());
2584  __ mov(StoreDescriptor::ValueRegister(), result_register());
2585  __ li(StoreDescriptor::NameRegister(),
2586  Operand(prop->key()->AsLiteral()->value()));
2587  __ pop(StoreDescriptor::ReceiverRegister());
2588  CallStoreIC(expr->AssignmentFeedbackId());
2589 
2590  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2591  context()->Plug(v0);
2592 }
2593 
2594 
2595 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2596  // Assignment to a property, using a keyed store IC.
2597 
2598  // Record source code position before IC call.
2599  SetSourcePosition(expr->position());
2600  // Call keyed store IC.
2601  // The arguments are:
2602  // - a0 is the value,
2603  // - a1 is the key,
2604  // - a2 is the receiver.
2605  __ mov(StoreDescriptor::ValueRegister(), result_register());
2606  __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
2607  DCHECK(StoreDescriptor::ValueRegister().is(a0));
2608 
2609  Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2610  CallIC(ic, expr->AssignmentFeedbackId());
2611 
2612  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2613  context()->Plug(v0);
2614 }
2615 
2616 
2617 void FullCodeGenerator::VisitProperty(Property* expr) {
2618  Comment cmnt(masm_, "[ Property");
2619  Expression* key = expr->key();
2620 
2621  if (key->IsPropertyName()) {
2622  if (!expr->IsSuperAccess()) {
2623  VisitForAccumulatorValue(expr->obj());
2624  __ Move(LoadDescriptor::ReceiverRegister(), v0);
2625  EmitNamedPropertyLoad(expr);
2626  } else {
2627  EmitNamedSuperPropertyLoad(expr);
2628  }
2629  PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2630  context()->Plug(v0);
2631  } else {
2632  VisitForStackValue(expr->obj());
2633  VisitForAccumulatorValue(expr->key());
2634  __ Move(LoadDescriptor::NameRegister(), v0);
2635  __ pop(LoadDescriptor::ReceiverRegister());
2636  EmitKeyedPropertyLoad(expr);
2637  context()->Plug(v0);
2638  }
2639 }
2640 
2641 
2642 void FullCodeGenerator::CallIC(Handle<Code> code,
2643  TypeFeedbackId id) {
2644  ic_total_count_++;
2645  __ Call(code, RelocInfo::CODE_TARGET, id);
2646 }
2647 
2648 
2649 // Code common for calls using the IC.
2650 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2651  Expression* callee = expr->expression();
2652 
2653  CallICState::CallType call_type =
2654  callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2655 
2656  // Get the target function.
2657  if (call_type == CallICState::FUNCTION) {
2658  { StackValueContext context(this);
2659  EmitVariableLoad(callee->AsVariableProxy());
2660  PrepareForBailout(callee, NO_REGISTERS);
2661  }
2662  // Push undefined as receiver. This is patched in the method prologue if it
2663  // is a sloppy mode method.
2664  __ Push(isolate()->factory()->undefined_value());
2665  } else {
2666  // Load the function from the receiver.
2667  DCHECK(callee->IsProperty());
2668  DCHECK(!callee->AsProperty()->IsSuperAccess());
2669  __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2670  EmitNamedPropertyLoad(callee->AsProperty());
2671  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2672  // Push the target function under the receiver.
2673  __ lw(at, MemOperand(sp, 0));
2674  __ push(at);
2675  __ sw(v0, MemOperand(sp, kPointerSize));
2676  }
2677 
2678  EmitCall(expr, call_type);
2679 }
2680 
2681 
2682  void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2683  Expression* callee = expr->expression();
2684  DCHECK(callee->IsProperty());
2685  Property* prop = callee->AsProperty();
2686  DCHECK(prop->IsSuperAccess());
2687 
2688  SetSourcePosition(prop->position());
2689  Literal* key = prop->key()->AsLiteral();
2690  DCHECK(!key->value()->IsSmi());
2691  // Load the function from the receiver.
2692  const Register scratch = a1;
2693  SuperReference* super_ref = prop->obj()->AsSuperReference();
2694  EmitLoadHomeObject(super_ref);
2695  __ Push(v0);
2696  VisitForAccumulatorValue(super_ref->this_var());
2697  __ Push(v0);
2698  __ lw(scratch, MemOperand(sp, kPointerSize));
2699  __ Push(scratch, v0);
2700  __ Push(key->value());
2701 
2702  // Stack here:
2703  // - home_object
2704  // - this (receiver)
2705  // - home_object <-- LoadFromSuper will pop here and below.
2706  // - this (receiver)
2707  // - key
2708  __ CallRuntime(Runtime::kLoadFromSuper, 3);
2709 
2710  // Replace home_object with target function.
2711  __ sw(v0, MemOperand(sp, kPointerSize));
2712 
2713  // Stack here:
2714  // - target function
2715  // - this (receiver)
2716  EmitCall(expr, CallICState::METHOD);
2717 }
2718 
2719 
2720 // Code common for calls using the IC.
2721  void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2722  Expression* key) {
2723  // Load the key.
2724  VisitForAccumulatorValue(key);
2725 
2726  Expression* callee = expr->expression();
2727 
2728  // Load the function from the receiver.
2729  DCHECK(callee->IsProperty());
2730  __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2731  __ Move(LoadDescriptor::NameRegister(), v0);
2732  EmitKeyedPropertyLoad(callee->AsProperty());
2733  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2734 
2735  // Push the target function under the receiver.
2736  __ lw(at, MemOperand(sp, 0));
2737  __ push(at);
2738  __ sw(v0, MemOperand(sp, kPointerSize));
2739 
2740  EmitCall(expr, CallICState::METHOD);
2741 }
2742 
2743 
2744 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2745  // Load the arguments.
2746  ZoneList<Expression*>* args = expr->arguments();
2747  int arg_count = args->length();
2748  { PreservePositionScope scope(masm()->positions_recorder());
2749  for (int i = 0; i < arg_count; i++) {
2750  VisitForStackValue(args->at(i));
2751  }
2752  }
2753 
2754  // Record source position of the IC call.
2755  SetSourcePosition(expr->position());
2756  Handle<Code> ic = CallIC::initialize_stub(
2757  isolate(), arg_count, call_type);
2758  __ li(a3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
2759  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2760  // Don't assign a type feedback id to the IC, since type feedback is provided
2761  // by the vector above.
2762  CallIC(ic);
2763 
2764  RecordJSReturnSite(expr);
2765  // Restore context register.
2766  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2767  context()->DropAndPlug(1, v0);
2768 }
2769 
2770 
2771  void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2772  // t2: copy of the first argument or undefined if it doesn't exist.
2773  if (arg_count > 0) {
2774  __ lw(t2, MemOperand(sp, arg_count * kPointerSize));
2775  } else {
2776  __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
2777  }
2778 
2779  // t1: the receiver of the enclosing function.
2780  int receiver_offset = 2 + info_->scope()->num_parameters();
2781  __ lw(t1, MemOperand(fp, receiver_offset * kPointerSize));
2782 
2783  // t0: the strict mode.
2784  __ li(t0, Operand(Smi::FromInt(strict_mode())));
2785 
2786  // a1: the start position of the scope the call resides in.
2787  __ li(a1, Operand(Smi::FromInt(scope()->start_position())));
2788 
2789  // Do the runtime call.
2790  __ Push(t2, t1, t0, a1);
2791  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2792 }
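// --- Editor's sketch (added illustration, not part of the original file) ---
// The five arguments handed to Runtime::kResolvePossiblyDirectEval above:
// the callee pushed by the caller plus the four values pushed here. Field
// names are invented.
struct ResolveEvalArgsSketch {
  void* function;        // copy of the function below the arguments
  void* first_argument;  // or undefined when arg_count == 0 (t2)
  void* receiver;        // the enclosing function's receiver (t1)
  int strict_mode_smi;   // t0
  int scope_position;    // scope start position as a smi (a1)
};
// ---------------------------------------------------------------------------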
2793 
2794 
2795 void FullCodeGenerator::VisitCall(Call* expr) {
2796 #ifdef DEBUG
2797  // We want to verify that RecordJSReturnSite gets called on all paths
2798  // through this function. Avoid early returns.
2799  expr->return_is_recorded_ = false;
2800 #endif
2801 
2802  Comment cmnt(masm_, "[ Call");
2803  Expression* callee = expr->expression();
2804  Call::CallType call_type = expr->GetCallType(isolate());
2805 
2806  if (call_type == Call::POSSIBLY_EVAL_CALL) {
2807  // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2808  // to resolve the function we need to call and the receiver of the
2809  // call. Then we call the resolved function using the given
2810  // arguments.
2811  ZoneList<Expression*>* args = expr->arguments();
2812  int arg_count = args->length();
2813 
2814  { PreservePositionScope pos_scope(masm()->positions_recorder());
2815  VisitForStackValue(callee);
2816  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2817  __ push(a2); // Reserved receiver slot.
2818 
2819  // Push the arguments.
2820  for (int i = 0; i < arg_count; i++) {
2821  VisitForStackValue(args->at(i));
2822  }
2823 
2824  // Push a copy of the function (found below the arguments) and
2825  // resolve eval.
2826  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2827  __ push(a1);
2828  EmitResolvePossiblyDirectEval(arg_count);
2829 
2830  // The runtime call returns a pair of values in v0 (function) and
2831  // v1 (receiver). Touch up the stack with the right values.
2832  __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2833  __ sw(v1, MemOperand(sp, arg_count * kPointerSize));
2834  }
2835  // Record source position for debugger.
2836  SetSourcePosition(expr->position());
2837  CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
2838  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2839  __ CallStub(&stub);
2840  RecordJSReturnSite(expr);
2841  // Restore context register.
2842  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2843  context()->DropAndPlug(1, v0);
2844  } else if (call_type == Call::GLOBAL_CALL) {
2845  EmitCallWithLoadIC(expr);
2846  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2847  // Call to a lookup slot (dynamically introduced variable).
2848  VariableProxy* proxy = callee->AsVariableProxy();
2849  Label slow, done;
2850 
2851  { PreservePositionScope scope(masm()->positions_recorder());
2852  // Generate code for loading from variables potentially shadowed
2853  // by eval-introduced variables.
2854  EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
2855  }
2856 
2857  __ bind(&slow);
2858  // Call the runtime to find the function to call (returned in v0)
2859  // and the object holding it (returned in v1).
2860  DCHECK(!context_register().is(a2));
2861  __ li(a2, Operand(proxy->name()));
2862  __ Push(context_register(), a2);
2863  __ CallRuntime(Runtime::kLoadLookupSlot, 2);
2864  __ Push(v0, v1); // Function, receiver.
2865 
2866  // If fast case code has been generated, emit code to push the
2867  // function and receiver and have the slow path jump around this
2868  // code.
2869  if (done.is_linked()) {
2870  Label call;
2871  __ Branch(&call);
2872  __ bind(&done);
2873  // Push function.
2874  __ push(v0);
2875  // The receiver is implicitly the global receiver. Indicate this
2876  // by passing undefined to the call function stub.
2877  __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2878  __ push(a1);
2879  __ bind(&call);
2880  }
2881 
2882  // The receiver is either the global receiver or an object found
2883  // by LoadContextSlot.
2884  EmitCall(expr);
2885  } else if (call_type == Call::PROPERTY_CALL) {
2886  Property* property = callee->AsProperty();
2887  bool is_named_call = property->key()->IsPropertyName();
2888  // super.x() is handled in EmitCallWithLoadIC.
2889  if (property->IsSuperAccess() && is_named_call) {
2890  EmitSuperCallWithLoadIC(expr);
2891  } else {
2892  {
2893  PreservePositionScope scope(masm()->positions_recorder());
2894  VisitForStackValue(property->obj());
2895  }
2896  if (is_named_call) {
2897  EmitCallWithLoadIC(expr);
2898  } else {
2899  EmitKeyedCallWithLoadIC(expr, property->key());
2900  }
2901  }
2902  } else {
2903  DCHECK(call_type == Call::OTHER_CALL);
2904  // Call to an arbitrary expression not handled specially above.
2905  { PreservePositionScope scope(masm()->positions_recorder());
2906  VisitForStackValue(callee);
2907  }
2908  __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2909  __ push(a1);
2910  // Emit function call.
2911  EmitCall(expr);
2912  }
2913 
2914 #ifdef DEBUG
2915  // RecordJSReturnSite should have been called.
2916  DCHECK(expr->return_is_recorded_);
2917 #endif
2918 }
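// --- Editor's sketch (added illustration, not part of the original file) ---
// The call kinds VisitCall dispatches on, one enumerator per branch above.
enum class CallKindSketch {
  kPossiblyEval,  // resolve callee and receiver through the runtime first
  kGlobal,        // load the function with a named load IC
  kLookupSlot,    // dynamic lookup, possibly shadowed by eval
  kProperty,      // named or keyed member call (incl. super.x())
  kOther          // arbitrary expression; receiver is undefined
};
// ---------------------------------------------------------------------------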
2919 
2920 
2921 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2922  Comment cmnt(masm_, "[ CallNew");
2923  // According to ECMA-262, section 11.2.2, page 44, the function
2924  // expression in new calls must be evaluated before the
2925  // arguments.
2926 
2927  // Push constructor on the stack. If it's not a function it's used as
2928  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2929  // ignored.
2930  VisitForStackValue(expr->expression());
2931 
2932  // Push the arguments ("left-to-right") on the stack.
2933  ZoneList<Expression*>* args = expr->arguments();
2934  int arg_count = args->length();
2935  for (int i = 0; i < arg_count; i++) {
2936  VisitForStackValue(args->at(i));
2937  }
2938 
2939  // Call the construct call builtin that handles allocation and
2940  // constructor invocation.
2941  SetSourcePosition(expr->position());
2942 
2943  // Load function and argument count into a1 and a0.
2944  __ li(a0, Operand(arg_count));
2945  __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
2946 
2947  // Record call targets in unoptimized code.
2948  if (FLAG_pretenuring_call_new) {
2949  EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
2950  DCHECK(expr->AllocationSiteFeedbackSlot() ==
2951  expr->CallNewFeedbackSlot() + 1);
2952  }
2953 
2954  __ li(a2, FeedbackVector());
2955  __ li(a3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));
2956 
2957  CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
2958  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2959  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2960  context()->Plug(v0);
2961 }
2962 
2963 
2964 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2965  ZoneList<Expression*>* args = expr->arguments();
2966  DCHECK(args->length() == 1);
2967 
2968  VisitForAccumulatorValue(args->at(0));
2969 
2970  Label materialize_true, materialize_false;
2971  Label* if_true = NULL;
2972  Label* if_false = NULL;
2973  Label* fall_through = NULL;
2974  context()->PrepareTest(&materialize_true, &materialize_false,
2975  &if_true, &if_false, &fall_through);
2976 
2977  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2978  __ SmiTst(v0, t0);
2979  Split(eq, t0, Operand(zero_reg), if_true, if_false, fall_through);
2980 
2981  context()->Plug(if_true, if_false);
2982 }
2983 
2984 
2985 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2986  ZoneList<Expression*>* args = expr->arguments();
2987  DCHECK(args->length() == 1);
2988 
2989  VisitForAccumulatorValue(args->at(0));
2990 
2991  Label materialize_true, materialize_false;
2992  Label* if_true = NULL;
2993  Label* if_false = NULL;
2994  Label* fall_through = NULL;
2995  context()->PrepareTest(&materialize_true, &materialize_false,
2996  &if_true, &if_false, &fall_through);
2997 
2998  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2999  __ NonNegativeSmiTst(v0, at);
3000  Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through);
3001 
3002  context()->Plug(if_true, if_false);
3003 }
3004 
3005 
3006 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3007  ZoneList<Expression*>* args = expr->arguments();
3008  DCHECK(args->length() == 1);
3009 
3010  VisitForAccumulatorValue(args->at(0));
3011 
3012  Label materialize_true, materialize_false;
3013  Label* if_true = NULL;
3014  Label* if_false = NULL;
3015  Label* fall_through = NULL;
3016  context()->PrepareTest(&materialize_true, &materialize_false,
3017  &if_true, &if_false, &fall_through);
3018 
3019  __ JumpIfSmi(v0, if_false);
3020  __ LoadRoot(at, Heap::kNullValueRootIndex);
3021  __ Branch(if_true, eq, v0, Operand(at));
3022  __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
3023  // Undetectable objects behave like undefined when tested with typeof.
3024  __ lbu(a1, FieldMemOperand(a2, Map::kBitFieldOffset));
3025  __ And(at, a1, Operand(1 << Map::kIsUndetectable));
3026  __ Branch(if_false, ne, at, Operand(zero_reg));
3027  __ lbu(a1, FieldMemOperand(a2, Map::kInstanceTypeOffset));
3028  __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
3029  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3030  Split(le, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE),
3031  if_true, if_false, fall_through);
3032 
3033  context()->Plug(if_true, if_false);
3034 }
3035 
3036 
3037 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3038  ZoneList<Expression*>* args = expr->arguments();
3039  DCHECK(args->length() == 1);
3040 
3041  VisitForAccumulatorValue(args->at(0));
3042 
3043  Label materialize_true, materialize_false;
3044  Label* if_true = NULL;
3045  Label* if_false = NULL;
3046  Label* fall_through = NULL;
3047  context()->PrepareTest(&materialize_true, &materialize_false,
3048  &if_true, &if_false, &fall_through);
3049 
3050  __ JumpIfSmi(v0, if_false);
3051  __ GetObjectType(v0, a1, a1);
3052  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3053  Split(ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE),
3054  if_true, if_false, fall_through);
3055 
3056  context()->Plug(if_true, if_false);
3057 }
3058 
3059 
3060 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3061  ZoneList<Expression*>* args = expr->arguments();
3062  DCHECK(args->length() == 1);
3063 
3064  VisitForAccumulatorValue(args->at(0));
3065 
3066  Label materialize_true, materialize_false;
3067  Label* if_true = NULL;
3068  Label* if_false = NULL;
3069  Label* fall_through = NULL;
3070  context()->PrepareTest(&materialize_true, &materialize_false,
3071  &if_true, &if_false, &fall_through);
3072 
3073  __ JumpIfSmi(v0, if_false);
3074  __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
3075  __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
3076  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3077  __ And(at, a1, Operand(1 << Map::kIsUndetectable));
3078  Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through);
3079 
3080  context()->Plug(if_true, if_false);
3081 }
3082 
3083 
3084 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3085  CallRuntime* expr) {
3086  ZoneList<Expression*>* args = expr->arguments();
3087  DCHECK(args->length() == 1);
3088 
3089  VisitForAccumulatorValue(args->at(0));
3090 
3091  Label materialize_true, materialize_false, skip_lookup;
3092  Label* if_true = NULL;
3093  Label* if_false = NULL;
3094  Label* fall_through = NULL;
3095  context()->PrepareTest(&materialize_true, &materialize_false,
3096  &if_true, &if_false, &fall_through);
3097 
3098  __ AssertNotSmi(v0);
3099 
3100  __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
3101  __ lbu(t0, FieldMemOperand(a1, Map::kBitField2Offset));
3102  __ And(t0, t0, 1 << Map::kStringWrapperSafeForDefaultValueOf);
3103  __ Branch(&skip_lookup, ne, t0, Operand(zero_reg));
3104 
3105  // Check for fast case object. Generate false result for slow case object.
3106  __ lw(a2, FieldMemOperand(v0, JSObject::kPropertiesOffset));
3107  __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
3108  __ LoadRoot(t0, Heap::kHashTableMapRootIndex);
3109  __ Branch(if_false, eq, a2, Operand(t0));
3110 
3111  // Look for valueOf name in the descriptor array, and indicate false if
3112  // found. Since we omit an enumeration index check, if it is added via a
3113  // transition that shares its descriptor array, this is a false positive.
3114  Label entry, loop, done;
3115 
3116  // Skip loop if no descriptors are valid.
3117  __ NumberOfOwnDescriptors(a3, a1);
3118  __ Branch(&done, eq, a3, Operand(zero_reg));
3119 
3120  __ LoadInstanceDescriptors(a1, t0);
3121  // t0: descriptor array.
3122  // a3: valid entries in the descriptor array.
3123  STATIC_ASSERT(kSmiTag == 0);
3124  STATIC_ASSERT(kSmiTagSize == 1);
3125  STATIC_ASSERT(kPointerSize == 4);
3126  __ li(at, Operand(DescriptorArray::kDescriptorSize));
3127  __ Mul(a3, a3, at);
3128  // Calculate location of the first key name.
3129  __ Addu(t0, t0, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3130  // Calculate the end of the descriptor array.
3131  __ mov(a2, t0);
3132  __ sll(t1, a3, kPointerSizeLog2);
3133  __ Addu(a2, a2, t1);
3134 
3135  // Loop through all the keys in the descriptor array. If one of these is the
3136  // string "valueOf" the result is false.
3137  // The use of t2 to store the valueOf string assumes that it is not otherwise
3138  // used in the loop below.
3139  __ li(t2, Operand(isolate()->factory()->value_of_string()));
3140  __ jmp(&entry);
3141  __ bind(&loop);
3142  __ lw(a3, MemOperand(t0, 0));
3143  __ Branch(if_false, eq, a3, Operand(t2));
3144  __ Addu(t0, t0, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3145  __ bind(&entry);
3146  __ Branch(&loop, ne, t0, Operand(a2));
3147 
3148  __ bind(&done);
3149 
3150  // Set the bit in the map to indicate that there is no local valueOf field.
3151  __ lbu(a2, FieldMemOperand(a1, Map::kBitField2Offset));
3152  __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3153  __ sb(a2, FieldMemOperand(a1, Map::kBitField2Offset));
3154 
3155  __ bind(&skip_lookup);
3156 
3157  // If a valueOf property is not found on the object check that its
3158  // prototype is the un-modified String prototype. If not, the result is false.
3159  __ lw(a2, FieldMemOperand(a1, Map::kPrototypeOffset));
3160  __ JumpIfSmi(a2, if_false);
3161  __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
3162  __ lw(a3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3163  __ lw(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset));
3164  __ lw(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3165  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3166  Split(eq, a2, Operand(a3), if_true, if_false, fall_through);
3167 
3168  context()->Plug(if_true, if_false);
3169 }
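// --- Editor's sketch (added illustration, not part of the original file) ---
// Why the descriptor loop above can compare with a single `eq` branch:
// internalized strings are unique per isolate, so equality with the
// canonical "valueOf" string is plain pointer equality.
static bool SafeForDefaultValueOfSketch(const void* const* keys, int count,
                                        const void* value_of_string) {
  for (int i = 0; i < count; i++) {
    if (keys[i] == value_of_string) return false;  // Branch(if_false, eq, ...)
  }
  return true;  // still subject to the prototype check that follows
}
// ---------------------------------------------------------------------------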
3170 
3171 
3172 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3173  ZoneList<Expression*>* args = expr->arguments();
3174  DCHECK(args->length() == 1);
3175 
3176  VisitForAccumulatorValue(args->at(0));
3177 
3178  Label materialize_true, materialize_false;
3179  Label* if_true = NULL;
3180  Label* if_false = NULL;
3181  Label* fall_through = NULL;
3182  context()->PrepareTest(&materialize_true, &materialize_false,
3183  &if_true, &if_false, &fall_through);
3184 
3185  __ JumpIfSmi(v0, if_false);
3186  __ GetObjectType(v0, a1, a2);
3187  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3188  __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE));
3189  __ Branch(if_false);
3190 
3191  context()->Plug(if_true, if_false);
3192 }
3193 
3194 
3195 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3196  ZoneList<Expression*>* args = expr->arguments();
3197  DCHECK(args->length() == 1);
3198 
3199  VisitForAccumulatorValue(args->at(0));
3200 
3201  Label materialize_true, materialize_false;
3202  Label* if_true = NULL;
3203  Label* if_false = NULL;
3204  Label* fall_through = NULL;
3205  context()->PrepareTest(&materialize_true, &materialize_false,
3206  &if_true, &if_false, &fall_through);
3207 
3208  __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3209  __ lw(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3210  __ lw(a1, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3211  __ li(t0, 0x80000000);
3212  Label not_nan;
3213  __ Branch(&not_nan, ne, a2, Operand(t0));
3214  __ mov(t0, zero_reg);
3215  __ mov(a2, a1);
3216  __ bind(&not_nan);
3217 
3218  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3219  Split(eq, a2, Operand(t0), if_true, if_false, fall_through);
3220 
3221  context()->Plug(if_true, if_false);
3222 }
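// --- Editor's sketch (added illustration, not part of the original file) ---
// The bit pattern behind the test above: -0.0 is the only double whose high
// (sign/exponent) word is 0x80000000 with a zero mantissa word. The emitted
// code folds the comparison by substituting the mantissa word when the
// exponent word matches.
static bool IsMinusZeroSketch(unsigned exponent_word, unsigned mantissa_word) {
  return exponent_word == 0x80000000u && mantissa_word == 0u;
}
// ---------------------------------------------------------------------------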
3223 
3224 
3225 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3226  ZoneList<Expression*>* args = expr->arguments();
3227  DCHECK(args->length() == 1);
3228 
3229  VisitForAccumulatorValue(args->at(0));
3230 
3231  Label materialize_true, materialize_false;
3232  Label* if_true = NULL;
3233  Label* if_false = NULL;
3234  Label* fall_through = NULL;
3235  context()->PrepareTest(&materialize_true, &materialize_false,
3236  &if_true, &if_false, &fall_through);
3237 
3238  __ JumpIfSmi(v0, if_false);
3239  __ GetObjectType(v0, a1, a1);
3240  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3241  Split(eq, a1, Operand(JS_ARRAY_TYPE),
3242  if_true, if_false, fall_through);
3243 
3244  context()->Plug(if_true, if_false);
3245 }
3246 
3247 
3248 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3249  ZoneList<Expression*>* args = expr->arguments();
3250  DCHECK(args->length() == 1);
3251 
3252  VisitForAccumulatorValue(args->at(0));
3253 
3254  Label materialize_true, materialize_false;
3255  Label* if_true = NULL;
3256  Label* if_false = NULL;
3257  Label* fall_through = NULL;
3258  context()->PrepareTest(&materialize_true, &materialize_false,
3259  &if_true, &if_false, &fall_through);
3260 
3261  __ JumpIfSmi(v0, if_false);
3262  __ GetObjectType(v0, a1, a1);
3263  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3264  Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);
3265 
3266  context()->Plug(if_true, if_false);
3267 }
3268 
3269 
3270 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3271  DCHECK(expr->arguments()->length() == 0);
3272 
3273  Label materialize_true, materialize_false;
3274  Label* if_true = NULL;
3275  Label* if_false = NULL;
3276  Label* fall_through = NULL;
3277  context()->PrepareTest(&materialize_true, &materialize_false,
3278  &if_true, &if_false, &fall_through);
3279 
3280  // Get the frame pointer for the calling frame.
3281  __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3282 
3283  // Skip the arguments adaptor frame if it exists.
3284  Label check_frame_marker;
3285  __ lw(a1, MemOperand(a2, StandardFrameConstants::kContextOffset));
3286  __ Branch(&check_frame_marker, ne,
3287  a1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3288  __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
3289 
3290  // Check the marker in the calling frame.
3291  __ bind(&check_frame_marker);
3292  __ lw(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset));
3293  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3294  Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)),
3295  if_true, if_false, fall_through);
3296 
3297  context()->Plug(if_true, if_false);
3298 }
3299 
3300 
3301 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3302  ZoneList<Expression*>* args = expr->arguments();
3303  DCHECK(args->length() == 2);
3304 
3305  // Load the two objects into registers and perform the comparison.
3306  VisitForStackValue(args->at(0));
3307  VisitForAccumulatorValue(args->at(1));
3308 
3309  Label materialize_true, materialize_false;
3310  Label* if_true = NULL;
3311  Label* if_false = NULL;
3312  Label* fall_through = NULL;
3313  context()->PrepareTest(&materialize_true, &materialize_false,
3314  &if_true, &if_false, &fall_through);
3315 
3316  __ pop(a1);
3317  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3318  Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
3319 
3320  context()->Plug(if_true, if_false);
3321 }
3322 
3323 
3324 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3325  ZoneList<Expression*>* args = expr->arguments();
3326  DCHECK(args->length() == 1);
3327 
3328  // ArgumentsAccessStub expects the key in a1 and the formal
3329  // parameter count in a0.
3330  VisitForAccumulatorValue(args->at(0));
3331  __ mov(a1, v0);
3332  __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3333  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3334  __ CallStub(&stub);
3335  context()->Plug(v0);
3336 }
3337 
3338 
3339 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3340  DCHECK(expr->arguments()->length() == 0);
3341  Label exit;
3342  // Get the number of formal parameters.
3343  __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3344 
3345  // Check if the calling frame is an arguments adaptor frame.
3346  __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3347  __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
3348  __ Branch(&exit, ne, a3,
3349  Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3350 
3351  // Arguments adaptor case: Read the arguments length from the
3352  // adaptor frame.
3353  __ lw(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
3354 
3355  __ bind(&exit);
3356  context()->Plug(v0);
3357 }
3358 
3359 
3360 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3361  ZoneList<Expression*>* args = expr->arguments();
3362  DCHECK(args->length() == 1);
3363  Label done, null, function, non_function_constructor;
3364 
3365  VisitForAccumulatorValue(args->at(0));
3366 
3367  // If the object is a smi, we return null.
3368  __ JumpIfSmi(v0, &null);
3369 
3370  // Check that the object is a JS object but take special care of JS
3371  // functions to make sure they have 'Function' as their class.
3372  // Assume that there are only two callable types, and one of them is at
3373  // either end of the type range for JS object types. Saves extra comparisons.
3374  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3375  __ GetObjectType(v0, v0, a1); // Map is now in v0.
3376  __ Branch(&null, lt, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
3377 
3378  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3379  FIRST_SPEC_OBJECT_TYPE + 1);
3380  __ Branch(&function, eq, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
3381 
3382  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3383  LAST_SPEC_OBJECT_TYPE - 1);
3384  __ Branch(&function, eq, a1, Operand(LAST_SPEC_OBJECT_TYPE));
3385  // Assume that there is no larger type.
3386  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3387 
3388  // Check if the constructor in the map is a JS function.
3389  __ lw(v0, FieldMemOperand(v0, Map::kConstructorOffset));
3390  __ GetObjectType(v0, a1, a1);
3391  __ Branch(&non_function_constructor, ne, a1, Operand(JS_FUNCTION_TYPE));
3392 
3393  // v0 now contains the constructor function. Grab the
3394  // instance class name from there.
3395  __ lw(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
3396  __ lw(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
3397  __ Branch(&done);
3398 
3399  // Functions have class 'Function'.
3400  __ bind(&function);
3401  __ LoadRoot(v0, Heap::kFunction_stringRootIndex);
3402  __ jmp(&done);
3403 
3404  // Objects with a non-function constructor have class 'Object'.
3405  __ bind(&non_function_constructor);
3406  __ LoadRoot(v0, Heap::kObject_stringRootIndex);
3407  __ jmp(&done);
3408 
3409  // Non-JS objects have class null.
3410  __ bind(&null);
3411  __ LoadRoot(v0, Heap::kNullValueRootIndex);
3412 
3413  // All done.
3414  __ bind(&done);
3415 
3416  context()->Plug(v0);
3417 }
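// --- Editor's sketch (added illustration, not part of the original file) ---
// The decision tree of EmitClassOf condensed into C++; a NULL return stands
// for the `null` result.
static const char* ClassOfSketch(bool is_smi, bool is_spec_object,
                                 bool is_callable, bool ctor_is_function,
                                 const char* instance_class_name) {
  if (is_smi || !is_spec_object) return NULL;  // null
  if (is_callable) return "Function";
  return ctor_is_function ? instance_class_name : "Object";
}
// ---------------------------------------------------------------------------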
3418 
3419 
3420 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3421  // Load the arguments on the stack and call the stub.
3422  SubStringStub stub(isolate());
3423  ZoneList<Expression*>* args = expr->arguments();
3424  DCHECK(args->length() == 3);
3425  VisitForStackValue(args->at(0));
3426  VisitForStackValue(args->at(1));
3427  VisitForStackValue(args->at(2));
3428  __ CallStub(&stub);
3429  context()->Plug(v0);
3430 }
3431 
3432 
3433 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3434  // Load the arguments on the stack and call the stub.
3435  RegExpExecStub stub(isolate());
3436  ZoneList<Expression*>* args = expr->arguments();
3437  DCHECK(args->length() == 4);
3438  VisitForStackValue(args->at(0));
3439  VisitForStackValue(args->at(1));
3440  VisitForStackValue(args->at(2));
3441  VisitForStackValue(args->at(3));
3442  __ CallStub(&stub);
3443  context()->Plug(v0);
3444 }
3445 
3446 
3447 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3448  ZoneList<Expression*>* args = expr->arguments();
3449  DCHECK(args->length() == 1);
3450 
3451  VisitForAccumulatorValue(args->at(0)); // Load the object.
3452 
3453  Label done;
3454  // If the object is a smi return the object.
3455  __ JumpIfSmi(v0, &done);
3456  // If the object is not a value type, return the object.
3457  __ GetObjectType(v0, a1, a1);
3458  __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));
3459 
3460  __ lw(v0, FieldMemOperand(v0, JSValue::kValueOffset));
3461 
3462  __ bind(&done);
3463  context()->Plug(v0);
3464 }
3465 
3466 
3467 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3468  ZoneList<Expression*>* args = expr->arguments();
3469  DCHECK(args->length() == 2);
3470  DCHECK_NE(NULL, args->at(1)->AsLiteral());
3471  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3472 
3473  VisitForAccumulatorValue(args->at(0)); // Load the object.
3474 
3475  Label runtime, done, not_date_object;
3476  Register object = v0;
3477  Register result = v0;
3478  Register scratch0 = t5;
3479  Register scratch1 = a1;
3480 
3481  __ JumpIfSmi(object, &not_date_object);
3482  __ GetObjectType(object, scratch1, scratch1);
3483  __ Branch(&not_date_object, ne, scratch1, Operand(JS_DATE_TYPE));
3484 
3485  if (index->value() == 0) {
3486  __ lw(result, FieldMemOperand(object, JSDate::kValueOffset));
3487  __ jmp(&done);
3488  } else {
3489  if (index->value() < JSDate::kFirstUncachedField) {
3490  ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3491  __ li(scratch1, Operand(stamp));
3492  __ lw(scratch1, MemOperand(scratch1));
3493  __ lw(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3494  __ Branch(&runtime, ne, scratch1, Operand(scratch0));
3495  __ lw(result, FieldMemOperand(object, JSDate::kValueOffset +
3496  kPointerSize * index->value()));
3497  __ jmp(&done);
3498  }
3499  __ bind(&runtime);
3500  __ PrepareCallCFunction(2, scratch1);
3501  __ li(a1, Operand(index));
3502  __ Move(a0, object);
3503  __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3504  __ jmp(&done);
3505  }
3506 
3507  __ bind(&not_date_object);
3508  __ CallRuntime(Runtime::kThrowNotDateError, 0);
3509  __ bind(&done);
3510  context()->Plug(v0);
3511 }
3512 
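// The fast path above hinges on JSDate's cache stamp: field 0 (the time
// value) is always safe to read directly, while cached fields
// (index < kFirstUncachedField) are trusted only while the object's stamp
// still equals the isolate-wide date_cache_stamp; a mismatch (e.g. after a
// timezone-relevant change invalidates the cache) falls through to the C
// helper, as does any uncached field.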
3513 
3514 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3515  ZoneList<Expression*>* args = expr->arguments();
3516  DCHECK_EQ(3, args->length());
3517 
3518  Register string = v0;
3519  Register index = a1;
3520  Register value = a2;
3521 
3522  VisitForStackValue(args->at(0)); // index
3523  VisitForStackValue(args->at(1)); // value
3524  VisitForAccumulatorValue(args->at(2)); // string
3525  __ Pop(index, value);
3526 
3527  if (FLAG_debug_code) {
3528  __ SmiTst(value, at);
3529  __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3530  __ SmiTst(index, at);
3531  __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3532  __ SmiUntag(index, index);
3533  static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3534  Register scratch = t5;
3535  __ EmitSeqStringSetCharCheck(
3536  string, index, value, scratch, one_byte_seq_type);
3537  __ SmiTag(index, index);
3538  }
3539 
3540  __ SmiUntag(value, value);
3541  __ Addu(at,
3542  string,
3543  Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3544  __ SmiUntag(index);
3545  __ Addu(at, at, index);
3546  __ sb(value, MemOperand(at));
3547  context()->Plug(string);
3548 }
3549 
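// The two Addu instructions above compute the element address by hand:
//   at = string + (SeqOneByteString::kHeaderSize - kHeapObjectTag) + index
// i.e. untag the heap pointer, skip past the string header, then add the
// (untagged) character index, since one-byte elements are one byte each.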
3550 
3551 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3552  ZoneList<Expression*>* args = expr->arguments();
3553  DCHECK_EQ(3, args->length());
3554 
3555  Register string = v0;
3556  Register index = a1;
3557  Register value = a2;
3558 
3559  VisitForStackValue(args->at(0)); // index
3560  VisitForStackValue(args->at(1)); // value
3561  VisitForAccumulatorValue(args->at(2)); // string
3562  __ Pop(index, value);
3563 
3564  if (FLAG_debug_code) {
3565  __ SmiTst(value, at);
3566  __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3567  __ SmiTst(index, at);
3568  __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3569  __ SmiUntag(index, index);
3570  static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3571  Register scratch = t5;
3572  __ EmitSeqStringSetCharCheck(
3573  string, index, value, scratch, two_byte_seq_type);
3574  __ SmiTag(index, index);
3575  }
3576 
3577  __ SmiUntag(value, value);
3578  __ Addu(at,
3579  string,
3580  Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3581  __ Addu(at, at, index);
3582  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
3583  __ sh(value, MemOperand(at));
3584  context()->Plug(string);
3585 }
3586 
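// Unlike the one-byte case, the index is deliberately left smi-tagged here:
// with kSmiTagSize == 1 and kSmiTag == 0, a smi is the integer shifted left
// by one, which is exactly the byte offset of a two-byte element, so the tag
// doubles as the *2 scaling. The STATIC_ASSERT above pins down that
// assumption.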
3587 
3588 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3589  // Load the arguments on the stack and call the runtime function.
3590  ZoneList<Expression*>* args = expr->arguments();
3591  DCHECK(args->length() == 2);
3592  VisitForStackValue(args->at(0));
3593  VisitForStackValue(args->at(1));
3594  MathPowStub stub(isolate(), MathPowStub::ON_STACK);
3595  __ CallStub(&stub);
3596  context()->Plug(v0);
3597 }
3598 
3599 
3600 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3601  ZoneList<Expression*>* args = expr->arguments();
3602  DCHECK(args->length() == 2);
3603 
3604  VisitForStackValue(args->at(0)); // Load the object.
3605  VisitForAccumulatorValue(args->at(1)); // Load the value.
3606  __ pop(a1); // v0 = value. a1 = object.
3607 
3608  Label done;
3609  // If the object is a smi, return the value.
3610  __ JumpIfSmi(a1, &done);
3611 
3612  // If the object is not a value type, return the value.
3613  __ GetObjectType(a1, a2, a2);
3614  __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE));
3615 
3616  // Store the value.
3617  __ sw(v0, FieldMemOperand(a1, JSValue::kValueOffset));
3618  // Update the write barrier. Save the value as it will be
3619  // overwritten by the write barrier code and is needed afterward.
3620  __ mov(a2, v0);
3621  __ RecordWriteField(
3622  a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
3623 
3624  __ bind(&done);
3625  context()->Plug(v0);
3626 }
3627 
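// RecordWriteField is the GC write barrier: after storing a possibly-young
// value into the (possibly old) JSValue wrapper, the slot must be recorded
// so the incremental/generational collector can still find the reference.
// The value is copied to a2 first precisely because the barrier clobbers its
// value register, as the comment above notes.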
3628 
3629 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3630  ZoneList<Expression*>* args = expr->arguments();
3631  DCHECK_EQ(args->length(), 1);
3632 
3633  // Load the argument into a0 and call the stub.
3634  VisitForAccumulatorValue(args->at(0));
3635  __ mov(a0, result_register());
3636 
3637  NumberToStringStub stub(isolate());
3638  __ CallStub(&stub);
3639  context()->Plug(v0);
3640 }
3641 
3642 
3643 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3644  ZoneList<Expression*>* args = expr->arguments();
3645  DCHECK(args->length() == 1);
3646 
3647  VisitForAccumulatorValue(args->at(0));
3648 
3649  Label done;
3650  StringCharFromCodeGenerator generator(v0, a1);
3651  generator.GenerateFast(masm_);
3652  __ jmp(&done);
3653 
3654  NopRuntimeCallHelper call_helper;
3655  generator.GenerateSlow(masm_, call_helper);
3656 
3657  __ bind(&done);
3658  context()->Plug(a1);
3659 }
3660 
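// Pattern used here and in the functions below: a generator object emits an
// inline fast path, then its deferred slow path after the jmp. For
// char-from-code the fast path roughly serves smi char codes answerable from
// the single-character string cache, while GenerateSlow calls into the
// runtime for everything else.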
3661 
3662 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3663  ZoneList<Expression*>* args = expr->arguments();
3664  DCHECK(args->length() == 2);
3665 
3666  VisitForStackValue(args->at(0));
3667  VisitForAccumulatorValue(args->at(1));
3668  __ mov(a0, result_register());
3669 
3670  Register object = a1;
3671  Register index = a0;
3672  Register result = v0;
3673 
3674  __ pop(object);
3675 
3676  Label need_conversion;
3677  Label index_out_of_range;
3678  Label done;
3679  StringCharCodeAtGenerator generator(object,
3680  index,
3681  result,
3682  &need_conversion,
3683  &need_conversion,
3684  &index_out_of_range,
3685  STRING_INDEX_IS_NUMBER);
3686  generator.GenerateFast(masm_);
3687  __ jmp(&done);
3688 
3689  __ bind(&index_out_of_range);
3690  // When the index is out of range, the spec requires us to return
3691  // NaN.
3692  __ LoadRoot(result, Heap::kNanValueRootIndex);
3693  __ jmp(&done);
3694 
3695  __ bind(&need_conversion);
3696  // Load the undefined value into the result register, which will
3697  // trigger conversion.
3698  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3699  __ jmp(&done);
3700 
3701  NopRuntimeCallHelper call_helper;
3702  generator.GenerateSlow(masm_, call_helper);
3703 
3704  __ bind(&done);
3705  context()->Plug(result);
3706 }
3707 
3708 
3709 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3710  ZoneList<Expression*>* args = expr->arguments();
3711  DCHECK(args->length() == 2);
3712 
3713  VisitForStackValue(args->at(0));
3714  VisitForAccumulatorValue(args->at(1));
3715  __ mov(a0, result_register());
3716 
3717  Register object = a1;
3718  Register index = a0;
3719  Register scratch = a3;
3720  Register result = v0;
3721 
3722  __ pop(object);
3723 
3724  Label need_conversion;
3725  Label index_out_of_range;
3726  Label done;
3727  StringCharAtGenerator generator(object,
3728  index,
3729  scratch,
3730  result,
3731  &need_conversion,
3732  &need_conversion,
3733  &index_out_of_range,
3734  STRING_INDEX_IS_NUMBER);
3735  generator.GenerateFast(masm_);
3736  __ jmp(&done);
3737 
3738  __ bind(&index_out_of_range);
3739  // When the index is out of range, the spec requires us to return
3740  // the empty string.
3741  __ LoadRoot(result, Heap::kempty_stringRootIndex);
3742  __ jmp(&done);
3743 
3744  __ bind(&need_conversion);
3745  // Move smi zero into the result register, which will trigger
3746  // conversion.
3747  __ li(result, Operand(Smi::FromInt(0)));
3748  __ jmp(&done);
3749 
3750  NopRuntimeCallHelper call_helper;
3751  generator.GenerateSlow(masm_, call_helper);
3752 
3753  __ bind(&done);
3754  context()->Plug(result);
3755 }
3756 
3757 
3758 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3759  ZoneList<Expression*>* args = expr->arguments();
3760  DCHECK_EQ(2, args->length());
3761  VisitForStackValue(args->at(0));
3762  VisitForAccumulatorValue(args->at(1));
3763 
3764  __ pop(a1);
3765  __ mov(a0, result_register()); // StringAddStub requires args in a0, a1.
3766  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
3767  __ CallStub(&stub);
3768  context()->Plug(v0);
3769 }
3770 
3771 
3772 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3773  ZoneList<Expression*>* args = expr->arguments();
3774  DCHECK_EQ(2, args->length());
3775 
3776  VisitForStackValue(args->at(0));
3777  VisitForStackValue(args->at(1));
3778 
3779  StringCompareStub stub(isolate());
3780  __ CallStub(&stub);
3781  context()->Plug(v0);
3782 }
3783 
3784 
3785 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3786  ZoneList<Expression*>* args = expr->arguments();
3787  DCHECK(args->length() >= 2);
3788 
3789  int arg_count = args->length() - 2; // 2 ~ receiver and function.
3790  for (int i = 0; i < arg_count + 1; i++) {
3791  VisitForStackValue(args->at(i));
3792  }
3793  VisitForAccumulatorValue(args->last()); // Function.
3794 
3795  Label runtime, done;
3796  // Check for non-function argument (including proxy).
3797  __ JumpIfSmi(v0, &runtime);
3798  __ GetObjectType(v0, a1, a1);
3799  __ Branch(&runtime, ne, a1, Operand(JS_FUNCTION_TYPE));
3800 
3801  // InvokeFunction requires the function in a1. Move it in there.
3802  __ mov(a1, result_register());
3803  ParameterCount count(arg_count);
3804  __ InvokeFunction(a1, count, CALL_FUNCTION, NullCallWrapper());
3805  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3806  __ jmp(&done);
3807 
3808  __ bind(&runtime);
3809  __ push(v0);
3810  __ CallRuntime(Runtime::kCall, args->length());
3811  __ bind(&done);
3812 
3813  context()->Plug(v0);
3814 }
3815 
3816 
3817 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3818  RegExpConstructResultStub stub(isolate());
3819  ZoneList<Expression*>* args = expr->arguments();
3820  DCHECK(args->length() == 3);
3821  VisitForStackValue(args->at(0));
3822  VisitForStackValue(args->at(1));
3823  VisitForAccumulatorValue(args->at(2));
3824  __ mov(a0, result_register());
3825  __ pop(a1);
3826  __ pop(a2);
3827  __ CallStub(&stub);
3828  context()->Plug(v0);
3829 }
3830 
3831 
3832 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3833  ZoneList<Expression*>* args = expr->arguments();
3834  DCHECK_EQ(2, args->length());
3835 
3836  DCHECK_NE(NULL, args->at(0)->AsLiteral());
3837  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3838 
3839  Handle<FixedArray> jsfunction_result_caches(
3840  isolate()->native_context()->jsfunction_result_caches());
3841  if (jsfunction_result_caches->length() <= cache_id) {
3842  __ Abort(kAttemptToUseUndefinedCache);
3843  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
3844  context()->Plug(v0);
3845  return;
3846  }
3847 
3848  VisitForAccumulatorValue(args->at(1));
3849 
3850  Register key = v0;
3851  Register cache = a1;
3852  __ lw(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3853  __ lw(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
3854  __ lw(cache,
3855  ContextOperand(
3856  cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3857  __ lw(cache,
3858  FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3859 
3860 
3861  Label done, not_found;
3862  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3863  __ lw(a2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
3864  // a2 now holds finger offset as a smi.
3865  __ Addu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3866  // a3 now points to the start of fixed array elements.
3867  __ sll(at, a2, kPointerSizeLog2 - kSmiTagSize);
3868  __ addu(a3, a3, at);
3869  // a3 now points to key of indexed element of cache.
3870  __ lw(a2, MemOperand(a3));
3871  __ Branch(&not_found, ne, key, Operand(a2));
3872 
3873  __ lw(v0, MemOperand(a3, kPointerSize));
3874  __ Branch(&done);
3875 
3876  __ bind(&not_found);
3877  // Call runtime to perform the lookup.
3878  __ Push(cache, key);
3879  __ CallRuntime(Runtime::kGetFromCache, 2);
3880 
3881  __ bind(&done);
3882  context()->Plug(v0);
3883 }
3884 
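// Layout implied by the offsets used above: a JSFunctionResultCache is a
// FixedArray whose finger field holds (as a smi) the offset of the most
// recently hit (key, value) pair. The inline path only compares the key
// under the finger; on a miss, Runtime::kGetFromCache does the full search
// and moves the finger.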
3885 
3886 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3887  ZoneList<Expression*>* args = expr->arguments();
3888  VisitForAccumulatorValue(args->at(0));
3889 
3890  Label materialize_true, materialize_false;
3891  Label* if_true = NULL;
3892  Label* if_false = NULL;
3893  Label* fall_through = NULL;
3894  context()->PrepareTest(&materialize_true, &materialize_false,
3895  &if_true, &if_false, &fall_through);
3896 
3897  __ lw(a0, FieldMemOperand(v0, String::kHashFieldOffset));
3898  __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));
3899 
3900  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3901  Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);
3902 
3903  context()->Plug(if_true, if_false);
3904 }
3905 
3906 
3907 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3908  ZoneList<Expression*>* args = expr->arguments();
3909  DCHECK(args->length() == 1);
3910  VisitForAccumulatorValue(args->at(0));
3911 
3912  __ AssertString(v0);
3913 
3914  __ lw(v0, FieldMemOperand(v0, String::kHashFieldOffset));
3915  __ IndexFromHash(v0, v0);
3916 
3917  context()->Plug(v0);
3918 }
3919 
3920 
3921 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
3922  Label bailout, done, one_char_separator, long_separator,
3923  non_trivial_array, not_size_one_array, loop,
3924  empty_separator_loop, one_char_separator_loop,
3925  one_char_separator_loop_entry, long_separator_loop;
3926  ZoneList<Expression*>* args = expr->arguments();
3927  DCHECK(args->length() == 2);
3928  VisitForStackValue(args->at(1));
3929  VisitForAccumulatorValue(args->at(0));
3930 
3931  // All aliases of the same register have disjoint lifetimes.
3932  Register array = v0;
3933  Register elements = no_reg; // Will be v0.
3934  Register result = no_reg; // Will be v0.
3935  Register separator = a1;
3936  Register array_length = a2;
3937  Register result_pos = no_reg; // Will be a2.
3938  Register string_length = a3;
3939  Register string = t0;
3940  Register element = t1;
3941  Register elements_end = t2;
3942  Register scratch1 = t3;
3943  Register scratch2 = t5;
3944  Register scratch3 = t4;
3945 
3946  // Separator operand is on the stack.
3947  __ pop(separator);
3948 
3949  // Check that the array is a JSArray.
3950  __ JumpIfSmi(array, &bailout);
3951  __ GetObjectType(array, scratch1, scratch2);
3952  __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE));
3953 
3954  // Check that the array has fast elements.
3955  __ CheckFastElements(scratch1, scratch2, &bailout);
3956 
3957  // If the array has length zero, return the empty string.
3958  __ lw(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
3959  __ SmiUntag(array_length);
3960  __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
3961  __ LoadRoot(v0, Heap::kempty_stringRootIndex);
3962  __ Branch(&done);
3963 
3964  __ bind(&non_trivial_array);
3965 
3966  // Get the FixedArray containing array's elements.
3967  elements = array;
3968  __ lw(elements, FieldMemOperand(array, JSArray::kElementsOffset));
3969  array = no_reg; // End of array's live range.
3970 
3971  // Check that all array elements are sequential one-byte strings, and
3972  // accumulate the sum of their lengths, as a smi-encoded value.
3973  __ mov(string_length, zero_reg);
3974  __ Addu(element,
3975  elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3976  __ sll(elements_end, array_length, kPointerSizeLog2);
3977  __ Addu(elements_end, element, elements_end);
3978  // Loop condition: while (element < elements_end).
3979  // Live values in registers:
3980  // elements: Fixed array of strings.
3981  // array_length: Length of the fixed array of strings (not smi)
3982  // separator: Separator string
3983  // string_length: Accumulated sum of string lengths (smi).
3984  // element: Current array element.
3985  // elements_end: Array end.
3986  if (generate_debug_code_) {
3987  __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin, array_length,
3988  Operand(zero_reg));
3989  }
3990  __ bind(&loop);
3991  __ lw(string, MemOperand(element));
3992  __ Addu(element, element, kPointerSize);
3993  __ JumpIfSmi(string, &bailout);
3994  __ lw(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
3995  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3996  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
3997  __ lw(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
3998  __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
3999  __ BranchOnOverflow(&bailout, scratch3);
4000  __ Branch(&loop, lt, element, Operand(elements_end));
4001 
4002  // If array_length is 1, return elements[0], a string.
4003  __ Branch(&not_size_one_array, ne, array_length, Operand(1));
4004  __ lw(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
4005  __ Branch(&done);
4006 
4007  __ bind(&not_size_one_array);
4008 
4009  // Live values in registers:
4010  // separator: Separator string
4011  // array_length: Length of the array.
4012  // string_length: Sum of string lengths (smi).
4013  // elements: FixedArray of strings.
4014 
4015  // Check that the separator is a flat one-byte string.
4016  __ JumpIfSmi(separator, &bailout);
4017  __ lw(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
4018  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4019  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4020 
4021  // Add (separator length times array_length) - separator length to the
4022  // string_length to get the length of the result string. array_length is not
4023  // smi but the other values are, so the result is a smi.
4024  __ lw(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4025  __ Subu(string_length, string_length, Operand(scratch1));
4026  __ Mul(scratch3, scratch2, array_length, scratch1);
4027  // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
4028  // zero.
4029  __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
4030  __ And(scratch3, scratch2, Operand(0x80000000));
4031  __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
4032  __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
4033  __ BranchOnOverflow(&bailout, scratch3);
4034  __ SmiUntag(string_length);
4035 
4036  // Get first element in the array to free up the elements register to be used
4037  // for the result.
4038  __ Addu(element,
4039  elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4040  result = elements; // End of live range for elements.
4041  elements = no_reg;
4042  // Live values in registers:
4043  // element: First array element
4044  // separator: Separator string
4045  // string_length: Length of result string (not smi)
4046  // array_length: Length of the array.
4047  __ AllocateOneByteString(result, string_length, scratch1, scratch2,
4048  elements_end, &bailout);
4049  // Prepare for looping. Set up elements_end to end of the array. Set
4050  // result_pos to the position of the result where to write the first
4051  // character.
4052  __ sll(elements_end, array_length, kPointerSizeLog2);
4053  __ Addu(elements_end, element, elements_end);
4054  result_pos = array_length; // End of live range for array_length.
4055  array_length = no_reg;
4056  __ Addu(result_pos,
4057  result,
4058  Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4059 
4060  // Check the length of the separator.
4061  __ lw(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4062  __ li(at, Operand(Smi::FromInt(1)));
4063  __ Branch(&one_char_separator, eq, scratch1, Operand(at));
4064  __ Branch(&long_separator, gt, scratch1, Operand(at));
4065 
4066  // Empty separator case.
4067  __ bind(&empty_separator_loop);
4068  // Live values in registers:
4069  // result_pos: the position to which we are currently copying characters.
4070  // element: Current array element.
4071  // elements_end: Array end.
4072 
4073  // Copy next array element to the result.
4074  __ lw(string, MemOperand(element));
4075  __ Addu(element, element, kPointerSize);
4076  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
4077  __ SmiUntag(string_length);
4078  __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4079  __ CopyBytes(string, result_pos, string_length, scratch1);
4080  // End while (element < elements_end).
4081  __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));
4082  DCHECK(result.is(v0));
4083  __ Branch(&done);
4084 
4085  // One-character separator case.
4086  __ bind(&one_char_separator);
4087  // Replace separator with its one-byte character value.
4088  __ lbu(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4089  // Jump into the loop after the code that copies the separator, so the first
4090  // element is not preceded by a separator.
4091  __ jmp(&one_char_separator_loop_entry);
4092 
4093  __ bind(&one_char_separator_loop);
4094  // Live values in registers:
4095  // result_pos: the position to which we are currently copying characters.
4096  // element: Current array element.
4097  // elements_end: Array end.
4098  // separator: Single separator one-byte char (in lower byte).
4099 
4100  // Copy the separator character to the result.
4101  __ sb(separator, MemOperand(result_pos));
4102  __ Addu(result_pos, result_pos, 1);
4103 
4104  // Copy next array element to the result.
4105  __ bind(&one_char_separator_loop_entry);
4106  __ lw(string, MemOperand(element));
4107  __ Addu(element, element, kPointerSize);
4108  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
4109  __ SmiUntag(string_length);
4110  __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4111  __ CopyBytes(string, result_pos, string_length, scratch1);
4112  // End while (element < elements_end).
4113  __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));
4114  DCHECK(result.is(v0));
4115  __ Branch(&done);
4116 
4117  // Long separator case (separator is more than one character). Entry is at the
4118  // label long_separator below.
4119  __ bind(&long_separator_loop);
4120  // Live values in registers:
4121  // result_pos: the position to which we are currently copying characters.
4122  // element: Current array element.
4123  // elements_end: Array end.
4124  // separator: Separator string.
4125 
4126  // Copy the separator to the result.
4127  __ lw(string_length, FieldMemOperand(separator, String::kLengthOffset));
4128  __ SmiUntag(string_length);
4129  __ Addu(string,
4130  separator,
4131  Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4132  __ CopyBytes(string, result_pos, string_length, scratch1);
4133 
4134  __ bind(&long_separator);
4135  __ lw(string, MemOperand(element));
4136  __ Addu(element, element, kPointerSize);
4137  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
4138  __ SmiUntag(string_length);
4139  __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4140  __ CopyBytes(string, result_pos, string_length, scratch1);
4141  // End while (element < elements_end).
4142  __ Branch(&long_separator_loop, lt, element, Operand(elements_end));
4143  DCHECK(result.is(v0));
4144  __ Branch(&done);
4145 
4146  __ bind(&bailout);
4147  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
4148  __ bind(&done);
4149  context()->Plug(v0);
4150 }
4151 
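// The join above is a two-pass string build. Pass one checks that every
// element is a sequential one-byte string and sums the lengths with overflow
// checks; the result string is then allocated once at
//   total_chars + separator_len * (array_length - 1)
// characters, and pass two copies elements and separators into place, with
// loops specialized for empty, one-character and longer separators.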
4152 
4153 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4154  DCHECK(expr->arguments()->length() == 0);
4155  ExternalReference debug_is_active =
4156  ExternalReference::debug_is_active_address(isolate());
4157  __ li(at, Operand(debug_is_active));
4158  __ lb(v0, MemOperand(at));
4159  __ SmiTag(v0);
4160  context()->Plug(v0);
4161 }
4162 
4163 
4164 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4165  if (expr->function() != NULL &&
4166  expr->function()->intrinsic_type == Runtime::INLINE) {
4167  Comment cmnt(masm_, "[ InlineRuntimeCall");
4168  EmitInlineRuntimeCall(expr);
4169  return;
4170  }
4171 
4172  Comment cmnt(masm_, "[ CallRuntime");
4173  ZoneList<Expression*>* args = expr->arguments();
4174  int arg_count = args->length();
4175 
4176  if (expr->is_jsruntime()) {
4177  // Push the builtins object as the receiver.
4178  Register receiver = LoadDescriptor::ReceiverRegister();
4179  __ lw(receiver, GlobalObjectOperand());
4180  __ lw(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
4181  __ push(receiver);
4182 
4183  // Load the function from the receiver.
4184  __ li(LoadDescriptor::NameRegister(), Operand(expr->name()));
4185  if (FLAG_vector_ics) {
4186  __ li(VectorLoadICDescriptor::SlotRegister(),
4187  Operand(Smi::FromInt(expr->CallRuntimeFeedbackSlot())));
4188  CallLoadIC(NOT_CONTEXTUAL);
4189  } else {
4190  CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4191  }
4192 
4193  // Push the target function under the receiver.
4194  __ lw(at, MemOperand(sp, 0));
4195  __ push(at);
4196  __ sw(v0, MemOperand(sp, kPointerSize));
4197 
4198  // Push the arguments ("left-to-right").
4199  int arg_count = args->length();
4200  for (int i = 0; i < arg_count; i++) {
4201  VisitForStackValue(args->at(i));
4202  }
4203 
4204  // Record source position of the IC call.
4205  SetSourcePosition(expr->position());
4206  CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4207  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
4208  __ CallStub(&stub);
4209 
4210  // Restore context register.
4211  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4212 
4213  context()->DropAndPlug(1, v0);
4214  } else {
4215  // Push the arguments ("left-to-right").
4216  for (int i = 0; i < arg_count; i++) {
4217  VisitForStackValue(args->at(i));
4218  }
4219 
4220  // Call the C runtime function.
4221  __ CallRuntime(expr->function(), arg_count);
4222  context()->Plug(v0);
4223  }
4224 }
4225 
4226 
4227 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4228  switch (expr->op()) {
4229  case Token::DELETE: {
4230  Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4231  Property* property = expr->expression()->AsProperty();
4232  VariableProxy* proxy = expr->expression()->AsVariableProxy();
4233 
4234  if (property != NULL) {
4235  VisitForStackValue(property->obj());
4236  VisitForStackValue(property->key());
4237  __ li(a1, Operand(Smi::FromInt(strict_mode())));
4238  __ push(a1);
4239  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4240  context()->Plug(v0);
4241  } else if (proxy != NULL) {
4242  Variable* var = proxy->var();
4243  // Delete of an unqualified identifier is disallowed in strict mode
4244  // but "delete this" is allowed.
4245  DCHECK(strict_mode() == SLOPPY || var->is_this());
4246  if (var->IsUnallocated()) {
4247  __ lw(a2, GlobalObjectOperand());
4248  __ li(a1, Operand(var->name()));
4249  __ li(a0, Operand(Smi::FromInt(SLOPPY)));
4250  __ Push(a2, a1, a0);
4251  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4252  context()->Plug(v0);
4253  } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4254  // Result of deleting non-global, non-dynamic variables is false.
4255  // The subexpression does not have side effects.
4256  context()->Plug(var->is_this());
4257  } else {
4258  // Non-global variable. Call the runtime to try to delete from the
4259  // context where the variable was introduced.
4260  DCHECK(!context_register().is(a2));
4261  __ li(a2, Operand(var->name()));
4262  __ Push(context_register(), a2);
4263  __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4264  context()->Plug(v0);
4265  }
4266  } else {
4267  // Result of deleting non-property, non-variable reference is true.
4268  // The subexpression may have side effects.
4269  VisitForEffect(expr->expression());
4270  context()->Plug(true);
4271  }
4272  break;
4273  }
4274 
4275  case Token::VOID: {
4276  Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4277  VisitForEffect(expr->expression());
4278  context()->Plug(Heap::kUndefinedValueRootIndex);
4279  break;
4280  }
4281 
4282  case Token::NOT: {
4283  Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4284  if (context()->IsEffect()) {
4285  // Unary NOT has no side effects so it's only necessary to visit the
4286  // subexpression. Match the optimizing compiler by not branching.
4287  VisitForEffect(expr->expression());
4288  } else if (context()->IsTest()) {
4289  const TestContext* test = TestContext::cast(context());
4290  // The labels are swapped for the recursive call.
4291  VisitForControl(expr->expression(),
4292  test->false_label(),
4293  test->true_label(),
4294  test->fall_through());
4295  context()->Plug(test->true_label(), test->false_label());
4296  } else {
4297  // We handle value contexts explicitly rather than simply visiting
4298  // for control and plugging the control flow into the context,
4299  // because we need to prepare a pair of extra administrative AST ids
4300  // for the optimizing compiler.
4301  DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4302  Label materialize_true, materialize_false, done;
4303  VisitForControl(expr->expression(),
4304  &materialize_false,
4305  &materialize_true,
4306  &materialize_true);
4307  __ bind(&materialize_true);
4308  PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4309  __ LoadRoot(v0, Heap::kTrueValueRootIndex);
4310  if (context()->IsStackValue()) __ push(v0);
4311  __ jmp(&done);
4312  __ bind(&materialize_false);
4313  PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4314  __ LoadRoot(v0, Heap::kFalseValueRootIndex);
4315  if (context()->IsStackValue()) __ push(v0);
4316  __ bind(&done);
4317  }
4318  break;
4319  }
4320 
4321  case Token::TYPEOF: {
4322  Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4323  { StackValueContext context(this);
4324  VisitForTypeofValue(expr->expression());
4325  }
4326  __ CallRuntime(Runtime::kTypeof, 1);
4327  context()->Plug(v0);
4328  break;
4329  }
4330 
4331  default:
4332  UNREACHABLE();
4333  }
4334 }
4335 
4336 
4337 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4338  DCHECK(expr->expression()->IsValidReferenceExpression());
4339 
4340  Comment cmnt(masm_, "[ CountOperation");
4341  SetSourcePosition(expr->position());
4342 
4343  // Expression can only be a property, a global or a (parameter or local)
4344  // slot.
4345  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
4346  LhsKind assign_type = VARIABLE;
4347  Property* prop = expr->expression()->AsProperty();
4348  // In case of a property we use the uninitialized expression context
4349  // of the key to detect a named property.
4350  if (prop != NULL) {
4351  assign_type =
4352  (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4353  }
4354 
4355  // Evaluate expression and get value.
4356  if (assign_type == VARIABLE) {
4357  DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4358  AccumulatorValueContext context(this);
4359  EmitVariableLoad(expr->expression()->AsVariableProxy());
4360  } else {
4361  // Reserve space for result of postfix operation.
4362  if (expr->is_postfix() && !context()->IsEffect()) {
4363  __ li(at, Operand(Smi::FromInt(0)));
4364  __ push(at);
4365  }
4366  if (assign_type == NAMED_PROPERTY) {
4367  // Put the object both on the stack and in the register.
4368  VisitForStackValue(prop->obj());
4369  __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
4370  EmitNamedPropertyLoad(prop);
4371  } else {
4372  VisitForStackValue(prop->obj());
4373  VisitForStackValue(prop->key());
4374  __ lw(LoadDescriptor::ReceiverRegister(),
4375  MemOperand(sp, 1 * kPointerSize));
4376  __ lw(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
4377  EmitKeyedPropertyLoad(prop);
4378  }
4379  }
4380 
4381  // We need a second deoptimization point after loading the value
4382  // in case evaluating the property load may have a side effect.
4383  if (assign_type == VARIABLE) {
4384  PrepareForBailout(expr->expression(), TOS_REG);
4385  } else {
4386  PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4387  }
4388 
4389  // Inline smi case if we are in a loop.
4390  Label stub_call, done;
4391  JumpPatchSite patch_site(masm_);
4392 
4393  int count_value = expr->op() == Token::INC ? 1 : -1;
4394  __ mov(a0, v0);
4395  if (ShouldInlineSmiCase(expr->op())) {
4396  Label slow;
4397  patch_site.EmitJumpIfNotSmi(v0, &slow);
4398 
4399  // Save result for postfix expressions.
4400  if (expr->is_postfix()) {
4401  if (!context()->IsEffect()) {
4402  // Save the result on the stack. If we have a named or keyed property
4403  // we store the result under the receiver that is currently on top
4404  // of the stack.
4405  switch (assign_type) {
4406  case VARIABLE:
4407  __ push(v0);
4408  break;
4409  case NAMED_PROPERTY:
4410  __ sw(v0, MemOperand(sp, kPointerSize));
4411  break;
4412  case KEYED_PROPERTY:
4413  __ sw(v0, MemOperand(sp, 2 * kPointerSize));
4414  break;
4415  }
4416  }
4417  }
4418 
4419  Register scratch1 = a1;
4420  Register scratch2 = t0;
4421  __ li(scratch1, Operand(Smi::FromInt(count_value)));
4422  __ AdduAndCheckForOverflow(v0, v0, scratch1, scratch2);
4423  __ BranchOnNoOverflow(&done, scratch2);
4424  // Call stub. Undo operation first.
4425  __ Move(v0, a0);
4426  __ jmp(&stub_call);
4427  __ bind(&slow);
4428  }
4429  ToNumberStub convert_stub(isolate());
4430  __ CallStub(&convert_stub);
4431 
4432  // Save result for postfix expressions.
4433  if (expr->is_postfix()) {
4434  if (!context()->IsEffect()) {
4435  // Save the result on the stack. If we have a named or keyed property
4436  // we store the result under the receiver that is currently on top
4437  // of the stack.
4438  switch (assign_type) {
4439  case VARIABLE:
4440  __ push(v0);
4441  break;
4442  case NAMED_PROPERTY:
4443  __ sw(v0, MemOperand(sp, kPointerSize));
4444  break;
4445  case KEYED_PROPERTY:
4446  __ sw(v0, MemOperand(sp, 2 * kPointerSize));
4447  break;
4448  }
4449  }
4450  }
4451 
4452  __ bind(&stub_call);
4453  __ mov(a1, v0);
4454  __ li(a0, Operand(Smi::FromInt(count_value)));
4455 
4456  // Record position before stub call.
4457  SetSourcePosition(expr->position());
4458 
4459  Handle<Code> code =
4460  CodeFactory::BinaryOpIC(isolate(), Token::ADD, NO_OVERWRITE).code();
4461  CallIC(code, expr->CountBinOpFeedbackId());
4462  patch_site.EmitPatchInfo();
4463  __ bind(&done);
4464 
4465  // Store the value returned in v0.
4466  switch (assign_type) {
4467  case VARIABLE:
4468  if (expr->is_postfix()) {
4469  { EffectContext context(this);
4470  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4471  Token::ASSIGN);
4472  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4473  context.Plug(v0);
4474  }
4475  // For all contexts except EffectConstant we have the result on
4476  // top of the stack.
4477  if (!context()->IsEffect()) {
4478  context()->PlugTOS();
4479  }
4480  } else {
4481  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4482  Token::ASSIGN);
4483  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4484  context()->Plug(v0);
4485  }
4486  break;
4487  case NAMED_PROPERTY: {
4488  __ mov(StoreDescriptor::ValueRegister(), result_register());
4489  __ li(StoreDescriptor::NameRegister(),
4490  Operand(prop->key()->AsLiteral()->value()));
4491  __ pop(StoreDescriptor::ReceiverRegister());
4492  CallStoreIC(expr->CountStoreFeedbackId());
4493  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4494  if (expr->is_postfix()) {
4495  if (!context()->IsEffect()) {
4496  context()->PlugTOS();
4497  }
4498  } else {
4499  context()->Plug(v0);
4500  }
4501  break;
4502  }
4503  case KEYED_PROPERTY: {
4504  __ mov(StoreDescriptor::ValueRegister(), result_register());
4505  __ Pop(StoreDescriptor::ReceiverRegister(),
4506  StoreDescriptor::NameRegister());
4507  Handle<Code> ic =
4508  CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
4509  CallIC(ic, expr->CountStoreFeedbackId());
4510  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4511  if (expr->is_postfix()) {
4512  if (!context()->IsEffect()) {
4513  context()->PlugTOS();
4514  }
4515  } else {
4516  context()->Plug(v0);
4517  }
4518  break;
4519  }
4520  }
4521 }
4522 
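// The inline smi fast path above exploits the tagging scheme: a smi is the
// integer shifted left once, so x++ on the tagged value is a single
// AdduAndCheckForOverflow with Smi::FromInt(1) (e.g. tagged 5 = 0xa plus 0x2
// yields tagged 6 = 0xc). Overflow, or a non-smi operand once the patch site
// is rewritten, falls back to the BinaryOpIC stub call.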
4523 
4524 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4525  DCHECK(!context()->IsEffect());
4526  DCHECK(!context()->IsTest());
4527  VariableProxy* proxy = expr->AsVariableProxy();
4528  if (proxy != NULL && proxy->var()->IsUnallocated()) {
4529  Comment cmnt(masm_, "[ Global variable");
4530  __ lw(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
4531  __ li(LoadDescriptor::NameRegister(), Operand(proxy->name()));
4532  if (FLAG_vector_ics) {
4533  __ li(VectorLoadICDescriptor::SlotRegister(),
4534  Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
4535  }
4536  // Use a regular load, not a contextual load, to avoid a reference
4537  // error.
4538  CallLoadIC(NOT_CONTEXTUAL);
4539  PrepareForBailout(expr, TOS_REG);
4540  context()->Plug(v0);
4541  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4542  Comment cmnt(masm_, "[ Lookup slot");
4543  Label done, slow;
4544 
4545  // Generate code for loading from variables potentially shadowed
4546  // by eval-introduced variables.
4547  EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);
4548 
4549  __ bind(&slow);
4550  __ li(a0, Operand(proxy->name()));
4551  __ Push(cp, a0);
4552  __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
4553  PrepareForBailout(expr, TOS_REG);
4554  __ bind(&done);
4555 
4556  context()->Plug(v0);
4557  } else {
4558  // This expression cannot throw a reference error at the top level.
4559  VisitInDuplicateContext(expr);
4560  }
4561 }
4562 
4563 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4564  Expression* sub_expr,
4565  Handle<String> check) {
4566  Label materialize_true, materialize_false;
4567  Label* if_true = NULL;
4568  Label* if_false = NULL;
4569  Label* fall_through = NULL;
4570  context()->PrepareTest(&materialize_true, &materialize_false,
4571  &if_true, &if_false, &fall_through);
4572 
4573  { AccumulatorValueContext context(this);
4574  VisitForTypeofValue(sub_expr);
4575  }
4576  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4577 
4578  Factory* factory = isolate()->factory();
4579  if (String::Equals(check, factory->number_string())) {
4580  __ JumpIfSmi(v0, if_true);
4581  __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
4582  __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
4583  Split(eq, v0, Operand(at), if_true, if_false, fall_through);
4584  } else if (String::Equals(check, factory->string_string())) {
4585  __ JumpIfSmi(v0, if_false);
4586  // Check for undetectable objects => false.
4587  __ GetObjectType(v0, v0, a1);
4588  __ Branch(if_false, ge, a1, Operand(FIRST_NONSTRING_TYPE));
4589  __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
4590  __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
4591  Split(eq, a1, Operand(zero_reg),
4592  if_true, if_false, fall_through);
4593  } else if (String::Equals(check, factory->symbol_string())) {
4594  __ JumpIfSmi(v0, if_false);
4595  __ GetObjectType(v0, v0, a1);
4596  Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
4597  } else if (String::Equals(check, factory->boolean_string())) {
4598  __ LoadRoot(at, Heap::kTrueValueRootIndex);
4599  __ Branch(if_true, eq, v0, Operand(at));
4600  __ LoadRoot(at, Heap::kFalseValueRootIndex);
4601  Split(eq, v0, Operand(at), if_true, if_false, fall_through);
4602  } else if (String::Equals(check, factory->undefined_string())) {
4603  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
4604  __ Branch(if_true, eq, v0, Operand(at));
4605  __ JumpIfSmi(v0, if_false);
4606  // Check for undetectable objects => true.
4607  __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
4608  __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
4609  __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
4610  Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
4611  } else if (String::Equals(check, factory->function_string())) {
4612  __ JumpIfSmi(v0, if_false);
4613  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4614  __ GetObjectType(v0, v0, a1);
4615  __ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE));
4616  Split(eq, a1, Operand(JS_FUNCTION_PROXY_TYPE),
4617  if_true, if_false, fall_through);
4618  } else if (String::Equals(check, factory->object_string())) {
4619  __ JumpIfSmi(v0, if_false);
4620  __ LoadRoot(at, Heap::kNullValueRootIndex);
4621  __ Branch(if_true, eq, v0, Operand(at));
4622  // Check for JS objects => true.
4623  __ GetObjectType(v0, v0, a1);
4624  __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
4625  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_SPEC_OBJECT_TYPE);
4626  __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
4627  // Check for undetectable objects => false.
4628  __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
4629  __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
4630  Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
4631  } else {
4632  if (if_false != fall_through) __ jmp(if_false);
4633  }
4634  context()->Plug(if_true, if_false);
4635 }
4636 
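// Each arm above open-codes one typeof answer: "number" is a smi check plus
// a HeapNumber map compare; "object" must accept null and non-callable spec
// objects while rejecting undetectable ones, which typeof instead reports as
// "undefined"; an unrecognized literal simply falls through to false.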
4637 
4638 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4639  Comment cmnt(masm_, "[ CompareOperation");
4640  SetSourcePosition(expr->position());
4641 
4642  // First we try a fast inlined version of the compare when one of
4643  // the operands is a literal.
4644  if (TryLiteralCompare(expr)) return;
4645 
4646  // Always perform the comparison for its control flow. Pack the result
4647  // into the expression's context after the comparison is performed.
4648  Label materialize_true, materialize_false;
4649  Label* if_true = NULL;
4650  Label* if_false = NULL;
4651  Label* fall_through = NULL;
4652  context()->PrepareTest(&materialize_true, &materialize_false,
4653  &if_true, &if_false, &fall_through);
4654 
4655  Token::Value op = expr->op();
4656  VisitForStackValue(expr->left());
4657  switch (op) {
4658  case Token::IN:
4659  VisitForStackValue(expr->right());
4660  __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4661  PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4662  __ LoadRoot(t0, Heap::kTrueValueRootIndex);
4663  Split(eq, v0, Operand(t0), if_true, if_false, fall_through);
4664  break;
4665 
4666  case Token::INSTANCEOF: {
4667  VisitForStackValue(expr->right());
4668  InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
4669  __ CallStub(&stub);
4670  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4671  // The stub returns 0 for true.
4672  Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through);
4673  break;
4674  }
4675 
4676  default: {
4677  VisitForAccumulatorValue(expr->right());
4678  Condition cc = CompareIC::ComputeCondition(op);
4679  __ mov(a0, result_register());
4680  __ pop(a1);
4681 
4682  bool inline_smi_code = ShouldInlineSmiCase(op);
4683  JumpPatchSite patch_site(masm_);
4684  if (inline_smi_code) {
4685  Label slow_case;
4686  __ Or(a2, a0, Operand(a1));
4687  patch_site.EmitJumpIfNotSmi(a2, &slow_case);
4688  Split(cc, a1, Operand(a0), if_true, if_false, NULL);
4689  __ bind(&slow_case);
4690  }
4691  // Record position and call the compare IC.
4692  SetSourcePosition(expr->position());
4693  Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
4694  CallIC(ic, expr->CompareOperationFeedbackId());
4695  patch_site.EmitPatchInfo();
4696  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4697  Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
4698  }
4699  }
4700 
4701  // Convert the result of the comparison into one expected for this
4702  // expression's context.
4703  context()->Plug(if_true, if_false);
4704 }
4705 
4706 
4707 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4708  Expression* sub_expr,
4709  NilValue nil) {
4710  Label materialize_true, materialize_false;
4711  Label* if_true = NULL;
4712  Label* if_false = NULL;
4713  Label* fall_through = NULL;
4714  context()->PrepareTest(&materialize_true, &materialize_false,
4715  &if_true, &if_false, &fall_through);
4716 
4717  VisitForAccumulatorValue(sub_expr);
4718  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4719  __ mov(a0, result_register());
4720  if (expr->op() == Token::EQ_STRICT) {
4721  Heap::RootListIndex nil_value = nil == kNullValue ?
4722  Heap::kNullValueRootIndex :
4723  Heap::kUndefinedValueRootIndex;
4724  __ LoadRoot(a1, nil_value);
4725  Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
4726  } else {
4727  Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4728  CallIC(ic, expr->CompareOperationFeedbackId());
4729  Split(ne, v0, Operand(zero_reg), if_true, if_false, fall_through);
4730  }
4731  context()->Plug(if_true, if_false);
4732 }
4733 
4734 
4735 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4736  __ lw(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4737  context()->Plug(v0);
4738 }
4739 
4740 
4741 Register FullCodeGenerator::result_register() {
4742  return v0;
4743 }
4744 
4745 
4746 Register FullCodeGenerator::context_register() {
4747  return cp;
4748 }
4749 
4750 
4751 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4752  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4753  __ sw(value, MemOperand(fp, frame_offset));
4754 }
4755 
4756 
4757 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4758  __ lw(dst, ContextOperand(cp, context_index));
4759 }
4760 
4761 
4762 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4763  Scope* declaration_scope = scope()->DeclarationScope();
4764  if (declaration_scope->is_global_scope() ||
4765  declaration_scope->is_module_scope()) {
4766  // Contexts nested in the native context have a canonical empty function
4767  // as their closure, not the anonymous closure containing the global
4768  // code. Pass a smi sentinel and let the runtime look up the empty
4769  // function.
4770  __ li(at, Operand(Smi::FromInt(0)));
4771  } else if (declaration_scope->is_eval_scope()) {
4772  // Contexts created by a call to eval have the same closure as the
4773  // context calling eval, not the anonymous closure containing the eval
4774  // code. Fetch it from the context.
4775  __ lw(at, ContextOperand(cp, Context::CLOSURE_INDEX));
4776  } else {
4777  DCHECK(declaration_scope->is_function_scope());
4778  __ lw(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4779  }
4780  __ push(at);
4781 }
4782 
4783 
4784 // ----------------------------------------------------------------------------
4785 // Non-local control flow support.
4786 
4787 void FullCodeGenerator::EnterFinallyBlock() {
4788  DCHECK(!result_register().is(a1));
4789  // Store result register while executing finally block.
4790  __ push(result_register());
4791  // Cook return address in link register to stack (smi encoded Code* delta).
4792  __ Subu(a1, ra, Operand(masm_->CodeObject()));
4793  DCHECK_EQ(1, kSmiTagSize + kSmiShiftSize);
4794  STATIC_ASSERT(0 == kSmiTag);
4795  __ Addu(a1, a1, Operand(a1)); // Convert to smi.
4796 
4797  // Store the cooked return address while executing the finally block.
4798  __ push(a1);
4799 
4800  // Store pending message while executing finally block.
4801  ExternalReference pending_message_obj =
4802  ExternalReference::address_of_pending_message_obj(isolate());
4803  __ li(at, Operand(pending_message_obj));
4804  __ lw(a1, MemOperand(at));
4805  __ push(a1);
4806 
4807  ExternalReference has_pending_message =
4808  ExternalReference::address_of_has_pending_message(isolate());
4809  __ li(at, Operand(has_pending_message));
4810  __ lw(a1, MemOperand(at));
4811  __ SmiTag(a1);
4812  __ push(a1);
4813 
4814  ExternalReference pending_message_script =
4815  ExternalReference::address_of_pending_message_script(isolate());
4816  __ li(at, Operand(pending_message_script));
4817  __ lw(a1, MemOperand(at));
4818  __ push(a1);
4819 }
4820 
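// "Cooking" the return address means stacking it as a smi-encoded,
// code-relative delta (ra - CodeObject, then shifted into a smi) rather than
// a raw pointer, so the slot stays valid even if the GC moves the Code
// object while the finally block runs; ExitFinallyBlock below reverses each
// of these pushes in the opposite order.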
4821 
4822 void FullCodeGenerator::ExitFinallyBlock() {
4823  DCHECK(!result_register().is(a1));
4824  // Restore pending message from stack.
4825  __ pop(a1);
4826  ExternalReference pending_message_script =
4827  ExternalReference::address_of_pending_message_script(isolate());
4828  __ li(at, Operand(pending_message_script));
4829  __ sw(a1, MemOperand(at));
4830 
4831  __ pop(a1);
4832  __ SmiUntag(a1);
4833  ExternalReference has_pending_message =
4834  ExternalReference::address_of_has_pending_message(isolate());
4835  __ li(at, Operand(has_pending_message));
4836  __ sw(a1, MemOperand(at));
4837 
4838  __ pop(a1);
4839  ExternalReference pending_message_obj =
4840  ExternalReference::address_of_pending_message_obj(isolate());
4841  __ li(at, Operand(pending_message_obj));
4842  __ sw(a1, MemOperand(at));
4843 
4844  // Restore result register from stack.
4845  __ pop(a1);
4846 
4847  // Uncook return address and return.
4848  __ pop(result_register());
4849  DCHECK_EQ(1, kSmiTagSize + kSmiShiftSize);
4850  __ sra(a1, a1, 1); // Un-smi-tag value.
4851  __ Addu(at, a1, Operand(masm_->CodeObject()));
4852  __ Jump(at);
4853 }
4854 
4855 
4856 #undef __
4857 
4858 #define __ ACCESS_MASM(masm())
4859 
4860 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4861  int* stack_depth,
4862  int* context_length) {
4863  // The macros used here must preserve the result register.
4864 
4865  // Because the handler block contains the context of the finally
4866  // code, we can restore it directly from there for the finally code
4867  // rather than iteratively unwinding contexts via their previous
4868  // links.
4869  __ Drop(*stack_depth); // Down to the handler block.
4870  if (*context_length > 0) {
4871  // Restore the context to its dedicated register and the stack.
4872  __ lw(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
4873  __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4874  }
4875  __ PopTryHandler();
4876  __ Call(finally_entry_);
4877 
4878  *stack_depth = 0;
4879  *context_length = 0;
4880  return previous_;
4881 }
4882 
4883 
4884 #undef __
4885 
4886 
4887 void BackEdgeTable::PatchAt(Code* unoptimized_code,
4888  Address pc,
4889  BackEdgeState target_state,
4890  Code* replacement_code) {
4891  static const int kInstrSize = Assembler::kInstrSize;
4892  Address branch_address = pc - 6 * kInstrSize;
4893  CodePatcher patcher(branch_address, 1);
4894 
4895  switch (target_state) {
4896  case INTERRUPT:
4897  // slt at, a3, zero_reg (in case of count based interrupts)
4898  // beq at, zero_reg, ok
4899  // lui t9, <interrupt stub address> upper
4900  // ori t9, <interrupt stub address> lower
4901  // jalr t9
4902  // nop
4903  // ok-label ----- pc_after points here
4904  patcher.masm()->slt(at, a3, zero_reg);
4905  break;
4906  case ON_STACK_REPLACEMENT:
4907  case OSR_AFTER_STACK_CHECK:
4908  // addiu at, zero_reg, 1
4909  // beq at, zero_reg, ok ;; Not changed
4910  // lui t9, <on-stack replacement address> upper
4911  // ori t9, <on-stack replacement address> lower
4912  // jalr t9 ;; Not changed
4913  // nop ;; Not changed
4914  // ok-label ----- pc_after points here
4915  patcher.masm()->addiu(at, zero_reg, 1);
4916  break;
4917  }
4918  Address pc_immediate_load_address = pc - 4 * kInstrSize;
4919  // Replace the stack check address in the load-immediate (lui/ori pair)
4920  // with the entry address of the replacement code.
4921  Assembler::set_target_address_at(pc_immediate_load_address,
4922  replacement_code->entry());
4923 
4924  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4925  unoptimized_code, pc_immediate_load_address, replacement_code);
4926 }
4927 
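// The patch flips a single instruction ahead of an unchanged
// "beq at, zero_reg, ok": in INTERRUPT state "slt at, a3, zero_reg" sets at
// only when the profiler counter has gone negative, so the call is normally
// skipped; for OSR, "addiu at, zero_reg, 1" forces at != 0 so the beq never
// branches and the jalr always runs. The lui/ori pair is then retargeted to
// select which builtin gets called.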
4928 
4929 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4930  Isolate* isolate,
4931  Code* unoptimized_code,
4932  Address pc) {
4933  static const int kInstrSize = Assembler::kInstrSize;
4934  Address branch_address = pc - 6 * kInstrSize;
4935  Address pc_immediate_load_address = pc - 4 * kInstrSize;
4936 
4937  DCHECK(Assembler::IsBeq(Assembler::instr_at(pc - 5 * kInstrSize)));
4938  if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
4939  DCHECK(reinterpret_cast<uint32_t>(
4940  Assembler::target_address_at(pc_immediate_load_address)) ==
4941  reinterpret_cast<uint32_t>(
4942  isolate->builtins()->InterruptCheck()->entry()));
4943  return INTERRUPT;
4944  }
4945 
4946  DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));
4947 
4948  if (reinterpret_cast<uint32_t>(
4949  Assembler::target_address_at(pc_immediate_load_address)) ==
4950  reinterpret_cast<uint32_t>(
4951  isolate->builtins()->OnStackReplacement()->entry())) {
4952  return ON_STACK_REPLACEMENT;
4953  }
4954 
4955  DCHECK(reinterpret_cast<uint32_t>(
4956  Assembler::target_address_at(pc_immediate_load_address)) ==
4957  reinterpret_cast<uint32_t>(
4958  isolate->builtins()->OsrAfterStackCheck()->entry()));
4959  return OSR_AFTER_STACK_CHECK;
4960 }
4961 
4962 
4963 } } // namespace v8::internal
4964 
4965 #endif // V8_TARGET_ARCH_MIPS
Definition: objects.h:6251
static const int kInstanceTypeOffset
Definition: objects.h:6229
static const int kBitField2Offset
Definition: objects.h:6233
static const int kConstructorOffset
Definition: objects.h:6191
static const int kPrototypeOffset
Definition: objects.h:6190
static const int kHashFieldOffset
Definition: objects.h:8486
static void MaybeCallEntryHook(MacroAssembler *masm)
static const int kNoPosition
Definition: assembler.h:317
Scope * outer_scope() const
Definition: scopes.h:333
int num_parameters() const
Definition: scopes.h:321
VariableDeclaration * function() const
Definition: scopes.h:309
int ContextChainLength(Scope *scope)
Definition: scopes.cc:715
void VisitIllegalRedeclaration(AstVisitor *visitor)
Definition: scopes.cc:539
Scope * DeclarationScope()
Definition: scopes.cc:737
Variable * arguments() const
Definition: scopes.h:324
Scope * GlobalScope()
Definition: scopes.cc:728
Variable * parameter(int index) const
Definition: scopes.h:316
static const int kHeaderSize
Definition: objects.h:8941
static const int kFormalParameterCountOffset
Definition: objects.h:6946
static const int kInstanceClassNameOffset
Definition: objects.h:6897
static Smi * FromInt(int value)
Definition: objects-inl.h:1321
static bool IsValid(intptr_t value)
Definition: objects-inl.h:1334
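The two Smi entries above are the 32-bit small-integer tagging used throughout this file. A minimal standalone model of that arithmetic (not V8 source; the constants mirror kSmiTag, kSmiTagSize and kSmiShiftSize from this index, with their 32-bit values):

#include <cassert>
#include <cstdint>

const int32_t kSmiTag = 0;        // low bit 0 marks a smi
const int32_t kSmiTagSize = 1;    // one tag bit on 32-bit targets
const int32_t kSmiShiftSize = 0;  // no extra shift on 32-bit targets

// Smi::FromInt: move the payload past the tag bit (multiplying avoids
// shifting a negative value).
int32_t SmiFromInt(int32_t value) {
  return value * (1 << (kSmiTagSize + kSmiShiftSize)) + kSmiTag;
}

// Smi::IsValid: the payload must fit in the remaining 31 bits.
bool SmiIsValid(int32_t value) {
  const int32_t kSmiMax = (1 << 30) - 1;
  const int32_t kSmiMin = -(1 << 30);
  return value >= kSmiMin && value <= kSmiMax;
}

int main() {
  assert(SmiFromInt(21) == 42);       // tagging doubles the raw value
  assert(SmiIsValid((1 << 30) - 1));  // largest valid smi
  assert(!SmiIsValid(1 << 30));       // one past the top overflows
  return 0;
}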
static const int kContextOffset
Definition: frames.h:74
static const int kContextOffset
Definition: frames.h:162
static const int kExpressionsOffset
Definition: frames.h:160
static const int kCallerSPOffset
Definition: frames.h:167
static const int kMarkerOffset
Definition: frames.h:161
static const int kCallerFPOffset
Definition: frames.h:165
static const Register ReceiverRegister()
static const Register NameRegister()
static const Register ValueRegister()
static const unsigned int kContainsCachedArrayIndexMask
Definition: objects.h:8618
static const int kLengthOffset
Definition: objects.h:8802
bool Equals(String *other)
Definition: objects-inl.h:3336
static Handle< Code > GetUninitialized(Isolate *isolate)
Definition: code-stubs.h:2258
static TypeFeedbackId None()
Definition: utils.h:945
static Handle< Object > MegamorphicSentinel(Isolate *isolate)
TypeofState
Definition: codegen.h:46
#define __
DEFINE_BOOL flag tooltips (flag-definitions.h); help text garbled by macro expansion. Identifiable flags include force_long_branches, enable_unaligned_accesses, and enable_always_align_csp.
#define POINTER_SIZE_ALIGN(value)
Definition: globals.h:582
#define UNREACHABLE()
Definition: logging.h:30
#define DCHECK_NE(v1, v2)
Definition: logging.h:207
#define DCHECK(condition)
Definition: logging.h:205
#define DCHECK_EQ(v1, v2)
Definition: logging.h:206
@ CALL_FUNCTION
@ TAG_OBJECT
typedef int int32_t
Definition: unicode.cc:24
static int Push(SpecialRPOStackFrame *stack, int depth, BasicBlock *child, int unvisited)
Definition: scheduler.cc:773
Vector< const char > CStrVector(const char *data)
Definition: vector.h:158
const int kPointerSize
Definition: globals.h:129
MemOperand ContextOperand(Register context, int index)
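ContextOperand above builds the MemOperand for a context slot. A standalone sketch of the slot arithmetic, assuming the 32-bit layout named elsewhere in this index (kPointerSize == 4, a two-word FixedArray header, kHeapObjectTag == 1); GlobalObjectOperand(), further down this index, applies the same arithmetic to the cp register:

#include <cassert>

const int kPointerSize = 4;           // 32-bit target
const int kFixedArrayHeaderSize = 8;  // map word + length word
const int kHeapObjectTag = 1;         // tagged heap pointers are off by one

// Context::SlotOffset: skip the FixedArray-style header, index into the
// slot array, and untag the base register.
int SlotOffset(int index) {
  return kFixedArrayHeaderSize + index * kPointerSize - kHeapObjectTag;
}

int main() {
  assert(SlotOffset(0) == 7);   // first slot starts right after the header
  assert(SlotOffset(3) == 19);  // each further slot adds one pointer width
  return 0;
}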
@ DO_SMI_CHECK
Definition: globals.h:641
@ STRING_ADD_CHECK_BOTH
Definition: code-stubs.h:1218
@ TRACK_ALLOCATION_SITE
Definition: objects.h:8085
@ DONT_TRACK_ALLOCATION_SITE
Definition: objects.h:8084
has_duplicate_parameters (BOOL_ACCESSORS expansion; surrounding macro text garbled)
Definition: objects-inl.h:5448
@ kSeqStringTag
Definition: objects.h:563
const Register cp
static LifetimePosition Min(LifetimePosition a, LifetimePosition b)
bool IsDeclaredVariableMode(VariableMode mode)
Definition: globals.h:705
const uint32_t kTwoByteStringTag
Definition: objects.h:556
const int kSmiTagSize
Definition: v8.h:5743
const Register fp
const Register sp
const int kPointerSizeLog2
Definition: globals.h:147
@ LAST_NONCALLABLE_SPEC_OBJECT_TYPE
Definition: objects.h:785
@ JS_REGEXP_TYPE
Definition: objects.h:748
@ NUM_OF_CALLABLE_SPEC_OBJECT_TYPES
Definition: objects.h:788
@ JS_VALUE_TYPE
Definition: objects.h:728
@ JS_DATE_TYPE
Definition: objects.h:730
@ FIRST_NONCALLABLE_SPEC_OBJECT_TYPE
Definition: objects.h:784
@ FIRST_JS_PROXY_TYPE
Definition: objects.h:778
@ JS_ARRAY_TYPE
Definition: objects.h:738
@ FIRST_NONSTRING_TYPE
Definition: objects.h:758
@ FIRST_SPEC_OBJECT_TYPE
Definition: objects.h:781
@ LAST_SPEC_OBJECT_TYPE
Definition: objects.h:782
@ JS_FUNCTION_TYPE
Definition: objects.h:749
@ JS_FUNCTION_PROXY_TYPE
Definition: objects.h:726
@ LAST_JS_PROXY_TYPE
Definition: objects.h:779
const uint32_t kOneByteStringTag
Definition: objects.h:557
@ NO_OVERWRITE
Definition: ic-state.h:58
@ OVERWRITE_RIGHT
Definition: ic-state.h:58
MemOperand FieldMemOperand(Register object, int offset)
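FieldMemOperand above folds the heap-object tag into the displacement so field loads can address through the tagged pointer directly. A minimal model of that adjustment (not V8 source; kHeapObjectTag and kMapOffset are taken from this index):

#include <cassert>

const int kHeapObjectTag = 1;  // low bit 1 marks a tagged heap pointer
const int kMapOffset = 0;      // HeapObject::kMapOffset per objects.h

// FieldMemOperand(object, offset) is base-plus-displacement addressing
// with the tag subtracted back out of the field offset.
int FieldDisplacement(int field_offset) {
  return field_offset - kHeapObjectTag;
}

int main() {
  // A map load such as lw(scratch, FieldMemOperand(obj, kMapOffset))
  // therefore uses displacement -1 against the tagged pointer.
  assert(FieldDisplacement(kMapOffset) == -1);
  return 0;
}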
bool IsImmutableVariableMode(VariableMode mode)
Definition: globals.h:715
const Register pc
@ DYNAMIC_GLOBAL
Definition: globals.h:689
@ DYNAMIC_LOCAL
Definition: globals.h:693
@ CONST_LEGACY
Definition: globals.h:671
Condition NegateCondition(Condition cond)
Definition: constants-arm.h:86
static LifetimePosition Max(LifetimePosition a, LifetimePosition b)
uint32_t RegList
Definition: frames.h:18
byte * Address
Definition: globals.h:101
NilValue
Definition: v8.h:97
@ kNullValue
Definition: v8.h:97
@ NOT_CONTEXTUAL
Definition: objects.h:174
const int kHeapObjectTag
Definition: v8.h:5737
const int kSmiShiftSize
Definition: v8.h:5805
const Register no_reg
flag (accessor macro expansion; surrounding text garbled)
Definition: objects-inl.h:5418
MemOperand GlobalObjectOperand()
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
const intptr_t kSmiTagMask
Definition: v8.h:5744
@ RECORD_CONSTRUCTOR_TARGET
Definition: globals.h:480
const int kSmiTag
Definition: v8.h:5742
@ NO_CALL_FUNCTION_FLAGS
Definition: globals.h:469
@ CALL_AS_METHOD
Definition: globals.h:470
@ STRING_INDEX_IS_NUMBER
Definition: code-stubs.h:1590
void CopyBytes(uint8_t *target, uint8_t *source)
bool IsFastObjectElementsKind(ElementsKind kind)
Debugger support for the V8 JavaScript engine.
Definition: accessors.cc:20
#define VOID
#define IN
PropertyAttributes
@ NONE
@ READ_ONLY
static Register from_code(int code)