full-codegen-arm.cc
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_ARM

#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug.h"
#include "src/full-codegen.h"
#include "src/ic/ic.h"
#include "src/isolate-inl.h"
#include "src/parser.h"
#include "src/scopes.h"

#include "src/arm/code-stubs-arm.h"
#include "src/arm/macro-assembler-arm.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)


// A patch site is a location in the code which it is possible to patch. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is a cmp rx, #yyy instruction, and x * 0x00000fff + yyy (raw 12 bit
// immediate value is used) is the delta from the pc to the first instruction
// of the patchable code.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this ensure that a jump is always generated to
  // skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(eq, target);  // Always taken before patched.
  }

  // When initially emitting this ensure that a jump is never generated to
  // skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(ne, target);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    // Block literal pool emission whilst recording patch site information.
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg;
      reg.set_code(delta_to_patch_site / kOff12Mask);
      __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
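
// A worked example of the marker encoding above (the constants are from this
// file; the concrete numbers are hypothetical): if EmitPatchInfo() runs 5000
// instructions after the patch site, then with kOff12Mask == 0xfff (4095) the
// emitted marker is cmp r1, #905, since 5000 == 1 * 4095 + 905. The patching
// code can recover the delta as reg.code() * kOff12Mask + immediate.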


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o r1: the JS function object being called (i.e., ourselves)
//   o cp: our context
//   o pp: our caller's constant pool pointer (if FLAG_enable_ool_constant_pool)
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-arm.h for its layout.
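//
// Informal sketch of that layout with the out-of-line constant pool disabled
// (see frames-arm.h for the authoritative constants; with
// FLAG_enable_ool_constant_pool an extra pp slot is also saved below fp):
//   fp + 8  : caller SP (receiver and arguments live above this)
//   fp + 4  : saved return address (lr)
//   fp + 0  : saved caller fp
//   fp - 4  : context (cp)
//   fp - 8  : JS function (r1)
//   fp - 12 : first stack local, growing downwards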
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);

  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (info->strict_mode() == SLOPPY && !info->is_native()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ ldr(r2, MemOperand(sp, receiver_offset));
    __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
    __ b(ne, &ok);

    __ ldr(r2, GlobalObjectOperand());
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalProxyOffset));

    __ str(r2, MemOperand(sp, receiver_offset));

    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!info->function()->is_generator() || locals_count == 0);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ sub(r9, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
        __ cmp(r9, Operand(r2));
        __ b(hs, &ok);
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ bind(&ok);
      }
      __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(r2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(r9);
        }
        // Continue loop if not done.
        __ sub(r2, r2, Operand(1), SetCC);
        __ b(&loop_header, ne);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(r9);
      }
    }
  }
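
  // Worked example (hypothetical numbers): with 70 stack locals and
  // kMaxPushes == 32 (the non-size-optimized value), the loop above runs
  // 70 / 32 == 2 iterations of 32 inline pushes each, and the tail emits the
  // remaining 70 % 32 == 6 pushes, initializing all 70 slots to undefined.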

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    // Argument to NewContext is the function, which is still in r1.
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ push(r1);
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), heap_slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ push(r1);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in r0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, r0);
    __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ldr(r0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ str(r0, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(
              cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, r0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mov(r3, r1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ add(r2, fp,
           Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ mov(r1, Operand(Smi::FromInt(num_parameters)));
    __ Push(r3, r2, r1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adaptor frame.
    ArgumentsAccessStub::Type type;
    if (strict_mode() == STRICT) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, r0, r1, r2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    VisitForEffect(scope()->GetIllegalRedeclaration());

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        DCHECK(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_LEGACY);
        DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ LoadRoot(ip, Heap::kStackLimitRootIndex);
      __ cmp(sp, Operand(ip));
      __ b(hs, &ok);
      Handle<Code> stack_check = isolate()->builtins()->StackCheck();
      PredictableCodeSizeScope predictable(masm_,
          masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
      __ Call(stack_check, RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(function()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emit the constant pool, so it doesn't get emitted in the middle
  // of the back edge table.
  masm()->CheckConstPool(true, false);
}


void FullCodeGenerator::ClearAccumulator() {
  __ mov(r0, Operand(Smi::FromInt(0)));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(r2, Operand(profiling_counter_));
  __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));
  __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}


#ifdef CAN_USE_ARMV7_INSTRUCTIONS
static const int kProfileCounterResetSequenceLength = 5 * Assembler::kInstrSize;
#else
static const int kProfileCounterResetSequenceLength = 7 * Assembler::kInstrSize;
#endif
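
// An informal example of the budget math: with Assembler::kInstrSize == 4,
// the ARMv7 sequence length of 5 instructions leaves an expected count of
// 5 - 2 == 3 instructions for loading profiling_counter_ (movw/movt plus
// padding nops), followed by the counter mov and str emitted in
// EmitProfilingCounterReset() below.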


void FullCodeGenerator::EmitProfilingCounterReset() {
  Assembler::BlockConstPoolScope block_const_pool(masm_);
  PredictableCodeSizeScope predictable_code_size_scope(
      masm_, kProfileCounterResetSequenceLength);
  Label start;
  __ bind(&start);
  int reset_value = FLAG_interrupt_budget;
  if (info_->is_debug()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ mov(r2, Operand(profiling_counter_));
  // The mov instruction above can be either 1 to 3 (for ARMv7) or 1 to 5
  // instructions (for ARMv6) depending upon whether it is an extended constant
  // pool - insert nop to compensate.
  int expected_instr_count =
      (kProfileCounterResetSequenceLength / Assembler::kInstrSize) - 2;
  DCHECK(masm_->InstructionsGeneratedSince(&start) <= expected_instr_count);
  while (masm_->InstructionsGeneratedSince(&start) != expected_instr_count) {
    __ nop();
  }
  __ mov(r3, Operand(Smi::FromInt(reset_value)));
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockConstPoolScope block_const_pool(masm_);
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ b(pl, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in r0.
      __ push(r0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ b(pl, &ok);
    __ push(r0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ pop(r0);
    EmitProfilingCounterReset();
    __ bind(&ok);

#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    __ bind(&check_exit_codesize);
#endif
    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
      // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
      PredictableCodeSizeScope predictable(masm_, -1);
      __ RecordJSReturn();
      int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT);
      { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
        __ add(sp, sp, Operand(sp_delta));
        __ Jump(lr);
        info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
      }
    }

#ifdef DEBUG
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    DCHECK(Assembler::kJSReturnSequenceInstructions <=
           masm_->InstructionsGeneratedSince(&check_exit_codesize));
#endif
  }
}


void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ mov(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}
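
// In other words, a literal in test position is resolved statically where
// possible: `if ("")` branches straight to the false label, `if ("abc")` and
// `if (42)` straight to the true label, and only literals whose truth value
// isn't known here (e.g. heap numbers) fall back to a dynamic DoTest().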


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ str(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ bind(&done);
  __ push(ip);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(ip, value_root_index);
  __ push(ip);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ tst(result_register(), result_register());
  Split(ne, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ b(cond, if_true);
  } else if (if_true == fall_through) {
    __ b(NegateCondition(cond), if_false);
  } else {
    __ b(cond, if_true);
    __ b(if_false);
  }
}
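
// For example, Split(eq, &t, &f, &f) emits a single `beq t` and lets control
// fall through to f; Split(eq, &t, &f, &t) emits only `bne f`; and when
// neither target is the fall-through, both a conditional and an
// unconditional branch are emitted.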


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}
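
// For instance (hypothetical numbers, 4-byte pointers): a stack local with
// index 2 yields offset -2 * 4 + kLocal0Offset, the third expression slot
// below the fixed part of the frame, while a parameter's offset instead
// lands above the saved caller SP.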


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ ldr(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kLRHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ b(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, NULL);
    __ bind(&skip);
  }
}

void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ str(ip, StackOperand(variable));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ str(ip, ContextOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ mov(r2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ mov(r1, Operand(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, r2, r1, r0);
      } else {
        __ mov(r0, Operand(Smi::FromInt(0)));  // Indicates no initial value.
        __ Push(cp, r2, r1, r0);
      }
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), StackOperand(variable));
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), ContextOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                r2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ mov(r2, Operand(variable->name()));
      __ mov(r1, Operand(Smi::FromInt(NONE)));
      __ Push(cp, r2, r1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  DCHECK(variable->location() == Variable::CONTEXT);
  DCHECK(variable->interface()->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(r1, scope_->ContextChainLength(scope_->GlobalScope()));
  __ ldr(r1, ContextOperand(r1, variable->interface()->Index()));
  __ ldr(r1, ContextOperand(r1, Context::EXTENSION_INDEX));

  // Assign it.
  __ str(r1, ContextOperand(cp, variable->index()));
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(cp,
                            Context::SlotOffset(variable->index()),
                            r1,
                            r3,
                            kLRHasBeenSaved,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  // The context is the first argument.
  __ mov(r1, Operand(pairs));
  __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(cp, r1, r0);
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ ldr(r1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ orr(r2, r1, r0);
      patch_site.EmitJumpIfNotSmi(r2, &slow_case);

      __ cmp(r1, r0);
      __ b(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ b(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ b(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    __ b(ne, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    __ cmp(r0, Operand::Zero());
    __ b(ne, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ b(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ b(nested_statement.break_label());
  } else {
    __ b(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
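
// Informal sketch of the emitted shape (not actual output): for
// `switch (x) { case 1: A; default: B; }` this produces a comparison chain
// `test x === 1 -> body_A | next_test -> default_B`, followed by the bodies
// A and B laid out in source order, so a matched case falls through
// subsequent bodies exactly as the source does.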


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  int slot = stmt->ForInFeedbackSlot();
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, ip);
  __ b(eq, &exit);
  Register null_value = r5;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmp(r0, null_value);
  __ b(eq, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(r0, &convert);
  __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
  __ b(ge, &done_convert);
  __ bind(&convert);
  __ push(r0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  __ push(r0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE);
  __ b(le, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ b(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(r0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ cmp(r2, ip);
  __ b(ne, &fixed_array);

  // We got a map in register r0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(r1, r0);
  __ cmp(r1, Operand(Smi::FromInt(0)));
  __ b(eq, &no_descriptors);

  __ LoadInstanceDescriptors(r0, r2);
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(r0);  // Map.
  __ mov(r0, Operand(Smi::FromInt(0)));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(r2, r1, r0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register r0. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  __ Move(r1, FeedbackVector());
  __ mov(r2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ str(r2, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(slot)));

  __ mov(r1, Operand(Smi::FromInt(1)));  // Smi indicates slow check
  __ ldr(r2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE);
  __ b(gt, &non_proxy);
  __ mov(r1, Operand(Smi::FromInt(0)));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ Push(r1, r0);  // Smi and array
  __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
  __ mov(r0, Operand(Smi::FromInt(0)));
  __ Push(r1, r0);  // Fixed array length (as smi) and initial index.

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  // Load the current count to r0, load the length to r1.
  __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
  __ cmp(r0, r1);  // Compare to the array length.
  __ b(hs, loop_statement.break_label());

  // Get the current entry of the array into register r3.
  __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
  __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r3, MemOperand::PointerAddressFromSmiKey(r2, r0));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register r2.
  __ ldr(r2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r4, Operand(r2));
  __ b(eq, &update_each);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  __ cmp(r2, Operand(Smi::FromInt(0)));
  __ b(eq, &update_each);

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ push(r1);  // Enumerable.
  __ push(r3);  // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ mov(r3, Operand(r0), SetCC);
  __ b(eq, loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register r3.
  __ bind(&update_each);
  __ mov(result_register(), r3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(r0);
  __ add(r0, r0, Operand(Smi::FromInt(1)));
  __ push(r0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ b(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}
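
// While the for-in loop above is running, the five stack slots it manages
// hold, from sp upwards: the current index (smi), the cache/array length
// (smi), the enum cache or fixed array of keys, the expected map (or a smi
// flag in the slow/proxy case), and the enumerable object itself -- matching
// the MemOperand(sp, N * kPointerSize) loads in the loop and the final
// Drop(5).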


void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");
  SetStatementPosition(stmt);

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // var iterator = iterable[Symbol.iterator]();
  VisitForEffect(stmt->assign_iterator());

  // Loop entry.
  __ bind(loop_statement.continue_label());

  // result = iterator.next()
  VisitForEffect(stmt->next_result());

  // if (result.done) break;
  Label result_not_done;
  VisitForControl(stmt->result_done(),
                  loop_statement.break_label(),
                  &result_not_done,
                  &result_not_done);
  __ bind(&result_not_done);

  // each = result.value
  VisitForEffect(stmt->assign_each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ jmp(loop_statement.continue_label());

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->strict_mode(), info->kind());
    __ mov(r2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ mov(r0, Operand(info));
    __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ Push(cp, r0, r1);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(r0);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}


void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
  Comment cmnt(masm_, "[ SuperReference ");

  __ ldr(LoadDescriptor::ReceiverRegister(),
         MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));

  Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
  __ Move(LoadDescriptor::NameRegister(), home_object_symbol);

  CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());

  __ cmp(r0, Operand(isolate()->factory()->undefined_value()));
  Label done;
  __ b(ne, &done);
  __ CallRuntime(Runtime::kThrowNonMethodError, 0);
  __ bind(&done);
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register current = cp;
  Register next = r1;
  Register temp = r2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
        __ tst(temp, temp);
        __ b(ne, slow);
      }
      // Load next context in chain.
      __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    __ cmp(temp, ip);
    __ b(eq, &fast);
    // Check that extension is NULL.
    __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
    __ tst(temp, temp);
    __ b(ne, slow);
    // Load next context in chain.
    __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX));
    __ b(&loop);
    __ bind(&fast);
  }

  __ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
  __ mov(LoadDescriptor::NameRegister(), Operand(proxy->var()->name()));
  if (FLAG_vector_ics) {
    __ mov(VectorLoadICDescriptor::SlotRegister(),
           Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
  }

  ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
      ? NOT_CONTEXTUAL
      : CONTEXTUAL;
  CallLoadIC(mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = r3;
  Register temp = r4;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
        __ tst(temp, temp);
        __ b(ne, slow);
      }
      __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  __ tst(temp, temp);
  __ b(ne, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
      } else {  // LET || CONST
        __ b(ne, done);
        __ mov(r0, Operand(var->name()));
        __ push(r0);
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ jmp(done);
  }
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      __ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
      __ mov(LoadDescriptor::NameRegister(), Operand(var->name()));
      if (FLAG_vector_ics) {
        __ mov(VectorLoadICDescriptor::SlotRegister(),
               Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
      }
      CallLoadIC(CONTEXTUAL);
      context()->Plug(r0);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        DCHECK(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The
        // check can be skipped in the following situation: we have a LET or
        // CONST binding in harmony mode, both the Variable and the
        // VariableProxy have the same declaration scope (i.e. they are both
        // in global code, in the same function or in the same eval code) and
        // the VariableProxy is in the source physically located after the
        // initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
          DCHECK(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          GetVar(r0, var);
          __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            Label done;
            __ b(ne, &done);
            __ mov(r0, Operand(var->name()));
            __ push(r0);
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
            __ bind(&done);
          } else {
            // Uninitialized const bindings outside of harmony mode are
            // unholed.
            DCHECK(var->mode() == CONST_LEGACY);
            __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
          }
          context()->Plug(r0);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      __ mov(r1, Operand(var->name()));
      __ Push(cp, r1);  // Context and name.
      __ CallRuntime(Runtime::kLoadLookupSlot, 2);
      __ bind(&done);
      context()->Plug(r0);
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // r5 = materialized value (RegExp literal)
  // r4 = JS function, literals array
  // r3 = literal index
  // r2 = RegExp pattern
  // r1 = RegExp flags
  // r0 = RegExp literal clone
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ ldr(r5, FieldMemOperand(r4, literal_offset));
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r5, ip);
  __ b(ne, &materialized);

  // Create regexp literal using runtime function.
  // Result will be in r0.
  __ mov(r3, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r2, Operand(expr->pattern()));
  __ mov(r1, Operand(expr->flags()));
  __ Push(r4, r3, r2, r1);
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mov(r5, r0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ mov(r0, Operand(Smi::FromInt(size)));
  __ Push(r5, r0);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(r5);

  __ bind(&allocated);
  // After this, registers are used as follows:
  // r0: Newly allocated regexp.
  // r5: Materialized regexp.
  // r2: temp.
  __ CopyFields(r0, r5, d0, size / kPointerSize);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(r1, Heap::kNullValueRootIndex);
    __ push(r1);
  } else {
    VisitForStackValue(expression);
  }
}


1668 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1669  Comment cmnt(masm_, "[ ObjectLiteral");
1670 
1671  expr->BuildConstantProperties(isolate());
1672  Handle<FixedArray> constant_properties = expr->constant_properties();
1675  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1676  __ mov(r1, Operand(constant_properties));
1677  int flags = expr->fast_elements()
1678  ? ObjectLiteral::kFastElements
1679  : ObjectLiteral::kNoFlags;
1680  flags |= expr->has_function()
1681  ? ObjectLiteral::kHasFunction
1682  : ObjectLiteral::kNoFlags;
1683  __ mov(r0, Operand(Smi::FromInt(flags)));
1684  int properties_count = constant_properties->length() / 2;
1685  if (expr->may_store_doubles() || expr->depth() > 1 ||
1686  masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements ||
1688  __ Push(r3, r2, r1, r0);
1689  __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1690  } else {
1691  FastCloneShallowObjectStub stub(isolate(), properties_count);
1692  __ CallStub(&stub);
1693  }
1694 
1695  // If result_saved is true the result is on top of the stack. If
1696  // result_saved is false the result is in r0.
1697  bool result_saved = false;
1698 
1699  // Mark all computed expressions that are bound to a key that
1700  // is shadowed by a later occurrence of the same key. For the
1701  // marked expressions, no store code is emitted.
1702  expr->CalculateEmitStore(zone());
1703 
1704  AccessorTable accessor_table(zone());
1705  for (int i = 0; i < expr->properties()->length(); i++) {
1706  ObjectLiteral::Property* property = expr->properties()->at(i);
1707  if (property->IsCompileTimeValue()) continue;
1708 
1709  Literal* key = property->key();
1710  Expression* value = property->value();
1711  if (!result_saved) {
1712  __ push(r0); // Save result on stack
1713  result_saved = true;
1714  }
1715  switch (property->kind()) {
1717  UNREACHABLE();
1718  case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1719  DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1720  // Fall through.
1721  case ObjectLiteral::Property::COMPUTED:
1722  if (key->value()->IsInternalizedString()) {
1723  if (property->emit_store()) {
1724  VisitForAccumulatorValue(value);
1726  __ mov(StoreDescriptor::NameRegister(), Operand(key->value()));
1728  CallStoreIC(key->LiteralFeedbackId());
1730  } else {
1731  VisitForEffect(value);
1732  }
1733  break;
1734  }
1735  // Duplicate receiver on stack.
1736  __ ldr(r0, MemOperand(sp));
1737  __ push(r0);
1738  VisitForStackValue(key);
1739  VisitForStackValue(value);
1740  if (property->emit_store()) {
1741  __ mov(r0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes
1742  __ push(r0);
1743  __ CallRuntime(Runtime::kSetProperty, 4);
1744  } else {
1745  __ Drop(3);
1746  }
1747  break;
1748  case ObjectLiteral::Property::PROTOTYPE:
1749  // Duplicate receiver on stack.
1750  __ ldr(r0, MemOperand(sp));
1751  __ push(r0);
1752  VisitForStackValue(value);
1753  if (property->emit_store()) {
1754  __ CallRuntime(Runtime::kSetPrototype, 2);
1755  } else {
1756  __ Drop(2);
1757  }
1758  break;
1759 
1760  case ObjectLiteral::Property::GETTER:
1761  accessor_table.lookup(key)->second->getter = value;
1762  break;
1763  case ObjectLiteral::Property::SETTER:
1764  accessor_table.lookup(key)->second->setter = value;
1765  break;
1766  }
1767  }
1768 
1769  // Emit code to define accessors, using only a single call to the runtime for
1770  // each pair of corresponding getters and setters.
1771  for (AccessorTable::Iterator it = accessor_table.begin();
1772  it != accessor_table.end();
1773  ++it) {
1774  __ ldr(r0, MemOperand(sp)); // Duplicate receiver.
1775  __ push(r0);
1776  VisitForStackValue(it->first);
1777  EmitAccessor(it->second->getter);
1778  EmitAccessor(it->second->setter);
1779  __ mov(r0, Operand(Smi::FromInt(NONE)));
1780  __ push(r0);
1781  __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1782  }
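
 // For example, in
 //   var o = { get x() { return 1; }, set x(v) { } };
 // both accessors land in the same accessor_table entry, so the getter and
 // setter are defined together with a single
 // kDefineAccessorPropertyUnchecked call.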
1783 
1784  if (expr->has_function()) {
1785  DCHECK(result_saved);
1786  __ ldr(r0, MemOperand(sp));
1787  __ push(r0);
1788  __ CallRuntime(Runtime::kToFastProperties, 1);
1789  }
1790 
1791  if (result_saved) {
1792  context()->PlugTOS();
1793  } else {
1794  context()->Plug(r0);
1795  }
1796 }
1797 
1798 
1799 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1800  Comment cmnt(masm_, "[ ArrayLiteral");
1801 
1802  expr->BuildConstantElements(isolate());
1803  int flags = expr->depth() == 1
1804  ? ArrayLiteral::kShallowElements
1805  : ArrayLiteral::kNoFlags;
1806 
1807  ZoneList<Expression*>* subexprs = expr->values();
1808  int length = subexprs->length();
1809  Handle<FixedArray> constant_elements = expr->constant_elements();
1810  DCHECK_EQ(2, constant_elements->length());
1811  ElementsKind constant_elements_kind =
1812  static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1813  bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind);
1814  Handle<FixedArrayBase> constant_elements_values(
1815  FixedArrayBase::cast(constant_elements->get(1)));
1816 
1817  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1818  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1819  // If the only customer of allocation sites is transitioning, then
1820  // we can turn it off if we don't have anywhere else to transition to.
1821  allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1822  }
1823 
1824  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1825  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
1826  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1827  __ mov(r1, Operand(constant_elements));
1828  if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
1829  __ mov(r0, Operand(Smi::FromInt(flags)));
1830  __ Push(r3, r2, r1, r0);
1831  __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1832  } else {
1833  FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1834  __ CallStub(&stub);
1835  }
1836 
1837  bool result_saved = false; // Is the result saved to the stack?
1838 
1839  // Emit code to evaluate all the non-constant subexpressions and to store
1840  // them into the newly cloned array.
1841  for (int i = 0; i < length; i++) {
1842  Expression* subexpr = subexprs->at(i);
1843  // If the subexpression is a literal or a simple materialized literal it
1844  // is already set in the cloned array.
1845  if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1846 
1847  if (!result_saved) {
1848  __ push(r0);
1849  __ Push(Smi::FromInt(expr->literal_index()));
1850  result_saved = true;
1851  }
1852  VisitForAccumulatorValue(subexpr);
1853 
1854  if (IsFastObjectElementsKind(constant_elements_kind)) {
1855  int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1856  __ ldr(r6, MemOperand(sp, kPointerSize)); // Copy of array literal.
1857  __ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset));
1858  __ str(result_register(), FieldMemOperand(r1, offset));
1859  // Update the write barrier for the array store.
1860  __ RecordWriteField(r1, offset, result_register(), r2,
1861  kLRHasBeenSaved, kDontSaveFPRegs,
1862  EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1863  } else {
1864  __ mov(r3, Operand(Smi::FromInt(i)));
1865  StoreArrayLiteralElementStub stub(isolate());
1866  __ CallStub(&stub);
1867  }
1868 
1869  PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1870  }
1871 
1872  if (result_saved) {
1873  __ pop(); // literal index
1874  context()->PlugTOS();
1875  } else {
1876  context()->Plug(r0);
1877  }
1878 }
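
 // For fast-elements arrays each non-constant element is stored straight
 // into the cloned backing store; e.g. in
 //   var a = [1, f(), 3];
 // only f() is evaluated and written here, and RecordWriteField keeps the
 // write barrier informed about the new pointer.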
1879 
1880 
1881 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1882  DCHECK(expr->target()->IsValidReferenceExpression());
1883 
1884  Comment cmnt(masm_, "[ Assignment");
1885 
1886  // Left-hand side can only be a property, a global or a (parameter or local)
1887  // slot.
1888  enum LhsKind {
1889  VARIABLE,
1890  NAMED_PROPERTY,
1891  KEYED_PROPERTY,
1892  NAMED_SUPER_PROPERTY
1893  };
1894  LhsKind assign_type = VARIABLE;
1895  Property* property = expr->target()->AsProperty();
1896  if (property != NULL) {
1897  assign_type = (property->key()->IsPropertyName())
1898  ? (property->IsSuperAccess() ? NAMED_SUPER_PROPERTY
1899  : NAMED_PROPERTY)
1900  : KEYED_PROPERTY;
1901  }
1902 
1903  // Evaluate LHS expression.
1904  switch (assign_type) {
1905  case VARIABLE:
1906  // Nothing to do here.
1907  break;
1908  case NAMED_PROPERTY:
1909  if (expr->is_compound()) {
1910  // We need the receiver both on the stack and in the register.
1911  VisitForStackValue(property->obj());
1912  __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1913  } else {
1914  VisitForStackValue(property->obj());
1915  }
1916  break;
1917  case NAMED_SUPER_PROPERTY:
1918  VisitForStackValue(property->obj()->AsSuperReference()->this_var());
1919  EmitLoadHomeObject(property->obj()->AsSuperReference());
1920  __ Push(result_register());
1921  if (expr->is_compound()) {
1922  const Register scratch = r1;
1923  __ ldr(scratch, MemOperand(sp, kPointerSize));
1924  __ Push(scratch);
1925  __ Push(result_register());
1926  }
1927  break;
1928  case KEYED_PROPERTY:
1929  if (expr->is_compound()) {
1930  VisitForStackValue(property->obj());
1931  VisitForStackValue(property->key());
1932  __ ldr(LoadDescriptor::ReceiverRegister(),
1933  MemOperand(sp, 1 * kPointerSize));
1934  __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1935  } else {
1936  VisitForStackValue(property->obj());
1937  VisitForStackValue(property->key());
1938  }
1939  break;
1940  }
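
 // E.g. for the compound assignment o.x += 1 the receiver o must survive
 // the load of o.x, which is why it is kept on the stack as well as placed
 // in the load IC's receiver register above.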
1941 
1942  // For compound assignments we need another deoptimization point after the
1943  // variable/property load.
1944  if (expr->is_compound()) {
1945  { AccumulatorValueContext context(this);
1946  switch (assign_type) {
1947  case VARIABLE:
1948  EmitVariableLoad(expr->target()->AsVariableProxy());
1949  PrepareForBailout(expr->target(), TOS_REG);
1950  break;
1951  case NAMED_PROPERTY:
1952  EmitNamedPropertyLoad(property);
1953  PrepareForBailoutForId(property->LoadId(), TOS_REG);
1954  break;
1955  case NAMED_SUPER_PROPERTY:
1956  EmitNamedSuperPropertyLoad(property);
1957  PrepareForBailoutForId(property->LoadId(), TOS_REG);
1958  break;
1959  case KEYED_PROPERTY:
1960  EmitKeyedPropertyLoad(property);
1961  PrepareForBailoutForId(property->LoadId(), TOS_REG);
1962  break;
1963  }
1964  }
1965 
1966  Token::Value op = expr->binary_op();
1967  __ push(r0); // Left operand goes on the stack.
1968  VisitForAccumulatorValue(expr->value());
1969 
1970  OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1971  ? OVERWRITE_RIGHT
1972  : NO_OVERWRITE;
1973  SetSourcePosition(expr->position() + 1);
1974  AccumulatorValueContext context(this);
1975  if (ShouldInlineSmiCase(op)) {
1976  EmitInlineSmiBinaryOp(expr->binary_operation(),
1977  op,
1978  mode,
1979  expr->target(),
1980  expr->value());
1981  } else {
1982  EmitBinaryOp(expr->binary_operation(), op, mode);
1983  }
1984 
1985  // Deoptimization point in case the binary operation may have side effects.
1986  PrepareForBailout(expr->binary_operation(), TOS_REG);
1987  } else {
1988  VisitForAccumulatorValue(expr->value());
1989  }
1990 
1991  // Record source position before possible IC call.
1992  SetSourcePosition(expr->position());
1993 
1994  // Store the value.
1995  switch (assign_type) {
1996  case VARIABLE:
1997  EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1998  expr->op());
1999  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2000  context()->Plug(r0);
2001  break;
2002  case NAMED_PROPERTY:
2003  EmitNamedPropertyAssignment(expr);
2004  break;
2005  case NAMED_SUPER_PROPERTY:
2006  EmitNamedSuperPropertyAssignment(expr);
2007  break;
2008  case KEYED_PROPERTY:
2009  EmitKeyedPropertyAssignment(expr);
2010  break;
2011  }
2012 }
2013 
2014 
2015 void FullCodeGenerator::VisitYield(Yield* expr) {
2016  Comment cmnt(masm_, "[ Yield");
2017  // Evaluate yielded value first; the initial iterator definition depends on
2018  // this. It stays on the stack while we update the iterator.
2019  VisitForStackValue(expr->expression());
2020 
2021  switch (expr->yield_kind()) {
2022  case Yield::kSuspend:
2023  // Pop value from top-of-stack slot; box result into result register.
2024  EmitCreateIteratorResult(false);
2025  __ push(result_register());
2026  // Fall through.
2027  case Yield::kInitial: {
2028  Label suspend, continuation, post_runtime, resume;
2029 
2030  __ jmp(&suspend);
2031 
2032  __ bind(&continuation);
2033  __ jmp(&resume);
2034 
2035  __ bind(&suspend);
2036  VisitForAccumulatorValue(expr->generator_object());
2037  DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2038  __ mov(r1, Operand(Smi::FromInt(continuation.pos())));
2039  __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
2040  __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
2041  __ mov(r1, cp);
2042  __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
2043  kLRHasBeenSaved, kDontSaveFPRegs);
2044  __ add(r1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2045  __ cmp(sp, r1);
2046  __ b(eq, &post_runtime);
2047  __ push(r0); // generator object
2048  __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2049  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2050  __ bind(&post_runtime);
2051  __ pop(result_register());
2052  EmitReturnSequence();
2053 
2054  __ bind(&resume);
2055  context()->Plug(result_register());
2056  break;
2057  }
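
 // In effect the generator object records the resume address (the code
 // offset of &continuation) and the current context before control leaves
 // the frame; a later resume jumps back through &continuation to &resume
 // with the sent value in the result register.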
2058 
2059  case Yield::kFinal: {
2060  VisitForAccumulatorValue(expr->generator_object());
2061  __ mov(r1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2062  __ str(r1, FieldMemOperand(result_register(),
2063  JSGeneratorObject::kContinuationOffset));
2064  // Pop value from top-of-stack slot, box result into result register.
2065  EmitCreateIteratorResult(true);
2066  EmitUnwindBeforeReturn();
2067  EmitReturnSequence();
2068  break;
2069  }
2070 
2071  case Yield::kDelegating: {
2072  VisitForStackValue(expr->generator_object());
2073 
2074  // Initial stack layout is as follows:
2075  // [sp + 1 * kPointerSize] iter
2076  // [sp + 0 * kPointerSize] g
2077 
2078  Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2079  Label l_next, l_call, l_loop;
2080  Register load_receiver = LoadDescriptor::ReceiverRegister();
2081  Register load_name = LoadDescriptor::NameRegister();
2082 
2083  // Initial send value is undefined.
2084  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2085  __ b(&l_next);
2086 
2087  // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2088  __ bind(&l_catch);
2089  handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2090  __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
2091  __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
2092  __ Push(load_name, r3, r0); // "throw", iter, except
2093  __ jmp(&l_call);
2094 
2095  // try { received = %yield result }
2096  // Shuffle the received result above a try handler and yield it without
2097  // re-boxing.
2098  __ bind(&l_try);
2099  __ pop(r0); // result
2100  __ PushTryHandler(StackHandler::CATCH, expr->index());
2101  const int handler_size = StackHandlerConstants::kSize;
2102  __ push(r0); // result
2103  __ jmp(&l_suspend);
2104  __ bind(&l_continuation);
2105  __ jmp(&l_resume);
2106  __ bind(&l_suspend);
2107  const int generator_object_depth = kPointerSize + handler_size;
2108  __ ldr(r0, MemOperand(sp, generator_object_depth));
2109  __ push(r0); // g
2110  DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2111  __ mov(r1, Operand(Smi::FromInt(l_continuation.pos())));
2112  __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
2113  __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
2114  __ mov(r1, cp);
2115  __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
2116  kLRHasBeenSaved, kDontSaveFPRegs);
2117  __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2118  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2119  __ pop(r0); // result
2120  EmitReturnSequence();
2121  __ bind(&l_resume); // received in r0
2122  __ PopTryHandler();
2123 
2124  // receiver = iter; f = 'next'; arg = received;
2125  __ bind(&l_next);
2126 
2127  __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
2128  __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
2129  __ Push(load_name, r3, r0); // "next", iter, received
2130 
2131  // result = receiver[f](arg);
2132  __ bind(&l_call);
2133  __ ldr(load_receiver, MemOperand(sp, kPointerSize));
2134  __ ldr(load_name, MemOperand(sp, 2 * kPointerSize));
2135  if (FLAG_vector_ics) {
2136  __ mov(VectorLoadICDescriptor::SlotRegister(),
2137  Operand(Smi::FromInt(expr->KeyedLoadFeedbackSlot())));
2138  }
2139  Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2140  CallIC(ic, TypeFeedbackId::None());
2141  __ mov(r1, r0);
2142  __ str(r1, MemOperand(sp, 2 * kPointerSize));
2143  CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2144  __ CallStub(&stub);
2145 
2146  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2147  __ Drop(1); // The function is still on the stack; drop it.
2148 
2149  // if (!result.done) goto l_try;
2150  __ bind(&l_loop);
2151  __ Move(load_receiver, r0);
2152 
2153  __ push(load_receiver); // save result
2154  __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2155  if (FLAG_vector_ics) {
2156  __ mov(VectorLoadICDescriptor::SlotRegister(),
2157  Operand(Smi::FromInt(expr->DoneFeedbackSlot())));
2158  }
2159  CallLoadIC(NOT_CONTEXTUAL); // r0=result.done
2160  Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2161  CallIC(bool_ic);
2162  __ cmp(r0, Operand(0));
2163  __ b(eq, &l_try);
2164 
2165  // result.value
2166  __ pop(load_receiver); // result
2167  __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2168  if (FLAG_vector_ics) {
2169  __ mov(VectorLoadICDescriptor::SlotRegister(),
2170  Operand(Smi::FromInt(expr->ValueFeedbackSlot())));
2171  }
2172  CallLoadIC(NOT_CONTEXTUAL); // r0=result.value
2173  context()->DropAndPlug(2, r0); // drop iter and g
2174  break;
2175  }
2176  }
2177 }
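
 // The delegating case above is, roughly, the desugaring of
 //   yield* iter
 // into:
 //   var received = undefined;
 //   do {
 //     var result = iter[f](received);  // f is "next", or "throw" on catch
 //     received = yield result;         // re-yielded without re-boxing
 //   } while (!result.done);
 //   result.value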
2178 
2179 
2180 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2181  Expression *value,
2182  JSGeneratorObject::ResumeMode resume_mode) {
2183  // The value stays in r0, and is ultimately read by the resumed generator, as
2184  // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2185  // is read to throw the value when the resumed generator is already closed.
2186  // r1 will hold the generator object until the activation has been resumed.
2187  VisitForStackValue(generator);
2188  VisitForAccumulatorValue(value);
2189  __ pop(r1);
2190 
2191  // Check generator state.
2192  Label wrong_state, closed_state, done;
2193  __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2194  STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
2195  STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
2196  __ cmp(r3, Operand(Smi::FromInt(0)));
2197  __ b(eq, &closed_state);
2198  __ b(lt, &wrong_state);
2199 
2200  // Load suspended function and context.
2201  __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset));
2202  __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
2203 
2204  // Load receiver and store as the first argument.
2205  __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset));
2206  __ push(r2);
2207 
2208  // Push holes for the rest of the arguments to the generator function.
2209  __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
2210  __ ldr(r3,
2211  FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
2212  __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
2213  Label push_argument_holes, push_frame;
2214  __ bind(&push_argument_holes);
2215  __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
2216  __ b(mi, &push_frame);
2217  __ push(r2);
2218  __ jmp(&push_argument_holes);
2219 
2220  // Enter a new JavaScript frame, and initialize its slots as they were when
2221  // the generator was suspended.
2222  Label resume_frame;
2223  __ bind(&push_frame);
2224  __ bl(&resume_frame);
2225  __ jmp(&done);
2226  __ bind(&resume_frame);
2227  // lr = return address.
2228  // fp = caller's frame pointer.
2229  // pp = caller's constant pool (if FLAG_enable_ool_constant_pool),
2230  // cp = callee's context,
2231  // r4 = callee's JS function.
2232  __ PushFixedFrame(r4);
2233  // Adjust FP to point to saved FP.
2234  __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
2235 
2236  // Load the operand stack size.
2237  __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
2238  __ ldr(r3, FieldMemOperand(r3, FixedArray::kLengthOffset));
2239  __ SmiUntag(r3);
2240 
2241  // If we are sending a value and there is no operand stack, we can jump back
2242  // in directly.
2243  if (resume_mode == JSGeneratorObject::NEXT) {
2244  Label slow_resume;
2245  __ cmp(r3, Operand(0));
2246  __ b(ne, &slow_resume);
2247  __ ldr(r3, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
2248 
2249  { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
2250  if (FLAG_enable_ool_constant_pool) {
2251  // Load the new code object's constant pool pointer.
2252  __ ldr(pp,
2253  MemOperand(r3, Code::kConstantPoolOffset - Code::kHeaderSize));
2254  }
2255 
2256  __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2257  __ SmiUntag(r2);
2258  __ add(r3, r3, r2);
2259  __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2260  __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2261  __ Jump(r3);
2262  }
2263  __ bind(&slow_resume);
2264  }
2265 
2266  // Otherwise, we push holes for the operand stack and call the runtime to fix
2267  // up the stack and the handlers.
2268  Label push_operand_holes, call_resume;
2269  __ bind(&push_operand_holes);
2270  __ sub(r3, r3, Operand(1), SetCC);
2271  __ b(mi, &call_resume);
2272  __ push(r2);
2273  __ b(&push_operand_holes);
2274  __ bind(&call_resume);
2275  DCHECK(!result_register().is(r1));
2276  __ Push(r1, result_register());
2277  __ Push(Smi::FromInt(resume_mode));
2278  __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2279  // Not reached: the runtime call returns elsewhere.
2280  __ stop("not-reached");
2281 
2282  // Reach here when generator is closed.
2283  __ bind(&closed_state);
2284  if (resume_mode == JSGeneratorObject::NEXT) {
2285  // Return completed iterator result when generator is closed.
2286  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
2287  __ push(r2);
2288  // Pop value from top-of-stack slot; box result into result register.
2289  EmitCreateIteratorResult(true);
2290  } else {
2291  // Throw the provided value.
2292  __ push(r0);
2293  __ CallRuntime(Runtime::kThrow, 1);
2294  }
2295  __ jmp(&done);
2296 
2297  // Throw error if we attempt to operate on a running generator.
2298  __ bind(&wrong_state);
2299  __ push(r1);
2300  __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
2301 
2302  __ bind(&done);
2303  context()->Plug(result_register());
2304 }
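
 // Resuming rebuilds the suspended activation by hand: the receiver and
 // hole-filled argument slots are pushed, a fixed frame is entered via
 // bl(&resume_frame), and if the operand stack is empty the code jumps
 // directly to the recorded continuation offset; otherwise the runtime
 // restores operands and handlers first.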
2305 
2306 
2307 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2308  Label gc_required;
2309  Label allocated;
2310 
2311  Handle<Map> map(isolate()->native_context()->iterator_result_map());
2312 
2313  __ Allocate(map->instance_size(), r0, r2, r3, &gc_required, TAG_OBJECT);
2314  __ jmp(&allocated);
2315 
2316  __ bind(&gc_required);
2317  __ Push(Smi::FromInt(map->instance_size()));
2318  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2319  __ ldr(context_register(),
2320  MemOperand(fp, StandardFrameConstants::kContextOffset));
2321 
2322  __ bind(&allocated);
2323  __ mov(r1, Operand(map));
2324  __ pop(r2);
2325  __ mov(r3, Operand(isolate()->factory()->ToBoolean(done)));
2326  __ mov(r4, Operand(isolate()->factory()->empty_fixed_array()));
2327  DCHECK_EQ(map->instance_size(), 5 * kPointerSize);
2328  __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
2329  __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
2330  __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
2331  __ str(r2,
2332  FieldMemOperand(r0, JSGeneratorObject::kResultValuePropertyOffset));
2333  __ str(r3,
2334  FieldMemOperand(r0, JSGeneratorObject::kResultDonePropertyOffset));
2335 
2336  // Only the value field needs a write barrier, as the other values are in the
2337  // root set.
2338  __ RecordWriteField(r0, JSGeneratorObject::kResultValuePropertyOffset,
2339  r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
2340 }
2341 
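 // The object built above is an ordinary JSObject of the shape
 //   { value: <popped top-of-stack>, done: <true|false> }
 // using the native context's iterator result map, so callers see the
 // standard iterator protocol result.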
2342 
2343 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2344  SetSourcePosition(prop->position());
2345  Literal* key = prop->key()->AsLiteral();
2346  DCHECK(!prop->IsSuperAccess());
2347 
2348  __ mov(LoadDescriptor::NameRegister(), Operand(key->value()));
2349  if (FLAG_vector_ics) {
2350  __ mov(VectorLoadICDescriptor::SlotRegister(),
2351  Operand(Smi::FromInt(prop->PropertyFeedbackSlot())));
2352  CallLoadIC(NOT_CONTEXTUAL);
2353  } else {
2354  CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2355  }
2356 }
2357 
2358 
2359 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2360  // Stack: receiver, home_object.
2361  SetSourcePosition(prop->position());
2362  Literal* key = prop->key()->AsLiteral();
2363  DCHECK(!key->value()->IsSmi());
2364  DCHECK(prop->IsSuperAccess());
2365 
2366  __ Push(key->value());
2367  __ CallRuntime(Runtime::kLoadFromSuper, 3);
2368 }
2369 
2370 
2371 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2372  SetSourcePosition(prop->position());
2373  Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2374  if (FLAG_vector_ics) {
2375  __ mov(VectorLoadICDescriptor::SlotRegister(),
2376  Operand(Smi::FromInt(prop->PropertyFeedbackSlot())));
2377  CallIC(ic);
2378  } else {
2379  CallIC(ic, prop->PropertyFeedbackId());
2380  }
2381 }
2382 
2383 
2384 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2385  Token::Value op,
2386  OverwriteMode mode,
2387  Expression* left_expr,
2388  Expression* right_expr) {
2389  Label done, smi_case, stub_call;
2390 
2391  Register scratch1 = r2;
2392  Register scratch2 = r3;
2393 
2394  // Get the arguments.
2395  Register left = r1;
2396  Register right = r0;
2397  __ pop(left);
2398 
2399  // Perform combined smi check on both operands.
2400  __ orr(scratch1, left, Operand(right));
2401  STATIC_ASSERT(kSmiTag == 0);
2402  JumpPatchSite patch_site(masm_);
2403  patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2404 
2405  __ bind(&stub_call);
2406  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2407  CallIC(code, expr->BinaryOperationFeedbackId());
2408  patch_site.EmitPatchInfo();
2409  __ jmp(&done);
2410 
2411  __ bind(&smi_case);
2412  // Smi case. This code works the same way as the smi-smi case in the type
2413  // recording binary operation stub.
2414  switch (op) {
2415  case Token::SAR:
2416  __ GetLeastBitsFromSmi(scratch1, right, 5);
2417  __ mov(right, Operand(left, ASR, scratch1));
2418  __ bic(right, right, Operand(kSmiTagMask));
2419  break;
2420  case Token::SHL: {
2421  __ SmiUntag(scratch1, left);
2422  __ GetLeastBitsFromSmi(scratch2, right, 5);
2423  __ mov(scratch1, Operand(scratch1, LSL, scratch2));
2424  __ TrySmiTag(right, scratch1, &stub_call);
2425  break;
2426  }
2427  case Token::SHR: {
2428  __ SmiUntag(scratch1, left);
2429  __ GetLeastBitsFromSmi(scratch2, right, 5);
2430  __ mov(scratch1, Operand(scratch1, LSR, scratch2));
2431  __ tst(scratch1, Operand(0xc0000000));
2432  __ b(ne, &stub_call);
2433  __ SmiTag(right, scratch1);
2434  break;
2435  }
2436  case Token::ADD:
2437  __ add(scratch1, left, Operand(right), SetCC);
2438  __ b(vs, &stub_call);
2439  __ mov(right, scratch1);
2440  break;
2441  case Token::SUB:
2442  __ sub(scratch1, left, Operand(right), SetCC);
2443  __ b(vs, &stub_call);
2444  __ mov(right, scratch1);
2445  break;
2446  case Token::MUL: {
2447  __ SmiUntag(ip, right);
2448  __ smull(scratch1, scratch2, left, ip);
2449  __ mov(ip, Operand(scratch1, ASR, 31));
2450  __ cmp(ip, Operand(scratch2));
2451  __ b(ne, &stub_call);
2452  __ cmp(scratch1, Operand::Zero());
2453  __ mov(right, Operand(scratch1), LeaveCC, ne);
2454  __ b(ne, &done);
2455  __ add(scratch2, right, Operand(left), SetCC);
2456  __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
2457  __ b(mi, &stub_call);
2458  break;
2459  }
2460  case Token::BIT_OR:
2461  __ orr(right, left, Operand(right));
2462  break;
2463  case Token::BIT_AND:
2464  __ and_(right, left, Operand(right));
2465  break;
2466  case Token::BIT_XOR:
2467  __ eor(right, left, Operand(right));
2468  break;
2469  default:
2470  UNREACHABLE();
2471  }
2472 
2473  __ bind(&done);
2474  context()->Plug(r0);
2475 }
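
 // Worked example for the smi fast path: smis are stored shifted left by
 // one (kSmiTagSize == 1, tag 0), so for ADD
 //   (a << 1) + (b << 1) == (a + b) << 1
 // and a single add with an overflow check suffices. For MUL one operand
 // is untagged first, since (a << 1) * b == (a * b) << 1; the smull
 // high/low-word comparison catches 32-bit overflow, and the final sign
 // check routes a -0 result (not representable as a smi) to the stub.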
2476 
2477 
2478 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2479  Token::Value op,
2480  OverwriteMode mode) {
2481  __ pop(r1);
2482  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2483  JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2484  CallIC(code, expr->BinaryOperationFeedbackId());
2485  patch_site.EmitPatchInfo();
2486  context()->Plug(r0);
2487 }
2488 
2489 
2490 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2491  DCHECK(expr->IsValidReferenceExpression());
2492 
2493  // Left-hand side can only be a property, a global or a (parameter or local)
2494  // slot.
2495  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2496  LhsKind assign_type = VARIABLE;
2497  Property* prop = expr->AsProperty();
2498  if (prop != NULL) {
2499  assign_type = (prop->key()->IsPropertyName())
2500  ? NAMED_PROPERTY
2501  : KEYED_PROPERTY;
2502  }
2503 
2504  switch (assign_type) {
2505  case VARIABLE: {
2506  Variable* var = expr->AsVariableProxy()->var();
2507  EffectContext context(this);
2508  EmitVariableAssignment(var, Token::ASSIGN);
2509  break;
2510  }
2511  case NAMED_PROPERTY: {
2512  __ push(r0); // Preserve value.
2513  VisitForAccumulatorValue(prop->obj());
2514  __ Move(StoreDescriptor::ReceiverRegister(), r0);
2515  __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2516  __ mov(StoreDescriptor::NameRegister(),
2517  Operand(prop->key()->AsLiteral()->value()));
2518  CallStoreIC();
2519  break;
2520  }
2521  case KEYED_PROPERTY: {
2522  __ push(r0); // Preserve value.
2523  VisitForStackValue(prop->obj());
2524  VisitForAccumulatorValue(prop->key());
2525  __ Move(StoreDescriptor::NameRegister(), r0);
2526  __ Pop(StoreDescriptor::ValueRegister(),
2527  StoreDescriptor::ReceiverRegister());
2528  Handle<Code> ic =
2529  CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2530  CallIC(ic);
2531  break;
2532  }
2533  }
2534  context()->Plug(r0);
2535 }
2536 
2537 
2538 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2539  Variable* var, MemOperand location) {
2540  __ str(result_register(), location);
2541  if (var->IsContextSlot()) {
2542  // RecordWrite may destroy all its register arguments.
2543  __ mov(r3, result_register());
2544  int offset = Context::SlotOffset(var->index());
2545  __ RecordWriteContextSlot(
2546  r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
2547  }
2548 }
2549 
2550 
2551 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
2552  if (var->IsUnallocated()) {
2553  // Global var, const, or let.
2554  __ mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2555  __ ldr(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2556  CallStoreIC();
2557 
2558  } else if (op == Token::INIT_CONST_LEGACY) {
2559  // Const initializers need a write barrier.
2560  DCHECK(!var->IsParameter()); // No const parameters.
2561  if (var->IsLookupSlot()) {
2562  __ push(r0);
2563  __ mov(r0, Operand(var->name()));
2564  __ Push(cp, r0); // Context and name.
2565  __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2566  } else {
2567  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2568  Label skip;
2569  MemOperand location = VarOperand(var, r1);
2570  __ ldr(r2, location);
2571  __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2572  __ b(ne, &skip);
2573  EmitStoreToStackLocalOrContextSlot(var, location);
2574  __ bind(&skip);
2575  }
2576 
2577  } else if (var->mode() == LET && op != Token::INIT_LET) {
2578  // Non-initializing assignment to let variable needs a write barrier.
2579  DCHECK(!var->IsLookupSlot());
2580  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2581  Label assign;
2582  MemOperand location = VarOperand(var, r1);
2583  __ ldr(r3, location);
2584  __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2585  __ b(ne, &assign);
2586  __ mov(r3, Operand(var->name()));
2587  __ push(r3);
2588  __ CallRuntime(Runtime::kThrowReferenceError, 1);
2589  // Perform the assignment.
2590  __ bind(&assign);
2591  EmitStoreToStackLocalOrContextSlot(var, location);
2592 
2593  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2594  if (var->IsLookupSlot()) {
2595  // Assignment to var.
2596  __ push(r0); // Value.
2597  __ mov(r1, Operand(var->name()));
2598  __ mov(r0, Operand(Smi::FromInt(strict_mode())));
2599  __ Push(cp, r1, r0); // Context, name, strict mode.
2600  __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2601  } else {
2602  // Assignment to var or initializing assignment to let/const in harmony
2603  // mode.
2604  DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2605  MemOperand location = VarOperand(var, r1);
2606  if (generate_debug_code_ && op == Token::INIT_LET) {
2607  // Check for an uninitialized let binding.
2608  __ ldr(r2, location);
2609  __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2610  __ Check(eq, kLetBindingReInitialization);
2611  }
2612  EmitStoreToStackLocalOrContextSlot(var, location);
2613  }
2614  }
2615  // Non-initializing assignments to consts are ignored.
2616 }
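
 // For example,
 //   { x = 1; let x; }      // assignment before initialization
 // hits the LET path above while the slot still holds the hole value, so
 // kThrowReferenceError fires; and in sloppy mode
 //   const y = 1; y = 2;    // non-initializing const store
 // is silently ignored, as the comment above notes.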
2617 
2618 
2619 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2620  // Assignment to a property, using a named store IC.
2621  Property* prop = expr->target()->AsProperty();
2622  DCHECK(prop != NULL);
2623  DCHECK(prop->key()->IsLiteral());
2624 
2625  // Record source code position before IC call.
2626  SetSourcePosition(expr->position());
2627  __ mov(StoreDescriptor::NameRegister(),
2628  Operand(prop->key()->AsLiteral()->value()));
2629  __ pop(StoreDescriptor::ReceiverRegister());
2630  CallStoreIC(expr->AssignmentFeedbackId());
2631 
2632  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2633  context()->Plug(r0);
2634 }
2635 
2636 
2637 void FullCodeGenerator::EmitNamedSuperPropertyAssignment(Assignment* expr) {
2638  // Assignment to named property of super.
2639  // r0 : value
2640  // stack : receiver ('this'), home_object
2641  Property* prop = expr->target()->AsProperty();
2642  DCHECK(prop != NULL);
2643  Literal* key = prop->key()->AsLiteral();
2644  DCHECK(key != NULL);
2645 
2646  __ Push(r0);
2647  __ Push(key->value());
2648  __ CallRuntime((strict_mode() == STRICT ? Runtime::kStoreToSuper_Strict
2649  : Runtime::kStoreToSuper_Sloppy),
2650  4);
2651  context()->Plug(r0);
2652 }
2653 
2654 
2655 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2656  // Assignment to a property, using a keyed store IC.
2657 
2658  // Record source code position before IC call.
2659  SetSourcePosition(expr->position());
2660  __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
2661  DCHECK(StoreDescriptor::ValueRegister().is(r0));
2662 
2663  Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2664  CallIC(ic, expr->AssignmentFeedbackId());
2665 
2666  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2667  context()->Plug(r0);
2668 }
2669 
2670 
2671 void FullCodeGenerator::VisitProperty(Property* expr) {
2672  Comment cmnt(masm_, "[ Property");
2673  Expression* key = expr->key();
2674 
2675  if (key->IsPropertyName()) {
2676  if (!expr->IsSuperAccess()) {
2677  VisitForAccumulatorValue(expr->obj());
2678  __ Move(LoadDescriptor::ReceiverRegister(), r0);
2679  EmitNamedPropertyLoad(expr);
2680  } else {
2681  VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
2682  EmitLoadHomeObject(expr->obj()->AsSuperReference());
2683  __ Push(result_register());
2684  EmitNamedSuperPropertyLoad(expr);
2685  }
2686  PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2687  context()->Plug(r0);
2688  } else {
2689  VisitForStackValue(expr->obj());
2690  VisitForAccumulatorValue(expr->key());
2691  __ Move(LoadDescriptor::NameRegister(), r0);
2692  __ pop(LoadDescriptor::ReceiverRegister());
2693  EmitKeyedPropertyLoad(expr);
2694  context()->Plug(r0);
2695  }
2696 }
2697 
2698 
2699 void FullCodeGenerator::CallIC(Handle<Code> code,
2700  TypeFeedbackId ast_id) {
2701  ic_total_count_++;
2702  // All calls must have a predictable size in full-codegen code to ensure that
2703  // the debugger can patch them correctly.
2704  __ Call(code, RelocInfo::CODE_TARGET, ast_id, al,
2705  NEVER_INLINE_TARGET_ADDRESS);
2706 }
2707 
2708 
2709 // Code common for calls using the IC.
2710 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2711  Expression* callee = expr->expression();
2712 
2713  CallICState::CallType call_type =
2714  callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2715 
2716  // Get the target function.
2717  if (call_type == CallICState::FUNCTION) {
2718  { StackValueContext context(this);
2719  EmitVariableLoad(callee->AsVariableProxy());
2720  PrepareForBailout(callee, NO_REGISTERS);
2721  }
2722  // Push undefined as receiver. This is patched in the method prologue if it
2723  // is a sloppy mode method.
2724  __ Push(isolate()->factory()->undefined_value());
2725  } else {
2726  // Load the function from the receiver.
2727  DCHECK(callee->IsProperty());
2728  DCHECK(!callee->AsProperty()->IsSuperAccess());
2729  __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2730  EmitNamedPropertyLoad(callee->AsProperty());
2731  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2732  // Push the target function under the receiver.
2733  __ ldr(ip, MemOperand(sp, 0));
2734  __ push(ip);
2735  __ str(r0, MemOperand(sp, kPointerSize));
2736  }
2737 
2738  EmitCall(expr, call_type);
2739 }
2740 
2741 
2742 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2743  Expression* callee = expr->expression();
2744  DCHECK(callee->IsProperty());
2745  Property* prop = callee->AsProperty();
2746  DCHECK(prop->IsSuperAccess());
2747 
2748  SetSourcePosition(prop->position());
2749  Literal* key = prop->key()->AsLiteral();
2750  DCHECK(!key->value()->IsSmi());
2751  // Load the function from the receiver.
2752  const Register scratch = r1;
2753  SuperReference* super_ref = prop->obj()->AsSuperReference();
2754  EmitLoadHomeObject(super_ref);
2755  __ Push(r0);
2756  VisitForAccumulatorValue(super_ref->this_var());
2757  __ Push(r0);
2758  __ Push(r0);
2759  __ ldr(scratch, MemOperand(sp, kPointerSize * 2));
2760  __ Push(scratch);
2761  __ Push(key->value());
2762 
2763  // Stack here:
2764  // - home_object
2765  // - this (receiver)
2766  // - this (receiver) <-- LoadFromSuper will pop here and below.
2767  // - home_object
2768  // - key
2769  __ CallRuntime(Runtime::kLoadFromSuper, 3);
2770 
2771  // Replace home_object with target function.
2772  __ str(r0, MemOperand(sp, kPointerSize));
2773 
2774  // Stack here:
2775  // - target function
2776  // - this (receiver)
2777  EmitCall(expr, CallICState::METHOD);
2778 }
2779 
2780 
2781 // Code common for calls using the IC.
2782 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2783  Expression* key) {
2784  // Load the key.
2785  VisitForAccumulatorValue(key);
2786 
2787  Expression* callee = expr->expression();
2788 
2789  // Load the function from the receiver.
2790  DCHECK(callee->IsProperty());
2791  __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2792  __ Move(LoadDescriptor::NameRegister(), r0);
2793  EmitKeyedPropertyLoad(callee->AsProperty());
2794  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2795 
2796  // Push the target function under the receiver.
2797  __ ldr(ip, MemOperand(sp, 0));
2798  __ push(ip);
2799  __ str(r0, MemOperand(sp, kPointerSize));
2800 
2801  EmitCall(expr, CallICState::METHOD);
2802 }
2803 
2804 
2805 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2806  // Load the arguments.
2807  ZoneList<Expression*>* args = expr->arguments();
2808  int arg_count = args->length();
2809  { PreservePositionScope scope(masm()->positions_recorder());
2810  for (int i = 0; i < arg_count; i++) {
2811  VisitForStackValue(args->at(i));
2812  }
2813  }
2814 
2815  // Record source position of the IC call.
2816  SetSourcePosition(expr->position());
2817  Handle<Code> ic = CallIC::initialize_stub(
2818  isolate(), arg_count, call_type);
2819  __ mov(r3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
2820  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2821  // Don't assign a type feedback id to the IC, since type feedback is provided
2822  // by the vector above.
2823  CallIC(ic);
2824 
2825  RecordJSReturnSite(expr);
2826  // Restore context register.
2827  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2828  context()->DropAndPlug(1, r0);
2829 }
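
 // At the point of the CallIC the stack is, from the top: the arguments
 // (last argument on top), then the receiver, then the target function.
 // The target is reloaded into r1 and the feedback slot index is in r3,
 // so the IC can record the call target before invoking it.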
2830 
2831 
2832 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2833  // r4: copy of the first argument or undefined if it doesn't exist.
2834  if (arg_count > 0) {
2835  __ ldr(r4, MemOperand(sp, arg_count * kPointerSize));
2836  } else {
2837  __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
2838  }
2839 
2840  // r3: the receiver of the enclosing function.
2841  int receiver_offset = 2 + info_->scope()->num_parameters();
2842  __ ldr(r3, MemOperand(fp, receiver_offset * kPointerSize));
2843 
2844  // r2: strict mode.
2845  __ mov(r2, Operand(Smi::FromInt(strict_mode())));
2846 
2847  // r1: the start position of the scope the call resides in.
2848  __ mov(r1, Operand(Smi::FromInt(scope()->start_position())));
2849 
2850  // Do the runtime call.
2851  __ Push(r4, r3, r2, r1);
2852  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2853 }
2854 
2855 
2856 void FullCodeGenerator::VisitCall(Call* expr) {
2857 #ifdef DEBUG
2858  // We want to verify that RecordJSReturnSite gets called on all paths
2859  // through this function. Avoid early returns.
2860  expr->return_is_recorded_ = false;
2861 #endif
2862 
2863  Comment cmnt(masm_, "[ Call");
2864  Expression* callee = expr->expression();
2865  Call::CallType call_type = expr->GetCallType(isolate());
2866 
2867  if (call_type == Call::POSSIBLY_EVAL_CALL) {
2868  // In a call to eval, we first call Runtime::kResolvePossiblyDirectEval
2869  // to resolve the function we need to call and the receiver of the
2870  // call. Then we call the resolved function using the given
2871  // arguments.
2872  ZoneList<Expression*>* args = expr->arguments();
2873  int arg_count = args->length();
2874 
2875  { PreservePositionScope pos_scope(masm()->positions_recorder());
2876  VisitForStackValue(callee);
2877  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
2878  __ push(r2); // Reserved receiver slot.
2879 
2880  // Push the arguments.
2881  for (int i = 0; i < arg_count; i++) {
2882  VisitForStackValue(args->at(i));
2883  }
2884 
2885  // Push a copy of the function (found below the arguments) and
2886  // resolve eval.
2887  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2888  __ push(r1);
2889  EmitResolvePossiblyDirectEval(arg_count);
2890 
2891  // The runtime call returns a pair of values in r0 (function) and
2892  // r1 (receiver). Touch up the stack with the right values.
2893  __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2894  __ str(r1, MemOperand(sp, arg_count * kPointerSize));
2895  }
2896 
2897  // Record source position for debugger.
2898  SetSourcePosition(expr->position());
2899  CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
2900  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2901  __ CallStub(&stub);
2902  RecordJSReturnSite(expr);
2903  // Restore context register.
2904  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2905  context()->DropAndPlug(1, r0);
2906  } else if (call_type == Call::GLOBAL_CALL) {
2907  EmitCallWithLoadIC(expr);
2908 
2909  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2910  // Call to a lookup slot (dynamically introduced variable).
2911  VariableProxy* proxy = callee->AsVariableProxy();
2912  Label slow, done;
2913 
2914  { PreservePositionScope scope(masm()->positions_recorder());
2915  // Generate code for loading from variables potentially shadowed
2916  // by eval-introduced variables.
2917  EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
2918  }
2919 
2920  __ bind(&slow);
2921  // Call the runtime to find the function to call (returned in r0)
2922  // and the object holding it (returned in r1).
2923  DCHECK(!context_register().is(r2));
2924  __ mov(r2, Operand(proxy->name()));
2925  __ Push(context_register(), r2);
2926  __ CallRuntime(Runtime::kLoadLookupSlot, 2);
2927  __ Push(r0, r1); // Function, receiver.
2928 
2929  // If fast case code has been generated, emit code to push the
2930  // function and receiver and have the slow path jump around this
2931  // code.
2932  if (done.is_linked()) {
2933  Label call;
2934  __ b(&call);
2935  __ bind(&done);
2936  // Push function.
2937  __ push(r0);
2938  // The receiver is implicitly the global receiver. Indicate this
2939  // by passing undefined to the call function stub.
2940  __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
2941  __ push(r1);
2942  __ bind(&call);
2943  }
2944 
2945  // The receiver is either the global receiver or an object found
2946  // by LoadContextSlot.
2947  EmitCall(expr);
2948  } else if (call_type == Call::PROPERTY_CALL) {
2949  Property* property = callee->AsProperty();
2950  bool is_named_call = property->key()->IsPropertyName();
2951  // super.x() is handled in EmitCallWithLoadIC.
2952  if (property->IsSuperAccess() && is_named_call) {
2953  EmitSuperCallWithLoadIC(expr);
2954  } else {
2955  {
2956  PreservePositionScope scope(masm()->positions_recorder());
2957  VisitForStackValue(property->obj());
2958  }
2959  if (is_named_call) {
2960  EmitCallWithLoadIC(expr);
2961  } else {
2962  EmitKeyedCallWithLoadIC(expr, property->key());
2963  }
2964  }
2965  } else {
2966  DCHECK(call_type == Call::OTHER_CALL);
2967  // Call to an arbitrary expression not handled specially above.
2968  { PreservePositionScope scope(masm()->positions_recorder());
2969  VisitForStackValue(callee);
2970  }
2971  __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
2972  __ push(r1);
2973  // Emit function call.
2974  EmitCall(expr);
2975  }
2976 
2977 #ifdef DEBUG
2978  // RecordJSReturnSite should have been called.
2979  DCHECK(expr->return_is_recorded_);
2980 #endif
2981 }
2982 
2983 
2984 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2985  Comment cmnt(masm_, "[ CallNew");
2986  // According to ECMA-262, section 11.2.2, page 44, the function
2987  // expression in new calls must be evaluated before the
2988  // arguments.
2989 
2990  // Push constructor on the stack. If it's not a function it's used as
2991  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2992  // ignored.
2993  VisitForStackValue(expr->expression());
2994 
2995  // Push the arguments ("left-to-right") on the stack.
2996  ZoneList<Expression*>* args = expr->arguments();
2997  int arg_count = args->length();
2998  for (int i = 0; i < arg_count; i++) {
2999  VisitForStackValue(args->at(i));
3000  }
3001 
3002  // Call the construct call builtin that handles allocation and
3003  // constructor invocation.
3004  SetSourcePosition(expr->position());
3005 
3006  // Load function and argument count into r1 and r0.
3007  __ mov(r0, Operand(arg_count));
3008  __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
3009 
3010  // Record call targets in unoptimized code.
3011  if (FLAG_pretenuring_call_new) {
3012  EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3013  DCHECK(expr->AllocationSiteFeedbackSlot() ==
3014  expr->CallNewFeedbackSlot() + 1);
3015  }
3016 
3017  __ Move(r2, FeedbackVector());
3018  __ mov(r3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));
3019 
3020  CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
3021  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3022  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3023  context()->Plug(r0);
3024 }
3025 
3026 
3027 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3028  ZoneList<Expression*>* args = expr->arguments();
3029  DCHECK(args->length() == 1);
3030 
3031  VisitForAccumulatorValue(args->at(0));
3032 
3033  Label materialize_true, materialize_false;
3034  Label* if_true = NULL;
3035  Label* if_false = NULL;
3036  Label* fall_through = NULL;
3037  context()->PrepareTest(&materialize_true, &materialize_false,
3038  &if_true, &if_false, &fall_through);
3039 
3040  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3041  __ SmiTst(r0);
3042  Split(eq, if_true, if_false, fall_through);
3043 
3044  context()->Plug(if_true, if_false);
3045 }
3046 
3047 
3048 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3049  ZoneList<Expression*>* args = expr->arguments();
3050  DCHECK(args->length() == 1);
3051 
3052  VisitForAccumulatorValue(args->at(0));
3053 
3054  Label materialize_true, materialize_false;
3055  Label* if_true = NULL;
3056  Label* if_false = NULL;
3057  Label* fall_through = NULL;
3058  context()->PrepareTest(&materialize_true, &materialize_false,
3059  &if_true, &if_false, &fall_through);
3060 
3061  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3062  __ NonNegativeSmiTst(r0);
3063  Split(eq, if_true, if_false, fall_through);
3064 
3065  context()->Plug(if_true, if_false);
3066 }
3067 
3068 
3069 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3070  ZoneList<Expression*>* args = expr->arguments();
3071  DCHECK(args->length() == 1);
3072 
3073  VisitForAccumulatorValue(args->at(0));
3074 
3075  Label materialize_true, materialize_false;
3076  Label* if_true = NULL;
3077  Label* if_false = NULL;
3078  Label* fall_through = NULL;
3079  context()->PrepareTest(&materialize_true, &materialize_false,
3080  &if_true, &if_false, &fall_through);
3081 
3082  __ JumpIfSmi(r0, if_false);
3083  __ LoadRoot(ip, Heap::kNullValueRootIndex);
3084  __ cmp(r0, ip);
3085  __ b(eq, if_true);
3086  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
3087  // Undetectable objects behave like undefined when tested with typeof.
3088  __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset));
3089  __ tst(r1, Operand(1 << Map::kIsUndetectable));
3090  __ b(ne, if_false);
3091  __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset));
3092  __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
3093  __ b(lt, if_false);
3094  __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
3095  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3096  Split(le, if_true, if_false, fall_through);
3097 
3098  context()->Plug(if_true, if_false);
3099 }
3100 
3101 
3102 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3103  ZoneList<Expression*>* args = expr->arguments();
3104  DCHECK(args->length() == 1);
3105 
3106  VisitForAccumulatorValue(args->at(0));
3107 
3108  Label materialize_true, materialize_false;
3109  Label* if_true = NULL;
3110  Label* if_false = NULL;
3111  Label* fall_through = NULL;
3112  context()->PrepareTest(&materialize_true, &materialize_false,
3113  &if_true, &if_false, &fall_through);
3114 
3115  __ JumpIfSmi(r0, if_false);
3116  __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
3117  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3118  Split(ge, if_true, if_false, fall_through);
3119 
3120  context()->Plug(if_true, if_false);
3121 }
3122 
3123 
3124 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3125  ZoneList<Expression*>* args = expr->arguments();
3126  DCHECK(args->length() == 1);
3127 
3128  VisitForAccumulatorValue(args->at(0));
3129 
3130  Label materialize_true, materialize_false;
3131  Label* if_true = NULL;
3132  Label* if_false = NULL;
3133  Label* fall_through = NULL;
3134  context()->PrepareTest(&materialize_true, &materialize_false,
3135  &if_true, &if_false, &fall_through);
3136 
3137  __ JumpIfSmi(r0, if_false);
3138  __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
3139  __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
3140  __ tst(r1, Operand(1 << Map::kIsUndetectable));
3141  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3142  Split(ne, if_true, if_false, fall_through);
3143 
3144  context()->Plug(if_true, if_false);
3145 }
3146 
3147 
3148 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3149  CallRuntime* expr) {
3150  ZoneList<Expression*>* args = expr->arguments();
3151  DCHECK(args->length() == 1);
3152 
3153  VisitForAccumulatorValue(args->at(0));
3154 
3155  Label materialize_true, materialize_false, skip_lookup;
3156  Label* if_true = NULL;
3157  Label* if_false = NULL;
3158  Label* fall_through = NULL;
3159  context()->PrepareTest(&materialize_true, &materialize_false,
3160  &if_true, &if_false, &fall_through);
3161 
3162  __ AssertNotSmi(r0);
3163 
3164  __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
3165  __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset));
3166  __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3167  __ b(ne, &skip_lookup);
3168 
3169  // Check for fast case object. Generate false result for slow case object.
3170  __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset));
3171  __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3172  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
3173  __ cmp(r2, ip);
3174  __ b(eq, if_false);
3175 
3176  // Look for valueOf name in the descriptor array, and indicate false if
3177  // found. Since we omit an enumeration index check, if it is added via a
3178  // transition that shares its descriptor array, this is a false positive.
3179  Label entry, loop, done;
3180 
3181  // Skip loop if no descriptors are valid.
3182  __ NumberOfOwnDescriptors(r3, r1);
3183  __ cmp(r3, Operand::Zero());
3184  __ b(eq, &done);
3185 
3186  __ LoadInstanceDescriptors(r1, r4);
3187  // r4: descriptor array.
3188  // r3: valid entries in the descriptor array.
3189  __ mov(ip, Operand(DescriptorArray::kDescriptorSize));
3190  __ mul(r3, r3, ip);
3191  // Calculate location of the first key name.
3192  __ add(r4, r4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3193  // Calculate the end of the descriptor array.
3194  __ mov(r2, r4);
3195  __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2));
3196 
3197  // Loop through all the keys in the descriptor array. If one of these is the
3198  // string "valueOf" the result is false.
3199  // The use of ip to store the valueOf string assumes that it is not otherwise
3200  // used in the loop below.
3201  __ mov(ip, Operand(isolate()->factory()->value_of_string()));
3202  __ jmp(&entry);
3203  __ bind(&loop);
3204  __ ldr(r3, MemOperand(r4, 0));
3205  __ cmp(r3, ip);
3206  __ b(eq, if_false);
3207  __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3208  __ bind(&entry);
3209  __ cmp(r4, Operand(r2));
3210  __ b(ne, &loop);
3211 
3212  __ bind(&done);
3213 
3214  // Set the bit in the map to indicate that there is no local valueOf field.
3215  __ ldrb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
3216  __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3217  __ strb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
3218 
3219  __ bind(&skip_lookup);
3220 
3221  // If a valueOf property is not found on the object check that its
3222  // prototype is the un-modified String prototype. If not result is false.
3223  __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
3224  __ JumpIfSmi(r2, if_false);
3225  __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3226  __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3227  __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset));
3228  __ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3229  __ cmp(r2, r3);
3230  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3231  Split(eq, if_true, if_false, fall_through);
3232 
3233  context()->Plug(if_true, if_false);
3234 }
3235 
3236 
3237 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3238  ZoneList<Expression*>* args = expr->arguments();
3239  DCHECK(args->length() == 1);
3240 
3241  VisitForAccumulatorValue(args->at(0));
3242 
3243  Label materialize_true, materialize_false;
3244  Label* if_true = NULL;
3245  Label* if_false = NULL;
3246  Label* fall_through = NULL;
3247  context()->PrepareTest(&materialize_true, &materialize_false,
3248  &if_true, &if_false, &fall_through);
3249 
3250  __ JumpIfSmi(r0, if_false);
3251  __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
3252  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3253  Split(eq, if_true, if_false, fall_through);
3254 
3255  context()->Plug(if_true, if_false);
3256 }
3257 
3258 
3259 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3260  ZoneList<Expression*>* args = expr->arguments();
3261  DCHECK(args->length() == 1);
3262 
3263  VisitForAccumulatorValue(args->at(0));
3264 
3265  Label materialize_true, materialize_false;
3266  Label* if_true = NULL;
3267  Label* if_false = NULL;
3268  Label* fall_through = NULL;
3269  context()->PrepareTest(&materialize_true, &materialize_false,
3270  &if_true, &if_false, &fall_through);
3271 
3272  __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3273  __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
3274  __ ldr(r1, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
3275  __ cmp(r2, Operand(0x80000000));
3276  __ cmp(r1, Operand(0x00000000), eq);
3277 
3278  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3279  Split(eq, if_true, if_false, fall_through);
3280 
3281  context()->Plug(if_true, if_false);
3282 }
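
 // Worked example: IEEE-754 -0.0 is the sign bit alone, i.e. the upper
 // word (kExponentOffset) is 0x80000000 and the lower word
 // (kMantissaOffset) is 0x00000000. The second cmp above executes only on
 // eq, so the final eq condition holds exactly when both words match.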
3283 
3284 
3285 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3286  ZoneList<Expression*>* args = expr->arguments();
3287  DCHECK(args->length() == 1);
3288 
3289  VisitForAccumulatorValue(args->at(0));
3290 
3291  Label materialize_true, materialize_false;
3292  Label* if_true = NULL;
3293  Label* if_false = NULL;
3294  Label* fall_through = NULL;
3295  context()->PrepareTest(&materialize_true, &materialize_false,
3296  &if_true, &if_false, &fall_through);
3297 
3298  __ JumpIfSmi(r0, if_false);
3299  __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
3300  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3301  Split(eq, if_true, if_false, fall_through);
3302 
3303  context()->Plug(if_true, if_false);
3304 }
3305 
3306 
3307 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3308  ZoneList<Expression*>* args = expr->arguments();
3309  DCHECK(args->length() == 1);
3310 
3311  VisitForAccumulatorValue(args->at(0));
3312 
3313  Label materialize_true, materialize_false;
3314  Label* if_true = NULL;
3315  Label* if_false = NULL;
3316  Label* fall_through = NULL;
3317  context()->PrepareTest(&materialize_true, &materialize_false,
3318  &if_true, &if_false, &fall_through);
3319 
3320  __ JumpIfSmi(r0, if_false);
3321  __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
3322  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3323  Split(eq, if_true, if_false, fall_through);
3324 
3325  context()->Plug(if_true, if_false);
3326 }
3327 
3328 
3329 
3330 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3331  DCHECK(expr->arguments()->length() == 0);
3332 
3333  Label materialize_true, materialize_false;
3334  Label* if_true = NULL;
3335  Label* if_false = NULL;
3336  Label* fall_through = NULL;
3337  context()->PrepareTest(&materialize_true, &materialize_false,
3338  &if_true, &if_false, &fall_through);
3339 
3340  // Get the frame pointer for the calling frame.
3341  __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3342 
3343  // Skip the arguments adaptor frame if it exists.
3344  __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset));
3345  __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3346  __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset), eq);
3347 
3348  // Check the marker in the calling frame.
3349  __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
3350  __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
3351  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3352  Split(eq, if_true, if_false, fall_through);
3353 
3354  context()->Plug(if_true, if_false);
3355 }
3356 
3357 
3358 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3359  ZoneList<Expression*>* args = expr->arguments();
3360  DCHECK(args->length() == 2);
3361 
3362  // Load the two objects into registers and perform the comparison.
3363  VisitForStackValue(args->at(0));
3364  VisitForAccumulatorValue(args->at(1));
3365 
3366  Label materialize_true, materialize_false;
3367  Label* if_true = NULL;
3368  Label* if_false = NULL;
3369  Label* fall_through = NULL;
3370  context()->PrepareTest(&materialize_true, &materialize_false,
3371  &if_true, &if_false, &fall_through);
3372 
3373  __ pop(r1);
3374  __ cmp(r0, r1);
3375  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3376  Split(eq, if_true, if_false, fall_through);
3377 
3378  context()->Plug(if_true, if_false);
3379 }
3380 
3381 
3382 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3383  ZoneList<Expression*>* args = expr->arguments();
3384  DCHECK(args->length() == 1);
3385 
3386  // ArgumentsAccessStub expects the key in r1 and the formal
3387  // parameter count in r0.
3388  VisitForAccumulatorValue(args->at(0));
3389  __ mov(r1, r0);
3390  __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3391  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3392  __ CallStub(&stub);
3393  context()->Plug(r0);
3394 }
3395 
3396 
3397 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3398  DCHECK(expr->arguments()->length() == 0);
3399 
3400  // Get the number of formal parameters.
3401  __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3402 
3403  // Check if the calling frame is an arguments adaptor frame.
3404  __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3405  __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
3406  __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3407 
3408  // Arguments adaptor case: Read the arguments length from the
3409  // adaptor frame.
3410  __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset), eq);
3411 
3412  context()->Plug(r0);
3413 }
3414 
3415 
3416 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3417  ZoneList<Expression*>* args = expr->arguments();
3418  DCHECK(args->length() == 1);
3419  Label done, null, function, non_function_constructor;
3420 
3421  VisitForAccumulatorValue(args->at(0));
3422 
3423  // If the object is a smi, we return null.
3424  __ JumpIfSmi(r0, &null);
3425 
3426  // Check that the object is a JS object but take special care of JS
3427  // functions to make sure they have 'Function' as their class.
3428  // Assume that there are only two callable types, and one of them is at
3429  // either end of the type range for JS object types. Saves extra comparisons.
3430  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3431  __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE);
3432  // Map is now in r0.
3433  __ b(lt, &null);
3434  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3435  FIRST_SPEC_OBJECT_TYPE + 1);
3436  __ b(eq, &function);
3437 
3438  __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE));
3439  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3440  LAST_SPEC_OBJECT_TYPE - 1);
3441  __ b(eq, &function);
3442  // Assume that there is no larger type.
3443  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3444 
3445  // Check if the constructor in the map is a JS function.
3446  __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset));
3447  __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
3448  __ b(ne, &non_function_constructor);
3449 
3450  // r0 now contains the constructor function. Grab the
3451  // instance class name from there.
3452  __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
3453  __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
3454  __ b(&done);
3455 
3456  // Functions have class 'Function'.
3457  __ bind(&function);
3458  __ LoadRoot(r0, Heap::kFunction_stringRootIndex);
3459  __ jmp(&done);
3460 
3461  // Objects with a non-function constructor have class 'Object'.
3462  __ bind(&non_function_constructor);
3463  __ LoadRoot(r0, Heap::kObject_stringRootIndex);
3464  __ jmp(&done);
3465 
3466  // Non-JS objects have class null.
3467  __ bind(&null);
3468  __ LoadRoot(r0, Heap::kNullValueRootIndex);
3469 
3470  // All done.
3471  __ bind(&done);
3472 
3473  context()->Plug(r0);
3474 }
3475 
3476 
3477 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3478  // Load the arguments on the stack and call the stub.
3479  SubStringStub stub(isolate());
3480  ZoneList<Expression*>* args = expr->arguments();
3481  DCHECK(args->length() == 3);
3482  VisitForStackValue(args->at(0));
3483  VisitForStackValue(args->at(1));
3484  VisitForStackValue(args->at(2));
3485  __ CallStub(&stub);
3486  context()->Plug(r0);
3487 }
3488 
3489 
3490 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3491  // Load the arguments on the stack and call the stub.
3492  RegExpExecStub stub(isolate());
3493  ZoneList<Expression*>* args = expr->arguments();
3494  DCHECK(args->length() == 4);
3495  VisitForStackValue(args->at(0));
3496  VisitForStackValue(args->at(1));
3497  VisitForStackValue(args->at(2));
3498  VisitForStackValue(args->at(3));
3499  __ CallStub(&stub);
3500  context()->Plug(r0);
3501 }
3502 
3503 
3504 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3505  ZoneList<Expression*>* args = expr->arguments();
3506  DCHECK(args->length() == 1);
3507  VisitForAccumulatorValue(args->at(0)); // Load the object.
3508 
3509  Label done;
3510  // If the object is a smi return the object.
3511  __ JumpIfSmi(r0, &done);
3512  // If the object is not a value type, return the object.
3513  __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
3514  __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset), eq);
3515 
3516  __ bind(&done);
3517  context()->Plug(r0);
3518 }
3519 
3520 
3521 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3522  ZoneList<Expression*>* args = expr->arguments();
3523  DCHECK(args->length() == 2);
3524  DCHECK_NE(NULL, args->at(1)->AsLiteral());
3525  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3526 
3527  VisitForAccumulatorValue(args->at(0)); // Load the object.
3528 
3529  Label runtime, done, not_date_object;
3530  Register object = r0;
3531  Register result = r0;
3532  Register scratch0 = r9;
3533  Register scratch1 = r1;
3534 
3535  __ JumpIfSmi(object, &not_date_object);
3536  __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE);
3537  __ b(ne, &not_date_object);
3538 
3539  if (index->value() == 0) {
3540  __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
3541  __ jmp(&done);
3542  } else {
3543  if (index->value() < JSDate::kFirstUncachedField) {
3544  ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3545  __ mov(scratch1, Operand(stamp));
3546  __ ldr(scratch1, MemOperand(scratch1));
3547  __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3548  __ cmp(scratch1, scratch0);
3549  __ b(ne, &runtime);
3550  __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
3551  kPointerSize * index->value()));
3552  __ jmp(&done);
3553  }
3554  __ bind(&runtime);
3555  __ PrepareCallCFunction(2, scratch1);
3556  __ mov(r1, Operand(index));
3557  __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3558  __ jmp(&done);
3559  }
3560 
3561  __ bind(&not_date_object);
3562  __ CallRuntime(Runtime::kThrowNotDateError, 0);
3563  __ bind(&done);
3564  context()->Plug(r0);
3565 }
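 // Fast path: fields below JSDate::kFirstUncachedField are read straight
 // out of the JSDate object, but only while its cache stamp still matches
 // the isolate-wide date_cache_stamp; otherwise the C function
 // get_date_field_function is called with the object (r0) and the
 // smi-tagged field index (r1).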
3566 
3567 
3568 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3569  ZoneList<Expression*>* args = expr->arguments();
3570  DCHECK_EQ(3, args->length());
3571 
3572  Register string = r0;
3573  Register index = r1;
3574  Register value = r2;
3575 
3576  VisitForStackValue(args->at(0)); // index
3577  VisitForStackValue(args->at(1)); // value
3578  VisitForAccumulatorValue(args->at(2)); // string
3579  __ Pop(index, value);
3580 
3581  if (FLAG_debug_code) {
3582  __ SmiTst(value);
3583  __ Check(eq, kNonSmiValue);
3584  __ SmiTst(index);
3585  __ Check(eq, kNonSmiIndex);
3586  __ SmiUntag(index, index);
3587  static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3588  __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3589  __ SmiTag(index, index);
3590  }
3591 
3592  __ SmiUntag(value, value);
3593  __ add(ip,
3594  string,
3595  Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3596  __ strb(value, MemOperand(ip, index, LSR, kSmiTagSize));
3597  context()->Plug(string);
3598 }
3599 
3600 
3601 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3602  ZoneList<Expression*>* args = expr->arguments();
3603  DCHECK_EQ(3, args->length());
3604 
3605  Register string = r0;
3606  Register index = r1;
3607  Register value = r2;
3608 
3609  VisitForStackValue(args->at(0)); // index
3610  VisitForStackValue(args->at(1)); // value
3611  VisitForAccumulatorValue(args->at(2)); // string
3612  __ Pop(index, value);
3613 
3614  if (FLAG_debug_code) {
3615  __ SmiTst(value);
3616  __ Check(eq, kNonSmiValue);
3617  __ SmiTst(index);
3618  __ Check(eq, kNonSmiIndex);
3619  __ SmiUntag(index, index);
3620  static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3621  __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3622  __ SmiTag(index, index);
3623  }
3624 
3625  __ SmiUntag(value, value);
3626  __ add(ip,
3627  string,
3628  Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3629  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
3630  __ strh(value, MemOperand(ip, index));
3631  context()->Plug(string);
3632 }
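 // Addressing note for both SetChar helpers: the index is still a smi at
 // the store. With kSmiTagSize == 1, the one-byte store shifts it right by
 // one to get a byte offset, while the two-byte store uses the smi value
 // directly, since a smi is exactly twice the untagged index.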
3633 
3634 
3635 
3636 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3637  // Load the arguments on the stack and call the runtime function.
3638  ZoneList<Expression*>* args = expr->arguments();
3639  DCHECK(args->length() == 2);
3640  VisitForStackValue(args->at(0));
3641  VisitForStackValue(args->at(1));
3642  MathPowStub stub(isolate(), MathPowStub::ON_STACK);
3643  __ CallStub(&stub);
3644  context()->Plug(r0);
3645 }
3646 
3647 
3648 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3649  ZoneList<Expression*>* args = expr->arguments();
3650  DCHECK(args->length() == 2);
3651  VisitForStackValue(args->at(0)); // Load the object.
3652  VisitForAccumulatorValue(args->at(1)); // Load the value.
3653  __ pop(r1); // r0 = value. r1 = object.
3654 
3655  Label done;
3656  // If the object is a smi, return the value.
3657  __ JumpIfSmi(r1, &done);
3658 
3659  // If the object is not a value type, return the value.
3660  __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
3661  __ b(ne, &done);
3662 
3663  // Store the value.
3664  __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
3665  // Update the write barrier. Save the value as it will be
3666  // overwritten by the write barrier code and is needed afterward.
3667  __ mov(r2, r0);
3668  __ RecordWriteField(
3669  r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
3670 
3671  __ bind(&done);
3672  context()->Plug(r0);
3673 }
3674 
3675 
3676 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3677  ZoneList<Expression*>* args = expr->arguments();
3678  DCHECK_EQ(args->length(), 1);
3679  // Load the argument into r0 and call the stub.
3680  VisitForAccumulatorValue(args->at(0));
3681 
3682  NumberToStringStub stub(isolate());
3683  __ CallStub(&stub);
3684  context()->Plug(r0);
3685 }
3686 
3687 
3688 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3689  ZoneList<Expression*>* args = expr->arguments();
3690  DCHECK(args->length() == 1);
3691  VisitForAccumulatorValue(args->at(0));
3692 
3693  Label done;
3694  StringCharFromCodeGenerator generator(r0, r1);
3695  generator.GenerateFast(masm_);
3696  __ jmp(&done);
3697 
3698  NopRuntimeCallHelper call_helper;
3699  generator.GenerateSlow(masm_, call_helper);
3700 
3701  __ bind(&done);
3702  context()->Plug(r1);
3703 }
3704 
3705 
3706 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3707  ZoneList<Expression*>* args = expr->arguments();
3708  DCHECK(args->length() == 2);
3709  VisitForStackValue(args->at(0));
3710  VisitForAccumulatorValue(args->at(1));
3711 
3712  Register object = r1;
3713  Register index = r0;
3714  Register result = r3;
3715 
3716  __ pop(object);
3717 
3718  Label need_conversion;
3719  Label index_out_of_range;
3720  Label done;
3721  StringCharCodeAtGenerator generator(object,
3722  index,
3723  result,
3724  &need_conversion,
3725  &need_conversion,
3726  &index_out_of_range,
3727  STRING_INDEX_IS_NUMBER);
3728  generator.GenerateFast(masm_);
3729  __ jmp(&done);
3730 
3731  __ bind(&index_out_of_range);
3732  // When the index is out of range, the spec requires us to return
3733  // NaN.
3734  __ LoadRoot(result, Heap::kNanValueRootIndex);
3735  __ jmp(&done);
3736 
3737  __ bind(&need_conversion);
3738  // Load the undefined value into the result register, which will
3739  // trigger conversion.
3740  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3741  __ jmp(&done);
3742 
3743  NopRuntimeCallHelper call_helper;
3744  generator.GenerateSlow(masm_, call_helper);
3745 
3746  __ bind(&done);
3747  context()->Plug(result);
3748 }
3749 
3750 
3751 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3752  ZoneList<Expression*>* args = expr->arguments();
3753  DCHECK(args->length() == 2);
3754  VisitForStackValue(args->at(0));
3755  VisitForAccumulatorValue(args->at(1));
3756 
3757  Register object = r1;
3758  Register index = r0;
3759  Register scratch = r3;
3760  Register result = r0;
3761 
3762  __ pop(object);
3763 
3764  Label need_conversion;
3765  Label index_out_of_range;
3766  Label done;
3767  StringCharAtGenerator generator(object,
3768  index,
3769  scratch,
3770  result,
3771  &need_conversion,
3772  &need_conversion,
3773  &index_out_of_range,
3774  STRING_INDEX_IS_NUMBER);
3775  generator.GenerateFast(masm_);
3776  __ jmp(&done);
3777 
3778  __ bind(&index_out_of_range);
3779  // When the index is out of range, the spec requires us to return
3780  // the empty string.
3781  __ LoadRoot(result, Heap::kempty_stringRootIndex);
3782  __ jmp(&done);
3783 
3784  __ bind(&need_conversion);
3785  // Move smi zero into the result register, which will trigger
3786  // conversion.
3787  __ mov(result, Operand(Smi::FromInt(0)));
3788  __ jmp(&done);
3789 
3790  NopRuntimeCallHelper call_helper;
3791  generator.GenerateSlow(masm_, call_helper);
3792 
3793  __ bind(&done);
3794  context()->Plug(result);
3795 }
3796 
3797 
3798 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3799  ZoneList<Expression*>* args = expr->arguments();
3800  DCHECK_EQ(2, args->length());
3801  VisitForStackValue(args->at(0));
3802  VisitForAccumulatorValue(args->at(1));
3803 
3804  __ pop(r1);
3805  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
3806  __ CallStub(&stub);
3807  context()->Plug(r0);
3808 }
3809 
3810 
3811 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3812  ZoneList<Expression*>* args = expr->arguments();
3813  DCHECK_EQ(2, args->length());
3814  VisitForStackValue(args->at(0));
3815  VisitForStackValue(args->at(1));
3816 
3817  StringCompareStub stub(isolate());
3818  __ CallStub(&stub);
3819  context()->Plug(r0);
3820 }
3821 
3822 
3823 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3824  ZoneList<Expression*>* args = expr->arguments();
3825  DCHECK(args->length() >= 2);
3826 
3827  int arg_count = args->length() - 2; // 2 ~ receiver and function.
3828  for (int i = 0; i < arg_count + 1; i++) {
3829  VisitForStackValue(args->at(i));
3830  }
3831  VisitForAccumulatorValue(args->last()); // Function.
3832 
3833  Label runtime, done;
3834  // Check for non-function argument (including proxy).
3835  __ JumpIfSmi(r0, &runtime);
3836  __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
3837  __ b(ne, &runtime);
3838 
3839  // InvokeFunction requires the function in r1. Move it in there.
3840  __ mov(r1, result_register());
3841  ParameterCount count(arg_count);
3842  __ InvokeFunction(r1, count, CALL_FUNCTION, NullCallWrapper());
3843  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3844  __ jmp(&done);
3845 
3846  __ bind(&runtime);
3847  __ push(r0);
3848  __ CallRuntime(Runtime::kCall, args->length());
3849  __ bind(&done);
3850 
3851  context()->Plug(r0);
3852 }
3853 
3854 
3855 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3856  RegExpConstructResultStub stub(isolate());
3857  ZoneList<Expression*>* args = expr->arguments();
3858  DCHECK(args->length() == 3);
3859  VisitForStackValue(args->at(0));
3860  VisitForStackValue(args->at(1));
3861  VisitForAccumulatorValue(args->at(2));
3862  __ pop(r1);
3863  __ pop(r2);
3864  __ CallStub(&stub);
3865  context()->Plug(r0);
3866 }
3867 
3868 
3869 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3870  ZoneList<Expression*>* args = expr->arguments();
3871  DCHECK_EQ(2, args->length());
3872  DCHECK_NE(NULL, args->at(0)->AsLiteral());
3873  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3874 
3875  Handle<FixedArray> jsfunction_result_caches(
3876  isolate()->native_context()->jsfunction_result_caches());
3877  if (jsfunction_result_caches->length() <= cache_id) {
3878  __ Abort(kAttemptToUseUndefinedCache);
3879  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
3880  context()->Plug(r0);
3881  return;
3882  }
3883 
3884  VisitForAccumulatorValue(args->at(1));
3885 
3886  Register key = r0;
3887  Register cache = r1;
3888  __ ldr(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3889  __ ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
3890  __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3891  __ ldr(cache,
3892  FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3893 
3894 
3895  Label done, not_found;
3896  __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
3897  // r2 now holds finger offset as a smi.
3898  __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3899  // r3 now points to the start of fixed array elements.
3900  __ ldr(r2, MemOperand::PointerAddressFromSmiKey(r3, r2, PreIndex));
3901  // Note side effect of PreIndex: r3 now points to the key of the pair.
3902  __ cmp(key, r2);
3903  __ b(ne, &not_found);
3904 
3905  __ ldr(r0, MemOperand(r3, kPointerSize));
3906  __ b(&done);
3907 
3908  __ bind(&not_found);
3909  // Call runtime to perform the lookup.
3910  __ Push(cache, key);
3911  __ CallRuntime(Runtime::kGetFromCache, 2);
3912 
3913  __ bind(&done);
3914  context()->Plug(r0);
3915 }
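 // The result cache is a FixedArray of (key, value) pairs with a smi
 // "finger" marking the most recently used pair. Only the pair under the
 // finger is probed inline; on a miss, Runtime::kGetFromCache performs the
 // full lookup (and is expected to move the finger).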
3916 
3917 
3918 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3919  ZoneList<Expression*>* args = expr->arguments();
3920  VisitForAccumulatorValue(args->at(0));
3921 
3922  Label materialize_true, materialize_false;
3923  Label* if_true = NULL;
3924  Label* if_false = NULL;
3925  Label* fall_through = NULL;
3926  context()->PrepareTest(&materialize_true, &materialize_false,
3927  &if_true, &if_false, &fall_through);
3928 
3929  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
3930  __ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
3931  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3932  Split(eq, if_true, if_false, fall_through);
3933 
3934  context()->Plug(if_true, if_false);
3935 }
3936 
3937 
3938 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3939  ZoneList<Expression*>* args = expr->arguments();
3940  DCHECK(args->length() == 1);
3941  VisitForAccumulatorValue(args->at(0));
3942 
3943  __ AssertString(r0);
3944 
3945  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
3946  __ IndexFromHash(r0, r0);
3947 
3948  context()->Plug(r0);
3949 }
3950 
3951 
3952 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
3953  Label bailout, done, one_char_separator, long_separator, non_trivial_array,
3954  not_size_one_array, loop, empty_separator_loop, one_char_separator_loop,
3955  one_char_separator_loop_entry, long_separator_loop;
3956  ZoneList<Expression*>* args = expr->arguments();
3957  DCHECK(args->length() == 2);
3958  VisitForStackValue(args->at(1));
3959  VisitForAccumulatorValue(args->at(0));
3960 
3961  // All aliases of the same register have disjoint lifetimes.
3962  Register array = r0;
3963  Register elements = no_reg; // Will be r0.
3964  Register result = no_reg; // Will be r0.
3965  Register separator = r1;
3966  Register array_length = r2;
3967  Register result_pos = no_reg; // Will be r2
3968  Register string_length = r3;
3969  Register string = r4;
3970  Register element = r5;
3971  Register elements_end = r6;
3972  Register scratch = r9;
3973 
3974  // Separator operand is on the stack.
3975  __ pop(separator);
3976 
3977  // Check that the array is a JSArray.
3978  __ JumpIfSmi(array, &bailout);
3979  __ CompareObjectType(array, scratch, array_length, JS_ARRAY_TYPE);
3980  __ b(ne, &bailout);
3981 
3982  // Check that the array has fast elements.
3983  __ CheckFastElements(scratch, array_length, &bailout);
3984 
3985  // If the array has length zero, return the empty string.
3986  __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
3987  __ SmiUntag(array_length, SetCC);
3988  __ b(ne, &non_trivial_array);
3989  __ LoadRoot(r0, Heap::kempty_stringRootIndex);
3990  __ b(&done);
3991 
3992  __ bind(&non_trivial_array);
3993 
3994  // Get the FixedArray containing array's elements.
3995  elements = array;
3996  __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
3997  array = no_reg; // End of array's live range.
3998 
3999  // Check that all array elements are sequential one-byte strings, and
4000  // accumulate the sum of their lengths, as a smi-encoded value.
4001  __ mov(string_length, Operand::Zero());
4002  __ add(element,
4003  elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4004  __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
4005  // Loop condition: while (element < elements_end).
4006  // Live values in registers:
4007  // elements: Fixed array of strings.
4008  // array_length: Length of the fixed array of strings (not smi)
4009  // separator: Separator string
4010  // string_length: Accumulated sum of string lengths (smi).
4011  // element: Current array element.
4012  // elements_end: Array end.
4013  if (generate_debug_code_) {
4014  __ cmp(array_length, Operand::Zero());
4015  __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
4016  }
4017  __ bind(&loop);
4018  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4019  __ JumpIfSmi(string, &bailout);
4020  __ ldr(scratch, FieldMemOperand(string, HeapObject::kMapOffset));
4021  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
4022  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch, scratch, &bailout);
4023  __ ldr(scratch, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
4024  __ add(string_length, string_length, Operand(scratch), SetCC);
4025  __ b(vs, &bailout);
4026  __ cmp(element, elements_end);
4027  __ b(lt, &loop);
4028 
4029  // If array_length is 1, return elements[0], a string.
4030  __ cmp(array_length, Operand(1));
4031  __ b(ne, &not_size_one_array);
4032  __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
4033  __ b(&done);
4034 
4035  __ bind(&not_size_one_array);
4036 
4037  // Live values in registers:
4038  // separator: Separator string
4039  // array_length: Length of the array.
4040  // string_length: Sum of string lengths (smi).
4041  // elements: FixedArray of strings.
4042 
4043  // Check that the separator is a flat one-byte string.
4044  __ JumpIfSmi(separator, &bailout);
4045  __ ldr(scratch, FieldMemOperand(separator, HeapObject::kMapOffset));
4046  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
4047  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch, scratch, &bailout);
4048 
4049  // Add (separator length times array_length) - separator length to the
4050  // string_length to get the length of the result string. array_length is not
4051  // a smi but the other values are, so the result is a smi.
4052  __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4053  __ sub(string_length, string_length, Operand(scratch));
4054  __ smull(scratch, ip, array_length, scratch);
4055  // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
4056  // zero.
4057  __ cmp(ip, Operand::Zero());
4058  __ b(ne, &bailout);
4059  __ tst(scratch, Operand(0x80000000));
4060  __ b(ne, &bailout);
4061  __ add(string_length, string_length, Operand(scratch), SetCC);
4062  __ b(vs, &bailout);
4063  __ SmiUntag(string_length);
4064 
4065  // Get first element in the array to free up the elements register to be used
4066  // for the result.
4067  __ add(element,
4068  elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4069  result = elements; // End of live range for elements.
4070  elements = no_reg;
4071  // Live values in registers:
4072  // element: First array element
4073  // separator: Separator string
4074  // string_length: Length of result string (not smi)
4075  // array_length: Length of the array.
4076  __ AllocateOneByteString(result, string_length, scratch,
4077  string, // used as scratch
4078  elements_end, // used as scratch
4079  &bailout);
4080  // Prepare for looping. Set up elements_end to point to the end of the
4081  // array, and result_pos to the position in the result where the first
4082  // character will be written.
4083  __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
4084  result_pos = array_length; // End of live range for array_length.
4085  array_length = no_reg;
4086  __ add(result_pos,
4087  result,
4088  Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4089 
4090  // Check the length of the separator.
4091  __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4092  __ cmp(scratch, Operand(Smi::FromInt(1)));
4093  __ b(eq, &one_char_separator);
4094  __ b(gt, &long_separator);
4095 
4096  // Empty separator case
4097  __ bind(&empty_separator_loop);
4098  // Live values in registers:
4099  // result_pos: the position to which we are currently copying characters.
4100  // element: Current array element.
4101  // elements_end: Array end.
4102 
4103  // Copy next array element to the result.
4104  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4105  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
4106  __ SmiUntag(string_length);
4107  __ add(string,
4108  string,
4109  Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4110  __ CopyBytes(string, result_pos, string_length, scratch);
4111  __ cmp(element, elements_end);
4112  __ b(lt, &empty_separator_loop); // End while (element < elements_end).
4113  DCHECK(result.is(r0));
4114  __ b(&done);
4115 
4116  // One-character separator case
4117  __ bind(&one_char_separator);
4118  // Replace separator with its one-byte character value.
4119  __ ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4120  // Jump into the loop after the code that copies the separator, so the first
4121  // element is not preceded by a separator.
4122  __ jmp(&one_char_separator_loop_entry);
4123 
4124  __ bind(&one_char_separator_loop);
4125  // Live values in registers:
4126  // result_pos: the position to which we are currently copying characters.
4127  // element: Current array element.
4128  // elements_end: Array end.
4129  // separator: Single separator one-byte char (in lower byte).
4130 
4131  // Copy the separator character to the result.
4132  __ strb(separator, MemOperand(result_pos, 1, PostIndex));
4133 
4134  // Copy next array element to the result.
4135  __ bind(&one_char_separator_loop_entry);
4136  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4137  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
4138  __ SmiUntag(string_length);
4139  __ add(string,
4140  string,
4141  Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4142  __ CopyBytes(string, result_pos, string_length, scratch);
4143  __ cmp(element, elements_end);
4144  __ b(lt, &one_char_separator_loop); // End while (element < elements_end).
4145  DCHECK(result.is(r0));
4146  __ b(&done);
4147 
4148  // Long separator case (separator is more than one character). Entry is at the
4149  // label long_separator below.
4150  __ bind(&long_separator_loop);
4151  // Live values in registers:
4152  // result_pos: the position to which we are currently copying characters.
4153  // element: Current array element.
4154  // elements_end: Array end.
4155  // separator: Separator string.
4156 
4157  // Copy the separator to the result.
4158  __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset));
4159  __ SmiUntag(string_length);
4160  __ add(string,
4161  separator,
4162  Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4163  __ CopyBytes(string, result_pos, string_length, scratch);
4164 
4165  __ bind(&long_separator);
4166  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4167  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
4168  __ SmiUntag(string_length);
4169  __ add(string,
4170  string,
4171  Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4172  __ CopyBytes(string, result_pos, string_length, scratch);
4173  __ cmp(element, elements_end);
4174  __ b(lt, &long_separator_loop); // End while (element < elements_end).
4175  DCHECK(result.is(r0));
4176  __ b(&done);
4177 
4178  __ bind(&bailout);
4179  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
4180  __ bind(&done);
4181  context()->Plug(r0);
4182 }
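 // Shape of the fast path: one validation pass checks that every element
 // is a sequential one-byte string and sums the lengths with overflow
 // checks, the result string is allocated once up front, and one of three
 // copy loops (empty, one-char, or long separator) fills it in. Every
 // failure path returns undefined instead of throwing, leaving the generic
 // join to the caller.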
4183 
4184 
4185 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4186  DCHECK(expr->arguments()->length() == 0);
4187  ExternalReference debug_is_active =
4188  ExternalReference::debug_is_active_address(isolate());
4189  __ mov(ip, Operand(debug_is_active));
4190  __ ldrb(r0, MemOperand(ip));
4191  __ SmiTag(r0);
4192  context()->Plug(r0);
4193 }
4194 
4195 
4196 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4197  if (expr->function() != NULL &&
4198  expr->function()->intrinsic_type == Runtime::INLINE) {
4199  Comment cmnt(masm_, "[ InlineRuntimeCall");
4200  EmitInlineRuntimeCall(expr);
4201  return;
4202  }
4203 
4204  Comment cmnt(masm_, "[ CallRuntime");
4205  ZoneList<Expression*>* args = expr->arguments();
4206  int arg_count = args->length();
4207 
4208  if (expr->is_jsruntime()) {
4209  // Push the builtins object as the receiver.
4210  Register receiver = LoadDescriptor::ReceiverRegister();
4211  __ ldr(receiver, GlobalObjectOperand());
4212  __ ldr(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
4213  __ push(receiver);
4214 
4215  // Load the function from the receiver.
4216  __ mov(LoadDescriptor::NameRegister(), Operand(expr->name()));
4217  if (FLAG_vector_ics) {
4218  __ mov(VectorLoadICDescriptor::SlotRegister(),
4219  Operand(Smi::FromInt(expr->CallRuntimeFeedbackSlot())));
4220  CallLoadIC(NOT_CONTEXTUAL);
4221  } else {
4222  CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4223  }
4224 
4225  // Push the target function under the receiver.
4226  __ ldr(ip, MemOperand(sp, 0));
4227  __ push(ip);
4228  __ str(r0, MemOperand(sp, kPointerSize));
4229 
4230  // Push the arguments ("left-to-right").
4231  int arg_count = args->length();
4232  for (int i = 0; i < arg_count; i++) {
4233  VisitForStackValue(args->at(i));
4234  }
4235 
4236  // Record source position of the IC call.
4237  SetSourcePosition(expr->position());
4238  CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4239  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
4240  __ CallStub(&stub);
4241 
4242  // Restore context register.
4243  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4244 
4245  context()->DropAndPlug(1, r0);
4246  } else {
4247  // Push the arguments ("left-to-right").
4248  for (int i = 0; i < arg_count; i++) {
4249  VisitForStackValue(args->at(i));
4250  }
4251 
4252  // Call the C runtime function.
4253  __ CallRuntime(expr->function(), arg_count);
4254  context()->Plug(r0);
4255  }
4256 }
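 // Two dispatch paths: expr->is_jsruntime() calls a JavaScript builtin by
 // loading it off the builtins object with a load IC and invoking it via
 // CallFunctionStub, while all other runtime calls go straight to the C++
 // runtime through CallRuntime.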
4257 
4258 
4259 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4260  switch (expr->op()) {
4261  case Token::DELETE: {
4262  Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4263  Property* property = expr->expression()->AsProperty();
4264  VariableProxy* proxy = expr->expression()->AsVariableProxy();
4265 
4266  if (property != NULL) {
4267  VisitForStackValue(property->obj());
4268  VisitForStackValue(property->key());
4269  __ mov(r1, Operand(Smi::FromInt(strict_mode())));
4270  __ push(r1);
4271  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4272  context()->Plug(r0);
4273  } else if (proxy != NULL) {
4274  Variable* var = proxy->var();
4275  // Delete of an unqualified identifier is disallowed in strict mode
4276  // but "delete this" is allowed.
4277  DCHECK(strict_mode() == SLOPPY || var->is_this());
4278  if (var->IsUnallocated()) {
4279  __ ldr(r2, GlobalObjectOperand());
4280  __ mov(r1, Operand(var->name()));
4281  __ mov(r0, Operand(Smi::FromInt(SLOPPY)));
4282  __ Push(r2, r1, r0);
4283  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4284  context()->Plug(r0);
4285  } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4286  // Result of deleting non-global, non-dynamic variables is false.
4287  // The subexpression does not have side effects.
4288  context()->Plug(var->is_this());
4289  } else {
4290  // Non-global variable. Call the runtime to try to delete from the
4291  // context where the variable was introduced.
4292  DCHECK(!context_register().is(r2));
4293  __ mov(r2, Operand(var->name()));
4294  __ Push(context_register(), r2);
4295  __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4296  context()->Plug(r0);
4297  }
4298  } else {
4299  // Result of deleting non-property, non-variable reference is true.
4300  // The subexpression may have side effects.
4301  VisitForEffect(expr->expression());
4302  context()->Plug(true);
4303  }
4304  break;
4305  }
4306 
4307  case Token::VOID: {
4308  Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4309  VisitForEffect(expr->expression());
4310  context()->Plug(Heap::kUndefinedValueRootIndex);
4311  break;
4312  }
4313 
4314  case Token::NOT: {
4315  Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4316  if (context()->IsEffect()) {
4317  // Unary NOT has no side effects so it's only necessary to visit the
4318  // subexpression. Match the optimizing compiler by not branching.
4319  VisitForEffect(expr->expression());
4320  } else if (context()->IsTest()) {
4321  const TestContext* test = TestContext::cast(context());
4322  // The labels are swapped for the recursive call.
4323  VisitForControl(expr->expression(),
4324  test->false_label(),
4325  test->true_label(),
4326  test->fall_through());
4327  context()->Plug(test->true_label(), test->false_label());
4328  } else {
4329  // We handle value contexts explicitly rather than simply visiting
4330  // for control and plugging the control flow into the context,
4331  // because we need to prepare a pair of extra administrative AST ids
4332  // for the optimizing compiler.
4333  DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4334  Label materialize_true, materialize_false, done;
4335  VisitForControl(expr->expression(),
4336  &materialize_false,
4337  &materialize_true,
4338  &materialize_true);
4339  __ bind(&materialize_true);
4340  PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4341  __ LoadRoot(r0, Heap::kTrueValueRootIndex);
4342  if (context()->IsStackValue()) __ push(r0);
4343  __ jmp(&done);
4344  __ bind(&materialize_false);
4345  PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4346  __ LoadRoot(r0, Heap::kFalseValueRootIndex);
4347  if (context()->IsStackValue()) __ push(r0);
4348  __ bind(&done);
4349  }
4350  break;
4351  }
4352 
4353  case Token::TYPEOF: {
4354  Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4355  { StackValueContext context(this);
4356  VisitForTypeofValue(expr->expression());
4357  }
4358  __ CallRuntime(Runtime::kTypeof, 1);
4359  context()->Plug(r0);
4360  break;
4361  }
4362 
4363  default:
4364  UNREACHABLE();
4365  }
4366 }
4367 
4368 
4369 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4370  DCHECK(expr->expression()->IsValidReferenceExpression());
4371 
4372  Comment cmnt(masm_, "[ CountOperation");
4373  SetSourcePosition(expr->position());
4374 
4375  // Expression can only be a property, a global or a (parameter or local)
4376  // slot.
4377  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
4378  LhsKind assign_type = VARIABLE;
4379  Property* prop = expr->expression()->AsProperty();
4380  // In case of a property we use the uninitialized expression context
4381  // of the key to detect a named property.
4382  if (prop != NULL) {
4383  assign_type =
4384  (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4385  if (prop->IsSuperAccess()) {
4386  // throw exception.
4387  VisitSuperReference(prop->obj()->AsSuperReference());
4388  return;
4389  }
4390  }
4391 
4392  // Evaluate expression and get value.
4393  if (assign_type == VARIABLE) {
4394  DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4395  AccumulatorValueContext context(this);
4396  EmitVariableLoad(expr->expression()->AsVariableProxy());
4397  } else {
4398  // Reserve space for result of postfix operation.
4399  if (expr->is_postfix() && !context()->IsEffect()) {
4400  __ mov(ip, Operand(Smi::FromInt(0)));
4401  __ push(ip);
4402  }
4403  if (assign_type == NAMED_PROPERTY) {
4404  // Put the object both on the stack and in the register.
4405  VisitForStackValue(prop->obj());
4406  __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
4407  EmitNamedPropertyLoad(prop);
4408  } else {
4409  VisitForStackValue(prop->obj());
4410  VisitForStackValue(prop->key());
4411  __ ldr(LoadDescriptor::ReceiverRegister(),
4412  MemOperand(sp, 1 * kPointerSize));
4413  __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
4414  EmitKeyedPropertyLoad(prop);
4415  }
4416  }
4417 
4418  // We need a second deoptimization point after loading the value
4419  // in case evaluating the property load may have a side effect.
4420  if (assign_type == VARIABLE) {
4421  PrepareForBailout(expr->expression(), TOS_REG);
4422  } else {
4423  PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4424  }
4425 
4426  // Inline smi case if we are in a loop.
4427  Label stub_call, done;
4428  JumpPatchSite patch_site(masm_);
4429 
4430  int count_value = expr->op() == Token::INC ? 1 : -1;
4431  if (ShouldInlineSmiCase(expr->op())) {
4432  Label slow;
4433  patch_site.EmitJumpIfNotSmi(r0, &slow);
4434 
4435  // Save result for postfix expressions.
4436  if (expr->is_postfix()) {
4437  if (!context()->IsEffect()) {
4438  // Save the result on the stack. If we have a named or keyed property
4439  // we store the result under the receiver that is currently on top
4440  // of the stack.
4441  switch (assign_type) {
4442  case VARIABLE:
4443  __ push(r0);
4444  break;
4445  case NAMED_PROPERTY:
4446  __ str(r0, MemOperand(sp, kPointerSize));
4447  break;
4448  case KEYED_PROPERTY:
4449  __ str(r0, MemOperand(sp, 2 * kPointerSize));
4450  break;
4451  }
4452  }
4453  }
4454 
4455  __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
4456  __ b(vc, &done);
4457  // Call stub. Undo operation first.
4458  __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
4459  __ jmp(&stub_call);
4460  __ bind(&slow);
4461  }
4462  ToNumberStub convert_stub(isolate());
4463  __ CallStub(&convert_stub);
4464 
4465  // Save result for postfix expressions.
4466  if (expr->is_postfix()) {
4467  if (!context()->IsEffect()) {
4468  // Save the result on the stack. If we have a named or keyed property
4469  // we store the result under the receiver that is currently on top
4470  // of the stack.
4471  switch (assign_type) {
4472  case VARIABLE:
4473  __ push(r0);
4474  break;
4475  case NAMED_PROPERTY:
4476  __ str(r0, MemOperand(sp, kPointerSize));
4477  break;
4478  case KEYED_PROPERTY:
4479  __ str(r0, MemOperand(sp, 2 * kPointerSize));
4480  break;
4481  }
4482  }
4483  }
4484 
4485 
4486  __ bind(&stub_call);
4487  __ mov(r1, r0);
4488  __ mov(r0, Operand(Smi::FromInt(count_value)));
4489 
4490  // Record position before stub call.
4491  SetSourcePosition(expr->position());
4492 
4493  Handle<Code> code =
4494  CodeFactory::BinaryOpIC(isolate(), Token::ADD, NO_OVERWRITE).code();
4495  CallIC(code, expr->CountBinOpFeedbackId());
4496  patch_site.EmitPatchInfo();
4497  __ bind(&done);
4498 
4499  // Store the value returned in r0.
4500  switch (assign_type) {
4501  case VARIABLE:
4502  if (expr->is_postfix()) {
4503  { EffectContext context(this);
4504  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4505  Token::ASSIGN);
4506  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4507  context.Plug(r0);
4508  }
4509  // For all contexts except the effect context we have the result on
4510  // top of the stack.
4511  if (!context()->IsEffect()) {
4512  context()->PlugTOS();
4513  }
4514  } else {
4515  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4516  Token::ASSIGN);
4517  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4518  context()->Plug(r0);
4519  }
4520  break;
4521  case NAMED_PROPERTY: {
4522  __ mov(StoreDescriptor::NameRegister(),
4523  Operand(prop->key()->AsLiteral()->value()));
4524  __ pop(StoreDescriptor::ReceiverRegister());
4525  CallStoreIC(expr->CountStoreFeedbackId());
4526  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4527  if (expr->is_postfix()) {
4528  if (!context()->IsEffect()) {
4529  context()->PlugTOS();
4530  }
4531  } else {
4532  context()->Plug(r0);
4533  }
4534  break;
4535  }
4536  case KEYED_PROPERTY: {
4537  __ Pop(StoreDescriptor::ReceiverRegister(),
4538  StoreDescriptor::NameRegister());
4539  Handle<Code> ic =
4540  CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
4541  CallIC(ic, expr->CountStoreFeedbackId());
4542  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4543  if (expr->is_postfix()) {
4544  if (!context()->IsEffect()) {
4545  context()->PlugTOS();
4546  }
4547  } else {
4548  context()->Plug(r0);
4549  }
4550  break;
4551  }
4552  }
4553 }
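 // Fast-path note: the inline case adds Smi::FromInt(count_value) with
 // SetCC and keeps the result only when no overflow occurred (vc); on
 // overflow it undoes the addition and falls through to the BinaryOpIC
 // stub call, which the JumpPatchSite records for later patching.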
4554 
4555 
4556 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4557  DCHECK(!context()->IsEffect());
4558  DCHECK(!context()->IsTest());
4559  VariableProxy* proxy = expr->AsVariableProxy();
4560  if (proxy != NULL && proxy->var()->IsUnallocated()) {
4561  Comment cmnt(masm_, "[ Global variable");
4562  __ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
4563  __ mov(LoadDescriptor::NameRegister(), Operand(proxy->name()));
4564  if (FLAG_vector_ics) {
4565  __ mov(VectorLoadICDescriptor::SlotRegister(),
4566  Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
4567  }
4568  // Use a regular load, not a contextual load, to avoid a reference
4569  // error.
4570  CallLoadIC(NOT_CONTEXTUAL);
4571  PrepareForBailout(expr, TOS_REG);
4572  context()->Plug(r0);
4573  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4574  Comment cmnt(masm_, "[ Lookup slot");
4575  Label done, slow;
4576 
4577  // Generate code for loading from variables potentially shadowed
4578  // by eval-introduced variables.
4579  EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);
4580 
4581  __ bind(&slow);
4582  __ mov(r0, Operand(proxy->name()));
4583  __ Push(cp, r0);
4584  __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
4585  PrepareForBailout(expr, TOS_REG);
4586  __ bind(&done);
4587 
4588  context()->Plug(r0);
4589  } else {
4590  // This expression cannot throw a reference error at the top level.
4591  VisitInDuplicateContext(expr);
4592  }
4593 }
4594 
4595 
4596 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4597  Expression* sub_expr,
4598  Handle<String> check) {
4599  Label materialize_true, materialize_false;
4600  Label* if_true = NULL;
4601  Label* if_false = NULL;
4602  Label* fall_through = NULL;
4603  context()->PrepareTest(&materialize_true, &materialize_false,
4604  &if_true, &if_false, &fall_through);
4605 
4606  { AccumulatorValueContext context(this);
4607  VisitForTypeofValue(sub_expr);
4608  }
4609  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4610 
4611  Factory* factory = isolate()->factory();
4612  if (String::Equals(check, factory->number_string())) {
4613  __ JumpIfSmi(r0, if_true);
4614  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
4615  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
4616  __ cmp(r0, ip);
4617  Split(eq, if_true, if_false, fall_through);
4618  } else if (String::Equals(check, factory->string_string())) {
4619  __ JumpIfSmi(r0, if_false);
4620  // Check for undetectable objects => false.
4621  __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
4622  __ b(ge, if_false);
4623  __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
4624  __ tst(r1, Operand(1 << Map::kIsUndetectable));
4625  Split(eq, if_true, if_false, fall_through);
4626  } else if (String::Equals(check, factory->symbol_string())) {
4627  __ JumpIfSmi(r0, if_false);
4628  __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE);
4629  Split(eq, if_true, if_false, fall_through);
4630  } else if (String::Equals(check, factory->boolean_string())) {
4631  __ CompareRoot(r0, Heap::kTrueValueRootIndex);
4632  __ b(eq, if_true);
4633  __ CompareRoot(r0, Heap::kFalseValueRootIndex);
4634  Split(eq, if_true, if_false, fall_through);
4635  } else if (String::Equals(check, factory->undefined_string())) {
4636  __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
4637  __ b(eq, if_true);
4638  __ JumpIfSmi(r0, if_false);
4639  // Check for undetectable objects => true.
4640  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
4641  __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
4642  __ tst(r1, Operand(1 << Map::kIsUndetectable));
4643  Split(ne, if_true, if_false, fall_through);
4644 
4645  } else if (String::Equals(check, factory->function_string())) {
4646  __ JumpIfSmi(r0, if_false);
4647  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4648  __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE);
4649  __ b(eq, if_true);
4650  __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE));
4651  Split(eq, if_true, if_false, fall_through);
4652  } else if (String::Equals(check, factory->object_string())) {
4653  __ JumpIfSmi(r0, if_false);
4654  __ CompareRoot(r0, Heap::kNullValueRootIndex);
4655  __ b(eq, if_true);
4656  // Check for JS objects => true.
4657  __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
4658  __ b(lt, if_false);
4659  __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4660  __ b(gt, if_false);
4661  // Check for undetectable objects => false.
4662  __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
4663  __ tst(r1, Operand(1 << Map::kIsUndetectable));
4664  Split(eq, if_true, if_false, fall_through);
4665  } else {
4666  if (if_false != fall_through) __ jmp(if_false);
4667  }
4668  context()->Plug(if_true, if_false);
4669 }
4670 
4671 
4672 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4673  Comment cmnt(masm_, "[ CompareOperation");
4674  SetSourcePosition(expr->position());
4675 
4676  // First we try a fast inlined version of the compare when one of
4677  // the operands is a literal.
4678  if (TryLiteralCompare(expr)) return;
4679 
4680  // Always perform the comparison for its control flow. Pack the result
4681  // into the expression's context after the comparison is performed.
4682  Label materialize_true, materialize_false;
4683  Label* if_true = NULL;
4684  Label* if_false = NULL;
4685  Label* fall_through = NULL;
4686  context()->PrepareTest(&materialize_true, &materialize_false,
4687  &if_true, &if_false, &fall_through);
4688 
4689  Token::Value op = expr->op();
4690  VisitForStackValue(expr->left());
4691  switch (op) {
4692  case Token::IN:
4693  VisitForStackValue(expr->right());
4694  __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4695  PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4696  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
4697  __ cmp(r0, ip);
4698  Split(eq, if_true, if_false, fall_through);
4699  break;
4700 
4701  case Token::INSTANCEOF: {
4702  VisitForStackValue(expr->right());
4703  InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
4704  __ CallStub(&stub);
4705  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4706  // The stub returns 0 for true.
4707  __ tst(r0, r0);
4708  Split(eq, if_true, if_false, fall_through);
4709  break;
4710  }
4711 
4712  default: {
4713  VisitForAccumulatorValue(expr->right());
4714  Condition cond = CompareIC::ComputeCondition(op);
4715  __ pop(r1);
4716 
4717  bool inline_smi_code = ShouldInlineSmiCase(op);
4718  JumpPatchSite patch_site(masm_);
4719  if (inline_smi_code) {
4720  Label slow_case;
4721  __ orr(r2, r0, Operand(r1));
4722  patch_site.EmitJumpIfNotSmi(r2, &slow_case);
4723  __ cmp(r1, r0);
4724  Split(cond, if_true, if_false, NULL);
4725  __ bind(&slow_case);
4726  }
4727 
4728  // Record position and call the compare IC.
4729  SetSourcePosition(expr->position());
4730  Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
4731  CallIC(ic, expr->CompareOperationFeedbackId());
4732  patch_site.EmitPatchInfo();
4733  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4734  __ cmp(r0, Operand::Zero());
4735  Split(cond, if_true, if_false, fall_through);
4736  }
4737  }
4738 
4739  // Convert the result of the comparison into one expected for this
4740  // expression's context.
4741  context()->Plug(if_true, if_false);
4742 }
4743 
4744 
4745 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4746  Expression* sub_expr,
4747  NilValue nil) {
4748  Label materialize_true, materialize_false;
4749  Label* if_true = NULL;
4750  Label* if_false = NULL;
4751  Label* fall_through = NULL;
4752  context()->PrepareTest(&materialize_true, &materialize_false,
4753  &if_true, &if_false, &fall_through);
4754 
4755  VisitForAccumulatorValue(sub_expr);
4756  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4757  if (expr->op() == Token::EQ_STRICT) {
4758  Heap::RootListIndex nil_value = nil == kNullValue ?
4759  Heap::kNullValueRootIndex :
4760  Heap::kUndefinedValueRootIndex;
4761  __ LoadRoot(r1, nil_value);
4762  __ cmp(r0, r1);
4763  Split(eq, if_true, if_false, fall_through);
4764  } else {
4765  Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4766  CallIC(ic, expr->CompareOperationFeedbackId());
4767  __ cmp(r0, Operand(0));
4768  Split(ne, if_true, if_false, fall_through);
4769  }
4770  context()->Plug(if_true, if_false);
4771 }
4772 
4773 
4774 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4775  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4776  context()->Plug(r0);
4777 }
4778 
4779 
4780 Register FullCodeGenerator::result_register() {
4781  return r0;
4782 }
4783 
4784 
4785 Register FullCodeGenerator::context_register() {
4786  return cp;
4787 }
4788 
4789 
4790 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4791  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4792  __ str(value, MemOperand(fp, frame_offset));
4793 }
4794 
4795 
4796 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4797  __ ldr(dst, ContextOperand(cp, context_index));
4798 }
4799 
4800 
4801 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4802  Scope* declaration_scope = scope()->DeclarationScope();
4803  if (declaration_scope->is_global_scope() ||
4804  declaration_scope->is_module_scope()) {
4805  // Contexts nested in the native context have a canonical empty function
4806  // as their closure, not the anonymous closure containing the global
4807  // code. Pass a smi sentinel and let the runtime look up the empty
4808  // function.
4809  __ mov(ip, Operand(Smi::FromInt(0)));
4810  } else if (declaration_scope->is_eval_scope()) {
4811  // Contexts created by a call to eval have the same closure as the
4812  // context calling eval, not the anonymous closure containing the eval
4813  // code. Fetch it from the context.
4814  __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
4815  } else {
4816  DCHECK(declaration_scope->is_function_scope());
4817  __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4818  }
4819  __ push(ip);
4820 }
4821 
4822 
4823 // ----------------------------------------------------------------------------
4824 // Non-local control flow support.
4825 
4826 void FullCodeGenerator::EnterFinallyBlock() {
4827  DCHECK(!result_register().is(r1));
4828  // Store result register while executing finally block.
4829  __ push(result_register());
4830  // Cook the return address in lr onto the stack (smi-encoded Code* delta).
4831  __ sub(r1, lr, Operand(masm_->CodeObject()));
4832  __ SmiTag(r1);
4833 
4834  // Store the cooked return address while executing the finally block.
4835  __ push(r1);
4836 
4837  // Store pending message while executing finally block.
4838  ExternalReference pending_message_obj =
4839  ExternalReference::address_of_pending_message_obj(isolate());
4840  __ mov(ip, Operand(pending_message_obj));
4841  __ ldr(r1, MemOperand(ip));
4842  __ push(r1);
4843 
4844  ExternalReference has_pending_message =
4845  ExternalReference::address_of_has_pending_message(isolate());
4846  __ mov(ip, Operand(has_pending_message));
4847  STATIC_ASSERT(sizeof(bool) == 1); // NOLINT(runtime/sizeof)
4848  __ ldrb(r1, MemOperand(ip));
4849  __ SmiTag(r1);
4850  __ push(r1);
4851 
4852  ExternalReference pending_message_script =
4853  ExternalReference::address_of_pending_message_script(isolate());
4854  __ mov(ip, Operand(pending_message_script));
4855  __ ldr(r1, MemOperand(ip));
4856  __ push(r1);
4857 }
4858 
4859 
4860 void FullCodeGenerator::ExitFinallyBlock() {
4861  DCHECK(!result_register().is(r1));
4862  // Restore pending message from stack.
4863  __ pop(r1);
4864  ExternalReference pending_message_script =
4865  ExternalReference::address_of_pending_message_script(isolate());
4866  __ mov(ip, Operand(pending_message_script));
4867  __ str(r1, MemOperand(ip));
4868 
4869  __ pop(r1);
4870  __ SmiUntag(r1);
4871  ExternalReference has_pending_message =
4872  ExternalReference::address_of_has_pending_message(isolate());
4873  __ mov(ip, Operand(has_pending_message));
4874  STATIC_ASSERT(sizeof(bool) == 1); // NOLINT(runtime/sizeof)
4875  __ strb(r1, MemOperand(ip));
4876 
4877  __ pop(r1);
4878  ExternalReference pending_message_obj =
4879  ExternalReference::address_of_pending_message_obj(isolate());
4880  __ mov(ip, Operand(pending_message_obj));
4881  __ str(r1, MemOperand(ip));
4882 
4883  // Restore the cooked return address from the stack.
4884  __ pop(r1);
4885 
4886  // Restore the result register, then uncook the return address and return.
4887  __ pop(result_register());
4888  __ SmiUntag(r1);
4889  __ add(pc, r1, Operand(masm_->CodeObject()));
4890 }
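 // The return address is saved as a smi-tagged offset from the code
 // object's start ("cooked") so it remains valid if the GC moves the code
 // object while the finally block runs; adding masm_->CodeObject() back
 // uncooks it into an absolute pc.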
4891 
4892 
4893 #undef __
4894 
4895 #define __ ACCESS_MASM(masm())
4896 
4897 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4898  int* stack_depth,
4899  int* context_length) {
4900  // The macros used here must preserve the result register.
4901 
4902  // Because the handler block contains the context of the finally
4903  // code, we can restore it directly from there for the finally code
4904  // rather than iteratively unwinding contexts via their previous
4905  // links.
4906  __ Drop(*stack_depth); // Down to the handler block.
4907  if (*context_length > 0) {
4908  // Restore the context to its dedicated register and the stack.
4909  __ ldr(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
4910  __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4911  }
4912  __ PopTryHandler();
4913  __ bl(finally_entry_);
4914 
4915  *stack_depth = 0;
4916  *context_length = 0;
4917  return previous_;
4918 }
4919 
4920 
4921 #undef __
4922 
4923 
4924 static Address GetInterruptImmediateLoadAddress(Address pc) {
4925  Address load_address = pc - 2 * Assembler::kInstrSize;
4926  if (!FLAG_enable_ool_constant_pool) {
4927  DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address)));
4928  } else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) {
4929  // This is an extended constant pool lookup.
4930  if (CpuFeatures::IsSupported(ARMv7)) {
4931  load_address -= 2 * Assembler::kInstrSize;
4932  DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
4933  DCHECK(Assembler::IsMovT(
4934  Memory::int32_at(load_address + Assembler::kInstrSize)));
4935  } else {
4936  load_address -= 4 * Assembler::kInstrSize;
4937  DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
4938  DCHECK(Assembler::IsOrrImmed(
4939  Memory::int32_at(load_address + Assembler::kInstrSize)));
4940  DCHECK(Assembler::IsOrrImmed(
4941  Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
4942  DCHECK(Assembler::IsOrrImmed(
4943  Memory::int32_at(load_address + 3 * Assembler::kInstrSize)));
4944  }
4945  } else if (CpuFeatures::IsSupported(ARMv7) &&
4946  Assembler::IsMovT(Memory::int32_at(load_address))) {
4947  // This is a movw / movt immediate load.
4948  load_address -= Assembler::kInstrSize;
4949  DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
4950  } else if (!CpuFeatures::IsSupported(ARMv7) &&
4951  Assembler::IsOrrImmed(Memory::int32_at(load_address))) {
4952  // This is a mov / orr immediate load.
4953  load_address -= 3 * Assembler::kInstrSize;
4954  DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
4955  DCHECK(Assembler::IsOrrImmed(
4956  Memory::int32_at(load_address + Assembler::kInstrSize)));
4957  DCHECK(Assembler::IsOrrImmed(
4958  Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
4959  } else {
4960  // This is a small constant pool lookup.
4961  DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address)));
4962  }
4963  return load_address;
4964 }
4965 
4966 
4967 void BackEdgeTable::PatchAt(Code* unoptimized_code,
4968  Address pc,
4969  BackEdgeState target_state,
4970  Code* replacement_code) {
4971  Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
4972  Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
4973  CodePatcher patcher(branch_address, 1);
4974  switch (target_state) {
4975  case INTERRUPT:
4976  {
4977  // <decrement profiling counter>
4978  // bpl ok
4979  // ; load interrupt stub address into ip - either of (for ARMv7):
4980  // ; <small cp load> | <extended cp load> | <immediate load>
4981  // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm
4982  // | movt ip, #imm | movw ip, #imm
4983  // | ldr ip, [pp, ip]
4984  // ; or (for ARMv6):
4985  // ; <small cp load> | <extended cp load> | <immediate load>
4986  // ldr ip, [pc/pp, #imm] | mov ip, #imm | mov ip, #imm
4987  // | orr ip, ip, #imm | orr ip, ip, #imm
4988  // | orr ip, ip, #imm | orr ip, ip, #imm
4989  // | orr ip, ip, #imm | orr ip, ip, #imm
4990  // blx ip
4991  // <reset profiling counter>
4992  // ok-label
4993 
4994  // Calculate the branch offset to the ok-label: the difference between
4995  // the branch address and |pc| (which points at <blx ip>) plus the
4996  // length of the profiling counter reset sequence.
4997  int branch_offset = pc - Instruction::kPCReadOffset - branch_address +
4998  kProfileCounterResetSequenceLength;
4999  patcher.masm()->b(branch_offset, pl);
5000  break;
5001  }
5002  case ON_STACK_REPLACEMENT:
5003  case OSR_AFTER_STACK_CHECK:
5004  // <decrement profiling counter>
5005  // mov r0, r0 (NOP)
5006  // ; load on-stack replacement address into ip - either of (for ARMv7):
5007  // ; <small cp load> | <extended cp load> | <immediate load>
5008  // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm
5009  // | movt ip, #imm | movw ip, #imm
5010  // | ldr ip, [pp, ip]
5011  // ; or (for ARMv6):
5012  // ; <small cp load> | <extended cp load> | <immediate load>
5013  // ldr ip, [pc/pp, #imm] | mov ip, #imm | mov ip, #imm
5014  // | orr ip, ip, #imm | orr ip, ip, #imm
5015  // | orr ip, ip, #imm | orr ip, ip, #imm
5016  // | orr ip, ip, #imm | orr ip, ip, #imm
5017  // blx ip
5018  // <reset profiling counter>
5019  // ok-label
5020  patcher.masm()->nop();
5021  break;
5022  }
5023 
5024  // Replace the call address.
5025  Assembler::set_target_address_at(pc_immediate_load_address, unoptimized_code,
5026  replacement_code->entry());
5027 
5028  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5029  unoptimized_code, pc_immediate_load_address, replacement_code);
5030 }
5031 
5032 
5033 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
5034  Isolate* isolate,
5035  Code* unoptimized_code,
5036  Address pc) {
5037  DCHECK(Assembler::IsBlxIp(Memory::int32_at(pc - Assembler::kInstrSize)));
5038 
5039  Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
5040  Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
5041  Address interrupt_address = Assembler::target_address_at(
5042  pc_immediate_load_address, unoptimized_code);
5043 
5044  if (Assembler::IsBranch(Assembler::instr_at(branch_address))) {
5045  DCHECK(interrupt_address ==
5046  isolate->builtins()->InterruptCheck()->entry());
5047  return INTERRUPT;
5048  }
5049 
5050  DCHECK(Assembler::IsNop(Assembler::instr_at(branch_address)));
5051 
5052  if (interrupt_address ==
5053  isolate->builtins()->OnStackReplacement()->entry()) {
5054  return ON_STACK_REPLACEMENT;
5055  }
5056 
5057  DCHECK(interrupt_address ==
5058  isolate->builtins()->OsrAfterStackCheck()->entry());
5059  return OSR_AFTER_STACK_CHECK;
5060 }
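 // The state is recovered purely from the patched code: a conditional
 // branch at the check site means INTERRUPT, while a nop plus the loaded
 // call target distinguishes ON_STACK_REPLACEMENT from
 // OSR_AFTER_STACK_CHECK.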
5061 
5062 
5063 } } // namespace v8::internal
5064 
5065 #endif // V8_TARGET_ARCH_ARM
Definition: objects.h:7826
static const int kValueOffset
Definition: objects.h:7546
static const Register ReceiverRegister()
static const Register NameRegister()
static int CallSize(Register target, Condition cond=al)
static const int kIsUndetectable
Definition: objects.h:6244
static const int kBitFieldOffset
Definition: objects.h:6228
static const int kStringWrapperSafeForDefaultValueOf
Definition: objects.h:6251
static const int kInstanceTypeOffset
Definition: objects.h:6229
static const int kBitField2Offset
Definition: objects.h:6233
static const int kConstructorOffset
Definition: objects.h:6191
static const int kPrototypeOffset
Definition: objects.h:6190
static int32_t & int32_at(Address addr)
Definition: v8memory.h:28
static const int kHashFieldOffset
Definition: objects.h:8486
static void MaybeCallEntryHook(MacroAssembler *masm)
static const int kNoPosition
Definition: assembler.h:317
Scope * outer_scope() const
Definition: scopes.h:333
int num_parameters() const
Definition: scopes.h:321
VariableDeclaration * function() const
Definition: scopes.h:309
int ContextChainLength(Scope *scope)
Definition: scopes.cc:715
void VisitIllegalRedeclaration(AstVisitor *visitor)
Definition: scopes.cc:539
Scope * DeclarationScope()
Definition: scopes.cc:737
Variable * arguments() const
Definition: scopes.h:324
Scope * GlobalScope()
Definition: scopes.cc:728
Variable * parameter(int index) const
Definition: scopes.h:316
static const int kHeaderSize
Definition: objects.h:8941
static const int kFormalParameterCountOffset
Definition: objects.h:6946
static const int kInstanceClassNameOffset
Definition: objects.h:6897
static Smi * FromInt(int value)
Definition: objects-inl.h:1321
static bool IsValid(intptr_t value)
Definition: objects-inl.h:1334
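Smi::FromInt and Smi::IsValid implement the tagged small-integer encoding that the inlined smi checks in this file rely on: on 32-bit targets the payload is shifted left by kSmiTagSize and the low bit carries kSmiTag (0), so a single test against kSmiTagMask distinguishes smis from heap objects. A sketch of the 32-bit encoding, not the authoritative implementation:

  // Sketch: kSmiTag == 0 and kSmiTagSize == 1 on ARM, so a smi is simply the
  // 31-bit payload doubled, leaving a zero low bit.
  static Smi* FromInt(int value) {
    DCHECK(Smi::IsValid(value));  // Payload must fit in 31 bits here.
    return reinterpret_cast<Smi*>((value << kSmiTagSize) | kSmiTag);
  }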
static const int kContextOffset
Definition: frames.h:74
static const int kFixedFrameSizeFromFp
Definition: frames.h:157
static const int kContextOffset
Definition: frames.h:162
static const int kExpressionsOffset
Definition: frames.h:160
static const int kCallerSPOffset
Definition: frames.h:167
static const int kMarkerOffset
Definition: frames.h:161
static const int kCallerFPOffset
Definition: frames.h:165
static const Register ReceiverRegister()
static const Register NameRegister()
static const Register ValueRegister()
static const unsigned int kContainsCachedArrayIndexMask
Definition: objects.h:8618
static const int kLengthOffset
Definition: objects.h:8802
bool Equals(String *other)
Definition: objects-inl.h:3336
static Handle< Code > GetUninitialized(Isolate *isolate)
Definition: code-stubs.h:2258
static TypeFeedbackId None()
Definition: utils.h:945
static Handle< Object > MegamorphicSentinel(Isolate *isolate)
TypeofState
Definition: codegen.h:46
#define __
DEFINE_BOOL(enable_unaligned_accesses, ...)
DEFINE_BOOL(enable_always_align_csp, ...)
Definition: flag-definitions.h
#define POINTER_SIZE_ALIGN(value)
Definition: globals.h:582
#define UNREACHABLE()
Definition: logging.h:30
#define DCHECK_NE(v1, v2)
Definition: logging.h:207
#define DCHECK(condition)
Definition: logging.h:205
#define DCHECK_EQ(v1, v2)
Definition: logging.h:206
@ CALL_FUNCTION
@ TAG_OBJECT
typedef int int32_t
Definition: unicode.cc:24
static int Push(SpecialRPOStackFrame *stack, int depth, BasicBlock *child, int unvisited)
Definition: scheduler.cc:773
Vector< const char > CStrVector(const char *data)
Definition: vector.h:158
const int kPointerSize
Definition: globals.h:129
MemOperand ContextOperand(Register context, int index)
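ContextOperand composes a context register (usually cp) with Context::SlotOffset to form the operand for a context-slot load or store; a sketch in terms of the SlotOffset helper sketched above:

  // Sketch: address slot 'index' of the (tagged) Context held in 'context'.
  MemOperand ContextOperand(Register context, int index) {
    return MemOperand(context, Context::SlotOffset(index));
  }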
const Register r2
@ DO_SMI_CHECK
Definition: globals.h:641
@ STRING_ADD_CHECK_BOTH
Definition: code-stubs.h:1218
@ TRACK_ALLOCATION_SITE
Definition: objects.h:8085
@ DONT_TRACK_ALLOCATION_SITE
Definition: objects.h:8084
has_duplicate_parameters
Definition: objects-inl.h:5448
@ kSeqStringTag
Definition: objects.h:563
const Register cp
static LifetimePosition Min(LifetimePosition a, LifetimePosition b)
const Register r6
bool IsDeclaredVariableMode(VariableMode mode)
Definition: globals.h:705
const uint32_t kTwoByteStringTag
Definition: objects.h:556
const Register r0
const int kSmiTagSize
Definition: v8.h:5743
const LowDwVfpRegister d0
const Register ip
const Register r3
const Register fp
const Register sp
const int kPointerSizeLog2
Definition: globals.h:147
@ LAST_NONCALLABLE_SPEC_OBJECT_TYPE
Definition: objects.h:785
@ JS_REGEXP_TYPE
Definition: objects.h:748
@ NUM_OF_CALLABLE_SPEC_OBJECT_TYPES
Definition: objects.h:788
@ JS_VALUE_TYPE
Definition: objects.h:728
@ JS_DATE_TYPE
Definition: objects.h:730
@ FIRST_NONCALLABLE_SPEC_OBJECT_TYPE
Definition: objects.h:784
@ FIRST_JS_PROXY_TYPE
Definition: objects.h:778
@ JS_ARRAY_TYPE
Definition: objects.h:738
@ FIRST_NONSTRING_TYPE
Definition: objects.h:758
@ FIRST_SPEC_OBJECT_TYPE
Definition: objects.h:781
@ LAST_SPEC_OBJECT_TYPE
Definition: objects.h:782
@ JS_FUNCTION_TYPE
Definition: objects.h:749
@ JS_FUNCTION_PROXY_TYPE
Definition: objects.h:726
@ LAST_JS_PROXY_TYPE
Definition: objects.h:779
const uint32_t kOneByteStringTag
Definition: objects.h:557
@ NO_OVERWRITE
Definition: ic-state.h:58
@ OVERWRITE_RIGHT
Definition: ic-state.h:58
const Register r4
MemOperand FieldMemOperand(Register object, int offset)
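FieldMemOperand is the counterpart for object fields: the offsets in objects.h are given from the untagged start of the object, so the helper folds the heap-object tag out of the displacement. A sketch:

  // Sketch: 'object' holds a tagged pointer, so the field at byte offset
  // 'offset' sits at offset - kHeapObjectTag from the register's value.
  MemOperand FieldMemOperand(Register object, int offset) {
    return MemOperand(object, offset - kHeapObjectTag);
  }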
bool IsImmutableVariableMode(VariableMode mode)
Definition: globals.h:715
const Register r9
const Register pc
@ DYNAMIC_GLOBAL
Definition: globals.h:689
@ DYNAMIC_LOCAL
Definition: globals.h:693
@ CONST_LEGACY
Definition: globals.h:671
const Register r5
Condition NegateCondition(Condition cond)
Definition: constants-arm.h:86
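NegateCondition maps an ARM condition code to its logical complement. The ARM encoding places each condition and its inverse in adjacent code points, so flipping the low bit suffices; a sketch:

  // Sketch: eq^1 == ne, lt^1 == ge, and so on; 'al' (always) has no complement.
  Condition NegateCondition(Condition cond) {
    DCHECK(cond != al);
    return static_cast<Condition>(cond ^ ne);
  }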
static LifetimePosition Max(LifetimePosition a, LifetimePosition b)
const Register lr
byte * Address
Definition: globals.h:101
NilValue
Definition: v8.h:97
@ kNullValue
Definition: v8.h:97
@ NOT_CONTEXTUAL
Definition: objects.h:174
const Register r1
const int kHeapObjectTag
Definition: v8.h:5737
const Register no_reg
flag
Definition: objects-inl.h:5418
MemOperand GlobalObjectOperand()
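GlobalObjectOperand is shorthand for the context slot that holds the global object; a sketch, assuming the Context::GLOBAL_OBJECT_INDEX slot constant from contexts.h:

  // Sketch: the global object is reachable from any context via a fixed slot.
  MemOperand GlobalObjectOperand() {
    return ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX);
  }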
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
const intptr_t kSmiTagMask
Definition: v8.h:5744
const Register pp
@ RECORD_CONSTRUCTOR_TARGET
Definition: globals.h:480
const int kSmiTag
Definition: v8.h:5742
@ NO_CALL_FUNCTION_FLAGS
Definition: globals.h:469
@ CALL_AS_METHOD
Definition: globals.h:470
@ STRING_INDEX_IS_NUMBER
Definition: code-stubs.h:1590
void CopyBytes(uint8_t *target, uint8_t *source)
bool IsFastObjectElementsKind(ElementsKind kind)
Debugger support for the V8 JavaScript engine.
Definition: accessors.cc:20
#define VOID
#define IN
PropertyAttributes
@ NONE
@ READ_ONLY