full-codegen-arm64.cc
1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/v8.h"
6 
7 #if V8_TARGET_ARCH_ARM64
8 
9 #include "src/code-factory.h"
10 #include "src/code-stubs.h"
11 #include "src/codegen.h"
12 #include "src/compiler.h"
13 #include "src/debug.h"
14 #include "src/full-codegen.h"
15 #include "src/ic/ic.h"
16 #include "src/isolate-inl.h"
17 #include "src/parser.h"
18 #include "src/scopes.h"
19 
20 #include "src/arm64/code-stubs-arm64.h"
21 #include "src/arm64/macro-assembler-arm64.h"
22 
23 namespace v8 {
24 namespace internal {
25 
26 #define __ ACCESS_MASM(masm_)
27 
28 class JumpPatchSite BASE_EMBEDDED {
29  public:
30  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm), reg_(NoReg) {
31 #ifdef DEBUG
32  info_emitted_ = false;
33 #endif
34  }
35 
36  ~JumpPatchSite() {
37  if (patch_site_.is_bound()) {
38  DCHECK(info_emitted_);
39  } else {
40  DCHECK(reg_.IsNone());
41  }
42  }
43 
44  void EmitJumpIfNotSmi(Register reg, Label* target) {
45  // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
46  InstructionAccurateScope scope(masm_, 1);
47  DCHECK(!info_emitted_);
48  DCHECK(reg.Is64Bits());
49  DCHECK(!reg.Is(csp));
50  reg_ = reg;
51  __ bind(&patch_site_);
52  __ tbz(xzr, 0, target); // Always taken before patched.
53  }
54 
55  void EmitJumpIfSmi(Register reg, Label* target) {
56  // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
57  InstructionAccurateScope scope(masm_, 1);
58  DCHECK(!info_emitted_);
59  DCHECK(reg.Is64Bits());
60  DCHECK(!reg.Is(csp));
61  reg_ = reg;
62  __ bind(&patch_site_);
63  __ tbnz(xzr, 0, target); // Never taken before patched.
64  }
65 
66  void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) {
67  UseScratchRegisterScope temps(masm_);
68  Register temp = temps.AcquireX();
69  __ Orr(temp, reg1, reg2);
70  EmitJumpIfNotSmi(temp, target);
71  }
72 
73  void EmitPatchInfo() {
74  Assembler::BlockPoolsScope scope(masm_);
75  InlineSmiCheckInfo::Emit(masm_, reg_, &patch_site_);
76 #ifdef DEBUG
77  info_emitted_ = true;
78 #endif
79  }
80 
81  private:
82  MacroAssembler* masm_;
83  Label patch_site_;
84  Register reg_;
85 #ifdef DEBUG
86  bool info_emitted_;
87 #endif
88 };
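// Note (illustrative, not in the original source): JumpPatchSite records the
// PC of the single tbz/tbnz on xzr so that PatchInlinedSmiCode (ic-arm64.cc)
// can later rewrite it into a real smi-tag check on reg_, roughly:
//   __ tbz(xzr, 0, target);   // emitted here: bit 0 of xzr is always 0
//   // ...later patched by the IC system into something like:
//   __ tbz(reg_, 0, target);  // taken only if the value in reg_ is a smi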
89 
90 
91 // Generate code for a JS function. On entry to the function the receiver
92 // and arguments have been pushed on the stack left to right. The actual
93 // argument count matches the formal parameter count expected by the
94 // function.
95 //
96 // The live registers are:
97 // - x1: the JS function object being called (i.e. ourselves).
98 // - cp: our context.
99 // - fp: our caller's frame pointer.
100 // - jssp: stack pointer.
101 // - lr: return address.
102 //
103 // The function builds a JS frame. See JavaScriptFrameConstants in
104 // frames-arm.h for its layout.
105 void FullCodeGenerator::Generate() {
106  CompilationInfo* info = info_;
107  handler_table_ =
108  isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
109 
110  profiling_counter_ = isolate()->factory()->NewCell(
111  Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
112  SetFunctionPosition(function());
113  Comment cmnt(masm_, "[ Function compiled by full code generator");
114 
115  ProfileEntryHookStub::MaybeCallEntryHook(masm_);
116 
117 #ifdef DEBUG
118  if (strlen(FLAG_stop_at) > 0 &&
119  info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
120  __ Debug("stop-at", __LINE__, BREAK);
121  }
122 #endif
123 
124  // Sloppy mode functions and builtins need to replace the receiver with the
125  // global proxy when called as functions (without an explicit receiver
126  // object).
127  if (info->strict_mode() == SLOPPY && !info->is_native()) {
128  Label ok;
129  int receiver_offset = info->scope()->num_parameters() * kXRegSize;
130  __ Peek(x10, receiver_offset);
131  __ JumpIfNotRoot(x10, Heap::kUndefinedValueRootIndex, &ok);
132 
133  __ Ldr(x10, GlobalObjectMemOperand());
134  __ Ldr(x10, FieldMemOperand(x10, GlobalObject::kGlobalProxyOffset));
135  __ Poke(x10, receiver_offset);
136 
137  __ Bind(&ok);
138  }
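 // Illustrative example (not from the original source): calling a sloppy
 // function as plain f() leaves undefined as the receiver slot; the check
 // above swaps it for the global proxy so that 'this' inside f refers to the
 // global object, as sloppy-mode semantics require.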
139 
140 
141  // Open a frame scope to indicate that there is a frame on the stack.
142  // The MANUAL indicates that the scope shouldn't actually generate code
143  // to set up the frame because we do it manually below.
144  FrameScope frame_scope(masm_, StackFrame::MANUAL);
145 
146  // This call emits the following sequence in a way that can be patched for
147  // code ageing support:
148  // Push(lr, fp, cp, x1);
149  // Add(fp, jssp, 2 * kPointerSize);
150  info->set_prologue_offset(masm_->pc_offset());
151  __ Prologue(info->IsCodePreAgingActive());
152  info->AddNoFrameRange(0, masm_->pc_offset());
153 
154  // Reserve space on the stack for locals.
155  { Comment cmnt(masm_, "[ Allocate locals");
156  int locals_count = info->scope()->num_stack_slots();
157  // Generators allocate locals, if any, in context slots.
158  DCHECK(!info->function()->is_generator() || locals_count == 0);
159 
160  if (locals_count > 0) {
161  if (locals_count >= 128) {
162  Label ok;
163  DCHECK(jssp.Is(__ StackPointer()));
164  __ Sub(x10, jssp, locals_count * kPointerSize);
165  __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
166  __ B(hs, &ok);
167  __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
168  __ Bind(&ok);
169  }
170  __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
171  if (FLAG_optimize_for_size) {
172  __ PushMultipleTimes(x10 , locals_count);
173  } else {
174  const int kMaxPushes = 32;
175  if (locals_count >= kMaxPushes) {
176  int loop_iterations = locals_count / kMaxPushes;
177  __ Mov(x3, loop_iterations);
178  Label loop_header;
179  __ Bind(&loop_header);
180  // Do pushes.
181  __ PushMultipleTimes(x10 , kMaxPushes);
182  __ Subs(x3, x3, 1);
183  __ B(ne, &loop_header);
184  }
185  int remaining = locals_count % kMaxPushes;
186  // Emit the remaining pushes.
187  __ PushMultipleTimes(x10 , remaining);
188  }
189  }
190  }
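 // Illustrative example (not from the original source): with
 // FLAG_optimize_for_size off, locals_count = 100 and kMaxPushes = 32, the
 // loop above runs 100 / 32 = 3 times (96 pushes) and the tail push emits the
 // remaining 100 % 32 = 4 slots, all initialised to undefined.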
191 
192  bool function_in_register_x1 = true;
193 
194  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
195  if (heap_slots > 0) {
196  // Argument to NewContext is the function, which is still in x1.
197  Comment cmnt(masm_, "[ Allocate context");
198  bool need_write_barrier = true;
199  if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
200  __ Mov(x10, Operand(info->scope()->GetScopeInfo()));
201  __ Push(x1, x10);
202  __ CallRuntime(Runtime::kNewGlobalContext, 2);
203  } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
204  FastNewContextStub stub(isolate(), heap_slots);
205  __ CallStub(&stub);
206  // Result of FastNewContextStub is always in new space.
207  need_write_barrier = false;
208  } else {
209  __ Push(x1);
210  __ CallRuntime(Runtime::kNewFunctionContext, 1);
211  }
212  function_in_register_x1 = false;
213  // Context is returned in x0. It replaces the context passed to us.
214  // It's saved in the stack and kept live in cp.
215  __ Mov(cp, x0);
216  __ Str(x0, MemOperand(fp, StandardFrameConstants::kContextOffset));
217  // Copy any necessary parameters into the context.
218  int num_parameters = info->scope()->num_parameters();
219  for (int i = 0; i < num_parameters; i++) {
220  Variable* var = scope()->parameter(i);
221  if (var->IsContextSlot()) {
222  int parameter_offset = StandardFrameConstants::kCallerSPOffset +
223  (num_parameters - 1 - i) * kPointerSize;
224  // Load parameter from stack.
225  __ Ldr(x10, MemOperand(fp, parameter_offset));
226  // Store it in the context.
227  MemOperand target = ContextMemOperand(cp, var->index());
228  __ Str(x10, target);
229 
230  // Update the write barrier.
231  if (need_write_barrier) {
232  __ RecordWriteContextSlot(
233  cp, target.offset(), x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
234  } else if (FLAG_debug_code) {
235  Label done;
236  __ JumpIfInNewSpace(cp, &done);
237  __ Abort(kExpectedNewSpaceObject);
238  __ bind(&done);
239  }
240  }
241  }
242  }
243 
244  Variable* arguments = scope()->arguments();
245  if (arguments != NULL) {
246  // Function uses arguments object.
247  Comment cmnt(masm_, "[ Allocate arguments object");
248  if (!function_in_register_x1) {
249  // Load this again, if it's used by the local context below.
250  __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
251  } else {
252  __ Mov(x3, x1);
253  }
254  // Receiver is just before the parameters on the caller's stack.
255  int num_parameters = info->scope()->num_parameters();
256  int offset = num_parameters * kPointerSize;
257  __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset + offset);
258  __ Mov(x1, Smi::FromInt(num_parameters));
259  __ Push(x3, x2, x1);
260 
261  // Arguments to ArgumentsAccessStub:
262  // function, receiver address, parameter count.
263  // The stub will rewrite receiver and parameter count if the previous
264  // stack frame was an arguments adapter frame.
265  ArgumentsAccessStub::Type type;
266  if (strict_mode() == STRICT) {
267  type = ArgumentsAccessStub::NEW_STRICT;
268  } else if (function()->has_duplicate_parameters()) {
269  type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
270  } else {
271  type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
272  }
273  ArgumentsAccessStub stub(isolate(), type);
274  __ CallStub(&stub);
275 
276  SetVar(arguments, x0, x1, x2);
277  }
278 
279  if (FLAG_trace) {
280  __ CallRuntime(Runtime::kTraceEnter, 0);
281  }
282 
283 
284  // Visit the declarations and body unless there is an illegal
285  // redeclaration.
286  if (scope()->HasIllegalRedeclaration()) {
287  Comment cmnt(masm_, "[ Declarations");
288  scope()->VisitIllegalRedeclaration(this);
289 
290  } else {
291  PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
292  { Comment cmnt(masm_, "[ Declarations");
293  if (scope()->is_function_scope() && scope()->function() != NULL) {
294  VariableDeclaration* function = scope()->function();
295  DCHECK(function->proxy()->var()->mode() == CONST ||
296  function->proxy()->var()->mode() == CONST_LEGACY);
297  DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
298  VisitVariableDeclaration(function);
299  }
300  VisitDeclarations(scope()->declarations());
301  }
302  }
303 
304  { Comment cmnt(masm_, "[ Stack check");
305  PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
306  Label ok;
307  DCHECK(jssp.Is(__ StackPointer()));
308  __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
309  __ B(hs, &ok);
310  PredictableCodeSizeScope predictable(masm_,
311  Assembler::kCallSizeWithRelocation);
312  __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
313  __ Bind(&ok);
314  }
315 
316  { Comment cmnt(masm_, "[ Body");
317  DCHECK(loop_depth() == 0);
318  VisitStatements(function()->body());
319  DCHECK(loop_depth() == 0);
320  }
321 
322  // Always emit a 'return undefined' in case control fell off the end of
323  // the body.
324  { Comment cmnt(masm_, "[ return <undefined>;");
325  __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
326  }
327  EmitReturnSequence();
328 
329  // Force emission of the pools, so they don't get emitted in the middle
330  // of the back edge table.
331  masm()->CheckVeneerPool(true, false);
332  masm()->CheckConstPool(true, false);
333 }
334 
335 
336 void FullCodeGenerator::ClearAccumulator() {
337  __ Mov(x0, Smi::FromInt(0));
338 }
339 
340 
341 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
342  __ Mov(x2, Operand(profiling_counter_));
343  __ Ldr(x3, FieldMemOperand(x2, Cell::kValueOffset));
344  __ Subs(x3, x3, Smi::FromInt(delta));
345  __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
346 }
347 
348 
349 void FullCodeGenerator::EmitProfilingCounterReset() {
350  int reset_value = FLAG_interrupt_budget;
351  if (info_->is_debug()) {
352  // Detect debug break requests as soon as possible.
353  reset_value = FLAG_interrupt_budget >> 4;
354  }
355  __ Mov(x2, Operand(profiling_counter_));
356  __ Mov(x3, Smi::FromInt(reset_value));
357  __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
358 }
359 
360 
361 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
362  Label* back_edge_target) {
363  DCHECK(jssp.Is(__ StackPointer()));
364  Comment cmnt(masm_, "[ Back edge bookkeeping");
365  // Block literal pools whilst emitting back edge code.
366  Assembler::BlockPoolsScope block_const_pool(masm_);
367  Label ok;
368 
369  DCHECK(back_edge_target->is_bound());
370  // We want to do a round rather than a floor of distance/kCodeSizeMultiplier
371  // to reduce the absolute error due to the integer division. To do that,
372  // we add kCodeSizeMultiplier/2 to the distance (equivalent to adding 0.5 to
373  // the result).
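 // Illustrative example (values made up, not from the original source): if
 // kCodeSizeMultiplier were 100 and the back edge target is 250 bytes away,
 // distance = 250 + 50 = 300 and weight = Min(kMaxBackEdgeWeight,
 // Max(1, 300 / 100)) = 3, so larger loop bodies decrement the profiling
 // counter by more per iteration.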
374  int distance =
375  masm_->SizeOfCodeGeneratedSince(back_edge_target) + kCodeSizeMultiplier / 2;
376  int weight = Min(kMaxBackEdgeWeight,
377  Max(1, distance / kCodeSizeMultiplier));
378  EmitProfilingCounterDecrement(weight);
379  __ B(pl, &ok);
380  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
381 
382  // Record a mapping of this PC offset to the OSR id. This is used to find
383  // the AST id from the unoptimized code in order to use it as a key into
384  // the deoptimization input data found in the optimized code.
385  RecordBackEdge(stmt->OsrEntryId());
386 
387  EmitProfilingCounterReset();
388 
389  __ Bind(&ok);
390  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
391  // Record a mapping of the OSR id to this PC. This is used if the OSR
392  // entry becomes the target of a bailout. We don't expect it to be, but
393  // we want it to work if it is.
394  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
395 }
396 
397 
398 void FullCodeGenerator::EmitReturnSequence() {
399  Comment cmnt(masm_, "[ Return sequence");
400 
401  if (return_label_.is_bound()) {
402  __ B(&return_label_);
403 
404  } else {
405  __ Bind(&return_label_);
406  if (FLAG_trace) {
407  // Push the return value on the stack as the parameter.
408  // Runtime::TraceExit returns its parameter in x0.
409  __ Push(result_register());
410  __ CallRuntime(Runtime::kTraceExit, 1);
411  DCHECK(x0.Is(result_register()));
412  }
413  // Pretend that the exit is a backwards jump to the entry.
414  int weight = 1;
415  if (info_->ShouldSelfOptimize()) {
416  weight = FLAG_interrupt_budget / FLAG_self_opt_count;
417  } else {
418  int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
419  weight = Min(kMaxBackEdgeWeight,
420  Max(1, distance / kCodeSizeMultiplier));
421  }
422  EmitProfilingCounterDecrement(weight);
423  Label ok;
424  __ B(pl, &ok);
425  __ Push(x0);
426  __ Call(isolate()->builtins()->InterruptCheck(),
427  RelocInfo::CODE_TARGET);
428  __ Pop(x0);
429  EmitProfilingCounterReset();
430  __ Bind(&ok);
431 
432  // Make sure that the constant pool is not emitted inside of the return
433  // sequence. This sequence can get patched when the debugger is used. See
434  // debug-arm64.cc:BreakLocationIterator::SetDebugBreakAtReturn().
435  {
436  InstructionAccurateScope scope(masm_,
437  Assembler::kJSRetSequenceInstructions);
438  CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
439  __ RecordJSReturn();
440  // This code is generated using Assembler methods rather than Macro
441  // Assembler methods because it will be patched later on, and so the size
442  // of the generated code must be consistent.
443  const Register& current_sp = __ StackPointer();
444  // Nothing ensures 16 bytes alignment here.
445  DCHECK(!current_sp.Is(csp));
446  __ mov(current_sp, fp);
447  int no_frame_start = masm_->pc_offset();
448  __ ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSize, PostIndex));
449  // Drop the arguments and receiver and return.
450  // TODO(all): This implementation is overkill as it supports 2**31+1
451  // arguments, consider how to improve it without creating a security
452  // hole.
453  __ ldr_pcrel(ip0, (3 * kInstructionSize) >> kLoadLiteralScaleLog2);
454  __ add(current_sp, current_sp, ip0);
455  __ ret();
456  __ dc64(kXRegSize * (info_->scope()->num_parameters() + 1));
457  info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
458  }
459  }
460 }
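 // Note (illustrative, not in the original source): the ldr_pcrel above loads
 // the 64-bit literal emitted by dc64 three instructions later, i.e.
 // (num_parameters + 1) * kXRegSize bytes, so the following add/ret pair drops
 // the receiver and all arguments in a single, patchable-size sequence.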
461 
462 
463 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
464  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
465 }
466 
467 
468 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
469  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
470  codegen()->GetVar(result_register(), var);
471 }
472 
473 
474 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
475  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
476  codegen()->GetVar(result_register(), var);
477  __ Push(result_register());
478 }
479 
480 
481 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
482  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
483  // For simplicity we always test the accumulator register.
484  codegen()->GetVar(result_register(), var);
485  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
486  codegen()->DoTest(this);
487 }
488 
489 
490 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
491  // Root values have no side effects.
492 }
493 
494 
495 void FullCodeGenerator::AccumulatorValueContext::Plug(
496  Heap::RootListIndex index) const {
497  __ LoadRoot(result_register(), index);
498 }
499 
500 
501 void FullCodeGenerator::StackValueContext::Plug(
502  Heap::RootListIndex index) const {
503  __ LoadRoot(result_register(), index);
504  __ Push(result_register());
505 }
506 
507 
508 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
509  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
510  false_label_);
511  if (index == Heap::kUndefinedValueRootIndex ||
512  index == Heap::kNullValueRootIndex ||
513  index == Heap::kFalseValueRootIndex) {
514  if (false_label_ != fall_through_) __ B(false_label_);
515  } else if (index == Heap::kTrueValueRootIndex) {
516  if (true_label_ != fall_through_) __ B(true_label_);
517  } else {
518  __ LoadRoot(result_register(), index);
519  codegen()->DoTest(this);
520  }
521 }
522 
523 
524 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
525 }
526 
527 
528 void FullCodeGenerator::AccumulatorValueContext::Plug(
529  Handle<Object> lit) const {
530  __ Mov(result_register(), Operand(lit));
531 }
532 
533 
534 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
535  // Immediates cannot be pushed directly.
536  __ Mov(result_register(), Operand(lit));
537  __ Push(result_register());
538 }
539 
540 
541 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
542  codegen()->PrepareForBailoutBeforeSplit(condition(),
543  true,
544  true_label_,
545  false_label_);
546  DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
547  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
548  if (false_label_ != fall_through_) __ B(false_label_);
549  } else if (lit->IsTrue() || lit->IsJSObject()) {
550  if (true_label_ != fall_through_) __ B(true_label_);
551  } else if (lit->IsString()) {
552  if (String::cast(*lit)->length() == 0) {
553  if (false_label_ != fall_through_) __ B(false_label_);
554  } else {
555  if (true_label_ != fall_through_) __ B(true_label_);
556  }
557  } else if (lit->IsSmi()) {
558  if (Smi::cast(*lit)->value() == 0) {
559  if (false_label_ != fall_through_) __ B(false_label_);
560  } else {
561  if (true_label_ != fall_through_) __ B(true_label_);
562  }
563  } else {
564  // For simplicity we always test the accumulator register.
565  __ Mov(result_register(), Operand(lit));
566  codegen()->DoTest(this);
567  }
568 }
569 
570 
571 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
572  Register reg) const {
573  DCHECK(count > 0);
574  __ Drop(count);
575 }
576 
577 
578 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
579  int count,
580  Register reg) const {
581  DCHECK(count > 0);
582  __ Drop(count);
583  __ Move(result_register(), reg);
584 }
585 
586 
587 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
588  Register reg) const {
589  DCHECK(count > 0);
590  if (count > 1) __ Drop(count - 1);
591  __ Poke(reg, 0);
592 }
593 
594 
595 void FullCodeGenerator::TestContext::DropAndPlug(int count,
596  Register reg) const {
597  DCHECK(count > 0);
598  // For simplicity we always test the accumulator register.
599  __ Drop(count);
600  __ Mov(result_register(), reg);
601  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
602  codegen()->DoTest(this);
603 }
604 
605 
606 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
607  Label* materialize_false) const {
608  DCHECK(materialize_true == materialize_false);
609  __ Bind(materialize_true);
610 }
611 
612 
613 void FullCodeGenerator::AccumulatorValueContext::Plug(
614  Label* materialize_true,
615  Label* materialize_false) const {
616  Label done;
617  __ Bind(materialize_true);
618  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
619  __ B(&done);
620  __ Bind(materialize_false);
621  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
622  __ Bind(&done);
623 }
624 
625 
626 void FullCodeGenerator::StackValueContext::Plug(
627  Label* materialize_true,
628  Label* materialize_false) const {
629  Label done;
630  __ Bind(materialize_true);
631  __ LoadRoot(x10, Heap::kTrueValueRootIndex);
632  __ B(&done);
633  __ Bind(materialize_false);
634  __ LoadRoot(x10, Heap::kFalseValueRootIndex);
635  __ Bind(&done);
636  __ Push(x10);
637 }
638 
639 
640 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
641  Label* materialize_false) const {
642  DCHECK(materialize_true == true_label_);
643  DCHECK(materialize_false == false_label_);
644 }
645 
646 
647 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
648 }
649 
650 
651 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
652  Heap::RootListIndex value_root_index =
653  flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
654  __ LoadRoot(result_register(), value_root_index);
655 }
656 
657 
658 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
659  Heap::RootListIndex value_root_index =
660  flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
661  __ LoadRoot(x10, value_root_index);
662  __ Push(x10);
663 }
664 
665 
666 void FullCodeGenerator::TestContext::Plug(bool flag) const {
667  codegen()->PrepareForBailoutBeforeSplit(condition(),
668  true,
669  true_label_,
670  false_label_);
671  if (flag) {
672  if (true_label_ != fall_through_) {
673  __ B(true_label_);
674  }
675  } else {
676  if (false_label_ != fall_through_) {
677  __ B(false_label_);
678  }
679  }
680 }
681 
682 
683 void FullCodeGenerator::DoTest(Expression* condition,
684  Label* if_true,
685  Label* if_false,
686  Label* fall_through) {
687  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
688  CallIC(ic, condition->test_id());
689  __ CompareAndSplit(result_register(), 0, ne, if_true, if_false, fall_through);
690 }
691 
692 
693 // If (cond), branch to if_true.
694 // If (!cond), branch to if_false.
695 // fall_through is used as an optimization in cases where only one branch
696 // instruction is necessary.
697 void FullCodeGenerator::Split(Condition cond,
698  Label* if_true,
699  Label* if_false,
700  Label* fall_through) {
701  if (if_false == fall_through) {
702  __ B(cond, if_true);
703  } else if (if_true == fall_through) {
704  DCHECK(if_false != fall_through);
705  __ B(NegateCondition(cond), if_false);
706  } else {
707  __ B(cond, if_true);
708  __ B(if_false);
709  }
710 }
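 // Illustrative usage (not from the original source): a caller that has just
 // emitted a compare can branch once when one target falls through, e.g.
 //   Split(eq, if_true, if_false, if_false);
 // emits only "B(eq, if_true)" because the false case is the fall-through.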
711 
712 
713 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
714  // Offset is negative because higher indexes are at lower addresses.
715  int offset = -var->index() * kXRegSize;
716  // Adjust by a (parameter or local) base offset.
717  if (var->IsParameter()) {
718  offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
719  } else {
720  offset += JavaScriptFrameConstants::kLocal0Offset;
721  }
722  return MemOperand(fp, offset);
723 }
724 
725 
726 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
727  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
728  if (var->IsContextSlot()) {
729  int context_chain_length = scope()->ContextChainLength(var->scope());
730  __ LoadContext(scratch, context_chain_length);
731  return ContextMemOperand(scratch, var->index());
732  } else {
733  return StackOperand(var);
734  }
735 }
736 
737 
738 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
739  // Use destination as scratch.
740  MemOperand location = VarOperand(var, dest);
741  __ Ldr(dest, location);
742 }
743 
744 
745 void FullCodeGenerator::SetVar(Variable* var,
746  Register src,
747  Register scratch0,
748  Register scratch1) {
749  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
750  DCHECK(!AreAliased(src, scratch0, scratch1));
751  MemOperand location = VarOperand(var, scratch0);
752  __ Str(src, location);
753 
754  // Emit the write barrier code if the location is in the heap.
755  if (var->IsContextSlot()) {
756  // scratch0 contains the correct context.
757  __ RecordWriteContextSlot(scratch0,
758  location.offset(),
759  src,
760  scratch1,
761  kLRHasBeenSaved,
762  kDontSaveFPRegs);
763  }
764 }
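 // Illustrative usage (taken from the pattern in Generate() above):
 //   SetVar(arguments, x0, x1, x2);
 // stores x0 into the slot of 'arguments'; if the variable lives in the
 // context, x1/x2 are used as scratch registers for locating the slot and for
 // the write barrier.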
765 
766 
767 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
768  bool should_normalize,
769  Label* if_true,
770  Label* if_false) {
771  // Only prepare for bailouts before splits if we're in a test
772  // context. Otherwise, we let the Visit function deal with the
773  // preparation to avoid preparing with the same AST id twice.
774  if (!context()->IsTest() || !info_->IsOptimizable()) return;
775 
776  // TODO(all): Investigate to see if there is something to work on here.
777  Label skip;
778  if (should_normalize) {
779  __ B(&skip);
780  }
781  PrepareForBailout(expr, TOS_REG);
782  if (should_normalize) {
783  __ CompareRoot(x0, Heap::kTrueValueRootIndex);
784  Split(eq, if_true, if_false, NULL);
785  __ Bind(&skip);
786  }
787 }
788 
789 
790 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
791  // The variable in the declaration always resides in the current function
792  // context.
793  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
794  if (generate_debug_code_) {
795  // Check that we're not inside a with or catch context.
796  __ Ldr(x1, FieldMemOperand(cp, HeapObject::kMapOffset));
797  __ CompareRoot(x1, Heap::kWithContextMapRootIndex);
798  __ Check(ne, kDeclarationInWithContext);
799  __ CompareRoot(x1, Heap::kCatchContextMapRootIndex);
800  __ Check(ne, kDeclarationInCatchContext);
801  }
802 }
803 
804 
805 void FullCodeGenerator::VisitVariableDeclaration(
806  VariableDeclaration* declaration) {
807  // If it was not possible to allocate the variable at compile time, we
808  // need to "declare" it at runtime to make sure it actually exists in the
809  // local context.
810  VariableProxy* proxy = declaration->proxy();
811  VariableMode mode = declaration->mode();
812  Variable* variable = proxy->var();
813  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
814 
815  switch (variable->location()) {
816  case Variable::UNALLOCATED:
817  globals_->Add(variable->name(), zone());
818  globals_->Add(variable->binding_needs_init()
819  ? isolate()->factory()->the_hole_value()
820  : isolate()->factory()->undefined_value(),
821  zone());
822  break;
823 
824  case Variable::PARAMETER:
825  case Variable::LOCAL:
826  if (hole_init) {
827  Comment cmnt(masm_, "[ VariableDeclaration");
828  __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
829  __ Str(x10, StackOperand(variable));
830  }
831  break;
832 
833  case Variable::CONTEXT:
834  if (hole_init) {
835  Comment cmnt(masm_, "[ VariableDeclaration");
836  EmitDebugCheckDeclarationContext(variable);
837  __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
838  __ Str(x10, ContextMemOperand(cp, variable->index()));
839  // No write barrier since the_hole_value is in old space.
840  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
841  }
842  break;
843 
844  case Variable::LOOKUP: {
845  Comment cmnt(masm_, "[ VariableDeclaration");
846  __ Mov(x2, Operand(variable->name()));
847  // Declaration nodes are always introduced in one of four modes.
848  DCHECK(IsDeclaredVariableMode(mode));
849  PropertyAttributes attr = IsImmutableVariableMode(mode) ? READ_ONLY
850  : NONE;
851  __ Mov(x1, Smi::FromInt(attr));
852  // Push initial value, if any.
853  // Note: For variables we must not push an initial value (such as
854  // 'undefined') because we may have a (legal) redeclaration and we
855  // must not destroy the current value.
856  if (hole_init) {
857  __ LoadRoot(x0, Heap::kTheHoleValueRootIndex);
858  __ Push(cp, x2, x1, x0);
859  } else {
860  // Pushing 0 (xzr) indicates no initial value.
861  __ Push(cp, x2, x1, xzr);
862  }
863  __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
864  break;
865  }
866  }
867 }
868 
869 
870 void FullCodeGenerator::VisitFunctionDeclaration(
871  FunctionDeclaration* declaration) {
872  VariableProxy* proxy = declaration->proxy();
873  Variable* variable = proxy->var();
874  switch (variable->location()) {
875  case Variable::UNALLOCATED: {
876  globals_->Add(variable->name(), zone());
877  Handle<SharedFunctionInfo> function =
878  Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
879  // Check for stack overflow exception.
880  if (function.is_null()) return SetStackOverflow();
881  globals_->Add(function, zone());
882  break;
883  }
884 
885  case Variable::PARAMETER:
886  case Variable::LOCAL: {
887  Comment cmnt(masm_, "[ Function Declaration");
888  VisitForAccumulatorValue(declaration->fun());
889  __ Str(result_register(), StackOperand(variable));
890  break;
891  }
892 
893  case Variable::CONTEXT: {
894  Comment cmnt(masm_, "[ Function Declaration");
895  EmitDebugCheckDeclarationContext(variable);
896  VisitForAccumulatorValue(declaration->fun());
897  __ Str(result_register(), ContextMemOperand(cp, variable->index()));
898  int offset = Context::SlotOffset(variable->index());
899  // We know that we have written a function, which is not a smi.
900  __ RecordWriteContextSlot(cp,
901  offset,
902  result_register(),
903  x2,
904  kLRHasBeenSaved,
905  kDontSaveFPRegs,
906  EMIT_REMEMBERED_SET,
907  OMIT_SMI_CHECK);
908  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
909  break;
910  }
911 
912  case Variable::LOOKUP: {
913  Comment cmnt(masm_, "[ Function Declaration");
914  __ Mov(x2, Operand(variable->name()));
915  __ Mov(x1, Smi::FromInt(NONE));
916  __ Push(cp, x2, x1);
917  // Push initial value for function declaration.
918  VisitForStackValue(declaration->fun());
919  __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
920  break;
921  }
922  }
923 }
924 
925 
926 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
927  Variable* variable = declaration->proxy()->var();
928  DCHECK(variable->location() == Variable::CONTEXT);
929  DCHECK(variable->interface()->IsFrozen());
930 
931  Comment cmnt(masm_, "[ ModuleDeclaration");
932  EmitDebugCheckDeclarationContext(variable);
933 
934  // Load instance object.
935  __ LoadContext(x1, scope_->ContextChainLength(scope_->GlobalScope()));
936  __ Ldr(x1, ContextMemOperand(x1, variable->interface()->Index()));
937  __ Ldr(x1, ContextMemOperand(x1, Context::EXTENSION_INDEX));
938 
939  // Assign it.
940  __ Str(x1, ContextMemOperand(cp, variable->index()));
941  // We know that we have written a module, which is not a smi.
942  __ RecordWriteContextSlot(cp,
943  Context::SlotOffset(variable->index()),
944  x1,
945  x3,
946  kLRHasBeenSaved,
947  kDontSaveFPRegs,
948  EMIT_REMEMBERED_SET,
949  OMIT_SMI_CHECK);
950  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
951 
952  // Traverse info body.
953  Visit(declaration->module());
954 }
955 
956 
957 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
958  VariableProxy* proxy = declaration->proxy();
959  Variable* variable = proxy->var();
960  switch (variable->location()) {
961  case Variable::UNALLOCATED:
962  // TODO(rossberg)
963  break;
964 
965  case Variable::CONTEXT: {
966  Comment cmnt(masm_, "[ ImportDeclaration");
967  EmitDebugCheckDeclarationContext(variable);
968  // TODO(rossberg)
969  break;
970  }
971 
972  case Variable::PARAMETER:
973  case Variable::LOCAL:
974  case Variable::LOOKUP:
975  UNREACHABLE();
976  }
977 }
978 
979 
980 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
981  // TODO(rossberg)
982 }
983 
984 
985 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
986  // Call the runtime to declare the globals.
987  __ Mov(x11, Operand(pairs));
988  Register flags = xzr;
989  if (Smi::FromInt(DeclareGlobalsFlags())) {
990  flags = x10;
991  __ Mov(flags, Smi::FromInt(DeclareGlobalsFlags()));
992  }
993  __ Push(cp, x11, flags);
994  __ CallRuntime(Runtime::kDeclareGlobals, 3);
995  // Return value is ignored.
996 }
997 
998 
999 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
1000  // Call the runtime to declare the modules.
1001  __ Push(descriptions);
1002  __ CallRuntime(Runtime::kDeclareModules, 1);
1003  // Return value is ignored.
1004 }
1005 
1006 
1007 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
1008  ASM_LOCATION("FullCodeGenerator::VisitSwitchStatement");
1009  Comment cmnt(masm_, "[ SwitchStatement");
1010  Breakable nested_statement(this, stmt);
1011  SetStatementPosition(stmt);
1012 
1013  // Keep the switch value on the stack until a case matches.
1014  VisitForStackValue(stmt->tag());
1015  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
1016 
1017  ZoneList<CaseClause*>* clauses = stmt->cases();
1018  CaseClause* default_clause = NULL; // Can occur anywhere in the list.
1019 
1020  Label next_test; // Recycled for each test.
1021  // Compile all the tests with branches to their bodies.
1022  for (int i = 0; i < clauses->length(); i++) {
1023  CaseClause* clause = clauses->at(i);
1024  clause->body_target()->Unuse();
1025 
1026  // The default is not a test, but remember it as final fall through.
1027  if (clause->is_default()) {
1028  default_clause = clause;
1029  continue;
1030  }
1031 
1032  Comment cmnt(masm_, "[ Case comparison");
1033  __ Bind(&next_test);
1034  next_test.Unuse();
1035 
1036  // Compile the label expression.
1037  VisitForAccumulatorValue(clause->label());
1038 
1039  // Perform the comparison as if via '==='.
1040  __ Peek(x1, 0); // Switch value.
1041 
1042  JumpPatchSite patch_site(masm_);
1043  if (ShouldInlineSmiCase(Token::EQ_STRICT)) {
1044  Label slow_case;
1045  patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
1046  __ Cmp(x1, x0);
1047  __ B(ne, &next_test);
1048  __ Drop(1); // Switch value is no longer needed.
1049  __ B(clause->body_target());
1050  __ Bind(&slow_case);
1051  }
1052 
1053  // Record position before stub call for type feedback.
1054  SetSourcePosition(clause->position());
1055  Handle<Code> ic =
1056  CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
1057  CallIC(ic, clause->CompareId());
1058  patch_site.EmitPatchInfo();
1059 
1060  Label skip;
1061  __ B(&skip);
1062  PrepareForBailout(clause, TOS_REG);
1063  __ JumpIfNotRoot(x0, Heap::kTrueValueRootIndex, &next_test);
1064  __ Drop(1);
1065  __ B(clause->body_target());
1066  __ Bind(&skip);
1067 
1068  __ Cbnz(x0, &next_test);
1069  __ Drop(1); // Switch value is no longer needed.
1070  __ B(clause->body_target());
1071  }
1072 
1073  // Discard the test value and jump to the default if present, otherwise to
1074  // the end of the statement.
1075  __ Bind(&next_test);
1076  __ Drop(1); // Switch value is no longer needed.
1077  if (default_clause == NULL) {
1078  __ B(nested_statement.break_label());
1079  } else {
1080  __ B(default_clause->body_target());
1081  }
1082 
1083  // Compile all the case bodies.
1084  for (int i = 0; i < clauses->length(); i++) {
1085  Comment cmnt(masm_, "[ Case body");
1086  CaseClause* clause = clauses->at(i);
1087  __ Bind(clause->body_target());
1088  PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1089  VisitStatements(clause->statements());
1090  }
1091 
1092  __ Bind(nested_statement.break_label());
1093  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1094 }
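 // Illustrative example (not from the original source): for
 //   switch (x) { case 1: a(); case 2: b(); break; default: c(); }
 // each non-default clause gets a compare-and-branch emitted above (the smi
 // fast path first, then the CompareIC), and the bodies are emitted afterwards
 // in source order, so falling through from case 1 into case 2 happens
 // naturally.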
1095 
1096 
1097 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1098  ASM_LOCATION("FullCodeGenerator::VisitForInStatement");
1099  Comment cmnt(masm_, "[ ForInStatement");
1100  int slot = stmt->ForInFeedbackSlot();
1101  // TODO(all): This visitor probably needs better comments and a revisit.
1102  SetStatementPosition(stmt);
1103 
1104  Label loop, exit;
1105  ForIn loop_statement(this, stmt);
1106  increment_loop_depth();
1107 
1108  // Get the object to enumerate over. If the object is null or undefined, skip
1109  // over the loop. See ECMA-262 version 5, section 12.6.4.
1110  VisitForAccumulatorValue(stmt->enumerable());
1111  __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, &exit);
1112  Register null_value = x15;
1113  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1114  __ Cmp(x0, null_value);
1115  __ B(eq, &exit);
1116 
1117  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1118 
1119  // Convert the object to a JS object.
1120  Label convert, done_convert;
1121  __ JumpIfSmi(x0, &convert);
1122  __ JumpIfObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE, &done_convert, ge);
1123  __ Bind(&convert);
1124  __ Push(x0);
1125  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1126  __ Bind(&done_convert);
1127  __ Push(x0);
1128 
1129  // Check for proxies.
1130  Label call_runtime;
1131  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1132  __ JumpIfObjectType(x0, x10, x11, LAST_JS_PROXY_TYPE, &call_runtime, le);
1133 
1134  // Check cache validity in generated code. This is a fast case for
1135  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1136  // guarantee cache validity, call the runtime system to check cache
1137  // validity or get the property names in a fixed array.
1138  __ CheckEnumCache(x0, null_value, x10, x11, x12, x13, &call_runtime);
1139 
1140  // The enum cache is valid. Load the map of the object being
1141  // iterated over and use the cache for the iteration.
1142  Label use_cache;
1143  __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
1144  __ B(&use_cache);
1145 
1146  // Get the set of properties to enumerate.
1147  __ Bind(&call_runtime);
1148  __ Push(x0); // Duplicate the enumerable object on the stack.
1149  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1150 
1151  // If we got a map from the runtime call, we can do a fast
1152  // modification check. Otherwise, we got a fixed array, and we have
1153  // to do a slow check.
1154  Label fixed_array, no_descriptors;
1155  __ Ldr(x2, FieldMemOperand(x0, HeapObject::kMapOffset));
1156  __ JumpIfNotRoot(x2, Heap::kMetaMapRootIndex, &fixed_array);
1157 
1158  // We got a map in register x0. Get the enumeration cache from it.
1159  __ Bind(&use_cache);
1160 
1161  __ EnumLengthUntagged(x1, x0);
1162  __ Cbz(x1, &no_descriptors);
1163 
1164  __ LoadInstanceDescriptors(x0, x2);
1165  __ Ldr(x2, FieldMemOperand(x2, DescriptorArray::kEnumCacheOffset));
1166  __ Ldr(x2,
1167  FieldMemOperand(x2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1168 
1169  // Set up the four remaining stack slots.
1170  __ SmiTag(x1);
1171  // Map, enumeration cache, enum cache length, zero (both last as smis).
1172  __ Push(x0, x2, x1, xzr);
1173  __ B(&loop);
1174 
1175  __ Bind(&no_descriptors);
1176  __ Drop(1);
1177  __ B(&exit);
1178 
1179  // We got a fixed array in register x0. Iterate through that.
1180  __ Bind(&fixed_array);
1181 
1182  __ LoadObject(x1, FeedbackVector());
1183  __ Mov(x10, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1184  __ Str(x10, FieldMemOperand(x1, FixedArray::OffsetOfElementAt(slot)));
1185 
1186  __ Mov(x1, Smi::FromInt(1)); // Smi indicates slow check.
1187  __ Peek(x10, 0); // Get enumerated object.
1188  STATIC_ASSERT(LAST_JS_PROXY_TYPE == LAST_SPEC_OBJECT_TYPE);
1189  // TODO(all): similar check was done already. Can we avoid it here?
1190  __ CompareObjectType(x10, x11, x12, LAST_JS_PROXY_TYPE);
1191  DCHECK(Smi::FromInt(0) == 0);
1192  __ CzeroX(x1, le); // Zero indicates proxy.
1193  __ Ldr(x2, FieldMemOperand(x0, FixedArray::kLengthOffset));
1194  // Smi and array, fixed array length (as smi) and initial index.
1195  __ Push(x1, x0, x2, xzr);
1196 
1197  // Generate code for doing the condition check.
1198  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1199  __ Bind(&loop);
1200  // Load the current count to x0, load the length to x1.
1201  __ PeekPair(x0, x1, 0);
1202  __ Cmp(x0, x1); // Compare to the array length.
1203  __ B(hs, loop_statement.break_label());
1204 
1205  // Get the current entry of the array into register r3.
1206  __ Peek(x10, 2 * kXRegSize);
1207  __ Add(x10, x10, Operand::UntagSmiAndScale(x0, kPointerSizeLog2));
1208  __ Ldr(x3, MemOperand(x10, FixedArray::kHeaderSize - kHeapObjectTag));
1209 
1210  // Get the expected map from the stack or a smi in the
1211  // permanent slow case into register x10.
1212  __ Peek(x2, 3 * kXRegSize);
1213 
1214  // Check if the expected map still matches that of the enumerable.
1215  // If not, we may have to filter the key.
1216  Label update_each;
1217  __ Peek(x1, 4 * kXRegSize);
1218  __ Ldr(x11, FieldMemOperand(x1, HeapObject::kMapOffset));
1219  __ Cmp(x11, x2);
1220  __ B(eq, &update_each);
1221 
1222  // For proxies, no filtering is done.
1223  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1224  STATIC_ASSERT(kSmiTag == 0);
1225  __ Cbz(x2, &update_each);
1226 
1227  // Convert the entry to a string or (smi) 0 if it isn't a property
1228  // any more. If the property has been removed while iterating, we
1229  // just skip it.
1230  __ Push(x1, x3);
1231  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1232  __ Mov(x3, x0);
1233  __ Cbz(x0, loop_statement.continue_label());
1234 
1235  // Update the 'each' property or variable from the possibly filtered
1236  // entry in register x3.
1237  __ Bind(&update_each);
1238  __ Mov(result_register(), x3);
1239  // Perform the assignment as if via '='.
1240  { EffectContext context(this);
1241  EmitAssignment(stmt->each());
1242  }
1243 
1244  // Generate code for the body of the loop.
1245  Visit(stmt->body());
1246 
1247  // Generate code for going to the next element by incrementing
1248  // the index (smi) stored on top of the stack.
1249  __ Bind(loop_statement.continue_label());
1250  // TODO(all): We could use a callee saved register to avoid popping.
1251  __ Pop(x0);
1252  __ Add(x0, x0, Smi::FromInt(1));
1253  __ Push(x0);
1254 
1255  EmitBackEdgeBookkeeping(stmt, &loop);
1256  __ B(&loop);
1257 
1258  // Remove the pointers stored on the stack.
1259  __ Bind(loop_statement.break_label());
1260  __ Drop(5);
1261 
1262  // Exit and decrement the loop depth.
1263  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1264  __ Bind(&exit);
1265  decrement_loop_depth();
1266 }
1267 
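 // Illustrative note (not from the original source): for a loop such as
 //   for (var key in obj) { use(key); }
 // the code above keeps five values on the stack while iterating: the
 // enumerable object underneath, then the map (or a smi marking the slow
 // case), the enum cache or fixed array of names, its length, and the current
 // index; hence the Drop(5) at the break label.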
1268 
1269 void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
1270  Comment cmnt(masm_, "[ ForOfStatement");
1271  SetStatementPosition(stmt);
1272 
1273  Iteration loop_statement(this, stmt);
1274  increment_loop_depth();
1275 
1276  // var iterator = iterable[Symbol.iterator]();
1277  VisitForEffect(stmt->assign_iterator());
1278 
1279  // Loop entry.
1280  __ Bind(loop_statement.continue_label());
1281 
1282  // result = iterator.next()
1283  VisitForEffect(stmt->next_result());
1284 
1285  // if (result.done) break;
1286  Label result_not_done;
1287  VisitForControl(stmt->result_done(),
1288  loop_statement.break_label(),
1289  &result_not_done,
1290  &result_not_done);
1291  __ Bind(&result_not_done);
1292 
1293  // each = result.value
1294  VisitForEffect(stmt->assign_each());
1295 
1296  // Generate code for the body of the loop.
1297  Visit(stmt->body());
1298 
1299  // Check stack before looping.
1300  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
1301  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
1302  __ B(loop_statement.continue_label());
1303 
1304  // Exit and decrement the loop depth.
1305  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1306  __ Bind(loop_statement.break_label());
1307  decrement_loop_depth();
1308 }
1309 
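 // Illustrative desugaring (reconstructed from the comments above, not from
 // the original source):
 //   var iterator = iterable[Symbol.iterator]();
 //   while (true) {
 //     var result = iterator.next();
 //     if (result.done) break;
 //     each = result.value;
 //     <body>
 //   }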
1310 
1311 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1312  bool pretenure) {
1313  // Use the fast case closure allocation code that allocates in new space for
1314  // nested functions that don't need literals cloning. If we're running with
1315  // the --always-opt or the --prepare-always-opt flag, we need to use the
1316  // runtime function so that the new function we are creating here gets a
1317  // chance to have its code optimized and doesn't just get a copy of the
1318  // existing unoptimized code.
1319  if (!FLAG_always_opt &&
1320  !FLAG_prepare_always_opt &&
1321  !pretenure &&
1322  scope()->is_function_scope() &&
1323  info->num_literals() == 0) {
1324  FastNewClosureStub stub(isolate(), info->strict_mode(), info->kind());
1325  __ Mov(x2, Operand(info));
1326  __ CallStub(&stub);
1327  } else {
1328  __ Mov(x11, Operand(info));
1329  __ LoadRoot(x10, pretenure ? Heap::kTrueValueRootIndex
1330  : Heap::kFalseValueRootIndex);
1331  __ Push(cp, x11, x10);
1332  __ CallRuntime(Runtime::kNewClosure, 3);
1333  }
1334  context()->Plug(x0);
1335 }
1336 
1337 
1338 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1339  Comment cmnt(masm_, "[ VariableProxy");
1340  EmitVariableLoad(expr);
1341 }
1342 
1343 
1344 void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
1345  Comment cnmt(masm_, "[ SuperReference ");
1346 
1347  __ ldr(LoadDescriptor::ReceiverRegister(),
1348  MemOperand(fp, JavaScriptFrameConstants::kReceiverOffset));
1349 
1350  Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
1351  __ Mov(LoadDescriptor::NameRegister(), Operand(home_object_symbol));
1352 
1353  CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());
1354 
1355  __ Mov(x10, Operand(isolate()->factory()->undefined_value()));
1356  __ cmp(x0, x10);
1357  Label done;
1358  __ b(&done, ne);
1359  __ CallRuntime(Runtime::kThrowNonMethodError, 0);
1360  __ bind(&done);
1361 }
1362 
1363 
1364 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1365  TypeofState typeof_state,
1366  Label* slow) {
1367  Register current = cp;
1368  Register next = x10;
1369  Register temp = x11;
1370 
1371  Scope* s = scope();
1372  while (s != NULL) {
1373  if (s->num_heap_slots() > 0) {
1374  if (s->calls_sloppy_eval()) {
1375  // Check that extension is NULL.
1376  __ Ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
1377  __ Cbnz(temp, slow);
1378  }
1379  // Load next context in chain.
1380  __ Ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
1381  // Walk the rest of the chain without clobbering cp.
1382  current = next;
1383  }
1384  // If no outer scope calls eval, we do not need to check more
1385  // context extensions.
1386  if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1387  s = s->outer_scope();
1388  }
1389 
1390  if (s->is_eval_scope()) {
1391  Label loop, fast;
1392  __ Mov(next, current);
1393 
1394  __ Bind(&loop);
1395  // Terminate at native context.
1396  __ Ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1397  __ JumpIfRoot(temp, Heap::kNativeContextMapRootIndex, &fast);
1398  // Check that extension is NULL.
1399  __ Ldr(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
1400  __ Cbnz(temp, slow);
1401  // Load next context in chain.
1402  __ Ldr(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
1403  __ B(&loop);
1404  __ Bind(&fast);
1405  }
1406 
1407  __ Ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
1408  __ Mov(LoadDescriptor::NameRegister(), Operand(proxy->var()->name()));
1409  if (FLAG_vector_ics) {
1410  __ Mov(VectorLoadICDescriptor::SlotRegister(),
1411  Smi::FromInt(proxy->VariableFeedbackSlot()));
1412  }
1413 
1414  ContextualMode mode = (typeof_state == INSIDE_TYPEOF) ? NOT_CONTEXTUAL
1415  : CONTEXTUAL;
1416  CallLoadIC(mode);
1417 }
1418 
1419 
1420 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1421  Label* slow) {
1422  DCHECK(var->IsContextSlot());
1423  Register context = cp;
1424  Register next = x10;
1425  Register temp = x11;
1426 
1427  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1428  if (s->num_heap_slots() > 0) {
1429  if (s->calls_sloppy_eval()) {
1430  // Check that extension is NULL.
1431  __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1432  __ Cbnz(temp, slow);
1433  }
1434  __ Ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
1435  // Walk the rest of the chain without clobbering cp.
1436  context = next;
1437  }
1438  }
1439  // Check that last extension is NULL.
1440  __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1441  __ Cbnz(temp, slow);
1442 
1443  // This function is used only for loads, not stores, so it's safe to
1444  // return an cp-based operand (the write barrier cannot be allowed to
1445  // destroy the cp register).
1446  return ContextMemOperand(context, var->index());
1447 }
1448 
1449 
1450 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1451  TypeofState typeof_state,
1452  Label* slow,
1453  Label* done) {
1454  // Generate fast-case code for variables that might be shadowed by
1455  // eval-introduced variables. Eval is used a lot without
1456  // introducing variables. In those cases, we do not want to
1457  // perform a runtime call for all variables in the scope
1458  // containing the eval.
1459  Variable* var = proxy->var();
1460  if (var->mode() == DYNAMIC_GLOBAL) {
1461  EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
1462  __ B(done);
1463  } else if (var->mode() == DYNAMIC_LOCAL) {
1464  Variable* local = var->local_if_not_shadowed();
1465  __ Ldr(x0, ContextSlotOperandCheckExtensions(local, slow));
1466  if (local->mode() == LET || local->mode() == CONST ||
1467  local->mode() == CONST_LEGACY) {
1468  __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, done);
1469  if (local->mode() == CONST_LEGACY) {
1470  __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
1471  } else { // LET || CONST
1472  __ Mov(x0, Operand(var->name()));
1473  __ Push(x0);
1474  __ CallRuntime(Runtime::kThrowReferenceError, 1);
1475  }
1476  }
1477  __ B(done);
1478  }
1479 }
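 // Illustrative example (not from the original source): in
 //   function f() { eval("..."); return x; }
 // 'x' may refer to a global or to a variable the eval introduces, so the
 // fast path above is only a shortcut; the caller still falls back to the
 // runtime lookup emitted at its &slow label.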
1480 
1481 
1482 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1483  // Record position before possible IC call.
1484  SetSourcePosition(proxy->position());
1485  Variable* var = proxy->var();
1486 
1487  // Three cases: global variables, lookup variables, and all other types of
1488  // variables.
1489  switch (var->location()) {
1490  case Variable::UNALLOCATED: {
1491  Comment cmnt(masm_, "Global variable");
1492  __ Ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
1493  __ Mov(LoadDescriptor::NameRegister(), Operand(var->name()));
1494  if (FLAG_vector_ics) {
1495  __ Mov(VectorLoadICDescriptor::SlotRegister(),
1496  Smi::FromInt(proxy->VariableFeedbackSlot()));
1497  }
1498  CallLoadIC(CONTEXTUAL);
1499  context()->Plug(x0);
1500  break;
1501  }
1502 
1503  case Variable::PARAMETER:
1504  case Variable::LOCAL:
1505  case Variable::CONTEXT: {
1506  Comment cmnt(masm_, var->IsContextSlot()
1507  ? "Context variable"
1508  : "Stack variable");
1509  if (var->binding_needs_init()) {
1510  // var->scope() may be NULL when the proxy is located in eval code and
1511  // refers to a potential outside binding. Currently those bindings are
1512  // always looked up dynamically, i.e. in that case
1513  // var->location() == LOOKUP.
1514  // always holds.
1515  DCHECK(var->scope() != NULL);
1516 
1517  // Check if the binding really needs an initialization check. The check
1518  // can be skipped in the following situation: we have a LET or CONST
1519  // binding in harmony mode, both the Variable and the VariableProxy have
1520  // the same declaration scope (i.e. they are both in global code, in the
1521  // same function or in the same eval code) and the VariableProxy is in
1522  // the source physically located after the initializer of the variable.
1523  //
1524  // We cannot skip any initialization checks for CONST in non-harmony
1525  // mode because const variables may be declared but never initialized:
1526  // if (false) { const x; }; var y = x;
1527  //
1528  // The condition on the declaration scopes is a conservative check for
1529  // nested functions that access a binding and are called before the
1530  // binding is initialized:
1531  // function() { f(); let x = 1; function f() { x = 2; } }
1532  //
1533  bool skip_init_check;
1534  if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1535  skip_init_check = false;
1536  } else {
1537  // Check that we always have valid source position.
1538  DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1539  DCHECK(proxy->position() != RelocInfo::kNoPosition);
1540  skip_init_check = var->mode() != CONST_LEGACY &&
1541  var->initializer_position() < proxy->position();
1542  }
1543 
1544  if (!skip_init_check) {
1545  // Let and const need a read barrier.
1546  GetVar(x0, var);
1547  Label done;
1548  __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, &done);
1549  if (var->mode() == LET || var->mode() == CONST) {
1550  // Throw a reference error when using an uninitialized let/const
1551  // binding in harmony mode.
1552  __ Mov(x0, Operand(var->name()));
1553  __ Push(x0);
1554  __ CallRuntime(Runtime::kThrowReferenceError, 1);
1555  __ Bind(&done);
1556  } else {
1557  // Uninitialized const bindings outside of harmony mode are unholed.
1558  DCHECK(var->mode() == CONST_LEGACY);
1559  __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
1560  __ Bind(&done);
1561  }
1562  context()->Plug(x0);
1563  break;
1564  }
1565  }
1566  context()->Plug(var);
1567  break;
1568  }
1569 
1570  case Variable::LOOKUP: {
1571  Label done, slow;
1572  // Generate code for loading from variables potentially shadowed by
1573  // eval-introduced variables.
1574  EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
1575  __ Bind(&slow);
1576  Comment cmnt(masm_, "Lookup variable");
1577  __ Mov(x1, Operand(var->name()));
1578  __ Push(cp, x1); // Context and name.
1579  __ CallRuntime(Runtime::kLoadLookupSlot, 2);
1580  __ Bind(&done);
1581  context()->Plug(x0);
1582  break;
1583  }
1584  }
1585 }
1586 
1587 
1588 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1589  Comment cmnt(masm_, "[ RegExpLiteral");
1590  Label materialized;
1591  // Registers will be used as follows:
1592  // x5 = materialized value (RegExp literal)
1593  // x4 = JS function, literals array
1594  // x3 = literal index
1595  // x2 = RegExp pattern
1596  // x1 = RegExp flags
1597  // x0 = RegExp literal clone
1598  __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1599  __ Ldr(x4, FieldMemOperand(x10, JSFunction::kLiteralsOffset));
1600  int literal_offset =
1601  FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1602  __ Ldr(x5, FieldMemOperand(x4, literal_offset));
1603  __ JumpIfNotRoot(x5, Heap::kUndefinedValueRootIndex, &materialized);
1604 
1605  // Create regexp literal using runtime function.
1606  // Result will be in x0.
1607  __ Mov(x3, Smi::FromInt(expr->literal_index()));
1608  __ Mov(x2, Operand(expr->pattern()));
1609  __ Mov(x1, Operand(expr->flags()));
1610  __ Push(x4, x3, x2, x1);
1611  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1612  __ Mov(x5, x0);
1613 
1614  __ Bind(&materialized);
1615  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1616  Label allocated, runtime_allocate;
1617  __ Allocate(size, x0, x2, x3, &runtime_allocate, TAG_OBJECT);
1618  __ B(&allocated);
1619 
1620  __ Bind(&runtime_allocate);
1621  __ Mov(x10, Smi::FromInt(size));
1622  __ Push(x5, x10);
1623  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1624  __ Pop(x5);
1625 
1626  __ Bind(&allocated);
1627  // After this, registers are used as follows:
1628  // x0: Newly allocated regexp.
1629  // x5: Materialized regexp.
1630  // x10, x11, x12: temps.
1631  __ CopyFields(x0, x5, CPURegList(x10, x11, x12), size / kPointerSize);
1632  context()->Plug(x0);
1633 }
1634 
1635 
1636 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1637  if (expression == NULL) {
1638  __ LoadRoot(x10, Heap::kNullValueRootIndex);
1639  __ Push(x10);
1640  } else {
1641  VisitForStackValue(expression);
1642  }
1643 }
1644 
1645 
1646 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1647  Comment cmnt(masm_, "[ ObjectLiteral");
1648 
1649  expr->BuildConstantProperties(isolate());
1650  Handle<FixedArray> constant_properties = expr->constant_properties();
1651  __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1652  __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
1653  __ Mov(x2, Smi::FromInt(expr->literal_index()));
1654  __ Mov(x1, Operand(constant_properties));
1655  int flags = expr->fast_elements()
1656  ? ObjectLiteral::kFastElements
1657  : ObjectLiteral::kNoFlags;
1658  flags |= expr->has_function()
1659  ? ObjectLiteral::kHasFunction
1660  : ObjectLiteral::kNoFlags;
1661  __ Mov(x0, Smi::FromInt(flags));
1662  int properties_count = constant_properties->length() / 2;
1663  const int max_cloned_properties =
1664  FastCloneShallowObjectStub::kMaximumClonedProperties;
1665  if (expr->may_store_doubles() || expr->depth() > 1 ||
1666  masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements ||
1667  properties_count > max_cloned_properties) {
1668  __ Push(x3, x2, x1, x0);
1669  __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1670  } else {
1671  FastCloneShallowObjectStub stub(isolate(), properties_count);
1672  __ CallStub(&stub);
1673  }
1674 
1675  // If result_saved is true the result is on top of the stack. If
1676  // result_saved is false the result is in x0.
1677  bool result_saved = false;
1678 
1679  // Mark all computed expressions that are bound to a key that
1680  // is shadowed by a later occurrence of the same key. For the
1681  // marked expressions, no store code is emitted.
1682  expr->CalculateEmitStore(zone());
1683 
1684  AccessorTable accessor_table(zone());
1685  for (int i = 0; i < expr->properties()->length(); i++) {
1686  ObjectLiteral::Property* property = expr->properties()->at(i);
1687  if (property->IsCompileTimeValue()) continue;
1688 
1689  Literal* key = property->key();
1690  Expression* value = property->value();
1691  if (!result_saved) {
1692  __ Push(x0); // Save result on stack
1693  result_saved = true;
1694  }
1695  switch (property->kind()) {
1696  case ObjectLiteral::Property::CONSTANT:
1697  UNREACHABLE();
1698  case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1699  DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1700  // Fall through.
1701  case ObjectLiteral::Property::COMPUTED:
1702  if (key->value()->IsInternalizedString()) {
1703  if (property->emit_store()) {
1704  VisitForAccumulatorValue(value);
1705  DCHECK(StoreDescriptor::ValueRegister().is(x0));
1706  __ Mov(StoreDescriptor::NameRegister(), Operand(key->value()));
1707  __ Peek(StoreDescriptor::ReceiverRegister(), 0);
1708  CallStoreIC(key->LiteralFeedbackId());
1709  PrepareForBailoutForId(key->id(), NO_REGISTERS);
1710  } else {
1711  VisitForEffect(value);
1712  }
1713  break;
1714  }
1715  if (property->emit_store()) {
1716  // Duplicate receiver on stack.
1717  __ Peek(x0, 0);
1718  __ Push(x0);
1719  VisitForStackValue(key);
1720  VisitForStackValue(value);
1721  __ Mov(x0, Smi::FromInt(SLOPPY)); // Strict mode
1722  __ Push(x0);
1723  __ CallRuntime(Runtime::kSetProperty, 4);
1724  } else {
1725  VisitForEffect(key);
1726  VisitForEffect(value);
1727  }
1728  break;
1729  case ObjectLiteral::Property::PROTOTYPE:
1730  if (property->emit_store()) {
1731  // Duplicate receiver on stack.
1732  __ Peek(x0, 0);
1733  __ Push(x0);
1734  VisitForStackValue(value);
1735  __ CallRuntime(Runtime::kSetPrototype, 2);
1736  } else {
1737  VisitForEffect(value);
1738  }
1739  break;
1740  case ObjectLiteral::Property::GETTER:
1741  accessor_table.lookup(key)->second->getter = value;
1742  break;
1743  case ObjectLiteral::Property::SETTER:
1744  accessor_table.lookup(key)->second->setter = value;
1745  break;
1746  }
1747  }
1748 
1749  // Emit code to define accessors, using only a single call to the runtime for
1750  // each pair of corresponding getters and setters.
1751  for (AccessorTable::Iterator it = accessor_table.begin();
1752  it != accessor_table.end();
1753  ++it) {
1754  __ Peek(x10, 0); // Duplicate receiver.
1755  __ Push(x10);
1756  VisitForStackValue(it->first);
1757  EmitAccessor(it->second->getter);
1758  EmitAccessor(it->second->setter);
1759  __ Mov(x10, Smi::FromInt(NONE));
1760  __ Push(x10);
1761  __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1762  }
1763 
1764  if (expr->has_function()) {
1765  DCHECK(result_saved);
1766  __ Peek(x0, 0);
1767  __ Push(x0);
1768  __ CallRuntime(Runtime::kToFastProperties, 1);
1769  }
1770 
1771  if (result_saved) {
1772  context()->PlugTOS();
1773  } else {
1774  context()->Plug(x0);
1775  }
1776 }
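 // Illustrative example (not from the original source): for
 //   var o = { a: 1, get b() { return 2; }, set b(v) {} };
 // 'a' is stored with a StoreIC (or Runtime::kSetProperty on the generic
 // path), while the paired getter and setter for 'b' are collected in
 // accessor_table and defined with a single
 // Runtime::kDefineAccessorPropertyUnchecked call.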
1777 
1778 
1779 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1780  Comment cmnt(masm_, "[ ArrayLiteral");
1781 
1782  expr->BuildConstantElements(isolate());
1783  int flags = (expr->depth() == 1) ? ArrayLiteral::kShallowElements
1784  : ArrayLiteral::kNoFlags;
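      // E.g. [1, 2, 3] has depth 1 and gets kShallowElements, so the stub clone
      // below can be used; a nested literal such as [[1], 2] has depth > 1 and
      // is built via the runtime call instead.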
1785 
1786  ZoneList<Expression*>* subexprs = expr->values();
1787  int length = subexprs->length();
1788  Handle<FixedArray> constant_elements = expr->constant_elements();
1789  DCHECK_EQ(2, constant_elements->length());
1790  ElementsKind constant_elements_kind =
1791  static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1792  bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind);
1793  Handle<FixedArrayBase> constant_elements_values(
1794  FixedArrayBase::cast(constant_elements->get(1)));
1795 
1796  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1797  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1798  // If the only customer of allocation sites is transitioning, then
1799  // we can turn it off if we don't have anywhere else to transition to.
1800  allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1801  }
1802 
1805  __ Mov(x2, Smi::FromInt(expr->literal_index()));
1806  __ Mov(x1, Operand(constant_elements));
1807  if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
1808  __ Mov(x0, Smi::FromInt(flags));
1809  __ Push(x3, x2, x1, x0);
1810  __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1811  } else {
1812  FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1813  __ CallStub(&stub);
1814  }
1815 
1816  bool result_saved = false; // Is the result saved to the stack?
1817 
1818  // Emit code to evaluate all the non-constant subexpressions and to store
1819  // them into the newly cloned array.
1820  for (int i = 0; i < length; i++) {
1821  Expression* subexpr = subexprs->at(i);
1822  // If the subexpression is a literal or a simple materialized literal it
1823  // is already set in the cloned array.
1824  if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1825 
1826  if (!result_saved) {
1827  __ Mov(x1, Smi::FromInt(expr->literal_index()));
1828  __ Push(x0, x1);
1829  result_saved = true;
1830  }
1831  VisitForAccumulatorValue(subexpr);
1832 
1833  if (IsFastObjectElementsKind(constant_elements_kind)) {
1834  int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1835  __ Peek(x6, kPointerSize); // Copy of array literal.
1836  __ Ldr(x1, FieldMemOperand(x6, JSObject::kElementsOffset));
1837  __ Str(result_register(), FieldMemOperand(x1, offset));
1838  // Update the write barrier for the array store.
1839  __ RecordWriteField(x1, offset, result_register(), x10,
1840  kLRHasBeenSaved, kDontSaveFPRegs,
1841  EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1842  } else {
1843  __ Mov(x3, Smi::FromInt(i));
1844  StoreArrayLiteralElementStub stub(isolate());
1845  __ CallStub(&stub);
1846  }
1847 
1848  PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1849  }
1850 
1851  if (result_saved) {
1852  __ Drop(1); // literal index
1853  context()->PlugTOS();
1854  } else {
1855  context()->Plug(x0);
1856  }
1857 }
1858 
1859 
1860 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1861  DCHECK(expr->target()->IsValidReferenceExpression());
1862 
1863  Comment cmnt(masm_, "[ Assignment");
1864 
1865  // Left-hand side can only be a property, a global or a (parameter or local)
1866  // slot.
1867  enum LhsKind {
1868  VARIABLE,
1869  NAMED_PROPERTY,
1870  KEYED_PROPERTY,
1871  NAMED_SUPER_PROPERTY
1872  };
1873  LhsKind assign_type = VARIABLE;
1874  Property* property = expr->target()->AsProperty();
1875  if (property != NULL) {
1876  assign_type = (property->key()->IsPropertyName())
1877  ? (property->IsSuperAccess() ? NAMED_SUPER_PROPERTY
1878  : NAMED_PROPERTY)
1879  : KEYED_PROPERTY;
1880  }
1881 
1882  // Evaluate LHS expression.
1883  switch (assign_type) {
1884  case VARIABLE:
1885  // Nothing to do here.
1886  break;
1887  case NAMED_PROPERTY:
1888  if (expr->is_compound()) {
1889  // We need the receiver both on the stack and in the register.
1890  VisitForStackValue(property->obj());
1891  __ Peek(LoadDescriptor::ReceiverRegister(), 0);
1892  } else {
1893  VisitForStackValue(property->obj());
1894  }
1895  break;
1896  case NAMED_SUPER_PROPERTY:
1897  VisitForStackValue(property->obj()->AsSuperReference()->this_var());
1898  EmitLoadHomeObject(property->obj()->AsSuperReference());
1899  __ Push(result_register());
1900  if (expr->is_compound()) {
1901  const Register scratch = x10;
1902  __ Peek(scratch, kPointerSize);
1903  __ Push(scratch, result_register());
1904  }
1905  break;
1906  case KEYED_PROPERTY:
1907  if (expr->is_compound()) {
1908  VisitForStackValue(property->obj());
1909  VisitForStackValue(property->key());
1910  __ Peek(LoadDescriptor::ReceiverRegister(), kPointerSize);
1911  __ Peek(LoadDescriptor::NameRegister(), 0);
1912  } else {
1913  VisitForStackValue(property->obj());
1914  VisitForStackValue(property->key());
1915  }
1916  break;
1917  }
1918 
1919  // For compound assignments we need another deoptimization point after the
1920  // variable/property load.
1921  if (expr->is_compound()) {
1922  { AccumulatorValueContext context(this);
1923  switch (assign_type) {
1924  case VARIABLE:
1925  EmitVariableLoad(expr->target()->AsVariableProxy());
1926  PrepareForBailout(expr->target(), TOS_REG);
1927  break;
1928  case NAMED_PROPERTY:
1929  EmitNamedPropertyLoad(property);
1930  PrepareForBailoutForId(property->LoadId(), TOS_REG);
1931  break;
1932  case NAMED_SUPER_PROPERTY:
1933  EmitNamedSuperPropertyLoad(property);
1934  PrepareForBailoutForId(property->LoadId(), TOS_REG);
1935  break;
1936  case KEYED_PROPERTY:
1937  EmitKeyedPropertyLoad(property);
1938  PrepareForBailoutForId(property->LoadId(), TOS_REG);
1939  break;
1940  }
1941  }
1942 
1943  Token::Value op = expr->binary_op();
1944  __ Push(x0); // Left operand goes on the stack.
1945  VisitForAccumulatorValue(expr->value());
1946 
1947  OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1948  ? OVERWRITE_RIGHT
1949  : NO_OVERWRITE;
1950  SetSourcePosition(expr->position() + 1);
1951  AccumulatorValueContext context(this);
1952  if (ShouldInlineSmiCase(op)) {
1953  EmitInlineSmiBinaryOp(expr->binary_operation(),
1954  op,
1955  mode,
1956  expr->target(),
1957  expr->value());
1958  } else {
1959  EmitBinaryOp(expr->binary_operation(), op, mode);
1960  }
1961 
1962  // Deoptimization point in case the binary operation may have side effects.
1963  PrepareForBailout(expr->binary_operation(), TOS_REG);
1964  } else {
1965  VisitForAccumulatorValue(expr->value());
1966  }
1967 
1968  // Record source position before possible IC call.
1969  SetSourcePosition(expr->position());
1970 
1971  // Store the value.
1972  switch (assign_type) {
1973  case VARIABLE:
1974  EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1975  expr->op());
1976  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1977  context()->Plug(x0);
1978  break;
1979  case NAMED_PROPERTY:
1980  EmitNamedPropertyAssignment(expr);
1981  break;
1982  case NAMED_SUPER_PROPERTY:
1983  EmitNamedSuperPropertyAssignment(expr);
1984  break;
1985  case KEYED_PROPERTY:
1986  EmitKeyedPropertyAssignment(expr);
1987  break;
1988  }
1989 }
1990 
1991 
1992 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
1993  SetSourcePosition(prop->position());
1994  Literal* key = prop->key()->AsLiteral();
1995  DCHECK(!prop->IsSuperAccess());
1996 
1997  __ Mov(LoadDescriptor::NameRegister(), Operand(key->value()));
1998  if (FLAG_vector_ics) {
1999  __ Mov(VectorLoadICDescriptor::SlotRegister(),
2000  Smi::FromInt(prop->PropertyFeedbackSlot()));
2001  CallLoadIC(NOT_CONTEXTUAL);
2002  } else {
2003  CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2004  }
2005 }
2006 
2007 
2008 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2009  // Stack: receiver, home_object.
2010  SetSourcePosition(prop->position());
2011  Literal* key = prop->key()->AsLiteral();
2012  DCHECK(!key->value()->IsSmi());
2013  DCHECK(prop->IsSuperAccess());
2014 
2015  __ Push(key->value());
2016  __ CallRuntime(Runtime::kLoadFromSuper, 3);
2017 }
2018 
2019 
2020 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2021  SetSourcePosition(prop->position());
2022  // Call keyed load IC. Callers place the key in LoadDescriptor::NameRegister()
2023  // and the receiver in LoadDescriptor::ReceiverRegister().
2023  Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2024  if (FLAG_vector_ics) {
2025  __ Mov(VectorLoadICDescriptor::SlotRegister(),
2026  Smi::FromInt(prop->PropertyFeedbackSlot()));
2027  CallIC(ic);
2028  } else {
2029  CallIC(ic, prop->PropertyFeedbackId());
2030  }
2031 }
2032 
2033 
2034 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2035  Token::Value op,
2036  OverwriteMode mode,
2037  Expression* left_expr,
2038  Expression* right_expr) {
2039  Label done, both_smis, stub_call;
2040 
2041  // Get the arguments.
2042  Register left = x1;
2043  Register right = x0;
2044  Register result = x0;
2045  __ Pop(left);
2046 
2047  // Perform combined smi check on both operands.
2048  __ Orr(x10, left, right);
2049  JumpPatchSite patch_site(masm_);
2050  patch_site.EmitJumpIfSmi(x10, &both_smis);
2051 
2052  __ Bind(&stub_call);
2053 
2054  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2055  {
2056  Assembler::BlockPoolsScope scope(masm_);
2057  CallIC(code, expr->BinaryOperationFeedbackId());
2058  patch_site.EmitPatchInfo();
2059  }
2060  __ B(&done);
2061 
2062  __ Bind(&both_smis);
2063  // Smi case. This code works in the same way as the smi-smi case in the type
2064  // recording binary operation stub, see
2065  // BinaryOpStub::GenerateSmiSmiOperation for comments.
2066  // TODO(all): That doesn't exist any more. Where are the comments?
2067  //
2068  // The set of operations that needs to be supported here is controlled by
2069  // FullCodeGenerator::ShouldInlineSmiCase().
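      // For example, a source-level "a + b" with two smi operands takes the Adds
      // path below; if the addition overflows (V flag set), we branch back to
      // stub_call and let the BinaryOpIC handle it.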
2070  switch (op) {
2071  case Token::SAR:
2072  __ Ubfx(right, right, kSmiShift, 5);
2073  __ Asr(result, left, right);
2074  __ Bic(result, result, kSmiShiftMask);
2075  break;
2076  case Token::SHL:
2077  __ Ubfx(right, right, kSmiShift, 5);
2078  __ Lsl(result, left, right);
2079  break;
2080  case Token::SHR:
2081  // If `left >>> right` >= 0x80000000, the result is not representable in a
2082  // signed 32-bit smi.
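      // E.g. (-1) >>> 0 yields 0xffffffff, which does not fit in a signed 32-bit
      // value, so the sign-bit test below falls back to the stub.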
2083  __ Ubfx(right, right, kSmiShift, 5);
2084  __ Lsr(x10, left, right);
2085  __ Tbnz(x10, kXSignBit, &stub_call);
2086  __ Bic(result, x10, kSmiShiftMask);
2087  break;
2088  case Token::ADD:
2089  __ Adds(x10, left, right);
2090  __ B(vs, &stub_call);
2091  __ Mov(result, x10);
2092  break;
2093  case Token::SUB:
2094  __ Subs(x10, left, right);
2095  __ B(vs, &stub_call);
2096  __ Mov(result, x10);
2097  break;
2098  case Token::MUL: {
2099  Label not_minus_zero, done;
2100  STATIC_ASSERT(static_cast<unsigned>(kSmiShift) == (kXRegSizeInBits / 2));
2101  STATIC_ASSERT(kSmiTag == 0);
2102  __ Smulh(x10, left, right);
2103  __ Cbnz(x10, &not_minus_zero);
2104  __ Eor(x11, left, right);
2105  __ Tbnz(x11, kXSignBit, &stub_call);
2106  __ Mov(result, x10);
2107  __ B(&done);
2108  __ Bind(&not_minus_zero);
2109  __ Cls(x11, x10);
2110  __ Cmp(x11, kXRegSizeInBits - kSmiShift);
2111  __ B(lt, &stub_call);
2112  __ SmiTag(result, x10);
2113  __ Bind(&done);
2114  break;
2115  }
2116  case Token::BIT_OR:
2117  __ Orr(result, left, right);
2118  break;
2119  case Token::BIT_AND:
2120  __ And(result, left, right);
2121  break;
2122  case Token::BIT_XOR:
2123  __ Eor(result, left, right);
2124  break;
2125  default:
2126  UNREACHABLE();
2127  }
2128 
2129  __ Bind(&done);
2130  context()->Plug(x0);
2131 }
2132 
2133 
2134 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2135  Token::Value op,
2136  OverwriteMode mode) {
2137  __ Pop(x1);
2138  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2139  JumpPatchSite patch_site(masm_); // Unbound, signals no inlined smi code.
2140  {
2141  Assembler::BlockPoolsScope scope(masm_);
2142  CallIC(code, expr->BinaryOperationFeedbackId());
2143  patch_site.EmitPatchInfo();
2144  }
2145  context()->Plug(x0);
2146 }
2147 
2148 
2149 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2150  DCHECK(expr->IsValidReferenceExpression());
2151 
2152  // Left-hand side can only be a property, a global or a (parameter or local)
2153  // slot.
2154  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2155  LhsKind assign_type = VARIABLE;
2156  Property* prop = expr->AsProperty();
2157  if (prop != NULL) {
2158  assign_type = (prop->key()->IsPropertyName())
2159  ? NAMED_PROPERTY
2160  : KEYED_PROPERTY;
2161  }
2162 
2163  switch (assign_type) {
2164  case VARIABLE: {
2165  Variable* var = expr->AsVariableProxy()->var();
2166  EffectContext context(this);
2167  EmitVariableAssignment(var, Token::ASSIGN);
2168  break;
2169  }
2170  case NAMED_PROPERTY: {
2171  __ Push(x0); // Preserve value.
2172  VisitForAccumulatorValue(prop->obj());
2173  // TODO(all): We could introduce a VisitForRegValue(reg, expr) to avoid
2174  // this copy.
2175  __ Mov(StoreDescriptor::ReceiverRegister(), x0);
2176  __ Pop(StoreDescriptor::ValueRegister()); // Restore value.
2177  __ Mov(StoreDescriptor::NameRegister(),
2178  Operand(prop->key()->AsLiteral()->value()));
2179  CallStoreIC();
2180  break;
2181  }
2182  case KEYED_PROPERTY: {
2183  __ Push(x0); // Preserve value.
2184  VisitForStackValue(prop->obj());
2185  VisitForAccumulatorValue(prop->key());
2186  __ Mov(StoreDescriptor::NameRegister(), x0);
      __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::ValueRegister());
2189  Handle<Code> ic =
2190  CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2191  CallIC(ic);
2192  break;
2193  }
2194  }
2195  context()->Plug(x0);
2196 }
2197 
2198 
2199  void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2200  Variable* var, MemOperand location) {
2201  __ Str(result_register(), location);
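      // Stack slots are not heap objects, so a plain store suffices; context
      // slots live in heap-allocated Context objects, so storing a pointer into
      // one must be recorded for the GC, as below.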
2202  if (var->IsContextSlot()) {
2203  // RecordWrite may destroy all its register arguments.
2204  __ Mov(x10, result_register());
2205  int offset = Context::SlotOffset(var->index());
2206  __ RecordWriteContextSlot(
2207  x1, offset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
2208  }
2209 }
2210 
2211 
2212  void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2213  Token::Value op) {
2214  ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment");
2215  if (var->IsUnallocated()) {
2216  // Global var, const, or let.
2217  __ Mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2218  __ Ldr(StoreDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
2219  CallStoreIC();
2220 
2221  } else if (op == Token::INIT_CONST_LEGACY) {
2222  // Const initializers need a write barrier.
2223  DCHECK(!var->IsParameter()); // No const parameters.
2224  if (var->IsLookupSlot()) {
2225  __ Mov(x1, Operand(var->name()));
2226  __ Push(x0, cp, x1);
2227  __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2228  } else {
2229  DCHECK(var->IsStackLocal() || var->IsContextSlot());
2230  Label skip;
2231  MemOperand location = VarOperand(var, x1);
2232  __ Ldr(x10, location);
2233  __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &skip);
2234  EmitStoreToStackLocalOrContextSlot(var, location);
2235  __ Bind(&skip);
2236  }
2237 
2238  } else if (var->mode() == LET && op != Token::INIT_LET) {
2239  // Non-initializing assignment to let variable needs a write barrier.
2240  DCHECK(!var->IsLookupSlot());
2241  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2242  Label assign;
2243  MemOperand location = VarOperand(var, x1);
2244  __ Ldr(x10, location);
2245  __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &assign);
2246  __ Mov(x10, Operand(var->name()));
2247  __ Push(x10);
2248  __ CallRuntime(Runtime::kThrowReferenceError, 1);
2249  // Perform the assignment.
2250  __ Bind(&assign);
2251  EmitStoreToStackLocalOrContextSlot(var, location);
2252 
2253  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2254  if (var->IsLookupSlot()) {
2255  // Assignment to var.
2256  __ Mov(x11, Operand(var->name()));
2257  __ Mov(x10, Smi::FromInt(strict_mode()));
2258  // jssp[0] : mode.
2259  // jssp[8] : name.
2260  // jssp[16] : context.
2261  // jssp[24] : value.
2262  __ Push(x0, cp, x11, x10);
2263  __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2264  } else {
2265  // Assignment to var or initializing assignment to let/const in harmony
2266  // mode.
2267  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2268  MemOperand location = VarOperand(var, x1);
2269  if (FLAG_debug_code && op == Token::INIT_LET) {
2270  __ Ldr(x10, location);
2271  __ CompareRoot(x10, Heap::kTheHoleValueRootIndex);
2272  __ Check(eq, kLetBindingReInitialization);
2273  }
2274  EmitStoreToStackLocalOrContextSlot(var, location);
2275  }
2276  }
2277  // Non-initializing assignments to consts are ignored.
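      // E.g. in sloppy mode "const c = 1; c = 2;" leaves c at 1; the second
      // assignment reaches this point and emits no store.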
2278 }
2279 
2280 
2281 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2282  ASM_LOCATION("FullCodeGenerator::EmitNamedPropertyAssignment");
2283  // Assignment to a property, using a named store IC.
2284  Property* prop = expr->target()->AsProperty();
2285  DCHECK(prop != NULL);
2286  DCHECK(prop->key()->IsLiteral());
2287 
2288  // Record source code position before IC call.
2289  SetSourcePosition(expr->position());
2290  __ Mov(StoreDescriptor::NameRegister(),
2291  Operand(prop->key()->AsLiteral()->value()));
2292  __ Pop(StoreDescriptor::ReceiverRegister());
2293  CallStoreIC(expr->AssignmentFeedbackId());
2294 
2295  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2296  context()->Plug(x0);
2297 }
2298 
2299 
2300  void FullCodeGenerator::EmitNamedSuperPropertyAssignment(Assignment* expr) {
2301  // Assignment to named property of super.
2302  // x0 : value
2303  // stack : receiver ('this'), home_object
2304  Property* prop = expr->target()->AsProperty();
2305  DCHECK(prop != NULL);
2306  Literal* key = prop->key()->AsLiteral();
2307  DCHECK(key != NULL);
2308 
2309  __ Push(x0);
2310  __ Push(key->value());
2311  __ CallRuntime((strict_mode() == STRICT ? Runtime::kStoreToSuper_Strict
2312  : Runtime::kStoreToSuper_Sloppy),
2313  4);
2314  context()->Plug(x0);
2315 }
2316 
2317 
2318 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2319  ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment");
2320  // Assignment to a property, using a keyed store IC.
2321 
2322  // Record source code position before IC call.
2323  SetSourcePosition(expr->position());
2324  // TODO(all): Could we pass this in registers rather than on the stack?
2325  __ Pop(StoreDescriptor::NameRegister(), StoreDescriptor::ReceiverRegister());
2326  DCHECK(StoreDescriptor::ValueRegister().is(x0));
2327 
2328  Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2329  CallIC(ic, expr->AssignmentFeedbackId());
2330 
2331  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2332  context()->Plug(x0);
2333 }
2334 
2335 
2336 void FullCodeGenerator::VisitProperty(Property* expr) {
2337  Comment cmnt(masm_, "[ Property");
2338  Expression* key = expr->key();
2339 
2340  if (key->IsPropertyName()) {
2341  if (!expr->IsSuperAccess()) {
2342  VisitForAccumulatorValue(expr->obj());
2343  __ Move(LoadDescriptor::ReceiverRegister(), x0);
2344  EmitNamedPropertyLoad(expr);
2345  } else {
2346  VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
2347  EmitLoadHomeObject(expr->obj()->AsSuperReference());
2348  __ Push(result_register());
2349  EmitNamedSuperPropertyLoad(expr);
2350  }
2351  PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2352  context()->Plug(x0);
2353  } else {
2354  VisitForStackValue(expr->obj());
2355  VisitForAccumulatorValue(expr->key());
2356  __ Move(LoadDescriptor::NameRegister(), x0);
2357  __ Pop(LoadDescriptor::ReceiverRegister());
2358  EmitKeyedPropertyLoad(expr);
2359  context()->Plug(x0);
2360  }
2361 }
2362 
2363 
2364 void FullCodeGenerator::CallIC(Handle<Code> code,
2365  TypeFeedbackId ast_id) {
2366  ic_total_count_++;
2367  // All calls must have a predictable size in full-codegen code to ensure that
2368  // the debugger can patch them correctly.
2369  __ Call(code, RelocInfo::CODE_TARGET, ast_id);
2370 }
2371 
2372 
2373 // Code common for calls using the IC.
2374 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2375  Expression* callee = expr->expression();
2376 
2377  CallICState::CallType call_type =
2378  callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2379 
2380  // Get the target function.
2381  if (call_type == CallICState::FUNCTION) {
2382  { StackValueContext context(this);
2383  EmitVariableLoad(callee->AsVariableProxy());
2385  }
2386  // Push undefined as receiver. This is patched in the method prologue if it
2387  // is a sloppy mode method.
2388  __ Push(isolate()->factory()->undefined_value());
2389  } else {
2390  // Load the function from the receiver.
2391  DCHECK(callee->IsProperty());
2392  DCHECK(!callee->AsProperty()->IsSuperAccess());
2393  __ Peek(LoadDescriptor::ReceiverRegister(), 0);
2394  EmitNamedPropertyLoad(callee->AsProperty());
2395  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2396  // Push the target function under the receiver.
2397  __ Pop(x10);
2398  __ Push(x0, x10);
2399  }
2400 
2401  EmitCall(expr, call_type);
2402 }
2403 
2404 
2405  void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2406  Expression* callee = expr->expression();
2407  DCHECK(callee->IsProperty());
2408  Property* prop = callee->AsProperty();
2409  DCHECK(prop->IsSuperAccess());
2410 
2411  SetSourcePosition(prop->position());
2412  Literal* key = prop->key()->AsLiteral();
2413  DCHECK(!key->value()->IsSmi());
2414 
2415  // Load the function from the receiver.
2416  const Register scratch = x10;
2417  SuperReference* super_ref = callee->AsProperty()->obj()->AsSuperReference();
2418  EmitLoadHomeObject(super_ref);
2419  __ Push(x0);
2420  VisitForAccumulatorValue(super_ref->this_var());
2421  __ Push(x0);
2422  __ Peek(scratch, kPointerSize);
2423  __ Push(x0, scratch);
2424  __ Push(key->value());
2425 
2426  // Stack here:
2427  // - home_object
2428  // - this (receiver)
2429  // - this (receiver) <-- LoadFromSuper will pop here and below.
2430  // - home_object
2431  // - key
2432  __ CallRuntime(Runtime::kLoadFromSuper, 3);
2433 
2434  // Replace home_object with target function.
2435  __ Poke(x0, kPointerSize);
2436 
2437  // Stack here:
2438  // - target function
2439  // - this (receiver)
2440  EmitCall(expr, CallICState::METHOD);
2441 }
2442 
2443 
2444 // Code common for calls using the IC.
2445  void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2446  Expression* key) {
2447  // Load the key.
2448  VisitForAccumulatorValue(key);
2449 
2450  Expression* callee = expr->expression();
2451 
2452  // Load the function from the receiver.
2453  DCHECK(callee->IsProperty());
2454  __ Peek(LoadDescriptor::ReceiverRegister(), 0);
2455  __ Move(LoadDescriptor::NameRegister(), x0);
2456  EmitKeyedPropertyLoad(callee->AsProperty());
2457  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2458 
2459  // Push the target function under the receiver.
2460  __ Pop(x10);
2461  __ Push(x0, x10);
2462 
2463  EmitCall(expr, CallICState::METHOD);
2464 }
2465 
2466 
2467 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2468  // Load the arguments.
2469  ZoneList<Expression*>* args = expr->arguments();
2470  int arg_count = args->length();
2471  { PreservePositionScope scope(masm()->positions_recorder());
2472  for (int i = 0; i < arg_count; i++) {
2473  VisitForStackValue(args->at(i));
2474  }
2475  }
2476  // Record source position of the IC call.
2477  SetSourcePosition(expr->position());
2478 
2479  Handle<Code> ic = CallIC::initialize_stub(
2480  isolate(), arg_count, call_type);
2481  __ Mov(x3, Smi::FromInt(expr->CallFeedbackSlot()));
2482  __ Peek(x1, (arg_count + 1) * kXRegSize);
2483  // Don't assign a type feedback id to the IC, since type feedback is provided
2484  // by the vector above.
2485  CallIC(ic);
2486 
2487  RecordJSReturnSite(expr);
2488  // Restore context register.
2489  __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2490  context()->DropAndPlug(1, x0);
2491 }
2492 
2493 
2494  void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2495  ASM_LOCATION("FullCodeGenerator::EmitResolvePossiblyDirectEval");
2496  // Prepare to push a copy of the first argument or undefined if it doesn't
2497  // exist.
2498  if (arg_count > 0) {
2499  __ Peek(x10, arg_count * kXRegSize);
2500  } else {
2501  __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
2502  }
2503 
2504  // Prepare to push the receiver of the enclosing function.
2505  int receiver_offset = 2 + info_->scope()->num_parameters();
2506  __ Ldr(x11, MemOperand(fp, receiver_offset * kPointerSize));
2507 
2508  // Prepare to push the language mode.
2509  __ Mov(x12, Smi::FromInt(strict_mode()));
2510  // Prepare to push the start position of the scope the call resides in.
2511  __ Mov(x13, Smi::FromInt(scope()->start_position()));
2512 
2513  // Push.
2514  __ Push(x10, x11, x12, x13);
2515 
2516  // Do the runtime call.
2517  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2518 }
2519 
2520 
2521 void FullCodeGenerator::VisitCall(Call* expr) {
2522 #ifdef DEBUG
2523  // We want to verify that RecordJSReturnSite gets called on all paths
2524  // through this function. Avoid early returns.
2525  expr->return_is_recorded_ = false;
2526 #endif
2527 
2528  Comment cmnt(masm_, "[ Call");
2529  Expression* callee = expr->expression();
2530  Call::CallType call_type = expr->GetCallType(isolate());
2531 
2532  if (call_type == Call::POSSIBLY_EVAL_CALL) {
2533  // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2534  // to resolve the function we need to call and the receiver of the
2535  // call. Then we call the resolved function using the given
2536  // arguments.
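      // E.g. for eval("x + 1") the resolver decides at runtime whether this is a
      // direct eval (evaluated in the caller's scope) or an ordinary call to
      // whatever "eval" currently resolves to.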
2537  ZoneList<Expression*>* args = expr->arguments();
2538  int arg_count = args->length();
2539 
2540  {
2541  PreservePositionScope pos_scope(masm()->positions_recorder());
2542  VisitForStackValue(callee);
2543  __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
2544  __ Push(x10); // Reserved receiver slot.
2545 
2546  // Push the arguments.
2547  for (int i = 0; i < arg_count; i++) {
2548  VisitForStackValue(args->at(i));
2549  }
2550 
2551  // Push a copy of the function (found below the arguments) and
2552  // resolve eval.
2553  __ Peek(x10, (arg_count + 1) * kPointerSize);
2554  __ Push(x10);
2555  EmitResolvePossiblyDirectEval(arg_count);
2556 
2557  // The runtime call returns a pair of values in x0 (function) and
2558  // x1 (receiver). Touch up the stack with the right values.
2559  __ PokePair(x1, x0, arg_count * kPointerSize);
2560  }
2561 
2562  // Record source position for debugger.
2563  SetSourcePosition(expr->position());
2564 
2565  // Call the evaluated function.
2566  CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
2567  __ Peek(x1, (arg_count + 1) * kXRegSize);
2568  __ CallStub(&stub);
2569  RecordJSReturnSite(expr);
2570  // Restore context register.
2571  __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2572  context()->DropAndPlug(1, x0);
2573 
2574  } else if (call_type == Call::GLOBAL_CALL) {
2575  EmitCallWithLoadIC(expr);
2576 
2577  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2578  // Call to a lookup slot (dynamically introduced variable).
2579  VariableProxy* proxy = callee->AsVariableProxy();
2580  Label slow, done;
2581 
2582  { PreservePositionScope scope(masm()->positions_recorder());
2583  // Generate code for loading from variables potentially shadowed
2584  // by eval-introduced variables.
2585  EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
2586  }
2587 
2588  __ Bind(&slow);
2589  // Call the runtime to find the function to call (returned in x0)
2590  // and the object holding it (returned in x1).
2591  __ Mov(x10, Operand(proxy->name()));
2592  __ Push(context_register(), x10);
2593  __ CallRuntime(Runtime::kLoadLookupSlot, 2);
2594  __ Push(x0, x1); // Receiver, function.
2595 
2596  // If fast case code has been generated, emit code to push the
2597  // function and receiver and have the slow path jump around this
2598  // code.
2599  if (done.is_linked()) {
2600  Label call;
2601  __ B(&call);
2602  __ Bind(&done);
2603  // Push function.
2604  // The receiver is implicitly the global receiver. Indicate this
2605  // by passing undefined to the call function stub.
2606  __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
2607  __ Push(x0, x1);
2608  __ Bind(&call);
2609  }
2610 
2611  // The receiver is either the global receiver or an object found
2612  // by LoadContextSlot.
2613  EmitCall(expr);
2614  } else if (call_type == Call::PROPERTY_CALL) {
2615  Property* property = callee->AsProperty();
2616  bool is_named_call = property->key()->IsPropertyName();
2617  // Named super property calls (super.x()) go through EmitSuperCallWithLoadIC.
2618  if (property->IsSuperAccess() && is_named_call) {
2619  EmitSuperCallWithLoadIC(expr);
2620  } else {
2621  {
2622  PreservePositionScope scope(masm()->positions_recorder());
2623  VisitForStackValue(property->obj());
2624  }
2625  if (is_named_call) {
2626  EmitCallWithLoadIC(expr);
2627  } else {
2628  EmitKeyedCallWithLoadIC(expr, property->key());
2629  }
2630  }
2631  } else {
2632  DCHECK(call_type == Call::OTHER_CALL);
2633  // Call to an arbitrary expression not handled specially above.
2634  { PreservePositionScope scope(masm()->positions_recorder());
2635  VisitForStackValue(callee);
2636  }
2637  __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
2638  __ Push(x1);
2639  // Emit function call.
2640  EmitCall(expr);
2641  }
2642 
2643 #ifdef DEBUG
2644  // RecordJSReturnSite should have been called.
2645  DCHECK(expr->return_is_recorded_);
2646 #endif
2647 }
2648 
2649 
2650 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2651  Comment cmnt(masm_, "[ CallNew");
2652  // According to ECMA-262, section 11.2.2, page 44, the function
2653  // expression in new calls must be evaluated before the
2654  // arguments.
2655 
2656  // Push constructor on the stack. If it's not a function it's used as
2657  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2658  // ignored.
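      // E.g. for "new Point(x, y)", Point is evaluated and pushed first, then x
      // and y are evaluated and pushed left to right.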
2659  VisitForStackValue(expr->expression());
2660 
2661  // Push the arguments ("left-to-right") on the stack.
2662  ZoneList<Expression*>* args = expr->arguments();
2663  int arg_count = args->length();
2664  for (int i = 0; i < arg_count; i++) {
2665  VisitForStackValue(args->at(i));
2666  }
2667 
2668  // Call the construct call builtin that handles allocation and
2669  // constructor invocation.
2670  SetSourcePosition(expr->position());
2671 
2672  // Load function and argument count into x1 and x0.
2673  __ Mov(x0, arg_count);
2674  __ Peek(x1, arg_count * kXRegSize);
2675 
2676  // Record call targets in unoptimized code.
2677  if (FLAG_pretenuring_call_new) {
2678  EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
2679  DCHECK(expr->AllocationSiteFeedbackSlot() ==
2680  expr->CallNewFeedbackSlot() + 1);
2681  }
2682 
2683  __ LoadObject(x2, FeedbackVector());
2684  __ Mov(x3, Smi::FromInt(expr->CallNewFeedbackSlot()));
2685 
2686  CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
2687  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2688  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2689  context()->Plug(x0);
2690 }
2691 
2692 
2693 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2694  ZoneList<Expression*>* args = expr->arguments();
2695  DCHECK(args->length() == 1);
2696 
2697  VisitForAccumulatorValue(args->at(0));
2698 
2699  Label materialize_true, materialize_false;
2700  Label* if_true = NULL;
2701  Label* if_false = NULL;
2702  Label* fall_through = NULL;
2703  context()->PrepareTest(&materialize_true, &materialize_false,
2704  &if_true, &if_false, &fall_through);
2705 
2706  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2707  __ TestAndSplit(x0, kSmiTagMask, if_true, if_false, fall_through);
2708 
2709  context()->Plug(if_true, if_false);
2710 }
2711 
2712 
2713 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2714  ZoneList<Expression*>* args = expr->arguments();
2715  DCHECK(args->length() == 1);
2716 
2717  VisitForAccumulatorValue(args->at(0));
2718 
2719  Label materialize_true, materialize_false;
2720  Label* if_true = NULL;
2721  Label* if_false = NULL;
2722  Label* fall_through = NULL;
2723  context()->PrepareTest(&materialize_true, &materialize_false,
2724  &if_true, &if_false, &fall_through);
2725 
2726  uint64_t sign_mask = V8_UINT64_C(1) << (kSmiShift + kSmiValueSize - 1);
2727 
2728  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2729  __ TestAndSplit(x0, kSmiTagMask | sign_mask, if_true, if_false, fall_through);
2730 
2731  context()->Plug(if_true, if_false);
2732 }
2733 
2734 
2735 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2736  ZoneList<Expression*>* args = expr->arguments();
2737  DCHECK(args->length() == 1);
2738 
2739  VisitForAccumulatorValue(args->at(0));
2740 
2741  Label materialize_true, materialize_false;
2742  Label* if_true = NULL;
2743  Label* if_false = NULL;
2744  Label* fall_through = NULL;
2745  context()->PrepareTest(&materialize_true, &materialize_false,
2746  &if_true, &if_false, &fall_through);
2747 
2748  __ JumpIfSmi(x0, if_false);
2749  __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
2750  __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
2751  // Undetectable objects behave like undefined when tested with typeof.
2752  __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
2753  __ Tbnz(x11, Map::kIsUndetectable, if_false);
2754  __ Ldrb(x12, FieldMemOperand(x10, Map::kInstanceTypeOffset));
2755  __ Cmp(x12, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
2756  __ B(lt, if_false);
2757  __ Cmp(x12, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
2758  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2759  Split(le, if_true, if_false, fall_through);
2760 
2761  context()->Plug(if_true, if_false);
2762 }
2763 
2764 
2765 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2766  ZoneList<Expression*>* args = expr->arguments();
2767  DCHECK(args->length() == 1);
2768 
2769  VisitForAccumulatorValue(args->at(0));
2770 
2771  Label materialize_true, materialize_false;
2772  Label* if_true = NULL;
2773  Label* if_false = NULL;
2774  Label* fall_through = NULL;
2775  context()->PrepareTest(&materialize_true, &materialize_false,
2776  &if_true, &if_false, &fall_through);
2777 
2778  __ JumpIfSmi(x0, if_false);
2779  __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
2780  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2781  Split(ge, if_true, if_false, fall_through);
2782 
2783  context()->Plug(if_true, if_false);
2784 }
2785 
2786 
2787 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2788  ASM_LOCATION("FullCodeGenerator::EmitIsUndetectableObject");
2789  ZoneList<Expression*>* args = expr->arguments();
2790  DCHECK(args->length() == 1);
2791 
2792  VisitForAccumulatorValue(args->at(0));
2793 
2794  Label materialize_true, materialize_false;
2795  Label* if_true = NULL;
2796  Label* if_false = NULL;
2797  Label* fall_through = NULL;
2798  context()->PrepareTest(&materialize_true, &materialize_false,
2799  &if_true, &if_false, &fall_through);
2800 
2801  __ JumpIfSmi(x0, if_false);
2802  __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
2803  __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
2804  __ Tst(x11, 1 << Map::kIsUndetectable);
2805  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2806  Split(ne, if_true, if_false, fall_through);
2807 
2808  context()->Plug(if_true, if_false);
2809 }
2810 
2811 
2812 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2813  CallRuntime* expr) {
2814  ZoneList<Expression*>* args = expr->arguments();
2815  DCHECK(args->length() == 1);
2816  VisitForAccumulatorValue(args->at(0));
2817 
2818  Label materialize_true, materialize_false, skip_lookup;
2819  Label* if_true = NULL;
2820  Label* if_false = NULL;
2821  Label* fall_through = NULL;
2822  context()->PrepareTest(&materialize_true, &materialize_false,
2823  &if_true, &if_false, &fall_through);
2824 
2825  Register object = x0;
2826  __ AssertNotSmi(object);
2827 
2828  Register map = x10;
2829  Register bitfield2 = x11;
2831  __ Ldrb(bitfield2, FieldMemOperand(map, Map::kBitField2Offset));
2832  __ Tbnz(bitfield2, Map::kStringWrapperSafeForDefaultValueOf, &skip_lookup);
2833 
2834  // Check for fast case object. Generate false result for slow case object.
2835  Register props = x12;
2836  Register props_map = x12;
2837  Register hash_table_map = x13;
2838  __ Ldr(props, FieldMemOperand(object, JSObject::kPropertiesOffset));
2839  __ Ldr(props_map, FieldMemOperand(props, HeapObject::kMapOffset));
2840  __ LoadRoot(hash_table_map, Heap::kHashTableMapRootIndex);
2841  __ Cmp(props_map, hash_table_map);
2842  __ B(eq, if_false);
2843 
2844  // Look for valueOf name in the descriptor array, and indicate false if found.
2845  // Since we omit an enumeration index check, if it is added via a transition
2846  // that shares its descriptor array, this is a false positive.
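      // I.e. if any own property named "valueOf" is found on the map's instances,
      // the object is reported as not safe for the default valueOf behaviour
      // (the branch to if_false below).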
2847  Label loop, done;
2848 
2849  // Skip loop if no descriptors are valid.
2850  Register descriptors = x12;
2851  Register descriptors_length = x13;
2852  __ NumberOfOwnDescriptors(descriptors_length, map);
2853  __ Cbz(descriptors_length, &done);
2854 
2855  __ LoadInstanceDescriptors(map, descriptors);
2856 
2857  // Calculate the end of the descriptor array.
2858  Register descriptors_end = x14;
2860  __ Mul(descriptors_length, descriptors_length, x15);
2861  // Calculate location of the first key name.
2862  __ Add(descriptors, descriptors,
2864  // Calculate the end of the descriptor array.
2865  __ Add(descriptors_end, descriptors,
2866  Operand(descriptors_length, LSL, kPointerSizeLog2));
2867 
2868  // Loop through all the keys in the descriptor array. If one of these is the
2869  // string "valueOf" the result is false.
2870  Register valueof_string = x1;
2871  int descriptor_size = DescriptorArray::kDescriptorSize * kPointerSize;
2872  __ Mov(valueof_string, Operand(isolate()->factory()->value_of_string()));
2873  __ Bind(&loop);
2874  __ Ldr(x15, MemOperand(descriptors, descriptor_size, PostIndex));
2875  __ Cmp(x15, valueof_string);
2876  __ B(eq, if_false);
2877  __ Cmp(descriptors, descriptors_end);
2878  __ B(ne, &loop);
2879 
2880  __ Bind(&done);
2881 
2882  // Set the bit in the map to indicate that there is no local valueOf field.
2883  __ Ldrb(x2, FieldMemOperand(map, Map::kBitField2Offset));
2884  __ Orr(x2, x2, 1 << Map::kStringWrapperSafeForDefaultValueOf);
2885  __ Strb(x2, FieldMemOperand(map, Map::kBitField2Offset));
2886 
2887  __ Bind(&skip_lookup);
2888 
2889  // If a valueOf property is not found on the object check that its prototype
2890  // is the unmodified String prototype. If not result is false.
2891  Register prototype = x1;
2892  Register global_idx = x2;
2893  Register native_context = x2;
2894  Register string_proto = x3;
2895  Register proto_map = x4;
2896  __ Ldr(prototype, FieldMemOperand(map, Map::kPrototypeOffset));
2897  __ JumpIfSmi(prototype, if_false);
2898  __ Ldr(proto_map, FieldMemOperand(prototype, HeapObject::kMapOffset));
2899  __ Ldr(global_idx, GlobalObjectMemOperand());
2900  __ Ldr(native_context,
2901  FieldMemOperand(global_idx, GlobalObject::kNativeContextOffset));
2902  __ Ldr(string_proto,
2903  ContextMemOperand(native_context,
2904  Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
2905  __ Cmp(proto_map, string_proto);
2906 
2907  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2908  Split(eq, if_true, if_false, fall_through);
2909 
2910  context()->Plug(if_true, if_false);
2911 }
2912 
2913 
2914 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
2915  ZoneList<Expression*>* args = expr->arguments();
2916  DCHECK(args->length() == 1);
2917 
2918  VisitForAccumulatorValue(args->at(0));
2919 
2920  Label materialize_true, materialize_false;
2921  Label* if_true = NULL;
2922  Label* if_false = NULL;
2923  Label* fall_through = NULL;
2924  context()->PrepareTest(&materialize_true, &materialize_false,
2925  &if_true, &if_false, &fall_through);
2926 
2927  __ JumpIfSmi(x0, if_false);
2928  __ CompareObjectType(x0, x10, x11, JS_FUNCTION_TYPE);
2929  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2930  Split(eq, if_true, if_false, fall_through);
2931 
2932  context()->Plug(if_true, if_false);
2933 }
2934 
2935 
2936 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
2937  ZoneList<Expression*>* args = expr->arguments();
2938  DCHECK(args->length() == 1);
2939 
2940  VisitForAccumulatorValue(args->at(0));
2941 
2942  Label materialize_true, materialize_false;
2943  Label* if_true = NULL;
2944  Label* if_false = NULL;
2945  Label* fall_through = NULL;
2946  context()->PrepareTest(&materialize_true, &materialize_false,
2947  &if_true, &if_false, &fall_through);
2948 
2949  // Only a HeapNumber can be -0.0, so return false if we have something else.
2950  __ JumpIfNotHeapNumber(x0, if_false, DO_SMI_CHECK);
2951 
2952  // Test the bit pattern.
2953  __ Ldr(x10, FieldMemOperand(x0, HeapNumber::kValueOffset));
2954  __ Cmp(x10, 1); // Set V on 0x8000000000000000.
2955 
2956  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2957  Split(vs, if_true, if_false, fall_through);
2958 
2959  context()->Plug(if_true, if_false);
2960 }
2961 
2962 
2963 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2964  ZoneList<Expression*>* args = expr->arguments();
2965  DCHECK(args->length() == 1);
2966 
2967  VisitForAccumulatorValue(args->at(0));
2968 
2969  Label materialize_true, materialize_false;
2970  Label* if_true = NULL;
2971  Label* if_false = NULL;
2972  Label* fall_through = NULL;
2973  context()->PrepareTest(&materialize_true, &materialize_false,
2974  &if_true, &if_false, &fall_through);
2975 
2976  __ JumpIfSmi(x0, if_false);
2977  __ CompareObjectType(x0, x10, x11, JS_ARRAY_TYPE);
2978  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2979  Split(eq, if_true, if_false, fall_through);
2980 
2981  context()->Plug(if_true, if_false);
2982 }
2983 
2984 
2985 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2986  ZoneList<Expression*>* args = expr->arguments();
2987  DCHECK(args->length() == 1);
2988 
2989  VisitForAccumulatorValue(args->at(0));
2990 
2991  Label materialize_true, materialize_false;
2992  Label* if_true = NULL;
2993  Label* if_false = NULL;
2994  Label* fall_through = NULL;
2995  context()->PrepareTest(&materialize_true, &materialize_false,
2996  &if_true, &if_false, &fall_through);
2997 
2998  __ JumpIfSmi(x0, if_false);
2999  __ CompareObjectType(x0, x10, x11, JS_REGEXP_TYPE);
3000  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3001  Split(eq, if_true, if_false, fall_through);
3002 
3003  context()->Plug(if_true, if_false);
3004 }
3005 
3006 
3007 
3008 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3009  DCHECK(expr->arguments()->length() == 0);
3010 
3011  Label materialize_true, materialize_false;
3012  Label* if_true = NULL;
3013  Label* if_false = NULL;
3014  Label* fall_through = NULL;
3015  context()->PrepareTest(&materialize_true, &materialize_false,
3016  &if_true, &if_false, &fall_through);
3017 
3018  // Get the frame pointer for the calling frame.
3020 
3021  // Skip the arguments adaptor frame if it exists.
3022  Label check_frame_marker;
3025  __ B(ne, &check_frame_marker);
3027 
3028  // Check the marker in the calling frame.
3029  __ Bind(&check_frame_marker);
3031  __ Cmp(x1, Smi::FromInt(StackFrame::CONSTRUCT));
3032  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3033  Split(eq, if_true, if_false, fall_through);
3034 
3035  context()->Plug(if_true, if_false);
3036 }
3037 
3038 
3039 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3040  ZoneList<Expression*>* args = expr->arguments();
3041  DCHECK(args->length() == 2);
3042 
3043  // Load the two objects into registers and perform the comparison.
3044  VisitForStackValue(args->at(0));
3045  VisitForAccumulatorValue(args->at(1));
3046 
3047  Label materialize_true, materialize_false;
3048  Label* if_true = NULL;
3049  Label* if_false = NULL;
3050  Label* fall_through = NULL;
3051  context()->PrepareTest(&materialize_true, &materialize_false,
3052  &if_true, &if_false, &fall_through);
3053 
3054  __ Pop(x1);
3055  __ Cmp(x0, x1);
3056  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3057  Split(eq, if_true, if_false, fall_through);
3058 
3059  context()->Plug(if_true, if_false);
3060 }
3061 
3062 
3063 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3064  ZoneList<Expression*>* args = expr->arguments();
3065  DCHECK(args->length() == 1);
3066 
3067  // ArgumentsAccessStub expects the key in x1.
3068  VisitForAccumulatorValue(args->at(0));
3069  __ Mov(x1, x0);
3070  __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
3071  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3072  __ CallStub(&stub);
3073  context()->Plug(x0);
3074 }
3075 
3076 
3077 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3078  DCHECK(expr->arguments()->length() == 0);
3079  Label exit;
3080  // Get the number of formal parameters.
3081  __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
3082 
3083  // Check if the calling frame is an arguments adaptor frame.
3087  __ B(ne, &exit);
3088 
3089  // Arguments adaptor case: Read the arguments length from the
3090  // adaptor frame.
3092 
3093  __ Bind(&exit);
3094  context()->Plug(x0);
3095 }
3096 
3097 
3098 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3099  ASM_LOCATION("FullCodeGenerator::EmitClassOf");
3100  ZoneList<Expression*>* args = expr->arguments();
3101  DCHECK(args->length() == 1);
3102  Label done, null, function, non_function_constructor;
3103 
3104  VisitForAccumulatorValue(args->at(0));
3105 
3106  // If the object is a smi, we return null.
3107  __ JumpIfSmi(x0, &null);
3108 
3109  // Check that the object is a JS object but take special care of JS
3110  // functions to make sure they have 'Function' as their class.
3111  // Assume that there are only two callable types, and one of them is at
3112  // either end of the type range for JS object types. Saves extra comparisons.
3114  __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
3115  // x10: object's map.
3116  // x11: object's type.
3117  __ B(lt, &null);
3120  __ B(eq, &function);
3121 
3122  __ Cmp(x11, LAST_SPEC_OBJECT_TYPE);
3124  LAST_SPEC_OBJECT_TYPE - 1);
3125  __ B(eq, &function);
3126  // Assume that there is no larger type.
3128 
3129  // Check if the constructor in the map is a JS function.
3130  __ Ldr(x12, FieldMemOperand(x10, Map::kConstructorOffset));
3131  __ JumpIfNotObjectType(x12, x13, x14, JS_FUNCTION_TYPE,
3132  &non_function_constructor);
3133 
3134  // x12 now contains the constructor function. Grab the
3135  // instance class name from there.
3137  __ Ldr(x0,
3139  __ B(&done);
3140 
3141  // Functions have class 'Function'.
3142  __ Bind(&function);
3143  __ LoadRoot(x0, Heap::kFunction_stringRootIndex);
3144  __ B(&done);
3145 
3146  // Objects with a non-function constructor have class 'Object'.
3147  __ Bind(&non_function_constructor);
3148  __ LoadRoot(x0, Heap::kObject_stringRootIndex);
3149  __ B(&done);
3150 
3151  // Non-JS objects have class null.
3152  __ Bind(&null);
3153  __ LoadRoot(x0, Heap::kNullValueRootIndex);
3154 
3155  // All done.
3156  __ Bind(&done);
3157 
3158  context()->Plug(x0);
3159 }
3160 
3161 
3162 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3163  // Load the arguments on the stack and call the stub.
3164  SubStringStub stub(isolate());
3165  ZoneList<Expression*>* args = expr->arguments();
3166  DCHECK(args->length() == 3);
3167  VisitForStackValue(args->at(0));
3168  VisitForStackValue(args->at(1));
3169  VisitForStackValue(args->at(2));
3170  __ CallStub(&stub);
3171  context()->Plug(x0);
3172 }
3173 
3174 
3175 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3176  // Load the arguments on the stack and call the stub.
3177  RegExpExecStub stub(isolate());
3178  ZoneList<Expression*>* args = expr->arguments();
3179  DCHECK(args->length() == 4);
3180  VisitForStackValue(args->at(0));
3181  VisitForStackValue(args->at(1));
3182  VisitForStackValue(args->at(2));
3183  VisitForStackValue(args->at(3));
3184  __ CallStub(&stub);
3185  context()->Plug(x0);
3186 }
3187 
3188 
3189 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3190  ASM_LOCATION("FullCodeGenerator::EmitValueOf");
3191  ZoneList<Expression*>* args = expr->arguments();
3192  DCHECK(args->length() == 1);
3193  VisitForAccumulatorValue(args->at(0)); // Load the object.
3194 
3195  Label done;
3196  // If the object is a smi return the object.
3197  __ JumpIfSmi(x0, &done);
3198  // If the object is not a value type, return the object.
3199  __ JumpIfNotObjectType(x0, x10, x11, JS_VALUE_TYPE, &done);
3200  __ Ldr(x0, FieldMemOperand(x0, JSValue::kValueOffset));
3201 
3202  __ Bind(&done);
3203  context()->Plug(x0);
3204 }
3205 
3206 
3207 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3208  ZoneList<Expression*>* args = expr->arguments();
3209  DCHECK(args->length() == 2);
3210  DCHECK_NE(NULL, args->at(1)->AsLiteral());
3211  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3212 
3213  VisitForAccumulatorValue(args->at(0)); // Load the object.
3214 
3215  Label runtime, done, not_date_object;
3216  Register object = x0;
3217  Register result = x0;
3218  Register stamp_addr = x10;
3219  Register stamp_cache = x11;
3220 
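      // Field 0 is the raw time value stored on the JSDate object itself. Other
      // low-numbered fields are cached on the object and are only valid while
      // the object's cache stamp matches the isolate-wide date cache stamp;
      // otherwise we call out to the C++ date helper.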
3221  __ JumpIfSmi(object, &not_date_object);
3222  __ JumpIfNotObjectType(object, x10, x10, JS_DATE_TYPE, &not_date_object);
3223 
3224  if (index->value() == 0) {
3225  __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
3226  __ B(&done);
3227  } else {
3228  if (index->value() < JSDate::kFirstUncachedField) {
3229  ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3230  __ Mov(x10, stamp);
3231  __ Ldr(stamp_addr, MemOperand(x10));
3232  __ Ldr(stamp_cache, FieldMemOperand(object, JSDate::kCacheStampOffset));
3233  __ Cmp(stamp_addr, stamp_cache);
3234  __ B(ne, &runtime);
3235  __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
3236  kPointerSize * index->value()));
3237  __ B(&done);
3238  }
3239 
3240  __ Bind(&runtime);
3241  __ Mov(x1, index);
3242  __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3243  __ B(&done);
3244  }
3245 
3246  __ Bind(&not_date_object);
3247  __ CallRuntime(Runtime::kThrowNotDateError, 0);
3248  __ Bind(&done);
3249  context()->Plug(x0);
3250 }
3251 
3252 
3253 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3254  ZoneList<Expression*>* args = expr->arguments();
3255  DCHECK_EQ(3, args->length());
3256 
3257  Register string = x0;
3258  Register index = x1;
3259  Register value = x2;
3260  Register scratch = x10;
3261 
3262  VisitForStackValue(args->at(0)); // index
3263  VisitForStackValue(args->at(1)); // value
3264  VisitForAccumulatorValue(args->at(2)); // string
3265  __ Pop(value, index);
3266 
3267  if (FLAG_debug_code) {
3268  __ AssertSmi(value, kNonSmiValue);
3269  __ AssertSmi(index, kNonSmiIndex);
3270  static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3271  __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
3272  one_byte_seq_type);
3273  }
3274 
3275  __ Add(scratch, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3276  __ SmiUntag(value);
3277  __ SmiUntag(index);
3278  __ Strb(value, MemOperand(scratch, index));
3279  context()->Plug(string);
3280 }
3281 
3282 
3283 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3284  ZoneList<Expression*>* args = expr->arguments();
3285  DCHECK_EQ(3, args->length());
3286 
3287  Register string = x0;
3288  Register index = x1;
3289  Register value = x2;
3290  Register scratch = x10;
3291 
3292  VisitForStackValue(args->at(0)); // index
3293  VisitForStackValue(args->at(1)); // value
3294  VisitForAccumulatorValue(args->at(2)); // string
3295  __ Pop(value, index);
3296 
3297  if (FLAG_debug_code) {
3298  __ AssertSmi(value, kNonSmiValue);
3299  __ AssertSmi(index, kNonSmiIndex);
3300  static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3301  __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
3302  two_byte_seq_type);
3303  }
3304 
3305  __ Add(scratch, string, SeqTwoByteString::kHeaderSize - kHeapObjectTag);
3306  __ SmiUntag(value);
3307  __ SmiUntag(index);
3308  __ Strh(value, MemOperand(scratch, index, LSL, 1));
3309  context()->Plug(string);
3310 }
3311 
3312 
3313 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3314  // Load the arguments on the stack and call the MathPow stub.
3315  ZoneList<Expression*>* args = expr->arguments();
3316  DCHECK(args->length() == 2);
3317  VisitForStackValue(args->at(0));
3318  VisitForStackValue(args->at(1));
3319  MathPowStub stub(isolate(), MathPowStub::ON_STACK);
3320  __ CallStub(&stub);
3321  context()->Plug(x0);
3322 }
3323 
3324 
3325 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3326  ZoneList<Expression*>* args = expr->arguments();
3327  DCHECK(args->length() == 2);
3328  VisitForStackValue(args->at(0)); // Load the object.
3329  VisitForAccumulatorValue(args->at(1)); // Load the value.
3330  __ Pop(x1);
3331  // x0 = value.
3332  // x1 = object.
3333 
3334  Label done;
3335  // If the object is a smi, return the value.
3336  __ JumpIfSmi(x1, &done);
3337 
3338  // If the object is not a value type, return the value.
3339  __ JumpIfNotObjectType(x1, x10, x11, JS_VALUE_TYPE, &done);
3340 
3341  // Store the value.
3342  __ Str(x0, FieldMemOperand(x1, JSValue::kValueOffset));
3343  // Update the write barrier. Save the value as it will be
3344  // overwritten by the write barrier code and is needed afterward.
3345  __ Mov(x10, x0);
3346  __ RecordWriteField(
3347  x1, JSValue::kValueOffset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
3348 
3349  __ Bind(&done);
3350  context()->Plug(x0);
3351 }
3352 
3353 
3354 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3355  ZoneList<Expression*>* args = expr->arguments();
3356  DCHECK_EQ(args->length(), 1);
3357 
3358  // Load the argument into x0 and call the stub.
3359  VisitForAccumulatorValue(args->at(0));
3360 
3361  NumberToStringStub stub(isolate());
3362  __ CallStub(&stub);
3363  context()->Plug(x0);
3364 }
3365 
3366 
3367 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3368  ZoneList<Expression*>* args = expr->arguments();
3369  DCHECK(args->length() == 1);
3370 
3371  VisitForAccumulatorValue(args->at(0));
3372 
3373  Label done;
3374  Register code = x0;
3375  Register result = x1;
3376 
3377  StringCharFromCodeGenerator generator(code, result);
3378  generator.GenerateFast(masm_);
3379  __ B(&done);
3380 
3381  NopRuntimeCallHelper call_helper;
3382  generator.GenerateSlow(masm_, call_helper);
3383 
3384  __ Bind(&done);
3385  context()->Plug(result);
3386 }
3387 
3388 
3389 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3390  ZoneList<Expression*>* args = expr->arguments();
3391  DCHECK(args->length() == 2);
3392 
3393  VisitForStackValue(args->at(0));
3394  VisitForAccumulatorValue(args->at(1));
3395 
3396  Register object = x1;
3397  Register index = x0;
3398  Register result = x3;
3399 
3400  __ Pop(object);
3401 
3402  Label need_conversion;
3403  Label index_out_of_range;
3404  Label done;
3405  StringCharCodeAtGenerator generator(object,
3406  index,
3407  result,
3408  &need_conversion,
3409  &need_conversion,
3410  &index_out_of_range,
3411  STRING_INDEX_IS_NUMBER);
3412  generator.GenerateFast(masm_);
3413  __ B(&done);
3414 
3415  __ Bind(&index_out_of_range);
3416  // When the index is out of range, the spec requires us to return NaN.
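      // E.g. "abc".charCodeAt(5) evaluates to NaN.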
3417  __ LoadRoot(result, Heap::kNanValueRootIndex);
3418  __ B(&done);
3419 
3420  __ Bind(&need_conversion);
3421  // Load the undefined value into the result register, which will
3422  // trigger conversion.
3423  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3424  __ B(&done);
3425 
3426  NopRuntimeCallHelper call_helper;
3427  generator.GenerateSlow(masm_, call_helper);
3428 
3429  __ Bind(&done);
3430  context()->Plug(result);
3431 }
3432 
3433 
3434 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3435  ZoneList<Expression*>* args = expr->arguments();
3436  DCHECK(args->length() == 2);
3437 
3438  VisitForStackValue(args->at(0));
3439  VisitForAccumulatorValue(args->at(1));
3440 
3441  Register object = x1;
3442  Register index = x0;
3443  Register result = x0;
3444 
3445  __ Pop(object);
3446 
3447  Label need_conversion;
3448  Label index_out_of_range;
3449  Label done;
3450  StringCharAtGenerator generator(object,
3451  index,
3452  x3,
3453  result,
3454  &need_conversion,
3455  &need_conversion,
3456  &index_out_of_range,
3457  STRING_INDEX_IS_NUMBER);
3458  generator.GenerateFast(masm_);
3459  __ B(&done);
3460 
3461  __ Bind(&index_out_of_range);
3462  // When the index is out of range, the spec requires us to return
3463  // the empty string.
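      // E.g. "abc".charAt(5) evaluates to "".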
3464  __ LoadRoot(result, Heap::kempty_stringRootIndex);
3465  __ B(&done);
3466 
3467  __ Bind(&need_conversion);
3468  // Move smi zero into the result register, which will trigger conversion.
3469  __ Mov(result, Smi::FromInt(0));
3470  __ B(&done);
3471 
3472  NopRuntimeCallHelper call_helper;
3473  generator.GenerateSlow(masm_, call_helper);
3474 
3475  __ Bind(&done);
3476  context()->Plug(result);
3477 }
3478 
3479 
3480 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3481  ASM_LOCATION("FullCodeGenerator::EmitStringAdd");
3482  ZoneList<Expression*>* args = expr->arguments();
3483  DCHECK_EQ(2, args->length());
3484 
3485  VisitForStackValue(args->at(0));
3486  VisitForAccumulatorValue(args->at(1));
3487 
3488  __ Pop(x1);
3489  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
3490  __ CallStub(&stub);
3491 
3492  context()->Plug(x0);
3493 }
3494 
3495 
3496 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3497  ZoneList<Expression*>* args = expr->arguments();
3498  DCHECK_EQ(2, args->length());
3499  VisitForStackValue(args->at(0));
3500  VisitForStackValue(args->at(1));
3501 
3502  StringCompareStub stub(isolate());
3503  __ CallStub(&stub);
3504  context()->Plug(x0);
3505 }
3506 
3507 
3508 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3509  ASM_LOCATION("FullCodeGenerator::EmitCallFunction");
3510  ZoneList<Expression*>* args = expr->arguments();
3511  DCHECK(args->length() >= 2);
3512 
3513  int arg_count = args->length() - 2; // 2 ~ receiver and function.
3514  for (int i = 0; i < arg_count + 1; i++) {
3515  VisitForStackValue(args->at(i));
3516  }
3517  VisitForAccumulatorValue(args->last()); // Function.
3518 
3519  Label runtime, done;
3520  // Check for non-function argument (including proxy).
3521  __ JumpIfSmi(x0, &runtime);
3522  __ JumpIfNotObjectType(x0, x1, x1, JS_FUNCTION_TYPE, &runtime);
3523 
3524  // InvokeFunction requires the function in x1. Move it in there.
3525  __ Mov(x1, x0);
3526  ParameterCount count(arg_count);
3527  __ InvokeFunction(x1, count, CALL_FUNCTION, NullCallWrapper());
3529  __ B(&done);
3530 
3531  __ Bind(&runtime);
3532  __ Push(x0);
3533  __ CallRuntime(Runtime::kCall, args->length());
3534  __ Bind(&done);
3535 
3536  context()->Plug(x0);
3537 }
3538 
3539 
3540 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3541  RegExpConstructResultStub stub(isolate());
3542  ZoneList<Expression*>* args = expr->arguments();
3543  DCHECK(args->length() == 3);
3544  VisitForStackValue(args->at(0));
3545  VisitForStackValue(args->at(1));
3546  VisitForAccumulatorValue(args->at(2));
3547  __ Pop(x1, x2);
3548  __ CallStub(&stub);
3549  context()->Plug(x0);
3550 }
3551 
3552 
3553 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3554  ZoneList<Expression*>* args = expr->arguments();
3555  DCHECK_EQ(2, args->length());
3556  DCHECK_NE(NULL, args->at(0)->AsLiteral());
3557  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3558 
3559  Handle<FixedArray> jsfunction_result_caches(
3560  isolate()->native_context()->jsfunction_result_caches());
3561  if (jsfunction_result_caches->length() <= cache_id) {
3562  __ Abort(kAttemptToUseUndefinedCache);
3563  __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
3564  context()->Plug(x0);
3565  return;
3566  }
3567 
3568  VisitForAccumulatorValue(args->at(1));
3569 
3570  Register key = x0;
3571  Register cache = x1;
3572  __ Ldr(cache, GlobalObjectMemOperand());
3574  __ Ldr(cache, ContextMemOperand(cache,
3576  __ Ldr(cache,
3578 
3579  Label done;
3580  __ Ldrsw(x2, UntagSmiFieldMemOperand(cache,
3582  __ Add(x3, cache, FixedArray::kHeaderSize - kHeapObjectTag);
3583  __ Add(x3, x3, Operand(x2, LSL, kPointerSizeLog2));
3584 
3585  // Load the key and data from the cache.
3586  __ Ldp(x2, x3, MemOperand(x3));
3587 
3588  __ Cmp(key, x2);
3589  __ CmovX(x0, x3, eq);
3590  __ B(eq, &done);
3591 
3592  // Call runtime to perform the lookup.
3593  __ Push(cache, key);
3594  __ CallRuntime(Runtime::kGetFromCache, 2);
3595 
3596  __ Bind(&done);
3597  context()->Plug(x0);
3598 }
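
EmitGetFromCache probes a per-native-context result cache: the cache array holds (key, value) pairs plus a finger index recording the most recently used pair; only that one pair is compared inline, and any miss goes to Runtime::kGetFromCache. A minimal sketch of the same probe, assuming a flat array of alternating keys and values (the ResultCache type below is illustrative, not V8's FixedArray layout):

#include <cstddef>
#include <vector>

// Illustrative (key, value) pair cache with a "finger" remembering the last
// hit. Only the pair under the finger is checked on the fast path; anything
// else is treated as a miss and left to the runtime.
struct ResultCache {
  size_t finger = 0;         // index of the cached key slot
  std::vector<int> entries;  // flat array: key0, value0, key1, value1, ...

  bool FastProbe(int key, int* value_out) const {
    if (finger + 1 >= entries.size()) return false;
    if (entries[finger] != key) return false;  // miss -> slow path
    *value_out = entries[finger + 1];          // hit: pair is (key, value)
    return true;
  }
};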
3599 
3600 
3601 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3602  ZoneList<Expression*>* args = expr->arguments();
3603  VisitForAccumulatorValue(args->at(0));
3604 
3605  Label materialize_true, materialize_false;
3606  Label* if_true = NULL;
3607  Label* if_false = NULL;
3608  Label* fall_through = NULL;
3609  context()->PrepareTest(&materialize_true, &materialize_false,
3610  &if_true, &if_false, &fall_through);
3611 
3614  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3615  Split(eq, if_true, if_false, fall_through);
3616 
3617  context()->Plug(if_true, if_false);
3618 }
3619 
3620 
3621 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3622  ZoneList<Expression*>* args = expr->arguments();
3623  DCHECK(args->length() == 1);
3624  VisitForAccumulatorValue(args->at(0));
3625 
3626  __ AssertString(x0);
3627 
3629  __ IndexFromHash(x10, x0);
3630 
3631  context()->Plug(x0);
3632 }
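
EmitHasCachedArrayIndex and EmitGetCachedArrayIndex exploit the fact that a string's hash field can cache the numeric array index the string spells, so converting such a string to an index is a shift and mask rather than a parse. A small sketch of that extraction; the flag bit and shift below are placeholders, not V8's actual hash-field layout:

#include <cstdint>

// Hypothetical hash-field layout: one flag bit says an array index is cached,
// and the index itself sits in the upper bits.
constexpr uint32_t kContainsCachedIndexBit = 1u << 0;  // placeholder flag bit
constexpr int kCachedIndexShift = 2;                   // placeholder shift

bool HasCachedArrayIndex(uint32_t hash_field) {
  return (hash_field & kContainsCachedIndexBit) != 0;
}

uint32_t IndexFromHashField(uint32_t hash_field) {
  // Only meaningful when HasCachedArrayIndex() is true.
  return hash_field >> kCachedIndexShift;
}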
3633 
3634 
3635 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
3636  ASM_LOCATION("FullCodeGenerator::EmitFastOneByteArrayJoin");
3637 
3638  ZoneList<Expression*>* args = expr->arguments();
3639  DCHECK(args->length() == 2);
3640  VisitForStackValue(args->at(1));
3641  VisitForAccumulatorValue(args->at(0));
3642 
3643  Register array = x0;
3644  Register result = x0;
3645  Register elements = x1;
3646  Register element = x2;
3647  Register separator = x3;
3648  Register array_length = x4;
3649  Register result_pos = x5;
3650  Register map = x6;
3651  Register string_length = x10;
3652  Register elements_end = x11;
3653  Register string = x12;
3654  Register scratch1 = x13;
3655  Register scratch2 = x14;
3656  Register scratch3 = x7;
3657  Register separator_length = x15;
3658 
3659  Label bailout, done, one_char_separator, long_separator,
3660  non_trivial_array, not_size_one_array, loop,
3661  empty_separator_loop, one_char_separator_loop,
3662  one_char_separator_loop_entry, long_separator_loop;
3663 
3664  // The separator operand is on the stack.
3665  __ Pop(separator);
3666 
3667  // Check that the array is a JSArray.
3668  __ JumpIfSmi(array, &bailout);
3669  __ JumpIfNotObjectType(array, map, scratch1, JS_ARRAY_TYPE, &bailout);
3670 
3671  // Check that the array has fast elements.
3672  __ CheckFastElements(map, scratch1, &bailout);
3673 
3674  // If the array has length zero, return the empty string.
3675  // Load and untag the length of the array.
3676  // It is an unsigned value, so we can skip sign extension.
3677  // We assume little endianness.
3678  __ Ldrsw(array_length,
3680  __ Cbnz(array_length, &non_trivial_array);
3681  __ LoadRoot(result, Heap::kempty_stringRootIndex);
3682  __ B(&done);
3683 
3684  __ Bind(&non_trivial_array);
3685  // Get the FixedArray containing array's elements.
3686  __ Ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
3687 
3688  // Check that all array elements are sequential one-byte strings, and
3689  // accumulate the sum of their lengths.
3690  __ Mov(string_length, 0);
3691  __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
3692  __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
3693  // Loop condition: while (element < elements_end).
3694  // Live values in registers:
3695  // elements: Fixed array of strings.
3696  // array_length: Length of the fixed array of strings (not smi)
3697  // separator: Separator string
3698  // string_length: Accumulated sum of string lengths (not smi).
3699  // element: Current array element.
3700  // elements_end: Array end.
3701  if (FLAG_debug_code) {
3702  __ Cmp(array_length, 0);
3703  __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
3704  }
3705  __ Bind(&loop);
3706  __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
3707  __ JumpIfSmi(string, &bailout);
3708  __ Ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
3709  __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3710  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
3711  __ Ldrsw(scratch1,
3713  __ Adds(string_length, string_length, scratch1);
3714  __ B(vs, &bailout);
3715  __ Cmp(element, elements_end);
3716  __ B(lt, &loop);
3717 
3718  // If array_length is 1, return elements[0], a string.
3719  __ Cmp(array_length, 1);
3720  __ B(ne, &not_size_one_array);
3721  __ Ldr(result, FieldMemOperand(elements, FixedArray::kHeaderSize));
3722  __ B(&done);
3723 
3724  __ Bind(&not_size_one_array);
3725 
3726  // Live values in registers:
3727  // separator: Separator string
3728  // array_length: Length of the array (not smi).
3729  // string_length: Sum of string lengths (not smi).
3730  // elements: FixedArray of strings.
3731 
3732  // Check that the separator is a flat one-byte string.
3733  __ JumpIfSmi(separator, &bailout);
3734  __ Ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
3735  __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3736  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
3737 
3738  // Add (separator length times array_length) - separator length to the
3739  // string_length to get the length of the result string.
3740  // Load the separator length as untagged.
3741  // We assume little endianness, and that the length is positive.
3742  __ Ldrsw(separator_length,
3743  UntagSmiFieldMemOperand(separator,
3745  __ Sub(string_length, string_length, separator_length);
3746  __ Umaddl(string_length, array_length.W(), separator_length.W(),
3747  string_length);
3748 
3749  // Get first element in the array.
3750  __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
3751  // Live values in registers:
3752  // element: First array element
3753  // separator: Separator string
3754  // string_length: Length of result string (not smi)
3755  // array_length: Length of the array (not smi).
3756  __ AllocateOneByteString(result, string_length, scratch1, scratch2, scratch3,
3757  &bailout);
3758 
3759  // Prepare for looping. Set up elements_end to end of the array. Set
3760  // result_pos to the position of the result where to write the first
3761  // character.
3762  // TODO(all): useless unless AllocateOneByteString trashes the register.
3763  __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
3764  __ Add(result_pos, result, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3765 
3766  // Check the length of the separator.
3767  __ Cmp(separator_length, 1);
3768  __ B(eq, &one_char_separator);
3769  __ B(gt, &long_separator);
3770 
3771  // Empty separator case
3772  __ Bind(&empty_separator_loop);
3773  // Live values in registers:
3774  // result_pos: the position to which we are currently copying characters.
3775  // element: Current array element.
3776  // elements_end: Array end.
3777 
3778  // Copy next array element to the result.
3779  __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
3780  __ Ldrsw(string_length,
3782  __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3783  __ CopyBytes(result_pos, string, string_length, scratch1);
3784  __ Cmp(element, elements_end);
3785  __ B(lt, &empty_separator_loop); // End while (element < elements_end).
3786  __ B(&done);
3787 
3788  // One-character separator case
3789  __ Bind(&one_char_separator);
3790  // Replace separator with its one-byte character value.
3791  __ Ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
3792  // Jump into the loop after the code that copies the separator, so the first
3793  // element is not preceded by a separator
3794  __ B(&one_char_separator_loop_entry);
3795 
3796  __ Bind(&one_char_separator_loop);
3797  // Live values in registers:
3798  // result_pos: the position to which we are currently copying characters.
3799  // element: Current array element.
3800  // elements_end: Array end.
3801  // separator: Single separator one-byte char (in lower byte).
3802 
3803  // Copy the separator character to the result.
3804  __ Strb(separator, MemOperand(result_pos, 1, PostIndex));
3805 
3806  // Copy next array element to the result.
3807  __ Bind(&one_char_separator_loop_entry);
3808  __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
3809  __ Ldrsw(string_length,
3811  __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3812  __ CopyBytes(result_pos, string, string_length, scratch1);
3813  __ Cmp(element, elements_end);
3814  __ B(lt, &one_char_separator_loop); // End while (element < elements_end).
3815  __ B(&done);
3816 
3817  // Long separator case (separator is more than one character). Entry is at the
3818  // label long_separator below.
3819  __ Bind(&long_separator_loop);
3820  // Live values in registers:
3821  // result_pos: the position to which we are currently copying characters.
3822  // element: Current array element.
3823  // elements_end: Array end.
3824  // separator: Separator string.
3825 
3826  // Copy the separator to the result.
3827  // TODO(all): hoist next two instructions.
3828  __ Ldrsw(string_length,
3830  __ Add(string, separator, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3831  __ CopyBytes(result_pos, string, string_length, scratch1);
3832 
3833  __ Bind(&long_separator);
3834  __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
3835  __ Ldrsw(string_length,
3837  __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3838  __ CopyBytes(result_pos, string, string_length, scratch1);
3839  __ Cmp(element, elements_end);
3840  __ B(lt, &long_separator_loop); // End while (element < elements_end).
3841  __ B(&done);
3842 
3843  __ Bind(&bailout);
3844  // Returning undefined will force slower code to handle it.
3845  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3846  __ Bind(&done);
3847  context()->Plug(result);
3848 }
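
The fast path above is essentially Array.prototype.join specialised for flat one-byte strings: verify every element (and the separator) is a sequential one-byte string, sum the element lengths, add (n - 1) separator lengths, allocate the result once, and then copy with dedicated loops for the empty, single-character and longer separator cases. A compact std::string sketch of the same two-pass algorithm (illustrative only):

#include <string>
#include <vector>

// One pass to compute the total length, one allocation, one pass to copy --
// the same structure as the generated fast path above.
std::string FastJoin(const std::vector<std::string>& elements,
                     const std::string& separator) {
  if (elements.empty()) return std::string();    // length 0 -> ""
  if (elements.size() == 1) return elements[0];  // length 1 -> elements[0]

  // First loop: accumulate the payload length (the generated code bails out
  // here on overflow or on a non-one-byte element).
  size_t total = separator.size() * (elements.size() - 1);
  for (const std::string& s : elements) total += s.size();

  // Allocate the result once, then copy elements and separators.
  std::string result;
  result.reserve(total);
  for (size_t i = 0; i < elements.size(); ++i) {
    if (i != 0) result += separator;  // every element but the first is preceded by a separator
    result += elements[i];
  }
  return result;
}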
3849 
3850 
3851 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
3852  DCHECK(expr->arguments()->length() == 0);
3853  ExternalReference debug_is_active =
3854  ExternalReference::debug_is_active_address(isolate());
3855  __ Mov(x10, debug_is_active);
3856  __ Ldrb(x0, MemOperand(x10));
3857  __ SmiTag(x0);
3858  context()->Plug(x0);
3859 }
3860 
3861 
3862 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3863  if (expr->function() != NULL &&
3864  expr->function()->intrinsic_type == Runtime::INLINE) {
3865  Comment cmnt(masm_, "[ InlineRuntimeCall");
3866  EmitInlineRuntimeCall(expr);
3867  return;
3868  }
3869 
3870  Comment cmnt(masm_, "[ CallRuntime");
3871  ZoneList<Expression*>* args = expr->arguments();
3872  int arg_count = args->length();
3873 
3874  if (expr->is_jsruntime()) {
3875  // Push the builtins object as the receiver.
3876  __ Ldr(x10, GlobalObjectMemOperand());
3880 
3881  // Load the function from the receiver.
3882  Handle<String> name = expr->name();
3883  __ Mov(LoadDescriptor::NameRegister(), Operand(name));
3884  if (FLAG_vector_ics) {
3886  Smi::FromInt(expr->CallRuntimeFeedbackSlot()));
3888  } else {
3889  CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
3890  }
3891 
3892  // Push the target function under the receiver.
3893  __ Pop(x10);
3894  __ Push(x0, x10);
3895 
3896  int arg_count = args->length();
3897  for (int i = 0; i < arg_count; i++) {
3898  VisitForStackValue(args->at(i));
3899  }
3900 
3901  // Record source position of the IC call.
3902  SetSourcePosition(expr->position());
3903  CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
3904  __ Peek(x1, (arg_count + 1) * kPointerSize);
3905  __ CallStub(&stub);
3906 
3907  // Restore context register.
3909 
3910  context()->DropAndPlug(1, x0);
3911  } else {
3912  // Push the arguments ("left-to-right").
3913  for (int i = 0; i < arg_count; i++) {
3914  VisitForStackValue(args->at(i));
3915  }
3916 
3917  // Call the C runtime function.
3918  __ CallRuntime(expr->function(), arg_count);
3919  context()->Plug(x0);
3920  }
3921 }
3922 
3923 
3924 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3925  switch (expr->op()) {
3926  case Token::DELETE: {
3927  Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3928  Property* property = expr->expression()->AsProperty();
3929  VariableProxy* proxy = expr->expression()->AsVariableProxy();
3930 
3931  if (property != NULL) {
3932  VisitForStackValue(property->obj());
3933  VisitForStackValue(property->key());
3934  __ Mov(x10, Smi::FromInt(strict_mode()));
3935  __ Push(x10);
3936  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3937  context()->Plug(x0);
3938  } else if (proxy != NULL) {
3939  Variable* var = proxy->var();
3940  // Delete of an unqualified identifier is disallowed in strict mode
3941  // but "delete this" is allowed.
3942  DCHECK(strict_mode() == SLOPPY || var->is_this());
3943  if (var->IsUnallocated()) {
3944  __ Ldr(x12, GlobalObjectMemOperand());
3945  __ Mov(x11, Operand(var->name()));
3946  __ Mov(x10, Smi::FromInt(SLOPPY));
3947  __ Push(x12, x11, x10);
3948  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3949  context()->Plug(x0);
3950  } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3951  // Result of deleting non-global, non-dynamic variables is false.
3952  // The subexpression does not have side effects.
3953  context()->Plug(var->is_this());
3954  } else {
3955  // Non-global variable. Call the runtime to try to delete from the
3956  // context where the variable was introduced.
3957  __ Mov(x2, Operand(var->name()));
3958  __ Push(context_register(), x2);
3959  __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
3960  context()->Plug(x0);
3961  }
3962  } else {
3963  // Result of deleting non-property, non-variable reference is true.
3964  // The subexpression may have side effects.
3965  VisitForEffect(expr->expression());
3966  context()->Plug(true);
3967  }
3968  break;
3970  }
3971  case Token::VOID: {
3972  Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3973  VisitForEffect(expr->expression());
3974  context()->Plug(Heap::kUndefinedValueRootIndex);
3975  break;
3976  }
3977  case Token::NOT: {
3978  Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3979  if (context()->IsEffect()) {
3980  // Unary NOT has no side effects so it's only necessary to visit the
3981  // subexpression. Match the optimizing compiler by not branching.
3982  VisitForEffect(expr->expression());
3983  } else if (context()->IsTest()) {
3984  const TestContext* test = TestContext::cast(context());
3985  // The labels are swapped for the recursive call.
3986  VisitForControl(expr->expression(),
3987  test->false_label(),
3988  test->true_label(),
3989  test->fall_through());
3990  context()->Plug(test->true_label(), test->false_label());
3991  } else {
3992  DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
3993  // TODO(jbramley): This could be much more efficient using (for
3994  // example) the CSEL instruction.
3995  Label materialize_true, materialize_false, done;
3996  VisitForControl(expr->expression(),
3997  &materialize_false,
3998  &materialize_true,
3999  &materialize_true);
4000 
4001  __ Bind(&materialize_true);
4002  PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4003  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
4004  __ B(&done);
4005 
4006  __ Bind(&materialize_false);
4007  PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4008  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
4009  __ B(&done);
4010 
4011  __ Bind(&done);
4012  if (context()->IsStackValue()) {
4013  __ Push(result_register());
4014  }
4015  }
4016  break;
4017  }
4018  case Token::TYPEOF: {
4019  Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4020  {
4021  StackValueContext context(this);
4022  VisitForTypeofValue(expr->expression());
4023  }
4024  __ CallRuntime(Runtime::kTypeof, 1);
4025  context()->Plug(x0);
4026  break;
4027  }
4028  default:
4029  UNREACHABLE();
4030  }
4031 }
4032 
4033 
4034 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4035  DCHECK(expr->expression()->IsValidReferenceExpression());
4036 
4037  Comment cmnt(masm_, "[ CountOperation");
4038  SetSourcePosition(expr->position());
4039 
4040  // Expression can only be a property, a global or a (parameter or local)
4041  // slot.
4043  LhsKind assign_type = VARIABLE;
4044  Property* prop = expr->expression()->AsProperty();
4045  // In case of a property we use the uninitialized expression context
4046  // of the key to detect a named property.
4047  if (prop != NULL) {
4048  assign_type =
4049  (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4050  if (prop->IsSuperAccess()) {
4051  // throw exception.
4052  VisitSuperReference(prop->obj()->AsSuperReference());
4053  return;
4054  }
4055  }
4056 
4057  // Evaluate expression and get value.
4058  if (assign_type == VARIABLE) {
4059  DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4060  AccumulatorValueContext context(this);
4061  EmitVariableLoad(expr->expression()->AsVariableProxy());
4062  } else {
4063  // Reserve space for result of postfix operation.
4064  if (expr->is_postfix() && !context()->IsEffect()) {
4065  __ Push(xzr);
4066  }
4067  if (assign_type == NAMED_PROPERTY) {
4068  // Put the object both on the stack and in the register.
4069  VisitForStackValue(prop->obj());
4071  EmitNamedPropertyLoad(prop);
4072  } else {
4073  // KEYED_PROPERTY
4074  VisitForStackValue(prop->obj());
4075  VisitForStackValue(prop->key());
4077  __ Peek(LoadDescriptor::NameRegister(), 0);
4078  EmitKeyedPropertyLoad(prop);
4079  }
4080  }
4081 
4082  // We need a second deoptimization point after loading the value
4083  // in case evaluating the property load may have a side effect.
4084  if (assign_type == VARIABLE) {
4085  PrepareForBailout(expr->expression(), TOS_REG);
4086  } else {
4087  PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4088  }
4089 
4090  // Inline smi case if we are in a loop.
4091  Label stub_call, done;
4092  JumpPatchSite patch_site(masm_);
4093 
4094  int count_value = expr->op() == Token::INC ? 1 : -1;
4095  if (ShouldInlineSmiCase(expr->op())) {
4096  Label slow;
4097  patch_site.EmitJumpIfNotSmi(x0, &slow);
4098 
4099  // Save result for postfix expressions.
4100  if (expr->is_postfix()) {
4101  if (!context()->IsEffect()) {
4102  // Save the result on the stack. If we have a named or keyed property we
4103  // store the result under the receiver that is currently on top of the
4104  // stack.
4105  switch (assign_type) {
4106  case VARIABLE:
4107  __ Push(x0);
4108  break;
4109  case NAMED_PROPERTY:
4110  __ Poke(x0, kPointerSize);
4111  break;
4112  case KEYED_PROPERTY:
4113  __ Poke(x0, kPointerSize * 2);
4114  break;
4115  }
4116  }
4117  }
4118 
4119  __ Adds(x0, x0, Smi::FromInt(count_value));
4120  __ B(vc, &done);
4121  // Call stub. Undo operation first.
4122  __ Sub(x0, x0, Smi::FromInt(count_value));
4123  __ B(&stub_call);
4124  __ Bind(&slow);
4125  }
4126  ToNumberStub convert_stub(isolate());
4127  __ CallStub(&convert_stub);
4128 
4129  // Save result for postfix expressions.
4130  if (expr->is_postfix()) {
4131  if (!context()->IsEffect()) {
4132  // Save the result on the stack. If we have a named or keyed property
4133  // we store the result under the receiver that is currently on top
4134  // of the stack.
4135  switch (assign_type) {
4136  case VARIABLE:
4137  __ Push(x0);
4138  break;
4139  case NAMED_PROPERTY:
4140  __ Poke(x0, kXRegSize);
4141  break;
4142  case KEYED_PROPERTY:
4143  __ Poke(x0, 2 * kXRegSize);
4144  break;
4145  }
4146  }
4147  }
4148 
4149  __ Bind(&stub_call);
4150  __ Mov(x1, x0);
4151  __ Mov(x0, Smi::FromInt(count_value));
4152 
4153  // Record position before stub call.
4154  SetSourcePosition(expr->position());
4155 
4156  {
4157  Assembler::BlockPoolsScope scope(masm_);
4158  Handle<Code> code =
4159  CodeFactory::BinaryOpIC(isolate(), Token::ADD, NO_OVERWRITE).code();
4160  CallIC(code, expr->CountBinOpFeedbackId());
4161  patch_site.EmitPatchInfo();
4162  }
4163  __ Bind(&done);
4164 
4165  // Store the value returned in x0.
4166  switch (assign_type) {
4167  case VARIABLE:
4168  if (expr->is_postfix()) {
4169  { EffectContext context(this);
4170  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4171  Token::ASSIGN);
4172  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4173  context.Plug(x0);
4174  }
4175  // For all contexts except EffectContext we have the result on
4176  // top of the stack.
4177  if (!context()->IsEffect()) {
4178  context()->PlugTOS();
4179  }
4180  } else {
4181  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4182  Token::ASSIGN);
4183  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4184  context()->Plug(x0);
4185  }
4186  break;
4187  case NAMED_PROPERTY: {
4189  Operand(prop->key()->AsLiteral()->value()));
4191  CallStoreIC(expr->CountStoreFeedbackId());
4192  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4193  if (expr->is_postfix()) {
4194  if (!context()->IsEffect()) {
4195  context()->PlugTOS();
4196  }
4197  } else {
4198  context()->Plug(x0);
4199  }
4200  break;
4201  }
4202  case KEYED_PROPERTY: {
4205  Handle<Code> ic =
4206  CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
4207  CallIC(ic, expr->CountStoreFeedbackId());
4208  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4209  if (expr->is_postfix()) {
4210  if (!context()->IsEffect()) {
4211  context()->PlugTOS();
4212  }
4213  } else {
4214  context()->Plug(x0);
4215  }
4216  break;
4217  }
4218  }
4219 }
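
The inline smi case above works because a smi is an integer stored pre-shifted in the tagged word (a 32-bit payload on arm64), so ++/-- is an ordinary add of the tagged constant: Adds followed by B(vc) keeps the fast path only when the add did not overflow, otherwise the operation is undone and the BinaryOpIC stub is called. A small sketch of the arithmetic, using the GCC/Clang overflow builtin (the constants are illustrative):

#include <cstdint>

// Sketch of the tagged-increment fast path. Adding two tagged smis is a plain
// add, and signed overflow of that add means the result no longer fits in the
// smi range -- exactly what the B(vc) check detects.
constexpr int kSmiShift = 32;  // arm64 keeps the smi payload in the upper 32 bits

inline int64_t SmiTag(int32_t value) {
  return static_cast<int64_t>(value) << kSmiShift;
}

// Returns false when the increment overflows the smi range, in which case the
// caller must undo the operation and take the stub/runtime path.
inline bool SmiIncrement(int64_t tagged, int32_t count_value, int64_t* out) {
  return !__builtin_add_overflow(tagged, SmiTag(count_value), out);
}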
4220 
4221 
4222 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4223  DCHECK(!context()->IsEffect());
4224  DCHECK(!context()->IsTest());
4225  VariableProxy* proxy = expr->AsVariableProxy();
4226  if (proxy != NULL && proxy->var()->IsUnallocated()) {
4227  Comment cmnt(masm_, "Global variable");
4229  __ Mov(LoadDescriptor::NameRegister(), Operand(proxy->name()));
4230  if (FLAG_vector_ics) {
4232  Smi::FromInt(proxy->VariableFeedbackSlot()));
4233  }
4234  // Use a regular load, not a contextual load, to avoid a reference
4235  // error.
4237  PrepareForBailout(expr, TOS_REG);
4238  context()->Plug(x0);
4239  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4240  Label done, slow;
4241 
4242  // Generate code for loading from variables potentially shadowed
4243  // by eval-introduced variables.
4244  EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);
4245 
4246  __ Bind(&slow);
4247  __ Mov(x0, Operand(proxy->name()));
4248  __ Push(cp, x0);
4249  __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
4250  PrepareForBailout(expr, TOS_REG);
4251  __ Bind(&done);
4252 
4253  context()->Plug(x0);
4254  } else {
4255  // This expression cannot throw a reference error at the top level.
4257  }
4258 }
4259 
4260 
4261 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4262  Expression* sub_expr,
4263  Handle<String> check) {
4264  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof");
4265  Comment cmnt(masm_, "[ EmitLiteralCompareTypeof");
4266  Label materialize_true, materialize_false;
4267  Label* if_true = NULL;
4268  Label* if_false = NULL;
4269  Label* fall_through = NULL;
4270  context()->PrepareTest(&materialize_true, &materialize_false,
4271  &if_true, &if_false, &fall_through);
4272 
4273  { AccumulatorValueContext context(this);
4274  VisitForTypeofValue(sub_expr);
4275  }
4276  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4277 
4278  Factory* factory = isolate()->factory();
4279  if (String::Equals(check, factory->number_string())) {
4280  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof number_string");
4281  __ JumpIfSmi(x0, if_true);
4283  __ CompareRoot(x0, Heap::kHeapNumberMapRootIndex);
4284  Split(eq, if_true, if_false, fall_through);
4285  } else if (String::Equals(check, factory->string_string())) {
4286  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof string_string");
4287  __ JumpIfSmi(x0, if_false);
4288  // Check for undetectable objects => false.
4289  __ JumpIfObjectType(x0, x0, x1, FIRST_NONSTRING_TYPE, if_false, ge);
4290  __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
4291  __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_true, if_false,
4292  fall_through);
4293  } else if (String::Equals(check, factory->symbol_string())) {
4294  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof symbol_string");
4295  __ JumpIfSmi(x0, if_false);
4296  __ CompareObjectType(x0, x0, x1, SYMBOL_TYPE);
4297  Split(eq, if_true, if_false, fall_through);
4298  } else if (String::Equals(check, factory->boolean_string())) {
4299  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof boolean_string");
4300  __ JumpIfRoot(x0, Heap::kTrueValueRootIndex, if_true);
4301  __ CompareRoot(x0, Heap::kFalseValueRootIndex);
4302  Split(eq, if_true, if_false, fall_through);
4303  } else if (String::Equals(check, factory->undefined_string())) {
4304  ASM_LOCATION(
4305  "FullCodeGenerator::EmitLiteralCompareTypeof undefined_string");
4306  __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, if_true);
4307  __ JumpIfSmi(x0, if_false);
4308  // Check for undetectable objects => true.
4310  __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
4311  __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_false, if_true,
4312  fall_through);
4313  } else if (String::Equals(check, factory->function_string())) {
4314  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof function_string");
4315  __ JumpIfSmi(x0, if_false);
4317  __ JumpIfObjectType(x0, x10, x11, JS_FUNCTION_TYPE, if_true);
4318  __ CompareAndSplit(x11, JS_FUNCTION_PROXY_TYPE, eq, if_true, if_false,
4319  fall_through);
4320 
4321  } else if (String::Equals(check, factory->object_string())) {
4322  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof object_string");
4323  __ JumpIfSmi(x0, if_false);
4324  __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
4325  // Check for JS objects => true.
4326  Register map = x10;
4327  __ JumpIfObjectType(x0, map, x11, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE,
4328  if_false, lt);
4329  __ CompareInstanceType(map, x11, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4330  __ B(gt, if_false);
4331  // Check for undetectable objects => false.
4333 
4334  __ TestAndSplit(x10, 1 << Map::kIsUndetectable, if_true, if_false,
4335  fall_through);
4336 
4337  } else {
4338  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof other");
4339  if (if_false != fall_through) __ B(if_false);
4340  }
4341  context()->Plug(if_true, if_false);
4342 }
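
EmitLiteralCompareTypeof specialises comparisons such as typeof x == "number" by testing the operand's type directly instead of materialising the typeof string. A simplified C++ model of the dispatch order implemented above (the ValueModel struct is purely illustrative); note how undetectable objects report "undefined" and how null lands in "object":

#include <string>

// Flags standing in for the map/instance-type checks performed above.
struct ValueModel {
  bool is_smi, is_heap_number, is_string, is_symbol, is_boolean,
       is_undefined, is_null, is_function, is_undetectable;
};

std::string TypeofName(const ValueModel& v) {
  if (v.is_smi || v.is_heap_number) return "number";
  if (v.is_string && !v.is_undetectable) return "string";
  if (v.is_symbol) return "symbol";
  if (v.is_boolean) return "boolean";
  if (v.is_undefined || v.is_undetectable) return "undefined";
  if (v.is_function) return "function";
  return "object";  // includes null, as in the generated comparison above
}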
4343 
4344 
4345 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4346  Comment cmnt(masm_, "[ CompareOperation");
4347  SetSourcePosition(expr->position());
4348 
4349  // Try to generate an optimized comparison with a literal value.
4350  // TODO(jbramley): This only checks common values like NaN or undefined.
4351  // Should it also handle ARM64 immediate operands?
4352  if (TryLiteralCompare(expr)) {
4353  return;
4354  }
4355 
4356  // Assign labels according to context()->PrepareTest.
4357  Label materialize_true;
4358  Label materialize_false;
4359  Label* if_true = NULL;
4360  Label* if_false = NULL;
4361  Label* fall_through = NULL;
4362  context()->PrepareTest(&materialize_true, &materialize_false,
4363  &if_true, &if_false, &fall_through);
4364 
4365  Token::Value op = expr->op();
4366  VisitForStackValue(expr->left());
4367  switch (op) {
4368  case Token::IN:
4369  VisitForStackValue(expr->right());
4370  __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4371  PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4372  __ CompareRoot(x0, Heap::kTrueValueRootIndex);
4373  Split(eq, if_true, if_false, fall_through);
4374  break;
4375 
4376  case Token::INSTANCEOF: {
4377  VisitForStackValue(expr->right());
4378  InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
4379  __ CallStub(&stub);
4380  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4381  // The stub returns 0 for true.
4382  __ CompareAndSplit(x0, 0, eq, if_true, if_false, fall_through);
4383  break;
4384  }
4385 
4386  default: {
4387  VisitForAccumulatorValue(expr->right());
4389 
4390  // Pop the stack value.
4391  __ Pop(x1);
4392 
4393  JumpPatchSite patch_site(masm_);
4394  if (ShouldInlineSmiCase(op)) {
4395  Label slow_case;
4396  patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
4397  __ Cmp(x1, x0);
4398  Split(cond, if_true, if_false, NULL);
4399  __ Bind(&slow_case);
4400  }
4401 
4402  // Record position and call the compare IC.
4403  SetSourcePosition(expr->position());
4404  Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
4405  CallIC(ic, expr->CompareOperationFeedbackId());
4406  patch_site.EmitPatchInfo();
4407  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4408  __ CompareAndSplit(x0, 0, cond, if_true, if_false, fall_through);
4409  }
4410  }
4411 
4412  // Convert the result of the comparison into one expected for this
4413  // expression's context.
4414  context()->Plug(if_true, if_false);
4415 }
4416 
4417 
4418 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4419  Expression* sub_expr,
4420  NilValue nil) {
4421  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareNil");
4422  Label materialize_true, materialize_false;
4423  Label* if_true = NULL;
4424  Label* if_false = NULL;
4425  Label* fall_through = NULL;
4426  context()->PrepareTest(&materialize_true, &materialize_false,
4427  &if_true, &if_false, &fall_through);
4428 
4429  VisitForAccumulatorValue(sub_expr);
4430  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4431 
4432  if (expr->op() == Token::EQ_STRICT) {
4433  Heap::RootListIndex nil_value = nil == kNullValue ?
4434  Heap::kNullValueRootIndex :
4435  Heap::kUndefinedValueRootIndex;
4436  __ CompareRoot(x0, nil_value);
4437  Split(eq, if_true, if_false, fall_through);
4438  } else {
4439  Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4440  CallIC(ic, expr->CompareOperationFeedbackId());
4441  __ CompareAndSplit(x0, 0, ne, if_true, if_false, fall_through);
4442  }
4443 
4444  context()->Plug(if_true, if_false);
4445 }
4446 
4447 
4448 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4450  context()->Plug(x0);
4451 }
4452 
4453 
4454 void FullCodeGenerator::VisitYield(Yield* expr) {
4455  Comment cmnt(masm_, "[ Yield");
4456  // Evaluate yielded value first; the initial iterator definition depends on
4457  // this. It stays on the stack while we update the iterator.
4458  VisitForStackValue(expr->expression());
4459 
4460  // TODO(jbramley): Tidy this up once the merge is done, using named registers
4461  // and suchlike. The implementation changes a little by bleeding_edge so I
4462  // don't want to spend too much time on it now.
4463 
4464  switch (expr->yield_kind()) {
4465  case Yield::kSuspend:
4466  // Pop value from top-of-stack slot; box result into result register.
4467  EmitCreateIteratorResult(false);
4468  __ Push(result_register());
4469  // Fall through.
4470  case Yield::kInitial: {
4471  Label suspend, continuation, post_runtime, resume;
4472 
4473  __ B(&suspend);
4474 
4475  // TODO(jbramley): This label is bound here because the following code
4476  // looks at its pos(). Is it possible to do something more efficient here,
4477  // perhaps using Adr?
4478  __ Bind(&continuation);
4479  __ B(&resume);
4480 
4481  __ Bind(&suspend);
4482  VisitForAccumulatorValue(expr->generator_object());
4483  DCHECK((continuation.pos() > 0) && Smi::IsValid(continuation.pos()));
4484  __ Mov(x1, Smi::FromInt(continuation.pos()));
4487  __ Mov(x1, cp);
4488  __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
4491  __ Cmp(__ StackPointer(), x1);
4492  __ B(eq, &post_runtime);
4493  __ Push(x0); // generator object
4494  __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
4496  __ Bind(&post_runtime);
4497  __ Pop(result_register());
4499 
4500  __ Bind(&resume);
4501  context()->Plug(result_register());
4502  break;
4503  }
4504 
4505  case Yield::kFinal: {
4506  VisitForAccumulatorValue(expr->generator_object());
4510  // Pop value from top-of-stack slot, box result into result register.
4514  break;
4515  }
4516 
4517  case Yield::kDelegating: {
4518  VisitForStackValue(expr->generator_object());
4519 
4520  // Initial stack layout is as follows:
4521  // [sp + 1 * kPointerSize] iter
4522  // [sp + 0 * kPointerSize] g
4523 
4524  Label l_catch, l_try, l_suspend, l_continuation, l_resume;
4525  Label l_next, l_call, l_loop;
4526  Register load_receiver = LoadDescriptor::ReceiverRegister();
4527  Register load_name = LoadDescriptor::NameRegister();
4528 
4529  // Initial send value is undefined.
4530  __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
4531  __ B(&l_next);
4532 
4533  // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
4534  __ Bind(&l_catch);
4535  handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
4536  __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
4537  __ Peek(x3, 1 * kPointerSize); // iter
4538  __ Push(load_name, x3, x0); // "throw", iter, except
4539  __ B(&l_call);
4540 
4541  // try { received = %yield result }
4542  // Shuffle the received result above a try handler and yield it without
4543  // re-boxing.
4544  __ Bind(&l_try);
4545  __ Pop(x0); // result
4546  __ PushTryHandler(StackHandler::CATCH, expr->index());
4547  const int handler_size = StackHandlerConstants::kSize;
4548  __ Push(x0); // result
4549  __ B(&l_suspend);
4550 
4551  // TODO(jbramley): This label is bound here because the following code
4552  // looks at its pos(). Is it possible to do something more efficient here,
4553  // perhaps using Adr?
4554  __ Bind(&l_continuation);
4555  __ B(&l_resume);
4556 
4557  __ Bind(&l_suspend);
4558  const int generator_object_depth = kPointerSize + handler_size;
4559  __ Peek(x0, generator_object_depth);
4560  __ Push(x0); // g
4561  DCHECK((l_continuation.pos() > 0) && Smi::IsValid(l_continuation.pos()));
4562  __ Mov(x1, Smi::FromInt(l_continuation.pos()));
4565  __ Mov(x1, cp);
4566  __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
4568  __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
4570  __ Pop(x0); // result
4572  __ Bind(&l_resume); // received in x0
4573  __ PopTryHandler();
4574 
4575  // receiver = iter; f = 'next'; arg = received;
4576  __ Bind(&l_next);
4577 
4578  __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
4579  __ Peek(x3, 1 * kPointerSize); // iter
4580  __ Push(load_name, x3, x0); // "next", iter, received
4581 
4582  // result = receiver[f](arg);
4583  __ Bind(&l_call);
4584  __ Peek(load_receiver, 1 * kPointerSize);
4585  __ Peek(load_name, 2 * kPointerSize);
4586  if (FLAG_vector_ics) {
4588  Smi::FromInt(expr->KeyedLoadFeedbackSlot()));
4589  }
4590  Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
4592  __ Mov(x1, x0);
4593  __ Poke(x1, 2 * kPointerSize);
4594  CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
4595  __ CallStub(&stub);
4596 
4598  __ Drop(1); // The function is still on the stack; drop it.
4599 
4600  // if (!result.done) goto l_try;
4601  __ Bind(&l_loop);
4602  __ Move(load_receiver, x0);
4603 
4604  __ Push(load_receiver); // save result
4605  __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
4606  if (FLAG_vector_ics) {
4608  Smi::FromInt(expr->DoneFeedbackSlot()));
4609  }
4610  CallLoadIC(NOT_CONTEXTUAL); // x0=result.done
4611  // The ToBooleanStub argument (result.done) is in x0.
4612  Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
4613  CallIC(bool_ic);
4614  __ Cbz(x0, &l_try);
4615 
4616  // result.value
4617  __ Pop(load_receiver); // result
4618  __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
4619  if (FLAG_vector_ics) {
4621  Smi::FromInt(expr->ValueFeedbackSlot()));
4622  }
4623  CallLoadIC(NOT_CONTEXTUAL); // x0=result.value
4624  context()->DropAndPlug(2, x0); // drop iter and g
4625  break;
4626  }
4627  }
4628 }
4629 
4630 
4631 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
4632  Expression *value,
4633  JSGeneratorObject::ResumeMode resume_mode) {
4634  ASM_LOCATION("FullCodeGenerator::EmitGeneratorResume");
4635  Register value_reg = x0;
4636  Register generator_object = x1;
4637  Register the_hole = x2;
4638  Register operand_stack_size = w3;
4639  Register function = x4;
4640 
4641  // The value stays in x0, and is ultimately read by the resumed generator, as
4642  // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
4643  // is read to throw the value when the resumed generator is already closed. x1
4644  // will hold the generator object until the activation has been resumed.
4645  VisitForStackValue(generator);
4646  VisitForAccumulatorValue(value);
4647  __ Pop(generator_object);
4648 
4649  // Check generator state.
4650  Label wrong_state, closed_state, done;
4651  __ Ldr(x10, FieldMemOperand(generator_object,
4655  __ CompareAndBranch(x10, Smi::FromInt(0), eq, &closed_state);
4656  __ CompareAndBranch(x10, Smi::FromInt(0), lt, &wrong_state);
4657 
4658  // Load suspended function and context.
4659  __ Ldr(cp, FieldMemOperand(generator_object,
4661  __ Ldr(function, FieldMemOperand(generator_object,
4663 
4664  // Load receiver and store as the first argument.
4665  __ Ldr(x10, FieldMemOperand(generator_object,
4667  __ Push(x10);
4668 
4669  // Push holes for the rest of the arguments to the generator function.
4671 
4672  // The number of arguments is stored as an int32_t, and -1 is a marker
4673  // (SharedFunctionInfo::kDontAdaptArgumentsSentinel), so we need sign
4674  // extension to correctly handle it. However, in this case, we operate on
4675  // 32-bit W registers, so extension isn't required.
4676  __ Ldr(w10, FieldMemOperand(x10,
4678  __ LoadRoot(the_hole, Heap::kTheHoleValueRootIndex);
4679  __ PushMultipleTimes(the_hole, w10);
4680 
4681  // Enter a new JavaScript frame, and initialize its slots as they were when
4682  // the generator was suspended.
4683  Label resume_frame;
4684  __ Bl(&resume_frame);
4685  __ B(&done);
4686 
4687  __ Bind(&resume_frame);
4688  __ Push(lr, // Return address.
4689  fp, // Caller's frame pointer.
4690  cp, // Callee's context.
4691  function); // Callee's JS Function.
4692  __ Add(fp, __ StackPointer(), kPointerSize * 2);
4693 
4694  // Load and untag the operand stack size.
4695  __ Ldr(x10, FieldMemOperand(generator_object,
4697  __ Ldr(operand_stack_size,
4699 
4700  // If we are sending a value and there is no operand stack, we can jump back
4701  // in directly.
4702  if (resume_mode == JSGeneratorObject::NEXT) {
4703  Label slow_resume;
4704  __ Cbnz(operand_stack_size, &slow_resume);
4705  __ Ldr(x10, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
4706  __ Ldrsw(x11,
4707  UntagSmiFieldMemOperand(generator_object,
4709  __ Add(x10, x10, x11);
4711  __ Str(x12, FieldMemOperand(generator_object,
4713  __ Br(x10);
4714 
4715  __ Bind(&slow_resume);
4716  }
4717 
4718  // Otherwise, we push holes for the operand stack and call the runtime to fix
4719  // up the stack and the handlers.
4720  __ PushMultipleTimes(the_hole, operand_stack_size);
4721 
4722  __ Mov(x10, Smi::FromInt(resume_mode));
4723  __ Push(generator_object, result_register(), x10);
4724  __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
4725  // Not reached: the runtime call returns elsewhere.
4726  __ Unreachable();
4727 
4728  // Reach here when generator is closed.
4729  __ Bind(&closed_state);
4730  if (resume_mode == JSGeneratorObject::NEXT) {
4731  // Return completed iterator result when generator is closed.
4732  __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
4733  __ Push(x10);
4734  // Pop value from top-of-stack slot; box result into result register.
4736  } else {
4737  // Throw the provided value.
4738  __ Push(value_reg);
4739  __ CallRuntime(Runtime::kThrow, 1);
4740  }
4741  __ B(&done);
4742 
4743  // Throw error if we attempt to operate on a running generator.
4744  __ Bind(&wrong_state);
4745  __ Push(generator_object);
4746  __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
4747 
4748  __ Bind(&done);
4749  context()->Plug(result_register());
4750 }
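
When the suspended generator has an empty operand stack, resuming amounts to rebuilding the frame by hand (lr, fp, cp and the function are pushed and fp is re-pointed) and branching to the function's code entry plus the continuation offset that was stored as a smi at suspend time. A sketch of that address computation, as plain pointer arithmetic rather than V8 API:

#include <cstdint>

// The continuation is recorded at suspend time as a byte offset from the
// start of the generated code (the Smi::FromInt(continuation.pos()) stores
// above). Resuming with an empty operand stack is just "Br" to this address.
inline uintptr_t ComputeResumeAddress(uintptr_t code_entry,
                                      int32_t continuation_offset) {
  // continuation_offset is the untagged value of the stored smi.
  return code_entry + static_cast<uintptr_t>(continuation_offset);
}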
4751 
4752 
4753 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
4754  Label gc_required;
4755  Label allocated;
4756 
4757  Handle<Map> map(isolate()->native_context()->iterator_result_map());
4758 
4759  // Allocate and populate an object with this form: { value: VAL, done: DONE }
4760 
4761  Register result = x0;
4762  __ Allocate(map->instance_size(), result, x10, x11, &gc_required, TAG_OBJECT);
4763  __ B(&allocated);
4764 
4765  __ Bind(&gc_required);
4766  __ Push(Smi::FromInt(map->instance_size()));
4767  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
4768  __ Ldr(context_register(),
4770 
4771  __ Bind(&allocated);
4772  Register map_reg = x1;
4773  Register result_value = x2;
4774  Register boolean_done = x3;
4775  Register empty_fixed_array = x4;
4776  Register untagged_result = x5;
4777  __ Mov(map_reg, Operand(map));
4778  __ Pop(result_value);
4779  __ Mov(boolean_done, Operand(isolate()->factory()->ToBoolean(done)));
4780  __ Mov(empty_fixed_array, Operand(isolate()->factory()->empty_fixed_array()));
4781  DCHECK_EQ(map->instance_size(), 5 * kPointerSize);
4786  __ ObjectUntag(untagged_result, result);
4787  __ Str(map_reg, MemOperand(untagged_result, HeapObject::kMapOffset));
4788  __ Stp(empty_fixed_array, empty_fixed_array,
4789  MemOperand(untagged_result, JSObject::kPropertiesOffset));
4790  __ Stp(result_value, boolean_done,
4791  MemOperand(untagged_result,
4793 
4794  // Only the value field needs a write barrier, as the other values are in the
4795  // root set.
4796  __ RecordWriteField(result, JSGeneratorObject::kResultValuePropertyOffset,
4797  x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
4798 }
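
EmitCreateIteratorResult builds the { value, done } object that yield returns, using the per-context iterator result map; the DCHECK pins the instance to exactly five pointer-sized fields, which the two Stp stores then fill: map, properties and elements (both the empty fixed array), then value and done. A plain C++ picture of that layout (the struct below is descriptive, not a V8 declaration):

#include <cstdint>

// In-object layout implied by the stores above: one word each for the map and
// the (empty) properties and elements backing stores, then the two in-object
// properties "value" and "done" -- 5 * kPointerSize in total.
struct IteratorResultLayout {
  uintptr_t map;         // HeapObject::kMapOffset
  uintptr_t properties;  // JSObject::kPropertiesOffset (empty_fixed_array)
  uintptr_t elements;    // stored together with properties by the first Stp
  uintptr_t value;       // the yielded value; the only field needing a write barrier
  uintptr_t done;        // true/false heap boolean, already in the root set
};

static_assert(sizeof(IteratorResultLayout) == 5 * sizeof(uintptr_t),
              "matches the DCHECK on map->instance_size()");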
4799 
4800 
4801 // TODO(all): I don't like this method.
4802 // It seems to me that in too many places x0 is used in place of this.
4803 // Also, this function is not suitable for all places where x0 should be
4804 // abstracted (eg. when used as an argument). But some places assume that the
4805 // first argument register is x0, and use this function instead.
4806 // Considering that most of the register allocation is hard-coded in the
4807 // FullCodeGen, that it is unlikely we will need to change it extensively, and
4808 // that abstracting the allocation through functions would not yield any
4809 // performance benefit, I think the existence of this function is debatable.
4810 Register FullCodeGenerator::result_register() {
4811  return x0;
4812 }
4813 
4814 
4815 Register FullCodeGenerator::context_register() {
4816  return cp;
4817 }
4818 
4819 
4820 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4821  DCHECK(POINTER_SIZE_ALIGN(frame_offset) == frame_offset);
4822  __ Str(value, MemOperand(fp, frame_offset));
4823 }
4824 
4825 
4826 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4827  __ Ldr(dst, ContextMemOperand(cp, context_index));
4828 }
4829 
4830 
4832  Scope* declaration_scope = scope()->DeclarationScope();
4833  if (declaration_scope->is_global_scope() ||
4834  declaration_scope->is_module_scope()) {
4835  // Contexts nested in the native context have a canonical empty function
4836  // as their closure, not the anonymous closure containing the global
4837  // code. Pass a smi sentinel and let the runtime look up the empty
4838  // function.
4839  DCHECK(kSmiTag == 0);
4840  __ Push(xzr);
4841  } else if (declaration_scope->is_eval_scope()) {
4842  // Contexts created by a call to eval have the same closure as the
4843  // context calling eval, not the anonymous closure containing the eval
4844  // code. Fetch it from the context.
4846  __ Push(x10);
4847  } else {
4848  DCHECK(declaration_scope->is_function_scope());
4850  __ Push(x10);
4851  }
4852 }
4853 
4854 
4855 void FullCodeGenerator::EnterFinallyBlock() {
4856  ASM_LOCATION("FullCodeGenerator::EnterFinallyBlock");
4857  DCHECK(!result_register().is(x10));
4858  // Preserve the result register while executing finally block.
4859  // Also cook the return address in lr to the stack (smi encoded Code* delta).
4860  __ Sub(x10, lr, Operand(masm_->CodeObject()));
4861  __ SmiTag(x10);
4862  __ Push(result_register(), x10);
4863 
4864  // Store pending message while executing finally block.
4865  ExternalReference pending_message_obj =
4866  ExternalReference::address_of_pending_message_obj(isolate());
4867  __ Mov(x10, pending_message_obj);
4868  __ Ldr(x10, MemOperand(x10));
4869 
4870  ExternalReference has_pending_message =
4871  ExternalReference::address_of_has_pending_message(isolate());
4872  STATIC_ASSERT(sizeof(bool) == 1); // NOLINT(runtime/sizeof)
4873  __ Mov(x11, has_pending_message);
4874  __ Ldrb(x11, MemOperand(x11));
4875  __ SmiTag(x11);
4876 
4877  __ Push(x10, x11);
4878 
4879  ExternalReference pending_message_script =
4880  ExternalReference::address_of_pending_message_script(isolate());
4881  __ Mov(x10, pending_message_script);
4882  __ Ldr(x10, MemOperand(x10));
4883  __ Push(x10);
4884 }
4885 
4886 
4887 void FullCodeGenerator::ExitFinallyBlock() {
4888  ASM_LOCATION("FullCodeGenerator::ExitFinallyBlock");
4889  DCHECK(!result_register().is(x10));
4890 
4891  // Restore pending message from stack.
4892  __ Pop(x10, x11, x12);
4893  ExternalReference pending_message_script =
4894  ExternalReference::address_of_pending_message_script(isolate());
4895  __ Mov(x13, pending_message_script);
4896  __ Str(x10, MemOperand(x13));
4897 
4898  __ SmiUntag(x11);
4899  ExternalReference has_pending_message =
4900  ExternalReference::address_of_has_pending_message(isolate());
4901  __ Mov(x13, has_pending_message);
4902  STATIC_ASSERT(sizeof(bool) == 1); // NOLINT(runtime/sizeof)
4903  __ Strb(x11, MemOperand(x13));
4904 
4905  ExternalReference pending_message_obj =
4906  ExternalReference::address_of_pending_message_obj(isolate());
4907  __ Mov(x13, pending_message_obj);
4908  __ Str(x12, MemOperand(x13));
4909 
4910  // Restore result register and cooked return address from the stack.
4911  __ Pop(x10, result_register());
4912 
4913  // Uncook the return address (see EnterFinallyBlock).
4914  __ SmiUntag(x10);
4915  __ Add(x11, x10, Operand(masm_->CodeObject()));
4916  __ Br(x11);
4917 }
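
Enter/ExitFinallyBlock "cook" the return address around finally code: rather than saving a raw code address that the GC could not relocate, the delta from the start of the current Code object is smi-tagged and pushed, and on exit the delta is untagged and added back onto the code object's (possibly new) address. A sketch of the cook/uncook arithmetic, with an illustrative tag shift (arm64 actually uses a wider smi shift):

#include <cstdint>

constexpr int kSmiTagShift = 1;  // illustrative; the point is a reversible tag

// "Cook": turn an absolute return address into a GC-safe, smi-tagged offset
// from the start of the code object it points into.
inline intptr_t CookReturnAddress(uintptr_t return_address, uintptr_t code_start) {
  intptr_t delta = static_cast<intptr_t>(return_address - code_start);
  return delta << kSmiTagShift;
}

// "Uncook": recover a valid return address against the code object's current
// location, which may have changed if the GC moved the code.
inline uintptr_t UncookReturnAddress(intptr_t cooked, uintptr_t code_start_now) {
  return code_start_now + static_cast<uintptr_t>(cooked >> kSmiTagShift);
}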
4918 
4919 
4920 #undef __
4921 
4922 
4923 void BackEdgeTable::PatchAt(Code* unoptimized_code,
4924  Address pc,
4925  BackEdgeState target_state,
4926  Code* replacement_code) {
4927  // Turn the jump into a nop.
4928  Address branch_address = pc - 3 * kInstructionSize;
4929  PatchingAssembler patcher(branch_address, 1);
4930 
4931  DCHECK(Instruction::Cast(branch_address)
4932  ->IsNop(Assembler::INTERRUPT_CODE_NOP) ||
4933  (Instruction::Cast(branch_address)->IsCondBranchImm() &&
4934  Instruction::Cast(branch_address)->ImmPCOffset() ==
4935  6 * kInstructionSize));
4936 
4937  switch (target_state) {
4938  case INTERRUPT:
4939  // <decrement profiling counter>
4940  // .. .. .. .. b.pl ok
4941  // .. .. .. .. ldr x16, pc+<interrupt stub address>
4942  // .. .. .. .. blr x16
4943  // ... more instructions.
4944  // ok-label
4945  // Jump offset is 6 instructions.
4946  patcher.b(6, pl);
4947  break;
4948  case ON_STACK_REPLACEMENT:
4949  case OSR_AFTER_STACK_CHECK:
4950  // <decrement profiling counter>
4951  // .. .. .. .. mov x0, x0 (NOP)
4952  // .. .. .. .. ldr x16, pc+<on-stack replacement address>
4953  // .. .. .. .. blr x16
4954  patcher.nop(Assembler::INTERRUPT_CODE_NOP);
4955  break;
4956  }
4957 
4958  // Replace the call address.
4959  Instruction* load = Instruction::Cast(pc)->preceding(2);
4960  Address interrupt_address_pointer =
4961  reinterpret_cast<Address>(load) + load->ImmPCOffset();
4962  DCHECK((Memory::uint64_at(interrupt_address_pointer) ==
4963  reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
4964  ->builtins()
4965  ->OnStackReplacement()
4966  ->entry())) ||
4967  (Memory::uint64_at(interrupt_address_pointer) ==
4968  reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
4969  ->builtins()
4970  ->InterruptCheck()
4971  ->entry())) ||
4972  (Memory::uint64_at(interrupt_address_pointer) ==
4973  reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
4974  ->builtins()
4975  ->OsrAfterStackCheck()
4976  ->entry())) ||
4977  (Memory::uint64_at(interrupt_address_pointer) ==
4978  reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
4979  ->builtins()
4980  ->OnStackReplacement()
4981  ->entry())));
4982  Memory::uint64_at(interrupt_address_pointer) =
4983  reinterpret_cast<uint64_t>(replacement_code->entry());
4984 
4985  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4986  unoptimized_code, reinterpret_cast<Address>(load), replacement_code);
4987 }
4988 
4989 
4990 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4991  Isolate* isolate,
4992  Code* unoptimized_code,
4993  Address pc) {
4994  // TODO(jbramley): There should be some extra assertions here (as in the ARM
4995  // back-end), but this function is gone in bleeding_edge so it might not
4996  // matter anyway.
4997  Instruction* jump_or_nop = Instruction::Cast(pc)->preceding(3);
4998 
4999  if (jump_or_nop->IsNop(Assembler::INTERRUPT_CODE_NOP)) {
5000  Instruction* load = Instruction::Cast(pc)->preceding(2);
5001  uint64_t entry = Memory::uint64_at(reinterpret_cast<Address>(load) +
5002  load->ImmPCOffset());
5003  if (entry == reinterpret_cast<uint64_t>(
5004  isolate->builtins()->OnStackReplacement()->entry())) {
5005  return ON_STACK_REPLACEMENT;
5006  } else if (entry == reinterpret_cast<uint64_t>(
5007  isolate->builtins()->OsrAfterStackCheck()->entry())) {
5008  return OSR_AFTER_STACK_CHECK;
5009  } else {
5010  UNREACHABLE();
5011  }
5012  }
5013 
5014  return INTERRUPT;
5015 }
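
PatchAt and GetBackEdgeState operate on the three-instruction sequence emitted at every back edge: a conditional branch (b.pl ok) that skips a call to the interrupt stub. Patching replaces the branch with a nop so the call is always taken, and swaps the call target stored in a nearby literal slot for one of the OSR builtins. A standalone sketch of the state classification (the enum and helper below are not the real Assembler API):

#include <cassert>
#include <cstdint>

enum class BackEdgeState { INTERRUPT, ON_STACK_REPLACEMENT, OSR_AFTER_STACK_CHECK };

// Mirrors the decision in GetBackEdgeState above: while the guarding
// instruction is still the conditional branch, the back edge only performs an
// interrupt check; once it has been patched to a nop, the literal slot holding
// the call target says which replacement builtin is installed.
inline BackEdgeState ClassifyBackEdge(bool branch_is_nop, uint64_t call_target,
                                      uint64_t osr_entry,
                                      uint64_t osr_after_stack_check_entry) {
  if (!branch_is_nop) return BackEdgeState::INTERRUPT;
  if (call_target == osr_entry) return BackEdgeState::ON_STACK_REPLACEMENT;
  assert(call_target == osr_after_stack_check_entry);
  return BackEdgeState::OSR_AFTER_STACK_CHECK;
}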
5016 
5017 
5018 #define __ ACCESS_MASM(masm())
5019 
5020 
5021 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
5022  int* stack_depth,
5023  int* context_length) {
5024  ASM_LOCATION("FullCodeGenerator::TryFinally::Exit");
5025  // The macros used here must preserve the result register.
5026 
5027  // Because the handler block contains the context of the finally
5028  // code, we can restore it directly from there for the finally code
5029  // rather than iteratively unwinding contexts via their previous
5030  // links.
5031  __ Drop(*stack_depth); // Down to the handler block.
5032  if (*context_length > 0) {
5033  // Restore the context to its dedicated register and the stack.
5036  }
5037  __ PopTryHandler();
5038  __ Bl(finally_entry_);
5039 
5040  *stack_depth = 0;
5041  *context_length = 0;
5042  return previous_;
5043 }
5044 
5045 
5046 #undef __
5047 
5048 
5049 } } // namespace v8::internal
5050 
5051 #endif // V8_TARGET_ARCH_ARM64
static const int kGeneratorClosed
Definition: objects.h:7120
static const int kResultValuePropertyOffset
Definition: objects.h:7141
static const int kGeneratorExecuting
Definition: objects.h:7119
static const int kOperandStackOffset
Definition: objects.h:7127
static const int kReceiverOffset
Definition: objects.h:7125
static const int kContextOffset
Definition: objects.h:7124
static const int kContinuationOffset
Definition: objects.h:7126
static const int kInitialMaxFastElementArray
Definition: objects.h:2180
static const int kPropertiesOffset
Definition: objects.h:2193
static const int kElementsOffset
Definition: objects.h:2194
static const int kSize
Definition: objects.h:7772
static const int kInObjectFieldCount
Definition: objects.h:7826
static const int kValueOffset
Definition: objects.h:7546
static const Register ReceiverRegister()
static const Register NameRegister()
static const int kIsUndetectable
Definition: objects.h:6244
static const int kBitFieldOffset
Definition: objects.h:6228
static const int kStringWrapperSafeForDefaultValueOf
Definition: objects.h:6251
static const int kInstanceTypeOffset
Definition: objects.h:6229
static const int kBitField2Offset
Definition: objects.h:6233
static const int kConstructorOffset
Definition: objects.h:6191
static const int kPrototypeOffset
Definition: objects.h:6190
static uint64_t & uint64_at(Address addr)
Definition: v8memory.h:32
static const int kHashFieldOffset
Definition: objects.h:8486
static Operand UntagSmiAndScale(Register smi, int scale)
static void MaybeCallEntryHook(MacroAssembler *masm)
static const int kNoPosition
Definition: assembler.h:317
Scope * outer_scope() const
Definition: scopes.h:333
int num_parameters() const
Definition: scopes.h:321
VariableDeclaration * function() const
Definition: scopes.h:309
int ContextChainLength(Scope *scope)
Definition: scopes.cc:715
void VisitIllegalRedeclaration(AstVisitor *visitor)
Definition: scopes.cc:539
Scope * DeclarationScope()
Definition: scopes.cc:737
Variable * arguments() const
Definition: scopes.h:324
Scope * GlobalScope()
Definition: scopes.cc:728
Variable * parameter(int index) const
Definition: scopes.h:316
static const int kHeaderSize
Definition: objects.h:8941
static const int kFormalParameterCountOffset
Definition: objects.h:6946
static const int kInstanceClassNameOffset
Definition: objects.h:6897
static Smi * FromInt(int value)
Definition: objects-inl.h:1321
static bool IsValid(intptr_t value)
Definition: objects-inl.h:1334
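Smi::FromInt and Smi::IsValid above, together with kSmiTag, kSmiShift and kSmiTagMask listed further down, describe the tagged small-integer encoding. A minimal sketch of that encoding, assuming the usual 64-bit layout (value in the upper 32 bits, low tag bit clear); the constants and helper names below are assumptions for illustration, not the values from the headers:

  #include <cstdint>

  static const int kSketchSmiShift = 32;        // assumed 64-bit smi shift
  static const intptr_t kSketchSmiTagMask = 1;  // low bit separates smis from heap objects

  inline intptr_t TagSmi(int32_t value) {       // rough analogue of Smi::FromInt
    return static_cast<intptr_t>(value) << kSketchSmiShift;
  }
  inline bool IsSmiWord(intptr_t word) {        // tag bit clear means smi
    return (word & kSketchSmiTagMask) == 0;
  }
  inline int32_t UntagSmi(intptr_t word) {
    return static_cast<int32_t>(word >> kSketchSmiShift);
  }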
static const int kContextOffset
Definition: frames.h:74
static const int kContextOffset
Definition: frames.h:162
static const int kExpressionsOffset
Definition: frames.h:160
static const int kCallerSPOffset
Definition: frames.h:167
static const int kMarkerOffset
Definition: frames.h:161
static const int kCallerFPOffset
Definition: frames.h:165
static const Register ReceiverRegister()
static const Register NameRegister()
static const Register ValueRegister()
static const unsigned int kContainsCachedArrayIndexMask
Definition: objects.h:8618
static const int kLengthOffset
Definition: objects.h:8802
bool Equals(String *other)
Definition: objects-inl.h:3336
static Handle< Code > GetUninitialized(Isolate *isolate)
Definition: code-stubs.h:2258
static TypeFeedbackId None()
Definition: utils.h:945
static Handle< Object > MegamorphicSentinel(Isolate *isolate)
TypeofState
Definition: codegen.h:46
#define __
Command-line flag definitions (DEFINE_BOOL(enable_always_align_csp), DEFINE_BOOL(enable_unaligned_accesses) and related flag help text).
#define POINTER_SIZE_ALIGN(value)
Definition: globals.h:582
#define UNREACHABLE()
Definition: logging.h:30
#define DCHECK_NE(v1, v2)
Definition: logging.h:207
#define DCHECK(condition)
Definition: logging.h:205
#define DCHECK_EQ(v1, v2)
Definition: logging.h:206
#define ASM_LOCATION(message)
@ CALL_FUNCTION
@ TAG_OBJECT
#define V8_UINT64_C(x)
Definition: macros.h:357
static int Push(SpecialRPOStackFrame *stack, int depth, BasicBlock *child, int unvisited)
Definition: scheduler.cc:773
Vector< const char > CStrVector(const char *data)
Definition: vector.h:158
const int kPointerSize
Definition: globals.h:129
@ DO_SMI_CHECK
Definition: globals.h:641
@ STRING_ADD_CHECK_BOTH
Definition: code-stubs.h:1218
@ TRACK_ALLOCATION_SITE
Definition: objects.h:8085
@ DONT_TRACK_ALLOCATION_SITE
Definition: objects.h:8084
has_duplicate_parameters
Definition: objects-inl.h:5448
@ kSeqStringTag
Definition: objects.h:563
const int kSmiShift
bool AreAliased(const CPURegister &reg1, const CPURegister &reg2, const CPURegister &reg3=NoReg, const CPURegister &reg4=NoReg, const CPURegister &reg5=NoReg, const CPURegister &reg6=NoReg, const CPURegister &reg7=NoReg, const CPURegister &reg8=NoReg)
const Register cp
const unsigned kXRegSizeInBits
static LifetimePosition Min(LifetimePosition a, LifetimePosition b)
bool IsDeclaredVariableMode(VariableMode mode)
Definition: globals.h:705
const uint32_t kTwoByteStringTag
Definition: objects.h:556
MemOperand GlobalObjectMemOperand()
const Register fp
const int kPointerSizeLog2
Definition: globals.h:147
MemOperand ContextMemOperand(Register context, int index)
@ LAST_NONCALLABLE_SPEC_OBJECT_TYPE
Definition: objects.h:785
@ JS_REGEXP_TYPE
Definition: objects.h:748
@ NUM_OF_CALLABLE_SPEC_OBJECT_TYPES
Definition: objects.h:788
@ JS_VALUE_TYPE
Definition: objects.h:728
@ JS_DATE_TYPE
Definition: objects.h:730
@ FIRST_NONCALLABLE_SPEC_OBJECT_TYPE
Definition: objects.h:784
@ FIRST_JS_PROXY_TYPE
Definition: objects.h:778
@ JS_ARRAY_TYPE
Definition: objects.h:738
@ FIRST_NONSTRING_TYPE
Definition: objects.h:758
@ FIRST_SPEC_OBJECT_TYPE
Definition: objects.h:781
@ LAST_SPEC_OBJECT_TYPE
Definition: objects.h:782
@ JS_FUNCTION_TYPE
Definition: objects.h:749
@ JS_FUNCTION_PROXY_TYPE
Definition: objects.h:726
@ LAST_JS_PROXY_TYPE
Definition: objects.h:779
const uint32_t kOneByteStringTag
Definition: objects.h:557
@ NO_OVERWRITE
Definition: ic-state.h:58
@ OVERWRITE_RIGHT
Definition: ic-state.h:58
MemOperand FieldMemOperand(Register object, int offset)
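FieldMemOperand above builds the operand for a field of a tagged heap object; since tagged pointers (see kHeapObjectTag below) conventionally point one byte past the true object start, the field offset is adjusted by that tag. A hedged sketch with illustrative types, not the macro-assembler's actual MemOperand:

  // Illustrative only: field access on a tagged object is base + (offset - tag).
  struct OperandSketch { int base_reg; int byte_offset; };

  inline OperandSketch FieldOperandSketch(int object_reg, int field_offset) {
    const int kHeapObjectTagSketch = 1;  // assumed tag on heap-object pointers
    return OperandSketch{object_reg, field_offset - kHeapObjectTagSketch};
  }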
const unsigned kLoadLiteralScaleLog2
bool IsImmutableVariableMode(VariableMode mode)
Definition: globals.h:715
const Register pc
@ DYNAMIC_GLOBAL
Definition: globals.h:689
@ DYNAMIC_LOCAL
Definition: globals.h:693
@ CONST_LEGACY
Definition: globals.h:671
const uint64_t kSmiShiftMask
Condition NegateCondition(Condition cond)
Definition: constants-arm.h:86
static LifetimePosition Max(LifetimePosition a, LifetimePosition b)
const Register lr
byte * Address
Definition: globals.h:101
NilValue
Definition: v8.h:97
@ kNullValue
Definition: v8.h:97
@ NOT_CONTEXTUAL
Definition: objects.h:174
const unsigned kXRegSize
const int kHeapObjectTag
Definition: v8.h:5737
const int kSmiValueSize
Definition: v8.h:5806
MemOperand UntagSmiFieldMemOperand(Register object, int offset)
flag
Definition: objects-inl.h:5418
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
const intptr_t kSmiTagMask
Definition: v8.h:5744
@ RECORD_CONSTRUCTOR_TARGET
Definition: globals.h:480
const int kSmiTag
Definition: v8.h:5742
@ NO_CALL_FUNCTION_FLAGS
Definition: globals.h:469
@ CALL_AS_METHOD
Definition: globals.h:470
const unsigned kInstructionSize
@ STRING_INDEX_IS_NUMBER
Definition: code-stubs.h:1590
void CopyBytes(uint8_t *target, uint8_t *source)
bool IsFastObjectElementsKind(ElementsKind kind)
const int64_t kXSignBit
Debugger support for the V8 JavaScript engine.
Definition: accessors.cc:20
#define VOID
#define IN
PropertyAttributes
@ NONE
@ READ_ONLY