full-codegen-x64.cc
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_X64

#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug.h"
#include "src/full-codegen.h"
#include "src/ic/ic.h"
#include "src/isolate-inl.h"
#include "src/parser.h"
#include "src/scopes.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)

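// A note on the "__" convention used throughout this file: ACCESS_MASM(masm_)
// expands to masm_-> , so every "__ insn(...)" line emits one (macro)
// instruction into the MacroAssembler's code buffer.
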
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance near_jump = Label::kFar) {
    __ testb(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, near_jump);  // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance near_jump = Label::kFar) {
    __ testb(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, near_jump);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      DCHECK(is_uint8(delta_to_patch_site));
      __ testl(rax, Immediate(delta_to_patch_site));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance near_jump) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    DCHECK(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, near_jump);
  }

  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
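// How the patch site works: "testb reg, kSmiTagMask" sets ZF from the low
// (tag) bit of reg but, like all x86 test instructions, clears CF. The
// freshly emitted jc/jnc branches are therefore statically never/always
// taken, as noted above. When the inline smi cache is later patched, the
// jc/jnc opcodes are rewritten to jz/jnz, turning them into real smi checks
// on the tag bit. EmitPatchInfo() records the distance back to the patch
// site as the uint8 immediate of a testl instruction, so the patcher can
// find the branch again.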


// Generate code for a JS function.  On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them.  The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
//   o rdi: the JS function object being called (i.e. ourselves)
//   o rsi: our context
//   o rbp: our caller's frame pointer
//   o rsp: stack pointer (pointing to return address)
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-x64.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);

  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ int3();
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (info->strict_mode() == SLOPPY && !info->is_native()) {
    Label ok;
    // +1 for return address.
    StackArgumentsAccessor args(rsp, info->scope()->num_parameters());
    __ movp(rcx, args.GetReceiverOperand());

    __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
    __ j(not_equal, &ok, Label::kNear);

    __ movp(rcx, GlobalObjectOperand());
    __ movp(rcx, FieldOperand(rcx, GlobalObject::kGlobalProxyOffset));

    __ movp(args.GetReceiverOperand(), rcx);

    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!info->function()->is_generator() || locals_count == 0);
    if (locals_count == 1) {
      __ PushRoot(Heap::kUndefinedValueRootIndex);
    } else if (locals_count > 1) {
      if (locals_count >= 128) {
        Label ok;
        __ movp(rcx, rsp);
        __ subp(rcx, Immediate(locals_count * kPointerSize));
        __ CompareRoot(rcx, Heap::kRealStackLimitRootIndex);
        __ j(above_equal, &ok, Label::kNear);
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ bind(&ok);
      }
      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      const int kMaxPushes = 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ movp(rcx, Immediate(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ Push(rdx);
        }
        // Continue loop if not done.
        __ decp(rcx);
        __ j(not_zero, &loop_header, Label::kNear);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ Push(rdx);
      }
    }
  }
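  // Push-loop arithmetic, as a concrete example: with locals_count == 70 and
  // kMaxPushes == 32, the loop above runs 70 / 32 = 2 iterations (64 pushes)
  // and the unrolled tail emits the remaining 70 % 32 = 6 pushes.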

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    // Argument to NewContext is the function, which is still in rdi.
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ Push(rdi);
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), heap_slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ Push(rdi);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in rax.  It replaces the context passed to us.
    // It's saved in the stack and kept live in rsi.
    __ movp(rsi, rax);
    __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rax);

    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ movp(rax, Operand(rbp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ movp(Operand(rsi, context_offset), rax);
        // Update the write barrier.  This clobbers rax and rbx.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(
              rsi, context_offset, rax, rbx, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(rsi, rax, &done, Label::kNear);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // Possibly allocate an arguments object.
  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Arguments object must be allocated after the context object, in
    // case the "arguments" or ".arguments" variables are in the context.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (function_in_register) {
      __ Push(rdi);
    } else {
      __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    }
    // The receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ leap(rdx,
            Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset));
    __ Push(rdx);
    __ Push(Smi::FromInt(num_parameters));
    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (strict_mode() == STRICT) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, rax, rbx, rdx);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        DCHECK(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_LEGACY);
        DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
      __ j(above_equal, &ok, Label::kNear);
      __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(function()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
    EmitReturnSequence();
  }
}


void FullCodeGenerator::ClearAccumulator() {
  __ Set(rax, 0);
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
  __ SmiAddConstant(FieldOperand(rbx, Cell::kValueOffset),
                    Smi::FromInt(-delta));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
  __ Move(kScratchRegister, Smi::FromInt(reset_value));
  __ movp(FieldOperand(rbx, Cell::kValueOffset), kScratchRegister);
}


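// The interrupt budget mechanism used below: profiling_counter_ holds a smi
// budget that EmitProfilingCounterDecrement() reduces by a weight
// proportional to the code distance covered. When the counter goes negative
// (the jns/"j(positive, ...)" guard fails), the InterruptCheck builtin is
// called, giving the runtime a chance to optimize or OSR the function, and
// the counter is reset to FLAG_interrupt_budget. kJnsOffset pins the byte
// length of the interrupt-check sequence (via the PredictableCodeSizeScope
// below) so the back-edge table can later locate and patch it.
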
static const byte kJnsOffset = kPointerSize == kInt64Size ? 0x1d : 0x14;


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);

  __ j(positive, &ok, Label::kNear);
  {
    PredictableCodeSizeScope predictible_code_size_scope(masm_, kJnsOffset);
    DontEmitDebugCodeScope dont_emit_debug_code_scope(masm_);
    __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

    // Record a mapping of this PC offset to the OSR id.  This is used to find
    // the AST id from the unoptimized code in order to use it as a key into
    // the deoptimization input data found in the optimized code.
    RecordBackEdge(stmt->OsrEntryId());

    EmitProfilingCounterReset();
  }
  __ bind(&ok);

  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ Push(rax);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ j(positive, &ok, Label::kNear);
    __ Push(rax);
    __ call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ Pop(rax);
    EmitProfilingCounterReset();
    __ bind(&ok);
#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
    __ RecordJSReturn();
    // Do not use the leave instruction here because it is too short to
    // patch with the code required by the debugger.
    __ movp(rsp, rbp);
    __ popq(rbp);
    int no_frame_start = masm_->pc_offset();

    int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
    __ Ret(arguments_bytes, rcx);

    // Add padding that will be overwritten by a debugger breakpoint.  We
    // have just generated at least 7 bytes: "movp rsp, rbp; pop rbp; ret k"
    // (3 + 1 + 3) for x64 and at least 6 (2 + 1 + 3) bytes for x32.
    const int kPadding = Assembler::kJSReturnSequenceLength -
                         (kPointerSize == kInt64Size ? 7 : 6);
    for (int i = 0; i < kPadding; ++i) {
      masm_->int3();
    }
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    DCHECK(Assembler::kJSReturnSequenceLength <=
           masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));

    info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
  }
}


void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());
  __ Push(operand);
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}

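// The four expression contexts plugged above and below implement the
// full-codegen value-passing discipline: EffectContext discards the value,
// AccumulatorValueContext leaves it in rax (the result register),
// StackValueContext pushes it onto the operand stack, and TestContext
// forwards it to a conditional branch via DoTest().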

void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ PushRoot(index);
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafeMove(result_register(), Smi::cast(*lit));
  } else {
    __ Move(result_register(), lit);
  }
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafePush(Smi::cast(*lit));
  } else {
    __ Push(lit);
  }
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ Move(result_register(), lit);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ movp(Operand(rsp, 0), reg);
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ Move(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ Move(result_register(), isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ Push(isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ Push(isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ PushRoot(value_root_index);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ testp(result_register(), result_register());
  // The stub returns nonzero for true.
  Split(not_zero, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ j(cc, if_true);
  } else if (if_true == fall_through) {
    __ j(NegateCondition(cc), if_false);
  } else {
    __ j(cc, if_true);
    __ jmp(if_false);
  }
}
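// Example of the fall-through optimization in Split(): for code like
// "if (a === b) { ... } else { ... }" where the false block immediately
// follows the branch, Split(equal, &if_true, &if_false, &if_false) emits
// only "j(equal, &if_true)" and lets execution fall through to the false
// case; the symmetric case emits a single negated branch.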


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += kFPOnStackSize + kPCOnStackSize +
              (info_->scope()->num_parameters() - 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return Operand(rbp, offset);
}
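// Frame layout implied by StackOperand(): parameters live above the saved
// frame pointer and return address (hence the kFPOnStackSize +
// kPCOnStackSize adjustment), while locals live below rbp starting at
// JavaScriptFrameConstants::kLocal0Offset; higher variable indexes sit at
// lower addresses, which is why the index is negated.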


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  MemOperand location = VarOperand(var, dest);
  __ movp(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ movp(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context.  Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ movp(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
    __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
    __ Check(not_equal, kDeclarationInWithContext);
    __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
    __ Check(not_equal, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
        __ movp(StackOperand(variable), kScratchRegister);
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
        __ movp(ContextOperand(rsi, variable->index()), kScratchRegister);
        // No write barrier since the hole value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ Push(rsi);
      __ Push(variable->name());
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ Push(Smi::FromInt(attr));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ PushRoot(Heap::kTheHoleValueRootIndex);
      } else {
        __ Push(Smi::FromInt(0));  // Indicates no initial value.
      }
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}

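// Why hole-initialize: let/const (and legacy const) bindings are created
// holding the_hole sentinel so that a later load can distinguish "declared
// but not yet initialized" from a real value; EmitVariableLoad() below emits
// the corresponding read barrier that throws a ReferenceError (or yields
// undefined for legacy const) when it sees the hole.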

void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ movp(StackOperand(variable), result_register());
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ movp(ContextOperand(rsi, variable->index()), result_register());
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(rsi,
                                offset,
                                result_register(),
                                rcx,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ Push(rsi);
      __ Push(variable->name());
      __ Push(Smi::FromInt(NONE));
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  DCHECK(variable->location() == Variable::CONTEXT);
  DCHECK(variable->interface()->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(rax, scope_->ContextChainLength(scope_->GlobalScope()));
  __ movp(rax, ContextOperand(rax, variable->interface()->Index()));
  __ movp(rax, ContextOperand(rax, Context::EXTENSION_INDEX));

  // Assign it.
  __ movp(ContextOperand(rsi, variable->index()), rax);
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(rsi,
                            Context::SlotOffset(variable->index()),
                            rax,
                            rcx,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ Push(rsi);  // The context is the first argument.
  __ Push(pairs);
  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ movp(rdx, Operand(rsp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ movp(rcx, rdx);
      __ orp(rcx, rax);
      patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);

      __ cmpp(rdx, rax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ jmp(&skip, Label::kNear);
    PrepareForBailout(clause, TOS_REG);
    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
    __ j(not_equal, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    __ testp(rax, rax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}

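// In the inline smi fast case above, "orp rcx, rax" merges the tag bits of
// both comparands: smis are tagged with a zero low bit, so the OR has a zero
// low bit only if both values are smis, letting a single EmitJumpIfNotSmi()
// guard the direct cmpp comparison before falling back to the CompareIC.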

void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  int slot = stmt->ForInFeedbackSlot();
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop.  See ECMA-262 version 5, section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
  __ j(equal, &exit);
  Register null_value = rdi;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmpp(rax, null_value);
  __ j(equal, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(rax, &convert);
  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
  __ j(above_equal, &done_convert);
  __ bind(&convert);
  __ Push(rax);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  __ Push(rax);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CmpObjectType(rax, LAST_JS_PROXY_TYPE, rcx);
  __ j(below_equal, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid.  Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ Push(rax);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kMetaMapRootIndex);
  __ j(not_equal, &fixed_array);

  // We got a map in register rax. Get the enumeration cache from it.
  __ bind(&use_cache);

  Label no_descriptors;

  __ EnumLength(rdx, rax);
  __ Cmp(rdx, Smi::FromInt(0));
  __ j(equal, &no_descriptors);

  __ LoadInstanceDescriptors(rax, rcx);
  __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheOffset));
  __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ Push(rax);  // Map.
  __ Push(rcx);  // Enumeration cache.
  __ Push(rdx);  // Number of valid entries for the map in the enum cache.
  __ Push(Smi::FromInt(0));  // Initial index.
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ addp(rsp, Immediate(kPointerSize));
  __ jmp(&exit);

  // We got a fixed array in register rax. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  // No need for a write barrier, we are storing a Smi in the feedback vector.
  __ Move(rbx, FeedbackVector());
  __ Move(FieldOperand(rbx, FixedArray::OffsetOfElementAt(slot)),
          TypeFeedbackVector::MegamorphicSentinel(isolate()));
  __ Move(rbx, Smi::FromInt(1));  // Smi indicates slow check
  __ movp(rcx, Operand(rsp, 0 * kPointerSize));  // Get enumerated object
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CmpObjectType(rcx, LAST_JS_PROXY_TYPE, rcx);
  __ j(above, &non_proxy);
  __ Move(rbx, Smi::FromInt(0));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ Push(rbx);  // Smi
  __ Push(rax);  // Array
  __ movp(rax, FieldOperand(rax, FixedArray::kLengthOffset));
  __ Push(rax);  // Fixed array length (as smi).
  __ Push(Smi::FromInt(0));  // Initial index.

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  __ movp(rax, Operand(rsp, 0 * kPointerSize));  // Get the current index.
  __ cmpp(rax, Operand(rsp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register rbx.
  __ movp(rbx, Operand(rsp, 2 * kPointerSize));
  SmiIndex index = masm()->SmiToIndex(rax, rax, kPointerSizeLog2);
  __ movp(rbx, FieldOperand(rbx,
                            index.reg,
                            index.scale,
                            FixedArray::kHeaderSize));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register rdx.
  __ movp(rdx, Operand(rsp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ movp(rcx, Operand(rsp, 4 * kPointerSize));
  __ cmpp(rdx, FieldOperand(rcx, HeapObject::kMapOffset));
  __ j(equal, &update_each, Label::kNear);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  __ Cmp(rdx, Smi::FromInt(0));
  __ j(equal, &update_each, Label::kNear);

  // Convert the entry to a string or null if it isn't a property
  // anymore. If the property has been removed while iterating, we
  // just skip it.
  __ Push(rcx);  // Enumerable.
  __ Push(rbx);  // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ Cmp(rax, Smi::FromInt(0));
  __ j(equal, loop_statement.continue_label());
  __ movp(rbx, rax);

  // Update the 'each' property or variable from the possibly filtered
  // entry in register rbx.
  __ bind(&update_each);
  __ movp(result_register(), rbx);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ jmp(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ addp(rsp, Immediate(5 * kPointerSize));

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}

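// Stack layout maintained by the for-in loop above (from rsp upward):
//   [0] current index (smi)
//   [1] array length / number of valid entries (smi)
//   [2] fixed array of keys, or the enum cache
//   [3] expected map, or Smi(0) for a proxy / Smi(1) for the slow case
//   [4] the enumerable object itself
// which is why the loop tears down exactly 5 * kPointerSize bytes on exit.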

void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");
  SetStatementPosition(stmt);

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // var iterator = iterable[Symbol.iterator]();
  VisitForEffect(stmt->assign_iterator());

  // Loop entry.
  __ bind(loop_statement.continue_label());

  // result = iterator.next()
  VisitForEffect(stmt->next_result());

  // if (result.done) break;
  Label result_not_done;
  VisitForControl(stmt->result_done(),
                  loop_statement.break_label(),
                  &result_not_done,
                  &result_not_done);
  __ bind(&result_not_done);

  // each = result.value
  VisitForEffect(stmt->assign_each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ jmp(loop_statement.continue_label());

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->strict_mode(), info->kind());
    __ Move(rbx, info);
    __ CallStub(&stub);
  } else {
    __ Push(rsi);
    __ Push(info);
    __ Push(pretenure
            ? isolate()->factory()->true_value()
            : isolate()->factory()->false_value());
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(rax);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}


void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
  Comment cnmt(masm_, "[ SuperReference ");

  __ movp(LoadDescriptor::ReceiverRegister(),
          Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));

  Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
  __ Move(LoadDescriptor::NameRegister(), home_object_symbol);

  CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());

  __ Cmp(rax, isolate()->factory()->undefined_value());
  Label done;
  __ j(not_equal, &done);
  __ CallRuntime(Runtime::kThrowNonMethodError, 0);
  __ bind(&done);
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register context = rsi;
  Register temp = rdx;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ cmpp(ContextOperand(context, Context::EXTENSION_INDEX),
                Immediate(0));
        __ j(not_equal, slow);
      }
      // Load next context in chain.
      __ movp(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering rsi.
      context = temp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.  If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Loop up the context chain.  There is no frame effect so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(temp)) {
      __ movp(temp, context);
    }
    // Load map for comparison into register, outside loop.
    __ LoadRoot(kScratchRegister, Heap::kNativeContextMapRootIndex);
    __ bind(&next);
    // Terminate at native context.
    __ cmpp(kScratchRegister, FieldOperand(temp, HeapObject::kMapOffset));
    __ j(equal, &fast, Label::kNear);
    // Check that extension is NULL.
    __ cmpp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
    __ j(not_equal, slow);
    // Load next context in chain.
    __ movp(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
    __ jmp(&next);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a global
  // load IC call.
  __ movp(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
  __ Move(LoadDescriptor::NameRegister(), proxy->var()->name());
  if (FLAG_vector_ics) {
    __ Move(VectorLoadICDescriptor::SlotRegister(),
            Smi::FromInt(proxy->VariableFeedbackSlot()));
  }

  ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
      ? NOT_CONTEXTUAL
      : CONTEXTUAL;
  CallLoadIC(mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = rsi;
  Register temp = rbx;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ cmpp(ContextOperand(context, Context::EXTENSION_INDEX),
                Immediate(0));
        __ j(not_equal, slow);
      }
      __ movp(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering rsi.
      context = temp;
    }
  }
  // Check that last extension is NULL.
  __ cmpp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
  __ j(not_equal, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return an rsi-based operand (the write barrier cannot be allowed to
  // destroy the rsi register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables.  Eval is used a lot without
  // introducing variables.  In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ movp(rax, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
      __ j(not_equal, done);
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
      } else {  // LET || CONST
        __ Push(var->name());
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ jmp(done);
  }
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      __ Move(LoadDescriptor::NameRegister(), var->name());
      __ movp(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
      if (FLAG_vector_ics) {
        __ Move(VectorLoadICDescriptor::SlotRegister(),
                Smi::FromInt(proxy->VariableFeedbackSlot()));
      }
      CallLoadIC(CONTEXTUAL);
      context()->Plug(rax);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context slot"
                                               : "[ Stack slot");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        DCHECK(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
          DCHECK(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          Label done;
          GetVar(rax, var);
          __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
          __ j(not_equal, &done, Label::kNear);
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            __ Push(var->name());
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
          } else {
            // Uninitialized const bindings outside of harmony mode are unholed.
            DCHECK(var->mode() == CONST_LEGACY);
            __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
          }
          __ bind(&done);
          context()->Plug(rax);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup slot");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      __ Push(rsi);  // Context.
      __ Push(var->name());
      __ CallRuntime(Runtime::kLoadLookupSlot, 2);
      __ bind(&done);
      context()->Plug(rax);
      break;
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // rdi = JS function.
  // rcx = literals array.
  // rbx = regexp literal.
  // rax = regexp literal clone.
  __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ movp(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ movp(rbx, FieldOperand(rcx, literal_offset));
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ j(not_equal, &materialized, Label::kNear);

  // Create regexp literal using runtime function
  // Result will be in rax.
  __ Push(rcx);
  __ Push(Smi::FromInt(expr->literal_index()));
  __ Push(expr->pattern());
  __ Push(expr->flags());
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ movp(rbx, rax);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ Push(rbx);
  __ Push(Smi::FromInt(size));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ Pop(rbx);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ movp(rdx, FieldOperand(rbx, i));
    __ movp(rcx, FieldOperand(rbx, i + kPointerSize));
    __ movp(FieldOperand(rax, i), rdx);
    __ movp(FieldOperand(rax, i + kPointerSize), rcx);
  }
  if ((size % (2 * kPointerSize)) != 0) {
    __ movp(rdx, FieldOperand(rbx, size - kPointerSize));
    __ movp(FieldOperand(rax, size - kPointerSize), rdx);
  }
  context()->Plug(rax);
}


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ PushRoot(Heap::kNullValueRootIndex);
  } else {
    VisitForStackValue(expression);
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  expr->BuildConstantProperties(isolate());
  Handle<FixedArray> constant_properties = expr->constant_properties();
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  int properties_count = constant_properties->length() / 2;
  if (expr->may_store_doubles() || expr->depth() > 1 ||
      masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements ||
      properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
    __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    __ Push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
    __ Push(Smi::FromInt(expr->literal_index()));
    __ Push(constant_properties);
    __ Push(Smi::FromInt(flags));
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    __ movp(rax, FieldOperand(rdi, JSFunction::kLiteralsOffset));
    __ Move(rbx, Smi::FromInt(expr->literal_index()));
    __ Move(rcx, constant_properties);
    __ Move(rdx, Smi::FromInt(flags));
    FastCloneShallowObjectStub stub(isolate(), properties_count);
    __ CallStub(&stub);
  }

  // If result_saved is true the result is on top of the stack.  If
  // result_saved is false the result is in rax.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ Push(rax);  // Save result on the stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(rax));
            __ Move(StoreDescriptor::NameRegister(), key->value());
            __ movp(StoreDescriptor::ReceiverRegister(), Operand(rsp, 0));
            CallStoreIC(key->LiteralFeedbackId());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
          } else {
            VisitForEffect(value);
          }
          break;
        }
        __ Push(Operand(rsp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ Push(Smi::FromInt(SLOPPY));  // Language mode
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        __ Push(Operand(rsp, 0));  // Duplicate receiver.
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kSetPrototype, 2);
        } else {
          __ Drop(2);
        }
        break;
      case ObjectLiteral::Property::GETTER:
        accessor_table.lookup(key)->second->getter = value;
        break;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ Push(Operand(rsp, 0));  // Duplicate receiver.
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ Push(Smi::FromInt(NONE));
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
  }

  if (expr->has_function()) {
    DCHECK(result_saved);
    __ Push(Operand(rsp, 0));
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(rax);
  }
}
1752 
1753 
1754 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1755  Comment cmnt(masm_, "[ ArrayLiteral");
1756 
1757  expr->BuildConstantElements(isolate());
1758  int flags = expr->depth() == 1
1759  ? ArrayLiteral::kShallowElements
1760  : ArrayLiteral::kNoFlags;
1761 
1762  ZoneList<Expression*>* subexprs = expr->values();
1763  int length = subexprs->length();
1764  Handle<FixedArray> constant_elements = expr->constant_elements();
1765  DCHECK_EQ(2, constant_elements->length());
1766  ElementsKind constant_elements_kind =
1767  static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1768  bool has_constant_fast_elements =
1769  IsFastObjectElementsKind(constant_elements_kind);
1770  Handle<FixedArrayBase> constant_elements_values(
1771  FixedArrayBase::cast(constant_elements->get(1)));
1772 
1773  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1774  if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
1775  // If the only customer of allocation sites is transitioning, then we can
1776  // turn tracking off when there is nowhere left to transition to.
1777  allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1778  }
1779 
1780  if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
1781  __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1782  __ Push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
1783  __ Push(Smi::FromInt(expr->literal_index()));
1784  __ Push(constant_elements);
1785  __ Push(Smi::FromInt(flags));
1786  __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1787  } else {
1788  __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1789  __ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
1790  __ Move(rbx, Smi::FromInt(expr->literal_index()));
1791  __ Move(rcx, constant_elements);
1792  FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1793  __ CallStub(&stub);
1794  }
1795 
1796  bool result_saved = false; // Is the result saved to the stack?
1797 
1798  // Emit code to evaluate all the non-constant subexpressions and to store
1799  // them into the newly cloned array.
1800  for (int i = 0; i < length; i++) {
1801  Expression* subexpr = subexprs->at(i);
1802  // If the subexpression is a literal or a simple materialized literal, it
1803  // is already set in the cloned array.
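  // For example, in [1, foo(), 3] only foo() is evaluated and stored here;
  // the constants 1 and 3 already arrived with the boilerplate clone.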
1804  if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1805 
1806  if (!result_saved) {
1807  __ Push(rax); // array literal
1808  __ Push(Smi::FromInt(expr->literal_index()));
1809  result_saved = true;
1810  }
1811  VisitForAccumulatorValue(subexpr);
1812 
1813  if (IsFastObjectElementsKind(constant_elements_kind)) {
1814  // Fast-case array literals with ElementsKind of FAST_*_ELEMENTS cannot
1815  // transition, so they don't need to call the runtime stub.
1816  int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1817  __ movp(rbx, Operand(rsp, kPointerSize)); // Copy of array literal.
1818  __ movp(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
1819  // Store the subexpression value in the array's elements.
1820  __ movp(FieldOperand(rbx, offset), result_register());
1821  // Update the write barrier for the array store.
1822  __ RecordWriteField(rbx, offset, result_register(), rcx,
1823  kDontSaveFPRegs,
1824  EMIT_REMEMBERED_SET,
1825  INLINE_SMI_CHECK);
1826  } else {
1827  // Store the subexpression value in the array's elements.
1828  __ Move(rcx, Smi::FromInt(i));
1829  StoreArrayLiteralElementStub stub(isolate());
1830  __ CallStub(&stub);
1831  }
1832 
1833  PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1834  }
1835 
1836  if (result_saved) {
1837  __ addp(rsp, Immediate(kPointerSize)); // literal index
1838  context()->PlugTOS();
1839  } else {
1840  context()->Plug(rax);
1841  }
1842 }
1843 
1844 
1845 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1846  DCHECK(expr->target()->IsValidReferenceExpression());
1847 
1848  Comment cmnt(masm_, "[ Assignment");
1849 
1850  // Left-hand side can only be a property, a global or a (parameter or local)
1851  // slot.
1852  enum LhsKind {
1853  VARIABLE,
1854  NAMED_PROPERTY,
1855  KEYED_PROPERTY,
1856  NAMED_SUPER_PROPERTY
1857  };
1858  LhsKind assign_type = VARIABLE;
1859  Property* property = expr->target()->AsProperty();
1860  if (property != NULL) {
1861  assign_type = (property->key()->IsPropertyName())
1862  ? (property->IsSuperAccess() ? NAMED_SUPER_PROPERTY
1863  : NAMED_PROPERTY)
1864  : KEYED_PROPERTY;
1865  }
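  // For example: x = v (VARIABLE), o.x = v (NAMED_PROPERTY),
  // o[k] = v (KEYED_PROPERTY), super.x = v (NAMED_SUPER_PROPERTY).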
1866 
1867  // Evaluate LHS expression.
1868  switch (assign_type) {
1869  case VARIABLE:
1870  // Nothing to do here.
1871  break;
1872  case NAMED_PROPERTY:
1873  if (expr->is_compound()) {
1874  // We need the receiver both on the stack and in the register.
1875  VisitForStackValue(property->obj());
1876  __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
1877  } else {
1878  VisitForStackValue(property->obj());
1879  }
1880  break;
1881  case NAMED_SUPER_PROPERTY:
1882  VisitForStackValue(property->obj()->AsSuperReference()->this_var());
1883  EmitLoadHomeObject(property->obj()->AsSuperReference());
1884  __ Push(result_register());
1885  if (expr->is_compound()) {
1886  __ Push(MemOperand(rsp, kPointerSize));
1887  __ Push(result_register());
1888  }
1889  break;
1890  case KEYED_PROPERTY: {
1891  if (expr->is_compound()) {
1892  VisitForStackValue(property->obj());
1893  VisitForStackValue(property->key());
1894  __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, kPointerSize));
1895  __ movp(LoadDescriptor::NameRegister(), Operand(rsp, 0));
1896  } else {
1897  VisitForStackValue(property->obj());
1898  VisitForStackValue(property->key());
1899  }
1900  break;
1901  }
1902  }
1903 
1904  // For compound assignments we need another deoptimization point after the
1905  // variable/property load.
1906  if (expr->is_compound()) {
1907  { AccumulatorValueContext context(this);
1908  switch (assign_type) {
1909  case VARIABLE:
1910  EmitVariableLoad(expr->target()->AsVariableProxy());
1911  PrepareForBailout(expr->target(), TOS_REG);
1912  break;
1913  case NAMED_PROPERTY:
1914  EmitNamedPropertyLoad(property);
1915  PrepareForBailoutForId(property->LoadId(), TOS_REG);
1916  break;
1917  case NAMED_SUPER_PROPERTY:
1918  EmitNamedSuperPropertyLoad(property);
1919  PrepareForBailoutForId(property->LoadId(), TOS_REG);
1920  break;
1921  case KEYED_PROPERTY:
1922  EmitKeyedPropertyLoad(property);
1923  PrepareForBailoutForId(property->LoadId(), TOS_REG);
1924  break;
1925  }
1926  }
1927 
1928  Token::Value op = expr->binary_op();
1929  __ Push(rax); // Left operand goes on the stack.
1930  VisitForAccumulatorValue(expr->value());
1931 
1932  OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1933  ? OVERWRITE_RIGHT
1934  : NO_OVERWRITE;
1935  SetSourcePosition(expr->position() + 1);
1936  AccumulatorValueContext context(this);
1937  if (ShouldInlineSmiCase(op)) {
1938  EmitInlineSmiBinaryOp(expr->binary_operation(),
1939  op,
1940  mode,
1941  expr->target(),
1942  expr->value());
1943  } else {
1944  EmitBinaryOp(expr->binary_operation(), op, mode);
1945  }
1946  // Deoptimization point in case the binary operation may have side effects.
1947  PrepareForBailout(expr->binary_operation(), TOS_REG);
1948  } else {
1949  VisitForAccumulatorValue(expr->value());
1950  }
1951 
1952  // Record source position before possible IC call.
1953  SetSourcePosition(expr->position());
1954 
1955  // Store the value.
1956  switch (assign_type) {
1957  case VARIABLE:
1958  EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1959  expr->op());
1960  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1961  context()->Plug(rax);
1962  break;
1963  case NAMED_PROPERTY:
1964  EmitNamedPropertyAssignment(expr);
1965  break;
1966  case NAMED_SUPER_PROPERTY:
1967  EmitNamedSuperPropertyAssignment(expr);
1968  break;
1969  case KEYED_PROPERTY:
1970  EmitKeyedPropertyAssignment(expr);
1971  break;
1972  }
1973 }
1974 
1975 
1976 void FullCodeGenerator::VisitYield(Yield* expr) {
1977  Comment cmnt(masm_, "[ Yield");
1978  // Evaluate yielded value first; the initial iterator definition depends on
1979  // this. It stays on the stack while we update the iterator.
1980  VisitForStackValue(expr->expression());
1981 
1982  switch (expr->yield_kind()) {
1983  case Yield::kSuspend:
1984  // Pop value from top-of-stack slot; box result into result register.
1985  EmitCreateIteratorResult(false);
1986  __ Push(result_register());
1987  // Fall through.
1988  case Yield::kInitial: {
1989  Label suspend, continuation, post_runtime, resume;
1990 
1991  __ jmp(&suspend);
1992 
1993  __ bind(&continuation);
1994  __ jmp(&resume);
1995 
1996  __ bind(&suspend);
1997  VisitForAccumulatorValue(expr->generator_object());
1998  DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1999  __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset),
2000  Smi::FromInt(continuation.pos()));
2001  __ movp(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi);
2002  __ movp(rcx, rsi);
2003  __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx,
2004  kDontSaveFPRegs);
2005  __ leap(rbx, Operand(rbp, StandardFrameConstants::kExpressionsOffset));
2006  __ cmpp(rsp, rbx);
2007  __ j(equal, &post_runtime);
2008  __ Push(rax); // generator object
2009  __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2010  __ movp(context_register(),
2011  Operand(rbp, StandardFrameConstants::kContextOffset));
2012  __ bind(&post_runtime);
2013 
2014  __ Pop(result_register());
2015  EmitReturnSequence();
2016 
2017  __ bind(&resume);
2018  context()->Plug(result_register());
2019  break;
2020  }
2021 
2022  case Yield::kFinal: {
2023  VisitForAccumulatorValue(expr->generator_object());
2024  __ Move(FieldOperand(result_register(),
2025  JSGeneratorObject::kContinuationOffset),
2026  Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
2027  // Pop value from top-of-stack slot, box result into result register.
2028  EmitCreateIteratorResult(true);
2029  EmitUnwindBeforeReturn();
2030  EmitReturnSequence();
2031  break;
2032  }
2033 
2034  case Yield::kDelegating: {
2035  VisitForStackValue(expr->generator_object());
2036 
2037  // Initial stack layout is as follows:
2038  // [sp + 1 * kPointerSize] iter
2039  // [sp + 0 * kPointerSize] g
2040 
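  // In outline, the delegation below performs:
  //   received = undefined;
  //   loop: result = iter[f](received);   // f is 'next', or 'throw' after
  //         if (result.done) break;       // an exception enters l_catch
  //         received = yield result;      // result is passed out un-reboxed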
2041  Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2042  Label l_next, l_call, l_loop;
2043  Register load_receiver = LoadDescriptor::ReceiverRegister();
2044  Register load_name = LoadDescriptor::NameRegister();
2045 
2046  // Initial send value is undefined.
2047  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
2048  __ jmp(&l_next);
2049 
2050  // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2051  __ bind(&l_catch);
2052  handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2053  __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
2054  __ Push(load_name);
2055  __ Push(Operand(rsp, 2 * kPointerSize)); // iter
2056  __ Push(rax); // exception
2057  __ jmp(&l_call);
2058 
2059  // try { received = %yield result }
2060  // Shuffle the received result above a try handler and yield it without
2061  // re-boxing.
2062  __ bind(&l_try);
2063  __ Pop(rax); // result
2064  __ PushTryHandler(StackHandler::CATCH, expr->index());
2065  const int handler_size = StackHandlerConstants::kSize;
2066  __ Push(rax); // result
2067  __ jmp(&l_suspend);
2068  __ bind(&l_continuation);
2069  __ jmp(&l_resume);
2070  __ bind(&l_suspend);
2071  const int generator_object_depth = kPointerSize + handler_size;
2072  __ movp(rax, Operand(rsp, generator_object_depth));
2073  __ Push(rax); // g
2074  DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2075  __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset),
2076  Smi::FromInt(l_continuation.pos()));
2077  __ movp(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi);
2078  __ movp(rcx, rsi);
2079  __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx,
2080  kDontSaveFPRegs);
2081  __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2082  __ movp(context_register(),
2083  Operand(rbp, StandardFrameConstants::kContextOffset));
2084  __ Pop(rax); // result
2085  EmitReturnSequence();
2086  __ bind(&l_resume); // received in rax
2087  __ PopTryHandler();
2088 
2089  // receiver = iter; f = 'next'; arg = received;
2090  __ bind(&l_next);
2091 
2092  __ LoadRoot(load_name, Heap::knext_stringRootIndex);
2093  __ Push(load_name); // "next"
2094  __ Push(Operand(rsp, 2 * kPointerSize)); // iter
2095  __ Push(rax); // received
2096 
2097  // result = receiver[f](arg);
2098  __ bind(&l_call);
2099  __ movp(load_receiver, Operand(rsp, kPointerSize));
2100  if (FLAG_vector_ics) {
2101  __ Move(VectorLoadICDescriptor::SlotRegister(),
2102  Smi::FromInt(expr->KeyedLoadFeedbackSlot()));
2103  }
2104  Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2105  CallIC(ic, TypeFeedbackId::None());
2106  __ movp(rdi, rax);
2107  __ movp(Operand(rsp, 2 * kPointerSize), rdi);
2108  CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2109  __ CallStub(&stub);
2110 
2111  __ movp(context_register(), Operand(rbp, StandardFrameConstants::kContextOffset));
2112  __ Drop(1); // The function is still on the stack; drop it.
2113 
2114  // if (!result.done) goto l_try;
2115  __ bind(&l_loop);
2116  __ Move(load_receiver, rax);
2117  __ Push(load_receiver); // save result
2118  __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2119  if (FLAG_vector_ics) {
2120  __ Move(VectorLoadICDescriptor::SlotRegister(),
2121  Smi::FromInt(expr->DoneFeedbackSlot()));
2122  }
2123  CallLoadIC(NOT_CONTEXTUAL); // rax=result.done
2124  Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2125  CallIC(bool_ic);
2126  __ testp(result_register(), result_register());
2127  __ j(zero, &l_try);
2128 
2129  // result.value
2130  __ Pop(load_receiver); // result
2131  __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2132  if (FLAG_vector_ics) {
2133  __ Move(VectorLoadICDescriptor::SlotRegister(),
2134  Smi::FromInt(expr->ValueFeedbackSlot()));
2135  }
2136  CallLoadIC(NOT_CONTEXTUAL); // result.value in rax
2137  context()->DropAndPlug(2, rax); // drop iter and g
2138  break;
2139  }
2140  }
2141 }
2142 
2143 
2144 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2145  Expression *value,
2146  JSGeneratorObject::ResumeMode resume_mode) {
2147  // The value stays in rax, and is ultimately read by the resumed generator, as
2148  // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2149  // is read to throw the value when the resumed generator is already closed.
2150  // rbx will hold the generator object until the activation has been resumed.
2151  VisitForStackValue(generator);
2152  VisitForAccumulatorValue(value);
2153  __ Pop(rbx);
2154 
2155  // Check generator state.
2156  Label wrong_state, closed_state, done;
2157  STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
2158  STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
2159  __ SmiCompare(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset),
2160  Smi::FromInt(0));
2161  __ j(equal, &closed_state);
2162  __ j(less, &wrong_state);
2163 
2164  // Load suspended function and context.
2165  __ movp(rsi, FieldOperand(rbx, JSGeneratorObject::kContextOffset));
2166  __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));
2167 
2168  // Push receiver.
2169  __ Push(FieldOperand(rbx, JSGeneratorObject::kReceiverOffset));
2170 
2171  // Push holes for arguments to generator function.
2172  __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2173  __ LoadSharedFunctionInfoSpecialField(rdx, rdx,
2174  SharedFunctionInfo::kFormalParameterCountOffset);
2175  __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex);
2176  Label push_argument_holes, push_frame;
2177  __ bind(&push_argument_holes);
2178  __ subp(rdx, Immediate(1));
2179  __ j(carry, &push_frame);
2180  __ Push(rcx);
2181  __ jmp(&push_argument_holes);
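  // The loop above pushes one hole per formal parameter: rdx counts down
  // from the formal parameter count, and the carry from decrementing past
  // zero exits to push_frame.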
2182 
2183  // Enter a new JavaScript frame, and initialize its slots as they were when
2184  // the generator was suspended.
2185  Label resume_frame;
2186  __ bind(&push_frame);
2187  __ call(&resume_frame);
2188  __ jmp(&done);
2189  __ bind(&resume_frame);
2190  __ pushq(rbp); // Caller's frame pointer.
2191  __ movp(rbp, rsp);
2192  __ Push(rsi); // Callee's context.
2193  __ Push(rdi); // Callee's JS Function.
2194 
2195  // Load the operand stack size.
2196  __ movp(rdx, FieldOperand(rbx, JSGeneratorObject::kOperandStackOffset));
2197  __ movp(rdx, FieldOperand(rdx, FixedArray::kLengthOffset));
2198  __ SmiToInteger32(rdx, rdx);
2199 
2200  // If we are sending a value and there is no operand stack, we can jump back
2201  // in directly.
2202  if (resume_mode == JSGeneratorObject::NEXT) {
2203  Label slow_resume;
2204  __ cmpp(rdx, Immediate(0));
2205  __ j(not_zero, &slow_resume);
2206  __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
2207  __ SmiToInteger64(rcx,
2208  FieldOperand(rbx, JSGeneratorObject::kContinuationOffset));
2209  __ addp(rdx, rcx);
2210  __ Move(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset),
2211  Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
2212  __ jmp(rdx);
2213  __ bind(&slow_resume);
2214  }
2215 
2216  // Otherwise, we push holes for the operand stack and call the runtime to fix
2217  // up the stack and the handlers.
2218  Label push_operand_holes, call_resume;
2219  __ bind(&push_operand_holes);
2220  __ subp(rdx, Immediate(1));
2221  __ j(carry, &call_resume);
2222  __ Push(rcx);
2223  __ jmp(&push_operand_holes);
2224  __ bind(&call_resume);
2225  __ Push(rbx);
2226  __ Push(result_register());
2227  __ Push(Smi::FromInt(resume_mode));
2228  __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2229  // Not reached: the runtime call returns elsewhere.
2230  __ Abort(kGeneratorFailedToResume);
2231 
2232  // Reach here when generator is closed.
2233  __ bind(&closed_state);
2234  if (resume_mode == JSGeneratorObject::NEXT) {
2235  // Return completed iterator result when generator is closed.
2236  __ PushRoot(Heap::kUndefinedValueRootIndex);
2237  // Pop value from top-of-stack slot; box result into result register.
2238  EmitCreateIteratorResult(true);
2239  } else {
2240  // Throw the provided value.
2241  __ Push(rax);
2242  __ CallRuntime(Runtime::kThrow, 1);
2243  }
2244  __ jmp(&done);
2245 
2246  // Throw error if we attempt to operate on a running generator.
2247  __ bind(&wrong_state);
2248  __ Push(rbx);
2249  __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
2250 
2251  __ bind(&done);
2252  context()->Plug(result_register());
2253 }
2254 
2255 
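  // Builds an iterator result object of the shape { value: <top of stack>,
  // done: <done> } in rax, falling back to the runtime when allocation fails.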
2256 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2257  Label gc_required;
2258  Label allocated;
2259 
2260  Handle<Map> map(isolate()->native_context()->iterator_result_map());
2261 
2262  __ Allocate(map->instance_size(), rax, rcx, rdx, &gc_required, TAG_OBJECT);
2263  __ jmp(&allocated);
2264 
2265  __ bind(&gc_required);
2266  __ Push(Smi::FromInt(map->instance_size()));
2267  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2268  __ movp(context_register(),
2269  Operand(rbp, StandardFrameConstants::kContextOffset));
2270 
2271  __ bind(&allocated);
2272  __ Move(rbx, map);
2273  __ Pop(rcx);
2274  __ Move(rdx, isolate()->factory()->ToBoolean(done));
2275  DCHECK_EQ(map->instance_size(), 5 * kPointerSize);
2276  __ movp(FieldOperand(rax, HeapObject::kMapOffset), rbx);
2277  __ Move(FieldOperand(rax, JSObject::kPropertiesOffset),
2278  isolate()->factory()->empty_fixed_array());
2279  __ Move(FieldOperand(rax, JSObject::kElementsOffset),
2280  isolate()->factory()->empty_fixed_array());
2281  __ movp(FieldOperand(rax, JSGeneratorObject::kResultValuePropertyOffset),
2282  rcx);
2283  __ movp(FieldOperand(rax, JSGeneratorObject::kResultDonePropertyOffset),
2284  rdx);
2285 
2286  // Only the value field needs a write barrier, as the other values are in the
2287  // root set.
2288  __ RecordWriteField(rax, JSGeneratorObject::kResultValuePropertyOffset,
2289  rcx, rdx, kDontSaveFPRegs);
2290 }
2291 
2292 
2293 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2294  SetSourcePosition(prop->position());
2295  Literal* key = prop->key()->AsLiteral();
2296  DCHECK(!prop->IsSuperAccess());
2297 
2298  __ Move(LoadDescriptor::NameRegister(), key->value());
2299  if (FLAG_vector_ics) {
2300  __ Move(VectorLoadICDescriptor::SlotRegister(),
2301  Smi::FromInt(prop->PropertyFeedbackSlot()));
2302  CallLoadIC(NOT_CONTEXTUAL);
2303  } else {
2304  CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2305  }
2306 }
2307 
2308 
2309 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2310  // Stack: receiver, home_object
2311  SetSourcePosition(prop->position());
2312  Literal* key = prop->key()->AsLiteral();
2313  DCHECK(!key->value()->IsSmi());
2314  DCHECK(prop->IsSuperAccess());
2315 
2316  __ Push(key->value());
2317  __ CallRuntime(Runtime::kLoadFromSuper, 3);
2318 }
2319 
2320 
2321 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2322  SetSourcePosition(prop->position());
2323  Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2324  if (FLAG_vector_ics) {
2325  __ Move(VectorLoadICDescriptor::SlotRegister(),
2326  Smi::FromInt(prop->PropertyFeedbackSlot()));
2327  CallIC(ic);
2328  } else {
2329  CallIC(ic, prop->PropertyFeedbackId());
2330  }
2331 }
2332 
2333 
2334 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2335  Token::Value op,
2336  OverwriteMode mode,
2337  Expression* left,
2338  Expression* right) {
2339  // Do combined smi check of the operands. Left operand is on the
2340  // stack (popped into rdx). Right operand is in rax but moved into
2341  // rcx to make the shifts easier.
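  // Or-ing the operands into rax lets a single tag test cover both inputs:
  // since the smi tag is zero, the result is a smi only if both were smis.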
2342  Label done, stub_call, smi_case;
2343  __ Pop(rdx);
2344  __ movp(rcx, rax);
2345  __ orp(rax, rdx);
2346  JumpPatchSite patch_site(masm_);
2347  patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear);
2348 
2349  __ bind(&stub_call);
2350  __ movp(rax, rcx);
2351  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2352  CallIC(code, expr->BinaryOperationFeedbackId());
2353  patch_site.EmitPatchInfo();
2354  __ jmp(&done, Label::kNear);
2355 
2356  __ bind(&smi_case);
2357  switch (op) {
2358  case Token::SAR:
2359  __ SmiShiftArithmeticRight(rax, rdx, rcx);
2360  break;
2361  case Token::SHL:
2362  __ SmiShiftLeft(rax, rdx, rcx, &stub_call);
2363  break;
2364  case Token::SHR:
2365  __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call);
2366  break;
2367  case Token::ADD:
2368  __ SmiAdd(rax, rdx, rcx, &stub_call);
2369  break;
2370  case Token::SUB:
2371  __ SmiSub(rax, rdx, rcx, &stub_call);
2372  break;
2373  case Token::MUL:
2374  __ SmiMul(rax, rdx, rcx, &stub_call);
2375  break;
2376  case Token::BIT_OR:
2377  __ SmiOr(rax, rdx, rcx);
2378  break;
2379  case Token::BIT_AND:
2380  __ SmiAnd(rax, rdx, rcx);
2381  break;
2382  case Token::BIT_XOR:
2383  __ SmiXor(rax, rdx, rcx);
2384  break;
2385  default:
2386  UNREACHABLE();
2387  break;
2388  }
2389 
2390  __ bind(&done);
2391  context()->Plug(rax);
2392 }
2393 
2394 
2395 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2396  Token::Value op,
2397  OverwriteMode mode) {
2398  __ Pop(rdx);
2399  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2400  JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2401  CallIC(code, expr->BinaryOperationFeedbackId());
2402  patch_site.EmitPatchInfo();
2403  context()->Plug(rax);
2404 }
2405 
2406 
2407 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2408  DCHECK(expr->IsValidReferenceExpression());
2409 
2410  // Left-hand side can only be a property, a global or a (parameter or local)
2411  // slot.
2412  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2413  LhsKind assign_type = VARIABLE;
2414  Property* prop = expr->AsProperty();
2415  if (prop != NULL) {
2416  assign_type = (prop->key()->IsPropertyName())
2417  ? NAMED_PROPERTY
2418  : KEYED_PROPERTY;
2419  }
2420 
2421  switch (assign_type) {
2422  case VARIABLE: {
2423  Variable* var = expr->AsVariableProxy()->var();
2424  EffectContext context(this);
2425  EmitVariableAssignment(var, Token::ASSIGN);
2426  break;
2427  }
2428  case NAMED_PROPERTY: {
2429  __ Push(rax); // Preserve value.
2430  VisitForAccumulatorValue(prop->obj());
2431  __ Move(StoreDescriptor::ReceiverRegister(), rax);
2432  __ Pop(StoreDescriptor::ValueRegister()); // Restore value.
2433  __ Move(StoreDescriptor::NameRegister(),
2434  prop->key()->AsLiteral()->value());
2435  CallStoreIC();
2436  break;
2437  }
2438  case KEYED_PROPERTY: {
2439  __ Push(rax); // Preserve value.
2440  VisitForStackValue(prop->obj());
2441  VisitForAccumulatorValue(prop->key());
2442  __ Move(StoreDescriptor::NameRegister(), rax);
2443  __ Pop(StoreDescriptor::ReceiverRegister());
2444  __ Pop(StoreDescriptor::ValueRegister()); // Restore value.
2445  Handle<Code> ic =
2446  CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2447  CallIC(ic);
2448  break;
2449  }
2450  }
2451  context()->Plug(rax);
2452 }
2453 
2454 
2455 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2456  Variable* var, MemOperand location) {
2457  __ movp(location, rax);
2458  if (var->IsContextSlot()) {
2459  __ movp(rdx, rax);
2460  __ RecordWriteContextSlot(
2461  rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
2462  }
2463 }
2464 
2465 
2466 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2467  Token::Value op) {
2468  if (var->IsUnallocated()) {
2469  // Global var, const, or let.
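  // For example, a global store like x = v is compiled as a named StoreIC
  // against the global object.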
2470  __ Move(StoreDescriptor::NameRegister(), var->name());
2471  __ movp(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2472  CallStoreIC();
2473 
2474  } else if (op == Token::INIT_CONST_LEGACY) {
2475  // Const initializers need a write barrier.
2476  DCHECK(!var->IsParameter()); // No const parameters.
2477  if (var->IsLookupSlot()) {
2478  __ Push(rax);
2479  __ Push(rsi);
2480  __ Push(var->name());
2481  __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2482  } else {
2483  DCHECK(var->IsStackLocal() || var->IsContextSlot());
2484  Label skip;
2485  MemOperand location = VarOperand(var, rcx);
2486  __ movp(rdx, location);
2487  __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2488  __ j(not_equal, &skip);
2489  EmitStoreToStackLocalOrContextSlot(var, location);
2490  __ bind(&skip);
2491  }
2492 
2493  } else if (var->mode() == LET && op != Token::INIT_LET) {
2494  // Non-initializing assignment to let variable needs a write barrier.
2495  DCHECK(!var->IsLookupSlot());
2496  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2497  Label assign;
2498  MemOperand location = VarOperand(var, rcx);
2499  __ movp(rdx, location);
2500  __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2501  __ j(not_equal, &assign, Label::kNear);
2502  __ Push(var->name());
2503  __ CallRuntime(Runtime::kThrowReferenceError, 1);
2504  __ bind(&assign);
2505  EmitStoreToStackLocalOrContextSlot(var, location);
2506 
2507  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2508  if (var->IsLookupSlot()) {
2509  // Assignment to var.
2510  __ Push(rax); // Value.
2511  __ Push(rsi); // Context.
2512  __ Push(var->name());
2513  __ Push(Smi::FromInt(strict_mode()));
2514  __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2515  } else {
2516  // Assignment to var or initializing assignment to let/const in harmony
2517  // mode.
2518  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2519  MemOperand location = VarOperand(var, rcx);
2520  if (generate_debug_code_ && op == Token::INIT_LET) {
2521  // Check for an uninitialized let binding.
2522  __ movp(rdx, location);
2523  __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2524  __ Check(equal, kLetBindingReInitialization);
2525  }
2526  EmitStoreToStackLocalOrContextSlot(var, location);
2527  }
2528  }
2529  // Non-initializing assignments to consts are ignored.
2530 }
2531 
2532 
2533 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2534  // Assignment to a property, using a named store IC.
2535  Property* prop = expr->target()->AsProperty();
2536  DCHECK(prop != NULL);
2537  DCHECK(prop->key()->IsLiteral());
2538 
2539  // Record source code position before IC call.
2540  SetSourcePosition(expr->position());
2541  __ Move(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value());
2542  __ Pop(StoreDescriptor::ReceiverRegister());
2543  CallStoreIC(expr->AssignmentFeedbackId());
2544 
2545  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2546  context()->Plug(rax);
2547 }
2548 
2549 
2550 void FullCodeGenerator::EmitNamedSuperPropertyAssignment(Assignment* expr) {
2551  // Assignment to named property of super.
2552  // rax : value
2553  // stack : receiver ('this'), home_object
2554  Property* prop = expr->target()->AsProperty();
2555  DCHECK(prop != NULL);
2556  Literal* key = prop->key()->AsLiteral();
2557  DCHECK(key != NULL);
2558 
2559  __ Push(rax);
2560  __ Push(key->value());
2561  __ CallRuntime((strict_mode() == STRICT ? Runtime::kStoreToSuper_Strict
2562  : Runtime::kStoreToSuper_Sloppy),
2563  4);
2564  context()->Plug(rax);
2565 }
2566 
2567 
2568 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2569  // Assignment to a property, using a keyed store IC.
2570 
2571  __ Pop(StoreDescriptor::NameRegister()); // Key.
2572  __ Pop(StoreDescriptor::ReceiverRegister());
2573  DCHECK(StoreDescriptor::ValueRegister().is(rax));
2574  // Record source code position before IC call.
2575  SetSourcePosition(expr->position());
2576  Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2577  CallIC(ic, expr->AssignmentFeedbackId());
2578 
2579  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2580  context()->Plug(rax);
2581 }
2582 
2583 
2584 void FullCodeGenerator::VisitProperty(Property* expr) {
2585  Comment cmnt(masm_, "[ Property");
2586  Expression* key = expr->key();
2587 
2588  if (key->IsPropertyName()) {
2589  if (!expr->IsSuperAccess()) {
2590  VisitForAccumulatorValue(expr->obj());
2592  __ movp(LoadDescriptor::ReceiverRegister(), rax);
2593  EmitNamedPropertyLoad(expr);
2594  } else {
2595  VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
2596  EmitLoadHomeObject(expr->obj()->AsSuperReference());
2597  __ Push(result_register());
2598  EmitNamedSuperPropertyLoad(expr);
2599  }
2600  PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2601  context()->Plug(rax);
2602  } else {
2603  VisitForStackValue(expr->obj());
2604  VisitForAccumulatorValue(expr->key());
2605  __ Move(LoadDescriptor::NameRegister(), rax);
2606  __ Pop(LoadDescriptor::ReceiverRegister());
2607  EmitKeyedPropertyLoad(expr);
2608  context()->Plug(rax);
2609  }
2610 }
2611 
2612 
2613 void FullCodeGenerator::CallIC(Handle<Code> code,
2614  TypeFeedbackId ast_id) {
2615  ic_total_count_++;
2616  __ call(code, RelocInfo::CODE_TARGET, ast_id);
2617 }
2618 
2619 
2620 // Code common for calls using the IC.
2621 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2622  Expression* callee = expr->expression();
2623 
2624  CallICState::CallType call_type =
2625  callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2626  // Get the target function.
2627  if (call_type == CallICState::FUNCTION) {
2628  { StackValueContext context(this);
2629  EmitVariableLoad(callee->AsVariableProxy());
2630  PrepareForBailout(callee, NO_REGISTERS);
2631  }
2632  // Push undefined as receiver. This is patched in the method prologue if it
2633  // is a sloppy mode method.
2634  __ Push(isolate()->factory()->undefined_value());
2635  } else {
2636  // Load the function from the receiver.
2637  DCHECK(callee->IsProperty());
2638  DCHECK(!callee->AsProperty()->IsSuperAccess());
2639  __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
2640  EmitNamedPropertyLoad(callee->AsProperty());
2641  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2642  // Push the target function under the receiver.
2643  __ Push(Operand(rsp, 0));
2644  __ movp(Operand(rsp, kPointerSize), rax);
2645  }
2646 
2647  EmitCall(expr, call_type);
2648 }
2649 
2650 
2651 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2652  Expression* callee = expr->expression();
2653  DCHECK(callee->IsProperty());
2654  Property* prop = callee->AsProperty();
2655  DCHECK(prop->IsSuperAccess());
2656 
2657  SetSourcePosition(prop->position());
2658  Literal* key = prop->key()->AsLiteral();
2659  DCHECK(!key->value()->IsSmi());
2660  // Load the function from the receiver.
2661  SuperReference* super_ref = prop->obj()->AsSuperReference();
2662  EmitLoadHomeObject(super_ref);
2663  __ Push(rax);
2664  VisitForAccumulatorValue(super_ref->this_var());
2665  __ Push(rax);
2666  __ Push(rax);
2667  __ Push(Operand(rsp, kPointerSize * 2));
2668  __ Push(key->value());
2669 
2670  // Stack here:
2671  // - home_object
2672  // - this (receiver)
2673  // - this (receiver) <-- LoadFromSuper will pop here and below.
2674  // - home_object
2675  // - key
2676  __ CallRuntime(Runtime::kLoadFromSuper, 3);
2677 
2678  // Replace home_object with target function.
2679  __ movp(Operand(rsp, kPointerSize), rax);
2680 
2681  // Stack here:
2682  // - target function
2683  // - this (receiver)
2684  EmitCall(expr, CallICState::METHOD);
2685 }
2686 
2687 
2688 // Common code for calls using the IC.
2689 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2690  Expression* key) {
2691  // Load the key.
2692  VisitForAccumulatorValue(key);
2693 
2694  Expression* callee = expr->expression();
2695 
2696  // Load the function from the receiver.
2697  DCHECK(callee->IsProperty());
2698  __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
2699  __ Move(LoadDescriptor::NameRegister(), rax);
2700  EmitKeyedPropertyLoad(callee->AsProperty());
2701  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2702 
2703  // Push the target function under the receiver.
2704  __ Push(Operand(rsp, 0));
2705  __ movp(Operand(rsp, kPointerSize), rax);
2706 
2707  EmitCall(expr, CallICState::METHOD);
2708 }
2709 
2710 
2711 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2712  // Load the arguments.
2713  ZoneList<Expression*>* args = expr->arguments();
2714  int arg_count = args->length();
2715  { PreservePositionScope scope(masm()->positions_recorder());
2716  for (int i = 0; i < arg_count; i++) {
2717  VisitForStackValue(args->at(i));
2718  }
2719  }
2720 
2721  // Record source position of the IC call.
2722  SetSourcePosition(expr->position());
2723  Handle<Code> ic = CallIC::initialize_stub(
2724  isolate(), arg_count, call_type);
2725  __ Move(rdx, Smi::FromInt(expr->CallFeedbackSlot()));
2726  __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
2727  // Don't assign a type feedback id to the IC, since type feedback is provided
2728  // by the vector above.
2729  CallIC(ic);
2730 
2731  RecordJSReturnSite(expr);
2732 
2733  // Restore context register.
2734  __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2735  // Discard the function left on TOS.
2736  context()->DropAndPlug(1, rax);
2737 }
2738 
2739 
2740 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2741  // Push copy of the first argument or undefined if it doesn't exist.
2742  if (arg_count > 0) {
2743  __ Push(Operand(rsp, arg_count * kPointerSize));
2744  } else {
2745  __ PushRoot(Heap::kUndefinedValueRootIndex);
2746  }
2747 
2748  // Push the receiver of the enclosing function and do runtime call.
2749  StackArgumentsAccessor args(rbp, info_->scope()->num_parameters());
2750  __ Push(args.GetReceiverOperand());
2751 
2752  // Push the language mode.
2753  __ Push(Smi::FromInt(strict_mode()));
2754 
2755  // Push the start position of the scope the call resides in.
2756  __ Push(Smi::FromInt(scope()->start_position()));
2757 
2758  // Do the runtime call.
2759  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2760 }
2761 
2762 
2763 void FullCodeGenerator::VisitCall(Call* expr) {
2764 #ifdef DEBUG
2765  // We want to verify that RecordJSReturnSite gets called on all paths
2766  // through this function. Avoid early returns.
2767  expr->return_is_recorded_ = false;
2768 #endif
2769 
2770  Comment cmnt(masm_, "[ Call");
2771  Expression* callee = expr->expression();
2772  Call::CallType call_type = expr->GetCallType(isolate());
2773 
2774  if (call_type == Call::POSSIBLY_EVAL_CALL) {
2775  // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2776  // to resolve the function we need to call and the receiver of the call.
2777  // Then we call the resolved function using the given arguments.
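  // For example, a direct eval('x') must read the caller's 'x', so the
  // target function and receiver can only be determined at runtime.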
2778  ZoneList<Expression*>* args = expr->arguments();
2779  int arg_count = args->length();
2780  { PreservePositionScope pos_scope(masm()->positions_recorder());
2781  VisitForStackValue(callee);
2782  __ PushRoot(Heap::kUndefinedValueRootIndex); // Reserved receiver slot.
2783 
2784  // Push the arguments.
2785  for (int i = 0; i < arg_count; i++) {
2786  VisitForStackValue(args->at(i));
2787  }
2788 
2789  // Push a copy of the function (found below the arguments) and resolve
2790  // eval.
2791  __ Push(Operand(rsp, (arg_count + 1) * kPointerSize));
2792  EmitResolvePossiblyDirectEval(arg_count);
2793 
2794  // The runtime call returns a pair of values in rax (function) and
2795  // rdx (receiver). Touch up the stack with the right values.
2796  __ movp(Operand(rsp, (arg_count + 0) * kPointerSize), rdx);
2797  __ movp(Operand(rsp, (arg_count + 1) * kPointerSize), rax);
2798  }
2799  // Record source position for debugger.
2800  SetSourcePosition(expr->position());
2801  CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
2802  __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
2803  __ CallStub(&stub);
2804  RecordJSReturnSite(expr);
2805  // Restore context register.
2806  __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2807  context()->DropAndPlug(1, rax);
2808  } else if (call_type == Call::GLOBAL_CALL) {
2809  EmitCallWithLoadIC(expr);
2810 
2811  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2812  // Call to a lookup slot (dynamically introduced variable).
2813  VariableProxy* proxy = callee->AsVariableProxy();
2814  Label slow, done;
2815 
2816  { PreservePositionScope scope(masm()->positions_recorder());
2817  // Generate code for loading from variables potentially shadowed by
2818  // eval-introduced variables.
2819  EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
2820  }
2821  __ bind(&slow);
2822  // Call the runtime to find the function to call (returned in rax) and
2823  // the object holding it (returned in rdx).
2824  __ Push(context_register());
2825  __ Push(proxy->name());
2826  __ CallRuntime(Runtime::kLoadLookupSlot, 2);
2827  __ Push(rax); // Function.
2828  __ Push(rdx); // Receiver.
2829 
2830  // If fast case code has been generated, emit code to push the function
2831  // and receiver and have the slow path jump around this code.
2832  if (done.is_linked()) {
2833  Label call;
2834  __ jmp(&call, Label::kNear);
2835  __ bind(&done);
2836  // Push function.
2837  __ Push(rax);
2838  // The receiver is implicitly the global receiver. Indicate this by
2839  // passing the hole to the call function stub.
2840  __ PushRoot(Heap::kUndefinedValueRootIndex);
2841  __ bind(&call);
2842  }
2843 
2844  // The receiver is either the global receiver or an object found by
2845  // LoadContextSlot.
2846  EmitCall(expr);
2847  } else if (call_type == Call::PROPERTY_CALL) {
2848  Property* property = callee->AsProperty();
2849  bool is_named_call = property->key()->IsPropertyName();
2850  // super.x() is handled in EmitSuperCallWithLoadIC.
2851  if (property->IsSuperAccess() && is_named_call) {
2852  EmitSuperCallWithLoadIC(expr);
2853  } else {
2854  {
2855  PreservePositionScope scope(masm()->positions_recorder());
2856  VisitForStackValue(property->obj());
2857  }
2858  if (is_named_call) {
2859  EmitCallWithLoadIC(expr);
2860  } else {
2861  EmitKeyedCallWithLoadIC(expr, property->key());
2862  }
2863  }
2864  } else {
2865  DCHECK(call_type == Call::OTHER_CALL);
2866  // Call to an arbitrary expression not handled specially above.
2867  { PreservePositionScope scope(masm()->positions_recorder());
2868  VisitForStackValue(callee);
2869  }
2870  __ PushRoot(Heap::kUndefinedValueRootIndex);
2871  // Emit function call.
2872  EmitCall(expr);
2873  }
2874 
2875 #ifdef DEBUG
2876  // RecordJSReturnSite should have been called.
2877  DCHECK(expr->return_is_recorded_);
2878 #endif
2879 }
2880 
2881 
2882 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2883  Comment cmnt(masm_, "[ CallNew");
2884  // According to ECMA-262, section 11.2.2, page 44, the function
2885  // expression in new calls must be evaluated before the
2886  // arguments.
2887 
2888  // Push constructor on the stack. If it's not a function it's used as
2889  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2890  // ignored.
2891  VisitForStackValue(expr->expression());
2892 
2893  // Push the arguments ("left-to-right") on the stack.
2894  ZoneList<Expression*>* args = expr->arguments();
2895  int arg_count = args->length();
2896  for (int i = 0; i < arg_count; i++) {
2897  VisitForStackValue(args->at(i));
2898  }
2899 
2900  // Call the construct call builtin that handles allocation and
2901  // constructor invocation.
2902  SetSourcePosition(expr->position());
2903 
2904  // Load function and argument count into rdi and rax.
2905  __ Set(rax, arg_count);
2906  __ movp(rdi, Operand(rsp, arg_count * kPointerSize));
2907 
2908  // Record call targets in unoptimized code, but not in the snapshot.
2909  if (FLAG_pretenuring_call_new) {
2910  EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
2911  DCHECK(expr->AllocationSiteFeedbackSlot() ==
2912  expr->CallNewFeedbackSlot() + 1);
2913  }
2914 
2915  __ Move(rbx, FeedbackVector());
2916  __ Move(rdx, Smi::FromInt(expr->CallNewFeedbackSlot()));
2917 
2918  CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
2919  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2920  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2921  context()->Plug(rax);
2922 }
2923 
2924 
2925 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2926  ZoneList<Expression*>* args = expr->arguments();
2927  DCHECK(args->length() == 1);
2928 
2929  VisitForAccumulatorValue(args->at(0));
2930 
2931  Label materialize_true, materialize_false;
2932  Label* if_true = NULL;
2933  Label* if_false = NULL;
2934  Label* fall_through = NULL;
2935  context()->PrepareTest(&materialize_true, &materialize_false,
2936  &if_true, &if_false, &fall_through);
2937 
2938  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2939  __ JumpIfSmi(rax, if_true);
2940  __ jmp(if_false);
2941 
2942  context()->Plug(if_true, if_false);
2943 }
2944 
2945 
2946 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2947  ZoneList<Expression*>* args = expr->arguments();
2948  DCHECK(args->length() == 1);
2949 
2950  VisitForAccumulatorValue(args->at(0));
2951 
2952  Label materialize_true, materialize_false;
2953  Label* if_true = NULL;
2954  Label* if_false = NULL;
2955  Label* fall_through = NULL;
2956  context()->PrepareTest(&materialize_true, &materialize_false,
2957  &if_true, &if_false, &fall_through);
2958 
2959  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2960  Condition non_negative_smi = masm()->CheckNonNegativeSmi(rax);
2961  Split(non_negative_smi, if_true, if_false, fall_through);
2962 
2963  context()->Plug(if_true, if_false);
2964 }
2965 
2966 
2967 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2968  ZoneList<Expression*>* args = expr->arguments();
2969  DCHECK(args->length() == 1);
2970 
2971  VisitForAccumulatorValue(args->at(0));
2972 
2973  Label materialize_true, materialize_false;
2974  Label* if_true = NULL;
2975  Label* if_false = NULL;
2976  Label* fall_through = NULL;
2977  context()->PrepareTest(&materialize_true, &materialize_false,
2978  &if_true, &if_false, &fall_through);
2979 
2980  __ JumpIfSmi(rax, if_false);
2981  __ CompareRoot(rax, Heap::kNullValueRootIndex);
2982  __ j(equal, if_true);
2983  __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2984  // Undetectable objects behave like undefined when tested with typeof.
2985  __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2986  Immediate(1 << Map::kIsUndetectable));
2987  __ j(not_zero, if_false);
2988  __ movzxbp(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
2989  __ cmpp(rbx, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2990  __ j(below, if_false);
2991  __ cmpp(rbx, Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
2992  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2993  Split(below_equal, if_true, if_false, fall_through);
2994 
2995  context()->Plug(if_true, if_false);
2996 }
2997 
2998 
2999 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3000  ZoneList<Expression*>* args = expr->arguments();
3001  DCHECK(args->length() == 1);
3002 
3003  VisitForAccumulatorValue(args->at(0));
3004 
3005  Label materialize_true, materialize_false;
3006  Label* if_true = NULL;
3007  Label* if_false = NULL;
3008  Label* fall_through = NULL;
3009  context()->PrepareTest(&materialize_true, &materialize_false,
3010  &if_true, &if_false, &fall_through);
3011 
3012  __ JumpIfSmi(rax, if_false);
3013  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rbx);
3014  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3015  Split(above_equal, if_true, if_false, fall_through);
3016 
3017  context()->Plug(if_true, if_false);
3018 }
3019 
3020 
3021 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3022  ZoneList<Expression*>* args = expr->arguments();
3023  DCHECK(args->length() == 1);
3024 
3025  VisitForAccumulatorValue(args->at(0));
3026 
3027  Label materialize_true, materialize_false;
3028  Label* if_true = NULL;
3029  Label* if_false = NULL;
3030  Label* fall_through = NULL;
3031  context()->PrepareTest(&materialize_true, &materialize_false,
3032  &if_true, &if_false, &fall_through);
3033 
3034  __ JumpIfSmi(rax, if_false);
3035  __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
3036  __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
3037  Immediate(1 << Map::kIsUndetectable));
3038  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3039  Split(not_zero, if_true, if_false, fall_through);
3040 
3041  context()->Plug(if_true, if_false);
3042 }
3043 
3044 
3045 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3046  CallRuntime* expr) {
3047  ZoneList<Expression*>* args = expr->arguments();
3048  DCHECK(args->length() == 1);
3049 
3050  VisitForAccumulatorValue(args->at(0));
3051 
3052  Label materialize_true, materialize_false, skip_lookup;
3053  Label* if_true = NULL;
3054  Label* if_false = NULL;
3055  Label* fall_through = NULL;
3056  context()->PrepareTest(&materialize_true, &materialize_false,
3057  &if_true, &if_false, &fall_through);
3058 
3059  __ AssertNotSmi(rax);
3060 
3061  // Check whether this map has already been checked to be safe for default
3062  // valueOf.
3063  __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
3064  __ testb(FieldOperand(rbx, Map::kBitField2Offset),
3065  Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
3066  __ j(not_zero, &skip_lookup);
3067 
3068  // Check for fast case object. Generate false result for slow case object.
3069  __ movp(rcx, FieldOperand(rax, JSObject::kPropertiesOffset));
3070  __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
3071  __ CompareRoot(rcx, Heap::kHashTableMapRootIndex);
3072  __ j(equal, if_false);
3073 
3074  // Look for valueOf string in the descriptor array, and indicate false if
3075  // found. Since we omit an enumeration index check, if it is added via a
3076  // transition that shares its descriptor array, this is a false positive.
3077  Label entry, loop, done;
3078 
3079  // Skip loop if no descriptors are valid.
3080  __ NumberOfOwnDescriptors(rcx, rbx);
3081  __ cmpp(rcx, Immediate(0));
3082  __ j(equal, &done);
3083 
3084  __ LoadInstanceDescriptors(rbx, r8);
3085  // r8: descriptor array.
3086  // rcx: valid entries in the descriptor array.
3087  // Calculate the end of the descriptor array.
3088  __ imulp(rcx, rcx, Immediate(DescriptorArray::kDescriptorSize));
3089  __ leap(rcx,
3090  Operand(r8, rcx, times_pointer_size, DescriptorArray::kFirstOffset));
3091  // Calculate location of the first key name.
3092  __ addp(r8, Immediate(DescriptorArray::kFirstOffset));
3093  // Loop through all the keys in the descriptor array. If one of these is the
3094  // internalized string "valueOf" the result is false.
3095  __ jmp(&entry);
3096  __ bind(&loop);
3097  __ movp(rdx, FieldOperand(r8, 0));
3098  __ Cmp(rdx, isolate()->factory()->value_of_string());
3099  __ j(equal, if_false);
3100  __ addp(r8, Immediate(DescriptorArray::kDescriptorSize * kPointerSize));
3101  __ bind(&entry);
3102  __ cmpp(r8, rcx);
3103  __ j(not_equal, &loop);
3104 
3105  __ bind(&done);
3106 
3107  // Set the bit in the map to indicate that there is no local valueOf field.
3108  __ orp(FieldOperand(rbx, Map::kBitField2Offset),
3109  Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
3110 
3111  __ bind(&skip_lookup);
3112 
3113  // If a valueOf property is not found on the object, check that its
3114  // prototype is the unmodified String prototype. If not, the result is false.
3115  __ movp(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
3116  __ testp(rcx, Immediate(kSmiTagMask));
3117  __ j(zero, if_false);
3118  __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
3119  __ movp(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
3120  __ movp(rdx, FieldOperand(rdx, GlobalObject::kNativeContextOffset));
3121  __ cmpp(rcx,
3122  ContextOperand(rdx, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3123  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3124  Split(equal, if_true, if_false, fall_through);
3125 
3126  context()->Plug(if_true, if_false);
3127 }
3128 
3129 
3130 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3131  ZoneList<Expression*>* args = expr->arguments();
3132  DCHECK(args->length() == 1);
3133 
3134  VisitForAccumulatorValue(args->at(0));
3135 
3136  Label materialize_true, materialize_false;
3137  Label* if_true = NULL;
3138  Label* if_false = NULL;
3139  Label* fall_through = NULL;
3140  context()->PrepareTest(&materialize_true, &materialize_false,
3141  &if_true, &if_false, &fall_through);
3142 
3143  __ JumpIfSmi(rax, if_false);
3144  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
3145  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3146  Split(equal, if_true, if_false, fall_through);
3147 
3148  context()->Plug(if_true, if_false);
3149 }
3150 
3151 
3152 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3153  ZoneList<Expression*>* args = expr->arguments();
3154  DCHECK(args->length() == 1);
3155 
3156  VisitForAccumulatorValue(args->at(0));
3157 
3158  Label materialize_true, materialize_false;
3159  Label* if_true = NULL;
3160  Label* if_false = NULL;
3161  Label* fall_through = NULL;
3162  context()->PrepareTest(&materialize_true, &materialize_false,
3163  &if_true, &if_false, &fall_through);
3164 
3165  Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
3166  __ CheckMap(rax, map, if_false, DO_SMI_CHECK);
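  // The upper word of -0.0 is 0x80000000; comparing it with 1 overflows for
  // exactly that value, so no_overflow rejects every other exponent word and
  // the mantissa compare below checks that the lower word is zero.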
3167  __ cmpl(FieldOperand(rax, HeapNumber::kExponentOffset),
3168  Immediate(0x1));
3169  __ j(no_overflow, if_false);
3170  __ cmpl(FieldOperand(rax, HeapNumber::kMantissaOffset),
3171  Immediate(0x00000000));
3172  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3173  Split(equal, if_true, if_false, fall_through);
3174 
3175  context()->Plug(if_true, if_false);
3176 }
3177 
3178 
3179 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3180  ZoneList<Expression*>* args = expr->arguments();
3181  DCHECK(args->length() == 1);
3182 
3183  VisitForAccumulatorValue(args->at(0));
3184 
3185  Label materialize_true, materialize_false;
3186  Label* if_true = NULL;
3187  Label* if_false = NULL;
3188  Label* fall_through = NULL;
3189  context()->PrepareTest(&materialize_true, &materialize_false,
3190  &if_true, &if_false, &fall_through);
3191 
3192  __ JumpIfSmi(rax, if_false);
3193  __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx);
3194  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3195  Split(equal, if_true, if_false, fall_through);
3196 
3197  context()->Plug(if_true, if_false);
3198 }
3199 
3200 
3201 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3202  ZoneList<Expression*>* args = expr->arguments();
3203  DCHECK(args->length() == 1);
3204 
3205  VisitForAccumulatorValue(args->at(0));
3206 
3207  Label materialize_true, materialize_false;
3208  Label* if_true = NULL;
3209  Label* if_false = NULL;
3210  Label* fall_through = NULL;
3211  context()->PrepareTest(&materialize_true, &materialize_false,
3212  &if_true, &if_false, &fall_through);
3213 
3214  __ JumpIfSmi(rax, if_false);
3215  __ CmpObjectType(rax, JS_REGEXP_TYPE, rbx);
3216  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3217  Split(equal, if_true, if_false, fall_through);
3218 
3219  context()->Plug(if_true, if_false);
3220 }
3221 
3222 
3223 
3224 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3225  DCHECK(expr->arguments()->length() == 0);
3226 
3227  Label materialize_true, materialize_false;
3228  Label* if_true = NULL;
3229  Label* if_false = NULL;
3230  Label* fall_through = NULL;
3231  context()->PrepareTest(&materialize_true, &materialize_false,
3232  &if_true, &if_false, &fall_through);
3233 
3234  // Get the frame pointer for the calling frame.
3235  __ movp(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
3236 
3237  // Skip the arguments adaptor frame if it exists.
3238  Label check_frame_marker;
3239  __ Cmp(Operand(rax, StandardFrameConstants::kContextOffset),
3240  Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3241  __ j(not_equal, &check_frame_marker);
3242  __ movp(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset));
3243 
3244  // Check the marker in the calling frame.
3245  __ bind(&check_frame_marker);
3246  __ Cmp(Operand(rax, StandardFrameConstants::kMarkerOffset),
3247  Smi::FromInt(StackFrame::CONSTRUCT));
3248  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3249  Split(equal, if_true, if_false, fall_through);
3250 
3251  context()->Plug(if_true, if_false);
3252 }
3253 
3254 
3255 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3256  ZoneList<Expression*>* args = expr->arguments();
3257  DCHECK(args->length() == 2);
3258 
3259  // Load the two objects into registers and perform the comparison.
3260  VisitForStackValue(args->at(0));
3261  VisitForAccumulatorValue(args->at(1));
3262 
3263  Label materialize_true, materialize_false;
3264  Label* if_true = NULL;
3265  Label* if_false = NULL;
3266  Label* fall_through = NULL;
3267  context()->PrepareTest(&materialize_true, &materialize_false,
3268  &if_true, &if_false, &fall_through);
3269 
3270  __ Pop(rbx);
3271  __ cmpp(rax, rbx);
3272  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3273  Split(equal, if_true, if_false, fall_through);
3274 
3275  context()->Plug(if_true, if_false);
3276 }
3277 
3278 
3279 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3280  ZoneList<Expression*>* args = expr->arguments();
3281  DCHECK(args->length() == 1);
3282 
3283  // ArgumentsAccessStub expects the key in rdx and the formal
3284  // parameter count in rax.
3285  VisitForAccumulatorValue(args->at(0));
3286  __ movp(rdx, rax);
3287  __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
3288  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3289  __ CallStub(&stub);
3290  context()->Plug(rax);
3291 }
3292 
3293 
3294 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3295  DCHECK(expr->arguments()->length() == 0);
3296 
3297  Label exit;
3298  // Get the number of formal parameters.
3299  __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
3300 
3301  // Check if the calling frame is an arguments adaptor frame.
3302  __ movp(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
3303  __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
3304  Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3305  __ j(not_equal, &exit, Label::kNear);
3306 
3307  // Arguments adaptor case: Read the arguments length from the
3308  // adaptor frame.
3309  __ movp(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3310 
3311  __ bind(&exit);
3312  __ AssertSmi(rax);
3313  context()->Plug(rax);
3314 }
3315 
3316 
3317 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3318  ZoneList<Expression*>* args = expr->arguments();
3319  DCHECK(args->length() == 1);
3320  Label done, null, function, non_function_constructor;
3321 
3322  VisitForAccumulatorValue(args->at(0));
3323 
3324  // If the object is a smi, we return null.
3325  __ JumpIfSmi(rax, &null);
3326 
3327  // Check that the object is a JS object but take special care of JS
3328  // functions to make sure they have 'Function' as their class.
3329  // Assume that there are only two callable types, and one of them is at
3330  // either end of the type range for JS object types. Saves extra comparisons.
3331  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3332  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax);
3333  // Map is now in rax.
3334  __ j(below, &null);
3335  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3336  FIRST_SPEC_OBJECT_TYPE + 1);
3337  __ j(equal, &function);
3338 
3339  __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE);
3340  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3341  LAST_SPEC_OBJECT_TYPE - 1);
3342  __ j(equal, &function);
3343  // Assume that there is no larger type.
3344  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3345 
3346  // Check if the constructor in the map is a JS function.
3347  __ movp(rax, FieldOperand(rax, Map::kConstructorOffset));
3348  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
3349  __ j(not_equal, &non_function_constructor);
3350 
3351  // rax now contains the constructor function. Grab the
3352  // instance class name from there.
3353  __ movp(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
3354  __ movp(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset));
3355  __ jmp(&done);
3356 
3357  // Functions have class 'Function'.
3358  __ bind(&function);
3359  __ Move(rax, isolate()->factory()->Function_string());
3360  __ jmp(&done);
3361 
3362  // Objects with a non-function constructor have class 'Object'.
3363  __ bind(&non_function_constructor);
3364  __ Move(rax, isolate()->factory()->Object_string());
3365  __ jmp(&done);
3366 
3367  // Non-JS objects have class null.
3368  __ bind(&null);
3369  __ LoadRoot(rax, Heap::kNullValueRootIndex);
3370 
3371  // All done.
3372  __ bind(&done);
3373 
3374  context()->Plug(rax);
3375 }
3376 
3377 
3378 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3379  // Load the arguments on the stack and call the stub.
3380  SubStringStub stub(isolate());
3381  ZoneList<Expression*>* args = expr->arguments();
3382  DCHECK(args->length() == 3);
3383  VisitForStackValue(args->at(0));
3384  VisitForStackValue(args->at(1));
3385  VisitForStackValue(args->at(2));
3386  __ CallStub(&stub);
3387  context()->Plug(rax);
3388 }
3389 
3390 
3391 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3392  // Load the arguments on the stack and call the stub.
3393  RegExpExecStub stub(isolate());
3394  ZoneList<Expression*>* args = expr->arguments();
3395  DCHECK(args->length() == 4);
3396  VisitForStackValue(args->at(0));
3397  VisitForStackValue(args->at(1));
3398  VisitForStackValue(args->at(2));
3399  VisitForStackValue(args->at(3));
3400  __ CallStub(&stub);
3401  context()->Plug(rax);
3402 }
3403 
3404 
3405 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3406  ZoneList<Expression*>* args = expr->arguments();
3407  DCHECK(args->length() == 1);
3408 
3409  VisitForAccumulatorValue(args->at(0)); // Load the object.
3410 
3411  Label done;
3412  // If the object is a smi return the object.
3413  __ JumpIfSmi(rax, &done);
3414  // If the object is not a value type, return the object.
3415  __ CmpObjectType(rax, JS_VALUE_TYPE, rbx);
3416  __ j(not_equal, &done);
3417  __ movp(rax, FieldOperand(rax, JSValue::kValueOffset));
3418 
3419  __ bind(&done);
3420  context()->Plug(rax);
3421 }
3422 
3423 
3424 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3425  ZoneList<Expression*>* args = expr->arguments();
3426  DCHECK(args->length() == 2);
3427  DCHECK_NE(NULL, args->at(1)->AsLiteral());
3428  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3429 
3430  VisitForAccumulatorValue(args->at(0)); // Load the object.
3431 
3432  Label runtime, done, not_date_object;
3433  Register object = rax;
3434  Register result = rax;
3435  Register scratch = rcx;
3436 
3437  __ JumpIfSmi(object, &not_date_object);
3438  __ CmpObjectType(object, JS_DATE_TYPE, scratch);
3439  __ j(not_equal, &not_date_object);
3440 
3441  if (index->value() == 0) {
3442  __ movp(result, FieldOperand(object, JSDate::kValueOffset));
3443  __ jmp(&done);
3444  } else {
3445  if (index->value() < JSDate::kFirstUncachedField) {
3446  ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3447  Operand stamp_operand = __ ExternalOperand(stamp);
3448  __ movp(scratch, stamp_operand);
3449  __ cmpp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
3450  __ j(not_equal, &runtime, Label::kNear);
3451  __ movp(result, FieldOperand(object, JSDate::kValueOffset +
3452  kPointerSize * index->value()));
3453  __ jmp(&done);
3454  }
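  // Reached for uncached fields, or when the cache stamp above was stale:
  // recompute the field in C++ via the date cache.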
3455  __ bind(&runtime);
3456  __ PrepareCallCFunction(2);
3457  __ movp(arg_reg_1, object);
3458  __ Move(arg_reg_2, index, Assembler::RelocInfoNone());
3459  __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3460  __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3461  __ jmp(&done);
3462  }
3463 
3464  __ bind(&not_date_object);
3465  __ CallRuntime(Runtime::kThrowNotDateError, 0);
3466  __ bind(&done);
3467  context()->Plug(rax);
3468 }
3469 
3470 
3471 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3472  ZoneList<Expression*>* args = expr->arguments();
3473  DCHECK_EQ(3, args->length());
3474 
3475  Register string = rax;
3476  Register index = rbx;
3477  Register value = rcx;
3478 
3479  VisitForStackValue(args->at(0)); // index
3480  VisitForStackValue(args->at(1)); // value
3481  VisitForAccumulatorValue(args->at(2)); // string
3482  __ Pop(value);
3483  __ Pop(index);
3484 
3485  if (FLAG_debug_code) {
3486  __ Check(__ CheckSmi(value), kNonSmiValue);
3487  __ Check(__ CheckSmi(index), kNonSmiValue);
3488  }
3489 
3490  __ SmiToInteger32(value, value);
3491  __ SmiToInteger32(index, index);
3492 
3493  if (FLAG_debug_code) {
3494  static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3495  __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3496  }
3497 
3498  __ movb(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
3499  value);
3500  context()->Plug(string);
3501 }
3502 
3503 
3504 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3505  ZoneList<Expression*>* args = expr->arguments();
3506  DCHECK_EQ(3, args->length());
3507 
3508  Register string = rax;
3509  Register index = rbx;
3510  Register value = rcx;
3511 
3512  VisitForStackValue(args->at(0)); // index
3513  VisitForStackValue(args->at(1)); // value
3514  VisitForAccumulatorValue(args->at(2)); // string
3515  __ Pop(value);
3516  __ Pop(index);
3517 
3518  if (FLAG_debug_code) {
3519  __ Check(__ CheckSmi(value), kNonSmiValue);
3520  __ Check(__ CheckSmi(index), kNonSmiValue);
3521  }
3522 
3523  __ SmiToInteger32(value, value);
3524  __ SmiToInteger32(index, index);
3525 
3526  if (FLAG_debug_code) {
3527  static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3528  __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3529  }
3530 
3531  __ movw(FieldOperand(string, index, times_2, SeqTwoByteString::kHeaderSize),
3532  value);
3533  context()->Plug(rax);
3534 }
3535 
3536 
3537 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3538  // Load the arguments on the stack and call the runtime function.
3539  ZoneList<Expression*>* args = expr->arguments();
3540  DCHECK(args->length() == 2);
3541  VisitForStackValue(args->at(0));
3542  VisitForStackValue(args->at(1));
3543  MathPowStub stub(isolate(), MathPowStub::ON_STACK);
3544  __ CallStub(&stub);
3545  context()->Plug(rax);
3546 }
3547 
3548 
3549 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3550  ZoneList<Expression*>* args = expr->arguments();
3551  DCHECK(args->length() == 2);
3552 
3553  VisitForStackValue(args->at(0)); // Load the object.
3554  VisitForAccumulatorValue(args->at(1)); // Load the value.
3555  __ Pop(rbx); // rax = value. rbx = object.
3556 
3557  Label done;
3558  // If the object is a smi, return the value.
3559  __ JumpIfSmi(rbx, &done);
3560 
3561  // If the object is not a value type, return the value.
3562  __ CmpObjectType(rbx, JS_VALUE_TYPE, rcx);
3563  __ j(not_equal, &done);
3564 
3565  // Store the value.
3566  __ movp(FieldOperand(rbx, JSValue::kValueOffset), rax);
3567  // Update the write barrier. Save the value as it will be
3568  // overwritten by the write barrier code and is needed afterward.
3569  __ movp(rdx, rax);
3570  __ RecordWriteField(rbx, JSValue::kValueOffset, rdx, rcx, kDontSaveFPRegs);
3571 
3572  __ bind(&done);
3573  context()->Plug(rax);
3574 }
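// Note: the value is copied into rdx before RecordWriteField because the
// write barrier may clobber the value and scratch registers handed to it,
// while rax must still hold the original value for context()->Plug(rax).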
3575 
3576 
3577 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3578  ZoneList<Expression*>* args = expr->arguments();
3579  DCHECK_EQ(args->length(), 1);
3580 
3581  // Load the argument into rax and call the stub.
3582  VisitForAccumulatorValue(args->at(0));
3583 
3584  NumberToStringStub stub(isolate());
3585  __ CallStub(&stub);
3586  context()->Plug(rax);
3587 }
3588 
3589 
3590 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3591  ZoneList<Expression*>* args = expr->arguments();
3592  DCHECK(args->length() == 1);
3593 
3594  VisitForAccumulatorValue(args->at(0));
3595 
3596  Label done;
3597  StringCharFromCodeGenerator generator(rax, rbx);
3598  generator.GenerateFast(masm_);
3599  __ jmp(&done);
3600 
3601  NopRuntimeCallHelper call_helper;
3602  generator.GenerateSlow(masm_, call_helper);
3603 
3604  __ bind(&done);
3605  context()->Plug(rbx);
3606 }
3607 
3608 
3609 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3610  ZoneList<Expression*>* args = expr->arguments();
3611  DCHECK(args->length() == 2);
3612 
3613  VisitForStackValue(args->at(0));
3614  VisitForAccumulatorValue(args->at(1));
3615 
3616  Register object = rbx;
3617  Register index = rax;
3618  Register result = rdx;
3619 
3620  __ Pop(object);
3621 
3622  Label need_conversion;
3623  Label index_out_of_range;
3624  Label done;
3625  StringCharCodeAtGenerator generator(object,
3626  index,
3627  result,
3628  &need_conversion,
3629  &need_conversion,
3630  &index_out_of_range,
3631  STRING_INDEX_IS_NUMBER);
3632  generator.GenerateFast(masm_);
3633  __ jmp(&done);
3634 
3635  __ bind(&index_out_of_range);
3636  // When the index is out of range, the spec requires us to return
3637  // NaN.
3638  __ LoadRoot(result, Heap::kNanValueRootIndex);
3639  __ jmp(&done);
3640 
3641  __ bind(&need_conversion);
3642  // Move the undefined value into the result register, which will
3643  // trigger conversion.
3644  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3645  __ jmp(&done);
3646 
3647  NopRuntimeCallHelper call_helper;
3648  generator.GenerateSlow(masm_, call_helper);
3649 
3650  __ bind(&done);
3651  context()->Plug(result);
3652 }
3653 
3654 
3655 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3656  ZoneList<Expression*>* args = expr->arguments();
3657  DCHECK(args->length() == 2);
3658 
3659  VisitForStackValue(args->at(0));
3660  VisitForAccumulatorValue(args->at(1));
3661 
3662  Register object = rbx;
3663  Register index = rax;
3664  Register scratch = rdx;
3665  Register result = rax;
3666 
3667  __ Pop(object);
3668 
3669  Label need_conversion;
3670  Label index_out_of_range;
3671  Label done;
3672  StringCharAtGenerator generator(object,
3673  index,
3674  scratch,
3675  result,
3676  &need_conversion,
3677  &need_conversion,
3678  &index_out_of_range,
3679  STRING_INDEX_IS_NUMBER);
3680  generator.GenerateFast(masm_);
3681  __ jmp(&done);
3682 
3683  __ bind(&index_out_of_range);
3684  // When the index is out of range, the spec requires us to return
3685  // the empty string.
3686  __ LoadRoot(result, Heap::kempty_stringRootIndex);
3687  __ jmp(&done);
3688 
3689  __ bind(&need_conversion);
3690  // Move smi zero into the result register, which will trigger
3691  // conversion.
3692  __ Move(result, Smi::FromInt(0));
3693  __ jmp(&done);
3694 
3695  NopRuntimeCallHelper call_helper;
3696  generator.GenerateSlow(masm_, call_helper);
3697 
3698  __ bind(&done);
3699  context()->Plug(result);
3700 }
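// Note: charCodeAt and charAt above share the same generator machinery but
// differ in the spec'd out-of-range result (NaN vs. the empty string) and in
// the sentinel loaded on the need_conversion path -- undefined for
// charCodeAt, smi zero for charAt -- each chosen so the slow path performs
// the right conversion.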
3701 
3702 
3703 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3704  ZoneList<Expression*>* args = expr->arguments();
3705  DCHECK_EQ(2, args->length());
3706  VisitForStackValue(args->at(0));
3707  VisitForAccumulatorValue(args->at(1));
3708 
3709  __ Pop(rdx);
3710  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
3711  __ CallStub(&stub);
3712  context()->Plug(rax);
3713 }
3714 
3715 
3716 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3717  ZoneList<Expression*>* args = expr->arguments();
3718  DCHECK_EQ(2, args->length());
3719 
3720  VisitForStackValue(args->at(0));
3721  VisitForStackValue(args->at(1));
3722 
3723  StringCompareStub stub(isolate());
3724  __ CallStub(&stub);
3725  context()->Plug(rax);
3726 }
3727 
3728 
3729 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3730  ZoneList<Expression*>* args = expr->arguments();
3731  DCHECK(args->length() >= 2);
3732 
3733  int arg_count = args->length() - 2; // 2 ~ receiver and function.
3734  for (int i = 0; i < arg_count + 1; i++) {
3735  VisitForStackValue(args->at(i));
3736  }
3737  VisitForAccumulatorValue(args->last()); // Function.
3738 
3739  Label runtime, done;
3740  // Check for non-function argument (including proxy).
3741  __ JumpIfSmi(rax, &runtime);
3742  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
3743  __ j(not_equal, &runtime);
3744 
3745  // InvokeFunction requires the function in rdi. Move it in there.
3746  __ movp(rdi, result_register());
3747  ParameterCount count(arg_count);
3748  __ InvokeFunction(rdi, count, CALL_FUNCTION, NullCallWrapper());
3749  __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3750  __ jmp(&done);
3751 
3752  __ bind(&runtime);
3753  __ Push(rax);
3754  __ CallRuntime(Runtime::kCall, args->length());
3755  __ bind(&done);
3756 
3757  context()->Plug(rax);
3758 }
3759 
3760 
3761 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3762  RegExpConstructResultStub stub(isolate());
3763  ZoneList<Expression*>* args = expr->arguments();
3764  DCHECK(args->length() == 3);
3765  VisitForStackValue(args->at(0));
3766  VisitForStackValue(args->at(1));
3767  VisitForAccumulatorValue(args->at(2));
3768  __ Pop(rbx);
3769  __ Pop(rcx);
3770  __ CallStub(&stub);
3771  context()->Plug(rax);
3772 }
3773 
3774 
3775 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3776  ZoneList<Expression*>* args = expr->arguments();
3777  DCHECK_EQ(2, args->length());
3778 
3779  DCHECK_NE(NULL, args->at(0)->AsLiteral());
3780  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3781 
3782  Handle<FixedArray> jsfunction_result_caches(
3783  isolate()->native_context()->jsfunction_result_caches());
3784  if (jsfunction_result_caches->length() <= cache_id) {
3785  __ Abort(kAttemptToUseUndefinedCache);
3786  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
3787  context()->Plug(rax);
3788  return;
3789  }
3790 
3791  VisitForAccumulatorValue(args->at(1));
3792 
3793  Register key = rax;
3794  Register cache = rbx;
3795  Register tmp = rcx;
3796  __ movp(cache, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX));
3797  __ movp(cache,
3798  FieldOperand(cache, GlobalObject::kNativeContextOffset));
3799  __ movp(cache,
3800  ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3801  __ movp(cache,
3802  FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3803 
3804  Label done, not_found;
3805  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3806  __ movp(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
3807  // tmp now holds finger offset as a smi.
3808  SmiIndex index =
3809  __ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2);
3810  __ cmpp(key, FieldOperand(cache,
3811  index.reg,
3812  index.scale,
3813  FixedArray::kHeaderSize));
3814  __ j(not_equal, &not_found, Label::kNear);
3815  __ movp(rax, FieldOperand(cache,
3816  index.reg,
3817  index.scale,
3818  FixedArray::kHeaderSize + kPointerSize));
3819  __ jmp(&done, Label::kNear);
3820 
3821  __ bind(&not_found);
3822  // Call runtime to perform the lookup.
3823  __ Push(cache);
3824  __ Push(key);
3825  __ CallRuntime(Runtime::kGetFromCache, 2);
3826 
3827  __ bind(&done);
3828  context()->Plug(rax);
3829 }
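// Note (assumed cache layout, hedged): a JSFunctionResultCache is a
// FixedArray of (key, value) pairs plus a "finger" slot recording the offset
// of the most recent hit. The fast path above probes only the entry under
// the finger -- a one-entry memo -- and on a miss calls %GetFromCache, which
// searches the whole array and advances the finger.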
3830 
3831 
3832 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3833  ZoneList<Expression*>* args = expr->arguments();
3834  DCHECK(args->length() == 1);
3835 
3836  VisitForAccumulatorValue(args->at(0));
3837 
3838  Label materialize_true, materialize_false;
3839  Label* if_true = NULL;
3840  Label* if_false = NULL;
3841  Label* fall_through = NULL;
3842  context()->PrepareTest(&materialize_true, &materialize_false,
3843  &if_true, &if_false, &fall_through);
3844 
3845  __ testl(FieldOperand(rax, String::kHashFieldOffset),
3846  Immediate(String::kContainsCachedArrayIndexMask));
3847  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3848  __ j(zero, if_true);
3849  __ jmp(if_false);
3850 
3851  context()->Plug(if_true, if_false);
3852 }
3853 
3854 
3855 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3856  ZoneList<Expression*>* args = expr->arguments();
3857  DCHECK(args->length() == 1);
3858  VisitForAccumulatorValue(args->at(0));
3859 
3860  __ AssertString(rax);
3861 
3862  __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
3863  DCHECK(String::kHashShift >= kSmiTagSize);
3864  __ IndexFromHash(rax, rax);
3865 
3866  context()->Plug(rax);
3867 }
3868 
3869 
3870 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
3871  Label bailout, return_result, done, one_char_separator, long_separator,
3872  non_trivial_array, not_size_one_array, loop,
3873  loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
3874  ZoneList<Expression*>* args = expr->arguments();
3875  DCHECK(args->length() == 2);
3876  // We will leave the separator on the stack until the end of the function.
3877  VisitForStackValue(args->at(1));
3878  // Load this into rax (= array).
3879  VisitForAccumulatorValue(args->at(0));
3880  // All aliases of the same register have disjoint lifetimes.
3881  Register array = rax;
3882  Register elements = no_reg; // Will be rax.
3883 
3884  Register index = rdx;
3885 
3886  Register string_length = rcx;
3887 
3888  Register string = rsi;
3889 
3890  Register scratch = rbx;
3891 
3892  Register array_length = rdi;
3893  Register result_pos = no_reg; // Will be rdi.
3894 
3895  Operand separator_operand = Operand(rsp, 2 * kPointerSize);
3896  Operand result_operand = Operand(rsp, 1 * kPointerSize);
3897  Operand array_length_operand = Operand(rsp, 0 * kPointerSize);
3898  // Separator operand is already pushed. Make room for the two
3899  // other stack fields, and clear the direction flag in anticipation
3900  // of calling CopyBytes.
3901  __ subp(rsp, Immediate(2 * kPointerSize));
3902  __ cld();
3903  // Check that the array is a JSArray
3904  __ JumpIfSmi(array, &bailout);
3905  __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
3906  __ j(not_equal, &bailout);
3907 
3908  // Check that the array has fast elements.
3909  __ CheckFastElements(scratch, &bailout);
3910 
3911  // Array has fast elements, so its length must be a smi.
3912  // If the array has length zero, return the empty string.
3913  __ movp(array_length, FieldOperand(array, JSArray::kLengthOffset));
3914  __ SmiCompare(array_length, Smi::FromInt(0));
3915  __ j(not_zero, &non_trivial_array);
3916  __ LoadRoot(rax, Heap::kempty_stringRootIndex);
3917  __ jmp(&return_result);
3918 
3919  // Save the array length on the stack.
3920  __ bind(&non_trivial_array);
3921  __ SmiToInteger32(array_length, array_length);
3922  __ movl(array_length_operand, array_length);
3923 
3924  // Save the FixedArray containing array's elements.
3925  // End of array's live range.
3926  elements = array;
3927  __ movp(elements, FieldOperand(array, JSArray::kElementsOffset));
3928  array = no_reg;
3929 
3930 
3931  // Check that all array elements are sequential one-byte strings, and
3932  // accumulate the sum of their lengths, as a smi-encoded value.
3933  __ Set(index, 0);
3934  __ Set(string_length, 0);
3935  // Loop condition: while (index < array_length).
3936  // Live loop registers: index(int32), array_length(int32), string(String*),
3937  // scratch, string_length(int32), elements(FixedArray*).
3938  if (generate_debug_code_) {
3939  __ cmpp(index, array_length);
3940  __ Assert(below, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
3941  }
3942  __ bind(&loop);
3943  __ movp(string, FieldOperand(elements,
3944  index,
3945  times_pointer_size,
3946  FixedArray::kHeaderSize));
3947  __ JumpIfSmi(string, &bailout);
3948  __ movp(scratch, FieldOperand(string, HeapObject::kMapOffset));
3949  __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3950  __ andb(scratch, Immediate(
3951  kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3952  __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag));
3953  __ j(not_equal, &bailout);
3954  __ AddSmiField(string_length,
3955  FieldOperand(string, SeqOneByteString::kLengthOffset));
3956  __ j(overflow, &bailout);
3957  __ incl(index);
3958  __ cmpl(index, array_length);
3959  __ j(less, &loop);
3960 
3961  // Live registers:
3962  // string_length: Sum of string lengths.
3963  // elements: FixedArray of strings.
3964  // index: Array length.
3965  // array_length: Array length.
3966 
3967  // If array_length is 1, return elements[0], a string.
3968  __ cmpl(array_length, Immediate(1));
3969  __ j(not_equal, &not_size_one_array);
3970  __ movp(rax, FieldOperand(elements, FixedArray::kHeaderSize));
3971  __ jmp(&return_result);
3972 
3973  __ bind(&not_size_one_array);
3974 
3975  // End of array_length live range.
3976  result_pos = array_length;
3977  array_length = no_reg;
3978 
3979  // Live registers:
3980  // string_length: Sum of string lengths.
3981  // elements: FixedArray of strings.
3982  // index: Array length.
3983 
3984  // Check that the separator is a sequential one-byte string.
3985  __ movp(string, separator_operand);
3986  __ JumpIfSmi(string, &bailout);
3987  __ movp(scratch, FieldOperand(string, HeapObject::kMapOffset));
3988  __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3989  __ andb(scratch, Immediate(
3990  kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3991  __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag));
3992  __ j(not_equal, &bailout);
3993 
3994  // Live registers:
3995  // string_length: Sum of string lengths.
3996  // elements: FixedArray of strings.
3997  // index: Array length.
3998  // string: Separator string.
3999 
4000  // Add (separator length times (array_length - 1)) to string_length.
4001  __ SmiToInteger32(scratch,
4002  FieldOperand(string, SeqOneByteString::kLengthOffset));
4003  __ decl(index);
4004  __ imull(scratch, index);
4005  __ j(overflow, &bailout);
4006  __ addl(string_length, scratch);
4007  __ j(overflow, &bailout);
4008 
4009  // Live registers and stack values:
4010  // string_length: Total length of result string.
4011  // elements: FixedArray of strings.
4012  __ AllocateOneByteString(result_pos, string_length, scratch, index, string,
4013  &bailout);
4014  __ movp(result_operand, result_pos);
4015  __ leap(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize));
4016 
4017  __ movp(string, separator_operand);
4018  __ SmiCompare(FieldOperand(string, SeqOneByteString::kLengthOffset),
4019  Smi::FromInt(1));
4020  __ j(equal, &one_char_separator);
4021  __ j(greater, &long_separator);
4022 
4023 
4024  // Empty separator case:
4025  __ Set(index, 0);
4026  __ movl(scratch, array_length_operand);
4027  __ jmp(&loop_1_condition);
4028  // Loop condition: while (index < array_length).
4029  __ bind(&loop_1);
4030  // Each iteration of the loop concatenates one string to the result.
4031  // Live values in registers:
4032  // index: which element of the elements array we are adding to the result.
4033  // result_pos: the position to which we are currently copying characters.
4034  // elements: the FixedArray of strings we are joining.
4035  // scratch: array length.
4036 
4037  // Get string = array[index].
4038  __ movp(string, FieldOperand(elements, index,
4039  times_pointer_size,
4040  FixedArray::kHeaderSize));
4041  __ SmiToInteger32(string_length,
4042  FieldOperand(string, String::kLengthOffset));
4043  __ leap(string,
4044  FieldOperand(string, SeqOneByteString::kHeaderSize));
4045  __ CopyBytes(result_pos, string, string_length);
4046  __ incl(index);
4047  __ bind(&loop_1_condition);
4048  __ cmpl(index, scratch);
4049  __ j(less, &loop_1); // Loop while (index < array_length).
4050  __ jmp(&done);
4051 
4052  // Generic bailout code used from several places.
4053  __ bind(&bailout);
4054  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
4055  __ jmp(&return_result);
4056 
4057 
4058  // One-character separator case
4059  __ bind(&one_char_separator);
4060  // Get the separator one-byte character value.
4061  // Register "string" holds the separator.
4062  __ movzxbl(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
4063  __ Set(index, 0);
4064  // Jump into the loop after the code that copies the separator, so the first
4065  // element is not preceded by a separator.
4066  __ jmp(&loop_2_entry);
4067  // Loop condition: while (index < length).
4068  __ bind(&loop_2);
4069  // Each iteration of the loop concatenates one string to the result.
4070  // Live values in registers:
4071  // elements: The FixedArray of strings we are joining.
4072  // index: which element of the elements array we are adding to the result.
4073  // result_pos: the position to which we are currently copying characters.
4074  // scratch: Separator character.
4075 
4076  // Copy the separator character to the result.
4077  __ movb(Operand(result_pos, 0), scratch);
4078  __ incp(result_pos);
4079 
4080  __ bind(&loop_2_entry);
4081  // Get string = array[index].
4082  __ movp(string, FieldOperand(elements, index,
4083  times_pointer_size,
4084  FixedArray::kHeaderSize));
4085  __ SmiToInteger32(string_length,
4086  FieldOperand(string, String::kLengthOffset));
4087  __ leap(string,
4088  FieldOperand(string, SeqOneByteString::kHeaderSize));
4089  __ CopyBytes(result_pos, string, string_length);
4090  __ incl(index);
4091  __ cmpl(index, array_length_operand);
4092  __ j(less, &loop_2); // End while (index < length).
4093  __ jmp(&done);
4094 
4095 
4096  // Long separator case (separator is more than one character).
4097  __ bind(&long_separator);
4098 
4099  // Make elements point to end of elements array, and index
4100  // count from -array_length to zero, so we don't need to maintain
4101  // a loop limit.
4102  __ movl(index, array_length_operand);
4103  __ leap(elements, FieldOperand(elements, index, times_pointer_size,
4104  FixedArray::kHeaderSize));
4105  __ negq(index);
4106 
4107  // Replace separator string with pointer to its first character, and
4108  // make scratch be its length.
4109  __ movp(string, separator_operand);
4110  __ SmiToInteger32(scratch,
4111  FieldOperand(string, String::kLengthOffset));
4112  __ leap(string,
4113  FieldOperand(string, SeqOneByteString::kHeaderSize));
4114  __ movp(separator_operand, string);
4115 
4116  // Jump into the loop after the code that copies the separator, so the first
4117  // element is not preceded by a separator.
4118  __ jmp(&loop_3_entry);
4119  // Loop condition: while (index < length).
4120  __ bind(&loop_3);
4121  // Each iteration of the loop concatenates one string to the result.
4122  // Live values in registers:
4123  // index: which element of the elements array we are adding to the result.
4124  // result_pos: the position to which we are currently copying characters.
4125  // scratch: Separator length.
4126  // separator_operand (rsp[0x10]): Address of first char of separator.
4127 
4128  // Copy the separator to the result.
4129  __ movp(string, separator_operand);
4130  __ movl(string_length, scratch);
4131  __ CopyBytes(result_pos, string, string_length, 2);
4132 
4133  __ bind(&loop_3_entry);
4134  // Get string = array[index].
4135  __ movp(string, Operand(elements, index, times_pointer_size, 0));
4136  __ SmiToInteger32(string_length,
4137  FieldOperand(string, String::kLengthOffset));
4138  __ leap(string,
4139  FieldOperand(string, SeqOneByteString::kHeaderSize));
4140  __ CopyBytes(result_pos, string, string_length);
4141  __ incq(index);
4142  __ j(not_equal, &loop_3); // Loop while (index < 0).
4143 
4144  __ bind(&done);
4145  __ movp(rax, result_operand);
4146 
4147  __ bind(&return_result);
4148  // Drop temp values from the stack, and restore context register.
4149  __ addp(rsp, Immediate(3 * kPointerSize));
4150  __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
4151  context()->Plug(rax);
4152 }
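// Note: the fast join above is roughly the following pseudo-JS, where
// isFastOneByteArray is a stand-in for the smi/JS_ARRAY_TYPE/fast-elements
// checks and the per-element sequential-one-byte-string checks emitted
// above. Bailing out loads undefined, so a library-level caller can detect
// the failure and take a generic join path instead:
//
//   function FastOneByteArrayJoin(array, sep) {
//     if (!isFastOneByteArray(array)) return undefined;  // bailout
//     if (array.length == 0) return "";
//     if (array.length == 1) return array[0];
//     // total = sum of element lengths + sep.length * (array.length - 1)
//     // allocate the result once, then copy with one of three loops:
//     // empty separator, one-character separator, or long separator.
//   }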
4153 
4154 
4155 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4156  DCHECK(expr->arguments()->length() == 0);
4157  ExternalReference debug_is_active =
4158  ExternalReference::debug_is_active_address(isolate());
4159  __ Move(kScratchRegister, debug_is_active);
4160  __ movzxbp(rax, Operand(kScratchRegister, 0));
4161  __ Integer32ToSmi(rax, rax);
4162  context()->Plug(rax);
4163 }
4164 
4165 
4166 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4167  if (expr->function() != NULL &&
4168  expr->function()->intrinsic_type == Runtime::INLINE) {
4169  Comment cmnt(masm_, "[ InlineRuntimeCall");
4170  EmitInlineRuntimeCall(expr);
4171  return;
4172  }
4173 
4174  Comment cmnt(masm_, "[ CallRuntime");
4175  ZoneList<Expression*>* args = expr->arguments();
4176  int arg_count = args->length();
4177 
4178  if (expr->is_jsruntime()) {
4179  // Push the builtins object as receiver.
4180  __ movp(rax, GlobalObjectOperand());
4181  __ Push(FieldOperand(rax, GlobalObject::kBuiltinsOffset));
4182 
4183  // Load the function from the receiver.
4184  __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
4185  __ Move(LoadDescriptor::NameRegister(), expr->name());
4186  if (FLAG_vector_ics) {
4187  __ Move(VectorLoadICDescriptor::SlotRegister(),
4188  Smi::FromInt(expr->CallRuntimeFeedbackSlot()));
4189  CallLoadIC(NOT_CONTEXTUAL);
4190  } else {
4191  CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4192  }
4193 
4194  // Push the target function under the receiver.
4195  __ Push(Operand(rsp, 0));
4196  __ movp(Operand(rsp, kPointerSize), rax);
4197 
4198  // Push the arguments ("left-to-right").
4199  for (int i = 0; i < arg_count; i++) {
4200  VisitForStackValue(args->at(i));
4201  }
4202 
4203  // Record source position of the IC call.
4204  SetSourcePosition(expr->position());
4205  CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4206  __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
4207  __ CallStub(&stub);
4208 
4209  // Restore context register.
4210  __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
4211  context()->DropAndPlug(1, rax);
4212 
4213  } else {
4214  // Push the arguments ("left-to-right").
4215  for (int i = 0; i < arg_count; i++) {
4216  VisitForStackValue(args->at(i));
4217  }
4218 
4219  // Call the C runtime.
4220  __ CallRuntime(expr->function(), arg_count);
4221  context()->Plug(rax);
4222  }
4223 }
4224 
4225 
4226 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4227  switch (expr->op()) {
4228  case Token::DELETE: {
4229  Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4230  Property* property = expr->expression()->AsProperty();
4231  VariableProxy* proxy = expr->expression()->AsVariableProxy();
4232 
4233  if (property != NULL) {
4234  VisitForStackValue(property->obj());
4235  VisitForStackValue(property->key());
4236  __ Push(Smi::FromInt(strict_mode()));
4237  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4238  context()->Plug(rax);
4239  } else if (proxy != NULL) {
4240  Variable* var = proxy->var();
4241  // Delete of an unqualified identifier is disallowed in strict mode
4242  // but "delete this" is allowed.
4243  DCHECK(strict_mode() == SLOPPY || var->is_this());
4244  if (var->IsUnallocated()) {
4245  __ Push(GlobalObjectOperand());
4246  __ Push(var->name());
4247  __ Push(Smi::FromInt(SLOPPY));
4248  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4249  context()->Plug(rax);
4250  } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4251  // Result of deleting non-global variables is false. 'this' is
4252  // not really a variable, though we implement it as one. The
4253  // subexpression does not have side effects.
4254  context()->Plug(var->is_this());
4255  } else {
4256  // Non-global variable. Call the runtime to try to delete from the
4257  // context where the variable was introduced.
4258  __ Push(context_register());
4259  __ Push(var->name());
4260  __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4261  context()->Plug(rax);
4262  }
4263  } else {
4264  // Result of deleting non-property, non-variable reference is true.
4265  // The subexpression may have side effects.
4266  VisitForEffect(expr->expression());
4267  context()->Plug(true);
4268  }
4269  break;
4270  }
4271 
4272  case Token::VOID: {
4273  Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4274  VisitForEffect(expr->expression());
4275  context()->Plug(Heap::kUndefinedValueRootIndex);
4276  break;
4277  }
4278 
4279  case Token::NOT: {
4280  Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4281  if (context()->IsEffect()) {
4282  // Unary NOT has no side effects so it's only necessary to visit the
4283  // subexpression. Match the optimizing compiler by not branching.
4284  VisitForEffect(expr->expression());
4285  } else if (context()->IsTest()) {
4286  const TestContext* test = TestContext::cast(context());
4287  // The labels are swapped for the recursive call.
4288  VisitForControl(expr->expression(),
4289  test->false_label(),
4290  test->true_label(),
4291  test->fall_through());
4292  context()->Plug(test->true_label(), test->false_label());
4293  } else {
4294  // We handle value contexts explicitly rather than simply visiting
4295  // for control and plugging the control flow into the context,
4296  // because we need to prepare a pair of extra administrative AST ids
4297  // for the optimizing compiler.
4298  DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4299  Label materialize_true, materialize_false, done;
4300  VisitForControl(expr->expression(),
4301  &materialize_false,
4302  &materialize_true,
4303  &materialize_true);
4304  __ bind(&materialize_true);
4305  PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4306  if (context()->IsAccumulatorValue()) {
4307  __ LoadRoot(rax, Heap::kTrueValueRootIndex);
4308  } else {
4309  __ PushRoot(Heap::kTrueValueRootIndex);
4310  }
4311  __ jmp(&done, Label::kNear);
4312  __ bind(&materialize_false);
4313  PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4314  if (context()->IsAccumulatorValue()) {
4315  __ LoadRoot(rax, Heap::kFalseValueRootIndex);
4316  } else {
4317  __ PushRoot(Heap::kFalseValueRootIndex);
4318  }
4319  __ bind(&done);
4320  }
4321  break;
4322  }
4323 
4324  case Token::TYPEOF: {
4325  Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4326  { StackValueContext context(this);
4327  VisitForTypeofValue(expr->expression());
4328  }
4329  __ CallRuntime(Runtime::kTypeof, 1);
4330  context()->Plug(rax);
4331  break;
4332  }
4333 
4334  default:
4335  UNREACHABLE();
4336  }
4337 }
4338 
4339 
4340 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4341  DCHECK(expr->expression()->IsValidReferenceExpression());
4342 
4343  Comment cmnt(masm_, "[ CountOperation");
4344  SetSourcePosition(expr->position());
4345 
4346  // Expression can only be a property, a global or a (parameter or local)
4347  // slot.
4348  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
4349  LhsKind assign_type = VARIABLE;
4350  Property* prop = expr->expression()->AsProperty();
4351  // In case of a property we use the uninitialized expression context
4352  // of the key to detect a named property.
4353  if (prop != NULL) {
4354  assign_type =
4355  (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4356  if (prop->IsSuperAccess()) {
4357  // throw exception.
4358  VisitSuperReference(prop->obj()->AsSuperReference());
4359  return;
4360  }
4361  }
4362 
4363  // Evaluate expression and get value.
4364  if (assign_type == VARIABLE) {
4365  DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4366  AccumulatorValueContext context(this);
4367  EmitVariableLoad(expr->expression()->AsVariableProxy());
4368  } else {
4369  // Reserve space for result of postfix operation.
4370  if (expr->is_postfix() && !context()->IsEffect()) {
4371  __ Push(Smi::FromInt(0));
4372  }
4373  if (assign_type == NAMED_PROPERTY) {
4374  VisitForStackValue(prop->obj());
4375  __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
4376  EmitNamedPropertyLoad(prop);
4377  } else {
4378  VisitForStackValue(prop->obj());
4379  VisitForStackValue(prop->key());
4380  // Leave receiver on stack
4381  __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, kPointerSize));
4382  // Copy of key, needed for later store.
4383  __ movp(LoadDescriptor::NameRegister(), Operand(rsp, 0));
4384  EmitKeyedPropertyLoad(prop);
4385  }
4386  }
4387 
4388  // We need a second deoptimization point after loading the value
4389  // in case evaluating the property load may have a side effect.
4390  if (assign_type == VARIABLE) {
4391  PrepareForBailout(expr->expression(), TOS_REG);
4392  } else {
4393  PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4394  }
4395 
4396  // Inline smi case if we are in a loop.
4397  Label done, stub_call;
4398  JumpPatchSite patch_site(masm_);
4399  if (ShouldInlineSmiCase(expr->op())) {
4400  Label slow;
4401  patch_site.EmitJumpIfNotSmi(rax, &slow, Label::kNear);
4402 
4403  // Save result for postfix expressions.
4404  if (expr->is_postfix()) {
4405  if (!context()->IsEffect()) {
4406  // Save the result on the stack. If we have a named or keyed property
4407  // we store the result under the receiver that is currently on top
4408  // of the stack.
4409  switch (assign_type) {
4410  case VARIABLE:
4411  __ Push(rax);
4412  break;
4413  case NAMED_PROPERTY:
4414  __ movp(Operand(rsp, kPointerSize), rax);
4415  break;
4416  case KEYED_PROPERTY:
4417  __ movp(Operand(rsp, 2 * kPointerSize), rax);
4418  break;
4419  }
4420  }
4421  }
4422 
4423  SmiOperationExecutionMode mode;
4424  mode.Add(PRESERVE_SOURCE_REGISTER);
4425  mode.Add(BAILOUT_ON_NO_OVERFLOW);
4426  if (expr->op() == Token::INC) {
4427  __ SmiAddConstant(rax, rax, Smi::FromInt(1), mode, &done, Label::kNear);
4428  } else {
4429  __ SmiSubConstant(rax, rax, Smi::FromInt(1), mode, &done, Label::kNear);
4430  }
4431  __ jmp(&stub_call, Label::kNear);
4432  __ bind(&slow);
4433  }
4434 
4435  ToNumberStub convert_stub(isolate());
4436  __ CallStub(&convert_stub);
4437 
4438  // Save result for postfix expressions.
4439  if (expr->is_postfix()) {
4440  if (!context()->IsEffect()) {
4441  // Save the result on the stack. If we have a named or keyed property
4442  // we store the result under the receiver that is currently on top
4443  // of the stack.
4444  switch (assign_type) {
4445  case VARIABLE:
4446  __ Push(rax);
4447  break;
4448  case NAMED_PROPERTY:
4449  __ movp(Operand(rsp, kPointerSize), rax);
4450  break;
4451  case KEYED_PROPERTY:
4452  __ movp(Operand(rsp, 2 * kPointerSize), rax);
4453  break;
4454  }
4455  }
4456  }
4457 
4458  // Record position before stub call.
4459  SetSourcePosition(expr->position());
4460 
4461  // Call stub for +1/-1.
4462  __ bind(&stub_call);
4463  __ movp(rdx, rax);
4464  __ Move(rax, Smi::FromInt(1));
4465  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), expr->binary_op(),
4466  NO_OVERWRITE).code();
4467  CallIC(code, expr->CountBinOpFeedbackId());
4468  patch_site.EmitPatchInfo();
4469  __ bind(&done);
4470 
4471  // Store the value returned in rax.
4472  switch (assign_type) {
4473  case VARIABLE:
4474  if (expr->is_postfix()) {
4475  // Perform the assignment as if via '='.
4476  { EffectContext context(this);
4477  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4478  Token::ASSIGN);
4479  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4480  context.Plug(rax);
4481  }
4482  // For all contexts except kEffect: We have the result on
4483  // top of the stack.
4484  if (!context()->IsEffect()) {
4485  context()->PlugTOS();
4486  }
4487  } else {
4488  // Perform the assignment as if via '='.
4489  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4490  Token::ASSIGN);
4491  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4492  context()->Plug(rax);
4493  }
4494  break;
4495  case NAMED_PROPERTY: {
4496  __ Move(StoreDescriptor::NameRegister(),
4497  prop->key()->AsLiteral()->value());
4498  __ Pop(StoreDescriptor::ReceiverRegister());
4499  CallStoreIC(expr->CountStoreFeedbackId());
4500  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4501  if (expr->is_postfix()) {
4502  if (!context()->IsEffect()) {
4503  context()->PlugTOS();
4504  }
4505  } else {
4506  context()->Plug(rax);
4507  }
4508  break;
4509  }
4510  case KEYED_PROPERTY: {
4511  __ Pop(StoreDescriptor::NameRegister());
4512  __ Pop(StoreDescriptor::ReceiverRegister());
4513  Handle<Code> ic =
4514  CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
4515  CallIC(ic, expr->CountStoreFeedbackId());
4516  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4517  if (expr->is_postfix()) {
4518  if (!context()->IsEffect()) {
4519  context()->PlugTOS();
4520  }
4521  } else {
4522  context()->Plug(rax);
4523  }
4524  break;
4525  }
4526  }
4527 }
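// Note on the postfix bookkeeping above: for x++/x-- in a value context, a
// smi zero is pushed first to reserve a stack slot, then overwritten with
// the original value *below* the receiver (and key) that the store IC will
// pop later -- hence the stores at Operand(rsp, kPointerSize) for named
// properties and Operand(rsp, 2 * kPointerSize) for keyed ones.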
4528 
4529 
4530 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4531  VariableProxy* proxy = expr->AsVariableProxy();
4532  DCHECK(!context()->IsEffect());
4533  DCHECK(!context()->IsTest());
4534 
4535  if (proxy != NULL && proxy->var()->IsUnallocated()) {
4536  Comment cmnt(masm_, "[ Global variable");
4537  __ Move(LoadDescriptor::NameRegister(), proxy->name());
4538  __ movp(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
4539  if (FLAG_vector_ics) {
4540  __ Move(VectorLoadICDescriptor::SlotRegister(),
4541  Smi::FromInt(proxy->VariableFeedbackSlot()));
4542  }
4543  // Use a regular load, not a contextual load, to avoid a reference
4544  // error.
4545  CallLoadIC(NOT_CONTEXTUAL);
4546  PrepareForBailout(expr, TOS_REG);
4547  context()->Plug(rax);
4548  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4549  Comment cmnt(masm_, "[ Lookup slot");
4550  Label done, slow;
4551 
4552  // Generate code for loading from variables potentially shadowed
4553  // by eval-introduced variables.
4554  EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);
4555 
4556  __ bind(&slow);
4557  __ Push(rsi);
4558  __ Push(proxy->name());
4559  __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
4560  PrepareForBailout(expr, TOS_REG);
4561  __ bind(&done);
4562 
4563  context()->Plug(rax);
4564  } else {
4565  // This expression cannot throw a reference error at the top level.
4566  VisitInDuplicateContext(expr);
4567  }
4568 }
4569 
4570 
4571 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4572  Expression* sub_expr,
4573  Handle<String> check) {
4574  Label materialize_true, materialize_false;
4575  Label* if_true = NULL;
4576  Label* if_false = NULL;
4577  Label* fall_through = NULL;
4578  context()->PrepareTest(&materialize_true, &materialize_false,
4579  &if_true, &if_false, &fall_through);
4580 
4581  { AccumulatorValueContext context(this);
4582  VisitForTypeofValue(sub_expr);
4583  }
4584  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4585 
4586  Factory* factory = isolate()->factory();
4587  if (String::Equals(check, factory->number_string())) {
4588  __ JumpIfSmi(rax, if_true);
4589  __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
4590  __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
4591  Split(equal, if_true, if_false, fall_through);
4592  } else if (String::Equals(check, factory->string_string())) {
4593  __ JumpIfSmi(rax, if_false);
4594  // Check for undetectable objects => false.
4595  __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
4596  __ j(above_equal, if_false);
4597  __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4598  Immediate(1 << Map::kIsUndetectable));
4599  Split(zero, if_true, if_false, fall_through);
4600  } else if (String::Equals(check, factory->symbol_string())) {
4601  __ JumpIfSmi(rax, if_false);
4602  __ CmpObjectType(rax, SYMBOL_TYPE, rdx);
4603  Split(equal, if_true, if_false, fall_through);
4604  } else if (String::Equals(check, factory->boolean_string())) {
4605  __ CompareRoot(rax, Heap::kTrueValueRootIndex);
4606  __ j(equal, if_true);
4607  __ CompareRoot(rax, Heap::kFalseValueRootIndex);
4608  Split(equal, if_true, if_false, fall_through);
4609  } else if (String::Equals(check, factory->undefined_string())) {
4610  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
4611  __ j(equal, if_true);
4612  __ JumpIfSmi(rax, if_false);
4613  // Check for undetectable objects => true.
4614  __ movp(rdx, FieldOperand(rax, HeapObject::kMapOffset));
4615  __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4616  Immediate(1 << Map::kIsUndetectable));
4617  Split(not_zero, if_true, if_false, fall_through);
4618  } else if (String::Equals(check, factory->function_string())) {
4619  __ JumpIfSmi(rax, if_false);
4620  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4621  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rdx);
4622  __ j(equal, if_true);
4623  __ CmpInstanceType(rdx, JS_FUNCTION_PROXY_TYPE);
4624  Split(equal, if_true, if_false, fall_through);
4625  } else if (String::Equals(check, factory->object_string())) {
4626  __ JumpIfSmi(rax, if_false);
4627  __ CompareRoot(rax, Heap::kNullValueRootIndex);
4628  __ j(equal, if_true);
4629  __ CmpObjectType(rax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, rdx);
4630  __ j(below, if_false);
4631  __ CmpInstanceType(rdx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4632  __ j(above, if_false);
4633  // Check for undetectable objects => false.
4634  __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4635  Immediate(1 << Map::kIsUndetectable));
4636  Split(zero, if_true, if_false, fall_through);
4637  } else {
4638  if (if_false != fall_through) __ jmp(if_false);
4639  }
4640  context()->Plug(if_true, if_false);
4641 }
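// Note: the typeof dispatch above yields "number" for smis and heap numbers,
// "string" for string types that are not undetectable, "symbol" and
// "boolean" by direct type/root checks, "undefined" for the undefined value
// or any undetectable object, "function" for JS_FUNCTION_TYPE and
// JS_FUNCTION_PROXY_TYPE, and "object" for null plus the non-callable,
// non-undetectable spec-object range. Any other literal string falls through
// to an unconditional jump to if_false.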
4642 
4643 
4644 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4645  Comment cmnt(masm_, "[ CompareOperation");
4646  SetSourcePosition(expr->position());
4647 
4648  // First we try a fast inlined version of the compare when one of
4649  // the operands is a literal.
4650  if (TryLiteralCompare(expr)) return;
4651 
4652  // Always perform the comparison for its control flow. Pack the result
4653  // into the expression's context after the comparison is performed.
4654  Label materialize_true, materialize_false;
4655  Label* if_true = NULL;
4656  Label* if_false = NULL;
4657  Label* fall_through = NULL;
4658  context()->PrepareTest(&materialize_true, &materialize_false,
4659  &if_true, &if_false, &fall_through);
4660 
4661  Token::Value op = expr->op();
4662  VisitForStackValue(expr->left());
4663  switch (op) {
4664  case Token::IN:
4665  VisitForStackValue(expr->right());
4666  __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4667  PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4668  __ CompareRoot(rax, Heap::kTrueValueRootIndex);
4669  Split(equal, if_true, if_false, fall_through);
4670  break;
4671 
4672  case Token::INSTANCEOF: {
4673  VisitForStackValue(expr->right());
4674  InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
4675  __ CallStub(&stub);
4676  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4677  __ testp(rax, rax);
4678  // The stub returns 0 for true.
4679  Split(zero, if_true, if_false, fall_through);
4680  break;
4681  }
4682 
4683  default: {
4684  VisitForAccumulatorValue(expr->right());
4685  Condition cc = CompareIC::ComputeCondition(op);
4686  __ Pop(rdx);
4687 
4688  bool inline_smi_code = ShouldInlineSmiCase(op);
4689  JumpPatchSite patch_site(masm_);
4690  if (inline_smi_code) {
4691  Label slow_case;
4692  __ movp(rcx, rdx);
4693  __ orp(rcx, rax);
4694  patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
4695  __ cmpp(rdx, rax);
4696  Split(cc, if_true, if_false, NULL);
4697  __ bind(&slow_case);
4698  }
4699 
4700  // Record position and call the compare IC.
4701  SetSourcePosition(expr->position());
4702  Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
4703  CallIC(ic, expr->CompareOperationFeedbackId());
4704  patch_site.EmitPatchInfo();
4705 
4706  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4707  __ testp(rax, rax);
4708  Split(cc, if_true, if_false, fall_through);
4709  }
4710  }
4711 
4712  // Convert the result of the comparison into one expected for this
4713  // expression's context.
4714  context()->Plug(if_true, if_false);
4715 }
4716 
4717 
4718 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4719  Expression* sub_expr,
4720  NilValue nil) {
4721  Label materialize_true, materialize_false;
4722  Label* if_true = NULL;
4723  Label* if_false = NULL;
4724  Label* fall_through = NULL;
4725  context()->PrepareTest(&materialize_true, &materialize_false,
4726  &if_true, &if_false, &fall_through);
4727 
4728  VisitForAccumulatorValue(sub_expr);
4729  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4730  if (expr->op() == Token::EQ_STRICT) {
4731  Heap::RootListIndex nil_value = nil == kNullValue ?
4732  Heap::kNullValueRootIndex :
4733  Heap::kUndefinedValueRootIndex;
4734  __ CompareRoot(rax, nil_value);
4735  Split(equal, if_true, if_false, fall_through);
4736  } else {
4737  Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4738  CallIC(ic, expr->CompareOperationFeedbackId());
4739  __ testp(rax, rax);
4740  Split(not_zero, if_true, if_false, fall_through);
4741  }
4742  context()->Plug(if_true, if_false);
4743 }
4744 
4745 
4746 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4747  __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
4748  context()->Plug(rax);
4749 }
4750 
4751 
4752 Register FullCodeGenerator::result_register() {
4753  return rax;
4754 }
4755 
4756 
4757 Register FullCodeGenerator::context_register() {
4758  return rsi;
4759 }
4760 
4761 
4762 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4763  DCHECK(IsAligned(frame_offset, kPointerSize));
4764  __ movp(Operand(rbp, frame_offset), value);
4765 }
4766 
4767 
4768 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4769  __ movp(dst, ContextOperand(rsi, context_index));
4770 }
4771 
4772 
4773 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4774  Scope* declaration_scope = scope()->DeclarationScope();
4775  if (declaration_scope->is_global_scope() ||
4776  declaration_scope->is_module_scope()) {
4777  // Contexts nested in the native context have a canonical empty function
4778  // as their closure, not the anonymous closure containing the global
4779  // code. Pass a smi sentinel and let the runtime look up the empty
4780  // function.
4781  __ Push(Smi::FromInt(0));
4782  } else if (declaration_scope->is_eval_scope()) {
4783  // Contexts created by a call to eval have the same closure as the
4784  // context calling eval, not the anonymous closure containing the eval
4785  // code. Fetch it from the context.
4786  __ Push(ContextOperand(rsi, Context::CLOSURE_INDEX));
4787  } else {
4788  DCHECK(declaration_scope->is_function_scope());
4789  __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
4790  }
4791 }
4792 
4793 
4794 // ----------------------------------------------------------------------------
4795 // Non-local control flow support.
4796 
4797 
4798 void FullCodeGenerator::EnterFinallyBlock() {
4799  DCHECK(!result_register().is(rdx));
4800  DCHECK(!result_register().is(rcx));
4801  // Cook return address on top of stack (smi encoded Code* delta)
4802  __ PopReturnAddressTo(rdx);
4803  __ Move(rcx, masm_->CodeObject());
4804  __ subp(rdx, rcx);
4805  __ Integer32ToSmi(rdx, rdx);
4806  __ Push(rdx);
4807 
4808  // Store result register while executing finally block.
4809  __ Push(result_register());
4810 
4811  // Store pending message while executing finally block.
4812  ExternalReference pending_message_obj =
4813  ExternalReference::address_of_pending_message_obj(isolate());
4814  __ Load(rdx, pending_message_obj);
4815  __ Push(rdx);
4816 
4817  ExternalReference has_pending_message =
4818  ExternalReference::address_of_has_pending_message(isolate());
4819  __ Load(rdx, has_pending_message);
4820  __ Integer32ToSmi(rdx, rdx);
4821  __ Push(rdx);
4822 
4823  ExternalReference pending_message_script =
4824  ExternalReference::address_of_pending_message_script(isolate());
4825  __ Load(rdx, pending_message_script);
4826  __ Push(rdx);
4827 }
4828 
4829 
4830 void FullCodeGenerator::ExitFinallyBlock() {
4831  DCHECK(!result_register().is(rdx));
4832  DCHECK(!result_register().is(rcx));
4833  // Restore pending message from stack.
4834  __ Pop(rdx);
4835  ExternalReference pending_message_script =
4836  ExternalReference::address_of_pending_message_script(isolate());
4837  __ Store(pending_message_script, rdx);
4838 
4839  __ Pop(rdx);
4840  __ SmiToInteger32(rdx, rdx);
4841  ExternalReference has_pending_message =
4842  ExternalReference::address_of_has_pending_message(isolate());
4843  __ Store(has_pending_message, rdx);
4844 
4845  __ Pop(rdx);
4846  ExternalReference pending_message_obj =
4847  ExternalReference::address_of_pending_message_obj(isolate());
4848  __ Store(pending_message_obj, rdx);
4849 
4850  // Restore result register from stack.
4851  __ Pop(result_register());
4852 
4853  // Uncook return address.
4854  __ Pop(rdx);
4855  __ SmiToInteger32(rdx, rdx);
4856  __ Move(rcx, masm_->CodeObject());
4857  __ addp(rdx, rcx);
4858  __ jmp(rdx);
4859 }
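// Note: "cooking" the return address protects against the GC moving this
// Code object while the finally block runs; a raw code pointer on the stack
// would then be stale. EnterFinallyBlock therefore stores a smi-tagged delta
// from the Code object's start (GC-safe), and ExitFinallyBlock adds back the
// possibly-relocated Code object base before jumping.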
4860 
4861 
4862 #undef __
4863 
4864 #define __ ACCESS_MASM(masm())
4865 
4866 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4867  int* stack_depth,
4868  int* context_length) {
4869  // The macros used here must preserve the result register.
4870 
4871  // Because the handler block contains the context of the finally
4872  // code, we can restore it directly from there for the finally code
4873  // rather than iteratively unwinding contexts via their previous
4874  // links.
4875  __ Drop(*stack_depth); // Down to the handler block.
4876  if (*context_length > 0) {
4877  // Restore the context to its dedicated register and the stack.
4878  __ movp(rsi, Operand(rsp, StackHandlerConstants::kContextOffset));
4879  __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
4880  }
4881  __ PopTryHandler();
4882  __ call(finally_entry_);
4883 
4884  *stack_depth = 0;
4885  *context_length = 0;
4886  return previous_;
4887 }
4888 
4889 
4890 #undef __
4891 
4892 
4893 static const byte kJnsInstruction = 0x79;
4894 static const byte kNopByteOne = 0x66;
4895 static const byte kNopByteTwo = 0x90;
4896 #ifdef DEBUG
4897 static const byte kCallInstruction = 0xe8;
4898 #endif
4899 
4900 
4901 void BackEdgeTable::PatchAt(Code* unoptimized_code,
4902  Address pc,
4903  BackEdgeState target_state,
4904  Code* replacement_code) {
4905  Address call_target_address = pc - kIntSize;
4906  Address jns_instr_address = call_target_address - 3;
4907  Address jns_offset_address = call_target_address - 2;
4908 
4909  switch (target_state) {
4910  case INTERRUPT:
4911  // sub <profiling_counter>, <delta> ;; Not changed
4912  // jns ok
4913  // call <interrupt stub>
4914  // ok:
4915  *jns_instr_address = kJnsInstruction;
4916  *jns_offset_address = kJnsOffset;
4917  break;
4918  case ON_STACK_REPLACEMENT:
4919  case OSR_AFTER_STACK_CHECK:
4920  // sub <profiling_counter>, <delta> ;; Not changed
4921  // nop
4922  // nop
4923  // call <on-stack replacement>
4924  // ok:
4925  *jns_instr_address = kNopByteOne;
4926  *jns_offset_address = kNopByteTwo;
4927  break;
4928  }
4929 
4930  Assembler::set_target_address_at(call_target_address,
4931  unoptimized_code,
4932  replacement_code->entry());
4933  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4934  unoptimized_code, call_target_address, replacement_code);
4935 }
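// Note (byte-level sketch; kJnsOffset's definition is not shown in this
// listing): the patchable back-edge site toggles between
//
//   INTERRUPT:                       ON_STACK_REPLACEMENT / OSR_AFTER:
//     79 <kJnsOffset>  jns ok          66 90            two-byte nop
//     e8 <imm32>       call <stub>     e8 <imm32>       call <builtin>
//
// A taken jns skips the interrupt call; the 0x66 0x90 nop pair instead lets
// execution fall into the replacement call. Only these two bytes and the
// call's 32-bit target are ever rewritten.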
4936 
4937 
4938 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4939  Isolate* isolate,
4940  Code* unoptimized_code,
4941  Address pc) {
4942  Address call_target_address = pc - kIntSize;
4943  Address jns_instr_address = call_target_address - 3;
4944  DCHECK_EQ(kCallInstruction, *(call_target_address - 1));
4945 
4946  if (*jns_instr_address == kJnsInstruction) {
4947  DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
4948  DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
4949  Assembler::target_address_at(call_target_address,
4950  unoptimized_code));
4951  return INTERRUPT;
4952  }
4953 
4954  DCHECK_EQ(kNopByteOne, *jns_instr_address);
4955  DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));
4956 
4957  if (Assembler::target_address_at(call_target_address,
4958  unoptimized_code) ==
4959  isolate->builtins()->OnStackReplacement()->entry()) {
4960  return ON_STACK_REPLACEMENT;
4961  }
4962 
4963  DCHECK_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
4964  Assembler::target_address_at(call_target_address,
4965  unoptimized_code));
4966  return OSR_AFTER_STACK_CHECK;
4967 }
4968 
4969 
4970 } } // namespace v8::internal
4971 
4972 #endif // V8_TARGET_ARCH_X64
#define BASE_EMBEDDED
Definition: allocation.h:45
Isolate * isolate() const
Definition: assembler.h:62
static Address target_address_at(Address pc, ConstantPoolArray *constant_pool)
static RelocInfo::Mode RelocInfoNone()
static void set_target_address_at(Address pc, ConstantPoolArray *constant_pool, Address target, ICacheFlushMode icache_flush_mode=FLUSH_ICACHE_IF_NEEDED)
static const int kJSReturnSequenceLength
int SizeOfCodeGeneratedSince(Label *label)
static void PatchAt(Code *unoptimized_code, Address pc, BackEdgeState target_state, Code *replacement_code)
static BackEdgeState GetBackEdgeState(Isolate *isolate, Code *unoptimized_code, Address pc_after)
static BailoutId Declarations()
Definition: utils.h:962
static BailoutId FunctionEntry()
Definition: utils.h:961
static Handle< Code > initialize_stub(Isolate *isolate, int argc, CallICState::CallType call_type)
Definition: ic.cc:1338
static const int kValueOffset
Definition: objects.h:9446
static bool RecordPositions(MacroAssembler *masm, int pos, bool right_here=false)
Definition: codegen.cc:225
static Condition ComputeCondition(Token::Value op)
static Handle< Code > GetUninitialized(Isolate *isolate, NilValue nil)
Definition: code-stubs.h:1329
void AddNoFrameRange(int from, int to)
Definition: compiler.h:354
static bool IsCompileTimeValue(Expression *expression)
Definition: parser.cc:3331
static Handle< SharedFunctionInfo > BuildFunctionInfo(FunctionLiteral *node, Handle< Script > script, CompilationInfo *outer)
Definition: compiler.cc:1243
@ STRING_FUNCTION_PROTOTYPE_MAP_INDEX
Definition: contexts.h:294
static int SlotOffset(int index)
Definition: contexts.h:552
static const int kDescriptorSize
Definition: objects.h:3038
static const int kEnumCacheBridgeCacheOffset
Definition: objects.h:3032
static const int kEnumCacheOffset
Definition: objects.h:3028
static const int kFirstOffset
Definition: objects.h:3029
static const int kLengthOffset
Definition: objects.h:2392
static const int kHeaderSize
Definition: objects.h:2393
static int OffsetOfElementAt(int index)
Definition: objects.h:2455
virtual void DropAndPlug(int count, Register reg) const
virtual void DropAndPlug(int count, Register reg) const
virtual void DropAndPlug(int count, Register reg) const
virtual void DropAndPlug(int count, Register reg) const
static const TestContext * cast(const ExpressionContext *context)
Definition: full-codegen.h:778
virtual void Plug(bool flag) const
virtual NestedStatement * Exit(int *stack_depth, int *context_length)
static const int kMaxBackEdgeWeight
Definition: full-codegen.h:99
void EmitAccessor(Expression *expression)
void Split(Condition cc, Label *if_true, Label *if_false, Label *fall_through)
void EmitBinaryOp(BinaryOperation *expr, Token::Value op, OverwriteMode mode)
void EmitLiteralCompareTypeof(Expression *expr, Expression *sub_expr, Handle< String > check)
void VisitDeclarations(ZoneList< Declaration * > *declarations)
void PrepareForBailoutForId(BailoutId id, State state)
void EmitNewClosure(Handle< SharedFunctionInfo > info, bool pretenure)
void GetVar(Register destination, Variable *var)
static Register context_register()
ZoneList< Handle< Object > > * globals_
Definition: full-codegen.h:837
void VisitForControl(Expression *expr, Label *if_true, Label *if_false, Label *fall_through)
Definition: full-codegen.h:382
void CallLoadIC(ContextualMode mode, TypeFeedbackId id=TypeFeedbackId::None())
void RecordBackEdge(BailoutId osr_ast_id)
MemOperand StackOperand(Variable *var)
void EmitVariableLoad(VariableProxy *proxy)
void SetVar(Variable *var, Register source, Register scratch0, Register scratch1)
MemOperand ContextSlotOperandCheckExtensions(Variable *var, Label *slow)
void EmitKeyedPropertyAssignment(Assignment *expr)
void DeclareGlobals(Handle< FixedArray > pairs)
void EmitResolvePossiblyDirectEval(int arg_count)
void VisitForStackValue(Expression *expr)
Definition: full-codegen.h:376
void EmitKeyedCallWithLoadIC(Call *expr, Expression *key)
void EmitKeyedPropertyLoad(Property *expr)
void EmitDebugCheckDeclarationContext(Variable *variable)
FunctionLiteral * function()
Definition: full-codegen.h:609
void EmitNamedSuperPropertyLoad(Property *expr)
bool TryLiteralCompare(CompareOperation *compare)
void SetStatementPosition(Statement *stmt)
Handle< FixedArray > FeedbackVector()
Definition: full-codegen.h:432
void StoreToFrameField(int frame_offset, Register value)
void LoadContextField(Register dst, int context_index)
const ExpressionContext * context()
Definition: full-codegen.h:602
void EmitNamedPropertyLoad(Property *expr)
void EmitBackEdgeBookkeeping(IterationStatement *stmt, Label *back_edge_target)
void DoTest(Expression *condition, Label *if_true, Label *if_false, Label *fall_through)
void VisitForAccumulatorValue(Expression *expr)
Definition: full-codegen.h:370
void PrepareForBailout(Expression *node, State state)
void CallStoreIC(TypeFeedbackId id=TypeFeedbackId::None())
MemOperand VarOperand(Variable *var, Register scratch)
void DeclareModules(Handle< FixedArray > descriptions)
void EmitGeneratorResume(Expression *generator, Expression *value, JSGeneratorObject::ResumeMode resume_mode)
void VisitForEffect(Expression *expr)
Definition: full-codegen.h:364
void EmitAssignment(Expression *expr)
void EmitCall(Call *expr, CallICState::CallType=CallICState::FUNCTION)
void SetFunctionPosition(FunctionLiteral *fun)
void EmitLoadHomeObject(SuperReference *expr)
void EmitStoreToStackLocalOrContextSlot(Variable *var, MemOperand location)
void EmitInlineSmiBinaryOp(BinaryOperation *expr, Token::Value op, OverwriteMode mode, Expression *left, Expression *right)
void EmitLiteralCompareNil(CompareOperation *expr, Expression *sub_expr, NilValue nil)
void EmitVariableAssignment(Variable *var, Token::Value op)
void CallIC(Handle< Code > code, TypeFeedbackId id=TypeFeedbackId::None())
void EmitCreateIteratorResult(bool done)
void EmitLoadGlobalCheckExtensions(VariableProxy *proxy, TypeofState typeof_state, Label *slow)
void EmitCallWithLoadIC(Call *expr)
void EnsureSlotContainsAllocationSite(int slot)
void PrepareForBailoutBeforeSplit(Expression *expr, bool should_normalize, Label *if_true, Label *if_false)
void EmitInlineRuntimeCall(CallRuntime *expr)
void EmitSuperCallWithLoadIC(Call *expr)
void EmitNamedSuperPropertyAssignment(Assignment *expr)
void EmitNamedPropertyAssignment(Assignment *expr)
Handle< FixedArray > handler_table_
Definition: full-codegen.h:844
void RecordJSReturnSite(Call *call)
static Register result_register()
void VisitForTypeofValue(Expression *expr)
void EmitDynamicLookupFastCase(VariableProxy *proxy, TypeofState typeof_state, Label *slow, Label *done)
bool ShouldInlineSmiCase(Token::Value op)
Handle< FixedArray > handler_table()
Definition: full-codegen.h:642
void EmitProfilingCounterDecrement(int delta)
void VisitInDuplicateContext(Expression *expr)
static const int kBuiltinsOffset
Definition: objects.h:7458
static const int kNativeContextOffset
Definition: objects.h:7459
static const int kGlobalProxyOffset
Definition: objects.h:7461
static const int kMapOffset
Definition: objects.h:1427
Factory * factory()
Definition: isolate.h:982
static const int kLengthOffset
Definition: objects.h:10072
static const int kValueOffset
Definition: objects.h:7623
static const int kCacheStampOffset
Definition: objects.h:7631
static const int kSharedFunctionInfoOffset
Definition: objects.h:7379
static const int kLiteralsOffset
Definition: objects.h:7382
static const int kCodeEntryOffset
Definition: objects.h:7376
static const int kResultDonePropertyOffset
Definition: objects.h:7142
static const int kFunctionOffset
Definition: objects.h:7123
static const int kGeneratorClosed
Definition: objects.h:7120
static const int kResultValuePropertyOffset
Definition: objects.h:7141
static const int kGeneratorExecuting
Definition: objects.h:7119
static const int kOperandStackOffset
Definition: objects.h:7127
static const int kReceiverOffset
Definition: objects.h:7125
static const int kContextOffset
Definition: objects.h:7124
static const int kContinuationOffset
Definition: objects.h:7126
static const int kInitialMaxFastElementArray
Definition: objects.h:2180
static const int kPropertiesOffset
Definition: objects.h:2193
static const int kElementsOffset
Definition: objects.h:2194
static const int kSize
Definition: objects.h:7772
static const int kInObjectFieldCount
Definition: objects.h:7826
static const int kValueOffset
Definition: objects.h:7546
static const Register ReceiverRegister()
static const Register NameRegister()
SmiIndex SmiToIndex(Register dst, Register src, int shift)
Condition CheckNonNegativeSmi(Register src)
static const int kIsUndetectable
Definition: objects.h:6244
static const int kBitFieldOffset
Definition: objects.h:6228
static const int kStringWrapperSafeForDefaultValueOf
Definition: objects.h:6251
static const int kInstanceTypeOffset
Definition: objects.h:6229
static const int kBitField2Offset
Definition: objects.h:6233
static const int kConstructorOffset
Definition: objects.h:6191
static const int kPrototypeOffset
Definition: objects.h:6190
static const int kHashShift
Definition: objects.h:8499
static const int kHashFieldOffset
Definition: objects.h:8486
static void MaybeCallEntryHook(MacroAssembler *masm)
static const int kNoPosition
Definition: assembler.h:317
Scope * outer_scope() const
Definition: scopes.h:333
int num_parameters() const
Definition: scopes.h:321
VariableDeclaration * function() const
Definition: scopes.h:309
int ContextChainLength(Scope *scope)
Definition: scopes.cc:715
void VisitIllegalRedeclaration(AstVisitor *visitor)
Definition: scopes.cc:539
Scope * DeclarationScope()
Definition: scopes.cc:737
Variable * arguments() const
Definition: scopes.h:324
Scope * GlobalScope()
Definition: scopes.cc:728
Variable * parameter(int index) const
Definition: scopes.h:316
static const int kHeaderSize
Definition: objects.h:8941
static const int kFormalParameterCountOffset
Definition: objects.h:6946
static const int kInstanceClassNameOffset
Definition: objects.h:6897
static Smi * FromInt(int value)
Definition: objects-inl.h:1321
static bool IsValid(intptr_t value)
Definition: objects-inl.h:1334
static const int kContextOffset
Definition: frames.h:74
static const int kContextOffset
Definition: frames.h:162
static const int kExpressionsOffset
Definition: frames.h:160
static const int kCallerSPOffset
Definition: frames.h:167
static const int kMarkerOffset
Definition: frames.h:161
static const int kCallerFPOffset
Definition: frames.h:165
static const Register ReceiverRegister()
static const Register NameRegister()
static const Register ValueRegister()
static const unsigned int kContainsCachedArrayIndexMask
Definition: objects.h:8618
static const int kLengthOffset
Definition: objects.h:8802
bool Equals(String *other)
Definition: objects-inl.h:3336
static Handle< Code > GetUninitialized(Isolate *isolate)
Definition: code-stubs.h:2258
static TypeFeedbackId None()
Definition: utils.h:945
static Handle< Object > MegamorphicSentinel(Isolate *isolate)
TypeofState
Definition: codegen.h:46
#define __
[DEFINE_BOOL flag cross-references (enable_always_align_csp, enable_unaligned_accesses, and related flags): the generated help strings were garbled by macro expansion and are omitted here.]
#define UNREACHABLE()
Definition: logging.h:30
#define DCHECK_NE(v1, v2)
Definition: logging.h:207
#define DCHECK(condition)
Definition: logging.h:205
#define DCHECK_EQ(v1, v2)
Definition: logging.h:206
@ CALL_FUNCTION
@ TAG_OBJECT
static int Push(SpecialRPOStackFrame *stack, int depth, BasicBlock *child, int unvisited)
Definition: scheduler.cc:773
Vector< const char > CStrVector(const char *data)
Definition: vector.h:158
const int kPointerSize
Definition: globals.h:129
const uint32_t kStringEncodingMask
Definition: objects.h:555
MemOperand ContextOperand(Register context, int index)
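
ContextOperand(context, index) turns a context-slot index into a memory operand off the context register. A sketch of the displacement it is assumed to encode, using the conventional FixedArray-shaped context layout; the constant values below are assumptions for x64:

#include <cassert>

const int kPointerSize = 8;                          // x64
const int kHeapObjectTag = 1;                        // tag bit on heap pointers
const int kFixedArrayHeaderSize = 2 * kPointerSize;  // map word + length word

// Byte displacement added to the (tagged) context register to reach
// slot 'index'.
int ContextSlotDisplacement(int index) {
  return kFixedArrayHeaderSize + index * kPointerSize - kHeapObjectTag;
}

int main() {
  // Slot 0 sits right after the two-word header, minus the tag bias.
  assert(ContextSlotDisplacement(0) == 15);
  assert(ContextSlotDisplacement(4) == 47);
  return 0;
}
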
@ DO_SMI_CHECK
Definition: globals.h:641
@ STRING_ADD_CHECK_BOTH
Definition: code-stubs.h:1218
@ TRACK_ALLOCATION_SITE
Definition: objects.h:8085
@ DONT_TRACK_ALLOCATION_SITE
Definition: objects.h:8084
has_duplicate_parameters (bit accessor; macro expansion also pulled in kFeedbackVectorOffset, kHiddenPrototypeBit, kReadOnlyPrototypeBit, kDoNotCacheBit, kIsTopLevelBit, kAllowLazyCompilationWithoutContext)
Definition: objects-inl.h:5448
@ kSeqStringTag
Definition: objects.h:563
const int kPCOnStackSize
Definition: globals.h:135
const Register kScratchRegister
static LifetimePosition Min(LifetimePosition a, LifetimePosition b)
bool IsDeclaredVariableMode(VariableMode mode)
Definition: globals.h:705
const uint32_t kTwoByteStringTag
Definition: objects.h:556
const int kSmiTagSize
Definition: v8.h:5743
Operand FieldOperand(Register object, int offset)
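
FieldOperand(object, offset) is the companion idiom for addressing a field of any tagged heap object. A minimal sketch of the assumed arithmetic, with plain integers standing in for an assembler Operand:

#include <cassert>

const int kHeapObjectTag = 1;  // tag bit set on heap pointers

// Assumed arithmetic behind FieldOperand: subtract the tag so that
// [object + offset - 1] lands on the untagged field address.
int FieldDisplacement(int offset) { return offset - kHeapObjectTag; }

int main() {
  const int kMapOffset = 0;  // first word of a heap object is its map
  assert(FieldDisplacement(kMapOffset) == -1);
  return 0;
}
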
const Register rsi
const Register rbp
const int kPointerSizeLog2
Definition: globals.h:147
const uint32_t kStringTag
Definition: objects.h:544
@ LAST_NONCALLABLE_SPEC_OBJECT_TYPE
Definition: objects.h:785
@ JS_REGEXP_TYPE
Definition: objects.h:748
@ NUM_OF_CALLABLE_SPEC_OBJECT_TYPES
Definition: objects.h:788
@ JS_VALUE_TYPE
Definition: objects.h:728
@ JS_DATE_TYPE
Definition: objects.h:730
@ FIRST_NONCALLABLE_SPEC_OBJECT_TYPE
Definition: objects.h:784
@ FIRST_JS_PROXY_TYPE
Definition: objects.h:778
@ JS_ARRAY_TYPE
Definition: objects.h:738
@ FIRST_NONSTRING_TYPE
Definition: objects.h:758
@ FIRST_SPEC_OBJECT_TYPE
Definition: objects.h:781
@ LAST_SPEC_OBJECT_TYPE
Definition: objects.h:782
@ JS_FUNCTION_TYPE
Definition: objects.h:749
@ JS_FUNCTION_PROXY_TYPE
Definition: objects.h:726
@ LAST_JS_PROXY_TYPE
Definition: objects.h:779
const uint32_t kOneByteStringTag
Definition: objects.h:557
@ NO_OVERWRITE
Definition: ic-state.h:58
@ OVERWRITE_RIGHT
Definition: ic-state.h:58
const Register rdi
bool IsImmutableVariableMode(VariableMode mode)
Definition: globals.h:715
const int kInt64Size
Definition: globals.h:126
const Register pc
@ DYNAMIC_GLOBAL
Definition: globals.h:689
@ DYNAMIC_LOCAL
Definition: globals.h:693
@ CONST_LEGACY
Definition: globals.h:671
const Register rbx
Condition NegateCondition(Condition cond)
Definition: constants-arm.h:86
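
NegateCondition exploits the pairing of condition codes. A sketch assuming the x86 encoding, where complementary conditions differ only in the low bit:

#include <cassert>

// x86 encodes carry=2/not_carry=3, zero=4/not_zero=5, and so on, so the
// negation is assumed to be a single bit flip.
enum Condition { carry = 2, not_carry = 3, zero = 4, not_zero = 5 };

Condition NegateCondition(Condition cc) {
  return static_cast<Condition>(cc ^ 1);
}

int main() {
  assert(NegateCondition(carry) == not_carry);
  assert(NegateCondition(not_zero) == zero);
  return 0;
}
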
static LifetimePosition Max(LifetimePosition a, LifetimePosition b)
const uint32_t kStringRepresentationMask
Definition: objects.h:561
byte * Address
Definition: globals.h:101
NilValue
Definition: v8.h:97
@ kNullValue
Definition: v8.h:97
@ NOT_CONTEXTUAL
Definition: objects.h:174
const Register r8
const int kIntSize
Definition: globals.h:124
const int kFPOnStackSize
Definition: globals.h:136
const Register no_reg
const Register rdx
const Register arg_reg_1
flag (accessor; macro expansion also pulled in kFeedbackVectorOffset)
Definition: objects-inl.h:5418
const Register rax
MemOperand GlobalObjectOperand()
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
const intptr_t kSmiTagMask
Definition: v8.h:5744
@ RECORD_CONSTRUCTOR_TARGET
Definition: globals.h:480
const int kSmiTag
Definition: v8.h:5742
const Register rcx
const uint32_t kIsNotStringMask
Definition: objects.h:543
bool IsAligned(T value, U alignment)
Definition: utils.h:123
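
IsAligned is the standard power-of-two alignment test. A sketch assumed to match the utils.h helper:

#include <cassert>
#include <cstdint>

// Aligned iff the low bits under the mask are all zero; 'alignment' is
// assumed to be a power of two.
template <typename T, typename U>
bool IsAligned(T value, U alignment) {
  return (value & (alignment - 1)) == 0;
}

int main() {
  assert(IsAligned(uintptr_t{64}, 8));
  assert(!IsAligned(uintptr_t{15}, 4));
  return 0;
}
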
@ NO_CALL_FUNCTION_FLAGS
Definition: globals.h:469
@ CALL_AS_METHOD
Definition: globals.h:470
const Register rsp
@ STRING_INDEX_IS_NUMBER
Definition: code-stubs.h:1590
void CopyBytes(uint8_t *target, uint8_t *source)
bool IsFastObjectElementsKind(ElementsKind kind)
const Register arg_reg_2
Debugger support for the V8 JavaScript engine.
Definition: accessors.cc:20
#define VOID
#define IN
PropertyAttributes
@ NONE
@ READ_ONLY
bool is(Register reg) const