V8 Project — Doxygen source listing
full-codegen-ia32.cc
The numbered source listing of this file follows.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/v8.h"
6 
7 #if V8_TARGET_ARCH_IA32
8 
9 #include "src/code-factory.h"
10 #include "src/code-stubs.h"
11 #include "src/codegen.h"
12 #include "src/compiler.h"
13 #include "src/debug.h"
14 #include "src/full-codegen.h"
15 #include "src/ic/ic.h"
16 #include "src/isolate-inl.h"
17 #include "src/parser.h"
18 #include "src/scopes.h"
19 
20 namespace v8 {
21 namespace internal {
22 
23 #define __ ACCESS_MASM(masm_)
24 
25 
// Helper for emitting a patchable smi-check jump.  The IC machinery later
// rewrites the emitted conditional jump in place: jc becomes jz and jnc
// becomes jnz (see EmitJump).  EmitPatchInfo encodes the distance back to
// the patchable jump inside a test-eax instruction so the patcher can
// locate it from the call site.
26 class JumpPatchSite BASE_EMBEDDED {
27  public:
28  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
29 #ifdef DEBUG
30  info_emitted_ = false;
31 #endif
32  }
33 
 // Invariant: patch info was emitted if and only if the site was bound.
34  ~JumpPatchSite() {
35  DCHECK(patch_site_.is_bound() == info_emitted_);
36  }
37 
 // Jump to |target| when |reg| is NOT a smi.  test sets CF from the smi
 // tag bit, so "not_carry" is taken for smis until the jump is patched.
38  void EmitJumpIfNotSmi(Register reg,
39  Label* target,
40  Label::Distance distance = Label::kFar) {
41  __ test(reg, Immediate(kSmiTagMask));
42  EmitJump(not_carry, target, distance); // Always taken before patched.
43  }
44 
 // Jump to |target| when |reg| IS a smi (after patching).
45  void EmitJumpIfSmi(Register reg,
46  Label* target,
47  Label::Distance distance = Label::kFar) {
48  __ test(reg, Immediate(kSmiTagMask));
49  EmitJump(carry, target, distance); // Never taken before patched.
50  }
51 
 // Record the delta from here back to the patch site in a test-eax
 // instruction; a plain nop signals that no inlined smi code was emitted.
52  void EmitPatchInfo() {
53  if (patch_site_.is_bound()) {
54  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
55  DCHECK(is_uint8(delta_to_patch_site));
56  __ test(eax, Immediate(delta_to_patch_site));
57 #ifdef DEBUG
58  info_emitted_ = true;
59 #endif
60  } else {
61  __ nop(); // Signals no inlined code.
62  }
63  }
64 
65  private:
66  // jc will be patched with jz, jnc will become jnz.
67  void EmitJump(Condition cc, Label* target, Label::Distance distance) {
68  DCHECK(!patch_site_.is_bound() && !info_emitted_);
69  DCHECK(cc == carry || cc == not_carry);
70  __ bind(&patch_site_);
71  __ j(cc, target, distance);
72  }
73 
74  MacroAssembler* masm_;
75  Label patch_site_;
76 #ifdef DEBUG
77  bool info_emitted_;
78 #endif
79 };
80 
81 
82 // Generate code for a JS function. On entry to the function the receiver
83 // and arguments have been pushed on the stack left to right, with the
84 // return address on top of them. The actual argument count matches the
85 // formal parameter count expected by the function.
86 //
87 // The live registers are:
88 // o edi: the JS function object being called (i.e. ourselves)
89 // o esi: our context
90 // o ebp: our caller's frame pointer
91 // o esp: stack pointer (pointing to return address)
92 //
93 // The function builds a JS frame. Please see JavaScriptFrameConstants in
94 // frames-ia32.h for its layout.
96  CompilationInfo* info = info_;
98  isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
99 
100  profiling_counter_ = isolate()->factory()->NewCell(
101  Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
102  SetFunctionPosition(function());
103  Comment cmnt(masm_, "[ function compiled by full code generator");
104 
106 
107 #ifdef DEBUG
108  if (strlen(FLAG_stop_at) > 0 &&
109  info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
110  __ int3();
111  }
112 #endif
113 
114  // Sloppy mode functions and builtins need to replace the receiver with the
115  // global proxy when called as functions (without an explicit receiver
116  // object).
117  if (info->strict_mode() == SLOPPY && !info->is_native()) {
118  Label ok;
119  // +1 for return address.
120  int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
121  __ mov(ecx, Operand(esp, receiver_offset));
122 
123  __ cmp(ecx, isolate()->factory()->undefined_value());
124  __ j(not_equal, &ok, Label::kNear);
125 
126  __ mov(ecx, GlobalObjectOperand());
128 
129  __ mov(Operand(esp, receiver_offset), ecx);
130 
131  __ bind(&ok);
132  }
133 
134  // Open a frame scope to indicate that there is a frame on the stack. The
135  // MANUAL indicates that the scope shouldn't actually generate code to set up
136  // the frame (that is done below).
137  FrameScope frame_scope(masm_, StackFrame::MANUAL);
138 
139  info->set_prologue_offset(masm_->pc_offset());
140  __ Prologue(info->IsCodePreAgingActive());
141  info->AddNoFrameRange(0, masm_->pc_offset());
142 
143  { Comment cmnt(masm_, "[ Allocate locals");
144  int locals_count = info->scope()->num_stack_slots();
145  // Generators allocate locals, if any, in context slots.
146  DCHECK(!info->function()->is_generator() || locals_count == 0);
147  if (locals_count == 1) {
148  __ push(Immediate(isolate()->factory()->undefined_value()));
149  } else if (locals_count > 1) {
150  if (locals_count >= 128) {
151  Label ok;
152  __ mov(ecx, esp);
153  __ sub(ecx, Immediate(locals_count * kPointerSize));
154  ExternalReference stack_limit =
155  ExternalReference::address_of_real_stack_limit(isolate());
156  __ cmp(ecx, Operand::StaticVariable(stack_limit));
157  __ j(above_equal, &ok, Label::kNear);
158  __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
159  __ bind(&ok);
160  }
161  __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
162  const int kMaxPushes = 32;
163  if (locals_count >= kMaxPushes) {
164  int loop_iterations = locals_count / kMaxPushes;
165  __ mov(ecx, loop_iterations);
166  Label loop_header;
167  __ bind(&loop_header);
168  // Do pushes.
169  for (int i = 0; i < kMaxPushes; i++) {
170  __ push(eax);
171  }
172  __ dec(ecx);
173  __ j(not_zero, &loop_header, Label::kNear);
174  }
175  int remaining = locals_count % kMaxPushes;
176  // Emit the remaining pushes.
177  for (int i = 0; i < remaining; i++) {
178  __ push(eax);
179  }
180  }
181  }
182 
183  bool function_in_register = true;
184 
185  // Possibly allocate a local context.
186  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
187  if (heap_slots > 0) {
188  Comment cmnt(masm_, "[ Allocate context");
189  bool need_write_barrier = true;
190  // Argument to NewContext is the function, which is still in edi.
191  if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
192  __ push(edi);
193  __ Push(info->scope()->GetScopeInfo());
194  __ CallRuntime(Runtime::kNewGlobalContext, 2);
195  } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
196  FastNewContextStub stub(isolate(), heap_slots);
197  __ CallStub(&stub);
198  // Result of FastNewContextStub is always in new space.
199  need_write_barrier = false;
200  } else {
201  __ push(edi);
202  __ CallRuntime(Runtime::kNewFunctionContext, 1);
203  }
204  function_in_register = false;
205  // Context is returned in eax. It replaces the context passed to us.
206  // It's saved in the stack and kept live in esi.
207  __ mov(esi, eax);
209 
210  // Copy parameters into context if necessary.
211  int num_parameters = info->scope()->num_parameters();
212  for (int i = 0; i < num_parameters; i++) {
213  Variable* var = scope()->parameter(i);
214  if (var->IsContextSlot()) {
215  int parameter_offset = StandardFrameConstants::kCallerSPOffset +
216  (num_parameters - 1 - i) * kPointerSize;
217  // Load parameter from stack.
218  __ mov(eax, Operand(ebp, parameter_offset));
219  // Store it in the context.
220  int context_offset = Context::SlotOffset(var->index());
221  __ mov(Operand(esi, context_offset), eax);
222  // Update the write barrier. This clobbers eax and ebx.
223  if (need_write_barrier) {
224  __ RecordWriteContextSlot(esi,
225  context_offset,
226  eax,
227  ebx,
229  } else if (FLAG_debug_code) {
230  Label done;
231  __ JumpIfInNewSpace(esi, eax, &done, Label::kNear);
232  __ Abort(kExpectedNewSpaceObject);
233  __ bind(&done);
234  }
235  }
236  }
237  }
238 
239  Variable* arguments = scope()->arguments();
240  if (arguments != NULL) {
241  // Function uses arguments object.
242  Comment cmnt(masm_, "[ Allocate arguments object");
243  if (function_in_register) {
244  __ push(edi);
245  } else {
247  }
248  // Receiver is just before the parameters on the caller's stack.
249  int num_parameters = info->scope()->num_parameters();
250  int offset = num_parameters * kPointerSize;
251  __ lea(edx,
252  Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
253  __ push(edx);
254  __ push(Immediate(Smi::FromInt(num_parameters)));
255  // Arguments to ArgumentsAccessStub:
256  // function, receiver address, parameter count.
257  // The stub will rewrite receiver and parameter count if the previous
258  // stack frame was an arguments adapter frame.
260  if (strict_mode() == STRICT) {
262  } else if (function()->has_duplicate_parameters()) {
264  } else {
266  }
267  ArgumentsAccessStub stub(isolate(), type);
268  __ CallStub(&stub);
269 
270  SetVar(arguments, eax, ebx, edx);
271  }
272 
273  if (FLAG_trace) {
274  __ CallRuntime(Runtime::kTraceEnter, 0);
275  }
276 
277  // Visit the declarations and body unless there is an illegal
278  // redeclaration.
279  if (scope()->HasIllegalRedeclaration()) {
280  Comment cmnt(masm_, "[ Declarations");
282 
283  } else {
285  { Comment cmnt(masm_, "[ Declarations");
286  // For named function expressions, declare the function name as a
287  // constant.
288  if (scope()->is_function_scope() && scope()->function() != NULL) {
289  VariableDeclaration* function = scope()->function();
290  DCHECK(function->proxy()->var()->mode() == CONST ||
291  function->proxy()->var()->mode() == CONST_LEGACY);
292  DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
293  VisitVariableDeclaration(function);
294  }
295  VisitDeclarations(scope()->declarations());
296  }
297 
298  { Comment cmnt(masm_, "[ Stack check");
300  Label ok;
301  ExternalReference stack_limit
302  = ExternalReference::address_of_stack_limit(isolate());
303  __ cmp(esp, Operand::StaticVariable(stack_limit));
304  __ j(above_equal, &ok, Label::kNear);
305  __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
306  __ bind(&ok);
307  }
308 
309  { Comment cmnt(masm_, "[ Body");
310  DCHECK(loop_depth() == 0);
311  VisitStatements(function()->body());
312  DCHECK(loop_depth() == 0);
313  }
314  }
315 
316  // Always emit a 'return undefined' in case control fell off the end of
317  // the body.
318  { Comment cmnt(masm_, "[ return <undefined>;");
319  __ mov(eax, isolate()->factory()->undefined_value());
321  }
322 }
323 
324 
326  __ Move(eax, Immediate(Smi::FromInt(0)));
327 }
328 
329 
331  __ mov(ebx, Immediate(profiling_counter_));
333  Immediate(Smi::FromInt(delta)));
334 }
335 
336 
338  int reset_value = FLAG_interrupt_budget;
339  __ mov(ebx, Immediate(profiling_counter_));
341  Immediate(Smi::FromInt(reset_value)));
342 }
343 
344 
345 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
346  Label* back_edge_target) {
347  Comment cmnt(masm_, "[ Back edge bookkeeping");
348  Label ok;
349 
350  DCHECK(back_edge_target->is_bound());
351  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
352  int weight = Min(kMaxBackEdgeWeight,
353  Max(1, distance / kCodeSizeMultiplier));
355  __ j(positive, &ok, Label::kNear);
356  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
357 
358  // Record a mapping of this PC offset to the OSR id. This is used to find
359  // the AST id from the unoptimized code in order to use it as a key into
360  // the deoptimization input data found in the optimized code.
361  RecordBackEdge(stmt->OsrEntryId());
362 
364 
365  __ bind(&ok);
366  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
367  // Record a mapping of the OSR id to this PC. This is used if the OSR
368  // entry becomes the target of a bailout. We don't expect it to be, but
369  // we want it to work if it is.
370  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
371 }
372 
373 
375  Comment cmnt(masm_, "[ Return sequence");
376  if (return_label_.is_bound()) {
377  __ jmp(&return_label_);
378  } else {
379  // Common return label
380  __ bind(&return_label_);
381  if (FLAG_trace) {
382  __ push(eax);
383  __ CallRuntime(Runtime::kTraceExit, 1);
384  }
385  // Pretend that the exit is a backwards jump to the entry.
386  int weight = 1;
387  if (info_->ShouldSelfOptimize()) {
388  weight = FLAG_interrupt_budget / FLAG_self_opt_count;
389  } else {
390  int distance = masm_->pc_offset();
391  weight = Min(kMaxBackEdgeWeight,
392  Max(1, distance / kCodeSizeMultiplier));
393  }
395  Label ok;
396  __ j(positive, &ok, Label::kNear);
397  __ push(eax);
398  __ call(isolate()->builtins()->InterruptCheck(),
400  __ pop(eax);
402  __ bind(&ok);
403 #ifdef DEBUG
404  // Add a label for checking the size of the code used for returning.
405  Label check_exit_codesize;
406  masm_->bind(&check_exit_codesize);
407 #endif
408  SetSourcePosition(function()->end_position() - 1);
409  __ RecordJSReturn();
410  // Do not use the leave instruction here because it is too short to
411  // patch with the code required by the debugger.
412  __ mov(esp, ebp);
413  int no_frame_start = masm_->pc_offset();
414  __ pop(ebp);
415 
416  int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
417  __ Ret(arguments_bytes, ecx);
418  // Check that the size of the code used for returning is large enough
419  // for the debugger's requirements.
421  masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
422  info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
423  }
424 }
425 
426 
// In an effect context a variable's value is not needed; only sanity-check
// that the variable has a materialized (stack or context) location.
427 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
428  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
429 }
430 
431 
// Load the variable's value into the accumulator (result_register()).
432 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
433  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
434  codegen()->GetVar(result_register(), var);
435 }
436 
437 
// Push the variable's value onto the stack.  result_register() only serves
// as a scratch register for computing the memory operand here.
438 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
439  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
440  MemOperand operand = codegen()->VarOperand(var, result_register());
441  // Memory operands can be pushed directly.
442  __ push(operand);
443 }
444 
445 
// Load the variable into the accumulator and branch on its truthiness.
446 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
447  // For simplicity we always test the accumulator register.
448  codegen()->GetVar(result_register(), var);
449  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
450  codegen()->DoTest(this);
451 }
452 
453 
455  UNREACHABLE(); // Not used on IA32.
456 }
457 
458 
460  Heap::RootListIndex index) const {
461  UNREACHABLE(); // Not used on IA32.
462 }
463 
464 
466  Heap::RootListIndex index) const {
467  UNREACHABLE(); // Not used on IA32.
468 }
469 
470 
472  UNREACHABLE(); // Not used on IA32.
473 }
474 
475 
// A literal in an effect context has no observable effect: emit nothing.
476 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
477 }
478 
479 
481  Handle<Object> lit) const {
482  if (lit->IsSmi()) {
483  __ SafeMove(result_register(), Immediate(lit));
484  } else {
485  __ Move(result_register(), Immediate(lit));
486  }
487 }
488 
489 
// Push a literal.  Smis use SafePush, which (unlike a raw push) does not
// embed the untagged value directly in the instruction stream.
490 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
491  if (lit->IsSmi()) {
492  __ SafePush(Immediate(lit));
493  } else {
494  __ push(Immediate(lit));
495  }
496 }
497 
498 
// Branch on the truthiness of a literal.  For literals whose boolean value
// is known at compile time (undefined/null/false, true/objects, strings by
// length, smis by value) jump directly; otherwise fall back to the generic
// DoTest on the accumulator.
499 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
500  codegen()->PrepareForBailoutBeforeSplit(condition(),
501  true,
502  true_label_,
503  false_label_);
504  DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
505  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
506  if (false_label_ != fall_through_) __ jmp(false_label_);
507  } else if (lit->IsTrue() || lit->IsJSObject()) {
508  if (true_label_ != fall_through_) __ jmp(true_label_);
509  } else if (lit->IsString()) {
510  if (String::cast(*lit)->length() == 0) {
 // The empty string is falsy.
511  if (false_label_ != fall_through_) __ jmp(false_label_);
512  } else {
513  if (true_label_ != fall_through_) __ jmp(true_label_);
514  }
515  } else if (lit->IsSmi()) {
516  if (Smi::cast(*lit)->value() == 0) {
 // The smi zero is falsy.
517  if (false_label_ != fall_through_) __ jmp(false_label_);
518  } else {
519  if (true_label_ != fall_through_) __ jmp(true_label_);
520  }
521  } else {
522  // For simplicity we always test the accumulator register.
523  __ mov(result_register(), lit);
524  codegen()->DoTest(this);
525  }
526 }
527 
528 
530  Register reg) const {
531  DCHECK(count > 0);
532  __ Drop(count);
533 }
534 
535 
537  int count,
538  Register reg) const {
539  DCHECK(count > 0);
540  __ Drop(count);
541  __ Move(result_register(), reg);
542 }
543 
544 
546  Register reg) const {
547  DCHECK(count > 0);
548  if (count > 1) __ Drop(count - 1);
549  __ mov(Operand(esp, 0), reg);
550 }
551 
552 
554  Register reg) const {
555  DCHECK(count > 0);
556  // For simplicity we always test the accumulator register.
557  __ Drop(count);
558  __ Move(result_register(), reg);
559  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
560  codegen()->DoTest(this);
561 }
562 
563 
// In an effect context both outcomes are equivalent, so the caller must
// have passed the same label twice; just bind it.
564 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
565  Label* materialize_false) const {
566  DCHECK(materialize_true == materialize_false);
567  __ bind(materialize_true);
568 }
569 
570 
572  Label* materialize_true,
573  Label* materialize_false) const {
574  Label done;
575  __ bind(materialize_true);
576  __ mov(result_register(), isolate()->factory()->true_value());
577  __ jmp(&done, Label::kNear);
578  __ bind(materialize_false);
579  __ mov(result_register(), isolate()->factory()->false_value());
580  __ bind(&done);
581 }
582 
583 
585  Label* materialize_true,
586  Label* materialize_false) const {
587  Label done;
588  __ bind(materialize_true);
589  __ push(Immediate(isolate()->factory()->true_value()));
590  __ jmp(&done, Label::kNear);
591  __ bind(materialize_false);
592  __ push(Immediate(isolate()->factory()->false_value()));
593  __ bind(&done);
594 }
595 
596 
// In a test context the materialization labels must already be the test's
// own true/false targets; nothing to emit.
597 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
598  Label* materialize_false) const {
599  DCHECK(materialize_true == true_label_);
600  DCHECK(materialize_false == false_label_);
601 }
602 
603 
605 }
606 
607 
609  Handle<Object> value = flag
610  ? isolate()->factory()->true_value()
611  : isolate()->factory()->false_value();
612  __ mov(result_register(), value);
613 }
614 
615 
617  Handle<Object> value = flag
618  ? isolate()->factory()->true_value()
619  : isolate()->factory()->false_value();
620  __ push(Immediate(value));
621 }
622 
623 
625  codegen()->PrepareForBailoutBeforeSplit(condition(),
626  true,
627  true_label_,
628  false_label_);
629  if (flag) {
630  if (true_label_ != fall_through_) __ jmp(true_label_);
631  } else {
632  if (false_label_ != fall_through_) __ jmp(false_label_);
633  }
634 }
635 
636 
637 void FullCodeGenerator::DoTest(Expression* condition,
638  Label* if_true,
639  Label* if_false,
640  Label* fall_through) {
641  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
642  CallIC(ic, condition->test_id());
644  // The stub returns nonzero for true.
645  Split(not_zero, if_true, if_false, fall_through);
646 }
647 
648 
650  Label* if_true,
651  Label* if_false,
652  Label* fall_through) {
653  if (if_false == fall_through) {
654  __ j(cc, if_true);
655  } else if (if_true == fall_through) {
656  __ j(NegateCondition(cc), if_false);
657  } else {
658  __ j(cc, if_true);
659  __ jmp(if_false);
660  }
661 }
662 
663 
665  DCHECK(var->IsStackAllocated());
666  // Offset is negative because higher indexes are at lower addresses.
667  int offset = -var->index() * kPointerSize;
668  // Adjust by a (parameter or local) base offset.
669  if (var->IsParameter()) {
670  offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
671  } else {
673  }
674  return Operand(ebp, offset);
675 }
676 
677 
// Return a memory operand addressing |var|.  Context slots walk the context
// chain into |scratch| (clobbering it); stack slots are ebp-relative and do
// not touch |scratch|.
678 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
679  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
680  if (var->IsContextSlot()) {
681  int context_chain_length = scope()->ContextChainLength(var->scope());
682  __ LoadContext(scratch, context_chain_length);
683  return ContextOperand(scratch, var->index());
684  } else {
685  return StackOperand(var);
686  }
687 }
688 
689 
// Load |var|'s value into |dest|.  |dest| doubles as the scratch register
// for the context walk, which is safe since it is overwritten by the load.
690 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
691  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
692  MemOperand location = VarOperand(var, dest);
693  __ mov(dest, location);
694 }
695 
696 
// Store |src| into |var|'s slot.  |scratch0| is used for the context walk;
// both scratches are clobbered by the write barrier for context slots.
// All three registers must be distinct and none may alias esi (the current
// context register).
697 void FullCodeGenerator::SetVar(Variable* var,
698  Register src,
699  Register scratch0,
700  Register scratch1) {
701  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
702  DCHECK(!scratch0.is(src));
703  DCHECK(!scratch0.is(scratch1));
704  DCHECK(!scratch1.is(src));
705  MemOperand location = VarOperand(var, scratch0);
706  __ mov(location, src);
707 
708  // Emit the write barrier code if the location is in the heap.
709  if (var->IsContextSlot()) {
710  int offset = Context::SlotOffset(var->index());
711  DCHECK(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
712  __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
713  }
714 }
715 
716 
718  bool should_normalize,
719  Label* if_true,
720  Label* if_false) {
721  // Only prepare for bailouts before splits if we're in a test
722  // context. Otherwise, we let the Visit function deal with the
723  // preparation to avoid preparing with the same AST id twice.
724  if (!context()->IsTest() || !info_->IsOptimizable()) return;
725 
726  Label skip;
727  if (should_normalize) __ jmp(&skip, Label::kNear);
728  PrepareForBailout(expr, TOS_REG);
729  if (should_normalize) {
730  __ cmp(eax, isolate()->factory()->true_value());
731  Split(equal, if_true, if_false, NULL);
732  __ bind(&skip);
733  }
734 }
735 
736 
738  // The variable in the declaration always resides in the current context.
739  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
740  if (generate_debug_code_) {
741  // Check that we're not inside a with or catch context.
743  __ cmp(ebx, isolate()->factory()->with_context_map());
744  __ Check(not_equal, kDeclarationInWithContext);
745  __ cmp(ebx, isolate()->factory()->catch_context_map());
746  __ Check(not_equal, kDeclarationInCatchContext);
747  }
748 }
749 
750 
751 void FullCodeGenerator::VisitVariableDeclaration(
752  VariableDeclaration* declaration) {
753  // If it was not possible to allocate the variable at compile time, we
754  // need to "declare" it at runtime to make sure it actually exists in the
755  // local context.
756  VariableProxy* proxy = declaration->proxy();
757  VariableMode mode = declaration->mode();
758  Variable* variable = proxy->var();
759  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
760  switch (variable->location()) {
762  globals_->Add(variable->name(), zone());
763  globals_->Add(variable->binding_needs_init()
764  ? isolate()->factory()->the_hole_value()
765  : isolate()->factory()->undefined_value(), zone());
766  break;
767 
768  case Variable::PARAMETER:
769  case Variable::LOCAL:
770  if (hole_init) {
771  Comment cmnt(masm_, "[ VariableDeclaration");
772  __ mov(StackOperand(variable),
773  Immediate(isolate()->factory()->the_hole_value()));
774  }
775  break;
776 
777  case Variable::CONTEXT:
778  if (hole_init) {
779  Comment cmnt(masm_, "[ VariableDeclaration");
781  __ mov(ContextOperand(esi, variable->index()),
782  Immediate(isolate()->factory()->the_hole_value()));
783  // No write barrier since the hole value is in old space.
784  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
785  }
786  break;
787 
788  case Variable::LOOKUP: {
789  Comment cmnt(masm_, "[ VariableDeclaration");
790  __ push(esi);
791  __ push(Immediate(variable->name()));
792  // VariableDeclaration nodes are always introduced in one of four modes.
794  PropertyAttributes attr =
796  __ push(Immediate(Smi::FromInt(attr)));
797  // Push initial value, if any.
798  // Note: For variables we must not push an initial value (such as
799  // 'undefined') because we may have a (legal) redeclaration and we
800  // must not destroy the current value.
801  if (hole_init) {
802  __ push(Immediate(isolate()->factory()->the_hole_value()));
803  } else {
804  __ push(Immediate(Smi::FromInt(0))); // Indicates no initial value.
805  }
806  __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
807  break;
808  }
809  }
810 }
811 
812 
813 void FullCodeGenerator::VisitFunctionDeclaration(
814  FunctionDeclaration* declaration) {
815  VariableProxy* proxy = declaration->proxy();
816  Variable* variable = proxy->var();
817  switch (variable->location()) {
818  case Variable::UNALLOCATED: {
819  globals_->Add(variable->name(), zone());
820  Handle<SharedFunctionInfo> function =
821  Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
822  // Check for stack-overflow exception.
823  if (function.is_null()) return SetStackOverflow();
824  globals_->Add(function, zone());
825  break;
826  }
827 
828  case Variable::PARAMETER:
829  case Variable::LOCAL: {
830  Comment cmnt(masm_, "[ FunctionDeclaration");
831  VisitForAccumulatorValue(declaration->fun());
832  __ mov(StackOperand(variable), result_register());
833  break;
834  }
835 
836  case Variable::CONTEXT: {
837  Comment cmnt(masm_, "[ FunctionDeclaration");
839  VisitForAccumulatorValue(declaration->fun());
840  __ mov(ContextOperand(esi, variable->index()), result_register());
841  // We know that we have written a function, which is not a smi.
842  __ RecordWriteContextSlot(esi,
843  Context::SlotOffset(variable->index()),
844  result_register(),
845  ecx,
849  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
850  break;
851  }
852 
853  case Variable::LOOKUP: {
854  Comment cmnt(masm_, "[ FunctionDeclaration");
855  __ push(esi);
856  __ push(Immediate(variable->name()));
857  __ push(Immediate(Smi::FromInt(NONE)));
858  VisitForStackValue(declaration->fun());
859  __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
860  break;
861  }
862  }
863 }
864 
865 
866 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
867  Variable* variable = declaration->proxy()->var();
868  DCHECK(variable->location() == Variable::CONTEXT);
869  DCHECK(variable->interface()->IsFrozen());
870 
871  Comment cmnt(masm_, "[ ModuleDeclaration");
873 
874  // Load instance object.
875  __ LoadContext(eax, scope_->ContextChainLength(scope_->GlobalScope()));
876  __ mov(eax, ContextOperand(eax, variable->interface()->Index()));
878 
879  // Assign it.
880  __ mov(ContextOperand(esi, variable->index()), eax);
881  // We know that we have written a module, which is not a smi.
882  __ RecordWriteContextSlot(esi,
883  Context::SlotOffset(variable->index()),
884  eax,
885  ecx,
889  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
890 
891  // Traverse into body.
892  Visit(declaration->module());
893 }
894 
895 
896 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
897  VariableProxy* proxy = declaration->proxy();
898  Variable* variable = proxy->var();
899  switch (variable->location()) {
901  // TODO(rossberg)
902  break;
903 
904  case Variable::CONTEXT: {
905  Comment cmnt(masm_, "[ ImportDeclaration");
907  // TODO(rossberg)
908  break;
909  }
910 
911  case Variable::PARAMETER:
912  case Variable::LOCAL:
913  case Variable::LOOKUP:
914  UNREACHABLE();
915  }
916 }
917 
918 
// Export declarations are not yet implemented; intentionally a no-op.
919 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
920  // TODO(rossberg)
921 }
922 
923 
924 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
925  // Call the runtime to declare the globals.
926  __ push(esi); // The context is the first argument.
927  __ Push(pairs);
929  __ CallRuntime(Runtime::kDeclareGlobals, 3);
930  // Return value is ignored.
931 }
932 
933 
// Declare the given module descriptions via a runtime call.
934 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
935  // Call the runtime to declare the modules.
936  __ Push(descriptions);
937  __ CallRuntime(Runtime::kDeclareModules, 1);
938  // Return value is ignored.
939 }
940 
941 
// Compile a switch statement: first a chain of '===' comparisons against
// the tag value (kept on the stack), each branching to its clause's body
// label, then the bodies in source order so cases fall through naturally.
942 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
943  Comment cmnt(masm_, "[ SwitchStatement");
944  Breakable nested_statement(this, stmt);
945  SetStatementPosition(stmt);
946 
947  // Keep the switch value on the stack until a case matches.
948  VisitForStackValue(stmt->tag());
949  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
950 
951  ZoneList<CaseClause*>* clauses = stmt->cases();
952  CaseClause* default_clause = NULL; // Can occur anywhere in the list.
953 
954  Label next_test; // Recycled for each test.
955  // Compile all the tests with branches to their bodies.
956  for (int i = 0; i < clauses->length(); i++) {
957  CaseClause* clause = clauses->at(i);
958  clause->body_target()->Unuse();
959 
960  // The default is not a test, but remember it as final fall through.
961  if (clause->is_default()) {
962  default_clause = clause;
963  continue;
964  }
965 
966  Comment cmnt(masm_, "[ Case comparison");
967  __ bind(&next_test);
968  next_test.Unuse();
969 
970  // Compile the label expression.
971  VisitForAccumulatorValue(clause->label());
972 
973  // Perform the comparison as if via '==='.
974  __ mov(edx, Operand(esp, 0)); // Switch value.
975  bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
976  JumpPatchSite patch_site(masm_);
977  if (inline_smi_code) {
 // Fast path: if both operands are smis, compare them directly.
978  Label slow_case;
979  __ mov(ecx, edx);
980  __ or_(ecx, eax);
981  patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
982 
983  __ cmp(edx, eax);
984  __ j(not_equal, &next_test);
985  __ Drop(1); // Switch value is no longer needed.
986  __ jmp(clause->body_target());
987  __ bind(&slow_case);
988  }
989 
990  // Record position before stub call for type feedback.
991  SetSourcePosition(clause->position());
992  Handle<Code> ic =
993  CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
994  CallIC(ic, clause->CompareId());
995  patch_site.EmitPatchInfo();
996 
 // Bailout path: after deopt eax holds a boolean object, so compare it
 // against true_value instead of the IC's zero/non-zero flag result.
997  Label skip;
998  __ jmp(&skip, Label::kNear);
999  PrepareForBailout(clause, TOS_REG);
1000  __ cmp(eax, isolate()->factory()->true_value());
1001  __ j(not_equal, &next_test);
1002  __ Drop(1);
1003  __ jmp(clause->body_target());
1004  __ bind(&skip);
1005 
 // Normal path: the CompareIC returns zero in eax on equality.
1006  __ test(eax, eax);
1007  __ j(not_equal, &next_test);
1008  __ Drop(1); // Switch value is no longer needed.
1009  __ jmp(clause->body_target());
1010  }
1011 
1012  // Discard the test value and jump to the default if present, otherwise to
1013  // the end of the statement.
1014  __ bind(&next_test);
1015  __ Drop(1); // Switch value is no longer needed.
1016  if (default_clause == NULL) {
1017  __ jmp(nested_statement.break_label());
1018  } else {
1019  __ jmp(default_clause->body_target());
1020  }
1021 
1022  // Compile all the case bodies.
1023  for (int i = 0; i < clauses->length(); i++) {
1024  Comment cmnt(masm_, "[ Case body");
1025  CaseClause* clause = clauses->at(i);
1026  __ bind(clause->body_target());
1027  PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1028  VisitStatements(clause->statements());
1029  }
1030 
1031  __ bind(nested_statement.break_label());
1032  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1033 }
1034 
1035 
// Emits ia32 code for a for-in loop: validates the enumerable, tries the
// fast enum-cache path, falls back to a runtime-provided fixed array of
// property names, and iterates with five bookkeeping slots on the stack
// (map-or-smi, array, length, index — plus the enumerable itself).
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  // Feedback slot for recording whether the slow (fixed-array) path was
  // taken. NOTE(review): the line that stores into this slot appears to be
  // missing from this excerpt — confirm against upstream.
  int slot = stmt->ForInFeedbackSlot();

  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, &exit);
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(eax, &convert, Label::kNear);
  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
  __ j(above_equal, &done_convert, Label::kNear);
  __ bind(&convert);
  __ push(eax);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  __ push(eax);  // The enumerable stays on the stack for the whole loop.

  // Check for proxies.
  Label call_runtime, use_cache, fixed_array;
  __ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx);
  __ j(below_equal, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(&call_runtime);

  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(eax);
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  // NOTE(review): the first half of the comparison below (presumably a cmp of
  // the result's map field against meta_map) is missing from this excerpt.
  isolate()->factory()->meta_map());
  __ j(not_equal, &fixed_array);


  // We got a map in register eax. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(edx, eax);
  __ cmp(edx, Immediate(Smi::FromInt(0)));
  __ j(equal, &no_descriptors);

  __ LoadInstanceDescriptors(eax, ecx);
  // NOTE(review): loads of the enum cache out of the descriptors appear to be
  // missing here — confirm against upstream.

  // Set up the four remaining stack slots.
  __ push(eax);  // Map.
  __ push(ecx);  // Enumeration cache.
  __ push(edx);  // Number of valid entries for the map in the enum cache.
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
  __ jmp(&loop);

  __ bind(&no_descriptors);
  // Nothing to enumerate: drop the enumerable and leave.
  __ add(esp, Immediate(kPointerSize));
  __ jmp(&exit);

  // We got a fixed array in register eax. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  // No need for a write barrier, we are storing a Smi in the feedback vector.
  __ LoadHeapObject(ebx, FeedbackVector());
  // NOTE(review): the mov storing into the feedback vector slot is truncated
  // here; only its immediate operand survives in this excerpt.
  Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate())));

  __ mov(ebx, Immediate(Smi::FromInt(1)));  // Smi indicates slow check
  __ mov(ecx, Operand(esp, 0 * kPointerSize));  // Get enumerated object
  __ CmpObjectType(ecx, LAST_JS_PROXY_TYPE, ecx);
  __ j(above, &non_proxy);
  __ Move(ebx, Immediate(Smi::FromInt(0)));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ push(ebx);  // Smi
  __ push(eax);  // Array
  // NOTE(review): the load of the fixed array length into eax appears to be
  // missing immediately above this push.
  __ push(eax);  // Fixed array length (as smi).
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  __ mov(eax, Operand(esp, 0 * kPointerSize));  // Get the current index.
  __ cmp(eax, Operand(esp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register ebx.
  __ mov(ebx, Operand(esp, 2 * kPointerSize));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register edx.
  __ mov(edx, Operand(esp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ mov(ecx, Operand(esp, 4 * kPointerSize));
  // NOTE(review): the cmp of edx against the enumerable's current map appears
  // to be missing above this conditional jump.
  __ j(equal, &update_each, Label::kNear);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  DCHECK(Smi::FromInt(0) == 0);
  __ test(edx, edx);
  __ j(zero, &update_each);

  // Convert the entry to a string or null if it isn't a property
  // anymore. If the property has been removed while iterating, we
  // just skip it.
  __ push(ecx);  // Enumerable.
  __ push(ebx);  // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ test(eax, eax);
  __ j(equal, loop_statement.continue_label());
  __ mov(ebx, eax);

  // Update the 'each' property or variable from the possibly filtered
  // entry in register ebx.
  __ bind(&update_each);
  __ mov(result_register(), ebx);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ jmp(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ add(esp, Immediate(5 * kPointerSize));

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
}
1203 
1204 
// Emits code for a for-of loop by visiting the AST's desugared pieces:
// acquire the iterator, call next() at the loop head, break on result.done,
// assign result.value to the loop variable, then run the body.
void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");
  SetStatementPosition(stmt);

  Iteration loop_statement(this, stmt);

  // var iterator = iterable[Symbol.iterator]();
  VisitForEffect(stmt->assign_iterator());

  // Loop entry.
  __ bind(loop_statement.continue_label());

  // result = iterator.next()
  VisitForEffect(stmt->next_result());

  // if (result.done) break;
  Label result_not_done;
  VisitForControl(stmt->result_done(),
                  loop_statement.break_label(),
                  &result_not_done,
                  &result_not_done);
  __ bind(&result_not_done);

  // each = result.value
  VisitForEffect(stmt->assign_each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ jmp(loop_statement.continue_label());

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
}
1245 
1246 
1247 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1248  bool pretenure) {
1249  // Use the fast case closure allocation code that allocates in new
1250  // space for nested functions that don't need literals cloning. If
1251  // we're running with the --always-opt or the --prepare-always-opt
1252  // flag, we need to use the runtime function so that the new function
1253  // we are creating here gets a chance to have its code optimized and
1254  // doesn't just get a copy of the existing unoptimized code.
1255  if (!FLAG_always_opt &&
1256  !FLAG_prepare_always_opt &&
1257  !pretenure &&
1258  scope()->is_function_scope() &&
1259  info->num_literals() == 0) {
1260  FastNewClosureStub stub(isolate(), info->strict_mode(), info->kind());
1261  __ mov(ebx, Immediate(info));
1262  __ CallStub(&stub);
1263  } else {
1264  __ push(esi);
1265  __ push(Immediate(info));
1266  __ push(Immediate(pretenure
1267  ? isolate()->factory()->true_value()
1268  : isolate()->factory()->false_value()));
1269  __ CallRuntime(Runtime::kNewClosure, 3);
1270  }
1271  context()->Plug(eax);
1272 }
1273 
1274 
1275 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1276  Comment cmnt(masm_, "[ VariableProxy");
1277  EmitVariableLoad(expr);
1278 }
1279 
1280 
// Loads the [[HomeObject]] of a super reference via a named load IC and
// throws if it is undefined (i.e. 'super' used outside a method).
// NOTE(review): the setup of LoadDescriptor::ReceiverRegister() appears to
// be missing from this excerpt — confirm against upstream.
void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
  Comment cnmt(masm_, "[ SuperReference ");

  Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
  __ mov(LoadDescriptor::NameRegister(), home_object_symbol);

  CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());

  // An undefined home object means this is not a method; throw.
  __ cmp(eax, isolate()->factory()->undefined_value());
  Label done;
  __ j(not_equal, &done);
  __ CallRuntime(Runtime::kThrowNonMethodError, 0);
  __ bind(&done);
}
1298 
1299 
// Emits the fast path for loading a global that may be shadowed by
// eval-introduced context extensions: walk the context chain proving each
// extension slot is NULL, then do a (possibly contextual) load IC. Jumps to
// |slow| whenever any extension object is present.
void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register context = esi;
  Register temp = edx;

  // Statically walk the scopes we know about, checking extension slots for
  // every scope that allocates a heap context and calls sloppy eval.
  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        // NOTE(review): the cmp of the context's EXTENSION_INDEX slot is
        // truncated here; only its Immediate(0) operand survives.
        Immediate(0));
        __ j(not_equal, slow);
      }
      // Load next context in chain.
      // NOTE(review): the mov loading PREVIOUS_INDEX into temp appears to be
      // missing from this excerpt.
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions. If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Loop up the context chain. There is no frame effect so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(temp)) {
      __ mov(temp, context);
    }
    __ bind(&next);
    // Terminate at native context.
    // NOTE(review): the cmp of the map field against native_context_map is
    // truncated here; only its Immediate operand survives.
    Immediate(isolate()->factory()->native_context_map()));
    __ j(equal, &fast, Label::kNear);
    // Check that extension is NULL.
    __ cmp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
    __ j(not_equal, slow);
    // Load next context in chain.
    __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
    __ jmp(&next);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a global
  // load IC call.
  __ mov(LoadDescriptor::NameRegister(), proxy->var()->name());
  if (FLAG_vector_ics) {
    // NOTE(review): the mov of the slot register is truncated here; only its
    // Immediate operand survives in this excerpt.
    Immediate(Smi::FromInt(proxy->VariableFeedbackSlot())));
  }

  // Inside typeof the load must not throw on unresolvable references.
  ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
      ? NOT_CONTEXTUAL
      : CONTEXTUAL;

  CallLoadIC(mode);
}
1363 
1364 
// NOTE(review): the first line of this function's signature (return type,
// name, and |var| parameter) is missing from this excerpt — it checks the
// context chain from the current scope to var->scope() for eval-introduced
// extension objects, jumping to |slow| if any is found, and returns an
// operand addressing var's context slot.
                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = esi;
  Register temp = ebx;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        // NOTE(review): the cmp of the EXTENSION_INDEX slot is truncated
        // here; only its Immediate(0) operand survives.
        Immediate(0));
        __ j(not_equal, slow);
      }
      // NOTE(review): the mov loading the previous context into temp appears
      // to be missing from this excerpt.
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
  }
  // Check that last extension is NULL.
  __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
  __ j(not_equal, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return an esi-based operand (the write barrier cannot be allowed to
  // destroy the esi register).
  return ContextOperand(context, var->index());
}
1393 
1394 
// Emits the fast path for loading a dynamically-resolved variable that might
// be shadowed by eval-introduced bindings. On success jumps to |done| with
// the value in eax; jumps to |slow| whenever an extension object forces a
// full runtime lookup. DYNAMIC (fully dynamic) variables emit nothing and
// fall through to the caller's slow path.
void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    // Most likely a global: check extensions, then use a load IC.
    EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    // Most likely a known context slot: load it directly after proving no
    // extension object shadows it.
    Variable* local = var->local_if_not_shadowed();
    __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      // Hole check: an uninitialized binding holds the_hole.
      __ cmp(eax, isolate()->factory()->the_hole_value());
      __ j(not_equal, done);
      if (local->mode() == CONST_LEGACY) {
        // Legacy const reads of uninitialized bindings yield undefined.
        __ mov(eax, isolate()->factory()->undefined_value());
      } else {  // LET || CONST
        // Harmony let/const: reading before initialization throws.
        __ push(Immediate(var->name()));
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ jmp(done);
  }
}
1425 
1426 
// Loads the variable referenced by |proxy| into the current expression
// context, handling globals (load IC), stack/context slots (with hole
// checks for let/const), and dynamically-resolved LOOKUP variables.
void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      // NOTE(review): the mov setting up the load IC's receiver register
      // appears to be missing here.
      __ mov(LoadDescriptor::NameRegister(), var->name());
      if (FLAG_vector_ics) {
        // NOTE(review): the mov of the slot register is truncated here; only
        // its Immediate operand survives.
        Immediate(Smi::FromInt(proxy->VariableFeedbackSlot())));
      }
      // NOTE(review): the CallLoadIC(CONTEXTUAL) call appears to be missing
      // here — confirm against upstream.
      context()->Plug(eax);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        DCHECK(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
          DCHECK(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          Label done;
          GetVar(eax, var);
          __ cmp(eax, isolate()->factory()->the_hole_value());
          __ j(not_equal, &done, Label::kNear);
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            __ push(Immediate(var->name()));
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
          } else {
            // Uninitalized const bindings outside of harmony mode are unholed.
            DCHECK(var->mode() == CONST_LEGACY);
            __ mov(eax, isolate()->factory()->undefined_value());
          }
          __ bind(&done);
          context()->Plug(eax);
          break;
        }
      }
      // No init check needed: plug the slot directly into the context.
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      __ push(esi);  // Context.
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kLoadLookupSlot, 2);
      __ bind(&done);
      context()->Plug(eax);
      break;
    }
  }
}
1529 
1530 
// Emits code for a regexp literal: look up (or lazily materialize via the
// runtime) the boilerplate regexp in the literals array, then shallow-copy
// it into a fresh allocation so each evaluation yields a distinct object.
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // edi = JS function.
  // ecx = literals array.
  // ebx = regexp literal.
  // eax = regexp literal clone.
  // NOTE(review): the movs loading edi (closure) and ecx (literals array)
  // appear to be missing from this excerpt.
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ mov(ebx, FieldOperand(ecx, literal_offset));
  // undefined marks a not-yet-materialized boilerplate.
  __ cmp(ebx, isolate()->factory()->undefined_value());
  __ j(not_equal, &materialized, Label::kNear);

  // Create regexp literal using runtime function
  // Result will be in eax.
  __ push(ecx);
  __ push(Immediate(Smi::FromInt(expr->literal_index())));
  __ push(Immediate(expr->pattern()));
  __ push(Immediate(expr->flags()));
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mov(ebx, eax);

  __ bind(&materialized);
  // NOTE(review): the declaration of |size| (the JSRegExp instance size) is
  // missing from this excerpt — confirm against upstream.
  Label allocated, runtime_allocate;
  __ Allocate(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  // Preserve the boilerplate (ebx) across the runtime call.
  __ push(ebx);
  __ push(Immediate(Smi::FromInt(size)));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(ebx);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ mov(edx, FieldOperand(ebx, i));
    __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
    __ mov(FieldOperand(eax, i), edx);
    __ mov(FieldOperand(eax, i + kPointerSize), ecx);
  }
  if ((size % (2 * kPointerSize)) != 0) {
    // Odd number of words: copy the trailing one.
    __ mov(edx, FieldOperand(ebx, size - kPointerSize));
    __ mov(FieldOperand(eax, size - kPointerSize), edx);
  }
  context()->Plug(eax);
}
1583 
1584 
1585 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1586  if (expression == NULL) {
1587  __ push(Immediate(isolate()->factory()->null_value()));
1588  } else {
1589  VisitForStackValue(expression);
1590  }
1591 }
1592 
1593 
// Emits code for an object literal: clone the boilerplate (fast stub or
// runtime), then emit stores for each non-compile-time property, batching
// getter/setter pairs into single runtime calls.
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  expr->BuildConstantProperties(isolate());
  Handle<FixedArray> constant_properties = expr->constant_properties();
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  // constant_properties stores key/value pairs flat, hence the /2.
  int properties_count = constant_properties->length() / 2;
  if (expr->may_store_doubles() || expr->depth() > 1 ||
      masm()->serializer_enabled() ||
      flags != ObjectLiteral::kFastElements ||
  // NOTE(review): the tail of this condition and the push of the literals
  // array are missing from this excerpt — confirm against upstream.
  __ push(Immediate(Smi::FromInt(expr->literal_index())));
  __ push(Immediate(constant_properties));
  __ push(Immediate(Smi::FromInt(flags)));
  __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
  // NOTE(review): the movs loading edi (closure) and eax (literals array)
  // for the stub call appear to be missing here.
  __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
  __ mov(ecx, Immediate(constant_properties));
  __ mov(edx, Immediate(Smi::FromInt(flags)));
  FastCloneShallowObjectStub stub(isolate(), properties_count);
  __ CallStub(&stub);
  }

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in eax.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    // Compile-time values are already in the cloned boilerplate.
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(eax);  // Save result on the stack
      result_saved = true;
    }
    switch (property->kind()) {
      // NOTE(review): the CONSTANT case label appears to be missing above
      // this UNREACHABLE in this excerpt.
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            // Named store via the store IC, keyed by the literal name.
            VisitForAccumulatorValue(value);
            __ mov(StoreDescriptor::NameRegister(), Immediate(key->value()));
            __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
            CallStoreIC(key->LiteralFeedbackId());
          } else {
            // Shadowed by a later duplicate key: evaluate for effect only.
            VisitForEffect(value);
          }
          break;
        }
        __ push(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ push(Immediate(Smi::FromInt(SLOPPY)));  // Strict mode
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        __ push(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kSetPrototype, 2);
        } else {
          __ Drop(2);
        }
        break;
      case ObjectLiteral::Property::GETTER:
        // Collected and emitted below, paired with any matching setter.
        accessor_table.lookup(key)->second->getter = value;
        break;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ push(Operand(esp, 0));  // Duplicate receiver.
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ push(Immediate(Smi::FromInt(NONE)));
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
  }

  if (expr->has_function()) {
    DCHECK(result_saved);
    __ push(Operand(esp, 0));
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}
1719 
1720 
// Emits code for an array literal: clone the constant-elements boilerplate
// (fast stub or runtime), then evaluate and store each non-constant
// subexpression into the clone.
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  expr->BuildConstantElements(isolate());
  int flags = expr->depth() == 1
      ? ArrayLiteral::kShallowElements
      : ArrayLiteral::kNoFlags;

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  Handle<FixedArray> constant_elements = expr->constant_elements();
  // constant_elements is a pair: [elements kind (smi), elements values].
  DCHECK_EQ(2, constant_elements->length());
  ElementsKind constant_elements_kind =
      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
  bool has_constant_fast_elements =
      IsFastObjectElementsKind(constant_elements_kind);
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
    // Slow path: deep or oversized literal goes through the runtime.
    // NOTE(review): the push of the literals array appears to be missing
    // here — confirm against upstream.
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_elements));
    __ push(Immediate(Smi::FromInt(flags)));
    __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
  } else {
    // Fast path: shallow clone via stub.
    // NOTE(review): the movs loading the closure/literals array for the stub
    // appear to be missing here.
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_elements));
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }

  bool result_saved = false;  // Is the result saved to the stack?

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      __ push(eax);  // array literal.
      __ push(Immediate(Smi::FromInt(expr->literal_index())));
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    if (IsFastObjectElementsKind(constant_elements_kind)) {
      // Fast-case array literal with ElementsKind of FAST_*_ELEMENTS, they
      // cannot transition and don't need to call the runtime stub.
      int offset = FixedArray::kHeaderSize + (i * kPointerSize);
      __ mov(ebx, Operand(esp, kPointerSize));  // Copy of array literal.
      // NOTE(review): the mov loading the elements backing store into ebx
      // appears to be missing here.
      // Store the subexpression value in the array's elements.
      __ mov(FieldOperand(ebx, offset), result_register());
      // Update the write barrier for the array store.
      // NOTE(review): the trailing arguments of RecordWriteField are missing
      // from this excerpt.
      __ RecordWriteField(ebx, offset, result_register(), ecx,
    } else {
      // Store the subexpression value in the array's elements.
      __ mov(ecx, Immediate(Smi::FromInt(i)));
      StoreArrayLiteralElementStub stub(isolate());
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
  }

  if (result_saved) {
    __ add(esp, Immediate(kPointerSize));  // literal index
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}
1810 
1811 
// Emits code for an assignment expression: classify the LHS (variable,
// named/keyed property, named super property), evaluate it, for compound
// assignments load the old value and apply the binary op, then store.
void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpression());

  Comment cmnt(masm_, "[ Assignment");

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind {
    VARIABLE,
    // NOTE(review): the NAMED_PROPERTY and KEYED_PROPERTY enumerators appear
    // to be missing from this excerpt — they are referenced below.
    NAMED_SUPER_PROPERTY
  };
  LhsKind assign_type = VARIABLE;
  Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())
        ? (property->IsSuperAccess() ? NAMED_SUPER_PROPERTY
                                     : NAMED_PROPERTY)
        : KEYED_PROPERTY;
  }

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_SUPER_PROPERTY:
      VisitForStackValue(property->obj()->AsSuperReference()->this_var());
      EmitLoadHomeObject(property->obj()->AsSuperReference());
      __ push(result_register());
      if (expr->is_compound()) {
        // Compound assignment re-reads the property, so keep an extra copy
        // of receiver and home object for the load.
        __ push(MemOperand(esp, kPointerSize));
        __ push(result_register());
      }
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
        VisitForStackValue(property->obj());
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_PROPERTY: {
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        // NOTE(review): the mov of the receiver register from the stack
        // appears to be missing here.
        __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
    }
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    AccumulatorValueContext result_context(this);
    { AccumulatorValueContext left_operand_context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_SUPER_PROPERTY:
          EmitNamedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    __ push(eax);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
        ? OVERWRITE_RIGHT
        : NO_OVERWRITE;
    SetSourcePosition(expr->position() + 1);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            mode,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op, mode);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  // Record source position before possible IC call.
  SetSourcePosition(expr->position());

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(eax);
      break;
    case NAMED_PROPERTY:
      // NOTE(review): the named-property store emitter call is missing from
      // this excerpt — confirm against upstream.
      break;
    case NAMED_SUPER_PROPERTY:
      // NOTE(review): the named-super store emitter call is missing from
      // this excerpt.
      break;
    case KEYED_PROPERTY:
      // NOTE(review): the keyed-property store emitter call is missing from
      // this excerpt.
      break;
  }
}
1942 
1943 
// Emits IA-32 code for a 'yield' expression inside a generator function.
// The yielded value is evaluated onto the operand stack first; the code
// then suspends, finishes, or delegates depending on expr->yield_kind().
// NOTE(review): several emitted-instruction lines are missing from this
// listing (elided by the documentation extractor, e.g. the FieldOperand
// stores that pair with the DCHECKs below) -- confirm against the
// original full-codegen-ia32.cc before changing any code here.
1944 void FullCodeGenerator::VisitYield(Yield* expr) {
1945  Comment cmnt(masm_, "[ Yield");
1946  // Evaluate yielded value first; the initial iterator definition depends on
1947  // this. It stays on the stack while we update the iterator.
1948  VisitForStackValue(expr->expression());
1949 
1950  switch (expr->yield_kind()) {
1951  case Yield::kSuspend:
1952  // Pop value from top-of-stack slot; box result into result register.
1953  EmitCreateIteratorResult(false);
1954  __ push(result_register());
1955  // Fall through.
1956  case Yield::kInitial: {
1957  Label suspend, continuation, post_runtime, resume;
1958 
1959  __ jmp(&suspend);
1960 
      // 'continuation' marks where execution restarts after a resume.
1961  __ bind(&continuation);
1962  __ jmp(&resume);
1963 
1964  __ bind(&suspend);
1965  VisitForAccumulatorValue(expr->generator_object());
1966  DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1968  Immediate(Smi::FromInt(continuation.pos())));
1970  __ mov(ecx, esi);
1971  __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
1972  kDontSaveFPRegs);
1974  __ cmp(esp, ebx);
1975  __ j(equal, &post_runtime);
1976  __ push(eax); // generator object
1977  __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1978  __ mov(context_register(),
1980  __ bind(&post_runtime);
1981  __ pop(result_register());
1983 
1984  __ bind(&resume);
1985  context()->Plug(result_register());
1986  break;
1987  }
1988 
1989  case Yield::kFinal: {
1990  VisitForAccumulatorValue(expr->generator_object());
      // (Elided lines presumably mark the generator closed -- confirm.)
1994  // Pop value from top-of-stack slot, box result into result register.
1998  break;
1999  }
2000 
2001  case Yield::kDelegating: {
2002  VisitForStackValue(expr->generator_object());
2003 
2004  // Initial stack layout is as follows:
2005  // [sp + 1 * kPointerSize] iter
2006  // [sp + 0 * kPointerSize] g
2007 
2008  Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2009  Label l_next, l_call, l_loop;
2010  Register load_receiver = LoadDescriptor::ReceiverRegister();
2011  Register load_name = LoadDescriptor::NameRegister();
2012 
2013  // Initial send value is undefined.
2014  __ mov(eax, isolate()->factory()->undefined_value());
2015  __ jmp(&l_next);
2016 
2017  // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2018  __ bind(&l_catch);
2019  handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2020  __ mov(load_name, isolate()->factory()->throw_string()); // "throw"
2021  __ push(load_name); // "throw"
2022  __ push(Operand(esp, 2 * kPointerSize)); // iter
2023  __ push(eax); // exception
2024  __ jmp(&l_call);
2025 
2026  // try { received = %yield result }
2027  // Shuffle the received result above a try handler and yield it without
2028  // re-boxing.
2029  __ bind(&l_try);
2030  __ pop(eax); // result
2031  __ PushTryHandler(StackHandler::CATCH, expr->index());
2032  const int handler_size = StackHandlerConstants::kSize;
2033  __ push(eax); // result
2034  __ jmp(&l_suspend);
2035  __ bind(&l_continuation);
2036  __ jmp(&l_resume);
2037  __ bind(&l_suspend);
2038  const int generator_object_depth = kPointerSize + handler_size;
2039  __ mov(eax, Operand(esp, generator_object_depth));
2040  __ push(eax); // g
2041  DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2043  Immediate(Smi::FromInt(l_continuation.pos())));
2045  __ mov(ecx, esi);
2046  __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
2047  kDontSaveFPRegs);
2048  __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2049  __ mov(context_register(),
2051  __ pop(eax); // result
2053  __ bind(&l_resume); // received in eax
2054  __ PopTryHandler();
2055 
2056  // receiver = iter; f = iter.next; arg = received;
2057  __ bind(&l_next);
2058 
2059  __ mov(load_name, isolate()->factory()->next_string());
2060  __ push(load_name); // "next"
2061  __ push(Operand(esp, 2 * kPointerSize)); // iter
2062  __ push(eax); // received
2063 
2064  // result = receiver[f](arg);
2065  __ bind(&l_call);
2066  __ mov(load_receiver, Operand(esp, kPointerSize));
2067  if (FLAG_vector_ics) {
2069  Immediate(Smi::FromInt(expr->KeyedLoadFeedbackSlot())));
2070  }
2071  Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2073  __ mov(edi, eax);
2074  __ mov(Operand(esp, 2 * kPointerSize), edi);
2075  CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2076  __ CallStub(&stub);
2077 
2079  __ Drop(1); // The function is still on the stack; drop it.
2080 
2081  // if (!result.done) goto l_try;
2082  __ bind(&l_loop);
2083  __ push(eax); // save result
2084  __ Move(load_receiver, eax); // result
2085  __ mov(load_name,
2086  isolate()->factory()->done_string()); // "done"
2087  if (FLAG_vector_ics) {
2089  Immediate(Smi::FromInt(expr->DoneFeedbackSlot())));
2090  }
2091  CallLoadIC(NOT_CONTEXTUAL); // result.done in eax
2092  Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2093  CallIC(bool_ic);
2094  __ test(eax, eax);
2095  __ j(zero, &l_try);
2096 
2097  // result.value
2098  __ pop(load_receiver); // result
2099  __ mov(load_name,
2100  isolate()->factory()->value_string()); // "value"
2101  if (FLAG_vector_ics) {
2103  Immediate(Smi::FromInt(expr->ValueFeedbackSlot())));
2104  }
2105  CallLoadIC(NOT_CONTEXTUAL); // result.value in eax
2106  context()->DropAndPlug(2, eax); // drop iter and g
2107  break;
2108  }
2109  }
2110 }
2111 
2112 
// Emits code that resumes a suspended generator.  The generator object is
// popped into ebx, the value to send (or throw) stays in eax, and control
// either re-enters the suspended frame directly (fast path) or falls back
// to the kResumeJSGeneratorObject runtime call.
// NOTE(review): several operand-load lines (FieldOperand accesses into the
// generator object) are missing from this listing -- confirm against the
// original source before editing.
2113 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2114  Expression *value,
2115  JSGeneratorObject::ResumeMode resume_mode) {
2116  // The value stays in eax, and is ultimately read by the resumed generator, as
2117  // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2118  // is read to throw the value when the resumed generator is already closed.
2119  // ebx will hold the generator object until the activation has been resumed.
2120  VisitForStackValue(generator);
2121  VisitForAccumulatorValue(value);
2122  __ pop(ebx);
2123 
2124  // Check generator state.
2125  Label wrong_state, closed_state, done;
2129  Immediate(Smi::FromInt(0)));
2130  __ j(equal, &closed_state);
2131  __ j(less, &wrong_state);
2132 
2133  // Load suspended function and context.
2136 
2137  // Push receiver.
2139 
2140  // Push holes for arguments to generator function.
2142  __ mov(edx,
2144  __ mov(ecx, isolate()->factory()->the_hole_value());
2145  Label push_argument_holes, push_frame;
2146  __ bind(&push_argument_holes);
2147  __ sub(edx, Immediate(Smi::FromInt(1)));
2148  __ j(carry, &push_frame);
2149  __ push(ecx);
2150  __ jmp(&push_argument_holes);
2151 
2152  // Enter a new JavaScript frame, and initialize its slots as they were when
2153  // the generator was suspended.
2154  Label resume_frame;
2155  __ bind(&push_frame);
2156  __ call(&resume_frame);
2157  __ jmp(&done);
2158  __ bind(&resume_frame);
2159  __ push(ebp); // Caller's frame pointer.
2160  __ mov(ebp, esp);
2161  __ push(esi); // Callee's context.
2162  __ push(edi); // Callee's JS Function.
2163 
2164  // Load the operand stack size.
2167  __ SmiUntag(edx);
2168 
2169  // If we are sending a value and there is no operand stack, we can jump back
2170  // in directly.
2171  if (resume_mode == JSGeneratorObject::NEXT) {
2172  Label slow_resume;
2173  __ cmp(edx, Immediate(0));
2174  __ j(not_zero, &slow_resume);
2177  __ SmiUntag(ecx);
2178  __ add(edx, ecx);
2181  __ jmp(edx);
2182  __ bind(&slow_resume);
2183  }
2184 
2185  // Otherwise, we push holes for the operand stack and call the runtime to fix
2186  // up the stack and the handlers.
2187  Label push_operand_holes, call_resume;
2188  __ bind(&push_operand_holes);
2189  __ sub(edx, Immediate(1));
2190  __ j(carry, &call_resume);
2191  __ push(ecx);
2192  __ jmp(&push_operand_holes);
2193  __ bind(&call_resume);
2194  __ push(ebx);
2195  __ push(result_register());
2196  __ Push(Smi::FromInt(resume_mode));
2197  __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2198  // Not reached: the runtime call returns elsewhere.
2199  __ Abort(kGeneratorFailedToResume);
2200 
2201  // Reach here when generator is closed.
2202  __ bind(&closed_state);
2203  if (resume_mode == JSGeneratorObject::NEXT) {
2204  // Return completed iterator result when generator is closed.
2205  __ push(Immediate(isolate()->factory()->undefined_value()));
2206  // Pop value from top-of-stack slot; box result into result register.
2208  } else {
2209  // Throw the provided value.
2210  __ push(eax);
2211  __ CallRuntime(Runtime::kThrow, 1);
2212  }
2213  __ jmp(&done);
2214 
2215  // Throw error if we attempt to operate on a running generator.
2216  __ bind(&wrong_state);
2217  __ push(ebx);
2218  __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
2219 
2220  __ bind(&done);
2221  context()->Plug(result_register());
2222 }
2223 
2224 
// Allocates and fills in an iterator result object ({value, done}).  The
// value is popped from the stack into ecx; 'done' is materialized from a
// C++ bool.  Falls back to %AllocateInNewSpace under GC pressure.
// NOTE(review): the function header line and the FieldOperand store lines
// are missing from this listing (doc-extractor elision); the header is
// presumably 'void FullCodeGenerator::EmitCreateIteratorResult(bool done)
// {' -- confirm against the original source.
2226  Label gc_required;
2227  Label allocated;
2228 
2229  Handle<Map> map(isolate()->native_context()->iterator_result_map());
2230 
2231  __ Allocate(map->instance_size(), eax, ecx, edx, &gc_required, TAG_OBJECT);
2232  __ jmp(&allocated);
2233 
2234  __ bind(&gc_required);
2235  __ Push(Smi::FromInt(map->instance_size()));
2236  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2237  __ mov(context_register(),
2239 
2240  __ bind(&allocated);
2241  __ mov(ebx, map);
2242  __ pop(ecx);
2243  __ mov(edx, isolate()->factory()->ToBoolean(done));
2244  DCHECK_EQ(map->instance_size(), 5 * kPointerSize);
2247  isolate()->factory()->empty_fixed_array());
2249  isolate()->factory()->empty_fixed_array());
2252 
2253  // Only the value field needs a write barrier, as the other values are in the
2254  // root set.
2256  ecx, edx, kDontSaveFPRegs);
2257 }
2258 
2259 
// Loads a (non-super) named property of the object already in the load
// IC's receiver register, leaving the result in eax.
// NOTE(review): the vector-ics slot-register move and the CallLoadIC line
// inside the FLAG_vector_ics branch are elided from this listing.
2260 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2261  SetSourcePosition(prop->position());
2262  Literal* key = prop->key()->AsLiteral();
2263  DCHECK(!key->value()->IsSmi());
2264  DCHECK(!prop->IsSuperAccess());
2265 
2266  __ mov(LoadDescriptor::NameRegister(), Immediate(key->value()));
2267  if (FLAG_vector_ics) {
2269  Immediate(Smi::FromInt(prop->PropertyFeedbackSlot())));
2271  } else {
2272  CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2273  }
2274 }
2275 
2276 
2277 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2278  // Stack: receiver, home_object.
2279  SetSourcePosition(prop->position());
2280  Literal* key = prop->key()->AsLiteral();
2281  DCHECK(!key->value()->IsSmi());
2282  DCHECK(prop->IsSuperAccess());
2283 
2284  __ push(Immediate(key->value()));
2285  __ CallRuntime(Runtime::kLoadFromSuper, 3);
2286 }
2287 
2288 
// Loads a keyed property; receiver and key are expected to already be in
// the load IC's descriptor registers.  Result is left in eax.
// NOTE(review): the move of the feedback-slot smi into the vector-ics slot
// register is elided from this listing.
2289 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2290  SetSourcePosition(prop->position());
2291  Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2292  if (FLAG_vector_ics) {
2294  Immediate(Smi::FromInt(prop->PropertyFeedbackSlot())));
2295  CallIC(ic);
2296  } else {
2297  CallIC(ic, prop->PropertyFeedbackId());
2298  }
2299 }
2300 
2301 
// Emits a combined smi fast path plus BinaryOpIC slow path for a binary
// operation.  Left operand is popped from the stack into edx, right
// operand arrives in eax; the result is plugged into the context from eax.
// NOTE(review): the 'OverwriteMode mode,' parameter line appears to be
// elided from this listing ('mode' is used below when building the
// BinaryOpIC) -- confirm against the original source.
2302 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2303  Token::Value op,
2305  Expression* left,
2306  Expression* right) {
2307  // Do combined smi check of the operands. Left operand is on the
2308  // stack. Right operand is in eax.
2309  Label smi_case, done, stub_call;
2310  __ pop(edx);
2311  __ mov(ecx, eax);
2312  __ or_(eax, edx);
2313  JumpPatchSite patch_site(masm_);
2314  patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);
2315 
2316  __ bind(&stub_call);
2317  __ mov(eax, ecx);
2318  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2319  CallIC(code, expr->BinaryOperationFeedbackId());
2320  patch_site.EmitPatchInfo();
2321  __ jmp(&done, Label::kNear);
2322 
2323  // Smi case.
2324  __ bind(&smi_case);
2325  __ mov(eax, edx); // Copy left operand in case of a stub call.
2326 
2327  switch (op) {
2328  case Token::SAR:
2329  __ SmiUntag(ecx);
2330  __ sar_cl(eax); // No checks of result necessary
2331  __ and_(eax, Immediate(~kSmiTagMask));
2332  break;
2333  case Token::SHL: {
2334  Label result_ok;
2335  __ SmiUntag(eax);
2336  __ SmiUntag(ecx);
2337  __ shl_cl(eax);
2338  // Check that the *signed* result fits in a smi.
2339  __ cmp(eax, 0xc0000000);
2340  __ j(positive, &result_ok);
2341  __ SmiTag(ecx);
2342  __ jmp(&stub_call);
2343  __ bind(&result_ok);
2344  __ SmiTag(eax);
2345  break;
2346  }
2347  case Token::SHR: {
2348  Label result_ok;
2349  __ SmiUntag(eax);
2350  __ SmiUntag(ecx);
2351  __ shr_cl(eax);
      // SHR yields an unsigned value; the top two bits must be clear for
      // the result to be representable as a smi.
2352  __ test(eax, Immediate(0xc0000000));
2353  __ j(zero, &result_ok);
2354  __ SmiTag(ecx);
2355  __ jmp(&stub_call);
2356  __ bind(&result_ok);
2357  __ SmiTag(eax);
2358  break;
2359  }
2360  case Token::ADD:
2361  __ add(eax, ecx);
2362  __ j(overflow, &stub_call);
2363  break;
2364  case Token::SUB:
2365  __ sub(eax, ecx);
2366  __ j(overflow, &stub_call);
2367  break;
2368  case Token::MUL: {
2369  __ SmiUntag(eax);
2370  __ imul(eax, ecx);
2371  __ j(overflow, &stub_call);
2372  __ test(eax, eax);
2373  __ j(not_zero, &done, Label::kNear);
      // Zero result: if either operand was negative the true result is -0,
      // which a smi cannot represent, so fall back to the stub.
2374  __ mov(ebx, edx);
2375  __ or_(ebx, ecx);
2376  __ j(negative, &stub_call);
2377  break;
2378  }
2379  case Token::BIT_OR:
2380  __ or_(eax, ecx);
2381  break;
2382  case Token::BIT_AND:
2383  __ and_(eax, ecx);
2384  break;
2385  case Token::BIT_XOR:
2386  __ xor_(eax, ecx);
2387  break;
2388  default:
2389  UNREACHABLE();
2390  }
2391 
2392  __ bind(&done);
2393  context()->Plug(eax);
2394 }
2395 
2396 
2397 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2398  Token::Value op,
2399  OverwriteMode mode) {
2400  __ pop(edx);
2401  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2402  JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2403  CallIC(code, expr->BinaryOperationFeedbackId());
2404  patch_site.EmitPatchInfo();
2405  context()->Plug(eax);
2406 }
2407 
2408 
// Stores eax into the reference described by expr (variable, named
// property, or keyed property) and re-plugs eax into the context.
// NOTE(review): a few lines are elided from this listing (around the
// LhsKind declaration and the store-IC receiver/name register setup) --
// confirm against the original source.
2409 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2410  DCHECK(expr->IsValidReferenceExpression());
2411 
2412  // Left-hand side can only be a property, a global or a (parameter or local)
2413  // slot.
2415  LhsKind assign_type = VARIABLE;
2416  Property* prop = expr->AsProperty();
2417  if (prop != NULL) {
2418  assign_type = (prop->key()->IsPropertyName())
2419  ? NAMED_PROPERTY
2420  : KEYED_PROPERTY;
2421  }
2422 
2423  switch (assign_type) {
2424  case VARIABLE: {
2425  Variable* var = expr->AsVariableProxy()->var();
2426  EffectContext context(this);
2427  EmitVariableAssignment(var, Token::ASSIGN);
2428  break;
2429  }
2430  case NAMED_PROPERTY: {
2431  __ push(eax); // Preserve value.
2432  VisitForAccumulatorValue(prop->obj());
2434  __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2436  prop->key()->AsLiteral()->value());
2437  CallStoreIC();
2438  break;
2439  }
2440  case KEYED_PROPERTY: {
2441  __ push(eax); // Preserve value.
2442  VisitForStackValue(prop->obj());
2443  VisitForAccumulatorValue(prop->key());
2445  __ pop(StoreDescriptor::ReceiverRegister()); // Receiver.
2446  __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2447  Handle<Code> ic =
2448  CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2449  CallIC(ic);
2450  break;
2451  }
2452  }
2453  context()->Plug(eax);
2454 }
2455 
2456 
// Stores eax into the given stack or context slot; context-slot stores
// additionally emit a write barrier (ecx is expected to hold the context
// object from the preceding VarOperand computation).
// NOTE(review): the function header line is elided from this listing;
// presumably 'void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot('
// -- confirm against the original source.
2458  Variable* var, MemOperand location) {
2459  __ mov(location, eax);
2460  if (var->IsContextSlot()) {
2461  __ mov(edx, eax);
2462  int offset = Context::SlotOffset(var->index());
2463  __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
2464  }
2465 }
2466 
2467 
// Assigns the value in eax to 'var' according to the assignment operator
// 'op', dispatching on where the variable lives: global/unallocated,
// legacy const, let, lookup slot, or stack/context slot.
// NOTE(review): the function header line and the store-IC value-register
// setup line are elided from this listing -- confirm against the original
// source.
2469  Token::Value op) {
2470  if (var->IsUnallocated()) {
2471  // Global var, const, or let.
2472  __ mov(StoreDescriptor::NameRegister(), var->name());
2474  CallStoreIC();
2475 
2476  } else if (op == Token::INIT_CONST_LEGACY) {
2477  // Const initializers need a write barrier.
2478  DCHECK(!var->IsParameter()); // No const parameters.
2479  if (var->IsLookupSlot()) {
2480  __ push(eax);
2481  __ push(esi);
2482  __ push(Immediate(var->name()));
2483  __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2484  } else {
2485  DCHECK(var->IsStackLocal() || var->IsContextSlot());
2486  Label skip;
2487  MemOperand location = VarOperand(var, ecx);
2488  __ mov(edx, location);
      // Only initialize if the slot still holds the hole; otherwise the
      // const was already initialized and the store is skipped.
2489  __ cmp(edx, isolate()->factory()->the_hole_value());
2490  __ j(not_equal, &skip, Label::kNear);
2491  EmitStoreToStackLocalOrContextSlot(var, location);
2492  __ bind(&skip);
2493  }
2494 
2495  } else if (var->mode() == LET && op != Token::INIT_LET) {
2496  // Non-initializing assignment to let variable needs a write barrier.
2497  DCHECK(!var->IsLookupSlot());
2498  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2499  Label assign;
2500  MemOperand location = VarOperand(var, ecx);
2501  __ mov(edx, location);
      // A hole here means the let binding is not yet initialized (temporal
      // dead zone): throw a ReferenceError.
2502  __ cmp(edx, isolate()->factory()->the_hole_value());
2503  __ j(not_equal, &assign, Label::kNear);
2504  __ push(Immediate(var->name()));
2505  __ CallRuntime(Runtime::kThrowReferenceError, 1);
2506  __ bind(&assign);
2507  EmitStoreToStackLocalOrContextSlot(var, location);
2508 
2509  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2510  if (var->IsLookupSlot()) {
2511  // Assignment to var.
2512  __ push(eax); // Value.
2513  __ push(esi); // Context.
2514  __ push(Immediate(var->name()));
2515  __ push(Immediate(Smi::FromInt(strict_mode())));
2516  __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2517  } else {
2518  // Assignment to var or initializing assignment to let/const in harmony
2519  // mode.
2520  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2521  MemOperand location = VarOperand(var, ecx);
2522  if (generate_debug_code_ && op == Token::INIT_LET) {
2523  // Check for an uninitialized let binding.
2524  __ mov(edx, location);
2525  __ cmp(edx, isolate()->factory()->the_hole_value());
2526  __ Check(equal, kLetBindingReInitialization);
2527  }
2528  EmitStoreToStackLocalOrContextSlot(var, location);
2529  }
2530  }
2531  // Non-initializing assignments to consts are ignored.
2532 }
2533 
2534 
// Stores eax into a named property using the store IC; the receiver is on
// top of the stack.
// NOTE(review): the receiver-register pop line (between the name move and
// CallStoreIC) is elided from this listing.
2535 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2536  // Assignment to a property, using a named store IC.
2537  // eax : value
2538  // esp[0] : receiver
2539 
2540  Property* prop = expr->target()->AsProperty();
2541  DCHECK(prop != NULL);
2542  DCHECK(prop->key()->IsLiteral());
2543 
2544  // Record source code position before IC call.
2545  SetSourcePosition(expr->position());
2546  __ mov(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value());
2548  CallStoreIC(expr->AssignmentFeedbackId());
2549  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2550  context()->Plug(eax);
2551 }
2552 
2553 
// Stores eax into a named property of a super object via the appropriate
// runtime function for the current language mode.
// NOTE(review): the function header line is elided from this listing;
// presumably a named-super-property store entry point taking the
// Assignment* -- confirm against the original source.
2555  // Assignment to named property of super.
2556  // eax : value
2557  // stack : receiver ('this'), home_object
2558  Property* prop = expr->target()->AsProperty();
2559  DCHECK(prop != NULL);
2560  Literal* key = prop->key()->AsLiteral();
2561  DCHECK(key != NULL);
2562 
2563  __ push(eax);
2564  __ push(Immediate(key->value()));
2565  __ CallRuntime((strict_mode() == STRICT ? Runtime::kStoreToSuper_Strict
2566  : Runtime::kStoreToSuper_Sloppy),
2567  4);
2568  context()->Plug(eax);
2569 }
2570 
2571 
// Stores eax into a keyed property with the keyed store IC; key and
// receiver are popped from the stack.
// NOTE(review): the receiver-register pop (and possibly a debug check)
// between the key pop and SetSourcePosition are elided from this listing.
2572 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2573  // Assignment to a property, using a keyed store IC.
2574  // eax : value
2575  // esp[0] : key
2576  // esp[kPointerSize] : receiver
2577 
2578  __ pop(StoreDescriptor::NameRegister()); // Key.
2581  // Record source code position before IC call.
2582  SetSourcePosition(expr->position());
2583  Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2584  CallIC(ic, expr->AssignmentFeedbackId());
2585 
2586  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2587  context()->Plug(eax);
2588 }
2589 
2590 
// Visits a property load expression (named, named-super, or keyed) and
// plugs the loaded value (eax) into the current expression context.
// NOTE(review): the receiver-register move before EmitNamedPropertyLoad
// and the super-property load call are elided from this listing.
2591 void FullCodeGenerator::VisitProperty(Property* expr) {
2592  Comment cmnt(masm_, "[ Property");
2593  Expression* key = expr->key();
2594 
2595  if (key->IsPropertyName()) {
2596  if (!expr->IsSuperAccess()) {
2597  VisitForAccumulatorValue(expr->obj());
2599  EmitNamedPropertyLoad(expr);
2600  } else {
2601  VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
2602  EmitLoadHomeObject(expr->obj()->AsSuperReference());
2603  __ push(result_register());
2605  }
2606  PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2607  context()->Plug(eax);
2608  } else {
2609  VisitForStackValue(expr->obj());
2610  VisitForAccumulatorValue(expr->key());
2611  __ pop(LoadDescriptor::ReceiverRegister()); // Object.
2612  __ Move(LoadDescriptor::NameRegister(), result_register()); // Key.
2613  EmitKeyedPropertyLoad(expr);
2614  context()->Plug(eax);
2615  }
2616 }
2617 
2618 
2619 void FullCodeGenerator::CallIC(Handle<Code> code,
2620  TypeFeedbackId ast_id) {
2621  ic_total_count_++;
2622  __ call(code, RelocInfo::CODE_TARGET, ast_id);
2623 }
2624 
2625 
2626 // Code common for calls using the IC.
// Emits a call where the callee is resolved with a (named) load IC --
// either a plain function call (undefined receiver) or a method call on
// the receiver at the top of the stack.
// NOTE(review): the line closing out the StackValueContext scope work
// (inner listing line 2636) is elided from this listing.
2627 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2628  Expression* callee = expr->expression();
2629 
2630  CallICState::CallType call_type =
2631  callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2632  // Get the target function.
2633  if (call_type == CallICState::FUNCTION) {
2634  { StackValueContext context(this);
2635  EmitVariableLoad(callee->AsVariableProxy());
2637  }
2638  // Push undefined as receiver. This is patched in the method prologue if it
2639  // is a sloppy mode method.
2640  __ push(Immediate(isolate()->factory()->undefined_value()));
2641  } else {
2642  // Load the function from the receiver.
2643  DCHECK(callee->IsProperty());
2644  DCHECK(!callee->AsProperty()->IsSuperAccess());
2645  __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
2646  EmitNamedPropertyLoad(callee->AsProperty());
2647  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2648  // Push the target function under the receiver.
2649  __ push(Operand(esp, 0));
2650  __ mov(Operand(esp, kPointerSize), eax);
2651  }
2652 
2653  EmitCall(expr, call_type);
2654 }
2655 
2656 
// Emits a super.method(...) call: loads the method from the super object
// via %LoadFromSuper, then performs a method call with 'this' as receiver.
// NOTE(review): the function header line is elided from this listing;
// presumably 'void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr)
// {' -- confirm against the original source.
2658  Expression* callee = expr->expression();
2659  DCHECK(callee->IsProperty());
2660  Property* prop = callee->AsProperty();
2661  DCHECK(prop->IsSuperAccess());
2662 
2663  SetSourcePosition(prop->position());
2664  Literal* key = prop->key()->AsLiteral();
2665  DCHECK(!key->value()->IsSmi());
2666  // Load the function from the receiver.
2667  SuperReference* super_ref = callee->AsProperty()->obj()->AsSuperReference();
2668  EmitLoadHomeObject(super_ref);
2669  __ push(eax);
2670  VisitForAccumulatorValue(super_ref->this_var());
2671  __ push(eax);
2672  __ push(eax);
2673  __ push(Operand(esp, kPointerSize * 2));
2674  __ push(Immediate(key->value()));
2675  // Stack here:
2676  // - home_object
2677  // - this (receiver)
2678  // - this (receiver) <-- LoadFromSuper will pop here and below.
2679  // - home_object
2680  // - key
2681  __ CallRuntime(Runtime::kLoadFromSuper, 3);
2682 
2683  // Replace home_object with target function.
2684  __ mov(Operand(esp, kPointerSize), eax);
2685 
2686  // Stack here:
2687  // - target function
2688  // - this (receiver)
2689  EmitCall(expr, CallICState::METHOD);
2690 }
2691 
2692 
2693 // Code common for calls using the IC.
// Emits a call where the callee is obtained with a keyed load IC, i.e.
// receiver[key](...).
// NOTE(review): the function header line, the key evaluation, and the
// name-register move are elided from this listing -- confirm against the
// original source.
2695  Expression* key) {
2696  // Load the key.
2698 
2699  Expression* callee = expr->expression();
2700 
2701  // Load the function from the receiver.
2702  DCHECK(callee->IsProperty());
2703  __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
2705  EmitKeyedPropertyLoad(callee->AsProperty());
2706  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2707 
2708  // Push the target function under the receiver.
2709  __ push(Operand(esp, 0));
2710  __ mov(Operand(esp, kPointerSize), eax);
2711 
2712  EmitCall(expr, CallICState::METHOD);
2713 }
2714 
2715 
// Pushes the arguments and invokes the CallIC; type feedback comes from
// the feedback slot passed in edx rather than a type feedback id.
// NOTE(review): the context-register restore instruction after the IC call
// is elided from this listing.
2716 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2717  // Load the arguments.
2718  ZoneList<Expression*>* args = expr->arguments();
2719  int arg_count = args->length();
2720  { PreservePositionScope scope(masm()->positions_recorder());
2721  for (int i = 0; i < arg_count; i++) {
2722  VisitForStackValue(args->at(i));
2723  }
2724  }
2725 
2726  // Record source position of the IC call.
2727  SetSourcePosition(expr->position());
2728  Handle<Code> ic = CallIC::initialize_stub(
2729  isolate(), arg_count, call_type);
2730  __ Move(edx, Immediate(Smi::FromInt(expr->CallFeedbackSlot())));
2731  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
2732  // Don't assign a type feedback id to the IC, since type feedback is provided
2733  // by the vector above.
2734  CallIC(ic);
2735 
2736  RecordJSReturnSite(expr);
2737 
2738  // Restore context register.
2740 
2741  context()->DropAndPlug(1, eax);
2742 }
2743 
2744 
// Pushes the operands needed by %ResolvePossiblyDirectEval (first argument
// or undefined, enclosing receiver, language mode, scope start position)
// and performs the runtime call; the callee copy is pushed by the caller.
// NOTE(review): the function header line is elided from this listing;
// presumably 'void FullCodeGenerator::EmitResolvePossiblyDirectEval(int
// arg_count) {' -- confirm against the original source.
2746  // Push copy of the first argument or undefined if it doesn't exist.
2747  if (arg_count > 0) {
2748  __ push(Operand(esp, arg_count * kPointerSize));
2749  } else {
2750  __ push(Immediate(isolate()->factory()->undefined_value()));
2751  }
2752 
2753  // Push the receiver of the enclosing function.
2754  __ push(Operand(ebp, (2 + info_->scope()->num_parameters()) * kPointerSize));
2755  // Push the language mode.
2756  __ push(Immediate(Smi::FromInt(strict_mode())));
2757 
2758  // Push the start position of the scope the calls resides in.
2759  __ push(Immediate(Smi::FromInt(scope()->start_position())));
2760 
2761  // Do the runtime call.
2762  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2763 }
2764 
2765 
// Dispatches a call expression to the appropriate emission strategy based
// on its call type: possible direct eval, global, lookup slot, property,
// or other.
// NOTE(review): the context-register restore after the eval path's stub
// call and the super-property-call dispatch line are elided from this
// listing -- confirm against the original source.
2766 void FullCodeGenerator::VisitCall(Call* expr) {
2767 #ifdef DEBUG
2768  // We want to verify that RecordJSReturnSite gets called on all paths
2769  // through this function. Avoid early returns.
2770  expr->return_is_recorded_ = false;
2771 #endif
2772 
2773  Comment cmnt(masm_, "[ Call");
2774  Expression* callee = expr->expression();
2775  Call::CallType call_type = expr->GetCallType(isolate());
2776 
2777  if (call_type == Call::POSSIBLY_EVAL_CALL) {
2778  // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2779  // to resolve the function we need to call and the receiver of the call.
2780  // Then we call the resolved function using the given arguments.
2781  ZoneList<Expression*>* args = expr->arguments();
2782  int arg_count = args->length();
2783  { PreservePositionScope pos_scope(masm()->positions_recorder());
2784  VisitForStackValue(callee);
2785  // Reserved receiver slot.
2786  __ push(Immediate(isolate()->factory()->undefined_value()));
2787  // Push the arguments.
2788  for (int i = 0; i < arg_count; i++) {
2789  VisitForStackValue(args->at(i));
2790  }
2791 
2792  // Push a copy of the function (found below the arguments) and
2793  // resolve eval.
2794  __ push(Operand(esp, (arg_count + 1) * kPointerSize));
2795  EmitResolvePossiblyDirectEval(arg_count);
2796 
2797  // The runtime call returns a pair of values in eax (function) and
2798  // edx (receiver). Touch up the stack with the right values.
2799  __ mov(Operand(esp, (arg_count + 0) * kPointerSize), edx);
2800  __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
2801  }
2802  // Record source position for debugger.
2803  SetSourcePosition(expr->position());
2804  CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
2805  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
2806  __ CallStub(&stub);
2807  RecordJSReturnSite(expr);
2808  // Restore context register.
2810  context()->DropAndPlug(1, eax);
2811 
2812  } else if (call_type == Call::GLOBAL_CALL) {
2813  EmitCallWithLoadIC(expr);
2814 
2815  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2816  // Call to a lookup slot (dynamically introduced variable).
2817  VariableProxy* proxy = callee->AsVariableProxy();
2818  Label slow, done;
2819  { PreservePositionScope scope(masm()->positions_recorder());
2820  // Generate code for loading from variables potentially shadowed by
2821  // eval-introduced variables.
2822  EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
2823  }
2824  __ bind(&slow);
2825  // Call the runtime to find the function to call (returned in eax) and
2826  // the object holding it (returned in edx).
2827  __ push(context_register());
2828  __ push(Immediate(proxy->name()));
2829  __ CallRuntime(Runtime::kLoadLookupSlot, 2);
2830  __ push(eax); // Function.
2831  __ push(edx); // Receiver.
2832 
2833  // If fast case code has been generated, emit code to push the function
2834  // and receiver and have the slow path jump around this code.
2835  if (done.is_linked()) {
2836  Label call;
2837  __ jmp(&call, Label::kNear);
2838  __ bind(&done);
2839  // Push function.
2840  __ push(eax);
2841  // The receiver is implicitly the global receiver. Indicate this by
2842  // passing the hole to the call function stub.
2843  __ push(Immediate(isolate()->factory()->undefined_value()));
2844  __ bind(&call);
2845  }
2846 
2847  // The receiver is either the global receiver or an object found by
2848  // LoadContextSlot.
2849  EmitCall(expr);
2850 
2851  } else if (call_type == Call::PROPERTY_CALL) {
2852  Property* property = callee->AsProperty();
2853  bool is_named_call = property->key()->IsPropertyName();
2854  // super.x() is handled in EmitCallWithLoadIC.
2855  if (property->IsSuperAccess() && is_named_call) {
2857  } else {
2858  {
2859  PreservePositionScope scope(masm()->positions_recorder());
2860  VisitForStackValue(property->obj());
2861  }
2862  if (is_named_call) {
2863  EmitCallWithLoadIC(expr);
2864  } else {
2865  EmitKeyedCallWithLoadIC(expr, property->key());
2866  }
2867  }
2868  } else {
2869  DCHECK(call_type == Call::OTHER_CALL);
2870  // Call to an arbitrary expression not handled specially above.
2871  { PreservePositionScope scope(masm()->positions_recorder());
2872  VisitForStackValue(callee);
2873  }
2874  __ push(Immediate(isolate()->factory()->undefined_value()));
2875  // Emit function call.
2876  EmitCall(expr);
2877  }
2878 
2879 #ifdef DEBUG
2880  // RecordJSReturnSite should have been called.
2881  DCHECK(expr->return_is_recorded_);
2882 #endif
2883 }
2884 
2885 
2886 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2887  Comment cmnt(masm_, "[ CallNew");
2888  // According to ECMA-262, section 11.2.2, page 44, the function
2889  // expression in new calls must be evaluated before the
2890  // arguments.
2891 
2892  // Push constructor on the stack. If it's not a function it's used as
2893  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2894  // ignored.
2895  VisitForStackValue(expr->expression());
2896 
2897  // Push the arguments ("left-to-right") on the stack.
2898  ZoneList<Expression*>* args = expr->arguments();
2899  int arg_count = args->length();
2900  for (int i = 0; i < arg_count; i++) {
2901  VisitForStackValue(args->at(i));
2902  }
2903 
2904  // Call the construct call builtin that handles allocation and
2905  // constructor invocation.
2906  SetSourcePosition(expr->position());
2907 
2908  // Load function and argument count into edi and eax.
2909  __ Move(eax, Immediate(arg_count));
2910  __ mov(edi, Operand(esp, arg_count * kPointerSize));
2911 
2912  // Record call targets in unoptimized code.
2913  if (FLAG_pretenuring_call_new) {
2914  EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
2915  DCHECK(expr->AllocationSiteFeedbackSlot() ==
2916  expr->CallNewFeedbackSlot() + 1);
2917  }
2918 
2919  __ LoadHeapObject(ebx, FeedbackVector());
2920  __ mov(edx, Immediate(Smi::FromInt(expr->CallNewFeedbackSlot())));
2921 
2922  CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
2923  __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2924  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2925  context()->Plug(eax);
2926 }
2927 
2928 
2929 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2930  ZoneList<Expression*>* args = expr->arguments();
2931  DCHECK(args->length() == 1);
2932 
2933  VisitForAccumulatorValue(args->at(0));
2934 
2935  Label materialize_true, materialize_false;
2936  Label* if_true = NULL;
2937  Label* if_false = NULL;
2938  Label* fall_through = NULL;
2939  context()->PrepareTest(&materialize_true, &materialize_false,
2940  &if_true, &if_false, &fall_through);
2941 
2942  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2943  __ test(eax, Immediate(kSmiTagMask));
2944  Split(zero, if_true, if_false, fall_through);
2945 
2946  context()->Plug(if_true, if_false);
2947 }
2948 
2949 
2950 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2951  ZoneList<Expression*>* args = expr->arguments();
2952  DCHECK(args->length() == 1);
2953 
2954  VisitForAccumulatorValue(args->at(0));
2955 
2956  Label materialize_true, materialize_false;
2957  Label* if_true = NULL;
2958  Label* if_false = NULL;
2959  Label* fall_through = NULL;
2960  context()->PrepareTest(&materialize_true, &materialize_false,
2961  &if_true, &if_false, &fall_through);
2962 
2963  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2964  __ test(eax, Immediate(kSmiTagMask | 0x80000000));
2965  Split(zero, if_true, if_false, fall_through);
2966 
2967  context()->Plug(if_true, if_false);
2968 }
2969 
2970 
// %_IsObject(value): true for null, or for a non-undetectable heap object
// whose instance type falls in the tested range (compared below via
// below/below_equal).
// NOTE(review): the map load and the instance-type comparison lines are
// elided from this listing -- confirm against the original source.
2971 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2972  ZoneList<Expression*>* args = expr->arguments();
2973  DCHECK(args->length() == 1);
2974 
2975  VisitForAccumulatorValue(args->at(0));
2976 
2977  Label materialize_true, materialize_false;
2978  Label* if_true = NULL;
2979  Label* if_false = NULL;
2980  Label* fall_through = NULL;
2981  context()->PrepareTest(&materialize_true, &materialize_false,
2982  &if_true, &if_false, &fall_through);
2983 
2984  __ JumpIfSmi(eax, if_false);
2985  __ cmp(eax, isolate()->factory()->null_value());
2986  __ j(equal, if_true);
2988  // Undetectable objects behave like undefined when tested with typeof.
2990  __ test(ecx, Immediate(1 << Map::kIsUndetectable));
2991  __ j(not_zero, if_false);
2994  __ j(below, if_false);
2996  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2997  Split(below_equal, if_true, if_false, fall_through);
2998 
2999  context()->Plug(if_true, if_false);
3000 }
3001 
3002 
3003 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3004  ZoneList<Expression*>* args = expr->arguments();
3005  DCHECK(args->length() == 1);
3006 
3007  VisitForAccumulatorValue(args->at(0));
3008 
3009  Label materialize_true, materialize_false;
3010  Label* if_true = NULL;
3011  Label* if_false = NULL;
3012  Label* fall_through = NULL;
3013  context()->PrepareTest(&materialize_true, &materialize_false,
3014  &if_true, &if_false, &fall_through);
3015 
3016  __ JumpIfSmi(eax, if_false);
3017  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ebx);
3018  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3019  Split(above_equal, if_true, if_false, fall_through);
3020 
3021  context()->Plug(if_true, if_false);
3022 }
3023 
3024 
void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
  // %_IsUndetectableObject(value): tests the map's "undetectable" bit.
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Smis are never undetectable.
  __ JumpIfSmi(eax, if_false);
  // NOTE(review): the listing elides line(s) here that load the value's
  // map and its bit field into ebx — verify against upstream
  // full-codegen-ia32.cc.
  __ test(ebx, Immediate(1 << Map::kIsUndetectable));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(not_zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3047 
3048 
void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  // Tests whether a String wrapper can use the fast default [[DefaultValue]]
  // path, i.e. no own "valueOf" property shadows the unmodified
  // String.prototype. The result is cached in a map bit once computed.
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false, skip_lookup;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ AssertNotSmi(eax);

  // Check whether this map has already been checked to be safe for default
  // valueOf.
  // NOTE(review): the listing elides the map load and bit-field test that
  // produce the flags consumed by the jump below — verify against upstream
  // full-codegen-ia32.cc.
  __ j(not_zero, &skip_lookup);

  // Check for fast case object. Return false for slow case objects.
  // NOTE(review): elided line(s) loading the properties backing store's
  // map into ecx — verify upstream.
  __ cmp(ecx, isolate()->factory()->hash_table_map());
  __ j(equal, if_false);

  // Look for valueOf string in the descriptor array, and indicate false if
  // found. Since we omit an enumeration index check, if it is added via a
  // transition that shares its descriptor array, this is a false positive.
  Label entry, loop, done;

  // Skip loop if no descriptors are valid.
  __ NumberOfOwnDescriptors(ecx, ebx);
  __ cmp(ecx, 0);
  __ j(equal, &done);

  __ LoadInstanceDescriptors(ebx, ebx);
  // ebx: descriptor array.
  // ecx: valid entries in the descriptor array.
  // Calculate the end of the descriptor array.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  // NOTE(review): elided line(s) here (apparently further STATIC_ASSERTs /
  // scaling of ecx) — verify upstream.
  __ lea(ecx, Operand(ebx, ecx, times_4, DescriptorArray::kFirstOffset));
  // Calculate location of the first key name.
  __ add(ebx, Immediate(DescriptorArray::kFirstOffset));
  // Loop through all the keys in the descriptor array. If one of these is the
  // internalized string "valueOf" the result is false.
  __ jmp(&entry);
  __ bind(&loop);
  __ mov(edx, FieldOperand(ebx, 0));
  __ cmp(edx, isolate()->factory()->value_of_string());
  __ j(equal, if_false);
  // NOTE(review): elided advance of ebx to the next descriptor entry —
  // verify upstream.
  __ bind(&entry);
  __ cmp(ebx, ecx);
  __ j(not_equal, &loop);

  __ bind(&done);

  // Reload map as register ebx was used as temporary above.
  // NOTE(review): elided map reload — verify upstream.

  // Set the bit in the map to indicate that there is no local valueOf field.
  // NOTE(review): elided or/mov lines that record the cached result in the
  // map — verify upstream.

  __ bind(&skip_lookup);

  // If a valueOf property is not found on the object check that its
  // prototype is the un-modified String prototype. If not result is false.
  // NOTE(review): elided prototype load into ecx — verify upstream.
  __ JumpIfSmi(ecx, if_false);
  // NOTE(review): the two statements below are truncated in this listing
  // (their trailing operand lines are elided) — verify upstream.
  __ mov(edx,
  __ cmp(ecx,
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3138 
3139 
3140 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3141  ZoneList<Expression*>* args = expr->arguments();
3142  DCHECK(args->length() == 1);
3143 
3144  VisitForAccumulatorValue(args->at(0));
3145 
3146  Label materialize_true, materialize_false;
3147  Label* if_true = NULL;
3148  Label* if_false = NULL;
3149  Label* fall_through = NULL;
3150  context()->PrepareTest(&materialize_true, &materialize_false,
3151  &if_true, &if_false, &fall_through);
3152 
3153  __ JumpIfSmi(eax, if_false);
3154  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
3155  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3156  Split(equal, if_true, if_false, fall_through);
3157 
3158  context()->Plug(if_true, if_false);
3159 }
3160 
3161 
3162 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3163  ZoneList<Expression*>* args = expr->arguments();
3164  DCHECK(args->length() == 1);
3165 
3166  VisitForAccumulatorValue(args->at(0));
3167 
3168  Label materialize_true, materialize_false;
3169  Label* if_true = NULL;
3170  Label* if_false = NULL;
3171  Label* fall_through = NULL;
3172  context()->PrepareTest(&materialize_true, &materialize_false,
3173  &if_true, &if_false, &fall_through);
3174 
3175  Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
3176  __ CheckMap(eax, map, if_false, DO_SMI_CHECK);
3177  // Check if the exponent half is 0x80000000. Comparing against 1 and
3178  // checking for overflow is the shortest possible encoding.
3179  __ cmp(FieldOperand(eax, HeapNumber::kExponentOffset), Immediate(0x1));
3180  __ j(no_overflow, if_false);
3181  __ cmp(FieldOperand(eax, HeapNumber::kMantissaOffset), Immediate(0x0));
3182  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3183  Split(equal, if_true, if_false, fall_through);
3184 
3185  context()->Plug(if_true, if_false);
3186 }
3187 
3188 
3189 
3190 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3191  ZoneList<Expression*>* args = expr->arguments();
3192  DCHECK(args->length() == 1);
3193 
3194  VisitForAccumulatorValue(args->at(0));
3195 
3196  Label materialize_true, materialize_false;
3197  Label* if_true = NULL;
3198  Label* if_false = NULL;
3199  Label* fall_through = NULL;
3200  context()->PrepareTest(&materialize_true, &materialize_false,
3201  &if_true, &if_false, &fall_through);
3202 
3203  __ JumpIfSmi(eax, if_false);
3204  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
3205  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3206  Split(equal, if_true, if_false, fall_through);
3207 
3208  context()->Plug(if_true, if_false);
3209 }
3210 
3211 
3212 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3213  ZoneList<Expression*>* args = expr->arguments();
3214  DCHECK(args->length() == 1);
3215 
3216  VisitForAccumulatorValue(args->at(0));
3217 
3218  Label materialize_true, materialize_false;
3219  Label* if_true = NULL;
3220  Label* if_false = NULL;
3221  Label* fall_through = NULL;
3222  context()->PrepareTest(&materialize_true, &materialize_false,
3223  &if_true, &if_false, &fall_through);
3224 
3225  __ JumpIfSmi(eax, if_false);
3226  __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
3227  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3228  Split(equal, if_true, if_false, fall_through);
3229 
3230  context()->Plug(if_true, if_false);
3231 }
3232 
3233 
3234 
void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
  // %_IsConstructCall(): true iff the calling frame is a construct frame,
  // skipping over an arguments-adaptor frame if one is present.
  DCHECK(expr->arguments()->length() == 0);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Get the frame pointer for the calling frame.
  // NOTE(review): the listing elides the load of the caller frame pointer
  // here — verify against upstream full-codegen-ia32.cc.

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  // NOTE(review): elided compare of the frame's context slot against the
  // arguments-adaptor sentinel — verify upstream.
  __ j(not_equal, &check_frame_marker);
  // NOTE(review): elided dereference of the adaptor frame's caller fp —
  // verify upstream.

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  // NOTE(review): the cmp below is truncated; its first operand line is
  // elided in this listing — verify upstream.
  Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3264 
3265 
3266 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3267  ZoneList<Expression*>* args = expr->arguments();
3268  DCHECK(args->length() == 2);
3269 
3270  // Load the two objects into registers and perform the comparison.
3271  VisitForStackValue(args->at(0));
3272  VisitForAccumulatorValue(args->at(1));
3273 
3274  Label materialize_true, materialize_false;
3275  Label* if_true = NULL;
3276  Label* if_false = NULL;
3277  Label* fall_through = NULL;
3278  context()->PrepareTest(&materialize_true, &materialize_false,
3279  &if_true, &if_false, &fall_through);
3280 
3281  __ pop(ebx);
3282  __ cmp(eax, ebx);
3283  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3284  Split(equal, if_true, if_false, fall_through);
3285 
3286  context()->Plug(if_true, if_false);
3287 }
3288 
3289 
3290 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3291  ZoneList<Expression*>* args = expr->arguments();
3292  DCHECK(args->length() == 1);
3293 
3294  // ArgumentsAccessStub expects the key in edx and the formal
3295  // parameter count in eax.
3296  VisitForAccumulatorValue(args->at(0));
3297  __ mov(edx, eax);
3298  __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
3299  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3300  __ CallStub(&stub);
3301  context()->Plug(eax);
3302 }
3303 
3304 
void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  // %_ArgumentsLength(): the number of actual arguments, as a smi. Starts
  // from the formal parameter count and overrides it when an arguments
  // adaptor frame is present.
  DCHECK(expr->arguments()->length() == 0);

  Label exit;
  // Get the number of formal parameters.
  __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));

  // Check if the calling frame is an arguments adaptor frame.
  // NOTE(review): the listing elides the frame-pointer load and sentinel
  // compare that set the flags for the jump below — verify against
  // upstream full-codegen-ia32.cc.
  __ j(not_equal, &exit);

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  // NOTE(review): elided load of the adaptor frame's argument count into
  // eax — verify upstream.

  __ bind(&exit);
  __ AssertSmi(eax);
  context()->Plug(eax);
}
3326 
3327 
void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  // %_ClassOf(value): 'Function' for functions, the constructor's instance
  // class name for ordinary objects, 'Object' for objects with a
  // non-function constructor, and null for non-objects.
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is a smi, we return null.
  __ JumpIfSmi(eax, &null);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  // Assume that there are only two callable types, and one of them is at
  // either end of the type range for JS object types. Saves extra comparisons.
  // NOTE(review): a STATIC_ASSERT line is elided here — verify against
  // upstream full-codegen-ia32.cc.
  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, eax);
  // Map is now in eax.
  __ j(below, &null);
  // NOTE(review): elided STATIC_ASSERT / instance-type compare lines here;
  // the jump below tests the first callable boundary — verify upstream.
  __ j(equal, &function);

  __ CmpInstanceType(eax, LAST_SPEC_OBJECT_TYPE);
  // NOTE(review): the line below is the tail of an elided STATIC_ASSERT —
  // verify upstream.
  LAST_SPEC_OBJECT_TYPE - 1);
  __ j(equal, &function);
  // Assume that there is no larger type.
  // NOTE(review): an elided STATIC_ASSERT documented that assumption.

  // Check if the constructor in the map is a JS function.
  // NOTE(review): elided load of the map's constructor into eax — verify
  // upstream.
  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
  __ j(not_equal, &non_function_constructor);

  // eax now contains the constructor function. Grab the
  // instance class name from there.
  // NOTE(review): elided loads of the shared function info / instance
  // class name — verify upstream.
  __ jmp(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ mov(eax, isolate()->factory()->Function_string());
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ mov(eax, isolate()->factory()->Object_string());
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ mov(eax, isolate()->factory()->null_value());

  // All done.
  __ bind(&done);

  context()->Plug(eax);
}
3387 
3388 
3389 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3390  // Load the arguments on the stack and call the stub.
3391  SubStringStub stub(isolate());
3392  ZoneList<Expression*>* args = expr->arguments();
3393  DCHECK(args->length() == 3);
3394  VisitForStackValue(args->at(0));
3395  VisitForStackValue(args->at(1));
3396  VisitForStackValue(args->at(2));
3397  __ CallStub(&stub);
3398  context()->Plug(eax);
3399 }
3400 
3401 
3402 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3403  // Load the arguments on the stack and call the stub.
3404  RegExpExecStub stub(isolate());
3405  ZoneList<Expression*>* args = expr->arguments();
3406  DCHECK(args->length() == 4);
3407  VisitForStackValue(args->at(0));
3408  VisitForStackValue(args->at(1));
3409  VisitForStackValue(args->at(2));
3410  VisitForStackValue(args->at(3));
3411  __ CallStub(&stub);
3412  context()->Plug(eax);
3413 }
3414 
3415 
void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  // %_ValueOf(object): unwrap a JSValue wrapper; any other value is
  // returned unchanged.
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0)); // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(eax, &done, Label::kNear);
  // If the object is not a value type, return the object.
  __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
  __ j(not_equal, &done, Label::kNear);
  // NOTE(review): the listing elides the load of the wrapped value
  // (JSValue::kValueOffset) into eax — verify against upstream
  // full-codegen-ia32.cc.

  __ bind(&done);
  context()->Plug(eax);
}
3433 
3434 
3435 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3436  ZoneList<Expression*>* args = expr->arguments();
3437  DCHECK(args->length() == 2);
3438  DCHECK_NE(NULL, args->at(1)->AsLiteral());
3439  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3440 
3441  VisitForAccumulatorValue(args->at(0)); // Load the object.
3442 
3443  Label runtime, done, not_date_object;
3444  Register object = eax;
3445  Register result = eax;
3446  Register scratch = ecx;
3447 
3448  __ JumpIfSmi(object, &not_date_object);
3449  __ CmpObjectType(object, JS_DATE_TYPE, scratch);
3450  __ j(not_equal, &not_date_object);
3451 
3452  if (index->value() == 0) {
3453  __ mov(result, FieldOperand(object, JSDate::kValueOffset));
3454  __ jmp(&done);
3455  } else {
3456  if (index->value() < JSDate::kFirstUncachedField) {
3457  ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3458  __ mov(scratch, Operand::StaticVariable(stamp));
3459  __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
3460  __ j(not_equal, &runtime, Label::kNear);
3461  __ mov(result, FieldOperand(object, JSDate::kValueOffset +
3462  kPointerSize * index->value()));
3463  __ jmp(&done);
3464  }
3465  __ bind(&runtime);
3466  __ PrepareCallCFunction(2, scratch);
3467  __ mov(Operand(esp, 0), object);
3468  __ mov(Operand(esp, 1 * kPointerSize), Immediate(index));
3469  __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3470  __ jmp(&done);
3471  }
3472 
3473  __ bind(&not_date_object);
3474  __ CallRuntime(Runtime::kThrowNotDateError, 0);
3475  __ bind(&done);
3476  context()->Plug(result);
3477 }
3478 
3479 
3480 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3481  ZoneList<Expression*>* args = expr->arguments();
3482  DCHECK_EQ(3, args->length());
3483 
3484  Register string = eax;
3485  Register index = ebx;
3486  Register value = ecx;
3487 
3488  VisitForStackValue(args->at(0)); // index
3489  VisitForStackValue(args->at(1)); // value
3490  VisitForAccumulatorValue(args->at(2)); // string
3491 
3492  __ pop(value);
3493  __ pop(index);
3494 
3495  if (FLAG_debug_code) {
3496  __ test(value, Immediate(kSmiTagMask));
3497  __ Check(zero, kNonSmiValue);
3498  __ test(index, Immediate(kSmiTagMask));
3499  __ Check(zero, kNonSmiValue);
3500  }
3501 
3502  __ SmiUntag(value);
3503  __ SmiUntag(index);
3504 
3505  if (FLAG_debug_code) {
3506  static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3507  __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3508  }
3509 
3510  __ mov_b(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
3511  value);
3512  context()->Plug(string);
3513 }
3514 
3515 
3516 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3517  ZoneList<Expression*>* args = expr->arguments();
3518  DCHECK_EQ(3, args->length());
3519 
3520  Register string = eax;
3521  Register index = ebx;
3522  Register value = ecx;
3523 
3524  VisitForStackValue(args->at(0)); // index
3525  VisitForStackValue(args->at(1)); // value
3526  VisitForAccumulatorValue(args->at(2)); // string
3527  __ pop(value);
3528  __ pop(index);
3529 
3530  if (FLAG_debug_code) {
3531  __ test(value, Immediate(kSmiTagMask));
3532  __ Check(zero, kNonSmiValue);
3533  __ test(index, Immediate(kSmiTagMask));
3534  __ Check(zero, kNonSmiValue);
3535  __ SmiUntag(index);
3536  static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3537  __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3538  __ SmiTag(index);
3539  }
3540 
3541  __ SmiUntag(value);
3542  // No need to untag a smi for two-byte addressing.
3543  __ mov_w(FieldOperand(string, index, times_1, SeqTwoByteString::kHeaderSize),
3544  value);
3545  context()->Plug(string);
3546 }
3547 
3548 
3549 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3550  // Load the arguments on the stack and call the runtime function.
3551  ZoneList<Expression*>* args = expr->arguments();
3552  DCHECK(args->length() == 2);
3553  VisitForStackValue(args->at(0));
3554  VisitForStackValue(args->at(1));
3555 
3556  MathPowStub stub(isolate(), MathPowStub::ON_STACK);
3557  __ CallStub(&stub);
3558  context()->Plug(eax);
3559 }
3560 
3561 
void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  // %_SetValueOf(object, value): store into a JSValue wrapper; for
  // non-wrapper objects (and smis) the value is simply returned.
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0)); // Load the object.
  VisitForAccumulatorValue(args->at(1)); // Load the value.
  __ pop(ebx); // eax = value. ebx = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(ebx, &done, Label::kNear);

  // If the object is not a value type, return the value.
  __ CmpObjectType(ebx, JS_VALUE_TYPE, ecx);
  __ j(not_equal, &done, Label::kNear);

  // Store the value.
  // NOTE(review): the listing elides the store into
  // FieldOperand(ebx, JSValue::kValueOffset) — verify against upstream
  // full-codegen-ia32.cc.

  // Update the write barrier. Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ mov(edx, eax);
  __ RecordWriteField(ebx, JSValue::kValueOffset, edx, ecx, kDontSaveFPRegs);

  __ bind(&done);
  context()->Plug(eax);
}
3589 
3590 
3591 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3592  ZoneList<Expression*>* args = expr->arguments();
3593  DCHECK_EQ(args->length(), 1);
3594 
3595  // Load the argument into eax and call the stub.
3596  VisitForAccumulatorValue(args->at(0));
3597 
3598  NumberToStringStub stub(isolate());
3599  __ CallStub(&stub);
3600  context()->Plug(eax);
3601 }
3602 
3603 
3604 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3605  ZoneList<Expression*>* args = expr->arguments();
3606  DCHECK(args->length() == 1);
3607 
3608  VisitForAccumulatorValue(args->at(0));
3609 
3610  Label done;
3611  StringCharFromCodeGenerator generator(eax, ebx);
3612  generator.GenerateFast(masm_);
3613  __ jmp(&done);
3614 
3615  NopRuntimeCallHelper call_helper;
3616  generator.GenerateSlow(masm_, call_helper);
3617 
3618  __ bind(&done);
3619  context()->Plug(ebx);
3620 }
3621 
3622 
void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  // %_StringCharCodeAt(string, index): fast inline path emitted by
  // StringCharCodeAtGenerator; out-of-range indices produce NaN, and a
  // non-smi index yields undefined to trigger conversion in the caller.
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register result = edx;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  // NOTE(review): the constructor call below is truncated — its final
  // argument line(s) are elided in this listing; verify against upstream
  // full-codegen-ia32.cc.
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ Move(result, Immediate(isolate()->factory()->nan_value()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move the undefined value into the result register, which will
  // trigger conversion.
  __ Move(result, Immediate(isolate()->factory()->undefined_value()));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
3667 
3668 
void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  // %_StringCharAt(string, index): fast inline path emitted by
  // StringCharAtGenerator; out-of-range indices produce the empty string,
  // and a non-smi index yields smi zero to trigger conversion.
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register scratch = edx;
  Register result = eax;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  // NOTE(review): the constructor call below is truncated — its final
  // argument line(s) are elided in this listing; verify against upstream
  // full-codegen-ia32.cc.
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ Move(result, Immediate(isolate()->factory()->empty_string()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ Move(result, Immediate(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
3715 
3716 
3717 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3718  ZoneList<Expression*>* args = expr->arguments();
3719  DCHECK_EQ(2, args->length());
3720  VisitForStackValue(args->at(0));
3721  VisitForAccumulatorValue(args->at(1));
3722 
3723  __ pop(edx);
3724  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
3725  __ CallStub(&stub);
3726  context()->Plug(eax);
3727 }
3728 
3729 
3730 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3731  ZoneList<Expression*>* args = expr->arguments();
3732  DCHECK_EQ(2, args->length());
3733 
3734  VisitForStackValue(args->at(0));
3735  VisitForStackValue(args->at(1));
3736 
3737  StringCompareStub stub(isolate());
3738  __ CallStub(&stub);
3739  context()->Plug(eax);
3740 }
3741 
3742 
void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  // %_CallFunction(receiver, arg1, ..., function): invoke directly when
  // the callee really is a JS function; otherwise fall back to %Call,
  // which also handles proxies.
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() >= 2);

  int arg_count = args->length() - 2; // 2 ~ receiver and function.
  for (int i = 0; i < arg_count + 1; ++i) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last()); // Function.

  Label runtime, done;
  // Check for non-function argument (including proxy).
  __ JumpIfSmi(eax, &runtime);
  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
  __ j(not_equal, &runtime);

  // InvokeFunction requires the function in edi. Move it in there.
  __ mov(edi, result_register());
  ParameterCount count(arg_count);
  __ InvokeFunction(edi, count, CALL_FUNCTION, NullCallWrapper());
  // NOTE(review): the listing elides a line here after the call (possibly
  // a context restore) — verify against upstream full-codegen-ia32.cc.
  __ jmp(&done);

  __ bind(&runtime);
  __ push(eax);
  __ CallRuntime(Runtime::kCall, args->length());
  __ bind(&done);

  context()->Plug(eax);
}
3773 
3774 
3775 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3776  // Load the arguments on the stack and call the stub.
3777  RegExpConstructResultStub stub(isolate());
3778  ZoneList<Expression*>* args = expr->arguments();
3779  DCHECK(args->length() == 3);
3780  VisitForStackValue(args->at(0));
3781  VisitForStackValue(args->at(1));
3782  VisitForAccumulatorValue(args->at(2));
3783  __ pop(ebx);
3784  __ pop(ecx);
3785  __ CallStub(&stub);
3786  context()->Plug(eax);
3787 }
3788 
3789 
void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  // %_GetFromCache(cache_id, key): look the key up in a JS-function result
  // cache; on a finger hit return the cached value, otherwise call the
  // runtime to perform the lookup.
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());

  DCHECK_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->native_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    // Compile-time-known failure: the requested cache does not exist.
    __ Abort(kAttemptToUseUndefinedCache);
    __ mov(eax, isolate()->factory()->undefined_value());
    context()->Plug(eax);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = eax;
  Register cache = ebx;
  Register tmp = ecx;
  // NOTE(review): the two mov statements below are truncated — their
  // operand lines (native-context / caches-array loads) are elided in this
  // listing; verify against upstream full-codegen-ia32.cc.
  __ mov(cache,
  __ mov(cache,
         FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));

  Label done, not_found;
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  // NOTE(review): elided load of the cache finger into tmp — verify
  // upstream.
  // tmp now holds finger offset as a smi.
  __ cmp(key, FixedArrayElementOperand(cache, tmp));
  __ j(not_equal, &not_found);

  // Hit: the cached value sits one element past the key.
  __ mov(eax, FixedArrayElementOperand(cache, tmp, 1));
  __ jmp(&done);

  __ bind(&not_found);
  // Call runtime to perform the lookup.
  __ push(cache);
  __ push(key);
  __ CallRuntime(Runtime::kGetFromCache, 2);

  __ bind(&done);
  context()->Plug(eax);
}
3837 
3838 
void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  // %_HasCachedArrayIndex(string): tests whether the string's hash field
  // carries a cached array index.
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  __ AssertString(eax);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // NOTE(review): the listing elides the test of the string's hash field
  // that produces the flags consumed by Split below — verify against
  // upstream full-codegen-ia32.cc.
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3861 
3862 
void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  // %_GetCachedArrayIndex(string): extract the array index cached in the
  // string's hash field.
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(eax);

  // NOTE(review): the listing elides a line here (presumably the load of
  // the hash field into eax) — verify against upstream full-codegen-ia32.cc.
  __ IndexFromHash(eax, eax);

  context()->Plug(eax);
}
3875 
3876 
3877 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
3878  Label bailout, done, one_char_separator, long_separator,
3879  non_trivial_array, not_size_one_array, loop,
3880  loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
3881 
3882  ZoneList<Expression*>* args = expr->arguments();
3883  DCHECK(args->length() == 2);
3884  // We will leave the separator on the stack until the end of the function.
3885  VisitForStackValue(args->at(1));
3886  // Load this to eax (= array)
3887  VisitForAccumulatorValue(args->at(0));
3888  // All aliases of the same register have disjoint lifetimes.
3889  Register array = eax;
3890  Register elements = no_reg; // Will be eax.
3891 
3892  Register index = edx;
3893 
3894  Register string_length = ecx;
3895 
3896  Register string = esi;
3897 
3898  Register scratch = ebx;
3899 
3900  Register array_length = edi;
3901  Register result_pos = no_reg; // Will be edi.
3902 
3903  // Separator operand is already pushed.
3904  Operand separator_operand = Operand(esp, 2 * kPointerSize);
3905  Operand result_operand = Operand(esp, 1 * kPointerSize);
3906  Operand array_length_operand = Operand(esp, 0);
3907  __ sub(esp, Immediate(2 * kPointerSize));
3908  __ cld();
3909  // Check that the array is a JSArray
3910  __ JumpIfSmi(array, &bailout);
3911  __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
3912  __ j(not_equal, &bailout);
3913 
3914  // Check that the array has fast elements.
3915  __ CheckFastElements(scratch, &bailout);
3916 
3917  // If the array has length zero, return the empty string.
3918  __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
3919  __ SmiUntag(array_length);
3920  __ j(not_zero, &non_trivial_array);
3921  __ mov(result_operand, isolate()->factory()->empty_string());
3922  __ jmp(&done);
3923 
3924  // Save the array length.
3925  __ bind(&non_trivial_array);
3926  __ mov(array_length_operand, array_length);
3927 
3928  // Save the FixedArray containing array's elements.
3929  // End of array's live range.
3930  elements = array;
3931  __ mov(elements, FieldOperand(array, JSArray::kElementsOffset));
3932  array = no_reg;
3933 
3934 
3935  // Check that all array elements are sequential one-byte strings, and
3936  // accumulate the sum of their lengths, as a smi-encoded value.
3937  __ Move(index, Immediate(0));
3938  __ Move(string_length, Immediate(0));
3939  // Loop condition: while (index < length).
3940  // Live loop registers: index, array_length, string,
3941  // scratch, string_length, elements.
3942  if (generate_debug_code_) {
3943  __ cmp(index, array_length);
3944  __ Assert(less, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
3945  }
3946  __ bind(&loop);
3947  __ mov(string, FieldOperand(elements,
3948  index,
3951  __ JumpIfSmi(string, &bailout);
3952  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
3953  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3954  __ and_(scratch, Immediate(
3956  __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
3957  __ j(not_equal, &bailout);
3958  __ add(string_length,
3960  __ j(overflow, &bailout);
3961  __ add(index, Immediate(1));
3962  __ cmp(index, array_length);
3963  __ j(less, &loop);
3964 
3965  // If array_length is 1, return elements[0], a string.
3966  __ cmp(array_length, 1);
3967  __ j(not_equal, &not_size_one_array);
3968  __ mov(scratch, FieldOperand(elements, FixedArray::kHeaderSize));
3969  __ mov(result_operand, scratch);
3970  __ jmp(&done);
3971 
3972  __ bind(&not_size_one_array);
3973 
3974  // End of array_length live range.
3975  result_pos = array_length;
3976  array_length = no_reg;
3977 
3978  // Live registers:
3979  // string_length: Sum of string lengths, as a smi.
3980  // elements: FixedArray of strings.
3981 
3982  // Check that the separator is a flat one-byte string.
3983  __ mov(string, separator_operand);
3984  __ JumpIfSmi(string, &bailout);
3985  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
3986  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3987  __ and_(scratch, Immediate(
3989  __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
3990  __ j(not_equal, &bailout);
3991 
3992  // Add (separator length times array_length) - separator length
3993  // to string_length.
3994  __ mov(scratch, separator_operand);
3995  __ mov(scratch, FieldOperand(scratch, SeqOneByteString::kLengthOffset));
3996  __ sub(string_length, scratch); // May be negative, temporarily.
3997  __ imul(scratch, array_length_operand);
3998  __ j(overflow, &bailout);
3999  __ add(string_length, scratch);
4000  __ j(overflow, &bailout);
4001 
4002  __ shr(string_length, 1);
4003  // Live registers and stack values:
4004  // string_length
4005  // elements
4006  __ AllocateOneByteString(result_pos, string_length, scratch, index, string,
4007  &bailout);
4008  __ mov(result_operand, result_pos);
4009  __ lea(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize));
4010 
4011 
4012  __ mov(string, separator_operand);
4014  Immediate(Smi::FromInt(1)));
4015  __ j(equal, &one_char_separator);
4016  __ j(greater, &long_separator);
4017 
4018 
4019  // Empty separator case
4020  __ mov(index, Immediate(0));
4021  __ jmp(&loop_1_condition);
4022  // Loop condition: while (index < length).
4023  __ bind(&loop_1);
4024  // Each iteration of the loop concatenates one string to the result.
4025  // Live values in registers:
4026  // index: which element of the elements array we are adding to the result.
4027  // result_pos: the position to which we are currently copying characters.
4028  // elements: the FixedArray of strings we are joining.
4029 
4030  // Get string = array[index].
4031  __ mov(string, FieldOperand(elements, index,
4034  __ mov(string_length,
4036  __ shr(string_length, 1);
4037  __ lea(string,
4039  __ CopyBytes(string, result_pos, string_length, scratch);
4040  __ add(index, Immediate(1));
4041  __ bind(&loop_1_condition);
4042  __ cmp(index, array_length_operand);
4043  __ j(less, &loop_1); // End while (index < length).
4044  __ jmp(&done);
4045 
4046 
4047 
4048  // One-character separator case
4049  __ bind(&one_char_separator);
4050  // Replace separator with its one-byte character value.
4051  __ mov_b(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
4052  __ mov_b(separator_operand, scratch);
4053 
4054  __ Move(index, Immediate(0));
4055  // Jump into the loop after the code that copies the separator, so the first
4056  // element is not preceded by a separator
4057  __ jmp(&loop_2_entry);
4058  // Loop condition: while (index < length).
4059  __ bind(&loop_2);
4060  // Each iteration of the loop concatenates one string to the result.
4061  // Live values in registers:
4062  // index: which element of the elements array we are adding to the result.
4063  // result_pos: the position to which we are currently copying characters.
4064 
4065  // Copy the separator character to the result.
4066  __ mov_b(scratch, separator_operand);
4067  __ mov_b(Operand(result_pos, 0), scratch);
4068  __ inc(result_pos);
4069 
4070  __ bind(&loop_2_entry);
4071  // Get string = array[index].
4072  __ mov(string, FieldOperand(elements, index,
4075  __ mov(string_length,
4077  __ shr(string_length, 1);
4078  __ lea(string,
4080  __ CopyBytes(string, result_pos, string_length, scratch);
4081  __ add(index, Immediate(1));
4082 
4083  __ cmp(index, array_length_operand);
4084  __ j(less, &loop_2); // End while (index < length).
4085  __ jmp(&done);
4086 
4087 
4088  // Long separator case (separator is more than one character).
4089  __ bind(&long_separator);
4090 
4091  __ Move(index, Immediate(0));
4092  // Jump into the loop after the code that copies the separator, so the first
4093  // element is not preceded by a separator
4094  __ jmp(&loop_3_entry);
4095  // Loop condition: while (index < length).
4096  __ bind(&loop_3);
4097  // Each iteration of the loop concatenates one string to the result.
4098  // Live values in registers:
4099  // index: which element of the elements array we are adding to the result.
4100  // result_pos: the position to which we are currently copying characters.
4101 
4102  // Copy the separator to the result.
4103  __ mov(string, separator_operand);
4104  __ mov(string_length,
4106  __ shr(string_length, 1);
4107  __ lea(string,
4109  __ CopyBytes(string, result_pos, string_length, scratch);
4110 
4111  __ bind(&loop_3_entry);
4112  // Get string = array[index].
4113  __ mov(string, FieldOperand(elements, index,
4116  __ mov(string_length,
4118  __ shr(string_length, 1);
4119  __ lea(string,
4121  __ CopyBytes(string, result_pos, string_length, scratch);
4122  __ add(index, Immediate(1));
4123 
4124  __ cmp(index, array_length_operand);
4125  __ j(less, &loop_3); // End while (index < length).
4126  __ jmp(&done);
4127 
4128 
4129  __ bind(&bailout);
4130  __ mov(result_operand, isolate()->factory()->undefined_value());
4131  __ bind(&done);
4132  __ mov(eax, result_operand);
4133  // Drop temp values from the stack, and restore context register.
4134  __ add(esp, Immediate(3 * kPointerSize));
4135 
4137  context()->Plug(eax);
4138 }
4139 
4140 
4141 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4142  DCHECK(expr->arguments()->length() == 0);
4143  ExternalReference debug_is_active =
4144  ExternalReference::debug_is_active_address(isolate());
4145  __ movzx_b(eax, Operand::StaticVariable(debug_is_active));
4146  __ SmiTag(eax);
4147  context()->Plug(eax);
4148 }
4149 
4150 
4151 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
// Emits a runtime call. INLINE intrinsics go to their hand-written
// generators; JS-runtime calls (%_...) load the function off the builtins
// object and invoke it via CallFunctionStub; everything else calls the
// C++ runtime directly.
4152  if (expr->function() != NULL &&
4153  expr->function()->intrinsic_type == Runtime::INLINE) {
4154  Comment cmnt(masm_, "[ InlineRuntimeCall");
4155  EmitInlineRuntimeCall(expr);
4156  return;
4157  }
4158 
4159  Comment cmnt(masm_, "[ CallRuntime");
4160  ZoneList<Expression*>* args = expr->arguments();
4161 
4162  if (expr->is_jsruntime()) {
4163  // Push the builtins object as receiver.
4164  __ mov(eax, GlobalObjectOperand());
// NOTE(review): original line 4165 is missing from this extraction
// (presumably the push of eax) — confirm against upstream sources.
4166 
4167  // Load the function from the receiver.
4168  __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
4169  __ mov(LoadDescriptor::NameRegister(), Immediate(expr->name()))
4170  if (FLAG_vector_ics) {
// NOTE(review): lines 4171 and 4173 are missing here (the slot-register
// mov and the vector-IC load call) — confirm against upstream.
4172  Immediate(Smi::FromInt(expr->CallRuntimeFeedbackSlot())));
4174  } else {
4175  CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4176  }
4177 
4178  // Push the target function under the receiver.
4179  __ push(Operand(esp, 0));
4180  __ mov(Operand(esp, kPointerSize), eax);
4181 
4182  // Code common for calls using the IC.
4183  ZoneList<Expression*>* args = expr->arguments();
4184  int arg_count = args->length();
4185  for (int i = 0; i < arg_count; i++) {
4186  VisitForStackValue(args->at(i));
4187  }
4188 
4189  // Record source position of the IC call.
4190  SetSourcePosition(expr->position());
4191  CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4192  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
4193  __ CallStub(&stub);
4194  // Restore context register.
// NOTE(review): line 4195 is missing (the mov restoring esi from the
// frame's context slot) — confirm against upstream.
4196  context()->DropAndPlug(1, eax);
4197 
4198  } else {
4199  // Push the arguments ("left-to-right").
4200  int arg_count = args->length();
4201  for (int i = 0; i < arg_count; i++) {
4202  VisitForStackValue(args->at(i));
4203  }
4204 
4205  // Call the C runtime function.
4206  __ CallRuntime(expr->function(), arg_count);
4207 
4208  context()->Plug(eax);
4209  }
4210 }
4211 
4212 
4213 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
// Emits code for the unary operators delete, void, !, and typeof.
// Each case hands its result to the current expression context.
4214  switch (expr->op()) {
4215  case Token::DELETE: {
4216  Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4217  Property* property = expr->expression()->AsProperty();
4218  VariableProxy* proxy = expr->expression()->AsVariableProxy();
4219 
4220  if (property != NULL) {
// delete obj[key]: push receiver, key and language mode, then call the
// DELETE builtin.
4221  VisitForStackValue(property->obj());
4222  VisitForStackValue(property->key());
4223  __ push(Immediate(Smi::FromInt(strict_mode())));
4224  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4225  context()->Plug(eax);
4226  } else if (proxy != NULL) {
4227  Variable* var = proxy->var();
4228  // Delete of an unqualified identifier is disallowed in strict mode
4229  // but "delete this" is allowed.
4230  DCHECK(strict_mode() == SLOPPY || var->is_this());
4231  if (var->IsUnallocated()) {
4232  __ push(GlobalObjectOperand());
4233  __ push(Immediate(var->name()));
4234  __ push(Immediate(Smi::FromInt(SLOPPY)));
4235  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4236  context()->Plug(eax);
4237  } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4238  // Result of deleting non-global variables is false. 'this' is
4239  // not really a variable, though we implement it as one. The
4240  // subexpression does not have side effects.
4241  context()->Plug(var->is_this());
4242  } else {
4243  // Non-global variable. Call the runtime to try to delete from the
4244  // context where the variable was introduced.
4245  __ push(context_register());
4246  __ push(Immediate(var->name()));
4247  __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4248  context()->Plug(eax);
4249  }
4250  } else {
4251  // Result of deleting non-property, non-variable reference is true.
4252  // The subexpression may have side effects.
4253  VisitForEffect(expr->expression());
4254  context()->Plug(true);
4255  }
4256  break;
4257  }
4258 
4259  case Token::VOID: {
4260  Comment cmnt(masm_, "[ UnaryOperation (VOID)");
// void e: evaluate e for its side effects, then yield undefined.
4261  VisitForEffect(expr->expression());
4262  context()->Plug(isolate()->factory()->undefined_value());
4263  break;
4264  }
4265 
4266  case Token::NOT: {
4267  Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4268  if (context()->IsEffect()) {
4269  // Unary NOT has no side effects so it's only necessary to visit the
4270  // subexpression. Match the optimizing compiler by not branching.
4271  VisitForEffect(expr->expression());
4272  } else if (context()->IsTest()) {
4273  const TestContext* test = TestContext::cast(context());
4274  // The labels are swapped for the recursive call.
4275  VisitForControl(expr->expression(),
4276  test->false_label(),
4277  test->true_label(),
4278  test->fall_through());
4279  context()->Plug(test->true_label(), test->false_label());
4280  } else {
4281  // We handle value contexts explicitly rather than simply visiting
4282  // for control and plugging the control flow into the context,
4283  // because we need to prepare a pair of extra administrative AST ids
4284  // for the optimizing compiler.
4285  DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4286  Label materialize_true, materialize_false, done;
// Note the swapped labels: control flowing to materialize_true means
// the subexpression was FALSE, so !e is true.
4287  VisitForControl(expr->expression(),
4288  &materialize_false,
4289  &materialize_true,
4290  &materialize_true);
4291  __ bind(&materialize_true);
4292  PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4293  if (context()->IsAccumulatorValue()) {
4294  __ mov(eax, isolate()->factory()->true_value());
4295  } else {
4296  __ Push(isolate()->factory()->true_value());
4297  }
4298  __ jmp(&done, Label::kNear);
4299  __ bind(&materialize_false);
4300  PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4301  if (context()->IsAccumulatorValue()) {
4302  __ mov(eax, isolate()->factory()->false_value());
4303  } else {
4304  __ Push(isolate()->factory()->false_value());
4305  }
4306  __ bind(&done);
4307  }
4308  break;
4309  }
4310 
4311  case Token::TYPEOF: {
4312  Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
// Evaluate the operand in typeof mode (no reference errors for
// unresolved globals), then call the Typeof runtime function.
4313  { StackValueContext context(this);
4314  VisitForTypeofValue(expr->expression());
4315  }
4316  __ CallRuntime(Runtime::kTypeof, 1);
4317  context()->Plug(eax);
4318  break;
4319  }
4320 
4321  default:
4322  UNREACHABLE();
4323  }
4324 }
4325 
4326 
4327 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
// Emits code for ++/-- (prefix and postfix) on a variable, named property
// or keyed property: load the value, try an inline smi +/-1 with overflow
// check, fall back to ToNumber + BinaryOpIC, then store the result back.
4328  DCHECK(expr->expression()->IsValidReferenceExpression());
4329 
4330  Comment cmnt(masm_, "[ CountOperation");
4331  SetSourcePosition(expr->position());
4332 
4333  // Expression can only be a property, a global or a (parameter or local)
4334  // slot.
// NOTE(review): line 4335 is missing from this extraction (presumably the
// LhsKind typedef/using line) — confirm against upstream.
4336  LhsKind assign_type = VARIABLE;
4337  Property* prop = expr->expression()->AsProperty();
4338  // In case of a property we use the uninitialized expression context
4339  // of the key to detect a named property.
4340  if (prop != NULL) {
4341  assign_type =
4342  (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4343  if (prop->IsSuperAccess()) {
4344  // throw exception.
4345  VisitSuperReference(prop->obj()->AsSuperReference());
4346  return;
4347  }
4348  }
4349 
4350  // Evaluate expression and get value.
4351  if (assign_type == VARIABLE) {
4352  DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4353  AccumulatorValueContext context(this);
4354  EmitVariableLoad(expr->expression()->AsVariableProxy());
4355  } else {
4356  // Reserve space for result of postfix operation.
4357  if (expr->is_postfix() && !context()->IsEffect()) {
4358  __ push(Immediate(Smi::FromInt(0)));
4359  }
4360  if (assign_type == NAMED_PROPERTY) {
4361  // Put the object both on the stack and in the register.
4362  VisitForStackValue(prop->obj());
4363  __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
4364  EmitNamedPropertyLoad(prop);
4365  } else {
4366  VisitForStackValue(prop->obj());
4367  VisitForStackValue(prop->key());
// NOTE(review): line 4368 is missing (the mov of the receiver register
// that line 4369 is the continuation of) — confirm against upstream.
4369  Operand(esp, kPointerSize)); // Object.
4370  __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0)); // Key.
4371  EmitKeyedPropertyLoad(prop);
4372  }
4373  }
4374 
4375  // We need a second deoptimization point after loading the value
4376  // in case evaluating the property load my have a side effect.
4377  if (assign_type == VARIABLE) {
4378  PrepareForBailout(expr->expression(), TOS_REG);
4379  } else {
4380  PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4381  }
4382 
4383  // Inline smi case if we are in a loop.
4384  Label done, stub_call;
4385  JumpPatchSite patch_site(masm_);
4386  if (ShouldInlineSmiCase(expr->op())) {
4387  Label slow;
4388  patch_site.EmitJumpIfNotSmi(eax, &slow, Label::kNear);
4389 
4390  // Save result for postfix expressions.
4391  if (expr->is_postfix()) {
4392  if (!context()->IsEffect()) {
4393  // Save the result on the stack. If we have a named or keyed property
4394  // we store the result under the receiver that is currently on top
4395  // of the stack.
4396  switch (assign_type) {
4397  case VARIABLE:
4398  __ push(eax);
4399  break;
4400  case NAMED_PROPERTY:
4401  __ mov(Operand(esp, kPointerSize), eax);
4402  break;
4403  case KEYED_PROPERTY:
4404  __ mov(Operand(esp, 2 * kPointerSize), eax);
4405  break;
4406  }
4407  }
4408  }
4409 
// Smi fast path: add or subtract the tagged constant 1; on overflow the
// operation is undone and we fall through to the stub.
4410  if (expr->op() == Token::INC) {
4411  __ add(eax, Immediate(Smi::FromInt(1)));
4412  } else {
4413  __ sub(eax, Immediate(Smi::FromInt(1)));
4414  }
4415  __ j(no_overflow, &done, Label::kNear);
4416  // Call stub. Undo operation first.
4417  if (expr->op() == Token::INC) {
4418  __ sub(eax, Immediate(Smi::FromInt(1)));
4419  } else {
4420  __ add(eax, Immediate(Smi::FromInt(1)));
4421  }
4422  __ jmp(&stub_call, Label::kNear);
4423  __ bind(&slow);
4424  }
4425  ToNumberStub convert_stub(isolate());
4426  __ CallStub(&convert_stub);
4427 
4428  // Save result for postfix expressions.
4429  if (expr->is_postfix()) {
4430  if (!context()->IsEffect()) {
4431  // Save the result on the stack. If we have a named or keyed property
4432  // we store the result under the receiver that is currently on top
4433  // of the stack.
4434  switch (assign_type) {
4435  case VARIABLE:
4436  __ push(eax);
4437  break;
4438  case NAMED_PROPERTY:
4439  __ mov(Operand(esp, kPointerSize), eax);
4440  break;
4441  case KEYED_PROPERTY:
4442  __ mov(Operand(esp, 2 * kPointerSize), eax);
4443  break;
4444  }
4445  }
4446  }
4447 
4448  // Record position before stub call.
4449  SetSourcePosition(expr->position());
4450 
4451  // Call stub for +1/-1.
4452  __ bind(&stub_call);
4453  __ mov(edx, eax);
4454  __ mov(eax, Immediate(Smi::FromInt(1)));
4455  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), expr->binary_op(),
4456  NO_OVERWRITE).code();
4457  CallIC(code, expr->CountBinOpFeedbackId());
4458  patch_site.EmitPatchInfo();
4459  __ bind(&done);
4460 
4461  // Store the value returned in eax.
4462  switch (assign_type) {
4463  case VARIABLE:
4464  if (expr->is_postfix()) {
4465  // Perform the assignment as if via '='.
4466  { EffectContext context(this);
4467  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4468  Token::ASSIGN);
4469  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4470  context.Plug(eax);
4471  }
4472  // For all contexts except EffectContext We have the result on
4473  // top of the stack.
4474  if (!context()->IsEffect()) {
4475  context()->PlugTOS();
4476  }
4477  } else {
4478  // Perform the assignment as if via '='.
4479  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4480  Token::ASSIGN);
4481  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4482  context()->Plug(eax);
4483  }
4484  break;
4485  case NAMED_PROPERTY: {
// NOTE(review): lines 4486 and 4488 are missing here (the moves loading
// the store IC's name and receiver registers) — confirm against upstream.
4487  prop->key()->AsLiteral()->value());
4489  CallStoreIC(expr->CountStoreFeedbackId());
4490  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4491  if (expr->is_postfix()) {
4492  if (!context()->IsEffect()) {
4493  context()->PlugTOS();
4494  }
4495  } else {
4496  context()->Plug(eax);
4497  }
4498  break;
4499  }
4500  case KEYED_PROPERTY: {
// NOTE(review): lines 4501 and 4502 are missing (the pops of the keyed
// store IC's key and receiver registers) — confirm against upstream.
4503  Handle<Code> ic =
4504  CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
4505  CallIC(ic, expr->CountStoreFeedbackId());
4506  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4507  if (expr->is_postfix()) {
4508  // Result is on the stack
4509  if (!context()->IsEffect()) {
4510  context()->PlugTOS();
4511  }
4512  } else {
4513  context()->Plug(eax);
4514  }
4515  break;
4516  }
4517  }
4518 }
4519 
4520 
4521 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
// Loads the operand of a typeof expression without ever throwing a
// reference error: globals use a non-contextual load, lookup slots use the
// NoReferenceError runtime entry, everything else is a normal load.
4522  VariableProxy* proxy = expr->AsVariableProxy();
4523  DCHECK(!context()->IsEffect());
4524  DCHECK(!context()->IsTest());
4525 
4526  if (proxy != NULL && proxy->var()->IsUnallocated()) {
4527  Comment cmnt(masm_, "[ Global variable");
// NOTE(review): line 4528 (the receiver-register load) is missing from
// this extraction — confirm against upstream.
4529  __ mov(LoadDescriptor::NameRegister(), Immediate(proxy->name()));
4530  if (FLAG_vector_ics) {
// NOTE(review): line 4531 (the slot-register mov this continues) is
// missing — confirm against upstream.
4532  Immediate(Smi::FromInt(proxy->VariableFeedbackSlot())));
4533  }
4534  // Use a regular load, not a contextual load, to avoid a reference
4535  // error.
// NOTE(review): line 4536 (the CallLoadIC(NOT_CONTEXTUAL) call) is
// missing — confirm against upstream.
4537  PrepareForBailout(expr, TOS_REG);
4538  context()->Plug(eax);
4539  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4540  Comment cmnt(masm_, "[ Lookup slot");
4541  Label done, slow;
4542 
4543  // Generate code for loading from variables potentially shadowed
4544  // by eval-introduced variables.
4545  EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);
4546 
4547  __ bind(&slow);
4548  __ push(esi);
4549  __ push(Immediate(proxy->name()));
4550  __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
4551  PrepareForBailout(expr, TOS_REG);
4552  __ bind(&done);
4553 
4554  context()->Plug(eax);
4555  } else {
4556  // This expression cannot throw a reference error at the top level.
// NOTE(review): line 4557 (the ordinary VisitInDuplicateContext call) is
// missing — confirm against upstream.
4558  }
4559 }
4560 
4561 
4562 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4563  Expression* sub_expr,
4564  Handle<String> check) {
// Fast path for "typeof x == 'literal'": evaluates x once, then performs
// the type check inline and routes the result into the test context.
4565  Label materialize_true, materialize_false;
4566  Label* if_true = NULL;
4567  Label* if_false = NULL;
4568  Label* fall_through = NULL;
4569  context()->PrepareTest(&materialize_true, &materialize_false,
4570  &if_true, &if_false, &fall_through);
4571 
4572  { AccumulatorValueContext context(this);
4573  VisitForTypeofValue(sub_expr);
4574  }
4575  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4576 
4577  Factory* factory = isolate()->factory();
4578  if (String::Equals(check, factory->number_string())) {
4579  __ JumpIfSmi(eax, if_true);
// NOTE(review): line 4580 (the cmp against the map that line 4581
// continues) is missing from this extraction — confirm against upstream.
4581  isolate()->factory()->heap_number_map());
4582  Split(equal, if_true, if_false, fall_through);
4583  } else if (String::Equals(check, factory->string_string())) {
4584  __ JumpIfSmi(eax, if_false);
4585  __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
4586  __ j(above_equal, if_false);
4587  // Check for undetectable objects => false.
// NOTE(review): line 4588 (the test_b of the map's bit field that 4589
// continues) is missing — confirm against upstream.
4589  1 << Map::kIsUndetectable);
4590  Split(zero, if_true, if_false, fall_through);
4591  } else if (String::Equals(check, factory->symbol_string())) {
4592  __ JumpIfSmi(eax, if_false);
4593  __ CmpObjectType(eax, SYMBOL_TYPE, edx);
4594  Split(equal, if_true, if_false, fall_through);
4595  } else if (String::Equals(check, factory->boolean_string())) {
4596  __ cmp(eax, isolate()->factory()->true_value());
4597  __ j(equal, if_true);
4598  __ cmp(eax, isolate()->factory()->false_value());
4599  Split(equal, if_true, if_false, fall_through);
4600  } else if (String::Equals(check, factory->undefined_string())) {
4601  __ cmp(eax, isolate()->factory()->undefined_value());
4602  __ j(equal, if_true);
4603  __ JumpIfSmi(eax, if_false);
4604  // Check for undetectable objects => true.
// NOTE(review): lines 4605-4606 (loading the map and its bit field into
// ecx) are missing — confirm against upstream.
4607  __ test(ecx, Immediate(1 << Map::kIsUndetectable));
4608  Split(not_zero, if_true, if_false, fall_through);
4609  } else if (String::Equals(check, factory->function_string())) {
4610  __ JumpIfSmi(eax, if_false);
// NOTE(review): line 4611 (a STATIC_ASSERT about instance types) is
// missing — confirm against upstream.
4612  __ CmpObjectType(eax, JS_FUNCTION_TYPE, edx);
4613  __ j(equal, if_true);
4614  __ CmpInstanceType(edx, JS_FUNCTION_PROXY_TYPE);
4615  Split(equal, if_true, if_false, fall_through);
4616  } else if (String::Equals(check, factory->object_string())) {
4617  __ JumpIfSmi(eax, if_false);
4618  __ cmp(eax, isolate()->factory()->null_value());
4619  __ j(equal, if_true);
4620  __ CmpObjectType(eax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, edx);
4621  __ j(below, if_false);
4622  __ CmpInstanceType(edx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4623  __ j(above, if_false);
4624  // Check for undetectable objects => false.
// NOTE(review): line 4625 (the test_b this line continues) is missing —
// confirm against upstream.
4626  1 << Map::kIsUndetectable);
4627  Split(zero, if_true, if_false, fall_through);
4628  } else {
4629  if (if_false != fall_through) __ jmp(if_false);
4630  }
4631  context()->Plug(if_true, if_false);
4632 }
4633 
4634 
4635 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
// Emits a comparison: literal compares get an inline fast path, 'in' and
// 'instanceof' call builtins/stubs, and everything else goes through the
// CompareIC with an optional inline smi path.
4636  Comment cmnt(masm_, "[ CompareOperation");
4637  SetSourcePosition(expr->position());
4638 
4639  // First we try a fast inlined version of the compare when one of
4640  // the operands is a literal.
4641  if (TryLiteralCompare(expr)) return;
4642 
4643  // Always perform the comparison for its control flow. Pack the result
4644  // into the expression's context after the comparison is performed.
4645  Label materialize_true, materialize_false;
4646  Label* if_true = NULL;
4647  Label* if_false = NULL;
4648  Label* fall_through = NULL;
4649  context()->PrepareTest(&materialize_true, &materialize_false,
4650  &if_true, &if_false, &fall_through);
4651 
4652  Token::Value op = expr->op();
4653  VisitForStackValue(expr->left());
4654  switch (op) {
4655  case Token::IN:
4656  VisitForStackValue(expr->right());
4657  __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4658  PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4659  __ cmp(eax, isolate()->factory()->true_value());
4660  Split(equal, if_true, if_false, fall_through);
4661  break;
4662 
4663  case Token::INSTANCEOF: {
4664  VisitForStackValue(expr->right());
4665  InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
4666  __ CallStub(&stub);
4667  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4668  __ test(eax, eax);
4669  // The stub returns 0 for true.
4670  Split(zero, if_true, if_false, fall_through);
4671  break;
4672  }
4673 
4674  default: {
4675  VisitForAccumulatorValue(expr->right());
// NOTE(review): line 4676 is missing from this extraction — presumably
// "Condition cc = CompareIC::ComputeCondition(op);", which the Split
// calls below depend on; confirm against upstream.
4677  __ pop(edx);
4678 
4679  bool inline_smi_code = ShouldInlineSmiCase(op);
4680  JumpPatchSite patch_site(masm_);
4681  if (inline_smi_code) {
// Both-smi fast path: or the operands together and test the tag bit.
4682  Label slow_case;
4683  __ mov(ecx, edx);
4684  __ or_(ecx, eax);
4685  patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
4686  __ cmp(edx, eax);
4687  Split(cc, if_true, if_false, NULL);
4688  __ bind(&slow_case);
4689  }
4690 
4691  // Record position and call the compare IC.
4692  SetSourcePosition(expr->position());
4693  Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
4694  CallIC(ic, expr->CompareOperationFeedbackId());
4695  patch_site.EmitPatchInfo();
4696 
4697  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4698  __ test(eax, eax);
4699  Split(cc, if_true, if_false, fall_through);
4700  }
4701  }
4702 
4703  // Convert the result of the comparison into one expected for this
4704  // expression's context.
4705  context()->Plug(if_true, if_false);
4706 }
4707 
4708 
4709 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4710  Expression* sub_expr,
4711  NilValue nil) {
4712  Label materialize_true, materialize_false;
4713  Label* if_true = NULL;
4714  Label* if_false = NULL;
4715  Label* fall_through = NULL;
4716  context()->PrepareTest(&materialize_true, &materialize_false,
4717  &if_true, &if_false, &fall_through);
4718 
4719  VisitForAccumulatorValue(sub_expr);
4720  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4721 
4722  Handle<Object> nil_value = nil == kNullValue
4723  ? isolate()->factory()->null_value()
4724  : isolate()->factory()->undefined_value();
4725  if (expr->op() == Token::EQ_STRICT) {
4726  __ cmp(eax, nil_value);
4727  Split(equal, if_true, if_false, fall_through);
4728  } else {
4729  Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4730  CallIC(ic, expr->CompareOperationFeedbackId());
4731  __ test(eax, eax);
4732  Split(not_zero, if_true, if_false, fall_through);
4733  }
4734  context()->Plug(if_true, if_false);
4735 }
4736 
4737 
4738 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
// NOTE(review): line 4739 is missing from this extraction — presumably the
// load of the current closure into eax from the frame's function slot;
// confirm against upstream before relying on this body.
4740  context()->Plug(eax);
4741 }
4742 
4743 
// NOTE(review): the signature line (4744) was lost in extraction; the body
// returns eax, the accumulator/result register on ia32.
4745  return eax;
4746 }
4747 
4748 
// NOTE(review): the signature line (4749) was lost in extraction; the body
// returns esi, the register holding the current context on ia32.
4750  return esi;
4751 }
4752 
4753 
4754 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4755  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4756  __ mov(Operand(ebp, frame_offset), value);
4757 }
4758 
4759 
4760 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4761  __ mov(dst, ContextOperand(esi, context_index));
4762 }
4763 
4764 
// NOTE(review): the function signature line (4765) was lost in extraction —
// from context this is the routine that pushes the closure argument used
// when allocating a function context; confirm against upstream.
4766  Scope* declaration_scope = scope()->DeclarationScope();
4767  if (declaration_scope->is_global_scope() ||
4768  declaration_scope->is_module_scope()) {
4769  // Contexts nested in the native context have a canonical empty function
4770  // as their closure, not the anonymous closure containing the global
4771  // code. Pass a smi sentinel and let the runtime look up the empty
4772  // function.
4773  __ push(Immediate(Smi::FromInt(0)));
4774  } else if (declaration_scope->is_eval_scope()) {
4775  // Contexts nested inside eval code have the same closure as the context
4776  // calling eval, not the anonymous closure containing the eval code.
4777  // Fetch it from the context.
// NOTE(review): line 4778 (the push of the closure context slot) is
// missing from this extraction — confirm against upstream.
4779  } else {
4780  DCHECK(declaration_scope->is_function_scope());
// NOTE(review): line 4781 (the push of the frame's function slot) is
// missing — confirm against upstream.
4782  }
4783 }
4784 
4785 
4786 // ----------------------------------------------------------------------------
4787 // Non-local control flow support.
4788 
// NOTE(review): the function signature line (4789, EnterFinallyBlock) was
// lost in extraction. The body saves the return address (as a smi-encoded
// delta), the result register, and the isolate's pending-message state on
// the stack before running a finally block.
4790  // Cook return address on top of stack (smi encoded Code* delta)
4791  DCHECK(!result_register().is(edx));
4792  __ pop(edx);
4793  __ sub(edx, Immediate(masm_->CodeObject()));
// NOTE(review): line 4794 (a STATIC_ASSERT on the smi encoding) is missing
// from this extraction — confirm against upstream.
4795  STATIC_ASSERT(kSmiTag == 0);
4796  __ SmiTag(edx);
4797  __ push(edx);
4798 
4799  // Store result register while executing finally block.
4800  __ push(result_register());
4801 
4802  // Store pending message while executing finally block.
4803  ExternalReference pending_message_obj =
4804  ExternalReference::address_of_pending_message_obj(isolate());
4805  __ mov(edx, Operand::StaticVariable(pending_message_obj));
4806  __ push(edx);
4807 
4808  ExternalReference has_pending_message =
4809  ExternalReference::address_of_has_pending_message(isolate());
4810  __ mov(edx, Operand::StaticVariable(has_pending_message));
4811  __ SmiTag(edx);
4812  __ push(edx);
4813 
4814  ExternalReference pending_message_script =
4815  ExternalReference::address_of_pending_message_script(isolate());
4816  __ mov(edx, Operand::StaticVariable(pending_message_script));
4817  __ push(edx);
4818 }
4819 
4820 
// NOTE(review): the function signature line (4821, ExitFinallyBlock) was
// lost in extraction. The body mirrors EnterFinallyBlock: it pops the
// pending-message state, the result register, and the cooked return
// address (in reverse push order) and resumes at that address.
4822  DCHECK(!result_register().is(edx));
4823  // Restore pending message from stack.
4824  __ pop(edx);
4825  ExternalReference pending_message_script =
4826  ExternalReference::address_of_pending_message_script(isolate());
4827  __ mov(Operand::StaticVariable(pending_message_script), edx);
4828 
4829  __ pop(edx);
4830  __ SmiUntag(edx);
4831  ExternalReference has_pending_message =
4832  ExternalReference::address_of_has_pending_message(isolate());
4833  __ mov(Operand::StaticVariable(has_pending_message), edx);
4834 
4835  __ pop(edx);
4836  ExternalReference pending_message_obj =
4837  ExternalReference::address_of_pending_message_obj(isolate());
4838  __ mov(Operand::StaticVariable(pending_message_obj), edx);
4839 
4840  // Restore result register from stack.
4841  __ pop(result_register());
4842 
4843  // Uncook return address.
4844  __ pop(edx);
4845  __ SmiUntag(edx);
4846  __ add(edx, Immediate(masm_->CodeObject()));
4847  __ jmp(edx);
4848 }
4849 
4850 
4851 #undef __
4852 
4853 #define __ ACCESS_MASM(masm())
4854 
// NOTE(review): the first signature line (4855) was lost in extraction —
// these are the trailing parameters of FullCodeGenerator::TryFinally::Exit;
// confirm against upstream.
4856  int* stack_depth,
4857  int* context_length) {
4858  // The macros used here must preserve the result register.
4859 
4860  // Because the handler block contains the context of the finally
4861  // code, we can restore it directly from there for the finally code
4862  // rather than iteratively unwinding contexts via their previous
4863  // links.
4864  __ Drop(*stack_depth); // Down to the handler block.
4865  if (*context_length > 0) {
4866  // Restore the context to its dedicated register and the stack.
// NOTE(review): lines 4867-4868 (the moves restoring esi from the handler
// and writing it back to the frame) are missing — confirm against upstream.
4869  }
4870  __ PopTryHandler();
4871  __ call(finally_entry_);
4872 
4873  *stack_depth = 0;
4874  *context_length = 0;
4875  return previous_;
4876 }
4877 
4878 #undef __
4879 
4880 
// Byte patterns used by BackEdgeTable::PatchAt/GetBackEdgeState below to
// toggle the back-edge check sequence between "jns over the call" and
// "two-byte nop, always call".
4881 static const byte kJnsInstruction = 0x79;  // x86 opcode for "jns rel8".
4882 static const byte kJnsOffset = 0x11;  // Displacement that skips the call.
4883 static const byte kNopByteOne = 0x66;  // Operand-size prefix: 0x66 0x90
4884 static const byte kNopByteTwo = 0x90;  // together form a two-byte nop.
4885 #ifdef DEBUG
4886 static const byte kCallInstruction = 0xe8;  // x86 opcode for "call rel32".
4887 #endif
4888 
4889 
4890 void BackEdgeTable::PatchAt(Code* unoptimized_code,
4891  Address pc,
4892  BackEdgeState target_state,
4893  Code* replacement_code) {
4894  Address call_target_address = pc - kIntSize;
4895  Address jns_instr_address = call_target_address - 3;
4896  Address jns_offset_address = call_target_address - 2;
4897 
4898  switch (target_state) {
4899  case INTERRUPT:
4900  // sub <profiling_counter>, <delta> ;; Not changed
4901  // jns ok
4902  // call <interrupt stub>
4903  // ok:
4904  *jns_instr_address = kJnsInstruction;
4905  *jns_offset_address = kJnsOffset;
4906  break;
4907  case ON_STACK_REPLACEMENT:
4908  case OSR_AFTER_STACK_CHECK:
4909  // sub <profiling_counter>, <delta> ;; Not changed
4910  // nop
4911  // nop
4912  // call <on-stack replacment>
4913  // ok:
4914  *jns_instr_address = kNopByteOne;
4915  *jns_offset_address = kNopByteTwo;
4916  break;
4917  }
4918 
4919  Assembler::set_target_address_at(call_target_address,
4920  unoptimized_code,
4921  replacement_code->entry());
4922  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4923  unoptimized_code, call_target_address, replacement_code);
4924 }
4925 
4926 
// NOTE(review): the first signature line (4927, the return type and name of
// BackEdgeTable::GetBackEdgeState) was lost in extraction — these are its
// parameters. The function decodes which state PatchAt left the sequence in
// by inspecting the branch byte and the call's target address.
4928  Isolate* isolate,
4929  Code* unoptimized_code,
4930  Address pc) {
4931  Address call_target_address = pc - kIntSize;
4932  Address jns_instr_address = call_target_address - 3;
4933  DCHECK_EQ(kCallInstruction, *(call_target_address - 1));
4934 
// A jns byte means the interrupt check is still active.
4935  if (*jns_instr_address == kJnsInstruction) {
4936  DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
4937  DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
4938  Assembler::target_address_at(call_target_address,
4939  unoptimized_code));
4940  return INTERRUPT;
4941  }
4942 
4943  DCHECK_EQ(kNopByteOne, *jns_instr_address);
4944  DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));
4945 
// Otherwise the jump was nopped out; distinguish the two OSR states by
// which builtin the call targets.
4946  if (Assembler::target_address_at(call_target_address, unoptimized_code) ==
4947  isolate->builtins()->OnStackReplacement()->entry()) {
4948  return ON_STACK_REPLACEMENT;
4949  }
4950 
4951  DCHECK_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
4952  Assembler::target_address_at(call_target_address,
4953  unoptimized_code));
4954  return OSR_AFTER_STACK_CHECK;
4955 }
4956 
4957 
4958 } } // namespace v8::internal
4959 
4960 #endif // V8_TARGET_ARCH_IA32
#define BASE_EMBEDDED
Definition: allocation.h:45
Isolate * isolate() const
Definition: assembler.h:62
static Address target_address_at(Address pc, ConstantPoolArray *constant_pool)
static void set_target_address_at(Address pc, ConstantPoolArray *constant_pool, Address target, ICacheFlushMode icache_flush_mode=FLUSH_ICACHE_IF_NEEDED)
static const int kJSReturnSequenceLength
int SizeOfCodeGeneratedSince(Label *label)
static void PatchAt(Code *unoptimized_code, Address pc, BackEdgeState target_state, Code *replacement_code)
static BackEdgeState GetBackEdgeState(Isolate *isolate, Code *unoptimized_code, Address pc_after)
static BailoutId Declarations()
Definition: utils.h:962
static BailoutId FunctionEntry()
Definition: utils.h:961
static Handle< Code > initialize_stub(Isolate *isolate, int argc, CallICState::CallType call_type)
Definition: ic.cc:1338
static const int kValueOffset
Definition: objects.h:9446
static Condition ComputeCondition(Token::Value op)
static Handle< Code > GetUninitialized(Isolate *isolate, NilValue nil)
Definition: code-stubs.h:1329
void AddNoFrameRange(int from, int to)
Definition: compiler.h:354
static bool IsCompileTimeValue(Expression *expression)
Definition: parser.cc:3331
static Handle< SharedFunctionInfo > BuildFunctionInfo(FunctionLiteral *node, Handle< Script > script, CompilationInfo *outer)
Definition: compiler.cc:1243
@ STRING_FUNCTION_PROTOTYPE_MAP_INDEX
Definition: contexts.h:294
static int SlotOffset(int index)
Definition: contexts.h:552
static const int kDescriptorSize
Definition: objects.h:3038
static const int kEnumCacheBridgeCacheOffset
Definition: objects.h:3032
static const int kEnumCacheOffset
Definition: objects.h:3028
static const int kFirstOffset
Definition: objects.h:3029
static const int kLengthOffset
Definition: objects.h:2392
static const int kHeaderSize
Definition: objects.h:2393
static int OffsetOfElementAt(int index)
Definition: objects.h:2455
virtual void DropAndPlug(int count, Register reg) const
virtual void DropAndPlug(int count, Register reg) const
virtual void DropAndPlug(int count, Register reg) const
virtual void DropAndPlug(int count, Register reg) const
static const TestContext * cast(const ExpressionContext *context)
Definition: full-codegen.h:778
virtual void Plug(bool flag) const
virtual NestedStatement * Exit(int *stack_depth, int *context_length)
static const int kMaxBackEdgeWeight
Definition: full-codegen.h:99
void EmitAccessor(Expression *expression)
void Split(Condition cc, Label *if_true, Label *if_false, Label *fall_through)
void EmitBinaryOp(BinaryOperation *expr, Token::Value op, OverwriteMode mode)
void EmitLiteralCompareTypeof(Expression *expr, Expression *sub_expr, Handle< String > check)
void VisitDeclarations(ZoneList< Declaration * > *declarations)
void PrepareForBailoutForId(BailoutId id, State state)
void EmitNewClosure(Handle< SharedFunctionInfo > info, bool pretenure)
void GetVar(Register destination, Variable *var)
static Register context_register()
ZoneList< Handle< Object > > * globals_
Definition: full-codegen.h:837
void VisitForControl(Expression *expr, Label *if_true, Label *if_false, Label *fall_through)
Definition: full-codegen.h:382
void CallLoadIC(ContextualMode mode, TypeFeedbackId id=TypeFeedbackId::None())
void RecordBackEdge(BailoutId osr_ast_id)
MemOperand StackOperand(Variable *var)
void EmitVariableLoad(VariableProxy *proxy)
void SetVar(Variable *var, Register source, Register scratch0, Register scratch1)
MemOperand ContextSlotOperandCheckExtensions(Variable *var, Label *slow)
void EmitKeyedPropertyAssignment(Assignment *expr)
void DeclareGlobals(Handle< FixedArray > pairs)
void EmitResolvePossiblyDirectEval(int arg_count)
void VisitForStackValue(Expression *expr)
Definition: full-codegen.h:376
void EmitKeyedCallWithLoadIC(Call *expr, Expression *key)
void EmitKeyedPropertyLoad(Property *expr)
void EmitDebugCheckDeclarationContext(Variable *variable)
FunctionLiteral * function()
Definition: full-codegen.h:609
void EmitNamedSuperPropertyLoad(Property *expr)
bool TryLiteralCompare(CompareOperation *compare)
void SetStatementPosition(Statement *stmt)
Handle< FixedArray > FeedbackVector()
Definition: full-codegen.h:432
void StoreToFrameField(int frame_offset, Register value)
void LoadContextField(Register dst, int context_index)
const ExpressionContext * context()
Definition: full-codegen.h:602
void EmitNamedPropertyLoad(Property *expr)
void EmitBackEdgeBookkeeping(IterationStatement *stmt, Label *back_edge_target)
void DoTest(Expression *condition, Label *if_true, Label *if_false, Label *fall_through)
void VisitForAccumulatorValue(Expression *expr)
Definition: full-codegen.h:370
void PrepareForBailout(Expression *node, State state)
void CallStoreIC(TypeFeedbackId id=TypeFeedbackId::None())
MemOperand VarOperand(Variable *var, Register scratch)
void DeclareModules(Handle< FixedArray > descriptions)
void EmitGeneratorResume(Expression *generator, Expression *value, JSGeneratorObject::ResumeMode resume_mode)
void VisitForEffect(Expression *expr)
Definition: full-codegen.h:364
void EmitAssignment(Expression *expr)
void EmitCall(Call *expr, CallICState::CallType=CallICState::FUNCTION)
void SetFunctionPosition(FunctionLiteral *fun)
void EmitLoadHomeObject(SuperReference *expr)
void EmitStoreToStackLocalOrContextSlot(Variable *var, MemOperand location)
void EmitInlineSmiBinaryOp(BinaryOperation *expr, Token::Value op, OverwriteMode mode, Expression *left, Expression *right)
void EmitLiteralCompareNil(CompareOperation *expr, Expression *sub_expr, NilValue nil)
void EmitVariableAssignment(Variable *var, Token::Value op)
void CallIC(Handle< Code > code, TypeFeedbackId id=TypeFeedbackId::None())
void EmitCreateIteratorResult(bool done)
void EmitLoadGlobalCheckExtensions(VariableProxy *proxy, TypeofState typeof_state, Label *slow)
void EmitCallWithLoadIC(Call *expr)
void EnsureSlotContainsAllocationSite(int slot)
void PrepareForBailoutBeforeSplit(Expression *expr, bool should_normalize, Label *if_true, Label *if_false)
void EmitInlineRuntimeCall(CallRuntime *expr)
void EmitSuperCallWithLoadIC(Call *expr)
void EmitNamedSuperPropertyAssignment(Assignment *expr)
void EmitNamedPropertyAssignment(Assignment *expr)
Handle< FixedArray > handler_table_
Definition: full-codegen.h:844
void RecordJSReturnSite(Call *call)
static Register result_register()
void VisitForTypeofValue(Expression *expr)
void EmitDynamicLookupFastCase(VariableProxy *proxy, TypeofState typeof_state, Label *slow, Label *done)
bool ShouldInlineSmiCase(Token::Value op)
Handle< FixedArray > handler_table()
Definition: full-codegen.h:642
void EmitProfilingCounterDecrement(int delta)
void VisitInDuplicateContext(Expression *expr)
static const int kBuiltinsOffset
Definition: objects.h:7458
static const int kNativeContextOffset
Definition: objects.h:7459
static const int kGlobalProxyOffset
Definition: objects.h:7461
static const int kMapOffset
Definition: objects.h:1427
Factory * factory()
Definition: isolate.h:982
static const int kLengthOffset
Definition: objects.h:10072
static const int kValueOffset
Definition: objects.h:7623
static const int kCacheStampOffset
Definition: objects.h:7631
static const int kSharedFunctionInfoOffset
Definition: objects.h:7379
static const int kLiteralsOffset
Definition: objects.h:7382
static const int kCodeEntryOffset
Definition: objects.h:7376
static const int kResultDonePropertyOffset
Definition: objects.h:7142
static const int kFunctionOffset
Definition: objects.h:7123
static const int kGeneratorClosed
Definition: objects.h:7120
static const int kResultValuePropertyOffset
Definition: objects.h:7141
static const int kGeneratorExecuting
Definition: objects.h:7119
static const int kOperandStackOffset
Definition: objects.h:7127
static const int kReceiverOffset
Definition: objects.h:7125
static const int kContextOffset
Definition: objects.h:7124
static const int kContinuationOffset
Definition: objects.h:7126
static const int kInitialMaxFastElementArray
Definition: objects.h:2180
static const int kPropertiesOffset
Definition: objects.h:2193
static const int kElementsOffset
Definition: objects.h:2194
static const int kSize
Definition: objects.h:7772
static const int kInObjectFieldCount
Definition: objects.h:7826
static const int kValueOffset
Definition: objects.h:7546
static const Register ReceiverRegister()
static const Register NameRegister()
static const int kIsUndetectable
Definition: objects.h:6244
static const int kBitFieldOffset
Definition: objects.h:6228
static const int kStringWrapperSafeForDefaultValueOf
Definition: objects.h:6251
static const int kInstanceTypeOffset
Definition: objects.h:6229
static const int kBitField2Offset
Definition: objects.h:6233
static const int kConstructorOffset
Definition: objects.h:6191
static const int kPrototypeOffset
Definition: objects.h:6190
static const int kHashFieldOffset
Definition: objects.h:8486
static void MaybeCallEntryHook(MacroAssembler *masm)
static const int kNoPosition
Definition: assembler.h:317
Scope * outer_scope() const
Definition: scopes.h:333
int num_parameters() const
Definition: scopes.h:321
VariableDeclaration * function() const
Definition: scopes.h:309
int ContextChainLength(Scope *scope)
Definition: scopes.cc:715
void VisitIllegalRedeclaration(AstVisitor *visitor)
Definition: scopes.cc:539
Scope * DeclarationScope()
Definition: scopes.cc:737
Variable * arguments() const
Definition: scopes.h:324
Scope * GlobalScope()
Definition: scopes.cc:728
Variable * parameter(int index) const
Definition: scopes.h:316
static const int kHeaderSize
Definition: objects.h:8941
static const int kFormalParameterCountOffset
Definition: objects.h:6946
static const int kInstanceClassNameOffset
Definition: objects.h:6897
static Smi * FromInt(int value)
Definition: objects-inl.h:1321
static bool IsValid(intptr_t value)
Definition: objects-inl.h:1334
static const int kContextOffset
Definition: frames.h:74
static const int kContextOffset
Definition: frames.h:162
static const int kExpressionsOffset
Definition: frames.h:160
static const int kCallerSPOffset
Definition: frames.h:167
static const int kMarkerOffset
Definition: frames.h:161
static const int kCallerFPOffset
Definition: frames.h:165
static const Register ReceiverRegister()
static const Register NameRegister()
static const Register ValueRegister()
static const unsigned int kContainsCachedArrayIndexMask
Definition: objects.h:8618
static const int kLengthOffset
Definition: objects.h:8802
bool Equals(String *other)
Definition: objects-inl.h:3336
static Handle< Code > GetUninitialized(Isolate *isolate)
Definition: code-stubs.h:2258
static TypeFeedbackId None()
Definition: utils.h:945
static Handle< Object > MegamorphicSentinel(Isolate *isolate)
TypeofState
Definition: codegen.h:46
#define __
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf map
enable harmony numeric enable harmony object literal extensions Optimize object size
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long mode(MIPS only)") DEFINE_BOOL(enable_always_align_csp
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction pairs(ARM only)") DEFINE_BOOL(enable_unaligned_accesses
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be NULL
#define POINTER_SIZE_ALIGN(value)
Definition: globals.h:582
#define UNREACHABLE()
Definition: logging.h:30
#define DCHECK_NE(v1, v2)
Definition: logging.h:207
#define DCHECK(condition)
Definition: logging.h:205
#define DCHECK_EQ(v1, v2)
Definition: logging.h:206
@ CALL_FUNCTION
@ TAG_OBJECT
static int Push(SpecialRPOStackFrame *stack, int depth, BasicBlock *child, int unvisited)
Definition: scheduler.cc:773
Vector< const char > CStrVector(const char *data)
Definition: vector.h:158
const int kPointerSize
Definition: globals.h:129
const Register edx
const uint32_t kStringEncodingMask
Definition: objects.h:555
const Register edi
MemOperand ContextOperand(Register context, int index)
@ DO_SMI_CHECK
Definition: globals.h:641
@ STRING_ADD_CHECK_BOTH
Definition: code-stubs.h:1218
@ TRACK_ALLOCATION_SITE
Definition: objects.h:8085
@ DONT_TRACK_ALLOCATION_SITE
Definition: objects.h:8084
kFeedbackVectorOffset kHiddenPrototypeBit kReadOnlyPrototypeBit kDoNotCacheBit kIsTopLevelBit kAllowLazyCompilationWithoutContext has_duplicate_parameters
Definition: objects-inl.h:5448
@ kSeqStringTag
Definition: objects.h:563
const Register esp
static LifetimePosition Min(LifetimePosition a, LifetimePosition b)
bool IsDeclaredVariableMode(VariableMode mode)
Definition: globals.h:705
const uint32_t kTwoByteStringTag
Definition: objects.h:556
const int kSmiTagSize
Definition: v8.h:5743
Operand FieldOperand(Register object, int offset)
const uint32_t kStringTag
Definition: objects.h:544
@ LAST_NONCALLABLE_SPEC_OBJECT_TYPE
Definition: objects.h:785
@ JS_REGEXP_TYPE
Definition: objects.h:748
@ NUM_OF_CALLABLE_SPEC_OBJECT_TYPES
Definition: objects.h:788
@ JS_VALUE_TYPE
Definition: objects.h:728
@ JS_DATE_TYPE
Definition: objects.h:730
@ FIRST_NONCALLABLE_SPEC_OBJECT_TYPE
Definition: objects.h:784
@ FIRST_JS_PROXY_TYPE
Definition: objects.h:778
@ JS_ARRAY_TYPE
Definition: objects.h:738
@ FIRST_NONSTRING_TYPE
Definition: objects.h:758
@ FIRST_SPEC_OBJECT_TYPE
Definition: objects.h:781
@ LAST_SPEC_OBJECT_TYPE
Definition: objects.h:782
@ JS_FUNCTION_TYPE
Definition: objects.h:749
@ JS_FUNCTION_PROXY_TYPE
Definition: objects.h:726
@ LAST_JS_PROXY_TYPE
Definition: objects.h:779
const uint32_t kOneByteStringTag
Definition: objects.h:557
@ NO_OVERWRITE
Definition: ic-state.h:58
@ OVERWRITE_RIGHT
Definition: ic-state.h:58
const Register esi
const Register eax
bool IsImmutableVariableMode(VariableMode mode)
Definition: globals.h:715
const Register pc
const Register ebx
@ DYNAMIC_GLOBAL
Definition: globals.h:689
@ DYNAMIC_LOCAL
Definition: globals.h:693
@ CONST_LEGACY
Definition: globals.h:671
Condition NegateCondition(Condition cond)
Definition: constants-arm.h:86
static LifetimePosition Max(LifetimePosition a, LifetimePosition b)
const uint32_t kStringRepresentationMask
Definition: objects.h:561
byte * Address
Definition: globals.h:101
NilValue
Definition: v8.h:97
@ kNullValue
Definition: v8.h:97
@ NOT_CONTEXTUAL
Definition: objects.h:174
OStream & dec(OStream &os)
Definition: ostreams.cc:122
const int kIntSize
Definition: globals.h:124
Operand FixedArrayElementOperand(Register array, Register index_as_smi, int additional_offset=0)
const int kSmiShiftSize
Definition: v8.h:5805
const Register no_reg
kFeedbackVectorOffset flag
Definition: objects-inl.h:5418
MemOperand GlobalObjectOperand()
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
const intptr_t kSmiTagMask
Definition: v8.h:5744
const Register ebp
@ RECORD_CONSTRUCTOR_TARGET
Definition: globals.h:480
const int kSmiTag
Definition: v8.h:5742
const uint32_t kIsNotStringMask
Definition: objects.h:543
@ NO_CALL_FUNCTION_FLAGS
Definition: globals.h:469
@ CALL_AS_METHOD
Definition: globals.h:470
@ STRING_INDEX_IS_NUMBER
Definition: code-stubs.h:1590
void CopyBytes(uint8_t *target, uint8_t *source)
bool IsFastObjectElementsKind(ElementsKind kind)
const Register ecx
Debugger support for the V8 JavaScript engine.
Definition: accessors.cc:20
#define VOID
#define IN
PropertyAttributes
@ NONE
@ READ_ONLY