V8 Project
full-codegen-x87.cc
Go to the documentation of this file.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/v8.h"
6 
7 #if V8_TARGET_ARCH_X87
8 
9 #include "src/code-factory.h"
10 #include "src/code-stubs.h"
11 #include "src/codegen.h"
12 #include "src/compiler.h"
13 #include "src/debug.h"
14 #include "src/full-codegen.h"
15 #include "src/ic/ic.h"
16 #include "src/isolate-inl.h"
17 #include "src/parser.h"
18 #include "src/scopes.h"
19 
20 namespace v8 {
21 namespace internal {
22 
23 #define __ ACCESS_MASM(masm_)
24 
25 
// A patchable jump site used for inlined smi checks.  The site initially
// emits a jump on the carry flag: `test reg, kSmiTagMask` always clears CF
// on x86, so the jc emitted by EmitJumpIfSmi is never taken and the jnc
// emitted by EmitJumpIfNotSmi is always taken.  The IC patching machinery
// later rewrites jc -> jz and jnc -> jnz (see EmitJump below) so the site
// becomes a real smi-tag test.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;  // Set once EmitPatchInfo has recorded this site.
#endif
  }

  ~JumpPatchSite() {
    // A bound patch site must have had its patch info emitted, and a site
    // that emitted patch info must have been bound.
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // Jump to |target| if |reg| is not a smi.  Until patched, the branch is
  // unconditionally taken (test clears CF, so jnc always jumps).
  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, distance);  // Always taken before patched.
  }

  // Jump to |target| if |reg| is a smi.  Until patched, the branch is
  // never taken (test clears CF, so jc never jumps).
  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, distance);  // Never taken before patched.
  }

  // Record the byte distance back to the patch site inside a
  // `test eax, imm8` instruction, which the patching code can later decode;
  // emit a single nop instead when no inlined smi code was generated.
  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      // The delta must fit in the 8-bit immediate of the marker instruction.
      DCHECK(is_uint8(delta_to_patch_site));
      __ test(eax, Immediate(delta_to_patch_site));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance distance) {
    // Only one jump per site, and it must precede EmitPatchInfo.
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    DCHECK(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, distance);
  }

  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;  // Whether EmitPatchInfo has run for this site.
#endif
};
80 
81 
82 // Generate code for a JS function. On entry to the function the receiver
83 // and arguments have been pushed on the stack left to right, with the
84 // return address on top of them. The actual argument count matches the
85 // formal parameter count expected by the function.
86 //
87 // The live registers are:
88 // o edi: the JS function object being called (i.e. ourselves)
89 // o esi: our context
90 // o ebp: our caller's frame pointer
91 // o esp: stack pointer (pointing to return address)
92 //
93 // The function builds a JS frame. Please see JavaScriptFrameConstants in
94 // frames-x87.h for its layout.
96  CompilationInfo* info = info_;
98  isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
99 
100  profiling_counter_ = isolate()->factory()->NewCell(
101  Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
102  SetFunctionPosition(function());
103  Comment cmnt(masm_, "[ function compiled by full code generator");
104 
106 
107 #ifdef DEBUG
108  if (strlen(FLAG_stop_at) > 0 &&
109  info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
110  __ int3();
111  }
112 #endif
113 
114  // Sloppy mode functions and builtins need to replace the receiver with the
115  // global proxy when called as functions (without an explicit receiver
116  // object).
117  if (info->strict_mode() == SLOPPY && !info->is_native()) {
118  Label ok;
119  // +1 for return address.
120  int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
121  __ mov(ecx, Operand(esp, receiver_offset));
122 
123  __ cmp(ecx, isolate()->factory()->undefined_value());
124  __ j(not_equal, &ok, Label::kNear);
125 
126  __ mov(ecx, GlobalObjectOperand());
128 
129  __ mov(Operand(esp, receiver_offset), ecx);
130 
131  __ bind(&ok);
132  }
133 
134  // Open a frame scope to indicate that there is a frame on the stack. The
135  // MANUAL indicates that the scope shouldn't actually generate code to set up
136  // the frame (that is done below).
137  FrameScope frame_scope(masm_, StackFrame::MANUAL);
138 
139  info->set_prologue_offset(masm_->pc_offset());
140  __ Prologue(info->IsCodePreAgingActive());
141  info->AddNoFrameRange(0, masm_->pc_offset());
142 
143  { Comment cmnt(masm_, "[ Allocate locals");
144  int locals_count = info->scope()->num_stack_slots();
145  // Generators allocate locals, if any, in context slots.
146  DCHECK(!info->function()->is_generator() || locals_count == 0);
147  if (locals_count == 1) {
148  __ push(Immediate(isolate()->factory()->undefined_value()));
149  } else if (locals_count > 1) {
150  if (locals_count >= 128) {
151  Label ok;
152  __ mov(ecx, esp);
153  __ sub(ecx, Immediate(locals_count * kPointerSize));
154  ExternalReference stack_limit =
155  ExternalReference::address_of_real_stack_limit(isolate());
156  __ cmp(ecx, Operand::StaticVariable(stack_limit));
157  __ j(above_equal, &ok, Label::kNear);
158  __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
159  __ bind(&ok);
160  }
161  __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
162  const int kMaxPushes = 32;
163  if (locals_count >= kMaxPushes) {
164  int loop_iterations = locals_count / kMaxPushes;
165  __ mov(ecx, loop_iterations);
166  Label loop_header;
167  __ bind(&loop_header);
168  // Do pushes.
169  for (int i = 0; i < kMaxPushes; i++) {
170  __ push(eax);
171  }
172  __ dec(ecx);
173  __ j(not_zero, &loop_header, Label::kNear);
174  }
175  int remaining = locals_count % kMaxPushes;
176  // Emit the remaining pushes.
177  for (int i = 0; i < remaining; i++) {
178  __ push(eax);
179  }
180  }
181  }
182 
183  bool function_in_register = true;
184 
185  // Possibly allocate a local context.
186  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
187  if (heap_slots > 0) {
188  Comment cmnt(masm_, "[ Allocate context");
189  bool need_write_barrier = true;
190  // Argument to NewContext is the function, which is still in edi.
191  if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
192  __ push(edi);
193  __ Push(info->scope()->GetScopeInfo());
194  __ CallRuntime(Runtime::kNewGlobalContext, 2);
195  } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
196  FastNewContextStub stub(isolate(), heap_slots);
197  __ CallStub(&stub);
198  // Result of FastNewContextStub is always in new space.
199  need_write_barrier = false;
200  } else {
201  __ push(edi);
202  __ CallRuntime(Runtime::kNewFunctionContext, 1);
203  }
204  function_in_register = false;
205  // Context is returned in eax. It replaces the context passed to us.
206  // It's saved in the stack and kept live in esi.
207  __ mov(esi, eax);
209 
210  // Copy parameters into context if necessary.
211  int num_parameters = info->scope()->num_parameters();
212  for (int i = 0; i < num_parameters; i++) {
213  Variable* var = scope()->parameter(i);
214  if (var->IsContextSlot()) {
215  int parameter_offset = StandardFrameConstants::kCallerSPOffset +
216  (num_parameters - 1 - i) * kPointerSize;
217  // Load parameter from stack.
218  __ mov(eax, Operand(ebp, parameter_offset));
219  // Store it in the context.
220  int context_offset = Context::SlotOffset(var->index());
221  __ mov(Operand(esi, context_offset), eax);
222  // Update the write barrier. This clobbers eax and ebx.
223  if (need_write_barrier) {
224  __ RecordWriteContextSlot(esi, context_offset, eax, ebx,
226  } else if (FLAG_debug_code) {
227  Label done;
228  __ JumpIfInNewSpace(esi, eax, &done, Label::kNear);
229  __ Abort(kExpectedNewSpaceObject);
230  __ bind(&done);
231  }
232  }
233  }
234  }
235 
236  Variable* arguments = scope()->arguments();
237  if (arguments != NULL) {
238  // Function uses arguments object.
239  Comment cmnt(masm_, "[ Allocate arguments object");
240  if (function_in_register) {
241  __ push(edi);
242  } else {
244  }
245  // Receiver is just before the parameters on the caller's stack.
246  int num_parameters = info->scope()->num_parameters();
247  int offset = num_parameters * kPointerSize;
248  __ lea(edx,
249  Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
250  __ push(edx);
251  __ push(Immediate(Smi::FromInt(num_parameters)));
252  // Arguments to ArgumentsAccessStub:
253  // function, receiver address, parameter count.
254  // The stub will rewrite receiver and parameter count if the previous
255  // stack frame was an arguments adapter frame.
257  if (strict_mode() == STRICT) {
259  } else if (function()->has_duplicate_parameters()) {
261  } else {
263  }
264  ArgumentsAccessStub stub(isolate(), type);
265  __ CallStub(&stub);
266 
267  SetVar(arguments, eax, ebx, edx);
268  }
269 
270  if (FLAG_trace) {
271  __ CallRuntime(Runtime::kTraceEnter, 0);
272  }
273 
274  // Visit the declarations and body unless there is an illegal
275  // redeclaration.
276  if (scope()->HasIllegalRedeclaration()) {
277  Comment cmnt(masm_, "[ Declarations");
279 
280  } else {
282  { Comment cmnt(masm_, "[ Declarations");
283  // For named function expressions, declare the function name as a
284  // constant.
285  if (scope()->is_function_scope() && scope()->function() != NULL) {
286  VariableDeclaration* function = scope()->function();
287  DCHECK(function->proxy()->var()->mode() == CONST ||
288  function->proxy()->var()->mode() == CONST_LEGACY);
289  DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
290  VisitVariableDeclaration(function);
291  }
292  VisitDeclarations(scope()->declarations());
293  }
294 
295  { Comment cmnt(masm_, "[ Stack check");
297  Label ok;
298  ExternalReference stack_limit
299  = ExternalReference::address_of_stack_limit(isolate());
300  __ cmp(esp, Operand::StaticVariable(stack_limit));
301  __ j(above_equal, &ok, Label::kNear);
302  __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
303  __ bind(&ok);
304  }
305 
306  { Comment cmnt(masm_, "[ Body");
307  DCHECK(loop_depth() == 0);
308  VisitStatements(function()->body());
309  DCHECK(loop_depth() == 0);
310  }
311  }
312 
313  // Always emit a 'return undefined' in case control fell off the end of
314  // the body.
315  { Comment cmnt(masm_, "[ return <undefined>;");
316  __ mov(eax, isolate()->factory()->undefined_value());
318  }
319 }
320 
321 
323  __ Move(eax, Immediate(Smi::FromInt(0)));
324 }
325 
326 
328  __ mov(ebx, Immediate(profiling_counter_));
330  Immediate(Smi::FromInt(delta)));
331 }
332 
333 
335  int reset_value = FLAG_interrupt_budget;
336  __ mov(ebx, Immediate(profiling_counter_));
338  Immediate(Smi::FromInt(reset_value)));
339 }
340 
341 
342 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
343  Label* back_edge_target) {
344  Comment cmnt(masm_, "[ Back edge bookkeeping");
345  Label ok;
346 
347  DCHECK(back_edge_target->is_bound());
348  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
349  int weight = Min(kMaxBackEdgeWeight,
350  Max(1, distance / kCodeSizeMultiplier));
352  __ j(positive, &ok, Label::kNear);
353  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
354 
355  // Record a mapping of this PC offset to the OSR id. This is used to find
356  // the AST id from the unoptimized code in order to use it as a key into
357  // the deoptimization input data found in the optimized code.
358  RecordBackEdge(stmt->OsrEntryId());
359 
361 
362  __ bind(&ok);
363  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
364  // Record a mapping of the OSR id to this PC. This is used if the OSR
365  // entry becomes the target of a bailout. We don't expect it to be, but
366  // we want it to work if it is.
367  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
368 }
369 
370 
372  Comment cmnt(masm_, "[ Return sequence");
373  if (return_label_.is_bound()) {
374  __ jmp(&return_label_);
375  } else {
376  // Common return label
377  __ bind(&return_label_);
378  if (FLAG_trace) {
379  __ push(eax);
380  __ CallRuntime(Runtime::kTraceExit, 1);
381  }
382  // Pretend that the exit is a backwards jump to the entry.
383  int weight = 1;
384  if (info_->ShouldSelfOptimize()) {
385  weight = FLAG_interrupt_budget / FLAG_self_opt_count;
386  } else {
387  int distance = masm_->pc_offset();
388  weight = Min(kMaxBackEdgeWeight,
389  Max(1, distance / kCodeSizeMultiplier));
390  }
392  Label ok;
393  __ j(positive, &ok, Label::kNear);
394  __ push(eax);
395  __ call(isolate()->builtins()->InterruptCheck(),
397  __ pop(eax);
399  __ bind(&ok);
400 #ifdef DEBUG
401  // Add a label for checking the size of the code used for returning.
402  Label check_exit_codesize;
403  masm_->bind(&check_exit_codesize);
404 #endif
405  SetSourcePosition(function()->end_position() - 1);
406  __ RecordJSReturn();
407  // Do not use the leave instruction here because it is too short to
408  // patch with the code required by the debugger.
409  __ mov(esp, ebp);
410  int no_frame_start = masm_->pc_offset();
411  __ pop(ebp);
412 
413  int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
414  __ Ret(arguments_bytes, ecx);
415  // Check that the size of the code used for returning is large enough
416  // for the debugger's requirements.
418  masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
419  info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
420  }
421 }
422 
423 
void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  // An effect context discards the value, so no code is emitted for the
  // variable reference; only sanity-check the variable's location.
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
}
427 
428 
void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  // Load the variable's current value into the result (accumulator)
  // register.
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}
433 
434 
435 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
436  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
437  MemOperand operand = codegen()->VarOperand(var, result_register());
438  // Memory operands can be pushed directly.
439  __ push(operand);
440 }
441 
442 
void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  // Prepare a bailout point before the value is split into the
  // true/false branches; no normalization of the value is required here.
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}
449 
450 
452  UNREACHABLE(); // Not used on X87.
453 }
454 
455 
457  Heap::RootListIndex index) const {
458  UNREACHABLE(); // Not used on X87.
459 }
460 
461 
463  Heap::RootListIndex index) const {
464  UNREACHABLE(); // Not used on X87.
465 }
466 
467 
469  UNREACHABLE(); // Not used on X87.
470 }
471 
472 
void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
  // A literal in an effect context produces no value and has no side
  // effects, so there is nothing to emit.
}
475 
476 
478  Handle<Object> lit) const {
479  if (lit->IsSmi()) {
480  __ SafeMove(result_register(), Immediate(lit));
481  } else {
482  __ Move(result_register(), Immediate(lit));
483  }
484 }
485 
486 
487 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
488  if (lit->IsSmi()) {
489  __ SafePush(Immediate(lit));
490  } else {
491  __ push(Immediate(lit));
492  }
493 }
494 
495 
// Resolve a literal's boolean value at compile time where possible and
// emit a direct jump to the corresponding branch; only literals whose
// truthiness is not statically known here fall back to a runtime DoTest.
void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    // Statically falsy values.
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    // Statically truthy values (non-undetectable objects are truthy).
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    // Strings are falsy exactly when empty.
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    // Smis are falsy exactly when zero.
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), lit);
    codegen()->DoTest(this);
  }
}
524 
525 
527  Register reg) const {
528  DCHECK(count > 0);
529  __ Drop(count);
530 }
531 
532 
534  int count,
535  Register reg) const {
536  DCHECK(count > 0);
537  __ Drop(count);
538  __ Move(result_register(), reg);
539 }
540 
541 
543  Register reg) const {
544  DCHECK(count > 0);
545  if (count > 1) __ Drop(count - 1);
546  __ mov(Operand(esp, 0), reg);
547 }
548 
549 
551  Register reg) const {
552  DCHECK(count > 0);
553  // For simplicity we always test the accumulator register.
554  __ Drop(count);
555  __ Move(result_register(), reg);
556  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
557  codegen()->DoTest(this);
558 }
559 
560 
void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  // In an effect context the materialized value is unused, so both labels
  // must be the same; bind the single label and emit nothing else.
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}
566 
567 
569  Label* materialize_true,
570  Label* materialize_false) const {
571  Label done;
572  __ bind(materialize_true);
573  __ mov(result_register(), isolate()->factory()->true_value());
574  __ jmp(&done, Label::kNear);
575  __ bind(materialize_false);
576  __ mov(result_register(), isolate()->factory()->false_value());
577  __ bind(&done);
578 }
579 
580 
582  Label* materialize_true,
583  Label* materialize_false) const {
584  Label done;
585  __ bind(materialize_true);
586  __ push(Immediate(isolate()->factory()->true_value()));
587  __ jmp(&done, Label::kNear);
588  __ bind(materialize_false);
589  __ push(Immediate(isolate()->factory()->false_value()));
590  __ bind(&done);
591 }
592 
593 
void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  // In a test context the branch targets are already the context's own
  // true/false labels, so no materialization code is needed.
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}
599 
600 
602 }
603 
604 
606  Handle<Object> value = flag
607  ? isolate()->factory()->true_value()
608  : isolate()->factory()->false_value();
609  __ mov(result_register(), value);
610 }
611 
612 
614  Handle<Object> value = flag
615  ? isolate()->factory()->true_value()
616  : isolate()->factory()->false_value();
617  __ push(Immediate(value));
618 }
619 
620 
622  codegen()->PrepareForBailoutBeforeSplit(condition(),
623  true,
624  true_label_,
625  false_label_);
626  if (flag) {
627  if (true_label_ != fall_through_) __ jmp(true_label_);
628  } else {
629  if (false_label_ != fall_through_) __ jmp(false_label_);
630  }
631 }
632 
633 
634 void FullCodeGenerator::DoTest(Expression* condition,
635  Label* if_true,
636  Label* if_false,
637  Label* fall_through) {
638  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
639  CallIC(ic, condition->test_id());
641  // The stub returns nonzero for true.
642  Split(not_zero, if_true, if_false, fall_through);
643 }
644 
645 
647  Label* if_true,
648  Label* if_false,
649  Label* fall_through) {
650  if (if_false == fall_through) {
651  __ j(cc, if_true);
652  } else if (if_true == fall_through) {
653  __ j(NegateCondition(cc), if_false);
654  } else {
655  __ j(cc, if_true);
656  __ jmp(if_false);
657  }
658 }
659 
660 
662  DCHECK(var->IsStackAllocated());
663  // Offset is negative because higher indexes are at lower addresses.
664  int offset = -var->index() * kPointerSize;
665  // Adjust by a (parameter or local) base offset.
666  if (var->IsParameter()) {
667  offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
668  } else {
670  }
671  return Operand(ebp, offset);
672 }
673 
674 
675 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
676  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
677  if (var->IsContextSlot()) {
678  int context_chain_length = scope()->ContextChainLength(var->scope());
679  __ LoadContext(scratch, context_chain_length);
680  return ContextOperand(scratch, var->index());
681  } else {
682  return StackOperand(var);
683  }
684 }
685 
686 
687 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
688  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
689  MemOperand location = VarOperand(var, dest);
690  __ mov(dest, location);
691 }
692 
693 
// Store |src| into |var|'s slot.  |scratch0| is used to address context
// slots and |scratch1| is used by the write barrier; all three registers
// must be distinct.
void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ mov(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    // The write barrier clobbers its registers, so esi (the current
    // context) must not be among them.
    DCHECK(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
  }
}
712 
713 
715  bool should_normalize,
716  Label* if_true,
717  Label* if_false) {
718  // Only prepare for bailouts before splits if we're in a test
719  // context. Otherwise, we let the Visit function deal with the
720  // preparation to avoid preparing with the same AST id twice.
721  if (!context()->IsTest() || !info_->IsOptimizable()) return;
722 
723  Label skip;
724  if (should_normalize) __ jmp(&skip, Label::kNear);
725  PrepareForBailout(expr, TOS_REG);
726  if (should_normalize) {
727  __ cmp(eax, isolate()->factory()->true_value());
728  Split(equal, if_true, if_false, NULL);
729  __ bind(&skip);
730  }
731 }
732 
733 
735  // The variable in the declaration always resides in the current context.
736  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
737  if (generate_debug_code_) {
738  // Check that we're not inside a with or catch context.
740  __ cmp(ebx, isolate()->factory()->with_context_map());
741  __ Check(not_equal, kDeclarationInWithContext);
742  __ cmp(ebx, isolate()->factory()->catch_context_map());
743  __ Check(not_equal, kDeclarationInCatchContext);
744  }
745 }
746 
747 
748 void FullCodeGenerator::VisitVariableDeclaration(
749  VariableDeclaration* declaration) {
750  // If it was not possible to allocate the variable at compile time, we
751  // need to "declare" it at runtime to make sure it actually exists in the
752  // local context.
753  VariableProxy* proxy = declaration->proxy();
754  VariableMode mode = declaration->mode();
755  Variable* variable = proxy->var();
756  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
757  switch (variable->location()) {
759  globals_->Add(variable->name(), zone());
760  globals_->Add(variable->binding_needs_init()
761  ? isolate()->factory()->the_hole_value()
762  : isolate()->factory()->undefined_value(), zone());
763  break;
764 
765  case Variable::PARAMETER:
766  case Variable::LOCAL:
767  if (hole_init) {
768  Comment cmnt(masm_, "[ VariableDeclaration");
769  __ mov(StackOperand(variable),
770  Immediate(isolate()->factory()->the_hole_value()));
771  }
772  break;
773 
774  case Variable::CONTEXT:
775  if (hole_init) {
776  Comment cmnt(masm_, "[ VariableDeclaration");
778  __ mov(ContextOperand(esi, variable->index()),
779  Immediate(isolate()->factory()->the_hole_value()));
780  // No write barrier since the hole value is in old space.
781  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
782  }
783  break;
784 
785  case Variable::LOOKUP: {
786  Comment cmnt(masm_, "[ VariableDeclaration");
787  __ push(esi);
788  __ push(Immediate(variable->name()));
789  // VariableDeclaration nodes are always introduced in one of four modes.
791  PropertyAttributes attr =
793  __ push(Immediate(Smi::FromInt(attr)));
794  // Push initial value, if any.
795  // Note: For variables we must not push an initial value (such as
796  // 'undefined') because we may have a (legal) redeclaration and we
797  // must not destroy the current value.
798  if (hole_init) {
799  __ push(Immediate(isolate()->factory()->the_hole_value()));
800  } else {
801  __ push(Immediate(Smi::FromInt(0))); // Indicates no initial value.
802  }
803  __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
804  break;
805  }
806  }
807 }
808 
809 
810 void FullCodeGenerator::VisitFunctionDeclaration(
811  FunctionDeclaration* declaration) {
812  VariableProxy* proxy = declaration->proxy();
813  Variable* variable = proxy->var();
814  switch (variable->location()) {
815  case Variable::UNALLOCATED: {
816  globals_->Add(variable->name(), zone());
817  Handle<SharedFunctionInfo> function =
818  Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
819  // Check for stack-overflow exception.
820  if (function.is_null()) return SetStackOverflow();
821  globals_->Add(function, zone());
822  break;
823  }
824 
825  case Variable::PARAMETER:
826  case Variable::LOCAL: {
827  Comment cmnt(masm_, "[ FunctionDeclaration");
828  VisitForAccumulatorValue(declaration->fun());
829  __ mov(StackOperand(variable), result_register());
830  break;
831  }
832 
833  case Variable::CONTEXT: {
834  Comment cmnt(masm_, "[ FunctionDeclaration");
836  VisitForAccumulatorValue(declaration->fun());
837  __ mov(ContextOperand(esi, variable->index()), result_register());
838  // We know that we have written a function, which is not a smi.
839  __ RecordWriteContextSlot(esi, Context::SlotOffset(variable->index()),
842  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
843  break;
844  }
845 
846  case Variable::LOOKUP: {
847  Comment cmnt(masm_, "[ FunctionDeclaration");
848  __ push(esi);
849  __ push(Immediate(variable->name()));
850  __ push(Immediate(Smi::FromInt(NONE)));
851  VisitForStackValue(declaration->fun());
852  __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
853  break;
854  }
855  }
856 }
857 
858 
859 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
860  Variable* variable = declaration->proxy()->var();
861  DCHECK(variable->location() == Variable::CONTEXT);
862  DCHECK(variable->interface()->IsFrozen());
863 
864  Comment cmnt(masm_, "[ ModuleDeclaration");
866 
867  // Load instance object.
868  __ LoadContext(eax, scope_->ContextChainLength(scope_->GlobalScope()));
869  __ mov(eax, ContextOperand(eax, variable->interface()->Index()));
871 
872  // Assign it.
873  __ mov(ContextOperand(esi, variable->index()), eax);
874  // We know that we have written a module, which is not a smi.
875  __ RecordWriteContextSlot(esi, Context::SlotOffset(variable->index()), eax,
878  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
879 
880  // Traverse into body.
881  Visit(declaration->module());
882 }
883 
884 
885 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
886  VariableProxy* proxy = declaration->proxy();
887  Variable* variable = proxy->var();
888  switch (variable->location()) {
890  // TODO(rossberg)
891  break;
892 
893  case Variable::CONTEXT: {
894  Comment cmnt(masm_, "[ ImportDeclaration");
896  // TODO(rossberg)
897  break;
898  }
899 
900  case Variable::PARAMETER:
901  case Variable::LOCAL:
902  case Variable::LOOKUP:
903  UNREACHABLE();
904  }
905 }
906 
907 
void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg): export declarations are not yet implemented in the
  // full code generator; intentionally a no-op for now.
}
911 
912 
913 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
914  // Call the runtime to declare the globals.
915  __ push(esi); // The context is the first argument.
916  __ Push(pairs);
918  __ CallRuntime(Runtime::kDeclareGlobals, 3);
919  // Return value is ignored.
920 }
921 
922 
// Declare the given module descriptions by delegating to the runtime.
void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}
929 
930 
// Compile a switch statement as a linear sequence of '===' comparisons of
// each case label against the switch value (kept on the stack), followed
// by the case bodies.  Smi comparisons may be inlined via a JumpPatchSite;
// the general case goes through the CompareIC.
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ mov(edx, Operand(esp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      // Fast path: if both the switch value (edx) and the label value
      // (eax) are smis, compare them directly and skip the IC.
      Label slow_case;
      __ mov(ecx, edx);
      __ or_(ecx, eax);
      patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);

      __ cmp(edx, eax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    // Register a bailout point after the IC call; the bailout path
    // re-tests the comparison result against the true value.
    Label skip;
    __ jmp(&skip, Label::kNear);
    PrepareForBailout(clause, TOS_REG);
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(not_equal, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    // Normal path: the CompareIC returns zero in eax on equality.
    __ test(eax, eax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
1023 
1024 
// Emits the full-codegen sequence for a for-in statement.  The enumerable is
// converted to a JS object, its property names are obtained either from the
// map's enum cache (fast path) or from the runtime (slow path), and the loop
// then iterates over a five-slot stack frame:
//   [esp+4*kPointerSize] enumerable object
//   [esp+3*kPointerSize] map (fast case) or Smi flag (slow case)
//   [esp+2*kPointerSize] enum cache / fixed array of names
//   [esp+1*kPointerSize] length (smi)
//   [esp+0*kPointerSize] current index (smi)
// NOTE(review): this listing elides several source lines (the embedded line
// numbering jumps); dangling fragments below are marked where evident.
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  // Feedback-vector slot used further down to record a megamorphic
  // (slow-mode) enumerable at this site.
  int slot = stmt->ForInFeedbackSlot();

  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  // NOTE(review): one line is elided here in this listing (presumably the
  // loop-depth increment that pairs with the decrement before the closing
  // brace) — verify against upstream.

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, &exit);
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(eax, &convert, Label::kNear);
  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
  __ j(above_equal, &done_convert, Label::kNear);
  __ bind(&convert);
  __ push(eax);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  // Keep the (converted) enumerable on the stack for the whole loop.
  __ push(eax);

  // Check for proxies.
  Label call_runtime, use_cache, fixed_array;
  __ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx);
  __ j(below_equal, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(&call_runtime);

  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(eax);
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  // NOTE(review): the cmp this operand belongs to is elided in this listing;
  // it appears to compare the result's map against meta_map to distinguish a
  // map result (enum cache usable) from a fixed-array result.
         isolate()->factory()->meta_map());
  __ j(not_equal, &fixed_array);


  // We got a map in register eax. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(edx, eax);
  __ cmp(edx, Immediate(Smi::FromInt(0)));
  __ j(equal, &no_descriptors);

  __ LoadInstanceDescriptors(eax, ecx);
  // NOTE(review): lines loading the enum cache out of the descriptor array
  // into ecx are elided here in this listing.

  // Set up the four remaining stack slots.
  __ push(eax);  // Map.
  __ push(ecx);  // Enumeration cache.
  __ push(edx);  // Number of valid entries for the map in the enum cache.
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
  __ jmp(&loop);

  // Zero valid entries: drop the pushed enumerable and leave the loop.
  __ bind(&no_descriptors);
  __ add(esp, Immediate(kPointerSize));
  __ jmp(&exit);

  // We got a fixed array in register eax. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  // No need for a write barrier, we are storing a Smi in the feedback vector.
  __ LoadHeapObject(ebx, FeedbackVector());
  // NOTE(review): the mov storing the sentinel into the feedback-vector slot
  // is elided in this listing; this is its immediate operand.
         Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate())));

  __ mov(ebx, Immediate(Smi::FromInt(1)));  // Smi indicates slow check
  __ mov(ecx, Operand(esp, 0 * kPointerSize));  // Get enumerated object
  __ CmpObjectType(ecx, LAST_JS_PROXY_TYPE, ecx);
  __ j(above, &non_proxy);
  __ Move(ebx, Immediate(Smi::FromInt(0)));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ push(ebx);  // Smi
  __ push(eax);  // Array
  // NOTE(review): the load of the fixed array length into eax is elided in
  // this listing (the push below expects the length, not the array).
  __ push(eax);  // Fixed array length (as smi).
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  __ mov(eax, Operand(esp, 0 * kPointerSize));  // Get the current index.
  __ cmp(eax, Operand(esp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register ebx.
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
  // NOTE(review): the indexed element load into ebx is elided in this
  // listing; the line above only loads the array/cache pointer.

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register edx.
  __ mov(edx, Operand(esp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ mov(ecx, Operand(esp, 4 * kPointerSize));
  // NOTE(review): the cmp of edx against the enumerable's current map is
  // elided in this listing; the branch below consumes its flags.
  __ j(equal, &update_each, Label::kNear);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  DCHECK(Smi::FromInt(0) == 0);
  __ test(edx, edx);
  __ j(zero, &update_each);

  // Convert the entry to a string or null if it isn't a property
  // anymore. If the property has been removed while iterating, we
  // just skip it.
  __ push(ecx);  // Enumerable.
  __ push(ebx);  // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ test(eax, eax);
  __ j(equal, loop_statement.continue_label());
  __ mov(ebx, eax);

  // Update the 'each' property or variable from the possibly filtered
  // entry in register ebx.
  __ bind(&update_each);
  __ mov(result_register(), ebx);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ jmp(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ add(esp, Immediate(5 * kPointerSize));

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  // NOTE(review): the loop-depth decrement line is elided here in this
  // listing — verify against upstream.
}
1192 
1193 
// Emits a for-of loop via the iterator protocol.  The AST has already been
// desugared into the sub-expressions used below: assign_iterator() obtains
// iterable[Symbol.iterator](), next_result() calls iterator.next(),
// result_done() tests result.done, and assign_each() binds result.value to
// the loop variable.
// NOTE(review): two source lines are elided in this listing (after the
// Iteration declaration and before the closing brace — presumably the
// loop-depth increment/decrement pair); verify against upstream.
void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");
  SetStatementPosition(stmt);

  Iteration loop_statement(this, stmt);

  // var iterator = iterable[Symbol.iterator]();
  VisitForEffect(stmt->assign_iterator());

  // Loop entry.
  __ bind(loop_statement.continue_label());

  // result = iterator.next()
  VisitForEffect(stmt->next_result());

  // if (result.done) break;
  Label result_not_done;
  // Branch to break_label when result.done is true; both fall-through
  // targets are result_not_done.
  VisitForControl(stmt->result_done(),
                  loop_statement.break_label(),
                  &result_not_done,
                  &result_not_done);
  __ bind(&result_not_done);

  // each = result.value
  VisitForEffect(stmt->assign_each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ jmp(loop_statement.continue_label());

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
}
1234 
1235 
1236 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1237  bool pretenure) {
1238  // Use the fast case closure allocation code that allocates in new
1239  // space for nested functions that don't need literals cloning. If
1240  // we're running with the --always-opt or the --prepare-always-opt
1241  // flag, we need to use the runtime function so that the new function
1242  // we are creating here gets a chance to have its code optimized and
1243  // doesn't just get a copy of the existing unoptimized code.
1244  if (!FLAG_always_opt &&
1245  !FLAG_prepare_always_opt &&
1246  !pretenure &&
1247  scope()->is_function_scope() &&
1248  info->num_literals() == 0) {
1249  FastNewClosureStub stub(isolate(), info->strict_mode(), info->kind());
1250  __ mov(ebx, Immediate(info));
1251  __ CallStub(&stub);
1252  } else {
1253  __ push(esi);
1254  __ push(Immediate(info));
1255  __ push(Immediate(pretenure
1256  ? isolate()->factory()->true_value()
1257  : isolate()->factory()->false_value()));
1258  __ CallRuntime(Runtime::kNewClosure, 3);
1259  }
1260  context()->Plug(eax);
1261 }
1262 
1263 
1264 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1265  Comment cmnt(masm_, "[ VariableProxy");
1266  EmitVariableLoad(expr);
1267 }
1268 
1269 
// Loads the [[HomeObject]] of the enclosing method (for 'super' references)
// into eax via a named load IC, and throws if the result is undefined,
// i.e. the reference does not occur inside a method.
void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
  Comment cnmt(masm_, "[ SuperReference ");

  // NOTE(review): the load of the IC receiver register (presumably the
  // current JSFunction from the frame) is elided in this listing — verify
  // against upstream.

  Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
  __ mov(LoadDescriptor::NameRegister(), home_object_symbol);

  CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());

  // undefined means we are not inside a method: throw.
  __ cmp(eax, isolate()->factory()->undefined_value());
  Label done;
  __ j(not_equal, &done);
  __ CallRuntime(Runtime::kThrowNonMethodError, 0);
  __ bind(&done);
}
1287 
1288 
// Emits the fast path for loading a global that might be shadowed by
// eval-introduced bindings: walk the visible context chain checking that no
// context has an extension object (jumping to |slow| if one does), then load
// the variable with a load IC.  |typeof_state| selects the contextual mode
// so that 'typeof x' does not throw for missing globals.
void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register context = esi;
  Register temp = edx;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        // NOTE(review): the cmp of the context's EXTENSION_INDEX slot is
        // elided in this listing; this is its immediate operand.
               Immediate(0));
        __ j(not_equal, slow);
      }
      // Load next context in chain.
      // NOTE(review): the mov through PREVIOUS_INDEX into temp is elided in
      // this listing.
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions. If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Loop up the context chain. There is no frame effect so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(temp)) {
      __ mov(temp, context);
    }
    __ bind(&next);
    // Terminate at native context.
    // NOTE(review): the cmp of the context's map is elided in this listing;
    // this is its immediate operand.
           Immediate(isolate()->factory()->native_context_map()));
    __ j(equal, &fast, Label::kNear);
    // Check that extension is NULL.
    __ cmp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
    __ j(not_equal, slow);
    // Load next context in chain.
    __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
    __ jmp(&next);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a global
  // load IC call.
  // NOTE(review): the load of the IC receiver (global object) is elided in
  // this listing.
  __ mov(LoadDescriptor::NameRegister(), proxy->var()->name());
  if (FLAG_vector_ics) {
    // NOTE(review): the mov of the vector-IC slot register is elided; this
    // is its immediate operand.
           Immediate(Smi::FromInt(proxy->VariableFeedbackSlot())));
  }

  // Inside typeof, a missing global must not throw, so load
  // non-contextually.
  ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
      ? NOT_CONTEXTUAL
      : CONTEXTUAL;

  CallLoadIC(mode);
}
1352 
1353 
// NOTE(review): the first line of this definition is elided in this listing
// — presumably "MemOperand FullCodeGenerator::
// ContextSlotOperandCheckExtensions(Variable* var," given the call site and
// the ContextOperand return below; verify against upstream.
                                                  Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = esi;
  Register temp = ebx;

  // Walk contexts from the current scope out to the variable's scope,
  // bailing to |slow| if any intervening context has an extension object.
  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        // NOTE(review): the cmp of the context's EXTENSION_INDEX slot is
        // elided in this listing; this is its immediate operand.
               Immediate(0));
        __ j(not_equal, slow);
      }
      // NOTE(review): the mov loading the next context in the chain into
      // temp is elided in this listing.
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
  }
  // Check that last extension is NULL.
  __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
  __ j(not_equal, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return an esi-based operand (the write barrier cannot be allowed to
  // destroy the esi register).
  return ContextOperand(context, var->index());
}
1382 
1383 
1384 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1385  TypeofState typeof_state,
1386  Label* slow,
1387  Label* done) {
1388  // Generate fast-case code for variables that might be shadowed by
1389  // eval-introduced variables. Eval is used a lot without
1390  // introducing variables. In those cases, we do not want to
1391  // perform a runtime call for all variables in the scope
1392  // containing the eval.
1393  Variable* var = proxy->var();
1394  if (var->mode() == DYNAMIC_GLOBAL) {
1395  EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
1396  __ jmp(done);
1397  } else if (var->mode() == DYNAMIC_LOCAL) {
1398  Variable* local = var->local_if_not_shadowed();
1399  __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
1400  if (local->mode() == LET || local->mode() == CONST ||
1401  local->mode() == CONST_LEGACY) {
1402  __ cmp(eax, isolate()->factory()->the_hole_value());
1403  __ j(not_equal, done);
1404  if (local->mode() == CONST_LEGACY) {
1405  __ mov(eax, isolate()->factory()->undefined_value());
1406  } else { // LET || CONST
1407  __ push(Immediate(var->name()));
1408  __ CallRuntime(Runtime::kThrowReferenceError, 1);
1409  }
1410  }
1411  __ jmp(done);
1412  }
1413 }
1414 
1415 
// Loads the value of the variable referenced by |proxy| into the current
// expression context, dispatching on the variable's location: global (load
// IC), stack/context slot (direct load, with a hole check for let/const),
// or LOOKUP (dynamic fast path with a runtime fallback).
void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      // NOTE(review): the load of the IC receiver (global object) is elided
      // in this listing.
      __ mov(LoadDescriptor::NameRegister(), var->name());
      if (FLAG_vector_ics) {
        // NOTE(review): the mov of the vector-IC slot register is elided;
        // this is its immediate operand.
               Immediate(Smi::FromInt(proxy->VariableFeedbackSlot())));
      }
      // NOTE(review): the contextual load-IC call line is elided here in
      // this listing — verify against upstream.
      context()->Plug(eax);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        DCHECK(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
          DCHECK(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          Label done;
          GetVar(eax, var);
          __ cmp(eax, isolate()->factory()->the_hole_value());
          __ j(not_equal, &done, Label::kNear);
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            __ push(Immediate(var->name()));
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
          } else {
            // Uninitalized const bindings outside of harmony mode are unholed.
            DCHECK(var->mode() == CONST_LEGACY);
            __ mov(eax, isolate()->factory()->undefined_value());
          }
          __ bind(&done);
          context()->Plug(eax);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      __ push(esi);  // Context.
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kLoadLookupSlot, 2);
      __ bind(&done);
      context()->Plug(eax);
      break;
    }
  }
}
1518 
1519 
// Emits code for a regexp literal: fetch the cached JSRegExp from the
// function's literals array, materialize it via the runtime on first use,
// then shallow-copy it into a freshly allocated object so each evaluation
// yields a distinct instance.
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // edi = JS function.
  // ecx = literals array.
  // eax = regexp literal.
  // eax = regexp literal clone.
  // NOTE(review): the loads of edi (JS function from the frame) and ecx
  // (its literals array) are elided in this listing — verify against
  // upstream.
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ mov(ebx, FieldOperand(ecx, literal_offset));
  // undefined in the literal slot means not yet materialized.
  __ cmp(ebx, isolate()->factory()->undefined_value());
  __ j(not_equal, &materialized, Label::kNear);

  // Create regexp literal using runtime function
  // Result will be in eax.
  __ push(ecx);
  __ push(Immediate(Smi::FromInt(expr->literal_index())));
  __ push(Immediate(expr->pattern()));
  __ push(Immediate(expr->flags()));
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mov(ebx, eax);

  __ bind(&materialized);
  // NOTE(review): the declaration of 'size' (the JSRegExp instance size in
  // bytes) is elided here in this listing — verify against upstream.
  Label allocated, runtime_allocate;
  __ Allocate(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  // Allocation failed: fall back to the runtime, preserving ebx (the
  // boilerplate regexp) across the call.
  __ bind(&runtime_allocate);
  __ push(ebx);
  __ push(Immediate(Smi::FromInt(size)));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(ebx);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ mov(edx, FieldOperand(ebx, i));
    __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
    __ mov(FieldOperand(eax, i), edx);
    __ mov(FieldOperand(eax, i + kPointerSize), ecx);
  }
  // Copy the trailing word when the size is an odd number of words.
  if ((size % (2 * kPointerSize)) != 0) {
    __ mov(edx, FieldOperand(ebx, size - kPointerSize));
    __ mov(FieldOperand(eax, size - kPointerSize), edx);
  }
  context()->Plug(eax);
}
1572 
1573 
1574 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1575  if (expression == NULL) {
1576  __ push(Immediate(isolate()->factory()->null_value()));
1577  } else {
1578  VisitForStackValue(expression);
1579  }
1580 }
1581 
1582 
// Emits code for an object literal: create the object (fast-clone stub or
// runtime), then store each non-compile-time property — named stores via the
// store IC, computed ones via Runtime::kSetProperty, __proto__ via
// kSetPrototype, and getter/setter pairs batched through an accessor table
// so each pair costs a single runtime call.
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  expr->BuildConstantProperties(isolate());
  Handle<FixedArray> constant_properties = expr->constant_properties();
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  int properties_count = constant_properties->length() / 2;
  // Slow path: the runtime must create the literal.
  if (expr->may_store_doubles() || expr->depth() > 1 ||
      masm()->serializer_enabled() ||
      flags != ObjectLiteral::kFastElements ||
    // NOTE(review): the rest of this condition and the push of the literals
    // array are elided in this listing — verify against upstream.
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_properties));
    __ push(Immediate(Smi::FromInt(flags)));
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    // NOTE(review): the loads of the function and its literals array into
    // registers for the stub are elided in this listing.
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_properties));
    __ mov(edx, Immediate(Smi::FromInt(flags)));
    FastCloneShallowObjectStub stub(isolate(), properties_count);
    __ CallStub(&stub);
  }

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in eax.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    // Compile-time values were already baked into the boilerplate.
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(eax);  // Save result on the stack
      result_saved = true;
    }
    switch (property->kind()) {
      // NOTE(review): the CONSTANT case label preceding UNREACHABLE() is
      // elided in this listing — verify against upstream.
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(StoreDescriptor::NameRegister(), Immediate(key->value()));
            __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
            CallStoreIC(key->LiteralFeedbackId());
          } else {
            VisitForEffect(value);
          }
          break;
        }
        __ push(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          // NOTE(review): SLOPPY is pushed but the trailing comment says
          // "Strict mode" — looks like a stale comment; verify upstream.
          __ push(Immediate(Smi::FromInt(SLOPPY)));  // Strict mode
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        __ push(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kSetPrototype, 2);
        } else {
          __ Drop(2);
        }
        break;
      case ObjectLiteral::Property::GETTER:
        accessor_table.lookup(key)->second->getter = value;
        break;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ push(Operand(esp, 0));  // Duplicate receiver.
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ push(Immediate(Smi::FromInt(NONE)));
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
  }

  if (expr->has_function()) {
    DCHECK(result_saved);
    __ push(Operand(esp, 0));
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}
1708 
1709 
// Emits code for an array literal: create the backing array (fast-clone stub
// for shallow/small literals, runtime otherwise), then evaluate and store
// each non-compile-time element, using direct field stores with a write
// barrier for fast object elements and a stub call otherwise.
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  expr->BuildConstantElements(isolate());
  int flags = expr->depth() == 1
      ? ArrayLiteral::kShallowElements
      : ArrayLiteral::kNoFlags;

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  Handle<FixedArray> constant_elements = expr->constant_elements();
  DCHECK_EQ(2, constant_elements->length());
  ElementsKind constant_elements_kind =
      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
  bool has_constant_fast_elements =
      IsFastObjectElementsKind(constant_elements_kind);
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
    // Deep or large literal: build it in the runtime.
    // NOTE(review): the push of the function's literals array is elided in
    // this listing — verify against upstream.
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_elements));
    __ push(Immediate(Smi::FromInt(flags)));
    __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
  } else {
    // NOTE(review): the loads of the function and its literals array into
    // registers for the stub are elided in this listing.
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_elements));
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }

  bool result_saved = false;  // Is the result saved to the stack?

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      __ push(eax);  // array literal.
      __ push(Immediate(Smi::FromInt(expr->literal_index())));
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    if (IsFastObjectElementsKind(constant_elements_kind)) {
      // Fast-case array literal with ElementsKind of FAST_*_ELEMENTS, they
      // cannot transition and don't need to call the runtime stub.
      int offset = FixedArray::kHeaderSize + (i * kPointerSize);
      __ mov(ebx, Operand(esp, kPointerSize));  // Copy of array literal.
      // NOTE(review): the load of the elements backing store into ebx is
      // elided in this listing.
      // Store the subexpression value in the array's elements.
      __ mov(FieldOperand(ebx, offset), result_register());
      // Update the write barrier for the array store.
      // NOTE(review): the trailing arguments of this call are elided in
      // this listing.
      __ RecordWriteField(ebx, offset, result_register(), ecx, kDontSaveFPRegs,
    } else {
      // Store the subexpression value in the array's elements.
      __ mov(ecx, Immediate(Smi::FromInt(i)));
      StoreArrayLiteralElementStub stub(isolate());
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
  }

  if (result_saved) {
    __ add(esp, Immediate(kPointerSize));  // literal index
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}
1797 
1798 
// Emits code for an assignment expression.  Classifies the target (variable,
// named property, keyed property, or named super property), evaluates the
// LHS as needed, handles compound assignments (op=) by loading the old
// value, applying the binary op, then dispatching the store, and finally
// plugs the result into the current context.
void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpression());

  Comment cmnt(masm_, "[ Assignment");

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind {
    VARIABLE,
    // NOTE(review): the NAMED_PROPERTY and KEYED_PROPERTY enumerators are
    // elided here in this listing (they are referenced below) — verify
    // against upstream.
    NAMED_SUPER_PROPERTY
  };
  LhsKind assign_type = VARIABLE;
  Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())
        ? (property->IsSuperAccess() ? NAMED_SUPER_PROPERTY
                                     : NAMED_PROPERTY)
        : KEYED_PROPERTY;
  }

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_SUPER_PROPERTY:
      VisitForStackValue(property->obj()->AsSuperReference()->this_var());
      EmitLoadHomeObject(property->obj()->AsSuperReference());
      __ push(result_register());
      if (expr->is_compound()) {
        // Compound super assignment needs this_var and home object twice:
        // once for the load, once for the store.
        __ push(MemOperand(esp, kPointerSize));
        __ push(result_register());
      }
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
        VisitForStackValue(property->obj());
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_PROPERTY: {
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        // NOTE(review): the load of the IC receiver register from the stack
        // is elided here in this listing.
        __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
    }
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    AccumulatorValueContext result_context(this);
    { AccumulatorValueContext left_operand_context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_SUPER_PROPERTY:
          EmitNamedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    __ push(eax);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
        ? OVERWRITE_RIGHT
        : NO_OVERWRITE;
    SetSourcePosition(expr->position() + 1);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            mode,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op, mode);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  // Record source position before possible IC call.
  SetSourcePosition(expr->position());

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(eax);
      break;
    case NAMED_PROPERTY:
      // NOTE(review): the named-property store emit call is elided in this
      // listing — verify against upstream.
      break;
    case NAMED_SUPER_PROPERTY:
      // NOTE(review): the named-super-property store emit call is elided in
      // this listing — verify against upstream.
      break;
    case KEYED_PROPERTY:
      // NOTE(review): the keyed-property store emit call is elided in this
      // listing — verify against upstream.
      break;
  }
}
1929 
1930 
// Generates code for a 'yield' expression inside a generator function.
// The yielded value is evaluated onto the stack first, then the three yield
// kinds are handled: kSuspend (ordinary yield — box value into an iterator
// result and suspend), kInitial (first suspension of the generator), kFinal
// (return from the generator), and kDelegating ('yield*' — drive the inner
// iterator in a loop with a try/catch around the resumption).
// NOTE(review): this listing has gaps in its source line numbering (e.g.
// 1954, 1956, 1960, 1966, 1969, 1978-1984) — several emitted instructions
// are missing from the extraction; verify the exact instruction sequence
// against the original full-codegen-x87.cc.
1931 void FullCodeGenerator::VisitYield(Yield* expr) {
1932  Comment cmnt(masm_, "[ Yield");
1933  // Evaluate yielded value first; the initial iterator definition depends on
1934  // this. It stays on the stack while we update the iterator.
1935  VisitForStackValue(expr->expression());
1936 
1937  switch (expr->yield_kind()) {
1938  case Yield::kSuspend:
1939  // Pop value from top-of-stack slot; box result into result register.
1940  EmitCreateIteratorResult(false);
1941  __ push(result_register());
1942  // Fall through.
1943  case Yield::kInitial: {
1944  Label suspend, continuation, post_runtime, resume;
1945 
1946  __ jmp(&suspend);
1947 
         // The continuation label is the address the generator resumes at;
         // its position is recorded into the generator object below.
1948  __ bind(&continuation);
1949  __ jmp(&resume);
1950 
1951  __ bind(&suspend);
1952  VisitForAccumulatorValue(expr->generator_object());
1953  DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1955  Immediate(Smi::FromInt(continuation.pos())));
1957  __ mov(ecx, esi);
1958  __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
1959  kDontSaveFPRegs);
1961  __ cmp(esp, ebx);
1962  __ j(equal, &post_runtime);
1963  __ push(eax); // generator object
1964  __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1965  __ mov(context_register(),
1967  __ bind(&post_runtime);
1968  __ pop(result_register());
1970 
1971  __ bind(&resume);
1972  context()->Plug(result_register());
1973  break;
1974  }
1975 
1976  case Yield::kFinal: {
1977  VisitForAccumulatorValue(expr->generator_object());
1981  // Pop value from top-of-stack slot, box result into result register.
1985  break;
1986  }
1987 
1988  case Yield::kDelegating: {
1989  VisitForStackValue(expr->generator_object());
1990 
1991  // Initial stack layout is as follows:
1992  // [sp + 1 * kPointerSize] iter
1993  // [sp + 0 * kPointerSize] g
1994 
1995  Label l_catch, l_try, l_suspend, l_continuation, l_resume;
1996  Label l_next, l_call, l_loop;
1997  Register load_receiver = LoadDescriptor::ReceiverRegister();
1998  Register load_name = LoadDescriptor::NameRegister();
1999 
2000  // Initial send value is undefined.
2001  __ mov(eax, isolate()->factory()->undefined_value());
2002  __ jmp(&l_next);
2003 
2004  // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2005  __ bind(&l_catch);
2006  handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2007  __ mov(load_name, isolate()->factory()->throw_string()); // "throw"
2008  __ push(load_name); // "throw"
2009  __ push(Operand(esp, 2 * kPointerSize)); // iter
2010  __ push(eax); // exception
2011  __ jmp(&l_call);
2012 
2013  // try { received = %yield result }
2014  // Shuffle the received result above a try handler and yield it without
2015  // re-boxing.
2016  __ bind(&l_try);
2017  __ pop(eax); // result
2018  __ PushTryHandler(StackHandler::CATCH, expr->index());
2019  const int handler_size = StackHandlerConstants::kSize;
2020  __ push(eax); // result
2021  __ jmp(&l_suspend);
2022  __ bind(&l_continuation);
2023  __ jmp(&l_resume);
2024  __ bind(&l_suspend);
         // generator object sits above the result slot and the try handler.
2025  const int generator_object_depth = kPointerSize + handler_size;
2026  __ mov(eax, Operand(esp, generator_object_depth));
2027  __ push(eax); // g
2028  DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2030  Immediate(Smi::FromInt(l_continuation.pos())));
2032  __ mov(ecx, esi);
2033  __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
2034  kDontSaveFPRegs);
2035  __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2036  __ mov(context_register(),
2038  __ pop(eax); // result
2040  __ bind(&l_resume); // received in eax
2041  __ PopTryHandler();
2042 
2043  // receiver = iter; f = iter.next; arg = received;
2044  __ bind(&l_next);
2045 
2046  __ mov(load_name, isolate()->factory()->next_string());
2047  __ push(load_name); // "next"
2048  __ push(Operand(esp, 2 * kPointerSize)); // iter
2049  __ push(eax); // received
2050 
2051  // result = receiver[f](arg);
2052  __ bind(&l_call);
2053  __ mov(load_receiver, Operand(esp, kPointerSize));
2054  if (FLAG_vector_ics) {
2056  Immediate(Smi::FromInt(expr->KeyedLoadFeedbackSlot())));
2057  }
2058  Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2060  __ mov(edi, eax);
2061  __ mov(Operand(esp, 2 * kPointerSize), edi);
2062  CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2063  __ CallStub(&stub);
2064 
2066  __ Drop(1); // The function is still on the stack; drop it.
2067 
2068  // if (!result.done) goto l_try;
2069  __ bind(&l_loop);
2070  __ push(eax); // save result
2071  __ Move(load_receiver, eax); // result
2072  __ mov(load_name,
2073  isolate()->factory()->done_string()); // "done"
2074  if (FLAG_vector_ics) {
2076  Immediate(Smi::FromInt(expr->DoneFeedbackSlot())));
2077  }
2078  CallLoadIC(NOT_CONTEXTUAL); // result.done in eax
2079  Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2080  CallIC(bool_ic);
2081  __ test(eax, eax);
2082  __ j(zero, &l_try);
2083 
2084  // result.value
2085  __ pop(load_receiver); // result
2086  __ mov(load_name,
2087  isolate()->factory()->value_string()); // "value"
2088  if (FLAG_vector_ics) {
2090  Immediate(Smi::FromInt(expr->ValueFeedbackSlot())));
2091  }
2092  CallLoadIC(NOT_CONTEXTUAL); // result.value in eax
2093  context()->DropAndPlug(2, eax); // drop iter and g
2094  break;
2095  }
2096  }
2097 }
2098 
2099 
// Resumes a suspended generator: checks the generator's state (closed /
// running-is-an-error / suspended), rebuilds the generator's JavaScript
// frame on the stack (frame pointer, context, function, holes for the
// arguments), and either jumps straight back into the generated code (fast
// path for NEXT with an empty operand stack) or calls the
// ResumeJSGeneratorObject runtime function (slow path).
// NOTE(review): gaps in the source line numbering (2113-2115, 2121-2122,
// 2125, 2128, 2130, 2152-2153, 2162-2167) — several loads of generator
// fields are missing from this extraction; verify against the original
// file.
2100 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2101  Expression *value,
2102  JSGeneratorObject::ResumeMode resume_mode) {
2103  // The value stays in eax, and is ultimately read by the resumed generator, as
2104  // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2105  // is read to throw the value when the resumed generator is already closed.
2106  // ebx will hold the generator object until the activation has been resumed.
2107  VisitForStackValue(generator);
2108  VisitForAccumulatorValue(value);
2109  __ pop(ebx);
2110 
2111  // Check generator state.
2112  Label wrong_state, closed_state, done;
2116  Immediate(Smi::FromInt(0)));
2117  __ j(equal, &closed_state);
2118  __ j(less, &wrong_state);
2119 
2120  // Load suspended function and context.
2123 
2124  // Push receiver.
2126 
2127  // Push holes for arguments to generator function.
2129  __ mov(edx,
2131  __ mov(ecx, isolate()->factory()->the_hole_value());
2132  Label push_argument_holes, push_frame;
2133  __ bind(&push_argument_holes);
       // edx is a Smi-tagged count; subtracting Smi(1) until carry pushes
       // one hole per formal parameter.
2134  __ sub(edx, Immediate(Smi::FromInt(1)));
2135  __ j(carry, &push_frame);
2136  __ push(ecx);
2137  __ jmp(&push_argument_holes);
2138 
2139  // Enter a new JavaScript frame, and initialize its slots as they were when
2140  // the generator was suspended.
2141  Label resume_frame;
2142  __ bind(&push_frame);
       // 'call' pushes a return address so the resumed frame can return to
       // the 'done' label below.
2143  __ call(&resume_frame);
2144  __ jmp(&done);
2145  __ bind(&resume_frame);
2146  __ push(ebp); // Caller's frame pointer.
2147  __ mov(ebp, esp);
2148  __ push(esi); // Callee's context.
2149  __ push(edi); // Callee's JS Function.
2150 
2151  // Load the operand stack size.
2154  __ SmiUntag(edx);
2155 
2156  // If we are sending a value and there is no operand stack, we can jump back
2157  // in directly.
2158  if (resume_mode == JSGeneratorObject::NEXT) {
2159  Label slow_resume;
2160  __ cmp(edx, Immediate(0));
2161  __ j(not_zero, &slow_resume);
2164  __ SmiUntag(ecx);
2165  __ add(edx, ecx);
2168  __ jmp(edx);
2169  __ bind(&slow_resume);
2170  }
2171 
2172  // Otherwise, we push holes for the operand stack and call the runtime to fix
2173  // up the stack and the handlers.
2174  Label push_operand_holes, call_resume;
2175  __ bind(&push_operand_holes);
2176  __ sub(edx, Immediate(1));
2177  __ j(carry, &call_resume);
2178  __ push(ecx);
2179  __ jmp(&push_operand_holes);
2180  __ bind(&call_resume);
2181  __ push(ebx);
2182  __ push(result_register());
2183  __ Push(Smi::FromInt(resume_mode));
2184  __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2185  // Not reached: the runtime call returns elsewhere.
2186  __ Abort(kGeneratorFailedToResume);
2187 
2188  // Reach here when generator is closed.
2189  __ bind(&closed_state);
2190  if (resume_mode == JSGeneratorObject::NEXT) {
2191  // Return completed iterator result when generator is closed.
2192  __ push(Immediate(isolate()->factory()->undefined_value()));
2193  // Pop value from top-of-stack slot; box result into result register.
2195  } else {
2196  // Throw the provided value.
2197  __ push(eax);
2198  __ CallRuntime(Runtime::kThrow, 1);
2199  }
2200  __ jmp(&done);
2201 
2202  // Throw error if we attempt to operate on a running generator.
2203  __ bind(&wrong_state);
2204  __ push(ebx);
2205  __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
2206 
2207  __ bind(&done);
2208  context()->Plug(result_register());
2209 }
2210 
2211 
// Body of EmitCreateIteratorResult — allocates a JSIteratorResult-style
// object {value, done} in new space, falling back to a runtime allocation
// on GC pressure. The value is popped from the stack; 'done' selects the
// boolean stored in the result.
// NOTE(review): the defining signature line (presumably
// `void FullCodeGenerator::EmitCreateIteratorResult(bool done) {`, original
// line 2212) and several field stores (2225, 2232-2238, 2242) are missing
// from this extraction; verify against the original file.
2213  Label gc_required;
2214  Label allocated;
2215 
2216  Handle<Map> map(isolate()->native_context()->iterator_result_map());
2217 
2218  __ Allocate(map->instance_size(), eax, ecx, edx, &gc_required, TAG_OBJECT);
2219  __ jmp(&allocated);
2220 
2221  __ bind(&gc_required);
2222  __ Push(Smi::FromInt(map->instance_size()));
2223  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2224  __ mov(context_register(),
2226 
2227  __ bind(&allocated);
2228  __ mov(ebx, map);
2229  __ pop(ecx);
2230  __ mov(edx, isolate()->factory()->ToBoolean(done));
2231  DCHECK_EQ(map->instance_size(), 5 * kPointerSize);
2234  isolate()->factory()->empty_fixed_array());
2236  isolate()->factory()->empty_fixed_array());
2239 
2240  // Only the value field needs a write barrier, as the other values are in the
2241  // root set.
2243  edx, kDontSaveFPRegs);
2244 }
2245 
2246 
// Emits a named (non-super) property load through a load IC. The receiver
// is expected in the load descriptor's receiver register; the property name
// literal is moved into the name register before the IC call.
// NOTE(review): lines 2255 and 2257 are missing from this extraction
// (the vector-IC slot move and the CallLoadIC in the FLAG_vector_ics arm).
2247 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2248  SetSourcePosition(prop->position());
2249  Literal* key = prop->key()->AsLiteral();
2250  DCHECK(!key->value()->IsSmi());
2251  DCHECK(!prop->IsSuperAccess());
2252 
2253  __ mov(LoadDescriptor::NameRegister(), Immediate(key->value()));
2254  if (FLAG_vector_ics) {
2256  Immediate(Smi::FromInt(prop->PropertyFeedbackSlot())));
2258  } else {
2259  CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2260  }
2261 }
2262 
2263 
// Emits a named 'super.x' property load. Receiver and home_object are
// already on the stack; the key is pushed and the LoadFromSuper runtime
// function consumes all three arguments.
2264 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2265  // Stack: receiver, home_object.
2266  SetSourcePosition(prop->position());
2267  Literal* key = prop->key()->AsLiteral();
2268  DCHECK(!key->value()->IsSmi());
2269  DCHECK(prop->IsSuperAccess());
2270 
2271  __ push(Immediate(key->value()));
2272  __ CallRuntime(Runtime::kLoadFromSuper, 3);
2273 }
2274 
2275 
// Emits a keyed property load (obj[key]) through the keyed-load IC.
// Receiver and key registers are expected to be set up by the caller.
// NOTE(review): line 2280 (the vector-IC slot move) is missing from this
// extraction.
2276 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2277  SetSourcePosition(prop->position());
2278  Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2279  if (FLAG_vector_ics) {
2281  Immediate(Smi::FromInt(prop->PropertyFeedbackSlot())));
2282  CallIC(ic);
2283  } else {
2284  CallIC(ic, prop->PropertyFeedbackId());
2285  }
2286 }
2287 
2288 
// Emits an inline fast path for a binary operation when both operands are
// Smis, with a patchable jump (JumpPatchSite) to a BinaryOpIC stub call for
// the general case. Left operand is popped from the stack into edx; right
// operand arrives in eax and is saved in ecx for the stub path.
// NOTE(review): line 2291 is missing from this extraction — it is a
// parameter of this function (presumably `OverwriteMode mode,`, consumed
// by the CodeFactory::BinaryOpIC call below).
2289 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2290  Token::Value op,
2292  Expression* left,
2293  Expression* right) {
2294  // Do combined smi check of the operands. Left operand is on the
2295  // stack. Right operand is in eax.
2296  Label smi_case, done, stub_call;
2297  __ pop(edx);
2298  __ mov(ecx, eax);
       // OR-ing the operands: the result has the Smi tag bit clear only if
       // both operands are Smis.
2299  __ or_(eax, edx);
2300  JumpPatchSite patch_site(masm_);
2301  patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);
2302 
2303  __ bind(&stub_call);
2304  __ mov(eax, ecx);
2305  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2306  CallIC(code, expr->BinaryOperationFeedbackId());
2307  patch_site.EmitPatchInfo();
2308  __ jmp(&done, Label::kNear);
2309 
2310  // Smi case.
2311  __ bind(&smi_case);
2312  __ mov(eax, edx); // Copy left operand in case of a stub call.
2313 
2314  switch (op) {
2315  case Token::SAR:
2316  __ SmiUntag(ecx);
2317  __ sar_cl(eax); // No checks of result necessary
         // Clear the tag bits that an arithmetic shift may leave set, so
         // the result is a valid Smi.
2318  __ and_(eax, Immediate(~kSmiTagMask));
2319  break;
2320  case Token::SHL: {
2321  Label result_ok;
2322  __ SmiUntag(eax);
2323  __ SmiUntag(ecx);
2324  __ shl_cl(eax);
2325  // Check that the *signed* result fits in a smi.
2326  __ cmp(eax, 0xc0000000);
2327  __ j(positive, &result_ok);
         // Re-tag the shift count before falling back to the stub, which
         // expects tagged operands.
2328  __ SmiTag(ecx);
2329  __ jmp(&stub_call);
2330  __ bind(&result_ok);
2331  __ SmiTag(eax);
2332  break;
2333  }
2334  case Token::SHR: {
2335  Label result_ok;
2336  __ SmiUntag(eax);
2337  __ SmiUntag(ecx);
2338  __ shr_cl(eax);
2339  __ test(eax, Immediate(0xc0000000));
2340  __ j(zero, &result_ok);
2341  __ SmiTag(ecx);
2342  __ jmp(&stub_call);
2343  __ bind(&result_ok);
2344  __ SmiTag(eax);
2345  break;
2346  }
2347  case Token::ADD:
2348  __ add(eax, ecx);
2349  __ j(overflow, &stub_call);
2350  break;
2351  case Token::SUB:
2352  __ sub(eax, ecx);
2353  __ j(overflow, &stub_call);
2354  break;
2355  case Token::MUL: {
2356  __ SmiUntag(eax);
2357  __ imul(eax, ecx);
2358  __ j(overflow, &stub_call);
         // A zero product needs the sign check below: 0 * -n must produce
         // -0, which is not representable as a Smi.
2359  __ test(eax, eax);
2360  __ j(not_zero, &done, Label::kNear);
2361  __ mov(ebx, edx);
2362  __ or_(ebx, ecx);
2363  __ j(negative, &stub_call);
2364  break;
2365  }
2366  case Token::BIT_OR:
2367  __ or_(eax, ecx);
2368  break;
2369  case Token::BIT_AND:
2370  __ and_(eax, ecx);
2371  break;
2372  case Token::BIT_XOR:
2373  __ xor_(eax, ecx);
2374  break;
2375  default:
2376  UNREACHABLE();
2377  }
2378 
2379  __ bind(&done);
2380  context()->Plug(eax);
2381 }
2382 
2383 
// Emits a binary operation with no inline Smi fast path: pops the left
// operand into edx (right is in eax) and calls the BinaryOpIC stub. The
// unbound JumpPatchSite emits a nop signalling "no inlined smi code" to
// the IC patching machinery.
2384 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2385  Token::Value op,
2386  OverwriteMode mode) {
2387  __ pop(edx);
2388  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2389  JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2390  CallIC(code, expr->BinaryOperationFeedbackId());
2391  patch_site.EmitPatchInfo();
2392  context()->Plug(eax);
2393 }
2394 
2395 
// Emits an assignment to a reference expression whose value is already in
// eax: a variable, a named property, or a keyed property. Used for
// assignments that are not ordinary Assignment AST nodes (the value is
// preserved across the receiver/key evaluation by pushing it).
// NOTE(review): lines 2401, 2420, 2422 and 2431 are missing from this
// extraction (including the receiver/name register moves in the property
// cases).
2396 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2397  DCHECK(expr->IsValidReferenceExpression());
2398 
2399  // Left-hand side can only be a property, a global or a (parameter or local)
2400  // slot.
2402  LhsKind assign_type = VARIABLE;
2403  Property* prop = expr->AsProperty();
2404  if (prop != NULL) {
2405  assign_type = (prop->key()->IsPropertyName())
2406  ? NAMED_PROPERTY
2407  : KEYED_PROPERTY;
2408  }
2409 
2410  switch (assign_type) {
2411  case VARIABLE: {
2412  Variable* var = expr->AsVariableProxy()->var();
2413  EffectContext context(this);
2414  EmitVariableAssignment(var, Token::ASSIGN);
2415  break;
2416  }
2417  case NAMED_PROPERTY: {
2418  __ push(eax); // Preserve value.
2419  VisitForAccumulatorValue(prop->obj());
2421  __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2423  prop->key()->AsLiteral()->value());
2424  CallStoreIC();
2425  break;
2426  }
2427  case KEYED_PROPERTY: {
2428  __ push(eax); // Preserve value.
2429  VisitForStackValue(prop->obj());
2430  VisitForAccumulatorValue(prop->key());
2432  __ pop(StoreDescriptor::ReceiverRegister()); // Receiver.
2433  __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2434  Handle<Code> ic =
2435  CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2436  CallIC(ic);
2437  break;
2438  }
2439  }
2440  context()->Plug(eax);
2441 }
2442 
2443 
// Body of EmitStoreToStackLocalOrContextSlot — stores eax into the given
// variable location, and for context slots emits the write barrier needed
// when storing a heap reference into a context object.
// NOTE(review): the signature line (original line 2444, presumably
// `void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(`) is
// missing from this extraction.
2445  Variable* var, MemOperand location) {
2446  __ mov(location, eax);
2447  if (var->IsContextSlot()) {
2448  __ mov(edx, eax);
2449  int offset = Context::SlotOffset(var->index());
2450  __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
2451  }
2452 }
2453 
2454 
// Body of EmitVariableAssignment — stores eax into a variable, dispatching
// on its allocation (global/unallocated, lookup slot, stack local, context
// slot) and on the assignment kind (plain assignment vs. legacy-const or
// let initialization, which need hole checks / runtime calls).
// NOTE(review): the signature line (original line 2455, presumably
// `void FullCodeGenerator::EmitVariableAssignment(Variable* var,`) and
// line 2460 (the global store-IC receiver setup) are missing from this
// extraction.
2456  Token::Value op) {
2457  if (var->IsUnallocated()) {
2458  // Global var, const, or let.
2459  __ mov(StoreDescriptor::NameRegister(), var->name());
2461  CallStoreIC();
2462 
2463  } else if (op == Token::INIT_CONST_LEGACY) {
2464  // Const initializers need a write barrier.
2465  DCHECK(!var->IsParameter()); // No const parameters.
2466  if (var->IsLookupSlot()) {
2467  __ push(eax);
2468  __ push(esi);
2469  __ push(Immediate(var->name()));
2470  __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2471  } else {
2472  DCHECK(var->IsStackLocal() || var->IsContextSlot());
2473  Label skip;
2474  MemOperand location = VarOperand(var, ecx);
2475  __ mov(edx, location);
       // Only initialize if the slot still holds the hole; a legacy const
       // that already has a value must not be overwritten.
2476  __ cmp(edx, isolate()->factory()->the_hole_value());
2477  __ j(not_equal, &skip, Label::kNear);
2478  EmitStoreToStackLocalOrContextSlot(var, location);
2479  __ bind(&skip);
2480  }
2481 
2482  } else if (var->mode() == LET && op != Token::INIT_LET) {
2483  // Non-initializing assignment to let variable needs a write barrier.
2484  DCHECK(!var->IsLookupSlot());
2485  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2486  Label assign;
2487  MemOperand location = VarOperand(var, ecx);
2488  __ mov(edx, location);
       // The hole marks an uninitialized let binding (temporal dead zone):
       // assigning before initialization throws a ReferenceError.
2489  __ cmp(edx, isolate()->factory()->the_hole_value());
2490  __ j(not_equal, &assign, Label::kNear);
2491  __ push(Immediate(var->name()));
2492  __ CallRuntime(Runtime::kThrowReferenceError, 1);
2493  __ bind(&assign);
2494  EmitStoreToStackLocalOrContextSlot(var, location);
2495 
2496  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2497  if (var->IsLookupSlot()) {
2498  // Assignment to var.
2499  __ push(eax); // Value.
2500  __ push(esi); // Context.
2501  __ push(Immediate(var->name()));
2502  __ push(Immediate(Smi::FromInt(strict_mode())));
2503  __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2504  } else {
2505  // Assignment to var or initializing assignment to let/const in harmony
2506  // mode.
2507  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2508  MemOperand location = VarOperand(var, ecx);
2509  if (generate_debug_code_ && op == Token::INIT_LET) {
2510  // Check for an uninitialized let binding.
2511  __ mov(edx, location);
2512  __ cmp(edx, isolate()->factory()->the_hole_value());
2513  __ Check(equal, kLetBindingReInitialization);
2514  }
2515  EmitStoreToStackLocalOrContextSlot(var, location);
2516  }
2517  }
2518  // Non-initializing assignments to consts are ignored.
2519 }
2520 
2521 
// Emits an assignment to a named property via the store IC: value in eax,
// receiver on the stack; records a bailout point after the IC call.
// NOTE(review): line 2534 (presumably the pop of the receiver into
// StoreDescriptor::ReceiverRegister()) is missing from this extraction.
2522 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2523  // Assignment to a property, using a named store IC.
2524  // eax : value
2525  // esp[0] : receiver
2526 
2527  Property* prop = expr->target()->AsProperty();
2528  DCHECK(prop != NULL);
2529  DCHECK(prop->key()->IsLiteral());
2530 
2531  // Record source code position before IC call.
2532  SetSourcePosition(expr->position());
2533  __ mov(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value());
2535  CallStoreIC(expr->AssignmentFeedbackId());
2536  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2537  context()->Plug(eax);
2538 }
2539 
2540 
// Body of a named 'super.x = value' store — pushes the value and key on
// top of the receiver/home_object already on the stack and calls the
// strict- or sloppy-mode StoreToSuper runtime function with 4 arguments.
// NOTE(review): the signature line (original line 2541) is missing from
// this extraction; the name used here is inferred from the comment and
// call shape — verify against the original file.
2542  // Assignment to named property of super.
2543  // eax : value
2544  // stack : receiver ('this'), home_object
2545  Property* prop = expr->target()->AsProperty();
2546  DCHECK(prop != NULL);
2547  Literal* key = prop->key()->AsLiteral();
2548  DCHECK(key != NULL);
2549 
2550  __ push(eax);
2551  __ push(Immediate(key->value()));
2552  __ CallRuntime((strict_mode() == STRICT ? Runtime::kStoreToSuper_Strict
2553  : Runtime::kStoreToSuper_Sloppy),
2554  4);
2555  context()->Plug(eax);
2556 }
2557 
2558 
// Emits an assignment to a keyed property via the keyed store IC: value in
// eax, key and receiver on the stack; records a bailout point afterwards.
// NOTE(review): lines 2566-2567 are missing from this extraction
// (presumably the pop of the receiver and a register DCHECK).
2559 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2560  // Assignment to a property, using a keyed store IC.
2561  // eax : value
2562  // esp[0] : key
2563  // esp[kPointerSize] : receiver
2564 
2565  __ pop(StoreDescriptor::NameRegister()); // Key.
2568  // Record source code position before IC call.
2569  SetSourcePosition(expr->position());
2570  Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2571  CallIC(ic, expr->AssignmentFeedbackId());
2572 
2573  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2574  context()->Plug(eax);
2575 }
2576 
2577 
// Generates code for a property access expression: named (normal or
// super) loads go through EmitNamedPropertyLoad / the super-load path,
// keyed loads through EmitKeyedPropertyLoad.
// NOTE(review): lines 2585 and 2591 are missing from this extraction
// (including the named-super load call in the else branch).
2578 void FullCodeGenerator::VisitProperty(Property* expr) {
2579  Comment cmnt(masm_, "[ Property");
2580  Expression* key = expr->key();
2581 
2582  if (key->IsPropertyName()) {
2583  if (!expr->IsSuperAccess()) {
2584  VisitForAccumulatorValue(expr->obj());
2586  EmitNamedPropertyLoad(expr);
2587  } else {
2588  VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
2589  EmitLoadHomeObject(expr->obj()->AsSuperReference());
2590  __ push(result_register());
2592  }
2593  PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2594  context()->Plug(eax);
2595  } else {
2596  VisitForStackValue(expr->obj());
2597  VisitForAccumulatorValue(expr->key());
2598  __ pop(LoadDescriptor::ReceiverRegister()); // Object.
2599  __ Move(LoadDescriptor::NameRegister(), result_register()); // Key.
2600  EmitKeyedPropertyLoad(expr);
2601  context()->Plug(eax);
2602  }
2603 }
2604 
2605 
// Calls an IC stub, bumping the per-function IC counter and tagging the
// call site with the given type-feedback id for later IC patching.
2606 void FullCodeGenerator::CallIC(Handle<Code> code,
2607  TypeFeedbackId ast_id) {
2608  ic_total_count_++;
2609  __ call(code, RelocInfo::CODE_TARGET, ast_id);
2610 }
2611 
2612 
2613 // Code common for calls using the IC.
// Code common for calls using the IC.
// Sets up the callee and receiver for a call expression whose target is a
// plain variable (FUNCTION call: undefined receiver) or a named property
// (METHOD call: load via the load IC, then shuffle the function under the
// receiver on the stack), then delegates to EmitCall.
// NOTE(review): line 2623 is missing from this extraction (inside the
// StackValueContext scope, presumably a bailout-preparation call).
2614 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2615  Expression* callee = expr->expression();
2616 
2617  CallICState::CallType call_type =
2618  callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2619  // Get the target function.
2620  if (call_type == CallICState::FUNCTION) {
2621  { StackValueContext context(this);
2622  EmitVariableLoad(callee->AsVariableProxy());
2624  }
2625  // Push undefined as receiver. This is patched in the method prologue if it
2626  // is a sloppy mode method.
2627  __ push(Immediate(isolate()->factory()->undefined_value()));
2628  } else {
2629  // Load the function from the receiver.
2630  DCHECK(callee->IsProperty());
2631  DCHECK(!callee->AsProperty()->IsSuperAccess());
2632  __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
2633  EmitNamedPropertyLoad(callee->AsProperty());
2634  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2635  // Push the target function under the receiver.
2636  __ push(Operand(esp, 0));
2637  __ mov(Operand(esp, kPointerSize), eax);
2638  }
2639 
2640  EmitCall(expr, call_type);
2641 }
2642 
2643 
// Body of the super named-property call setup (presumably
// EmitSuperCallWithLoadIC — the signature line, original 2644, is missing
// from this extraction): loads home_object and 'this', arranges the stack
// for Runtime::kLoadFromSuper, then replaces home_object with the loaded
// target function and calls as a METHOD.
2645  Expression* callee = expr->expression();
2646  DCHECK(callee->IsProperty());
2647  Property* prop = callee->AsProperty();
2648  DCHECK(prop->IsSuperAccess());
2649 
2650  SetSourcePosition(prop->position());
2651  Literal* key = prop->key()->AsLiteral();
2652  DCHECK(!key->value()->IsSmi());
2653  // Load the function from the receiver.
2654  SuperReference* super_ref = callee->AsProperty()->obj()->AsSuperReference();
2655  EmitLoadHomeObject(super_ref);
2656  __ push(eax);
2657  VisitForAccumulatorValue(super_ref->this_var());
2658  __ push(eax);
2659  __ push(eax);
2660  __ push(Operand(esp, kPointerSize * 2));
2661  __ push(Immediate(key->value()));
2662  // Stack here:
2663  // - home_object
2664  // - this (receiver)
2665  // - this (receiver) <-- LoadFromSuper will pop here and below.
2666  // - home_object
2667  // - key
2668  __ CallRuntime(Runtime::kLoadFromSuper, 3);
2669 
2670  // Replace home_object with target function.
2671  __ mov(Operand(esp, kPointerSize), eax);
2672 
2673  // Stack here:
2674  // - target function
2675  // - this (receiver)
2676  EmitCall(expr, CallICState::METHOD);
2677 }
2678 
2679 
2680 // Code common for calls using the IC.
// Code common for calls using the IC.
// Body of the keyed-property call setup (EmitKeyedCallWithLoadIC — its
// opening signature line, original 2681, and lines 2684/2691 are missing
// from this extraction): loads obj[key] as the callee via the keyed load
// IC, pushes it under the receiver, and calls as a METHOD.
2682  Expression* key) {
2683  // Load the key.
2685 
2686  Expression* callee = expr->expression();
2687 
2688  // Load the function from the receiver.
2689  DCHECK(callee->IsProperty());
2690  __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
2692  EmitKeyedPropertyLoad(callee->AsProperty());
2693  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2694 
2695  // Push the target function under the receiver.
2696  __ push(Operand(esp, 0));
2697  __ mov(Operand(esp, kPointerSize), eax);
2698 
2699  EmitCall(expr, CallICState::METHOD);
2700 }
2701 
2702 
// Common tail of all call paths: evaluates the arguments onto the stack,
// loads the feedback slot (edx) and the target function (edi), and invokes
// the CallIC stub. On return, drops the receiver and plugs eax.
// NOTE(review): line 2726 (the context-register restore after the call)
// is missing from this extraction.
2703 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2704  // Load the arguments.
2705  ZoneList<Expression*>* args = expr->arguments();
2706  int arg_count = args->length();
2707  { PreservePositionScope scope(masm()->positions_recorder());
2708  for (int i = 0; i < arg_count; i++) {
2709  VisitForStackValue(args->at(i));
2710  }
2711  }
2712 
2713  // Record source position of the IC call.
2714  SetSourcePosition(expr->position());
2715  Handle<Code> ic = CallIC::initialize_stub(
2716  isolate(), arg_count, call_type);
2717  __ Move(edx, Immediate(Smi::FromInt(expr->CallFeedbackSlot())));
2718  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
2719  // Don't assign a type feedback id to the IC, since type feedback is provided
2720  // by the vector above.
2721  CallIC(ic);
2722 
2723  RecordJSReturnSite(expr);
2724 
2725  // Restore context register.
2727 
2728  context()->DropAndPlug(1, eax);
2729 }
2730 
2731 
// Body of EmitResolvePossiblyDirectEval (signature line, original 2732, is
// missing from this extraction — presumably takes `int arg_count`): pushes
// the first eval argument (or undefined), the enclosing receiver, the
// language mode, and the scope start position, then calls the runtime to
// resolve a possibly-direct eval.
2733  // Push copy of the first argument or undefined if it doesn't exist.
2734  if (arg_count > 0) {
2735  __ push(Operand(esp, arg_count * kPointerSize));
2736  } else {
2737  __ push(Immediate(isolate()->factory()->undefined_value()));
2738  }
2739 
2740  // Push the receiver of the enclosing function.
2741  __ push(Operand(ebp, (2 + info_->scope()->num_parameters()) * kPointerSize));
2742  // Push the language mode.
2743  __ push(Immediate(Smi::FromInt(strict_mode())));
2744 
2745  // Push the start position of the scope the calls resides in.
2746  __ push(Immediate(Smi::FromInt(scope()->start_position())));
2747 
2748  // Do the runtime call.
2749  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2750 }
2751 
2752 
// Generates code for a call expression, dispatching on the call type:
// possibly-direct eval, global, lookup-slot (dynamically scoped), property
// (named/keyed/super), or an arbitrary expression. All paths end in a
// RecordJSReturnSite via EmitCall or the eval stub path.
// NOTE(review): lines 2796 (context restore after the eval-path stub call)
// and 2843 (the super named-call dispatch in the PROPERTY_CALL branch) are
// missing from this extraction.
2753 void FullCodeGenerator::VisitCall(Call* expr) {
2754 #ifdef DEBUG
2755  // We want to verify that RecordJSReturnSite gets called on all paths
2756  // through this function. Avoid early returns.
2757  expr->return_is_recorded_ = false;
2758 #endif
2759 
2760  Comment cmnt(masm_, "[ Call");
2761  Expression* callee = expr->expression();
2762  Call::CallType call_type = expr->GetCallType(isolate());
2763 
2764  if (call_type == Call::POSSIBLY_EVAL_CALL) {
2765  // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2766  // to resolve the function we need to call and the receiver of the call.
2767  // Then we call the resolved function using the given arguments.
2768  ZoneList<Expression*>* args = expr->arguments();
2769  int arg_count = args->length();
2770  { PreservePositionScope pos_scope(masm()->positions_recorder());
2771  VisitForStackValue(callee);
2772  // Reserved receiver slot.
2773  __ push(Immediate(isolate()->factory()->undefined_value()));
2774  // Push the arguments.
2775  for (int i = 0; i < arg_count; i++) {
2776  VisitForStackValue(args->at(i));
2777  }
2778 
2779  // Push a copy of the function (found below the arguments) and
2780  // resolve eval.
2781  __ push(Operand(esp, (arg_count + 1) * kPointerSize));
2782  EmitResolvePossiblyDirectEval(arg_count);
2783 
2784  // The runtime call returns a pair of values in eax (function) and
2785  // edx (receiver). Touch up the stack with the right values.
2786  __ mov(Operand(esp, (arg_count + 0) * kPointerSize), edx);
2787  __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
2788  }
2789  // Record source position for debugger.
2790  SetSourcePosition(expr->position());
2791  CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
2792  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
2793  __ CallStub(&stub);
2794  RecordJSReturnSite(expr);
2795  // Restore context register.
2797  context()->DropAndPlug(1, eax);
2798 
2799  } else if (call_type == Call::GLOBAL_CALL) {
2800  EmitCallWithLoadIC(expr);
2801 
2802  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2803  // Call to a lookup slot (dynamically introduced variable).
2804  VariableProxy* proxy = callee->AsVariableProxy();
2805  Label slow, done;
2806  { PreservePositionScope scope(masm()->positions_recorder());
2807  // Generate code for loading from variables potentially shadowed by
2808  // eval-introduced variables.
2809  EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
2810  }
2811  __ bind(&slow);
2812  // Call the runtime to find the function to call (returned in eax) and
2813  // the object holding it (returned in edx).
2814  __ push(context_register());
2815  __ push(Immediate(proxy->name()));
2816  __ CallRuntime(Runtime::kLoadLookupSlot, 2);
2817  __ push(eax); // Function.
2818  __ push(edx); // Receiver.
2819 
2820  // If fast case code has been generated, emit code to push the function
2821  // and receiver and have the slow path jump around this code.
2822  if (done.is_linked()) {
2823  Label call;
2824  __ jmp(&call, Label::kNear);
2825  __ bind(&done);
2826  // Push function.
2827  __ push(eax);
2828  // The receiver is implicitly the global receiver. Indicate this by
2829  // passing the hole to the call function stub.
2830  __ push(Immediate(isolate()->factory()->undefined_value()));
2831  __ bind(&call);
2832  }
2833 
2834  // The receiver is either the global receiver or an object found by
2835  // LoadContextSlot.
2836  EmitCall(expr);
2837 
2838  } else if (call_type == Call::PROPERTY_CALL) {
2839  Property* property = callee->AsProperty();
2840  bool is_named_call = property->key()->IsPropertyName();
2841  // super.x() is handled in EmitCallWithLoadIC.
2842  if (property->IsSuperAccess() && is_named_call) {
2844  } else {
2845  {
2846  PreservePositionScope scope(masm()->positions_recorder());
2847  VisitForStackValue(property->obj());
2848  }
2849  if (is_named_call) {
2850  EmitCallWithLoadIC(expr);
2851  } else {
2852  EmitKeyedCallWithLoadIC(expr, property->key());
2853  }
2854  }
2855  } else {
2856  DCHECK(call_type == Call::OTHER_CALL);
2857  // Call to an arbitrary expression not handled specially above.
2858  { PreservePositionScope scope(masm()->positions_recorder());
2859  VisitForStackValue(callee);
2860  }
2861  __ push(Immediate(isolate()->factory()->undefined_value()));
2862  // Emit function call.
2863  EmitCall(expr);
2864  }
2865 
2866 #ifdef DEBUG
2867  // RecordJSReturnSite should have been called.
2868  DCHECK(expr->return_is_recorded_);
2869 #endif
2870 }
2871 
2872 
// Generates code for a 'new' expression: evaluates the constructor and
// arguments onto the stack, loads the argument count (eax), constructor
// (edi), feedback vector (ebx) and slot (edx), and calls the construct
// stub that records call targets in unoptimized code.
2873 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2874  Comment cmnt(masm_, "[ CallNew");
2875  // According to ECMA-262, section 11.2.2, page 44, the function
2876  // expression in new calls must be evaluated before the
2877  // arguments.
2878 
2879  // Push constructor on the stack. If it's not a function it's used as
2880  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2881  // ignored.
2882  VisitForStackValue(expr->expression());
2883 
2884  // Push the arguments ("left-to-right") on the stack.
2885  ZoneList<Expression*>* args = expr->arguments();
2886  int arg_count = args->length();
2887  for (int i = 0; i < arg_count; i++) {
2888  VisitForStackValue(args->at(i));
2889  }
2890 
2891  // Call the construct call builtin that handles allocation and
2892  // constructor invocation.
2893  SetSourcePosition(expr->position());
2894 
2895  // Load function and argument count into edi and eax.
2896  __ Move(eax, Immediate(arg_count));
2897  __ mov(edi, Operand(esp, arg_count * kPointerSize));
2898 
2899  // Record call targets in unoptimized code.
2900  if (FLAG_pretenuring_call_new) {
2901  EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
2902  DCHECK(expr->AllocationSiteFeedbackSlot() ==
2903  expr->CallNewFeedbackSlot() + 1);
2904  }
2905 
2906  __ LoadHeapObject(ebx, FeedbackVector());
2907  __ mov(edx, Immediate(Smi::FromInt(expr->CallNewFeedbackSlot())));
2908 
2909  CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
2910  __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2911  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2912  context()->Plug(eax);
2913 }
2914 
2915 
// Inline %_IsSmi: tests the Smi tag bit of the single argument and splits
// control flow to the true/false targets provided by the test context.
2916 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2917  ZoneList<Expression*>* args = expr->arguments();
2918  DCHECK(args->length() == 1);
2919 
2920  VisitForAccumulatorValue(args->at(0));
2921 
2922  Label materialize_true, materialize_false;
2923  Label* if_true = NULL;
2924  Label* if_false = NULL;
2925  Label* fall_through = NULL;
2926  context()->PrepareTest(&materialize_true, &materialize_false,
2927  &if_true, &if_false, &fall_through);
2928 
2929  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2930  __ test(eax, Immediate(kSmiTagMask));
2931  Split(zero, if_true, if_false, fall_through);
2932 
2933  context()->Plug(if_true, if_false);
2934 }
2935 
2936 
// Inline %_IsNonNegativeSmi: true iff the argument is a Smi and its sign
// bit (0x80000000) is clear — tested in a single 'test' instruction.
2937 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2938  ZoneList<Expression*>* args = expr->arguments();
2939  DCHECK(args->length() == 1);
2940 
2941  VisitForAccumulatorValue(args->at(0));
2942 
2943  Label materialize_true, materialize_false;
2944  Label* if_true = NULL;
2945  Label* if_false = NULL;
2946  Label* fall_through = NULL;
2947  context()->PrepareTest(&materialize_true, &materialize_false,
2948  &if_true, &if_false, &fall_through);
2949 
2950  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2951  __ test(eax, Immediate(kSmiTagMask | 0x80000000));
2952  Split(zero, if_true, if_false, fall_through);
2953 
2954  context()->Plug(if_true, if_false);
2955 }
2956 
2957 
// Inline %_IsObject: false for Smis and undetectable objects, true for
// null and for heap objects within the JS-object instance-type range.
// NOTE(review): lines 2974, 2976 and 2979-2982 are missing from this
// extraction (the map load and the instance-type range comparisons whose
// condition feeds the below/below_equal jumps).
2958 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2959  ZoneList<Expression*>* args = expr->arguments();
2960  DCHECK(args->length() == 1);
2961 
2962  VisitForAccumulatorValue(args->at(0));
2963 
2964  Label materialize_true, materialize_false;
2965  Label* if_true = NULL;
2966  Label* if_false = NULL;
2967  Label* fall_through = NULL;
2968  context()->PrepareTest(&materialize_true, &materialize_false,
2969  &if_true, &if_false, &fall_through);
2970 
2971  __ JumpIfSmi(eax, if_false);
2972  __ cmp(eax, isolate()->factory()->null_value());
2973  __ j(equal, if_true);
2975  // Undetectable objects behave like undefined when tested with typeof.
2977  __ test(ecx, Immediate(1 << Map::kIsUndetectable));
2978  __ j(not_zero, if_false);
2981  __ j(below, if_false);
2983  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2984  Split(below_equal, if_true, if_false, fall_through);
2985 
2986  context()->Plug(if_true, if_false);
2987 }
2988 
2989 
// Inlined %_IsSpecObject(value): true iff the argument is a heap object
// whose instance type is at or above FIRST_SPEC_OBJECT_TYPE.
2990 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2991  ZoneList<Expression*>* args = expr->arguments();
2992  DCHECK(args->length() == 1);
2993 
2994  VisitForAccumulatorValue(args->at(0));
2995 
2996  Label materialize_true, materialize_false;
2997  Label* if_true = NULL;
2998  Label* if_false = NULL;
2999  Label* fall_through = NULL;
3000  context()->PrepareTest(&materialize_true, &materialize_false,
3001  &if_true, &if_false, &fall_through);
3002 
3003  __ JumpIfSmi(eax, if_false);
// CmpObjectType loads eax's map into ebx and compares its instance type.
3004  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ebx);
3005  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3006  Split(above_equal, if_true, if_false, fall_through);
3007 
3008  context()->Plug(if_true, if_false);
3009 }
3010 
3011 
// Inlined %_IsUndetectableObject(value): true iff the argument is a heap
// object whose map has the "undetectable" bit set.
3012 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3013  ZoneList<Expression*>* args = expr->arguments();
3014  DCHECK(args->length() == 1);
3015 
3016  VisitForAccumulatorValue(args->at(0));
3017 
3018  Label materialize_true, materialize_false;
3019  Label* if_true = NULL;
3020  Label* if_false = NULL;
3021  Label* fall_through = NULL;
3022  context()->PrepareTest(&materialize_true, &materialize_false,
3023  &if_true, &if_false, &fall_through);
3024 
3025  __ JumpIfSmi(eax, if_false);
// NOTE(review): lines 3026-3027 are missing from this extraction —
// presumably the map load and bit-field byte load into ebx that the next
// `test` depends on; restore from upstream full-codegen-x87.cc.
3028  __ test(ebx, Immediate(1 << Map::kIsUndetectable));
3029  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3030  Split(not_zero, if_true, if_false, fall_through);
3031 
3032  context()->Plug(if_true, if_false);
3033 }
3034 
3035 
// Inlined check used by String wrapper valueOf optimization: a wrapper is
// "safe" when its map is cached as checked, its properties are fast, no
// own descriptor names "valueOf", and its prototype is the unmodified
// String prototype.
// NOTE(review): several source lines were dropped by this extraction
// (gaps in the embedded numbering: 3054-3056, 3060-3061, 3081-3082, 3093,
// 3101, 3104-3105, 3111, 3113-3114, 3116, 3118-3119). Each gap is flagged
// inline below; restore from upstream full-codegen-x87.cc before editing.
3036 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3037  CallRuntime* expr) {
3038  ZoneList<Expression*>* args = expr->arguments();
3039  DCHECK(args->length() == 1);
3040 
3041  VisitForAccumulatorValue(args->at(0));
3042 
3043  Label materialize_true, materialize_false, skip_lookup;
3044  Label* if_true = NULL;
3045  Label* if_false = NULL;
3046  Label* fall_through = NULL;
3047  context()->PrepareTest(&materialize_true, &materialize_false,
3048  &if_true, &if_false, &fall_through);
3049 
3050  __ AssertNotSmi(eax);
3051 
3052  // Check whether this map has already been checked to be safe for default
3053  // valueOf.
// NOTE(review): lines 3054-3056 missing — presumably the map load and the
// test of Map::kStringWrapperSafeForDefaultValueOf in the bit field.
3057  __ j(not_zero, &skip_lookup);
3058 
3059  // Check for fast case object. Return false for slow case objects.
// NOTE(review): lines 3060-3061 missing — presumably loading the
// properties backing store and its map into ecx.
3062  __ cmp(ecx, isolate()->factory()->hash_table_map());
3063  __ j(equal, if_false);
3064 
3065  // Look for valueOf string in the descriptor array, and indicate false if
3066  // found. Since we omit an enumeration index check, if it is added via a
3067  // transition that shares its descriptor array, this is a false positive.
3068  Label entry, loop, done;
3069 
3070  // Skip loop if no descriptors are valid.
3071  __ NumberOfOwnDescriptors(ecx, ebx);
3072  __ cmp(ecx, 0);
3073  __ j(equal, &done);
3074 
3075  __ LoadInstanceDescriptors(ebx, ebx);
3076  // ebx: descriptor array.
3077  // ecx: valid entries in the descriptor array.
3078  // Calculate the end of the descriptor array.
3079  STATIC_ASSERT(kSmiTag == 0);
3080  STATIC_ASSERT(kSmiTagSize == 1);
// NOTE(review): lines 3081-3082 missing — presumably STATIC_ASSERTs /
// scaling setup (descriptor entry size) feeding the `lea` below.
3083  __ lea(ecx, Operand(ebx, ecx, times_4, DescriptorArray::kFirstOffset));
3084  // Calculate location of the first key name.
3085  __ add(ebx, Immediate(DescriptorArray::kFirstOffset));
3086  // Loop through all the keys in the descriptor array. If one of these is the
3087  // internalized string "valueOf" the result is false.
3088  __ jmp(&entry);
3089  __ bind(&loop);
3090  __ mov(edx, FieldOperand(ebx, 0));
3091  __ cmp(edx, isolate()->factory()->value_of_string());
3092  __ j(equal, if_false);
// NOTE(review): line 3093 missing — presumably the loop-step `add` that
// advances ebx to the next descriptor entry.
3094  __ bind(&entry);
3095  __ cmp(ebx, ecx);
3096  __ j(not_equal, &loop);
3097 
3098  __ bind(&done);
3099 
3100  // Reload map as register ebx was used as temporary above.
// NOTE(review): line 3101 missing — the map reload described by the
// comment above.
3102 
3103  // Set the bit in the map to indicate that there is no local valueOf field.
// NOTE(review): lines 3104-3105 missing — the `or` into the map's bit
// field described by the comment above.
3106 
3107  __ bind(&skip_lookup);
3108 
3109  // If a valueOf property is not found on the object check that its
3110  // prototype is the un-modified String prototype. If not result is false.
// NOTE(review): line 3111 missing — presumably the prototype load into
// ecx that the smi check below inspects.
3112  __ JumpIfSmi(ecx, if_false);
// NOTE(review): lines 3113-3114, 3116 and 3118-3119 missing — presumably
// loading the global context and comparing ecx against the cached
// String-function prototype; the `cmp` below is truncated accordingly.
3115  __ mov(edx,
3117  __ cmp(ecx,
3120  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3121  Split(equal, if_true, if_false, fall_through);
3122 
3123  context()->Plug(if_true, if_false);
3124 }
3125 
3126 
// Inlined %_IsFunction(value): true iff the argument is a heap object of
// instance type JS_FUNCTION_TYPE.
3127 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3128  ZoneList<Expression*>* args = expr->arguments();
3129  DCHECK(args->length() == 1);
3130 
3131  VisitForAccumulatorValue(args->at(0));
3132 
3133  Label materialize_true, materialize_false;
3134  Label* if_true = NULL;
3135  Label* if_false = NULL;
3136  Label* fall_through = NULL;
3137  context()->PrepareTest(&materialize_true, &materialize_false,
3138  &if_true, &if_false, &fall_through);
3139 
3140  __ JumpIfSmi(eax, if_false);
3141  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
3142  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3143  Split(equal, if_true, if_false, fall_through);
3144 
3145  context()->Plug(if_true, if_false);
3146 }
3147 
3148 
// Inlined %_IsMinusZero(value): true iff the argument is a HeapNumber with
// bit pattern sign=1, exponent/mantissa=0 (i.e. exponent word 0x80000000
// and mantissa word 0).
3149 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3150  ZoneList<Expression*>* args = expr->arguments();
3151  DCHECK(args->length() == 1);
3152 
3153  VisitForAccumulatorValue(args->at(0));
3154 
3155  Label materialize_true, materialize_false;
3156  Label* if_true = NULL;
3157  Label* if_false = NULL;
3158  Label* fall_through = NULL;
3159  context()->PrepareTest(&materialize_true, &materialize_false,
3160  &if_true, &if_false, &fall_through);
3161 
// Non-HeapNumbers (including smis) can never be -0.
3162  Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
3163  __ CheckMap(eax, map, if_false, DO_SMI_CHECK);
3164  // Check if the exponent half is 0x80000000. Comparing against 1 and
3165  // checking for overflow is the shortest possible encoding.
// 0x80000000 - 1 overflows (INT_MIN cmp 1 sets OF); no other value does.
3166  __ cmp(FieldOperand(eax, HeapNumber::kExponentOffset), Immediate(0x1));
3167  __ j(no_overflow, if_false);
3168  __ cmp(FieldOperand(eax, HeapNumber::kMantissaOffset), Immediate(0x0));
3169  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3170  Split(equal, if_true, if_false, fall_through);
3171 
3172  context()->Plug(if_true, if_false);
3173 }
3174 
3175 
3176 
// Inlined %_IsArray(value): true iff the argument is a heap object of
// instance type JS_ARRAY_TYPE.
3177 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3178  ZoneList<Expression*>* args = expr->arguments();
3179  DCHECK(args->length() == 1);
3180 
3181  VisitForAccumulatorValue(args->at(0));
3182 
3183  Label materialize_true, materialize_false;
3184  Label* if_true = NULL;
3185  Label* if_false = NULL;
3186  Label* fall_through = NULL;
3187  context()->PrepareTest(&materialize_true, &materialize_false,
3188  &if_true, &if_false, &fall_through);
3189 
3190  __ JumpIfSmi(eax, if_false);
3191  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
3192  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3193  Split(equal, if_true, if_false, fall_through);
3194 
3195  context()->Plug(if_true, if_false);
3196 }
3197 
3198 
// Inlined %_IsRegExp(value): true iff the argument is a heap object of
// instance type JS_REGEXP_TYPE.
3199 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3200  ZoneList<Expression*>* args = expr->arguments();
3201  DCHECK(args->length() == 1);
3202 
3203  VisitForAccumulatorValue(args->at(0));
3204 
3205  Label materialize_true, materialize_false;
3206  Label* if_true = NULL;
3207  Label* if_false = NULL;
3208  Label* fall_through = NULL;
3209  context()->PrepareTest(&materialize_true, &materialize_false,
3210  &if_true, &if_false, &fall_through);
3211 
3212  __ JumpIfSmi(eax, if_false);
3213  __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
3214  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3215  Split(equal, if_true, if_false, fall_through);
3216 
3217  context()->Plug(if_true, if_false);
3218 }
3219 
3220 
3221 
// Inlined %_IsConstructCall(): true iff the calling frame (skipping any
// arguments-adaptor frame) carries the CONSTRUCT frame marker.
3222 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3223  DCHECK(expr->arguments()->length() == 0);
3224 
3225  Label materialize_true, materialize_false;
3226  Label* if_true = NULL;
3227  Label* if_false = NULL;
3228  Label* fall_through = NULL;
3229  context()->PrepareTest(&materialize_true, &materialize_false,
3230  &if_true, &if_false, &fall_through);
3231 
3232  // Get the frame pointer for the calling frame.
// NOTE(review): line 3233 is missing from this extraction — the caller-fp
// load described by the comment above; restore from upstream.
3234 
3235  // Skip the arguments adaptor frame if it exists.
3236  Label check_frame_marker;
// NOTE(review): lines 3237-3238 and 3240 missing — presumably the
// adaptor-frame marker compare and the hop to the adaptor's caller frame.
3239  __ j(not_equal, &check_frame_marker);
3241 
3242  // Check the marker in the calling frame.
3243  __ bind(&check_frame_marker);
// NOTE(review): line 3244 missing — the `cmp` whose Immediate operand
// continues on the next line.
3245  Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
3246  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3247  Split(equal, if_true, if_false, fall_through);
3248 
3249  context()->Plug(if_true, if_false);
3250 }
3251 
3252 
// Inlined %_ObjectEquals(a, b): pointer-identity comparison of the two
// argument values (no coercion, no structural equality).
3253 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3254  ZoneList<Expression*>* args = expr->arguments();
3255  DCHECK(args->length() == 2);
3256 
3257  // Load the two objects into registers and perform the comparison.
3258  VisitForStackValue(args->at(0));
3259  VisitForAccumulatorValue(args->at(1));
3260 
3261  Label materialize_true, materialize_false;
3262  Label* if_true = NULL;
3263  Label* if_false = NULL;
3264  Label* fall_through = NULL;
3265  context()->PrepareTest(&materialize_true, &materialize_false,
3266  &if_true, &if_false, &fall_through);
3267 
// First argument comes off the stack; second is already in eax.
3268  __ pop(ebx);
3269  __ cmp(eax, ebx);
3270  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3271  Split(equal, if_true, if_false, fall_through);
3272 
3273  context()->Plug(if_true, if_false);
3274 }
3275 
3276 
// Inlined %_Arguments(index): reads one element of the current function's
// arguments via ArgumentsAccessStub; result lands in eax.
3277 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3278  ZoneList<Expression*>* args = expr->arguments();
3279  DCHECK(args->length() == 1);
3280 
3281  // ArgumentsAccessStub expects the key in edx and the formal
3282  // parameter count in eax.
3283  VisitForAccumulatorValue(args->at(0));
3284  __ mov(edx, eax);
3285  __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
3286  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3287  __ CallStub(&stub);
3288  context()->Plug(eax);
3289 }
3290 
3291 
// Inlined %_ArgumentsLength(): yields the actual argument count as a smi —
// the formal parameter count, unless an arguments-adaptor frame is present,
// in which case the adaptor's recorded count is used.
3292 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3293  DCHECK(expr->arguments()->length() == 0);
3294 
3295  Label exit;
3296  // Get the number of formal parameters.
3297  __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
3298 
3299  // Check if the calling frame is an arguments adaptor frame.
// NOTE(review): lines 3300-3302 are missing from this extraction —
// presumably the caller-fp load and adaptor-frame marker compare feeding
// the branch below; restore from upstream full-codegen-x87.cc.
3303  __ j(not_equal, &exit);
3304 
3305  // Arguments adaptor case: Read the arguments length from the
3306  // adaptor frame.
// NOTE(review): line 3307 missing — the load of the adaptor frame's
// arguments-length slot into eax described by the comment above.
3308 
3309  __ bind(&exit);
3310  __ AssertSmi(eax);
3311  context()->Plug(eax);
3312 }
3313 
3314 
// Inlined %_ClassOf(value): computes the [[Class]]-style name of the
// argument — null for smis and non-objects, "Function" for callables,
// "Object" when the constructor is not a function, otherwise the
// constructor's instance class name.
// NOTE(review): several source lines were dropped by this extraction
// (gaps at 3329, 3333-3334, 3338, 3342, 3345, 3351-3352); each is flagged
// inline. Restore from upstream full-codegen-x87.cc before editing.
3315 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3316  ZoneList<Expression*>* args = expr->arguments();
3317  DCHECK(args->length() == 1);
3318  Label done, null, function, non_function_constructor;
3319 
3320  VisitForAccumulatorValue(args->at(0));
3321 
3322  // If the object is a smi, we return null.
3323  __ JumpIfSmi(eax, &null);
3324 
3325  // Check that the object is a JS object but take special care of JS
3326  // functions to make sure they have 'Function' as their class.
3327  // Assume that there are only two callable types, and one of them is at
3328  // either end of the type range for JS object types. Saves extra comparisons.
// NOTE(review): line 3329 missing — presumably a STATIC_ASSERT about the
// spec-object type range assumed by the comparisons below.
3330  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, eax);
3331  // Map is now in eax.
3332  __ j(below, &null);
// NOTE(review): lines 3333-3334 missing — presumably STATIC_ASSERT plus
// the comparison establishing `equal` for the first callable type.
3335  __ j(equal, &function);
3336 
3337  __ CmpInstanceType(eax, LAST_SPEC_OBJECT_TYPE);
// NOTE(review): line 3338 missing — the first half of the STATIC_ASSERT
// whose continuation appears on the next line.
3339  LAST_SPEC_OBJECT_TYPE - 1);
3340  __ j(equal, &function);
3341  // Assume that there is no larger type.
// NOTE(review): line 3342 missing — presumably the STATIC_ASSERT backing
// the "no larger type" assumption.
3343 
3344  // Check if the constructor in the map is a JS function.
// NOTE(review): line 3345 missing — presumably the constructor load from
// the map into eax.
3346  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
3347  __ j(not_equal, &non_function_constructor);
3348 
3349  // eax now contains the constructor function. Grab the
3350  // instance class name from there.
// NOTE(review): lines 3351-3352 missing — the shared-function-info and
// instance-class-name loads described by the comment above.
3353  __ jmp(&done);
3354 
3355  // Functions have class 'Function'.
3356  __ bind(&function);
3357  __ mov(eax, isolate()->factory()->Function_string());
3358  __ jmp(&done);
3359 
3360  // Objects with a non-function constructor have class 'Object'.
3361  __ bind(&non_function_constructor);
3362  __ mov(eax, isolate()->factory()->Object_string());
3363  __ jmp(&done);
3364 
3365  // Non-JS objects have class null.
3366  __ bind(&null);
3367  __ mov(eax, isolate()->factory()->null_value());
3368 
3369  // All done.
3370  __ bind(&done);
3371 
3372  context()->Plug(eax);
3373 }
3374 
3375 
3376 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3377  // Load the arguments on the stack and call the stub.
3378  SubStringStub stub(isolate());
3379  ZoneList<Expression*>* args = expr->arguments();
3380  DCHECK(args->length() == 3);
3381  VisitForStackValue(args->at(0));
3382  VisitForStackValue(args->at(1));
3383  VisitForStackValue(args->at(2));
3384  __ CallStub(&stub);
3385  context()->Plug(eax);
3386 }
3387 
3388 
3389 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3390  // Load the arguments on the stack and call the stub.
3391  RegExpExecStub stub(isolate());
3392  ZoneList<Expression*>* args = expr->arguments();
3393  DCHECK(args->length() == 4);
3394  VisitForStackValue(args->at(0));
3395  VisitForStackValue(args->at(1));
3396  VisitForStackValue(args->at(2));
3397  VisitForStackValue(args->at(3));
3398  __ CallStub(&stub);
3399  context()->Plug(eax);
3400 }
3401 
3402 
// Inlined %_ValueOf(object): for a JSValue wrapper returns the wrapped
// primitive; any other value (including smis) is returned unchanged.
3403 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3404  ZoneList<Expression*>* args = expr->arguments();
3405  DCHECK(args->length() == 1);
3406 
3407  VisitForAccumulatorValue(args->at(0)); // Load the object.
3408 
3409  Label done;
3410  // If the object is a smi return the object.
3411  __ JumpIfSmi(eax, &done, Label::kNear);
3412  // If the object is not a value type, return the object.
3413  __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
3414  __ j(not_equal, &done, Label::kNear);
// NOTE(review): line 3415 is missing from this extraction — presumably
// the load of JSValue::kValueOffset into eax; restore from upstream.
3416 
3417  __ bind(&done);
3418  context()->Plug(eax);
3419 }
3420 
3421 
// Inlined %_DateField(date, index): reads a cached field of a JSDate.
// Index 0 (the time value) is always read directly; cached fields are read
// inline only while the isolate's date-cache stamp matches the object's,
// otherwise a C function recomputes them. Non-JSDate receivers throw.
3422 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3423  ZoneList<Expression*>* args = expr->arguments();
3424  DCHECK(args->length() == 2);
3425  DCHECK_NE(NULL, args->at(1)->AsLiteral());
// The field index must be a compile-time smi literal.
3426  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3427 
3428  VisitForAccumulatorValue(args->at(0)); // Load the object.
3429 
3430  Label runtime, done, not_date_object;
3431  Register object = eax;
3432  Register result = eax;
3433  Register scratch = ecx;
3434 
3435  __ JumpIfSmi(object, &not_date_object);
3436  __ CmpObjectType(object, JS_DATE_TYPE, scratch);
3437  __ j(not_equal, &not_date_object);
3438 
3439  if (index->value() == 0) {
3440  __ mov(result, FieldOperand(object, JSDate::kValueOffset));
3441  __ jmp(&done);
3442  } else {
3443  if (index->value() < JSDate::kFirstUncachedField) {
// Fast path: the cached field is valid only while the global date-cache
// stamp equals the stamp recorded on this JSDate.
3444  ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3445  __ mov(scratch, Operand::StaticVariable(stamp));
3446  __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
3447  __ j(not_equal, &runtime, Label::kNear);
3448  __ mov(result, FieldOperand(object, JSDate::kValueOffset +
3449  kPointerSize * index->value()));
3450  __ jmp(&done);
3451  }
// Slow path: call the C date-field getter with (object, index).
3452  __ bind(&runtime);
3453  __ PrepareCallCFunction(2, scratch);
3454  __ mov(Operand(esp, 0), object);
3455  __ mov(Operand(esp, 1 * kPointerSize), Immediate(index));
3456  __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3457  __ jmp(&done);
3458  }
3459 
3460  __ bind(&not_date_object);
3461  __ CallRuntime(Runtime::kThrowNotDateError, 0);
3462  __ bind(&done);
3463  context()->Plug(result);
3464 }
3465 
3466 
// Inlined %_OneByteSeqStringSetChar(index, value, string): stores a single
// byte into a sequential one-byte string and returns the string. Index and
// value arrive as smis and are untagged before the store.
3467 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3468  ZoneList<Expression*>* args = expr->arguments();
3469  DCHECK_EQ(3, args->length());
3470 
3471  Register string = eax;
3472  Register index = ebx;
3473  Register value = ecx;
3474 
3475  VisitForStackValue(args->at(0)); // index
3476  VisitForStackValue(args->at(1)); // value
3477  VisitForAccumulatorValue(args->at(2)); // string
3478 
// Pops happen in reverse push order: value was pushed last.
3479  __ pop(value);
3480  __ pop(index);
3481 
3482  if (FLAG_debug_code) {
3483  __ test(value, Immediate(kSmiTagMask));
3484  __ Check(zero, kNonSmiValue);
3485  __ test(index, Immediate(kSmiTagMask));
3486  __ Check(zero, kNonSmiValue);
3487  }
3488 
3489  __ SmiUntag(value);
3490  __ SmiUntag(index);
3491 
3492  if (FLAG_debug_code) {
3493  static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3494  __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3495  }
3496 
// times_1 scaling: one byte per character in a one-byte string.
3497  __ mov_b(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
3498  value);
3499  context()->Plug(string);
3500 }
3501 
3502 
// Inlined %_TwoByteSeqStringSetChar(index, value, string): stores a single
// 16-bit code unit into a sequential two-byte string and returns the
// string. The index stays smi-tagged for addressing (tag shift == one
// byte of the two-byte element scale).
3503 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3504  ZoneList<Expression*>* args = expr->arguments();
3505  DCHECK_EQ(3, args->length());
3506 
3507  Register string = eax;
3508  Register index = ebx;
3509  Register value = ecx;
3510 
3511  VisitForStackValue(args->at(0)); // index
3512  VisitForStackValue(args->at(1)); // value
3513  VisitForAccumulatorValue(args->at(2)); // string
3514  __ pop(value);
3515  __ pop(index);
3516 
3517  if (FLAG_debug_code) {
3518  __ test(value, Immediate(kSmiTagMask));
3519  __ Check(zero, kNonSmiValue);
3520  __ test(index, Immediate(kSmiTagMask));
3521  __ Check(zero, kNonSmiValue);
// The check helper wants an untagged index; re-tag afterwards so the
// release-mode addressing below is unchanged.
3522  __ SmiUntag(index);
3523  static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3524  __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3525  __ SmiTag(index);
3526  }
3527 
3528  __ SmiUntag(value);
3529  // No need to untag a smi for two-byte addressing.
3530  __ mov_w(FieldOperand(string, index, times_1, SeqTwoByteString::kHeaderSize),
3531  value);
3532  context()->Plug(string);
3533 }
3534 
3535 
3536 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3537  // Load the arguments on the stack and call the runtime function.
3538  ZoneList<Expression*>* args = expr->arguments();
3539  DCHECK(args->length() == 2);
3540  VisitForStackValue(args->at(0));
3541  VisitForStackValue(args->at(1));
3542 
3543  __ CallRuntime(Runtime::kMathPowSlow, 2);
3544  context()->Plug(eax);
3545 }
3546 
3547 
// Inlined %_SetValueOf(object, value): if object is a JSValue wrapper,
// stores value into it (with write barrier); in all cases the result is
// the value.
3548 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3549  ZoneList<Expression*>* args = expr->arguments();
3550  DCHECK(args->length() == 2);
3551 
3552  VisitForStackValue(args->at(0)); // Load the object.
3553  VisitForAccumulatorValue(args->at(1)); // Load the value.
3554  __ pop(ebx); // eax = value. ebx = object.
3555 
3556  Label done;
3557  // If the object is a smi, return the value.
3558  __ JumpIfSmi(ebx, &done, Label::kNear);
3559 
3560  // If the object is not a value type, return the value.
3561  __ CmpObjectType(ebx, JS_VALUE_TYPE, ecx);
3562  __ j(not_equal, &done, Label::kNear);
3563 
3564  // Store the value.
// NOTE(review): line 3565 is missing from this extraction — the store of
// eax into JSValue::kValueOffset described by the comment above; restore
// from upstream full-codegen-x87.cc.
3566 
3567  // Update the write barrier. Save the value as it will be
3568  // overwritten by the write barrier code and is needed afterward.
3569  __ mov(edx, eax);
3570  __ RecordWriteField(ebx, JSValue::kValueOffset, edx, ecx, kDontSaveFPRegs);
3571 
3572  __ bind(&done);
3573  context()->Plug(eax);
3574 }
3575 
3576 
// Inlined %_NumberToString(number): converts via NumberToStringStub, which
// takes its argument in eax and leaves the string result in eax.
3577 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3578  ZoneList<Expression*>* args = expr->arguments();
3579  DCHECK_EQ(args->length(), 1);
3580 
3581  // Load the argument into eax and call the stub.
3582  VisitForAccumulatorValue(args->at(0));
3583 
3584  NumberToStringStub stub(isolate());
3585  __ CallStub(&stub);
3586  context()->Plug(eax);
3587 }
3588 
3589 
// Inlined %_StringCharFromCode(code): builds the one-character string for
// a char code via StringCharFromCodeGenerator (fast path inline, slow path
// appended after the jump); the result register is ebx.
3590 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3591  ZoneList<Expression*>* args = expr->arguments();
3592  DCHECK(args->length() == 1);
3593 
3594  VisitForAccumulatorValue(args->at(0));
3595 
3596  Label done;
// Input char code in eax, result string in ebx.
3597  StringCharFromCodeGenerator generator(eax, ebx);
3598  generator.GenerateFast(masm_);
3599  __ jmp(&done);
3600 
// Slow-path code is emitted out of line, reached from inside the fast path.
3601  NopRuntimeCallHelper call_helper;
3602  generator.GenerateSlow(masm_, call_helper);
3603 
3604  __ bind(&done);
3605  context()->Plug(ebx);
3606 }
3607 
3608 
// Inlined %_StringCharCodeAt(string, index): yields the char code at the
// index, NaN when the index is out of range, or undefined (to trigger
// conversion in the caller) when the index needs coercion.
3609 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3610  ZoneList<Expression*>* args = expr->arguments();
3611  DCHECK(args->length() == 2);
3612 
3613  VisitForStackValue(args->at(0));
3614  VisitForAccumulatorValue(args->at(1));
3615 
3616  Register object = ebx;
3617  Register index = eax;
3618  Register result = edx;
3619 
3620  __ pop(object);
3621 
3622  Label need_conversion;
3623  Label index_out_of_range;
3624  Label done;
3625  StringCharCodeAtGenerator generator(object,
3626  index,
3627  result,
3628  &need_conversion,
3629  &need_conversion,
3630  &index_out_of_range,
// NOTE(review): line 3631 is missing from this extraction — the final
// constructor argument (index-encoding flag) of the generator call;
// restore from upstream full-codegen-x87.cc.
3632  generator.GenerateFast(masm_);
3633  __ jmp(&done);
3634 
3635  __ bind(&index_out_of_range);
3636  // When the index is out of range, the spec requires us to return
3637  // NaN.
3638  __ Move(result, Immediate(isolate()->factory()->nan_value()));
3639  __ jmp(&done);
3640 
3641  __ bind(&need_conversion);
3642  // Move the undefined value into the result register, which will
3643  // trigger conversion.
3644  __ Move(result, Immediate(isolate()->factory()->undefined_value()));
3645  __ jmp(&done);
3646 
3647  NopRuntimeCallHelper call_helper;
3648  generator.GenerateSlow(masm_, call_helper);
3649 
3650  __ bind(&done);
3651  context()->Plug(result);
3652 }
3653 
3654 
// Inlined %_StringCharAt(string, index): yields the one-character string
// at the index, the empty string when out of range, or smi 0 (to trigger
// conversion) when the index needs coercion.
3655 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3656  ZoneList<Expression*>* args = expr->arguments();
3657  DCHECK(args->length() == 2);
3658 
3659  VisitForStackValue(args->at(0));
3660  VisitForAccumulatorValue(args->at(1));
3661 
3662  Register object = ebx;
3663  Register index = eax;
3664  Register scratch = edx;
3665  Register result = eax;
3666 
3667  __ pop(object);
3668 
3669  Label need_conversion;
3670  Label index_out_of_range;
3671  Label done;
3672  StringCharAtGenerator generator(object,
3673  index,
3674  scratch,
3675  result,
3676  &need_conversion,
3677  &need_conversion,
3678  &index_out_of_range,
// NOTE(review): line 3679 is missing from this extraction — the final
// constructor argument (index-encoding flag) of the generator call;
// restore from upstream full-codegen-x87.cc.
3680  generator.GenerateFast(masm_);
3681  __ jmp(&done);
3682 
3683  __ bind(&index_out_of_range);
3684  // When the index is out of range, the spec requires us to return
3685  // the empty string.
3686  __ Move(result, Immediate(isolate()->factory()->empty_string()));
3687  __ jmp(&done);
3688 
3689  __ bind(&need_conversion);
3690  // Move smi zero into the result register, which will trigger
3691  // conversion.
3692  __ Move(result, Immediate(Smi::FromInt(0)));
3693  __ jmp(&done);
3694 
3695  NopRuntimeCallHelper call_helper;
3696  generator.GenerateSlow(masm_, call_helper);
3697 
3698  __ bind(&done);
3699  context()->Plug(result);
3700 }
3701 
3702 
// Inlined %_StringAdd(left, right): concatenates via StringAddStub, which
// expects the left operand in edx and the right operand in eax; the
// result is returned in eax.
3703 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3704  ZoneList<Expression*>* args = expr->arguments();
3705  DCHECK_EQ(2, args->length());
3706  VisitForStackValue(args->at(0));
3707  VisitForAccumulatorValue(args->at(1));
3708 
// Left operand was pushed; recover it into edx for the stub.
3709  __ pop(edx);
3710  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
3711  __ CallStub(&stub);
3712  context()->Plug(eax);
3713 }
3714 
3715 
3716 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3717  ZoneList<Expression*>* args = expr->arguments();
3718  DCHECK_EQ(2, args->length());
3719 
3720  VisitForStackValue(args->at(0));
3721  VisitForStackValue(args->at(1));
3722 
3723  StringCompareStub stub(isolate());
3724  __ CallStub(&stub);
3725  context()->Plug(eax);
3726 }
3727 
3728 
// Inlined %_CallFunction(receiver, ...args, function): invokes the last
// argument with the pushed receiver and arguments; falls back to
// Runtime::kCall when the callee is not a plain JSFunction (e.g. a proxy).
3729 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3730  ZoneList<Expression*>* args = expr->arguments();
3731  DCHECK(args->length() >= 2);
3732 
3733  int arg_count = args->length() - 2; // 2 ~ receiver and function.
// Push receiver plus all call arguments; the function itself is evaluated
// last, into the accumulator.
3734  for (int i = 0; i < arg_count + 1; ++i) {
3735  VisitForStackValue(args->at(i));
3736  }
3737  VisitForAccumulatorValue(args->last()); // Function.
3738 
3739  Label runtime, done;
3740  // Check for non-function argument (including proxy).
3741  __ JumpIfSmi(eax, &runtime);
3742  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
3743  __ j(not_equal, &runtime);
3744 
3745  // InvokeFunction requires the function in edi. Move it in there.
3746  __ mov(edi, result_register());
3747  ParameterCount count(arg_count);
3748  __ InvokeFunction(edi, count, CALL_FUNCTION, NullCallWrapper());
// NOTE(review): line 3749 is missing from this extraction — presumably
// the context-register restore after the call; restore from upstream.
3750  __ jmp(&done);
3751 
3752  __ bind(&runtime);
3753  __ push(eax);
3754  __ CallRuntime(Runtime::kCall, args->length());
3755  __ bind(&done);
3756 
3757  context()->Plug(eax);
3758 }
3759 
3760 
// Inlined %_RegExpConstructResult(length, index, input): builds a RegExp
// result object via RegExpConstructResultStub. The stub takes length in
// ecx, index in ebx and the input string in eax.
3761 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3762  // Load the arguments on the stack and call the stub.
3763  RegExpConstructResultStub stub(isolate());
3764  ZoneList<Expression*>* args = expr->arguments();
3765  DCHECK(args->length() == 3);
3766  VisitForStackValue(args->at(0));
3767  VisitForStackValue(args->at(1));
3768  VisitForAccumulatorValue(args->at(2));
// Pops are in reverse push order: index (ebx) first, then length (ecx).
3769  __ pop(ebx);
3770  __ pop(ecx);
3771  __ CallStub(&stub);
3772  context()->Plug(eax);
3773 }
3774 
3775 
// Inlined %_GetFromCache(cacheId, key): looks the key up in one of the
// native context's JSFunction result caches. The fast path only probes the
// cache's finger entry; on a miss Runtime::kGetFromCache performs the full
// lookup (and may populate the cache).
3776 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3777  ZoneList<Expression*>* args = expr->arguments();
3778  DCHECK_EQ(2, args->length());
3779 
3780  DCHECK_NE(NULL, args->at(0)->AsLiteral());
// The cache id must be a compile-time smi literal.
3781  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3782 
3783  Handle<FixedArray> jsfunction_result_caches(
3784  isolate()->native_context()->jsfunction_result_caches());
3785  if (jsfunction_result_caches->length() <= cache_id) {
3786  __ Abort(kAttemptToUseUndefinedCache);
3787  __ mov(eax, isolate()->factory()->undefined_value());
3788  context()->Plug(eax);
3789  return;
3790  }
3791 
3792  VisitForAccumulatorValue(args->at(1));
3793 
3794  Register key = eax;
3795  Register cache = ebx;
3796  Register tmp = ecx;
// NOTE(review): lines 3797 and 3799-3800 are missing from this extraction
// — presumably the native-context loads whose continuations appear below;
// the two `__ mov(cache,` lines are truncated accordingly. Restore from
// upstream full-codegen-x87.cc.
3798  __ mov(cache,
3801  __ mov(cache,
3802  FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3803 
3804  Label done, not_found;
3805  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
// NOTE(review): line 3806 missing — presumably the load of the cache's
// finger index into tmp, as described by the comment below.
3807  // tmp now holds finger offset as a smi.
3808  __ cmp(key, FixedArrayElementOperand(cache, tmp));
3809  __ j(not_equal, &not_found);
3810 
// Hit: the cached value sits one element after the cached key.
3811  __ mov(eax, FixedArrayElementOperand(cache, tmp, 1));
3812  __ jmp(&done);
3813 
3814  __ bind(&not_found);
3815  // Call runtime to perform the lookup.
3816  __ push(cache);
3817  __ push(key);
3818  __ CallRuntime(Runtime::kGetFromCache, 2);
3819 
3820  __ bind(&done);
3821  context()->Plug(eax);
3822 }
3823 
3824 
// Inlined %_HasCachedArrayIndex(string): true iff the string's hash field
// carries a cached array index.
3825 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3826  ZoneList<Expression*>* args = expr->arguments();
3827  DCHECK(args->length() == 1);
3828 
3829  VisitForAccumulatorValue(args->at(0));
3830 
3831  __ AssertString(eax);
3832 
3833  Label materialize_true, materialize_false;
3834  Label* if_true = NULL;
3835  Label* if_false = NULL;
3836  Label* fall_through = NULL;
3837  context()->PrepareTest(&materialize_true, &materialize_false,
3838  &if_true, &if_false, &fall_through);
3839 
// NOTE(review): lines 3840-3841 are missing from this extraction —
// presumably the `test` of the string's hash field against the
// cached-array-index mask that the `zero` split below depends on; restore
// from upstream full-codegen-x87.cc.
3842  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3843  Split(zero, if_true, if_false, fall_through);
3844 
3845  context()->Plug(if_true, if_false);
3846 }
3847 
3848 
// Inlined %_GetCachedArrayIndex(string): extracts the array index cached
// in the string's hash field, as a smi in eax.
3849 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3850  ZoneList<Expression*>* args = expr->arguments();
3851  DCHECK(args->length() == 1);
3852  VisitForAccumulatorValue(args->at(0));
3853 
3854  __ AssertString(eax);
3855 
// NOTE(review): line 3856 is missing from this extraction — presumably
// the load of the string's hash field into eax that IndexFromHash decodes;
// restore from upstream full-codegen-x87.cc.
3857  __ IndexFromHash(eax, eax);
3858 
3859  context()->Plug(eax);
3860 }
3861 
3862 
3863 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
3864  Label bailout, done, one_char_separator, long_separator,
3865  non_trivial_array, not_size_one_array, loop,
3866  loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
3867 
3868  ZoneList<Expression*>* args = expr->arguments();
3869  DCHECK(args->length() == 2);
3870  // We will leave the separator on the stack until the end of the function.
3871  VisitForStackValue(args->at(1));
3872  // Load this to eax (= array)
3873  VisitForAccumulatorValue(args->at(0));
3874  // All aliases of the same register have disjoint lifetimes.
3875  Register array = eax;
3876  Register elements = no_reg; // Will be eax.
3877 
3878  Register index = edx;
3879 
3880  Register string_length = ecx;
3881 
3882  Register string = esi;
3883 
3884  Register scratch = ebx;
3885 
3886  Register array_length = edi;
3887  Register result_pos = no_reg; // Will be edi.
3888 
3889  // Separator operand is already pushed.
3890  Operand separator_operand = Operand(esp, 2 * kPointerSize);
3891  Operand result_operand = Operand(esp, 1 * kPointerSize);
3892  Operand array_length_operand = Operand(esp, 0);
3893  __ sub(esp, Immediate(2 * kPointerSize));
3894  __ cld();
3895  // Check that the array is a JSArray
3896  __ JumpIfSmi(array, &bailout);
3897  __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
3898  __ j(not_equal, &bailout);
3899 
3900  // Check that the array has fast elements.
3901  __ CheckFastElements(scratch, &bailout);
3902 
3903  // If the array has length zero, return the empty string.
3904  __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
3905  __ SmiUntag(array_length);
3906  __ j(not_zero, &non_trivial_array);
3907  __ mov(result_operand, isolate()->factory()->empty_string());
3908  __ jmp(&done);
3909 
3910  // Save the array length.
3911  __ bind(&non_trivial_array);
3912  __ mov(array_length_operand, array_length);
3913 
3914  // Save the FixedArray containing array's elements.
3915  // End of array's live range.
3916  elements = array;
3917  __ mov(elements, FieldOperand(array, JSArray::kElementsOffset));
3918  array = no_reg;
3919 
3920 
3921  // Check that all array elements are sequential one-byte strings, and
3922  // accumulate the sum of their lengths, as a smi-encoded value.
3923  __ Move(index, Immediate(0));
3924  __ Move(string_length, Immediate(0));
3925  // Loop condition: while (index < length).
3926  // Live loop registers: index, array_length, string,
3927  // scratch, string_length, elements.
3928  if (generate_debug_code_) {
3929  __ cmp(index, array_length);
3930  __ Assert(less, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
3931  }
3932  __ bind(&loop);
3933  __ mov(string, FieldOperand(elements,
3934  index,
3937  __ JumpIfSmi(string, &bailout);
3938  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
3939  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3940  __ and_(scratch, Immediate(
3942  __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
3943  __ j(not_equal, &bailout);
3944  __ add(string_length,
3946  __ j(overflow, &bailout);
3947  __ add(index, Immediate(1));
3948  __ cmp(index, array_length);
3949  __ j(less, &loop);
3950 
3951  // If array_length is 1, return elements[0], a string.
3952  __ cmp(array_length, 1);
3953  __ j(not_equal, &not_size_one_array);
3954  __ mov(scratch, FieldOperand(elements, FixedArray::kHeaderSize));
3955  __ mov(result_operand, scratch);
3956  __ jmp(&done);
3957 
3958  __ bind(&not_size_one_array);
3959 
3960  // End of array_length live range.
3961  result_pos = array_length;
3962  array_length = no_reg;
3963 
3964  // Live registers:
3965  // string_length: Sum of string lengths, as a smi.
3966  // elements: FixedArray of strings.
3967 
3968  // Check that the separator is a flat one-byte string.
3969  __ mov(string, separator_operand);
3970  __ JumpIfSmi(string, &bailout);
3971  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
3972  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3973  __ and_(scratch, Immediate(
3975  __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
3976  __ j(not_equal, &bailout);
3977 
3978  // Add (separator length times array_length) - separator length
3979  // to string_length.
3980  __ mov(scratch, separator_operand);
3981  __ mov(scratch, FieldOperand(scratch, SeqOneByteString::kLengthOffset));
3982  __ sub(string_length, scratch); // May be negative, temporarily.
3983  __ imul(scratch, array_length_operand);
3984  __ j(overflow, &bailout);
3985  __ add(string_length, scratch);
3986  __ j(overflow, &bailout);
3987 
3988  __ shr(string_length, 1);
3989  // Live registers and stack values:
3990  // string_length
3991  // elements
3992  __ AllocateOneByteString(result_pos, string_length, scratch, index, string,
3993  &bailout);
3994  __ mov(result_operand, result_pos);
3995  __ lea(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize));
3996 
3997 
3998  __ mov(string, separator_operand);
4000  Immediate(Smi::FromInt(1)));
4001  __ j(equal, &one_char_separator);
4002  __ j(greater, &long_separator);
4003 
4004 
4005  // Empty separator case
4006  __ mov(index, Immediate(0));
4007  __ jmp(&loop_1_condition);
4008  // Loop condition: while (index < length).
4009  __ bind(&loop_1);
4010  // Each iteration of the loop concatenates one string to the result.
4011  // Live values in registers:
4012  // index: which element of the elements array we are adding to the result.
4013  // result_pos: the position to which we are currently copying characters.
4014  // elements: the FixedArray of strings we are joining.
4015 
4016  // Get string = array[index].
4017  __ mov(string, FieldOperand(elements, index,
4020  __ mov(string_length,
4022  __ shr(string_length, 1);
4023  __ lea(string,
4025  __ CopyBytes(string, result_pos, string_length, scratch);
4026  __ add(index, Immediate(1));
4027  __ bind(&loop_1_condition);
4028  __ cmp(index, array_length_operand);
4029  __ j(less, &loop_1); // End while (index < length).
4030  __ jmp(&done);
4031 
4032 
4033 
4034  // One-character separator case
4035  __ bind(&one_char_separator);
4036  // Replace separator with its one-byte character value.
4037  __ mov_b(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
4038  __ mov_b(separator_operand, scratch);
4039 
4040  __ Move(index, Immediate(0));
4041  // Jump into the loop after the code that copies the separator, so the first
4042  // element is not preceded by a separator
4043  __ jmp(&loop_2_entry);
4044  // Loop condition: while (index < length).
4045  __ bind(&loop_2);
4046  // Each iteration of the loop concatenates one string to the result.
4047  // Live values in registers:
4048  // index: which element of the elements array we are adding to the result.
4049  // result_pos: the position to which we are currently copying characters.
4050 
4051  // Copy the separator character to the result.
4052  __ mov_b(scratch, separator_operand);
4053  __ mov_b(Operand(result_pos, 0), scratch);
4054  __ inc(result_pos);
4055 
4056  __ bind(&loop_2_entry);
4057  // Get string = array[index].
4058  __ mov(string, FieldOperand(elements, index,
4061  __ mov(string_length,
4063  __ shr(string_length, 1);
4064  __ lea(string,
4066  __ CopyBytes(string, result_pos, string_length, scratch);
4067  __ add(index, Immediate(1));
4068 
4069  __ cmp(index, array_length_operand);
4070  __ j(less, &loop_2); // End while (index < length).
4071  __ jmp(&done);
4072 
4073 
4074  // Long separator case (separator is more than one character).
4075  __ bind(&long_separator);
4076 
4077  __ Move(index, Immediate(0));
4078  // Jump into the loop after the code that copies the separator, so the first
4079  // element is not preceded by a separator
4080  __ jmp(&loop_3_entry);
4081  // Loop condition: while (index < length).
4082  __ bind(&loop_3);
4083  // Each iteration of the loop concatenates one string to the result.
4084  // Live values in registers:
4085  // index: which element of the elements array we are adding to the result.
4086  // result_pos: the position to which we are currently copying characters.
4087 
4088  // Copy the separator to the result.
4089  __ mov(string, separator_operand);
4090  __ mov(string_length,
4092  __ shr(string_length, 1);
4093  __ lea(string,
4095  __ CopyBytes(string, result_pos, string_length, scratch);
4096 
4097  __ bind(&loop_3_entry);
4098  // Get string = array[index].
4099  __ mov(string, FieldOperand(elements, index,
4102  __ mov(string_length,
4104  __ shr(string_length, 1);
4105  __ lea(string,
4107  __ CopyBytes(string, result_pos, string_length, scratch);
4108  __ add(index, Immediate(1));
4109 
4110  __ cmp(index, array_length_operand);
4111  __ j(less, &loop_3); // End while (index < length).
4112  __ jmp(&done);
4113 
4114 
4115  __ bind(&bailout);
4116  __ mov(result_operand, isolate()->factory()->undefined_value());
4117  __ bind(&done);
4118  __ mov(eax, result_operand);
4119  // Drop temp values from the stack, and restore context register.
4120  __ add(esp, Immediate(3 * kPointerSize));
4121 
4123  context()->Plug(eax);
4124 }
4125 
4126 
// Inline implementation of the %_DebugIsActive intrinsic: reads the
// isolate-global "debug is active" byte, smi-tags it, and plugs the
// result (in eax) into the current expression context.
4127 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4128  DCHECK(expr->arguments()->length() == 0);
4129  ExternalReference debug_is_active =
4130  ExternalReference::debug_is_active_address(isolate());
// Zero-extended byte load so the upper bits of eax are cleared before tagging.
4131  __ movzx_b(eax, Operand::StaticVariable(debug_is_active));
4132  __ SmiTag(eax);
4133  context()->Plug(eax);
4134 }
4135 
4136 
// Compiles a CallRuntime AST node. Three paths:
//  1. INLINE intrinsics are expanded in place via EmitInlineRuntimeCall.
//  2. JS runtime calls (%foo in JS-builtins) load the function off the
//     builtins object with a load IC and call through CallFunctionStub.
//  3. Plain C++ runtime functions are invoked with __ CallRuntime.
// NOTE(review): this listing was extracted from a generated source view and
// some emitted instructions were dropped (original lines 4151, 4157, 4159
// and 4181 are missing, e.g. the push of eax and the context restore) —
// verify against the upstream full-codegen-x87.cc before relying on it.
4137 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4138  if (expr->function() != NULL &&
4139  expr->function()->intrinsic_type == Runtime::INLINE) {
4140  Comment cmnt(masm_, "[ InlineRuntimeCall");
4141  EmitInlineRuntimeCall(expr);
4142  return;
4143  }
4144 
4145  Comment cmnt(masm_, "[ CallRuntime");
4146  ZoneList<Expression*>* args = expr->arguments();
4147 
4148  if (expr->is_jsruntime()) {
4149  // Push the builtins object as receiver.
4150  __ mov(eax, GlobalObjectOperand());
4152 
4153  // Load the function from the receiver.
4154  __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
4155  __ mov(LoadDescriptor::NameRegister(), Immediate(expr->name()));
4156  if (FLAG_vector_ics) {
4158  Immediate(Smi::FromInt(expr->CallRuntimeFeedbackSlot())));
4160  } else {
4161  CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4162  }
4163 
4164  // Push the target function under the receiver.
4165  __ push(Operand(esp, 0));
4166  __ mov(Operand(esp, kPointerSize), eax);
4167 
4168  // Code common for calls using the IC.
4169  ZoneList<Expression*>* args = expr->arguments();
4170  int arg_count = args->length();
4171  for (int i = 0; i < arg_count; i++) {
4172  VisitForStackValue(args->at(i));
4173  }
4174 
4175  // Record source position of the IC call.
4176  SetSourcePosition(expr->position());
4177  CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
// The callee sits below the pushed arguments and the receiver.
4178  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
4179  __ CallStub(&stub);
4180  // Restore context register.
4182  context()->DropAndPlug(1, eax);
4183 
4184  } else {
4185  // Push the arguments ("left-to-right").
4186  int arg_count = args->length();
4187  for (int i = 0; i < arg_count; i++) {
4188  VisitForStackValue(args->at(i));
4189  }
4190 
4191  // Call the C runtime function.
4192  __ CallRuntime(expr->function(), arg_count);
4193 
4194  context()->Plug(eax);
4195  }
4196 }
4197 
4198 
// Compiles the unary operators delete, void, !, and typeof. Each case
// evaluates its operand in the context the operator requires and plugs
// the result into the enclosing expression context.
4199 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4200  switch (expr->op()) {
4201  case Token::DELETE: {
4202  Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4203  Property* property = expr->expression()->AsProperty();
4204  VariableProxy* proxy = expr->expression()->AsVariableProxy();
4205 
4206  if (property != NULL) {
// delete obj.key / obj[key]: defer to the DELETE builtin with the
// current strict mode so it can throw or return false as required.
4207  VisitForStackValue(property->obj());
4208  VisitForStackValue(property->key());
4209  __ push(Immediate(Smi::FromInt(strict_mode())));
4210  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4211  context()->Plug(eax);
4212  } else if (proxy != NULL) {
4213  Variable* var = proxy->var();
4214  // Delete of an unqualified identifier is disallowed in strict mode
4215  // but "delete this" is allowed.
4216  DCHECK(strict_mode() == SLOPPY || var->is_this());
4217  if (var->IsUnallocated()) {
4218  __ push(GlobalObjectOperand());
4219  __ push(Immediate(var->name()));
4220  __ push(Immediate(Smi::FromInt(SLOPPY)));
4221  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4222  context()->Plug(eax);
4223  } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4224  // Result of deleting non-global variables is false. 'this' is
4225  // not really a variable, though we implement it as one. The
4226  // subexpression does not have side effects.
4227  context()->Plug(var->is_this());
4228  } else {
4229  // Non-global variable. Call the runtime to try to delete from the
4230  // context where the variable was introduced.
4231  __ push(context_register());
4232  __ push(Immediate(var->name()));
4233  __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4234  context()->Plug(eax);
4235  }
4236  } else {
4237  // Result of deleting non-property, non-variable reference is true.
4238  // The subexpression may have side effects.
4239  VisitForEffect(expr->expression());
4240  context()->Plug(true);
4241  }
4242  break;
4243  }
4244 
4245  case Token::VOID: {
// void expr: evaluate for side effects only, result is undefined.
4246  Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4247  VisitForEffect(expr->expression());
4248  context()->Plug(isolate()->factory()->undefined_value());
4249  break;
4250  }
4251 
4252  case Token::NOT: {
4253  Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4254  if (context()->IsEffect()) {
4255  // Unary NOT has no side effects so it's only necessary to visit the
4256  // subexpression. Match the optimizing compiler by not branching.
4257  VisitForEffect(expr->expression());
4258  } else if (context()->IsTest()) {
4259  const TestContext* test = TestContext::cast(context());
4260  // The labels are swapped for the recursive call.
4261  VisitForControl(expr->expression(),
4262  test->false_label(),
4263  test->true_label(),
4264  test->fall_through());
4265  context()->Plug(test->true_label(), test->false_label());
4266  } else {
4267  // We handle value contexts explicitly rather than simply visiting
4268  // for control and plugging the control flow into the context,
4269  // because we need to prepare a pair of extra administrative AST ids
4270  // for the optimizing compiler.
4271  DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4272  Label materialize_true, materialize_false, done;
// Labels are intentionally inverted: !expr is true when expr is false.
4273  VisitForControl(expr->expression(),
4274  &materialize_false,
4275  &materialize_true,
4276  &materialize_true);
4277  __ bind(&materialize_true);
4278  PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4279  if (context()->IsAccumulatorValue()) {
4280  __ mov(eax, isolate()->factory()->true_value());
4281  } else {
4282  __ Push(isolate()->factory()->true_value());
4283  }
4284  __ jmp(&done, Label::kNear);
4285  __ bind(&materialize_false);
4286  PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4287  if (context()->IsAccumulatorValue()) {
4288  __ mov(eax, isolate()->factory()->false_value());
4289  } else {
4290  __ Push(isolate()->factory()->false_value());
4291  }
4292  __ bind(&done);
4293  }
4294  break;
4295  }
4296 
4297  case Token::TYPEOF: {
// typeof expr: evaluate with the special typeof-value protocol (no
// reference error for unresolved globals), then call the runtime.
4298  Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4299  { StackValueContext context(this);
4300  VisitForTypeofValue(expr->expression());
4301  }
4302  __ CallRuntime(Runtime::kTypeof, 1);
4303  context()->Plug(eax);
4304  break;
4305  }
4306 
4307  default:
4308  UNREACHABLE();
4309  }
4310 }
4311 
4312 
// Compiles ++/-- (prefix and postfix) on variables, named properties and
// keyed properties. Fast path: if the operand is a smi, add/sub a tagged 1
// inline and bail to the BinaryOpIC stub only on overflow or non-smi input
// (the JumpPatchSite lets the IC patch the smi check later). Postfix forms
// additionally keep the original value on the stack so it can be plugged
// as the expression result after the store.
// NOTE(review): extracted listing — original lines 4321, 4354, 4472, 4474
// and 4487-4488 are missing (e.g. the LoadDescriptor/StoreDescriptor
// register setup and pops); verify against upstream before relying on it.
4313 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4314  DCHECK(expr->expression()->IsValidReferenceExpression());
4315 
4316  Comment cmnt(masm_, "[ CountOperation");
4317  SetSourcePosition(expr->position());
4318 
4319  // Expression can only be a property, a global or a (parameter or local)
4320  // slot.
4322  LhsKind assign_type = VARIABLE;
4323  Property* prop = expr->expression()->AsProperty();
4324  // In case of a property we use the uninitialized expression context
4325  // of the key to detect a named property.
4326  if (prop != NULL) {
4327  assign_type =
4328  (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4329  if (prop->IsSuperAccess()) {
4330  // throw exception.
4331  VisitSuperReference(prop->obj()->AsSuperReference());
4332  return;
4333  }
4334  }
4335 
4336  // Evaluate expression and get value.
4337  if (assign_type == VARIABLE) {
4338  DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4339  AccumulatorValueContext context(this);
4340  EmitVariableLoad(expr->expression()->AsVariableProxy());
4341  } else {
4342  // Reserve space for result of postfix operation.
4343  if (expr->is_postfix() && !context()->IsEffect()) {
4344  __ push(Immediate(Smi::FromInt(0)));
4345  }
4346  if (assign_type == NAMED_PROPERTY) {
4347  // Put the object both on the stack and in the register.
4348  VisitForStackValue(prop->obj());
4349  __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
4350  EmitNamedPropertyLoad(prop);
4351  } else {
4352  VisitForStackValue(prop->obj());
4353  VisitForStackValue(prop->key());
4355  Operand(esp, kPointerSize)); // Object.
4356  __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0)); // Key.
4357  EmitKeyedPropertyLoad(prop);
4358  }
4359  }
4360 
4361  // We need a second deoptimization point after loading the value
4362  // in case evaluating the property load my have a side effect.
4363  if (assign_type == VARIABLE) {
4364  PrepareForBailout(expr->expression(), TOS_REG);
4365  } else {
4366  PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4367  }
4368 
4369  // Inline smi case if we are in a loop.
4370  Label done, stub_call;
4371  JumpPatchSite patch_site(masm_);
4372  if (ShouldInlineSmiCase(expr->op())) {
4373  Label slow;
4374  patch_site.EmitJumpIfNotSmi(eax, &slow, Label::kNear);
4375 
4376  // Save result for postfix expressions.
4377  if (expr->is_postfix()) {
4378  if (!context()->IsEffect()) {
4379  // Save the result on the stack. If we have a named or keyed property
4380  // we store the result under the receiver that is currently on top
4381  // of the stack.
4382  switch (assign_type) {
4383  case VARIABLE:
4384  __ push(eax);
4385  break;
4386  case NAMED_PROPERTY:
4387  __ mov(Operand(esp, kPointerSize), eax);
4388  break;
4389  case KEYED_PROPERTY:
4390  __ mov(Operand(esp, 2 * kPointerSize), eax);
4391  break;
4392  }
4393  }
4394  }
4395 
// Tagged add/sub of Smi(1); on overflow the smi result is invalid, so
// the operation is undone below before falling back to the stub.
4396  if (expr->op() == Token::INC) {
4397  __ add(eax, Immediate(Smi::FromInt(1)));
4398  } else {
4399  __ sub(eax, Immediate(Smi::FromInt(1)));
4400  }
4401  __ j(no_overflow, &done, Label::kNear);
4402  // Call stub. Undo operation first.
4403  if (expr->op() == Token::INC) {
4404  __ sub(eax, Immediate(Smi::FromInt(1)));
4405  } else {
4406  __ add(eax, Immediate(Smi::FromInt(1)));
4407  }
4408  __ jmp(&stub_call, Label::kNear);
4409  __ bind(&slow);
4410  }
// Non-smi input: coerce to a number first (ToNumber), then save/operate.
4411  ToNumberStub convert_stub(isolate());
4412  __ CallStub(&convert_stub);
4413 
4414  // Save result for postfix expressions.
4415  if (expr->is_postfix()) {
4416  if (!context()->IsEffect()) {
4417  // Save the result on the stack. If we have a named or keyed property
4418  // we store the result under the receiver that is currently on top
4419  // of the stack.
4420  switch (assign_type) {
4421  case VARIABLE:
4422  __ push(eax);
4423  break;
4424  case NAMED_PROPERTY:
4425  __ mov(Operand(esp, kPointerSize), eax);
4426  break;
4427  case KEYED_PROPERTY:
4428  __ mov(Operand(esp, 2 * kPointerSize), eax);
4429  break;
4430  }
4431  }
4432  }
4433 
4434  // Record position before stub call.
4435  SetSourcePosition(expr->position());
4436 
4437  // Call stub for +1/-1.
4438  __ bind(&stub_call);
4439  __ mov(edx, eax);
4440  __ mov(eax, Immediate(Smi::FromInt(1)));
4441  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), expr->binary_op(),
4442  NO_OVERWRITE).code();
4443  CallIC(code, expr->CountBinOpFeedbackId());
4444  patch_site.EmitPatchInfo();
4445  __ bind(&done);
4446 
4447  // Store the value returned in eax.
4448  switch (assign_type) {
4449  case VARIABLE:
4450  if (expr->is_postfix()) {
4451  // Perform the assignment as if via '='.
4452  { EffectContext context(this);
4453  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4454  Token::ASSIGN);
4455  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4456  context.Plug(eax);
4457  }
4458  // For all contexts except EffectContext We have the result on
4459  // top of the stack.
4460  if (!context()->IsEffect()) {
4461  context()->PlugTOS();
4462  }
4463  } else {
4464  // Perform the assignment as if via '='.
4465  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4466  Token::ASSIGN);
4467  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4468  context()->Plug(eax);
4469  }
4470  break;
4471  case NAMED_PROPERTY: {
4473  prop->key()->AsLiteral()->value());
4475  CallStoreIC(expr->CountStoreFeedbackId());
4476  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4477  if (expr->is_postfix()) {
4478  if (!context()->IsEffect()) {
4479  context()->PlugTOS();
4480  }
4481  } else {
4482  context()->Plug(eax);
4483  }
4484  break;
4485  }
4486  case KEYED_PROPERTY: {
4489  Handle<Code> ic =
4490  CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
4491  CallIC(ic, expr->CountStoreFeedbackId());
4492  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4493  if (expr->is_postfix()) {
4494  // Result is on the stack
4495  if (!context()->IsEffect()) {
4496  context()->PlugTOS();
4497  }
4498  } else {
4499  context()->Plug(eax);
4500  }
4501  break;
4502  }
4503  }
4504 }
4505 
4506 
// Loads the operand of a typeof expression without triggering a reference
// error for unresolved names: globals use a non-contextual load IC, lookup
// slots use the NoReferenceError runtime entry, and everything else is a
// plain load.
// NOTE(review): extracted listing — original lines 4514, 4517, 4522 and
// 4543 are missing (e.g. the receiver-register setup and the CallLoadIC /
// fallthrough visit); verify against upstream before relying on it.
4507 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4508  VariableProxy* proxy = expr->AsVariableProxy();
// Callers must supply a value context; effect/test make no sense here.
4509  DCHECK(!context()->IsEffect());
4510  DCHECK(!context()->IsTest());
4511 
4512  if (proxy != NULL && proxy->var()->IsUnallocated()) {
4513  Comment cmnt(masm_, "[ Global variable");
4515  __ mov(LoadDescriptor::NameRegister(), Immediate(proxy->name()));
4516  if (FLAG_vector_ics) {
4518  Immediate(Smi::FromInt(proxy->VariableFeedbackSlot())));
4519  }
4520  // Use a regular load, not a contextual load, to avoid a reference
4521  // error.
4523  PrepareForBailout(expr, TOS_REG);
4524  context()->Plug(eax);
4525  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4526  Comment cmnt(masm_, "[ Lookup slot");
4527  Label done, slow;
4528 
4529  // Generate code for loading from variables potentially shadowed
4530  // by eval-introduced variables.
4531  EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);
4532 
4533  __ bind(&slow);
// esi holds the context; pass context + name to the runtime lookup.
4534  __ push(esi);
4535  __ push(Immediate(proxy->name()));
4536  __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
4537  PrepareForBailout(expr, TOS_REG);
4538  __ bind(&done);
4539 
4540  context()->Plug(eax);
4541  } else {
4542  // This expression cannot throw a reference error at the top level.
4544  }
4545 }
4546 
4547 
// Fast path for `typeof x == "literal"`: instead of materializing the
// typeof string, dispatch directly on the literal and emit the minimal
// type check for each possible answer, splitting control flow into the
// surrounding test context. Unknown literals are always false.
// NOTE(review): extracted listing — original lines 4566, 4574, 4591-4592,
// 4597 and 4611 are missing (e.g. the map compare and undetectable-bit
// test instructions); verify against upstream before relying on it.
4548 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4549  Expression* sub_expr,
4550  Handle<String> check) {
4551  Label materialize_true, materialize_false;
4552  Label* if_true = NULL;
4553  Label* if_false = NULL;
4554  Label* fall_through = NULL;
4555  context()->PrepareTest(&materialize_true, &materialize_false,
4556  &if_true, &if_false, &fall_through);
4557 
4558  { AccumulatorValueContext context(this);
4559  VisitForTypeofValue(sub_expr);
4560  }
4561  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4562 
4563  Factory* factory = isolate()->factory();
4564  if (String::Equals(check, factory->number_string())) {
// Smis and heap numbers are the only values with typeof "number".
4565  __ JumpIfSmi(eax, if_true);
4567  isolate()->factory()->heap_number_map());
4568  Split(equal, if_true, if_false, fall_through);
4569  } else if (String::Equals(check, factory->string_string())) {
4570  __ JumpIfSmi(eax, if_false);
4571  __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
4572  __ j(above_equal, if_false);
4573  // Check for undetectable objects => false.
4575  1 << Map::kIsUndetectable);
4576  Split(zero, if_true, if_false, fall_through);
4577  } else if (String::Equals(check, factory->symbol_string())) {
4578  __ JumpIfSmi(eax, if_false);
4579  __ CmpObjectType(eax, SYMBOL_TYPE, edx);
4580  Split(equal, if_true, if_false, fall_through);
4581  } else if (String::Equals(check, factory->boolean_string())) {
// Only the two boolean oddballs compare equal here.
4582  __ cmp(eax, isolate()->factory()->true_value());
4583  __ j(equal, if_true);
4584  __ cmp(eax, isolate()->factory()->false_value());
4585  Split(equal, if_true, if_false, fall_through);
4586  } else if (String::Equals(check, factory->undefined_string())) {
4587  __ cmp(eax, isolate()->factory()->undefined_value());
4588  __ j(equal, if_true);
4589  __ JumpIfSmi(eax, if_false);
4590  // Check for undetectable objects => true.
4593  __ test(ecx, Immediate(1 << Map::kIsUndetectable));
4594  Split(not_zero, if_true, if_false, fall_through);
4595  } else if (String::Equals(check, factory->function_string())) {
4596  __ JumpIfSmi(eax, if_false);
4598  __ CmpObjectType(eax, JS_FUNCTION_TYPE, edx);
4599  __ j(equal, if_true);
// Function proxies also report typeof "function".
4600  __ CmpInstanceType(edx, JS_FUNCTION_PROXY_TYPE);
4601  Split(equal, if_true, if_false, fall_through);
4602  } else if (String::Equals(check, factory->object_string())) {
4603  __ JumpIfSmi(eax, if_false);
4604  __ cmp(eax, isolate()->factory()->null_value());
4605  __ j(equal, if_true);
4606  __ CmpObjectType(eax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, edx);
4607  __ j(below, if_false);
4608  __ CmpInstanceType(edx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4609  __ j(above, if_false);
4610  // Check for undetectable objects => false.
4612  1 << Map::kIsUndetectable);
4613  Split(zero, if_true, if_false, fall_through);
4614  } else {
// Literal matches no typeof result: statically false.
4615  if (if_false != fall_through) __ jmp(if_false);
4616  }
4617  context()->Plug(if_true, if_false);
4618 }
4619 
4620 
// Compiles comparison operators. Literal compares (typeof/nil) are peeled
// off first; `in` and `instanceof` go through builtins/stubs; all other
// operators try an inline smi compare guarded by a patchable smi check
// before falling back to the CompareIC.
// NOTE(review): extracted listing — original line 4662 is missing (it
// declared `Condition cc = CompareIC::ComputeCondition(op);` used by the
// Split calls below); verify against upstream before relying on it.
4621 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4622  Comment cmnt(masm_, "[ CompareOperation");
4623  SetSourcePosition(expr->position());
4624 
4625  // First we try a fast inlined version of the compare when one of
4626  // the operands is a literal.
4627  if (TryLiteralCompare(expr)) return;
4628 
4629  // Always perform the comparison for its control flow. Pack the result
4630  // into the expression's context after the comparison is performed.
4631  Label materialize_true, materialize_false;
4632  Label* if_true = NULL;
4633  Label* if_false = NULL;
4634  Label* fall_through = NULL;
4635  context()->PrepareTest(&materialize_true, &materialize_false,
4636  &if_true, &if_false, &fall_through);
4637 
4638  Token::Value op = expr->op();
4639  VisitForStackValue(expr->left());
4640  switch (op) {
4641  case Token::IN:
4642  VisitForStackValue(expr->right());
4643  __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4644  PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4645  __ cmp(eax, isolate()->factory()->true_value());
4646  Split(equal, if_true, if_false, fall_through);
4647  break;
4648 
4649  case Token::INSTANCEOF: {
4650  VisitForStackValue(expr->right());
4651  InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
4652  __ CallStub(&stub);
4653  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4654  __ test(eax, eax);
4655  // The stub returns 0 for true.
4656  Split(zero, if_true, if_false, fall_through);
4657  break;
4658  }
4659 
4660  default: {
4661  VisitForAccumulatorValue(expr->right());
4663  __ pop(edx);
4664 
4665  bool inline_smi_code = ShouldInlineSmiCase(op);
4666  JumpPatchSite patch_site(masm_);
4667  if (inline_smi_code) {
4668  Label slow_case;
// Both operands are smis iff or-ing their tags stays untagged.
4669  __ mov(ecx, edx);
4670  __ or_(ecx, eax);
4671  patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
4672  __ cmp(edx, eax);
4673  Split(cc, if_true, if_false, NULL);
4674  __ bind(&slow_case);
4675  }
4676 
4677  // Record position and call the compare IC.
4678  SetSourcePosition(expr->position());
4679  Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
4680  CallIC(ic, expr->CompareOperationFeedbackId());
4681  patch_site.EmitPatchInfo();
4682 
4683  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4684  __ test(eax, eax);
4685  Split(cc, if_true, if_false, fall_through);
4686  }
4687  }
4688 
4689  // Convert the result of the comparison into one expected for this
4690  // expression's context.
4691  context()->Plug(if_true, if_false);
4692 }
4693 
4694 
// Fast path for comparing a sub-expression against null/undefined.
// Strict equality is a single pointer compare against the oddball;
// non-strict (==) must also accept the other nil and undetectable
// objects, so it goes through the CompareNilIC stub.
4695 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4696  Expression* sub_expr,
4697  NilValue nil) {
4698  Label materialize_true, materialize_false;
4699  Label* if_true = NULL;
4700  Label* if_false = NULL;
4701  Label* fall_through = NULL;
4702  context()->PrepareTest(&materialize_true, &materialize_false,
4703  &if_true, &if_false, &fall_through);
4704 
4705  VisitForAccumulatorValue(sub_expr);
4706  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4707 
4708  Handle<Object> nil_value = nil == kNullValue
4709  ? isolate()->factory()->null_value()
4710  : isolate()->factory()->undefined_value();
4711  if (expr->op() == Token::EQ_STRICT) {
4712  __ cmp(eax, nil_value);
4713  Split(equal, if_true, if_false, fall_through);
4714  } else {
4715  Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4716  CallIC(ic, expr->CompareOperationFeedbackId());
// The stub returns a non-zero value in eax when the compare is true.
4717  __ test(eax, eax);
4718  Split(not_zero, if_true, if_false, fall_through);
4719  }
4720  context()->Plug(if_true, if_false);
4721 }
4722 
4723 
// Plugs the current closure into the expression context via eax.
// NOTE(review): extracted listing — original line 4725 (the load of the
// function from the frame into eax) is missing; verify against upstream.
4724 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4726  context()->Plug(eax);
4727 }
4728 
4729 
// NOTE(review): signature line elided by extraction — presumably
// `Register FullCodeGenerator::result_register()`; eax is the
// accumulator register on ia32/x87. Confirm against upstream.
4731  return eax;
4732 }
4733 
4734 
// NOTE(review): signature line elided by extraction — presumably
// `Register FullCodeGenerator::context_register()`; esi holds the
// current context on ia32/x87. Confirm against upstream.
4736  return esi;
4737 }
4738 
4739 
// Stores |value| into the stack frame at |frame_offset| bytes from ebp.
// The offset must be pointer-size aligned.
4740 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4741  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4742  __ mov(Operand(ebp, frame_offset), value);
4743 }
4744 
4745 
// Loads slot |context_index| of the current context (held in esi)
// into |dst|.
4746 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4747  __ mov(dst, ContextOperand(esi, context_index));
4748 }
4749 
4750 
// NOTE(review): signature line elided by extraction — presumably
// `void FullCodeGenerator::PushFunctionArgumentForContextAllocation()`;
// original lines 4764 and 4767 (the pushes for the eval and function
// scope branches) are also missing. Confirm against upstream.
// Pushes the closure argument used when allocating a new context:
// a smi 0 sentinel for global/module scopes, the calling context's
// closure for eval scopes, and the current function otherwise.
4752  Scope* declaration_scope = scope()->DeclarationScope();
4753  if (declaration_scope->is_global_scope() ||
4754  declaration_scope->is_module_scope()) {
4755  // Contexts nested in the native context have a canonical empty function
4756  // as their closure, not the anonymous closure containing the global
4757  // code. Pass a smi sentinel and let the runtime look up the empty
4758  // function.
4759  __ push(Immediate(Smi::FromInt(0)));
4760  } else if (declaration_scope->is_eval_scope()) {
4761  // Contexts nested inside eval code have the same closure as the context
4762  // calling eval, not the anonymous closure containing the eval code.
4763  // Fetch it from the context.
4765  } else {
4766  DCHECK(declaration_scope->is_function_scope());
4768  }
4769 }
4770 
4771 
4772 // ----------------------------------------------------------------------------
4773 // Non-local control flow support.
4774 
// NOTE(review): signature line elided by extraction — presumably
// `void FullCodeGenerator::EnterFinallyBlock()`; original line 4780 is
// also missing. Confirm against upstream.
// Saves everything a finally block must preserve: the return address
// (cooked into a GC-safe smi-encoded code offset), the result register,
// and the isolate's pending-message state. ExitFinallyBlock pops these
// in reverse order.
4776  // Cook return address on top of stack (smi encoded Code* delta)
4777  DCHECK(!result_register().is(edx));
4778  __ pop(edx);
// Convert the absolute return address into an offset from the code
// object start so it stays valid if the code object moves.
4779  __ sub(edx, Immediate(masm_->CodeObject()));
4781  STATIC_ASSERT(kSmiTag == 0);
4782  __ SmiTag(edx);
4783  __ push(edx);
4784 
4785  // Store result register while executing finally block.
4786  __ push(result_register());
4787 
4788  // Store pending message while executing finally block.
4789  ExternalReference pending_message_obj =
4790  ExternalReference::address_of_pending_message_obj(isolate());
4791  __ mov(edx, Operand::StaticVariable(pending_message_obj));
4792  __ push(edx);
4793 
4794  ExternalReference has_pending_message =
4795  ExternalReference::address_of_has_pending_message(isolate());
4796  __ mov(edx, Operand::StaticVariable(has_pending_message));
// Tag the boolean so the GC can treat the stack slot uniformly.
4797  __ SmiTag(edx);
4798  __ push(edx);
4799 
4800  ExternalReference pending_message_script =
4801  ExternalReference::address_of_pending_message_script(isolate());
4802  __ mov(edx, Operand::StaticVariable(pending_message_script));
4803  __ push(edx);
4804 }
4805 
4806 
// NOTE(review): signature line elided by extraction — presumably
// `void FullCodeGenerator::ExitFinallyBlock()`. Confirm against upstream.
// Mirror of EnterFinallyBlock: restores the pending-message state, the
// result register, and finally uncooks the return address (smi offset
// back to an absolute address) and jumps to it.
4808  DCHECK(!result_register().is(edx));
4809  // Restore pending message from stack.
4810  __ pop(edx);
4811  ExternalReference pending_message_script =
4812  ExternalReference::address_of_pending_message_script(isolate());
4813  __ mov(Operand::StaticVariable(pending_message_script), edx);
4814 
4815  __ pop(edx);
4816  __ SmiUntag(edx);
4817  ExternalReference has_pending_message =
4818  ExternalReference::address_of_has_pending_message(isolate());
4819  __ mov(Operand::StaticVariable(has_pending_message), edx);
4820 
4821  __ pop(edx);
4822  ExternalReference pending_message_obj =
4823  ExternalReference::address_of_pending_message_obj(isolate());
4824  __ mov(Operand::StaticVariable(pending_message_obj), edx);
4825 
4826  // Restore result register from stack.
4827  __ pop(result_register());
4828 
4829  // Uncook return address.
4830  __ pop(edx);
4831  __ SmiUntag(edx);
// Re-add the code object base that EnterFinallyBlock subtracted.
4832  __ add(edx, Immediate(masm_->CodeObject()));
4833  __ jmp(edx);
4834 }
4835 
4836 
4837 #undef __
4838 
4839 #define __ ACCESS_MASM(masm())
4840 
// NOTE(review): first signature line elided by extraction — presumably
// `FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(`;
// original lines 4853-4854 (the context restore inside the if) are also
// missing. Confirm against upstream.
// Unwinds out of a try-finally: drops down to the handler block, pops the
// try handler, and calls the finally code before continuing the unwind.
// Resets the callers' accumulated depth/length since this frame consumed
// them, and returns the enclosing nested statement.
4842  int* stack_depth,
4843  int* context_length) {
4844  // The macros used here must preserve the result register.
4845 
4846  // Because the handler block contains the context of the finally
4847  // code, we can restore it directly from there for the finally code
4848  // rather than iteratively unwinding contexts via their previous
4849  // links.
4850  __ Drop(*stack_depth); // Down to the handler block.
4851  if (*context_length > 0) {
4852  // Restore the context to its dedicated register and the stack.
4855  }
4856  __ PopTryHandler();
4857  __ call(finally_entry_);
4858 
4859  *stack_depth = 0;
4860  *context_length = 0;
4861  return previous_;
4862 }
4863 
4864 #undef __
4865 
4866 
// Byte patterns used by BackEdgeTable::PatchAt/GetBackEdgeState to patch
// the back-edge check sequence in place:
//   0x79      = x86 `jns rel8` opcode (skip the interrupt call)
//   0x11      = the rel8 displacement used for that jns
//   0x66 0x90 = two-byte nop (operand-size-prefixed nop), replacing the
//               jns so the OSR call is always taken
//   0xe8      = `call rel32` opcode, asserted in debug mode to verify the
//               patch site really is the call instruction
4867 static const byte kJnsInstruction = 0x79;
4868 static const byte kJnsOffset = 0x11;
4869 static const byte kNopByteOne = 0x66;
4870 static const byte kNopByteTwo = 0x90;
4871 #ifdef DEBUG
4872 static const byte kCallInstruction = 0xe8;
4873 #endif
4874 
4875 
// Patches the back-edge check at |pc| in |unoptimized_code| between its
// two states: INTERRUPT keeps the `jns` that skips the interrupt call on
// a non-negative profiling counter; the OSR states overwrite the `jns`
// with a two-byte nop so the (retargeted) call is always taken. The call
// target itself is then pointed at |replacement_code| and the write is
// reported to the incremental marker.
4876 void BackEdgeTable::PatchAt(Code* unoptimized_code,
4877  Address pc,
4878  BackEdgeState target_state,
4879  Code* replacement_code) {
// |pc| points just past the call's 4-byte operand; back up to the operand
// and to the 2-byte jns that precedes the call instruction.
4880  Address call_target_address = pc - kIntSize;
4881  Address jns_instr_address = call_target_address - 3;
4882  Address jns_offset_address = call_target_address - 2;
4883 
4884  switch (target_state) {
4885  case INTERRUPT:
4886  // sub <profiling_counter>, <delta> ;; Not changed
4887  // jns ok
4888  // call <interrupt stub>
4889  // ok:
4890  *jns_instr_address = kJnsInstruction;
4891  *jns_offset_address = kJnsOffset;
4892  break;
4893  case ON_STACK_REPLACEMENT:
4894  case OSR_AFTER_STACK_CHECK:
4895  // sub <profiling_counter>, <delta> ;; Not changed
4896  // nop
4897  // nop
4898  // call <on-stack replacment>
4899  // ok:
4900  *jns_instr_address = kNopByteOne;
4901  *jns_offset_address = kNopByteTwo;
4902  break;
4903  }
4904 
4905  Assembler::set_target_address_at(call_target_address,
4906  unoptimized_code,
4907  replacement_code->entry());
4908  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4909  unoptimized_code, call_target_address, replacement_code);
4910 }
4911 
4912 
// NOTE(review): signature line elided by extraction — presumably
// `BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(`.
// Confirm against upstream.
// Inverse of PatchAt: inspects the bytes at the back-edge site to decide
// which state it is in. A live `jns` means INTERRUPT; nops mean one of
// the OSR states, disambiguated by which builtin the call targets.
4914  Isolate* isolate,
4915  Code* unoptimized_code,
4916  Address pc) {
4917  Address call_target_address = pc - kIntSize;
4918  Address jns_instr_address = call_target_address - 3;
// The byte before the operand must always be the call opcode (0xe8).
4919  DCHECK_EQ(kCallInstruction, *(call_target_address - 1));
4920 
4921  if (*jns_instr_address == kJnsInstruction) {
4922  DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
4923  DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
4924  Assembler::target_address_at(call_target_address,
4925  unoptimized_code));
4926  return INTERRUPT;
4927  }
4928 
4929  DCHECK_EQ(kNopByteOne, *jns_instr_address);
4930  DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));
4931 
4932  if (Assembler::target_address_at(call_target_address, unoptimized_code) ==
4933  isolate->builtins()->OnStackReplacement()->entry()) {
4934  return ON_STACK_REPLACEMENT;
4935  }
4936 
// Only one possibility left; assert it in debug builds.
4937  DCHECK_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
4938  Assembler::target_address_at(call_target_address,
4939  unoptimized_code));
4940  return OSR_AFTER_STACK_CHECK;
4941 }
4942 
4943 
4944 } } // namespace v8::internal
4945 
4946 #endif // V8_TARGET_ARCH_X87
#define BASE_EMBEDDED
Definition: allocation.h:45
Isolate * isolate() const
Definition: assembler.h:62
static Address target_address_at(Address pc, ConstantPoolArray *constant_pool)
static void set_target_address_at(Address pc, ConstantPoolArray *constant_pool, Address target, ICacheFlushMode icache_flush_mode=FLUSH_ICACHE_IF_NEEDED)
static const int kJSReturnSequenceLength
int SizeOfCodeGeneratedSince(Label *label)
static void PatchAt(Code *unoptimized_code, Address pc, BackEdgeState target_state, Code *replacement_code)
static BackEdgeState GetBackEdgeState(Isolate *isolate, Code *unoptimized_code, Address pc_after)
static BailoutId Declarations()
Definition: utils.h:962
static BailoutId FunctionEntry()
Definition: utils.h:961
static Handle< Code > initialize_stub(Isolate *isolate, int argc, CallICState::CallType call_type)
Definition: ic.cc:1338
static const int kValueOffset
Definition: objects.h:9446
static Condition ComputeCondition(Token::Value op)
static Handle< Code > GetUninitialized(Isolate *isolate, NilValue nil)
Definition: code-stubs.h:1329
void AddNoFrameRange(int from, int to)
Definition: compiler.h:354
static bool IsCompileTimeValue(Expression *expression)
Definition: parser.cc:3331
static Handle< SharedFunctionInfo > BuildFunctionInfo(FunctionLiteral *node, Handle< Script > script, CompilationInfo *outer)
Definition: compiler.cc:1243
@ STRING_FUNCTION_PROTOTYPE_MAP_INDEX
Definition: contexts.h:294
static int SlotOffset(int index)
Definition: contexts.h:552
static const int kDescriptorSize
Definition: objects.h:3038
static const int kEnumCacheBridgeCacheOffset
Definition: objects.h:3032
static const int kEnumCacheOffset
Definition: objects.h:3028
static const int kFirstOffset
Definition: objects.h:3029
static const int kLengthOffset
Definition: objects.h:2392
static const int kHeaderSize
Definition: objects.h:2393
static int OffsetOfElementAt(int index)
Definition: objects.h:2455
virtual void DropAndPlug(int count, Register reg) const
virtual void DropAndPlug(int count, Register reg) const
virtual void DropAndPlug(int count, Register reg) const
virtual void DropAndPlug(int count, Register reg) const
static const TestContext * cast(const ExpressionContext *context)
Definition: full-codegen.h:778
virtual void Plug(bool flag) const
virtual NestedStatement * Exit(int *stack_depth, int *context_length)
static const int kMaxBackEdgeWeight
Definition: full-codegen.h:99
void EmitAccessor(Expression *expression)
void Split(Condition cc, Label *if_true, Label *if_false, Label *fall_through)
void EmitBinaryOp(BinaryOperation *expr, Token::Value op, OverwriteMode mode)
void EmitLiteralCompareTypeof(Expression *expr, Expression *sub_expr, Handle< String > check)
void VisitDeclarations(ZoneList< Declaration * > *declarations)
void PrepareForBailoutForId(BailoutId id, State state)
void EmitNewClosure(Handle< SharedFunctionInfo > info, bool pretenure)
void GetVar(Register destination, Variable *var)
static Register context_register()
ZoneList< Handle< Object > > * globals_
Definition: full-codegen.h:837
void VisitForControl(Expression *expr, Label *if_true, Label *if_false, Label *fall_through)
Definition: full-codegen.h:382
void CallLoadIC(ContextualMode mode, TypeFeedbackId id=TypeFeedbackId::None())
void RecordBackEdge(BailoutId osr_ast_id)
MemOperand StackOperand(Variable *var)
void EmitVariableLoad(VariableProxy *proxy)
void SetVar(Variable *var, Register source, Register scratch0, Register scratch1)
MemOperand ContextSlotOperandCheckExtensions(Variable *var, Label *slow)
void EmitKeyedPropertyAssignment(Assignment *expr)
void DeclareGlobals(Handle< FixedArray > pairs)
void EmitResolvePossiblyDirectEval(int arg_count)
void VisitForStackValue(Expression *expr)
Definition: full-codegen.h:376
void EmitKeyedCallWithLoadIC(Call *expr, Expression *key)
void EmitKeyedPropertyLoad(Property *expr)
void EmitDebugCheckDeclarationContext(Variable *variable)
FunctionLiteral * function()
Definition: full-codegen.h:609
void EmitNamedSuperPropertyLoad(Property *expr)
bool TryLiteralCompare(CompareOperation *compare)
void SetStatementPosition(Statement *stmt)
Handle< FixedArray > FeedbackVector()
Definition: full-codegen.h:432
void StoreToFrameField(int frame_offset, Register value)
void LoadContextField(Register dst, int context_index)
const ExpressionContext * context()
Definition: full-codegen.h:602
void EmitNamedPropertyLoad(Property *expr)
void EmitBackEdgeBookkeeping(IterationStatement *stmt, Label *back_edge_target)
void DoTest(Expression *condition, Label *if_true, Label *if_false, Label *fall_through)
void VisitForAccumulatorValue(Expression *expr)
Definition: full-codegen.h:370
void PrepareForBailout(Expression *node, State state)
void CallStoreIC(TypeFeedbackId id=TypeFeedbackId::None())
MemOperand VarOperand(Variable *var, Register scratch)
void DeclareModules(Handle< FixedArray > descriptions)
void EmitGeneratorResume(Expression *generator, Expression *value, JSGeneratorObject::ResumeMode resume_mode)
void VisitForEffect(Expression *expr)
Definition: full-codegen.h:364
void EmitAssignment(Expression *expr)
void EmitCall(Call *expr, CallICState::CallType=CallICState::FUNCTION)
void SetFunctionPosition(FunctionLiteral *fun)
void EmitLoadHomeObject(SuperReference *expr)
void EmitStoreToStackLocalOrContextSlot(Variable *var, MemOperand location)
void EmitInlineSmiBinaryOp(BinaryOperation *expr, Token::Value op, OverwriteMode mode, Expression *left, Expression *right)
void EmitLiteralCompareNil(CompareOperation *expr, Expression *sub_expr, NilValue nil)
void EmitVariableAssignment(Variable *var, Token::Value op)
void CallIC(Handle< Code > code, TypeFeedbackId id=TypeFeedbackId::None())
void EmitCreateIteratorResult(bool done)
void EmitLoadGlobalCheckExtensions(VariableProxy *proxy, TypeofState typeof_state, Label *slow)
void EmitCallWithLoadIC(Call *expr)
void EnsureSlotContainsAllocationSite(int slot)
void PrepareForBailoutBeforeSplit(Expression *expr, bool should_normalize, Label *if_true, Label *if_false)
void EmitInlineRuntimeCall(CallRuntime *expr)
void EmitSuperCallWithLoadIC(Call *expr)
void EmitNamedSuperPropertyAssignment(Assignment *expr)
void EmitNamedPropertyAssignment(Assignment *expr)
Handle< FixedArray > handler_table_
Definition: full-codegen.h:844
void RecordJSReturnSite(Call *call)
static Register result_register()
void VisitForTypeofValue(Expression *expr)
void EmitDynamicLookupFastCase(VariableProxy *proxy, TypeofState typeof_state, Label *slow, Label *done)
bool ShouldInlineSmiCase(Token::Value op)
Handle< FixedArray > handler_table()
Definition: full-codegen.h:642
void EmitProfilingCounterDecrement(int delta)
void VisitInDuplicateContext(Expression *expr)
static const int kBuiltinsOffset
Definition: objects.h:7458
static const int kNativeContextOffset
Definition: objects.h:7459
static const int kGlobalProxyOffset
Definition: objects.h:7461
static const int kMapOffset
Definition: objects.h:1427
Factory * factory()
Definition: isolate.h:982
static const int kLengthOffset
Definition: objects.h:10072
static const int kValueOffset
Definition: objects.h:7623
static const int kCacheStampOffset
Definition: objects.h:7631
static const int kSharedFunctionInfoOffset
Definition: objects.h:7379
static const int kLiteralsOffset
Definition: objects.h:7382
static const int kCodeEntryOffset
Definition: objects.h:7376
static const int kResultDonePropertyOffset
Definition: objects.h:7142
static const int kFunctionOffset
Definition: objects.h:7123
static const int kGeneratorClosed
Definition: objects.h:7120
static const int kResultValuePropertyOffset
Definition: objects.h:7141
static const int kGeneratorExecuting
Definition: objects.h:7119
static const int kOperandStackOffset
Definition: objects.h:7127
static const int kReceiverOffset
Definition: objects.h:7125
static const int kContextOffset
Definition: objects.h:7124
static const int kContinuationOffset
Definition: objects.h:7126
static const int kInitialMaxFastElementArray
Definition: objects.h:2180
static const int kPropertiesOffset
Definition: objects.h:2193
static const int kElementsOffset
Definition: objects.h:2194
static const int kSize
Definition: objects.h:7772
static const int kInObjectFieldCount
Definition: objects.h:7826
static const int kValueOffset
Definition: objects.h:7546
static const Register ReceiverRegister()
static const Register NameRegister()
static const int kIsUndetectable
Definition: objects.h:6244
static const int kBitFieldOffset
Definition: objects.h:6228
static const int kStringWrapperSafeForDefaultValueOf
Definition: objects.h:6251
static const int kInstanceTypeOffset
Definition: objects.h:6229
static const int kBitField2Offset
Definition: objects.h:6233
static const int kConstructorOffset
Definition: objects.h:6191
static const int kPrototypeOffset
Definition: objects.h:6190
static const int kHashFieldOffset
Definition: objects.h:8486
static void MaybeCallEntryHook(MacroAssembler *masm)
static const int kNoPosition
Definition: assembler.h:317
Scope * outer_scope() const
Definition: scopes.h:333
int num_parameters() const
Definition: scopes.h:321
VariableDeclaration * function() const
Definition: scopes.h:309
int ContextChainLength(Scope *scope)
Definition: scopes.cc:715
void VisitIllegalRedeclaration(AstVisitor *visitor)
Definition: scopes.cc:539
Scope * DeclarationScope()
Definition: scopes.cc:737
Variable * arguments() const
Definition: scopes.h:324
Scope * GlobalScope()
Definition: scopes.cc:728
Variable * parameter(int index) const
Definition: scopes.h:316
static const int kHeaderSize
Definition: objects.h:8941
static const int kFormalParameterCountOffset
Definition: objects.h:6946
static const int kInstanceClassNameOffset
Definition: objects.h:6897
static Smi * FromInt(int value)
Definition: objects-inl.h:1321
static bool IsValid(intptr_t value)
Definition: objects-inl.h:1334
static const int kContextOffset
Definition: frames.h:74
static const int kContextOffset
Definition: frames.h:162
static const int kExpressionsOffset
Definition: frames.h:160
static const int kCallerSPOffset
Definition: frames.h:167
static const int kMarkerOffset
Definition: frames.h:161
static const int kCallerFPOffset
Definition: frames.h:165
static const Register ReceiverRegister()
static const Register NameRegister()
static const Register ValueRegister()
static const unsigned int kContainsCachedArrayIndexMask
Definition: objects.h:8618
static const int kLengthOffset
Definition: objects.h:8802
bool Equals(String *other)
Definition: objects-inl.h:3336
static Handle< Code > GetUninitialized(Isolate *isolate)
Definition: code-stubs.h:2258
static TypeFeedbackId None()
Definition: utils.h:945
static Handle< Object > MegamorphicSentinel(Isolate *isolate)
TypeofState
Definition: codegen.h:46
#define __
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf map
enable harmony numeric enable harmony object literal extensions Optimize object size
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long mode(MIPS only)") DEFINE_BOOL(enable_always_align_csp
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction pairs(ARM only)") DEFINE_BOOL(enable_unaligned_accesses
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be NULL
#define POINTER_SIZE_ALIGN(value)
Definition: globals.h:582
#define UNREACHABLE()
Definition: logging.h:30
#define DCHECK_NE(v1, v2)
Definition: logging.h:207
#define DCHECK(condition)
Definition: logging.h:205
#define DCHECK_EQ(v1, v2)
Definition: logging.h:206
@ CALL_FUNCTION
@ TAG_OBJECT
static int Push(SpecialRPOStackFrame *stack, int depth, BasicBlock *child, int unvisited)
Definition: scheduler.cc:773
Vector< const char > CStrVector(const char *data)
Definition: vector.h:158
const int kPointerSize
Definition: globals.h:129
const Register edx
const uint32_t kStringEncodingMask
Definition: objects.h:555
const Register edi
MemOperand ContextOperand(Register context, int index)
@ DO_SMI_CHECK
Definition: globals.h:641
@ STRING_ADD_CHECK_BOTH
Definition: code-stubs.h:1218
@ TRACK_ALLOCATION_SITE
Definition: objects.h:8085
@ DONT_TRACK_ALLOCATION_SITE
Definition: objects.h:8084
kFeedbackVectorOffset kHiddenPrototypeBit kReadOnlyPrototypeBit kDoNotCacheBit kIsTopLevelBit kAllowLazyCompilationWithoutContext has_duplicate_parameters
Definition: objects-inl.h:5448
@ kSeqStringTag
Definition: objects.h:563
const Register esp
static LifetimePosition Min(LifetimePosition a, LifetimePosition b)
bool IsDeclaredVariableMode(VariableMode mode)
Definition: globals.h:705
const uint32_t kTwoByteStringTag
Definition: objects.h:556
const int kSmiTagSize
Definition: v8.h:5743
Operand FieldOperand(Register object, int offset)
const uint32_t kStringTag
Definition: objects.h:544
@ LAST_NONCALLABLE_SPEC_OBJECT_TYPE
Definition: objects.h:785
@ JS_REGEXP_TYPE
Definition: objects.h:748
@ NUM_OF_CALLABLE_SPEC_OBJECT_TYPES
Definition: objects.h:788
@ JS_VALUE_TYPE
Definition: objects.h:728
@ JS_DATE_TYPE
Definition: objects.h:730
@ FIRST_NONCALLABLE_SPEC_OBJECT_TYPE
Definition: objects.h:784
@ FIRST_JS_PROXY_TYPE
Definition: objects.h:778
@ JS_ARRAY_TYPE
Definition: objects.h:738
@ FIRST_NONSTRING_TYPE
Definition: objects.h:758
@ FIRST_SPEC_OBJECT_TYPE
Definition: objects.h:781
@ LAST_SPEC_OBJECT_TYPE
Definition: objects.h:782
@ JS_FUNCTION_TYPE
Definition: objects.h:749
@ JS_FUNCTION_PROXY_TYPE
Definition: objects.h:726
@ LAST_JS_PROXY_TYPE
Definition: objects.h:779
const uint32_t kOneByteStringTag
Definition: objects.h:557
@ NO_OVERWRITE
Definition: ic-state.h:58
@ OVERWRITE_RIGHT
Definition: ic-state.h:58
const Register esi
const Register eax
bool IsImmutableVariableMode(VariableMode mode)
Definition: globals.h:715
const Register pc
const Register ebx
@ DYNAMIC_GLOBAL
Definition: globals.h:689
@ DYNAMIC_LOCAL
Definition: globals.h:693
@ CONST_LEGACY
Definition: globals.h:671
Condition NegateCondition(Condition cond)
Definition: constants-arm.h:86
static LifetimePosition Max(LifetimePosition a, LifetimePosition b)
const uint32_t kStringRepresentationMask
Definition: objects.h:561
byte * Address
Definition: globals.h:101
NilValue
Definition: v8.h:97
@ kNullValue
Definition: v8.h:97
@ NOT_CONTEXTUAL
Definition: objects.h:174
OStream & dec(OStream &os)
Definition: ostreams.cc:122
const int kIntSize
Definition: globals.h:124
Operand FixedArrayElementOperand(Register array, Register index_as_smi, int additional_offset=0)
const int kSmiShiftSize
Definition: v8.h:5805
const Register no_reg
kFeedbackVectorOffset flag
Definition: objects-inl.h:5418
MemOperand GlobalObjectOperand()
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
const intptr_t kSmiTagMask
Definition: v8.h:5744
const Register ebp
@ RECORD_CONSTRUCTOR_TARGET
Definition: globals.h:480
const int kSmiTag
Definition: v8.h:5742
const uint32_t kIsNotStringMask
Definition: objects.h:543
@ NO_CALL_FUNCTION_FLAGS
Definition: globals.h:469
@ CALL_AS_METHOD
Definition: globals.h:470
@ STRING_INDEX_IS_NUMBER
Definition: code-stubs.h:1590
void CopyBytes(uint8_t *target, uint8_t *source)
bool IsFastObjectElementsKind(ElementsKind kind)
const Register ecx
Debugger support for the V8 JavaScript engine.
Definition: accessors.cc:20
#define VOID
#define IN
PropertyAttributes
@ NONE
@ READ_ONLY