code-stubs-x87.cc
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_X87

#include "src/base/bits.h"
#include "src/bootstrapper.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/ic/handler-compiler.h"
#include "src/ic/ic.h"
#include "src/isolate.h"
#include "src/jsregexp.h"
#include "src/regexp-macro-assembler.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {


static void InitializeArrayConstructorDescriptor(
    Isolate* isolate, CodeStubDescriptor* descriptor,
    int constant_stack_parameter_count) {
  // register state
  // eax -- number of arguments
  // edi -- function
  // ebx -- allocation site with elements kind
  Address deopt_handler = Runtime::FunctionForId(
      Runtime::kArrayConstructor)->entry;

  if (constant_stack_parameter_count == 0) {
    descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  } else {
    descriptor->Initialize(eax, deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE, PASS_ARGUMENTS);
  }
}


static void InitializeInternalArrayConstructorDescriptor(
    Isolate* isolate, CodeStubDescriptor* descriptor,
    int constant_stack_parameter_count) {
  // register state
  // eax -- number of arguments
  // edi -- constructor function
  Address deopt_handler = Runtime::FunctionForId(
      Runtime::kInternalArrayConstructor)->entry;

  if (constant_stack_parameter_count == 0) {
    descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  } else {
    descriptor->Initialize(eax, deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE, PASS_ARGUMENTS);
  }
}


void ArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 0);
}


void ArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 1);
}


void ArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, -1);
}


void InternalArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 0);
}


void InternalArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 1);
}


void InternalArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, -1);
}


#define __ ACCESS_MASM(masm)

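// A Hydrogen code stub's "lightweight miss" re-packages the stub's register
// arguments as runtime-call arguments: every register named by the stub's
// call interface descriptor is pushed inside a fresh internal frame, the
// miss handler is called, and the result is returned to the caller.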
void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
                                               ExternalReference miss) {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
  int param_count = descriptor.GetEnvironmentParameterCount();
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    DCHECK(param_count == 0 ||
           eax.is(descriptor.GetEnvironmentParameterRegister(param_count - 1)));
    // Push arguments
    for (int i = 0; i < param_count; ++i) {
      __ push(descriptor.GetEnvironmentParameterRegister(i));
    }
    __ CallExternalReference(miss, param_count);
  }

  __ ret(0);
}

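// The store buffer overflow handler must preserve every register. pushad
// covers the integer registers; when save_doubles() is set, the complete
// x87 FPU state is additionally spilled with fnsave, whose protected-mode
// save area is 108 bytes (hence the esp adjustments by 108 below).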
void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  __ pushad();
  if (save_doubles()) {
    // Save the full x87 FPU state in the 108-byte fnsave area.
    __ sub(esp, Immediate(108));
    __ fnsave(Operand(esp, 0));
  }
  const int argument_count = 1;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count, ecx);
  __ mov(Operand(esp, 0 * kPointerSize),
         Immediate(ExternalReference::isolate_address(isolate())));
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(isolate()),
      argument_count);
  if (save_doubles()) {
    // Restore the x87 FPU state from the 108-byte fnsave area.
    __ frstor(Operand(esp, 0));
    __ add(esp, Immediate(108));
  }
  __ popad();
  __ ret(0);
}


class FloatingPointHelper : public AllStatic {
 public:
  enum ArgLocation {
    ARGS_ON_STACK,
    ARGS_IN_REGISTERS
  };

  // Code pattern for loading a floating point value. Input value must
  // be either a smi or a heap number object (fp value). Requirements:
  // operand in register number. Returns operand as floating point number
  // on FPU stack.
  static void LoadFloatOperand(MacroAssembler* masm, Register number);

  // Test if operands are smi or number objects (fp). Requirements:
  // operand_1 in eax, operand_2 in edx; falls through on float
  // operands, jumps to the non_float label otherwise.
  static void CheckFloatOperands(MacroAssembler* masm,
                                 Label* non_float,
                                 Register scratch);
};

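// Truncating double-to-int32 conversion (ECMAScript ToInt32), done entirely
// with integer instructions since this port cannot rely on SSE2: the
// unbiased exponent decides whether the result lives in the low 32 mantissa
// bits (small exponents, left-shift path) or must be assembled from the full
// significand with shrd (large exponents), and the sign of the input is
// applied to the result at the end.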
void DoubleToIStub::Generate(MacroAssembler* masm) {
  Register input_reg = this->source();
  Register final_result_reg = this->destination();
  DCHECK(is_truncating());

  Label check_negative, process_64_bits, done, done_no_stash;

  int double_offset = offset();

  // Account for return address and saved regs if input is esp.
  if (input_reg.is(esp)) double_offset += 3 * kPointerSize;

  MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
  MemOperand exponent_operand(MemOperand(input_reg,
                                         double_offset + kDoubleSize / 2));

  Register scratch1;
  {
    Register scratch_candidates[3] = { ebx, edx, edi };
    for (int i = 0; i < 3; i++) {
      scratch1 = scratch_candidates[i];
      if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
    }
  }
  // Since we must use ecx for shifts below, use some other register (eax)
  // to calculate the result if ecx is the requested return register.
  Register result_reg = final_result_reg.is(ecx) ? eax : final_result_reg;
  // Save ecx if it isn't the return register and therefore volatile, or if it
  // is the return register, then save the temp register we use in its stead
  // for the result.
  Register save_reg = final_result_reg.is(ecx) ? eax : ecx;
  __ push(scratch1);
  __ push(save_reg);

  bool stash_exponent_copy = !input_reg.is(esp);
  __ mov(scratch1, mantissa_operand);
  __ mov(ecx, exponent_operand);
  if (stash_exponent_copy) __ push(ecx);

  __ and_(ecx, HeapNumber::kExponentMask);
  __ shr(ecx, HeapNumber::kExponentShift);
  __ lea(result_reg, MemOperand(ecx, -HeapNumber::kExponentBias));
  __ cmp(result_reg, Immediate(HeapNumber::kMantissaBits));
  __ j(below, &process_64_bits);

  // Result is entirely in lower 32-bits of mantissa
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  __ sub(ecx, Immediate(delta));
  __ xor_(result_reg, result_reg);
  __ cmp(ecx, Immediate(31));
  __ j(above, &done);
  __ shl_cl(scratch1);
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
  // Result must be extracted from shifted 32-bit mantissa
  __ sub(ecx, Immediate(delta));
  __ neg(ecx);
  if (stash_exponent_copy) {
    __ mov(result_reg, MemOperand(esp, 0));
  } else {
    __ mov(result_reg, exponent_operand);
  }
  __ and_(result_reg,
          Immediate(static_cast<uint32_t>(Double::kSignificandMask >> 32)));
  __ add(result_reg,
         Immediate(static_cast<uint32_t>(Double::kHiddenBit >> 32)));
  __ shrd(result_reg, scratch1);
  __ shr_cl(result_reg);
  __ test(ecx, Immediate(32));
  {
    Label skip_mov;
    __ j(equal, &skip_mov, Label::kNear);
    __ mov(scratch1, result_reg);
    __ bind(&skip_mov);
  }

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ mov(result_reg, scratch1);
  __ neg(result_reg);
  if (stash_exponent_copy) {
    __ cmp(MemOperand(esp, 0), Immediate(0));
  } else {
    __ cmp(exponent_operand, Immediate(0));
  }
  {
    Label skip_mov;
    __ j(less_equal, &skip_mov, Label::kNear);
    __ mov(result_reg, scratch1);
    __ bind(&skip_mov);
  }

  // Restore registers
  __ bind(&done);
  if (stash_exponent_copy) {
    __ add(esp, Immediate(kDoubleSize / 2));
  }
  __ bind(&done_no_stash);
  if (!final_result_reg.is(result_reg)) {
    DCHECK(final_result_reg.is(ecx));
    __ mov(final_result_reg, result_reg);
  }
  __ pop(save_reg);
  __ pop(scratch1);
  __ ret(0);
}

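// The x87 FPU cannot load a tagged smi directly, so the smi path below
// untags the integer, spills it to the stack, and loads it with fild_s;
// heap numbers are loaded straight from their value field with fld_d.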
void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
                                           Register number) {
  Label load_smi, done;

  __ JumpIfSmi(number, &load_smi, Label::kNear);
  __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
  __ jmp(&done, Label::kNear);

  __ bind(&load_smi);
  __ SmiUntag(number);
  __ push(number);
  __ fild_s(Operand(esp, 0));
  __ pop(number);

  __ bind(&done);
}

void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
                                             Label* non_float,
                                             Register scratch) {
  Label test_other, done;
  // Test if both operands are floats or smi -> scratch=k_is_float;
  // Otherwise scratch = k_not_float.
  __ JumpIfSmi(edx, &test_other, Label::kNear);
  __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
  Factory* factory = masm->isolate()->factory();
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // argument in edx is not a number -> NaN

  __ bind(&test_other);
  __ JumpIfSmi(eax, &done, Label::kNear);
  __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // argument in eax is not a number -> NaN

  // Fall-through: Both operands are numbers.
  __ bind(&done);
}


void MathPowStub::Generate(MacroAssembler* masm) {
  // No SSE2 support
  UNREACHABLE();
}


void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
  Label miss;
  Register receiver = LoadDescriptor::ReceiverRegister();

  NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, eax,
                                                          ebx, &miss);
  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
}


void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
  // Return address is on the stack.
  Label slow;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register key = LoadDescriptor::NameRegister();
  Register scratch = eax;
  DCHECK(!scratch.is(receiver) && !scratch.is(key));

  // Check that the key is an array index, that is Uint32.
  __ test(key, Immediate(kSmiTagMask | kSmiSignMask));
  __ j(not_zero, &slow);

  // Everything is fine, call runtime.
  __ pop(scratch);
  __ push(receiver);  // receiver
  __ push(key);       // key
  __ push(scratch);   // return address

  // Perform tail call to the entry.
  ExternalReference ref = ExternalReference(
      IC_Utility(IC::kLoadElementWithInterceptor), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);

  __ bind(&slow);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}

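// Fast path for indexed access to the arguments object. The stub walks one
// frame up to detect an arguments adaptor frame (present when actual and
// formal argument counts differ) and reads the argument directly from the
// caller's frame. The times_2 scaling below relies on the key and the counts
// being smis, i.e. integers already shifted left by one bit.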
void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  // The key is in edx and the parameter count is in eax.
  DCHECK(edx.is(ArgumentsAccessReadDescriptor::index()));
  DCHECK(eax.is(ArgumentsAccessReadDescriptor::parameter_count()));

  // The displacement is used for skipping the frame pointer on the
  // stack. It is the offset of the last parameter (if any) relative
  // to the frame pointer.
  static const int kDisplacement = 1 * kPointerSize;

  // Check that the key is a smi.
  Label slow;
  __ JumpIfNotSmi(edx, &slow, Label::kNear);

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor;
  __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(ebx, StandardFrameConstants::kContextOffset));
  __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor, Label::kNear);

  // Check index against formal parameters count limit passed in
  // through register eax. Use unsigned comparison to get negative
  // check for free.
  __ cmp(edx, eax);
  __ j(above_equal, &slow, Label::kNear);

  // Read the argument from the stack and return it.
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);  // Shifting code depends on these.
  __ lea(ebx, Operand(ebp, eax, times_2, 0));
  __ neg(edx);
  __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
  __ ret(0);

  // Arguments adaptor case: Check index against actual arguments
  // limit found in the arguments adaptor frame. Use unsigned
  // comparison to get negative check for free.
  __ bind(&adaptor);
  __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ cmp(edx, ecx);
  __ j(above_equal, &slow, Label::kNear);

  // Read the argument from the stack and return it.
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);  // Shifting code depends on these.
  __ lea(ebx, Operand(ebx, ecx, times_2, 0));
  __ neg(edx);
  __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
  __ ret(0);

  // Slow-case: Handle non-smi or out-of-bounds access to arguments
  // by calling the runtime system.
  __ bind(&slow);
  __ pop(ebx);  // Return address.
  __ push(edx);
  __ push(ebx);
  __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
}


void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
  // esp[0] : return address
  // esp[4] : number of parameters
  // esp[8] : receiver displacement
  // esp[12] : function

  // Check if the calling frame is an arguments adaptor frame.
  Label runtime;
  __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
  __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &runtime, Label::kNear);

  // Patch the arguments.length and the parameters pointer.
  __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ mov(Operand(esp, 1 * kPointerSize), ecx);
  __ lea(edx, Operand(edx, ecx, times_2,
                      StandardFrameConstants::kCallerSPOffset));
  __ mov(Operand(esp, 2 * kPointerSize), edx);

  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
}

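// Fast allocation of a sloppy-mode arguments object. When the function has
// mapped (aliased) parameters, three objects are needed: the arguments
// JSObject itself, a parameter map whose entries either point at a context
// slot or hold the hole (unmapped), and a FixedArray backing store for the
// unmapped arguments. All three are allocated with a single Allocate call
// and initialized in place.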
void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
  // esp[0] : return address
  // esp[4] : number of parameters (tagged)
  // esp[8] : receiver displacement
  // esp[12] : function

  // ebx = parameter count (tagged)
  __ mov(ebx, Operand(esp, 1 * kPointerSize));

  // Check if the calling frame is an arguments adaptor frame.
  // TODO(rossberg): Factor out some of the bits that are shared with the other
  // Generate* functions.
  Label runtime;
  Label adaptor_frame, try_allocate;
  __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
  __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor_frame, Label::kNear);

  // No adaptor, parameter count = argument count.
  __ mov(ecx, ebx);
  __ jmp(&try_allocate, Label::kNear);

  // We have an adaptor frame. Patch the parameters pointer.
  __ bind(&adaptor_frame);
  __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ lea(edx, Operand(edx, ecx, times_2,
                      StandardFrameConstants::kCallerSPOffset));
  __ mov(Operand(esp, 2 * kPointerSize), edx);

  // ebx = parameter count (tagged)
  // ecx = argument count (smi-tagged)
  // esp[4] = parameter count (tagged)
  // esp[8] = address of receiver argument
  // Compute the mapped parameter count = min(ebx, ecx) in ebx.
  __ cmp(ebx, ecx);
  __ j(less_equal, &try_allocate, Label::kNear);
  __ mov(ebx, ecx);

  __ bind(&try_allocate);

  // Save mapped parameter count.
  __ push(ebx);

  // Compute the sizes of backing store, parameter map, and arguments object.
  // 1. Parameter map, has 2 extra words containing context and backing store.
  const int kParameterMapHeaderSize =
      FixedArray::kHeaderSize + 2 * kPointerSize;
  Label no_parameter_map;
  __ test(ebx, ebx);
  __ j(zero, &no_parameter_map, Label::kNear);
  __ lea(ebx, Operand(ebx, times_2, kParameterMapHeaderSize));
  __ bind(&no_parameter_map);

  // 2. Backing store.
  __ lea(ebx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize));

  // 3. Arguments object.
  __ add(ebx, Immediate(Heap::kSloppyArgumentsObjectSize));

  // Do the allocation of all three objects in one go.
  __ Allocate(ebx, eax, edx, edi, &runtime, TAG_OBJECT);

  // eax = address of new object(s) (tagged)
  // ecx = argument count (smi-tagged)
  // esp[0] = mapped parameter count (tagged)
  // esp[8] = parameter count (tagged)
  // esp[12] = address of receiver argument
  // Get the arguments map from the current native context into edi.
  Label has_mapped_parameters, instantiate;
  __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ mov(edi, FieldOperand(edi, GlobalObject::kNativeContextOffset));
  __ mov(ebx, Operand(esp, 0 * kPointerSize));
  __ test(ebx, ebx);
  __ j(not_zero, &has_mapped_parameters, Label::kNear);
  __ mov(
      edi,
      Operand(edi, Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX)));
  __ jmp(&instantiate, Label::kNear);

  __ bind(&has_mapped_parameters);
  __ mov(
      edi,
      Operand(edi, Context::SlotOffset(Context::ALIASED_ARGUMENTS_MAP_INDEX)));
  __ bind(&instantiate);

  // eax = address of new object (tagged)
  // ebx = mapped parameter count (tagged)
  // ecx = argument count (smi-tagged)
  // edi = address of arguments map (tagged)
  // esp[0] = mapped parameter count (tagged)
  // esp[8] = parameter count (tagged)
  // esp[12] = address of receiver argument
  // Copy the JS object part.
  __ mov(FieldOperand(eax, JSObject::kMapOffset), edi);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         masm->isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         masm->isolate()->factory()->empty_fixed_array());

  // Set up the callee in-object property.
  STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
  __ mov(edx, Operand(esp, 4 * kPointerSize));
  __ AssertNotSmi(edx);
  __ mov(FieldOperand(eax, JSObject::kHeaderSize +
                               Heap::kArgumentsCalleeIndex * kPointerSize),
         edx);

  // Use the length (smi tagged) and set that as an in-object property too.
  __ AssertSmi(ecx);
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
  __ mov(FieldOperand(eax, JSObject::kHeaderSize +
                               Heap::kArgumentsLengthIndex * kPointerSize),
         ecx);

  // Set up the elements pointer in the allocated arguments object.
  // If we allocated a parameter map, edi will point there, otherwise to the
  // backing store.
  __ lea(edi, Operand(eax, Heap::kSloppyArgumentsObjectSize));
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);

  // eax = address of new object (tagged)
  // ebx = mapped parameter count (tagged)
  // ecx = argument count (tagged)
  // edi = address of parameter map or backing store (tagged)
  // esp[0] = mapped parameter count (tagged)
  // esp[8] = parameter count (tagged)
  // esp[12] = address of receiver argument
  // Free a register.
  __ push(eax);

  // Initialize parameter map. If there are no mapped arguments, we're done.
  Label skip_parameter_map;
  __ test(ebx, ebx);
  __ j(zero, &skip_parameter_map);

  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(isolate()->factory()->sloppy_arguments_elements_map()));
  __ lea(eax, Operand(ebx, reinterpret_cast<intptr_t>(Smi::FromInt(2))));
  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), eax);
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 0 * kPointerSize), esi);
  __ lea(eax, Operand(edi, ebx, times_2, kParameterMapHeaderSize));
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 1 * kPointerSize), eax);

  // Copy the parameter slots and the holes in the arguments.
  // We need to fill in mapped_parameter_count slots. They index the context,
  // where parameters are stored in reverse order, at
  //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
  // The mapped parameter thus need to get indices
  //   MIN_CONTEXT_SLOTS+parameter_count-1 ..
  //       MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
  // We loop from right to left.
  Label parameters_loop, parameters_test;
  __ push(ecx);
  __ mov(eax, Operand(esp, 2 * kPointerSize));
  __ mov(ebx, Immediate(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
  __ add(ebx, Operand(esp, 4 * kPointerSize));
  __ sub(ebx, eax);
  __ mov(ecx, isolate()->factory()->the_hole_value());
  __ mov(edx, edi);
  __ lea(edi, Operand(edi, eax, times_2, kParameterMapHeaderSize));
  // eax = loop variable (tagged)
  // ebx = mapping index (tagged)
  // ecx = the hole value
  // edx = address of parameter map (tagged)
  // edi = address of backing store (tagged)
  // esp[0] = argument count (tagged)
  // esp[4] = address of new object (tagged)
  // esp[8] = mapped parameter count (tagged)
  // esp[16] = parameter count (tagged)
  // esp[20] = address of receiver argument
  __ jmp(&parameters_test, Label::kNear);

  __ bind(&parameters_loop);
  __ sub(eax, Immediate(Smi::FromInt(1)));
  __ mov(FieldOperand(edx, eax, times_2, kParameterMapHeaderSize), ebx);
  __ mov(FieldOperand(edi, eax, times_2, FixedArray::kHeaderSize), ecx);
  __ add(ebx, Immediate(Smi::FromInt(1)));
  __ bind(&parameters_test);
  __ test(eax, eax);
  __ j(not_zero, &parameters_loop, Label::kNear);
  __ pop(ecx);

  __ bind(&skip_parameter_map);

  // ecx = argument count (tagged)
  // edi = address of backing store (tagged)
  // esp[0] = address of new object (tagged)
  // esp[4] = mapped parameter count (tagged)
  // esp[12] = parameter count (tagged)
  // esp[16] = address of receiver argument
  // Copy arguments header and remaining slots (if there are any).
  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(isolate()->factory()->fixed_array_map()));
  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);

  Label arguments_loop, arguments_test;
  __ mov(ebx, Operand(esp, 1 * kPointerSize));
  __ mov(edx, Operand(esp, 4 * kPointerSize));
  __ sub(edx, ebx);  // Is there a smarter way to do negative scaling?
  __ sub(edx, ebx);
  __ jmp(&arguments_test, Label::kNear);

  __ bind(&arguments_loop);
  __ sub(edx, Immediate(kPointerSize));
  __ mov(eax, Operand(edx, 0));
  __ mov(FieldOperand(edi, ebx, times_2, FixedArray::kHeaderSize), eax);
  __ add(ebx, Immediate(Smi::FromInt(1)));

  __ bind(&arguments_test);
  __ cmp(ebx, ecx);
  __ j(less, &arguments_loop, Label::kNear);

  // Restore.
  __ pop(eax);  // Address of arguments object.
  __ pop(ebx);  // Parameter count.

  // Return and remove the on-stack parameters.
  __ ret(3 * kPointerSize);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ pop(eax);  // Remove saved parameter count.
  __ mov(Operand(esp, 1 * kPointerSize), ecx);  // Patch argument count.
  __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
}

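// Strict-mode arguments objects are simpler than the sloppy variety: there
// is no parameter map and no aliasing with the function's variables, just
// an arguments object plus one FixedArray holding a copy of the actual
// arguments.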
void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
  // esp[0] : return address
  // esp[4] : number of parameters
  // esp[8] : receiver displacement
  // esp[12] : function

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
  __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor_frame, Label::kNear);

  // Get the length from the frame.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ jmp(&try_allocate, Label::kNear);

  // Patch the arguments.length and the parameters pointer.
  __ bind(&adaptor_frame);
  __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ mov(Operand(esp, 1 * kPointerSize), ecx);
  __ lea(edx, Operand(edx, ecx, times_2,
                      StandardFrameConstants::kCallerSPOffset));
  __ mov(Operand(esp, 2 * kPointerSize), edx);

  // Try the new space allocation. Start out with computing the size of
  // the arguments object and the elements array.
  Label add_arguments_object;
  __ bind(&try_allocate);
  __ test(ecx, ecx);
  __ j(zero, &add_arguments_object, Label::kNear);
  __ lea(ecx, Operand(ecx, times_2, FixedArray::kHeaderSize));
  __ bind(&add_arguments_object);
  __ add(ecx, Immediate(Heap::kStrictArgumentsObjectSize));

  // Do the allocation of both objects in one go.
  __ Allocate(ecx, eax, edx, ebx, &runtime, TAG_OBJECT);

  // Get the arguments map from the current native context.
  __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ mov(edi, FieldOperand(edi, GlobalObject::kNativeContextOffset));
  const int offset = Context::SlotOffset(Context::STRICT_ARGUMENTS_MAP_INDEX);
  __ mov(edi, Operand(edi, offset));

  __ mov(FieldOperand(eax, JSObject::kMapOffset), edi);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         masm->isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         masm->isolate()->factory()->empty_fixed_array());

  // Get the length (smi tagged) and set that as an in-object property too.
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ AssertSmi(ecx);
  __ mov(FieldOperand(eax, JSObject::kHeaderSize +
                               Heap::kArgumentsLengthIndex * kPointerSize),
         ecx);

  // If there are no actual arguments, we're done.
  Label done;
  __ test(ecx, ecx);
  __ j(zero, &done, Label::kNear);

  // Get the parameters pointer from the stack.
  __ mov(edx, Operand(esp, 2 * kPointerSize));

  // Set up the elements pointer in the allocated arguments object and
  // initialize the header in the elements fixed array.
  __ lea(edi, Operand(eax, Heap::kStrictArgumentsObjectSize));
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(isolate()->factory()->fixed_array_map()));
  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);

  // Untag the length for the loop below.
  __ SmiUntag(ecx);

  // Copy the fixed array slots.
  Label loop;
  __ bind(&loop);
  __ mov(ebx, Operand(edx, -1 * kPointerSize));  // Skip receiver.
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx);
  __ add(edi, Immediate(kPointerSize));
  __ sub(edx, Immediate(kPointerSize));
  __ dec(ecx);
  __ j(not_zero, &loop);

  // Return and remove the on-stack parameters.
  __ bind(&done);
  __ ret(3 * kPointerSize);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kNewStrictArguments, 3, 1);
}

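// Entry point into generated (irregexp) code. Most of the stub classifies
// the subject string - sequential, cons, sliced or external, one- or
// two-byte - so the matcher can be handed a flat pointer to characters,
// then builds the nine-argument C frame the generated code expects, calls
// it, and unpacks the captures into last_match_info.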
void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Just jump directly to runtime if native RegExp is not selected at compile
  // time or if regexp entry in generated code is turned off by a runtime
  // switch.
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExecRT, 4, 1);
#else  // V8_INTERPRETED_REGEXP

  // Stack frame on entry.
  //  esp[0]: return address
  //  esp[4]: last_match_info (expected JSArray)
  //  esp[8]: previous index
  //  esp[12]: subject string
  //  esp[16]: JSRegExp object

  static const int kLastMatchInfoOffset = 1 * kPointerSize;
  static const int kPreviousIndexOffset = 2 * kPointerSize;
  static const int kSubjectOffset = 3 * kPointerSize;
  static const int kJSRegExpOffset = 4 * kPointerSize;

  Label runtime;
  Factory* factory = isolate()->factory();

  // Ensure that a RegExp stack is allocated.
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(isolate());
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(isolate());
  __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ test(ebx, ebx);
  __ j(zero, &runtime);

  // Check that the first argument is a JSRegExp object.
  __ mov(eax, Operand(esp, kJSRegExpOffset));
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfSmi(eax, &runtime);
  __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx);
  __ j(not_equal, &runtime);

  // Check that the RegExp has been compiled (data contains a fixed array).
  __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
  if (FLAG_debug_code) {
    __ test(ecx, Immediate(kSmiTagMask));
    __ Check(not_zero, kUnexpectedTypeForRegExpDataFixedArrayExpected);
    __ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx);
    __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
  }

  // ecx: RegExp data (FixedArray)
  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
  __ mov(ebx, FieldOperand(ecx, JSRegExp::kDataTagOffset));
  __ cmp(ebx, Immediate(Smi::FromInt(JSRegExp::IRREGEXP)));
  __ j(not_equal, &runtime);

  // ecx: RegExp data (FixedArray)
  // Check that the number of captures fit in the static offsets vector buffer.
  __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
  // Check (number_of_captures + 1) * 2 <= offsets vector size
  // Or    number_of_captures * 2 <= offsets vector size - 2
  // Multiplying by 2 comes for free since edx is smi-tagged.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
  __ cmp(edx, Isolate::kJSRegexpStaticOffsetsVectorSize - 2);
  __ j(above, &runtime);

  // Reset offset for possibly sliced string.
  __ Move(edi, Immediate(0));
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ JumpIfSmi(eax, &runtime);
  __ mov(edx, eax);  // Make a copy of the original subject string.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));

  // eax: subject string
  // edx: subject string
  // ebx: subject string instance type
  // ecx: RegExp data (FixedArray)
  // Handle subject string according to its encoding and representation:
  // (1) Sequential two byte?  If yes, go to (9).
  // (2) Sequential one byte?  If yes, go to (6).
  // (3) Anything but sequential or cons?  If yes, go to (7).
  // (4) Cons string.  If the string is flat, replace subject with first string.
  //     Otherwise bailout.
  // (5a) Is subject sequential two byte?  If yes, go to (9).
  // (5b) Is subject external?  If yes, go to (8).
  // (6) One byte sequential.  Load regexp code for one byte.
  // (E) Carry on.
  /// [...]

  // Deferred code at the end of the stub:
  // (7) Not a long external string?  If yes, go to (10).
  // (8) External string.  Make it, offset-wise, look like a sequential string.
  // (8a) Is the external string one byte?  If yes, go to (6).
  // (9) Two byte sequential.  Load regexp code for two byte.  Go to (E).
  // (10) Short external string or not a string?  If yes, bail out to runtime.
  // (11) Sliced string.  Replace subject with parent.  Go to (5a).

  Label seq_one_byte_string /* 6 */, seq_two_byte_string /* 9 */,
      external_string /* 8 */, check_underlying /* 5a */,
      not_seq_nor_cons /* 7 */, check_code /* E */,
      not_long_external /* 10 */;

  // (1) Sequential two byte?  If yes, go to (9).
  __ and_(ebx, kIsNotStringMask |
               kStringRepresentationMask |
               kStringEncodingMask |
               kShortExternalStringMask);
  STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);  // Go to (9).

  // (2) Sequential one byte?  If yes, go to (6).
  // Any other sequential string must be one byte.
  __ and_(ebx, Immediate(kIsNotStringMask |
                         kStringRepresentationMask |
                         kShortExternalStringMask));
  __ j(zero, &seq_one_byte_string, Label::kNear);  // Go to (6).

  // (3) Anything but sequential or cons?  If yes, go to (7).
  // We check whether the subject string is a cons, since sequential strings
  // have already been covered.
  STATIC_ASSERT(kConsStringTag < kExternalStringTag);
  STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
  STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
  STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
  __ cmp(ebx, Immediate(kExternalStringTag));
  __ j(greater_equal, &not_seq_nor_cons);  // Go to (7).

  // (4) Cons string.  Check that it's flat.
  // Replace subject with first string and reload instance type.
  __ cmp(FieldOperand(eax, ConsString::kSecondOffset), factory->empty_string());
  __ j(not_equal, &runtime);
  __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset));
  __ bind(&check_underlying);
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ mov(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));

  // (5a) Is subject sequential two byte?  If yes, go to (9).
  __ test_b(ebx, kStringRepresentationMask | kStringEncodingMask);
  STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);  // Go to (9).
  // (5b) Is subject external?  If yes, go to (8).
  __ test_b(ebx, kStringRepresentationMask);
  // The underlying external string is never a short external string.
  STATIC_ASSERT(ExternalString::kMaxShortLength < ConsString::kMinLength);
  STATIC_ASSERT(ExternalString::kMaxShortLength < SlicedString::kMinLength);
  __ j(not_zero, &external_string);  // Go to (8).

  // eax: sequential subject string (or look-alike, external string)
  // edx: original subject string
  // ecx: RegExp data (FixedArray)
  // (6) One byte sequential.  Load regexp code for one byte.
  __ bind(&seq_one_byte_string);
  // Load previous index and check range before edx is overwritten.  We have
  // to use edx instead of eax here because it might have been only made to
  // look like a sequential string when it actually is an external string.
  __ mov(ebx, Operand(esp, kPreviousIndexOffset));
  __ JumpIfNotSmi(ebx, &runtime);
  __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
  __ j(above_equal, &runtime);
  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataOneByteCodeOffset));
  __ Move(ecx, Immediate(1));  // Type is one byte.

  // (E) Carry on.  String handling is done.
  __ bind(&check_code);
  // edx: irregexp code
  // Check that the irregexp code has been generated for the actual string
  // encoding. If it has, the field contains a code object otherwise it contains
  // a smi (code flushing support).
  __ JumpIfSmi(edx, &runtime);

  // eax: subject string
  // ebx: previous index (smi)
  // edx: code
  // ecx: encoding of subject string (1 if one_byte, 0 if two_byte);
  // All checks done. Now push arguments for native regexp code.
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->regexp_entry_native(), 1);

  // Isolates: note we add an additional parameter here (isolate pointer).
  static const int kRegExpExecuteArguments = 9;
  __ EnterApiExitFrame(kRegExpExecuteArguments);

  // Argument 9: Pass current isolate address.
  __ mov(Operand(esp, 8 * kPointerSize),
         Immediate(ExternalReference::isolate_address(isolate())));

  // Argument 8: Indicate that this is a direct call from JavaScript.
  __ mov(Operand(esp, 7 * kPointerSize), Immediate(1));

  // Argument 7: Start (high end) of backtracking stack memory area.
  __ mov(esi, Operand::StaticVariable(address_of_regexp_stack_memory_address));
  __ add(esi, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ mov(Operand(esp, 6 * kPointerSize), esi);

  // Argument 6: Set the number of capture registers to zero to force global
  // regexps to behave as non-global.  This does not affect non-global regexps.
  __ mov(Operand(esp, 5 * kPointerSize), Immediate(0));

  // Argument 5: static offsets vector buffer.
  __ mov(Operand(esp, 4 * kPointerSize),
         Immediate(ExternalReference::address_of_static_offsets_vector(
             isolate())));

  // Argument 2: Previous index.
  __ SmiUntag(ebx);
  __ mov(Operand(esp, 1 * kPointerSize), ebx);

  // Argument 1: Original subject string.
  // The original subject is in the previous stack frame. Therefore we have to
  // use ebp, which points exactly to one pointer size below the previous esp.
  // (Because creating a new stack frame pushes the previous ebp onto the stack
  // and thereby moves up esp by one kPointerSize.)
  __ mov(esi, Operand(ebp, kSubjectOffset + kPointerSize));
  __ mov(Operand(esp, 0 * kPointerSize), esi);

  // esi: original subject string
  // eax: underlying subject string
  // ebx: previous index
  // ecx: encoding of subject string (1 if one_byte 0 if two_byte);
  // edx: code
  // Argument 4: End of string data
  // Argument 3: Start of string data
  // Prepare start and end index of the input.
  // Load the length from the original sliced string if that is the case.
  __ mov(esi, FieldOperand(esi, String::kLengthOffset));
  __ add(esi, edi);  // Calculate input end wrt offset.
  __ SmiUntag(edi);
  __ add(ebx, edi);  // Calculate input start wrt offset.

  // ebx: start index of the input string
  // esi: end index of the input string
  Label setup_two_byte, setup_rest;
  __ test(ecx, ecx);
  __ j(zero, &setup_two_byte, Label::kNear);
  __ SmiUntag(esi);
  __ lea(ecx, FieldOperand(eax, esi, times_1, SeqOneByteString::kHeaderSize));
  __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
  __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqOneByteString::kHeaderSize));
  __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.
  __ jmp(&setup_rest, Label::kNear);

  __ bind(&setup_two_byte);
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);  // esi is smi (powered by 2).
  __ lea(ecx, FieldOperand(eax, esi, times_1, SeqTwoByteString::kHeaderSize));
  __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
  __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize));
  __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.

  __ bind(&setup_rest);

  // Locate the code entry and call it.
  __ add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ call(edx);

  // Drop arguments and come back to JS mode.
  __ LeaveApiExitFrame(true);

  // Check the result.
  Label success;
  __ cmp(eax, 1);
  // We expect exactly one result since we force the called regexp to behave
  // as non-global.
  __ j(equal, &success);
  Label failure;
  __ cmp(eax, NativeRegExpMacroAssembler::FAILURE);
  __ j(equal, &failure);
  __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION);
  // If not exception it can only be retry. Handle that in the runtime system.
  __ j(not_equal, &runtime);
  // Result must now be exception. If there is no pending exception already a
  // stack overflow (on the backtrack stack) was detected in RegExp code but
  // haven't created the exception yet. Handle that in the runtime system.
  // TODO(592): Rerunning the RegExp to get the stack overflow exception.
  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
                                      isolate());
  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
  __ mov(eax, Operand::StaticVariable(pending_exception));
  __ cmp(edx, eax);
  __ j(equal, &runtime);
  // For exception, throw the exception again.

  // Clear the pending exception variable.
  __ mov(Operand::StaticVariable(pending_exception), edx);

  // Special handling of termination exceptions which are uncatchable
  // by javascript code.
  __ cmp(eax, factory->termination_exception());
  Label throw_termination_exception;
  __ j(equal, &throw_termination_exception, Label::kNear);

  // Handle normal exception by following handler chain.
  __ Throw(eax);

  __ bind(&throw_termination_exception);
  __ ThrowUncatchable(eax);

  __ bind(&failure);
  // For failure to match, return null.
  __ mov(eax, factory->null_value());
  __ ret(4 * kPointerSize);

  // Load RegExp data.
  __ bind(&success);
  __ mov(eax, Operand(esp, kJSRegExpOffset));
  __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
  __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate number of capture registers (number_of_captures + 1) * 2.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  __ add(edx, Immediate(2));  // edx was a smi.

  // edx: Number of capture registers
  // Load last_match_info which is still known to be a fast case JSArray.
  // Check that the fourth object is a JSArray object.
  __ mov(eax, Operand(esp, kLastMatchInfoOffset));
  __ JumpIfSmi(eax, &runtime);
  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
  __ j(not_equal, &runtime);
  // Check that the JSArray is in fast case.
  __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
  __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset));
  __ cmp(eax, factory->fixed_array_map());
  __ j(not_equal, &runtime);
  // Check that the last match info has space for the capture registers and the
  // additional information.
  __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset));
  __ SmiUntag(eax);
  __ sub(eax, Immediate(RegExpImpl::kLastMatchOverhead));
  __ cmp(edx, eax);
  __ j(greater, &runtime);

  // ebx: last_match_info backing store (FixedArray)
  // edx: number of capture registers
  // Store the capture count.
  __ SmiTag(edx);  // Number of capture registers to smi.
  __ mov(FieldOperand(ebx, RegExpImpl::kLastCaptureCountOffset), edx);
  __ SmiUntag(edx);  // Number of capture registers back from smi.
  // Store last subject and last input.
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ mov(ecx, eax);
  __ mov(FieldOperand(ebx, RegExpImpl::kLastSubjectOffset), eax);
  __ RecordWriteField(ebx, RegExpImpl::kLastSubjectOffset, eax, edi,
                      kDontSaveFPRegs);
  __ mov(eax, ecx);
  __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax);
  __ RecordWriteField(ebx, RegExpImpl::kLastInputOffset, eax, edi,
                      kDontSaveFPRegs);

  // Get the static offsets vector filled by the native regexp code.
  ExternalReference address_of_static_offsets_vector =
      ExternalReference::address_of_static_offsets_vector(isolate());
  __ mov(ecx, Immediate(address_of_static_offsets_vector));

  // ebx: last_match_info backing store (FixedArray)
  // ecx: offsets vector
  // edx: number of capture registers
  Label next_capture, done;
  // Capture register counter starts from number of capture registers and
  // counts down until wrapping after zero.
  __ bind(&next_capture);
  __ sub(edx, Immediate(1));
  __ j(negative, &done, Label::kNear);
  // Read the value from the static offsets vector buffer.
  __ mov(edi, Operand(ecx, edx, times_int_size, 0));
  __ SmiTag(edi);
  // Store the smi value in the last match info.
  __ mov(FieldOperand(ebx,
                      edx,
                      times_pointer_size,
                      RegExpImpl::kFirstCaptureOffset),
         edi);
  __ jmp(&next_capture);
  __ bind(&done);

  // Return last match info.
  __ mov(eax, Operand(esp, kLastMatchInfoOffset));
  __ ret(4 * kPointerSize);

  // Do the runtime call to execute the regexp.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kRegExpExecRT, 4, 1);

  // Deferred code for string handling.
  // (7) Not a long external string?  If yes, go to (10).
  __ bind(&not_seq_nor_cons);
  // Compare flags are still set from (3).
  __ j(greater, &not_long_external, Label::kNear);  // Go to (10).

  // (8) External string.  Short external strings have been ruled out.
  __ bind(&external_string);
  // Reload instance type.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    // Sequential strings have already been ruled out.
    __ test_b(ebx, kIsIndirectStringMask);
    __ Assert(zero, kExternalStringExpectedButNotFound);
  }
  __ mov(eax, FieldOperand(eax, ExternalString::kResourceDataOffset));
  // Move the pointer so that offset-wise, it looks like a sequential string.
  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
  __ sub(eax, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  STATIC_ASSERT(kTwoByteStringTag == 0);
  // (8a) Is the external string one byte?  If yes, go to (6).
  __ test_b(ebx, kStringEncodingMask);
  __ j(not_zero, &seq_one_byte_string);  // Go to (6).

  // eax: sequential subject string (or look-alike, external string)
  // edx: original subject string
  // ecx: RegExp data (FixedArray)
  // (9) Two byte sequential.  Load regexp code for two byte.  Go to (E).
  __ bind(&seq_two_byte_string);
  // Load previous index and check range before edx is overwritten.  We have
  // to use edx instead of eax here because it might have been only made to
  // look like a sequential string when it actually is an external string.
  __ mov(ebx, Operand(esp, kPreviousIndexOffset));
  __ JumpIfNotSmi(ebx, &runtime);
  __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
  __ j(above_equal, &runtime);
  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset));
  __ Move(ecx, Immediate(0));  // Type is two byte.
  __ jmp(&check_code);  // Go to (E).

  // (10) Not a string or a short external string?  If yes, bail out to runtime.
  __ bind(&not_long_external);
  // Catch non-string subject or short external string.
  STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0);
  __ test(ebx, Immediate(kIsNotStringMask | kShortExternalStringTag));
  __ j(not_zero, &runtime);

  // (11) Sliced string.  Replace subject with parent.  Go to (5a).
  // Load offset into edi and replace subject string with parent.
  __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset));
  __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset));
  __ jmp(&check_underlying);  // Go to (5a).
#endif  // V8_INTERPRETED_REGEXP
}

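// Returns the canonical result (LESS or GREATER) that makes the given
// relational condition come out false; it is used as the answer for
// comparisons involving undefined or NaN, where every relation is false.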
static int NegativeComparisonResult(Condition cc) {
  DCHECK(cc != equal);
  DCHECK((cc == less) || (cc == less_equal)
      || (cc == greater) || (cc == greater_equal));
  return (cc == greater || cc == greater_equal) ? LESS : GREATER;
}


static void CheckInputType(MacroAssembler* masm, Register input,
                           CompareICState::State expected, Label* fail) {
  Label ok;
  if (expected == CompareICState::SMI) {
    __ JumpIfNotSmi(input, fail);
  } else if (expected == CompareICState::NUMBER) {
    __ JumpIfSmi(input, &ok);
    __ cmp(FieldOperand(input, HeapObject::kMapOffset),
           Immediate(masm->isolate()->factory()->heap_number_map()));
    __ j(not_equal, fail);
  }
  // We could be strict about internalized/non-internalized here, but as long
  // as hydrogen doesn't care, the stub doesn't have to care either.
  __ bind(&ok);
}


static void BranchIfNotInternalizedString(MacroAssembler* masm,
                                          Label* label,
                                          Register object,
                                          Register scratch) {
  __ JumpIfSmi(object, label);
  __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ test(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, label);
}

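// Generic comparison. The cases handled, in order: two smis (a tagged
// subtraction whose sign is corrected with not_ on overflow), identical
// heap objects (with special-casing for undefined and NaN), strict equality
// on objects and oddballs, heap numbers via the FPU, internalized strings,
// flat one-byte strings, and finally a tail call to the EQUALS/COMPARE
// builtins for everything else.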
void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
  Label check_unequal_objects;
  Condition cc = GetCondition();

  Label miss;
  CheckInputType(masm, edx, left(), &miss);
  CheckInputType(masm, eax, right(), &miss);

  // Compare two smis.
  Label non_smi, smi_done;
  __ mov(ecx, edx);
  __ or_(ecx, eax);
  __ JumpIfNotSmi(ecx, &non_smi, Label::kNear);
  __ sub(edx, eax);  // Return on the result of the subtraction.
  __ j(no_overflow, &smi_done, Label::kNear);
  __ not_(edx);  // Correct sign in case of overflow. edx is never 0 here.
  __ bind(&smi_done);
  __ mov(eax, edx);
  __ ret(0);
  __ bind(&non_smi);

  // NOTICE! This code is only reached after a smi-fast-case check, so
  // it is certain that at least one operand isn't a smi.

  // Identical objects can be compared fast, but there are some tricky cases
  // for NaN and undefined.
  Label generic_heap_number_comparison;
  {
    Label not_identical;
    __ cmp(eax, edx);
    __ j(not_equal, &not_identical);

    if (cc != equal) {
      // Check for undefined.  undefined OP undefined is false even though
      // undefined == undefined.
      Label check_for_nan;
      __ cmp(edx, isolate()->factory()->undefined_value());
      __ j(not_equal, &check_for_nan, Label::kNear);
      __ Move(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
      __ ret(0);
      __ bind(&check_for_nan);
    }

    // Test for NaN. Compare heap numbers in a general way,
    // to handle NaNs correctly.
    __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->heap_number_map()));
    __ j(equal, &generic_heap_number_comparison, Label::kNear);
    if (cc != equal) {
      // Call runtime on identical JSObjects.  Otherwise return equal.
      __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
      __ j(above_equal, &not_identical);
    }
    __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
    __ ret(0);


    __ bind(&not_identical);
  }

  // Strict equality can quickly decide whether objects are equal.
  // Non-strict object equality is slower, so it is handled later in the stub.
  if (cc == equal && strict()) {
    Label slow;  // Fallthrough label.
    Label not_smis;
    // If we're doing a strict equality comparison, we don't have to do
    // type conversion, so we generate code to do fast comparison for objects
    // and oddballs. Non-smi numbers and strings still go through the usual
    // slow-case code.
    // If either is a Smi (we know that not both are), then they can only
    // be equal if the other is a HeapNumber. If so, use the slow case.
    STATIC_ASSERT(kSmiTag == 0);
    DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
    __ mov(ecx, Immediate(kSmiTagMask));
    __ and_(ecx, eax);
    __ test(ecx, edx);
    __ j(not_zero, &not_smis, Label::kNear);
    // One operand is a smi.

    // Check whether the non-smi is a heap number.
    STATIC_ASSERT(kSmiTagMask == 1);
    // ecx still holds eax & kSmiTag, which is either zero or one.
    __ sub(ecx, Immediate(0x01));
    __ mov(ebx, edx);
    __ xor_(ebx, eax);
    __ and_(ebx, ecx);  // ebx holds either 0 or eax ^ edx.
    __ xor_(ebx, eax);
    // if eax was smi, ebx is now edx, else eax.

    // Check if the non-smi operand is a heap number.
    __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->heap_number_map()));
    // If heap number, handle it in the slow case.
    __ j(equal, &slow, Label::kNear);
    // Return non-equal (ebx is not zero)
    __ mov(eax, ebx);
    __ ret(0);

    __ bind(&not_smis);
    // If either operand is a JSObject or an oddball value, then they are not
    // equal since their pointers are different
    // There is no test for undetectability in strict equality.

    // Get the type of the first operand.
    // If the first object is a JS object, we have done pointer comparison.
    Label first_non_object;
    STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
    __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
    __ j(below, &first_non_object, Label::kNear);

    // Return non-zero (eax is not zero)
    Label return_not_equal;
    STATIC_ASSERT(kHeapObjectTag != 0);
    __ bind(&return_not_equal);
    __ ret(0);

    __ bind(&first_non_object);
    // Check for oddballs: true, false, null, undefined.
    __ CmpInstanceType(ecx, ODDBALL_TYPE);
    __ j(equal, &return_not_equal);

    __ CmpObjectType(edx, FIRST_SPEC_OBJECT_TYPE, ecx);
    __ j(above_equal, &return_not_equal);

    // Check for oddballs: true, false, null, undefined.
    __ CmpInstanceType(ecx, ODDBALL_TYPE);
    __ j(equal, &return_not_equal);

    // Fall through to the general case.
    __ bind(&slow);
  }

  // Generate the number comparison code.
  Label non_number_comparison;
  Label unordered;
  __ bind(&generic_heap_number_comparison);
  FloatingPointHelper::CheckFloatOperands(
      masm, &non_number_comparison, ebx);
  FloatingPointHelper::LoadFloatOperand(masm, eax);
  FloatingPointHelper::LoadFloatOperand(masm, edx);
  __ FCmp();

  // Don't base result on EFLAGS when a NaN is involved.
  __ j(parity_even, &unordered, Label::kNear);

  Label below_label, above_label;
  // Return a result of -1, 0, or 1, based on EFLAGS.
  __ j(below, &below_label, Label::kNear);
  __ j(above, &above_label, Label::kNear);

  __ Move(eax, Immediate(0));
  __ ret(0);

  __ bind(&below_label);
  __ mov(eax, Immediate(Smi::FromInt(-1)));
  __ ret(0);

  __ bind(&above_label);
  __ mov(eax, Immediate(Smi::FromInt(1)));
  __ ret(0);

  // If one of the numbers was NaN, then the result is always false.
  // The cc is never not-equal.
  __ bind(&unordered);
  DCHECK(cc != not_equal);
  if (cc == less || cc == less_equal) {
    __ mov(eax, Immediate(Smi::FromInt(1)));
  } else {
    __ mov(eax, Immediate(Smi::FromInt(-1)));
  }
  __ ret(0);

  // The number comparison code did not provide a valid result.
  __ bind(&non_number_comparison);

  // Fast negative check for internalized-to-internalized equality.
  Label check_for_strings;
  if (cc == equal) {
    BranchIfNotInternalizedString(masm, &check_for_strings, eax, ecx);
    BranchIfNotInternalizedString(masm, &check_for_strings, edx, ecx);

    // We've already checked for object identity, so if both operands
    // are internalized they aren't equal. Register eax already holds a
    // non-zero value, which indicates not equal, so just return.
    __ ret(0);
  }

  __ bind(&check_for_strings);

  __ JumpIfNotBothSequentialOneByteStrings(edx, eax, ecx, ebx,
                                           &check_unequal_objects);

  // Inline comparison of one-byte strings.
  if (cc == equal) {
    StringHelper::GenerateFlatOneByteStringEquals(masm, edx, eax, ecx, ebx);
  } else {
    StringHelper::GenerateCompareFlatOneByteStrings(masm, edx, eax, ecx, ebx,
                                                    edi);
  }
#ifdef DEBUG
  __ Abort(kUnexpectedFallThroughFromStringComparison);
#endif

  __ bind(&check_unequal_objects);
  if (cc == equal && !strict()) {
    // Non-strict equality.  Objects are unequal if
    // they are both JSObjects and not undetectable,
    // and their pointers are different.
    Label not_both_objects;
    Label return_unequal;
    // At most one is a smi, so we can test for smi by adding the two.
    // A smi plus a heap object has the low bit set, a heap object plus
    // a heap object has the low bit clear.
    STATIC_ASSERT(kSmiTag == 0);
    STATIC_ASSERT(kSmiTagMask == 1);
    __ lea(ecx, Operand(eax, edx, times_1, 0));
    __ test(ecx, Immediate(kSmiTagMask));
    __ j(not_zero, &not_both_objects, Label::kNear);
    __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
    __ j(below, &not_both_objects, Label::kNear);
    __ CmpObjectType(edx, FIRST_SPEC_OBJECT_TYPE, ebx);
    __ j(below, &not_both_objects, Label::kNear);
    // We do not bail out after this point.  Both are JSObjects, and
    // they are equal if and only if both are undetectable.
    // The and of the undetectable flags is 1 if and only if they are equal.
    __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    __ j(zero, &return_unequal, Label::kNear);
    __ test_b(FieldOperand(ebx, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    __ j(zero, &return_unequal, Label::kNear);
    // The objects are both undetectable, so they both compare as the value
    // undefined, and are equal.
    __ Move(eax, Immediate(EQUAL));
    __ bind(&return_unequal);
    // Return non-equal by returning the non-zero object pointer in eax,
    // or return equal if we fell through to here.
    __ ret(0);  // eax, edx were pushed
    __ bind(&not_both_objects);
  }

  // Push arguments below the return address.
  __ pop(ecx);
  __ push(edx);
  __ push(eax);

  // Figure out which native to call and setup the arguments.
  Builtins::JavaScript builtin;
  if (cc == equal) {
    builtin = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
  } else {
    builtin = Builtins::COMPARE;
    __ push(Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
  }

  // Restore return address on the stack.
  __ push(ecx);

  // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  __ InvokeBuiltin(builtin, JUMP_FUNCTION);

  __ bind(&miss);
  GenerateMiss(masm);
}

static void GenerateRecordCallTarget(MacroAssembler* masm) {
  // Cache the called function in a feedback vector slot. Cache states
  // are uninitialized, monomorphic (indicated by a JSFunction), and
  // megamorphic.
  // eax : number of arguments to the construct function
  // ebx : Feedback vector
  // edx : slot in feedback vector (Smi)
  // edi : the function to call
  Isolate* isolate = masm->isolate();
  Label initialize, done, miss, megamorphic, not_array_function;

  // Load the cache state into ecx.
  __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
                           FixedArray::kHeaderSize));

  // A monomorphic cache hit or an already megamorphic state: invoke the
  // function without changing the state.
  __ cmp(ecx, edi);
  __ j(equal, &done, Label::kFar);
  __ cmp(ecx, Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
  __ j(equal, &done, Label::kFar);

  if (!FLAG_pretenuring_call_new) {
    // If we came here, we need to see if we are the array function.
    // If we didn't have a matching function, and we didn't find the megamorph
    // sentinel, then we have in the slot either some other function or an
    // AllocationSite. Do a map check on the object in ecx.
    Handle<Map> allocation_site_map = isolate->factory()->allocation_site_map();
    __ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map));
    __ j(not_equal, &miss);

    // Make sure the function is the Array() function
    __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
    __ cmp(edi, ecx);
    __ j(not_equal, &megamorphic);
    __ jmp(&done, Label::kFar);
  }

  __ bind(&miss);

  // A monomorphic miss (i.e, here the cache is not uninitialized) goes
  // megamorphic.
  __ cmp(ecx, Immediate(TypeFeedbackVector::UninitializedSentinel(isolate)));
  __ j(equal, &initialize);
  // MegamorphicSentinel is an immortal immovable object (undefined) so no
  // write-barrier is needed.
  __ bind(&megamorphic);
  __ mov(
      FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize),
      Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
  __ jmp(&done, Label::kFar);

  // An uninitialized cache is patched with the function or sentinel to
  // indicate the ElementsKind if function is the Array constructor.
  __ bind(&initialize);
  if (!FLAG_pretenuring_call_new) {
    // Make sure the function is the Array() function
    __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
    __ cmp(edi, ecx);
    __ j(not_equal, &not_array_function);

    // The target function is the Array constructor,
    // Create an AllocationSite if we don't already have it, store it in the
    // slot.
    {
      FrameScope scope(masm, StackFrame::INTERNAL);

      // Arguments register must be smi-tagged to call out.
      __ SmiTag(eax);
      __ push(eax);
      __ push(edi);
      __ push(edx);
      __ push(ebx);

      CreateAllocationSiteStub create_stub(isolate);
      __ CallStub(&create_stub);

      __ pop(ebx);
      __ pop(edx);
      __ pop(edi);
      __ pop(eax);
      __ SmiUntag(eax);
    }
    __ jmp(&done);

    __ bind(&not_array_function);
  }

  __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
                      FixedArray::kHeaderSize),
         edi);
  // We won't need edx or ebx anymore, just save edi
  __ push(edi);
  __ push(ebx);
  __ push(edx);
  __ RecordWriteArray(ebx, edi, edx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ pop(edx);
  __ pop(ebx);
  __ pop(edi);

  __ bind(&done);
}

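// Helpers for the call stubs below. In sloppy mode a primitive receiver
// must be boxed before the call; strict-mode and native functions take the
// receiver as-is, which EmitContinueIfStrictOrNative detects by testing the
// relevant flag bytes on the callee's SharedFunctionInfo.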
1637 static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) {
1638  // Do not transform the receiver for strict mode functions.
1640  __ test_b(FieldOperand(ecx, SharedFunctionInfo::kStrictModeByteOffset),
1642  __ j(not_equal, cont);
1643 
1644  // Do not transform the receiver for natives (shared already in ecx).
1645  __ test_b(FieldOperand(ecx, SharedFunctionInfo::kNativeByteOffset),
1647  __ j(not_equal, cont);
1648 }
1649 
1650 
1651 static void EmitSlowCase(Isolate* isolate,
1652  MacroAssembler* masm,
1653  int argc,
1654  Label* non_function) {
1655  // Check for function proxy.
1656  __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
1657  __ j(not_equal, non_function);
1658  __ pop(ecx);
1659  __ push(edi); // Put the proxy as an additional argument under the return address.
1660  __ push(ecx);
1661  __ Move(eax, Immediate(argc + 1));
1662  __ Move(ebx, Immediate(0));
1663  __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
1664  {
1665  Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
1666  __ jmp(adaptor, RelocInfo::CODE_TARGET);
1667  }
1668 
1669  // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
1670  // of the original receiver from the call site).
1671  __ bind(non_function);
1672  __ mov(Operand(esp, (argc + 1) * kPointerSize), edi);
1673  __ Move(eax, Immediate(argc));
1674  __ Move(ebx, Immediate(0));
1675  __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
1676  Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
1677  __ jmp(adaptor, RelocInfo::CODE_TARGET);
1678 }
1679 
1680 
1681 static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) {
1682  // Wrap the receiver and patch it back onto the stack.
1683  { FrameScope frame_scope(masm, StackFrame::INTERNAL);
1684  __ push(edi);
1685  __ push(eax);
1686  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1687  __ pop(edi);
1688  }
1689  __ mov(Operand(esp, (argc + 1) * kPointerSize), eax);
1690  __ jmp(cont);
1691 }
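// The TO_OBJECT builtin boxes primitive receivers, so for example a sloppy
// (function () { return typeof this; }).call("s") observes "object"; the
// final mov patches the boxed receiver back into the caller's stack slot.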
1692 
1693 
1694 static void CallFunctionNoFeedback(MacroAssembler* masm,
1695  int argc, bool needs_checks,
1696  bool call_as_method) {
1697  // edi : the function to call
1698  Label slow, non_function, wrap, cont;
1699 
1700  if (needs_checks) {
1701  // Check that the function really is a JavaScript function.
1702  __ JumpIfSmi(edi, &non_function);
1703 
1704  // Go to the slow case if we do not have a function.
1705  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
1706  __ j(not_equal, &slow);
1707  }
1708 
1709  // Fast-case: Just invoke the function.
1710  ParameterCount actual(argc);
1711 
1712  if (call_as_method) {
1713  if (needs_checks) {
1714  EmitContinueIfStrictOrNative(masm, &cont);
1715  }
1716 
1717  // Load the receiver from the stack.
1718  __ mov(eax, Operand(esp, (argc + 1) * kPointerSize));
1719 
1720  if (needs_checks) {
1721  __ JumpIfSmi(eax, &wrap);
1722 
1723  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
1724  __ j(below, &wrap);
1725  } else {
1726  __ jmp(&wrap);
1727  }
1728 
1729  __ bind(&cont);
1730  }
1731 
1732  __ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper());
1733 
1734  if (needs_checks) {
1735  // Slow-case: Non-function called.
1736  __ bind(&slow);
1737  // (non_function is bound in EmitSlowCase)
1738  EmitSlowCase(masm->isolate(), masm, argc, &non_function);
1739  }
1740 
1741  if (call_as_method) {
1742  __ bind(&wrap);
1743  EmitWrapCase(masm, argc, &cont);
1744  }
1745 }
1746 
1747 
1748 void CallFunctionStub::Generate(MacroAssembler* masm) {
1749  CallFunctionNoFeedback(masm, argc(), NeedsChecks(), CallAsMethod());
1750 }
1751 
1752 
1753 void CallConstructStub::Generate(MacroAssembler* masm) {
1754  // eax : number of arguments
1755  // ebx : feedback vector
1756  // edx : (only if ebx is not the megamorphic symbol) slot in feedback
1757  // vector (Smi)
1758  // edi : constructor function
1759  Label slow, non_function_call;
1760 
1761  // Check that function is not a smi.
1762  __ JumpIfSmi(edi, &non_function_call);
1763  // Check that function is a JSFunction.
1764  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
1765  __ j(not_equal, &slow);
1766 
1767  if (RecordCallTarget()) {
1768  GenerateRecordCallTarget(masm);
1769 
1770  if (FLAG_pretenuring_call_new) {
1771  // Put the AllocationSite from the feedback vector into ebx.
1772  // By adding kPointerSize we encode that we know the AllocationSite
1773  // entry is at the feedback vector slot given by edx + 1.
1774  __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
1775  FixedArray::kHeaderSize + kPointerSize));
1776  } else {
1777  Label feedback_register_initialized;
1778  // Put the AllocationSite from the feedback vector into ebx, or undefined.
1779  __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
1780  FixedArray::kHeaderSize));
1781  Handle<Map> allocation_site_map =
1782  isolate()->factory()->allocation_site_map();
1783  __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
1784  __ j(equal, &feedback_register_initialized);
1785  __ mov(ebx, isolate()->factory()->undefined_value());
1786  __ bind(&feedback_register_initialized);
1787  }
1788 
1789  __ AssertUndefinedOrAllocationSite(ebx);
1790  }
1791 
1792  // Jump to the function-specific construct stub.
1793  Register jmp_reg = ecx;
1794  __ mov(jmp_reg, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
1795  __ mov(jmp_reg, FieldOperand(jmp_reg,
1796  SharedFunctionInfo::kConstructStubOffset));
1797  __ lea(jmp_reg, FieldOperand(jmp_reg, Code::kHeaderSize));
1798  __ jmp(jmp_reg);
1799 
1800  // edi: called object
1801  // eax: number of arguments
1802  // ecx: object map
1803  Label do_call;
1804  __ bind(&slow);
1805  __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
1806  __ j(not_equal, &non_function_call);
1807  __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
1808  __ jmp(&do_call);
1809 
1810  __ bind(&non_function_call);
1811  __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
1812  __ bind(&do_call);
1813  // Set expected number of arguments to zero (not changing eax).
1814  __ Move(ebx, Immediate(0));
1815  Handle<Code> arguments_adaptor =
1816  isolate()->builtins()->ArgumentsAdaptorTrampoline();
1817  __ jmp(arguments_adaptor, RelocInfo::CODE_TARGET);
1818 }
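// Note that none of the paths above return here: the JSFunction path
// tail-jumps to the function-specific construct stub, and the proxy and
// non-function paths tail-jump to the ArgumentsAdaptorTrampoline with the
// chosen CALL_* builtin entry in edx.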
1819 
1820 
1821 static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
1822  __ mov(vector, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1823  __ mov(vector, FieldOperand(vector, JSFunction::kSharedFunctionInfoOffset));
1824  __ mov(vector, FieldOperand(vector,
1825  SharedFunctionInfo::kFeedbackVectorOffset));
1826 }
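// The vector is reached from the current frame in two hops:
// frame function -> SharedFunctionInfo -> TypeFeedbackVector.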
1827 
1828 
1829 void CallIC_ArrayStub::Generate(MacroAssembler* masm) {
1830  // edi - function
1831  // edx - slot id
1832  Label miss;
1833  int argc = arg_count();
1834  ParameterCount actual(argc);
1835 
1836  EmitLoadTypeFeedbackVector(masm, ebx);
1837 
1838  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
1839  __ cmp(edi, ecx);
1840  __ j(not_equal, &miss);
1841 
1842  __ mov(eax, arg_count());
1843  __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
1844  FixedArray::kHeaderSize));
1845 
1846  // Verify that ecx contains an AllocationSite.
1847  Factory* factory = masm->isolate()->factory();
1848  __ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
1849  factory->allocation_site_map());
1850  __ j(not_equal, &miss);
1851 
1852  __ mov(ebx, ecx);
1853  ArrayConstructorStub stub(masm->isolate(), arg_count());
1854  __ TailCallStub(&stub);
1855 
1856  __ bind(&miss);
1857  GenerateMiss(masm);
1858 
1859  // The slow case; we need this no matter what to complete a call after a miss.
1860  CallFunctionNoFeedback(masm,
1861  arg_count(),
1862  true,
1863  CallAsMethod());
1864 
1865  // Unreachable.
1866  __ int3();
1867 }
1868 
1869 
1870 void CallICStub::Generate(MacroAssembler* masm) {
1871  // edi - function
1872  // edx - slot id
1873  Isolate* isolate = masm->isolate();
1874  Label extra_checks_or_miss, slow_start;
1875  Label slow, non_function, wrap, cont;
1876  Label have_js_function;
1877  int argc = arg_count();
1878  ParameterCount actual(argc);
1879 
1880  EmitLoadTypeFeedbackVector(masm, ebx);
1881 
1882  // The checks. First, does edi match the recorded monomorphic target?
1883  __ cmp(edi, FieldOperand(ebx, edx, times_half_pointer_size,
1884  FixedArray::kHeaderSize));
1885  __ j(not_equal, &extra_checks_or_miss);
1886 
1887  __ bind(&have_js_function);
1888  if (CallAsMethod()) {
1889  EmitContinueIfStrictOrNative(masm, &cont);
1890 
1891  // Load the receiver from the stack.
1892  __ mov(eax, Operand(esp, (argc + 1) * kPointerSize));
1893 
1894  __ JumpIfSmi(eax, &wrap);
1895 
1896  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
1897  __ j(below, &wrap);
1898 
1899  __ bind(&cont);
1900  }
1901 
1902  __ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper());
1903 
1904  __ bind(&slow);
1905  EmitSlowCase(isolate, masm, argc, &non_function);
1906 
1907  if (CallAsMethod()) {
1908  __ bind(&wrap);
1909  EmitWrapCase(masm, argc, &cont);
1910  }
1911 
1912  __ bind(&extra_checks_or_miss);
1913  Label miss;
1914 
1915  __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
1916  FixedArray::kHeaderSize));
1917  __ cmp(ecx, Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
1918  __ j(equal, &slow_start);
1919  __ cmp(ecx, Immediate(TypeFeedbackVector::UninitializedSentinel(isolate)));
1920  __ j(equal, &miss);
1921 
1922  if (!FLAG_trace_ic) {
1923  // We are going megamorphic. If the feedback is a JSFunction, it is fine
1924  // to handle it here. More complex cases are dealt with in the runtime.
1925  __ AssertNotSmi(ecx);
1926  __ CmpObjectType(ecx, JS_FUNCTION_TYPE, ecx);
1927  __ j(not_equal, &miss);
1928  __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
1929  FixedArray::kHeaderSize),
1930  Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
1931  __ jmp(&slow_start);
1932  }
1933 
1934  // We are here because tracing is on or we are going monomorphic.
1935  __ bind(&miss);
1936  GenerateMiss(masm);
1937 
1938  // The slow case.
1939  __ bind(&slow_start);
1940 
1941  // Check that the function really is a JavaScript function.
1942  __ JumpIfSmi(edi, &non_function);
1943 
1944  // Go to the slow case if we do not have a function.
1945  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
1946  __ j(not_equal, &slow);
1947  __ jmp(&have_js_function);
1948 
1949  // Unreachable.
1950  __ int3();
1951 }
1952 
1953 
1954 void CallICStub::GenerateMiss(MacroAssembler* masm) {
1955  // Get the receiver of the function from the stack; 1 ~ return address.
1956  __ mov(ecx, Operand(esp, (arg_count() + 1) * kPointerSize));
1957 
1958  {
1959  FrameScope scope(masm, StackFrame::INTERNAL);
1960 
1961  // Push the receiver and the function and feedback info.
1962  __ push(ecx);
1963  __ push(edi);
1964  __ push(ebx);
1965  __ push(edx);
1966 
1967  // Call the entry.
1968  IC::UtilityId id = GetICState() == DEFAULT ? IC::kCallIC_Miss
1969  : IC::kCallIC_Customization_Miss;
1970 
1971  ExternalReference miss = ExternalReference(IC_Utility(id),
1972  masm->isolate());
1973  __ CallExternalReference(miss, 4);
1974 
1975  // Move result to edi and exit the internal frame.
1976  __ mov(edi, eax);
1977  }
1978 }
1979 
1980 
1981 bool CEntryStub::NeedsImmovableCode() {
1982  return false;
1983 }
1984 
1985 
1986 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
1987  CEntryStub::GenerateAheadOfTime(isolate);
1988  StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
1990  // It is important that the store buffer overflow stubs are generated first.
1991  ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
1992  CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
1993  BinaryOpICStub::GenerateAheadOfTime(isolate);
1994  BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
1995 }
1996 
1997 
1998 void CodeStub::GenerateFPStubs(Isolate* isolate) {
1999  CEntryStub save_doubles(isolate, 1, kSaveFPRegs);
2000  // Stubs might already be in the snapshot, detect that and don't regenerate,
2001  // which would lead to code stub initialization state being messed up.
2002  Code* save_doubles_code;
2003  if (!save_doubles.FindCodeInCache(&save_doubles_code)) {
2004  save_doubles_code = *(save_doubles.GetCode());
2005  }
2006  isolate->set_fp_stubs_generated(true);
2007 }
2008 
2009 
2010 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
2011  CEntryStub stub(isolate, 1, kDontSaveFPRegs);
2012  stub.GetCode();
2013 }
2014 
2015 
2016 void CEntryStub::Generate(MacroAssembler* masm) {
2017  // eax: number of arguments including receiver
2018  // ebx: pointer to C function (C callee-saved)
2019  // ebp: frame pointer (restored after C call)
2020  // esp: stack pointer (restored after C call)
2021  // esi: current context (C callee-saved)
2022  // edi: JS function of the caller (C callee-saved)
2023 
2024  ProfileEntryHookStub::MaybeCallEntryHook(masm);
2025 
2026  // Enter the exit frame that transitions from JavaScript to C++.
2027  __ EnterExitFrame(save_doubles());
2028 
2029  // ebx: pointer to C function (C callee-saved)
2030  // ebp: frame pointer (restored after C call)
2031  // esp: stack pointer (restored after C call)
2032  // edi: number of arguments including receiver (C callee-saved)
2033  // esi: pointer to the first argument (C callee-saved)
2034 
2035  // Result returned in eax, or eax+edx if result size is 2.
2036 
2037  // Check stack alignment.
2038  if (FLAG_debug_code) {
2039  __ CheckStackAlignment();
2040  }
2041 
2042  // Call C function.
2043  __ mov(Operand(esp, 0 * kPointerSize), edi); // argc.
2044  __ mov(Operand(esp, 1 * kPointerSize), esi); // argv.
2045  __ mov(Operand(esp, 2 * kPointerSize),
2046  Immediate(ExternalReference::isolate_address(isolate())));
2047  __ call(ebx);
2048  // Result is in eax or edx:eax - do not destroy these registers!
2049 
2050  // Runtime functions should not return 'the hole'. Allowing it to escape may
2051  // lead to crashes in the IC code later.
2052  if (FLAG_debug_code) {
2053  Label okay;
2054  __ cmp(eax, isolate()->factory()->the_hole_value());
2055  __ j(not_equal, &okay, Label::kNear);
2056  __ int3();
2057  __ bind(&okay);
2058  }
2059 
2060  // Check result for exception sentinel.
2061  Label exception_returned;
2062  __ cmp(eax, isolate()->factory()->exception());
2063  __ j(equal, &exception_returned);
2064 
2065  ExternalReference pending_exception_address(
2066  Isolate::kPendingExceptionAddress, isolate());
2067 
2068  // Check that there is no pending exception, otherwise we
2069  // should have returned the exception sentinel.
2070  if (FLAG_debug_code) {
2071  __ push(edx);
2072  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
2073  Label okay;
2074  __ cmp(edx, Operand::StaticVariable(pending_exception_address));
2075  // Cannot use check here as it attempts to generate call into runtime.
2076  __ j(equal, &okay, Label::kNear);
2077  __ int3();
2078  __ bind(&okay);
2079  __ pop(edx);
2080  }
2081 
2082  // Exit the JavaScript to C++ exit frame.
2083  __ LeaveExitFrame(save_doubles());
2084  __ ret(0);
2085 
2086  // Handling of exception.
2087  __ bind(&exception_returned);
2088 
2089  // Retrieve the pending exception.
2090  __ mov(eax, Operand::StaticVariable(pending_exception_address));
2091 
2092  // Clear the pending exception.
2093  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
2094  __ mov(Operand::StaticVariable(pending_exception_address), edx);
2095 
2096  // Special handling of termination exceptions, which are uncatchable
2097  // by JavaScript code.
2098  Label throw_termination_exception;
2099  __ cmp(eax, isolate()->factory()->termination_exception());
2100  __ j(equal, &throw_termination_exception);
2101 
2102  // Handle normal exception.
2103  __ Throw(eax);
2104 
2105  __ bind(&throw_termination_exception);
2106  __ ThrowUncatchable(eax);
2107 }
2108 
2109 
2110 void JSEntryStub::Generate(MacroAssembler* masm) {
2111  Label invoke, handler_entry, exit;
2112  Label not_outermost_js, not_outermost_js_2;
2113 
2114  ProfileEntryHookStub::MaybeCallEntryHook(masm);
2115 
2116  // Set up frame.
2117  __ push(ebp);
2118  __ mov(ebp, esp);
2119 
2120  // Push marker in two places.
2121  int marker = type();
2122  __ push(Immediate(Smi::FromInt(marker))); // context slot
2123  __ push(Immediate(Smi::FromInt(marker))); // function slot
2124  // Save callee-saved registers (C calling conventions).
2125  __ push(edi);
2126  __ push(esi);
2127  __ push(ebx);
2128 
2129  // Save copies of the top frame descriptor on the stack.
2130  ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate());
2131  __ push(Operand::StaticVariable(c_entry_fp));
2132 
2133  // If this is the outermost JS call, set js_entry_sp value.
2134  ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
2135  __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
2136  __ j(not_equal, &not_outermost_js, Label::kNear);
2137  __ mov(Operand::StaticVariable(js_entry_sp), ebp);
2138  __ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
2139  __ jmp(&invoke, Label::kNear);
2140  __ bind(&not_outermost_js);
2141  __ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
2142 
2143  // Jump to a faked try block that does the invoke, with a faked catch
2144  // block that sets the pending exception.
2145  __ jmp(&invoke);
2146  __ bind(&handler_entry);
2147  handler_offset_ = handler_entry.pos();
2148  // Caught exception: Store result (exception) in the pending exception
2149  // field in the JSEnv and return a failure sentinel.
2150  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
2151  isolate());
2152  __ mov(Operand::StaticVariable(pending_exception), eax);
2153  __ mov(eax, Immediate(isolate()->factory()->exception()));
2154  __ jmp(&exit);
2155 
2156  // Invoke: Link this frame into the handler chain. There's only one
2157  // handler block in this code object, so its index is 0.
2158  __ bind(&invoke);
2159  __ PushTryHandler(StackHandler::JS_ENTRY, 0);
2160 
2161  // Clear any pending exceptions.
2162  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
2163  __ mov(Operand::StaticVariable(pending_exception), edx);
2164 
2165  // Fake a receiver (NULL).
2166  __ push(Immediate(0)); // receiver
2167 
2168  // Invoke the function by calling through JS entry trampoline builtin and
2169  // pop the faked function when we return. Notice that we cannot store a
2170  // reference to the trampoline code directly in this stub, because the
2171  // builtin stubs may not have been generated yet.
2172  if (type() == StackFrame::ENTRY_CONSTRUCT) {
2173  ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
2174  isolate());
2175  __ mov(edx, Immediate(construct_entry));
2176  } else {
2177  ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
2178  __ mov(edx, Immediate(entry));
2179  }
2180  __ mov(edx, Operand(edx, 0)); // deref address
2181  __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
2182  __ call(edx);
2183 
2184  // Unlink this frame from the handler chain.
2185  __ PopTryHandler();
2186 
2187  __ bind(&exit);
2188  // Check if the current stack frame is marked as the outermost JS frame.
2189  __ pop(ebx);
2190  __ cmp(ebx, Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
2191  __ j(not_equal, &not_outermost_js_2);
2192  __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
2193  __ bind(&not_outermost_js_2);
2194 
2195  // Restore the top frame descriptor from the stack.
2196  __ pop(Operand::StaticVariable(ExternalReference(
2197  Isolate::kCEntryFPAddress, isolate())));
2198 
2199  // Restore callee-saved registers (C calling conventions).
2200  __ pop(ebx);
2201  __ pop(esi);
2202  __ pop(edi);
2203  __ add(esp, Immediate(2 * kPointerSize)); // remove markers
2204 
2205  // Restore frame pointer and return.
2206  __ pop(ebp);
2207  __ ret(0);
2208 }
2209 
2210 
2211 // Generate stub code for instanceof.
2212 // This code can patch a call site inlined cache of the instanceof check,
2213 // which looks like this.
2214 //
2215 // 81 ff XX XX XX XX cmp edi, <the hole, patched to a map>
2216 // 75 0a jne <some near label>
2217 // b8 XX XX XX XX mov eax, <the hole, patched to either true or false>
2218 //
2219 // If call site patching is requested the stack will have the delta from the
2220 // return address to the cmp instruction just below the return address. This
2221 // also means that call site patching can only take place with arguments in
2222 // registers. TOS looks like this when call site patching is requested
2223 //
2224 // esp[0] : return address
2225 // esp[4] : delta from return address to cmp instruction
2226 //
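// The patching deltas defined in the stub follow from that byte sequence:
// the cmp immediate begins 2 bytes in (after 81 ff), the mov opcode sits
// 8 bytes in, and its immediate 9 bytes in; these are kDeltaToCmpImmediate,
// kDeltaToMov and kDeltaToMovImmediate below.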
2227 void InstanceofStub::Generate(MacroAssembler* masm) {
2228  // Call site inlining and patching implies arguments in registers.
2229  DCHECK(HasArgsInRegisters() || !HasCallSiteInlineCheck());
2230 
2231  // Fixed register usage throughout the stub.
2232  Register object = eax; // Object (lhs).
2233  Register map = ebx; // Map of the object.
2234  Register function = edx; // Function (rhs).
2235  Register prototype = edi; // Prototype of the function.
2236  Register scratch = ecx;
2237 
2238  // Constants describing the call site code to patch.
2239  static const int kDeltaToCmpImmediate = 2;
2240  static const int kDeltaToMov = 8;
2241  static const int kDeltaToMovImmediate = 9;
2242  static const int8_t kCmpEdiOperandByte1 = bit_cast<int8_t, uint8_t>(0x3b);
2243  static const int8_t kCmpEdiOperandByte2 = bit_cast<int8_t, uint8_t>(0x3d);
2244  static const int8_t kMovEaxImmediateByte = bit_cast<int8_t, uint8_t>(0xb8);
2245 
2246  DCHECK_EQ(object.code(), InstanceofStub::left().code());
2247  DCHECK_EQ(function.code(), InstanceofStub::right().code());
2248 
2249  // Get the object and function - they are always both needed.
2250  Label slow, not_js_object;
2251  if (!HasArgsInRegisters()) {
2252  __ mov(object, Operand(esp, 2 * kPointerSize));
2253  __ mov(function, Operand(esp, 1 * kPointerSize));
2254  }
2255 
2256  // Check that the left hand is a JS object.
2257  __ JumpIfSmi(object, &not_js_object);
2258  __ IsObjectJSObjectType(object, map, scratch, &not_js_object);
2259 
2260  // If there is a call site cache don't look in the global cache, but do the
2261  // real lookup and update the call site cache.
2262  if (!HasCallSiteInlineCheck() && !ReturnTrueFalseObject()) {
2263  // Look up the function and the map in the instanceof cache.
2264  Label miss;
2265  __ CompareRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
2266  __ j(not_equal, &miss, Label::kNear);
2267  __ CompareRoot(map, scratch, Heap::kInstanceofCacheMapRootIndex);
2268  __ j(not_equal, &miss, Label::kNear);
2269  __ LoadRoot(eax, Heap::kInstanceofCacheAnswerRootIndex);
2270  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
2271  __ bind(&miss);
2272  }
2273 
2274  // Get the prototype of the function.
2275  __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true);
2276 
2277  // Check that the function prototype is a JS object.
2278  __ JumpIfSmi(prototype, &slow);
2279  __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
2280 
2281  // Update the global instanceof or call site inlined cache with the current
2282  // map and function. The cached answer will be set when it is known below.
2283  if (!HasCallSiteInlineCheck()) {
2284  __ StoreRoot(map, scratch, Heap::kInstanceofCacheMapRootIndex);
2285  __ StoreRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
2286  } else {
2287  // The constants for the code patching are based on no push instructions
2288  // at the call site.
2289  DCHECK(HasArgsInRegisters());
2290  // Get return address and delta to inlined map check.
2291  __ mov(scratch, Operand(esp, 0 * kPointerSize));
2292  __ sub(scratch, Operand(esp, 1 * kPointerSize));
2293  if (FLAG_debug_code) {
2294  __ cmpb(Operand(scratch, 0), kCmpEdiOperandByte1);
2295  __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp1);
2296  __ cmpb(Operand(scratch, 1), kCmpEdiOperandByte2);
2297  __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp2);
2298  }
2299  __ mov(scratch, Operand(scratch, kDeltaToCmpImmediate));
2300  __ mov(Operand(scratch, 0), map);
2301  }
2302 
2303  // Loop through the prototype chain of the object looking for the function
2304  // prototype.
2305  __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset));
2306  Label loop, is_instance, is_not_instance;
2307  __ bind(&loop);
2308  __ cmp(scratch, prototype);
2309  __ j(equal, &is_instance, Label::kNear);
2310  Factory* factory = isolate()->factory();
2311  __ cmp(scratch, Immediate(factory->null_value()));
2312  __ j(equal, &is_not_instance, Label::kNear);
2313  __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
2314  __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset));
2315  __ jmp(&loop);
2316 
2317  __ bind(&is_instance);
2318  if (!HasCallSiteInlineCheck()) {
2319  __ mov(eax, Immediate(0));
2320  __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
2321  if (ReturnTrueFalseObject()) {
2322  __ mov(eax, factory->true_value());
2323  }
2324  } else {
2325  // Get return address and delta to inlined map check.
2326  __ mov(eax, factory->true_value());
2327  __ mov(scratch, Operand(esp, 0 * kPointerSize));
2328  __ sub(scratch, Operand(esp, 1 * kPointerSize));
2329  if (FLAG_debug_code) {
2330  __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
2331  __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
2332  }
2333  __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
2334  if (!ReturnTrueFalseObject()) {
2335  __ Move(eax, Immediate(0));
2336  }
2337  }
2338  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
2339 
2340  __ bind(&is_not_instance);
2341  if (!HasCallSiteInlineCheck()) {
2342  __ mov(eax, Immediate(Smi::FromInt(1)));
2343  __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
2344  if (ReturnTrueFalseObject()) {
2345  __ mov(eax, factory->false_value());
2346  }
2347  } else {
2348  // Get return address and delta to inlined map check.
2349  __ mov(eax, factory->false_value());
2350  __ mov(scratch, Operand(esp, 0 * kPointerSize));
2351  __ sub(scratch, Operand(esp, 1 * kPointerSize));
2352  if (FLAG_debug_code) {
2353  __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
2354  __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
2355  }
2356  __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
2357  if (!ReturnTrueFalseObject()) {
2358  __ Move(eax, Immediate(Smi::FromInt(1)));
2359  }
2360  }
2361  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
2362 
2363  Label object_not_null, object_not_null_or_smi;
2364  __ bind(&not_js_object);
2365  // Before null, smi and string value checks, check that the rhs is a
2366  // function; for a non-function rhs an exception needs to be thrown.
2367  __ JumpIfSmi(function, &slow, Label::kNear);
2368  __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch);
2369  __ j(not_equal, &slow, Label::kNear);
2370 
2371  // Null is not instance of anything.
2372  __ cmp(object, factory->null_value());
2373  __ j(not_equal, &object_not_null, Label::kNear);
2374  if (ReturnTrueFalseObject()) {
2375  __ mov(eax, factory->false_value());
2376  } else {
2377  __ Move(eax, Immediate(Smi::FromInt(1)));
2378  }
2379  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
2380 
2381  __ bind(&object_not_null);
2382  // Smi values are not instances of anything.
2383  __ JumpIfNotSmi(object, &object_not_null_or_smi, Label::kNear);
2384  if (ReturnTrueFalseObject()) {
2385  __ mov(eax, factory->false_value());
2386  } else {
2387  __ Move(eax, Immediate(Smi::FromInt(1)));
2388  }
2389  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
2390 
2391  __ bind(&object_not_null_or_smi);
2392  // String values are not instances of anything.
2393  Condition is_string = masm->IsObjectStringType(object, scratch, scratch);
2394  __ j(NegateCondition(is_string), &slow, Label::kNear);
2395  if (ReturnTrueFalseObject()) {
2396  __ mov(eax, factory->false_value());
2397  } else {
2398  __ Move(eax, Immediate(Smi::FromInt(1)));
2399  }
2400  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
2401 
2402  // Slow-case: Go through the JavaScript implementation.
2403  __ bind(&slow);
2404  if (!ReturnTrueFalseObject()) {
2405  // Tail call the builtin which returns 0 or 1.
2406  if (HasArgsInRegisters()) {
2407  // Push arguments below return address.
2408  __ pop(scratch);
2409  __ push(object);
2410  __ push(function);
2411  __ push(scratch);
2412  }
2413  __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
2414  } else {
2415  // Call the builtin and convert 0/1 to true/false.
2416  {
2417  FrameScope scope(masm, StackFrame::INTERNAL);
2418  __ push(object);
2419  __ push(function);
2420  __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
2421  }
2422  Label true_value, done;
2423  __ test(eax, eax);
2424  __ j(zero, &true_value, Label::kNear);
2425  __ mov(eax, factory->false_value());
2426  __ jmp(&done, Label::kNear);
2427  __ bind(&true_value);
2428  __ mov(eax, factory->true_value());
2429  __ bind(&done);
2430  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
2431  }
2432 }
2433 
2434 
2435 // -------------------------------------------------------------------------
2436 // StringCharCodeAtGenerator
2437 
2438 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
2439  // If the receiver is a smi trigger the non-string case.
2440  STATIC_ASSERT(kSmiTag == 0);
2441  __ JumpIfSmi(object_, receiver_not_string_);
2442 
2443  // Fetch the instance type of the receiver into result register.
2444  __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
2445  __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
2446  // If the receiver is not a string trigger the non-string case.
2447  __ test(result_, Immediate(kIsNotStringMask));
2448  __ j(not_zero, receiver_not_string_);
2449 
2450  // If the index is non-smi trigger the non-smi case.
2451  STATIC_ASSERT(kSmiTag == 0);
2452  __ JumpIfNotSmi(index_, &index_not_smi_);
2453  __ bind(&got_smi_index_);
2454 
2455  // Check for index out of range.
2456  __ cmp(index_, FieldOperand(object_, String::kLengthOffset));
2457  __ j(above_equal, index_out_of_range_);
2458 
2459  __ SmiUntag(index_);
2460 
2461  Factory* factory = masm->isolate()->factory();
2462  StringCharLoadGenerator::Generate(
2463  masm, factory, object_, index_, result_, &call_runtime_);
2464 
2465  __ SmiTag(result_);
2466  __ bind(&exit_);
2467 }
2468 
2469 
2470 void StringCharCodeAtGenerator::GenerateSlow(
2471  MacroAssembler* masm,
2472  const RuntimeCallHelper& call_helper) {
2473  __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
2474 
2475  // Index is not a smi.
2476  __ bind(&index_not_smi_);
2477  // If index is a heap number, try converting it to an integer.
2478  __ CheckMap(index_,
2479  masm->isolate()->factory()->heap_number_map(),
2480  index_not_number_,
2481  DONT_DO_SMI_CHECK);
2482  call_helper.BeforeCall(masm);
2483  __ push(object_);
2484  __ push(index_); // Consumed by runtime conversion function.
2485  if (index_flags_ == STRING_INDEX_IS_NUMBER) {
2486  __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
2487  } else {
2488  DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
2489  // NumberToSmi discards numbers that are not exact integers.
2490  __ CallRuntime(Runtime::kNumberToSmi, 1);
2491  }
2492  if (!index_.is(eax)) {
2493  // Save the conversion result before the pop instructions below
2494  // have a chance to overwrite it.
2495  __ mov(index_, eax);
2496  }
2497  __ pop(object_);
2498  // Reload the instance type.
2499  __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
2500  __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
2501  call_helper.AfterCall(masm);
2502  // If index is still not a smi, it must be out of range.
2503  STATIC_ASSERT(kSmiTag == 0);
2504  __ JumpIfNotSmi(index_, index_out_of_range_);
2505  // Otherwise, return to the fast path.
2506  __ jmp(&got_smi_index_);
2507 
2508  // Call runtime. We get here when the receiver is a string and the
2509  // index is a number, but the code for getting the actual character
2510  // is too complex (e.g., when the string needs to be flattened).
2511  __ bind(&call_runtime_);
2512  call_helper.BeforeCall(masm);
2513  __ push(object_);
2514  __ SmiTag(index_);
2515  __ push(index_);
2516  __ CallRuntime(Runtime::kStringCharCodeAtRT, 2);
2517  if (!result_.is(eax)) {
2518  __ mov(result_, eax);
2519  }
2520  call_helper.AfterCall(masm);
2521  __ jmp(&exit_);
2522 
2523  __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
2524 }
2525 
2526 
2527 // -------------------------------------------------------------------------
2528 // StringCharFromCodeGenerator
2529 
2530 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
2531  // Fast case of Heap::LookupSingleCharacterStringFromCode.
2532  STATIC_ASSERT(kSmiTag == 0);
2533  STATIC_ASSERT(kSmiShiftSize == 0);
2534  DCHECK(base::bits::IsPowerOfTwo32(String::kMaxOneByteCharCode + 1));
2535  __ test(code_,
2536  Immediate(kSmiTagMask |
2537  ((~String::kMaxOneByteCharCode) << kSmiTagSize)));
2538  __ j(not_zero, &slow_case_);
2539 
2540  Factory* factory = masm->isolate()->factory();
2541  __ Move(result_, Immediate(factory->single_character_string_cache()));
2542  STATIC_ASSERT(kSmiTag == 0);
2543  STATIC_ASSERT(kSmiTagSize == 1);
2544  STATIC_ASSERT(kSmiShiftSize == 0);
2545  // At this point code register contains smi tagged one byte char code.
2546  __ mov(result_, FieldOperand(result_,
2547  code_, times_half_pointer_size,
2548  FixedArray::kHeaderSize));
2549  __ cmp(result_, factory->undefined_value());
2550  __ j(equal, &slow_case_);
2551  __ bind(&exit_);
2552 }
2553 
2554 
2555 void StringCharFromCodeGenerator::GenerateSlow(
2556  MacroAssembler* masm,
2557  const RuntimeCallHelper& call_helper) {
2558  __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
2559 
2560  __ bind(&slow_case_);
2561  call_helper.BeforeCall(masm);
2562  __ push(code_);
2563  __ CallRuntime(Runtime::kCharFromCode, 1);
2564  if (!result_.is(eax)) {
2565  __ mov(result_, eax);
2566  }
2567  call_helper.AfterCall(masm);
2568  __ jmp(&exit_);
2569 
2570  __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
2571 }
2572 
2573 
2574 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
2575  Register dest,
2576  Register src,
2577  Register count,
2578  Register scratch,
2579  String::Encoding encoding) {
2580  DCHECK(!scratch.is(dest));
2581  DCHECK(!scratch.is(src));
2582  DCHECK(!scratch.is(count));
2583 
2584  // Nothing to do for zero characters.
2585  Label done;
2586  __ test(count, count);
2587  __ j(zero, &done);
2588 
2589  // Make count the number of bytes to copy.
2590  if (encoding == String::TWO_BYTE_ENCODING) {
2591  __ shl(count, 1);
2592  }
2593 
2594  Label loop;
2595  __ bind(&loop);
2596  __ mov_b(scratch, Operand(src, 0));
2597  __ mov_b(Operand(dest, 0), scratch);
2598  __ inc(src);
2599  __ inc(dest);
2600  __ dec(count);
2601  __ j(not_zero, &loop);
2602 
2603  __ bind(&done);
2604 }
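// The loop is a plain byte copy; for TWO_BYTE_ENCODING the shl above doubles
// count, so the same one-byte loop moves both bytes of every UC16 character.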
2605 
2606 
2607 void SubStringStub::Generate(MacroAssembler* masm) {
2608  Label runtime;
2609 
2610  // Stack frame on entry.
2611  // esp[0]: return address
2612  // esp[4]: to
2613  // esp[8]: from
2614  // esp[12]: string
2615 
2616  // Make sure first argument is a string.
2617  __ mov(eax, Operand(esp, 3 * kPointerSize));
2618  STATIC_ASSERT(kSmiTag == 0);
2619  __ JumpIfSmi(eax, &runtime);
2620  Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
2621  __ j(NegateCondition(is_string), &runtime);
2622 
2623  // eax: string
2624  // ebx: instance type
2625 
2626  // Calculate length of sub string using the smi values.
2627  __ mov(ecx, Operand(esp, 1 * kPointerSize)); // To index.
2628  __ JumpIfNotSmi(ecx, &runtime);
2629  __ mov(edx, Operand(esp, 2 * kPointerSize)); // From index.
2630  __ JumpIfNotSmi(edx, &runtime);
2631  __ sub(ecx, edx);
2632  __ cmp(ecx, FieldOperand(eax, String::kLengthOffset));
2633  Label not_original_string;
2634  // Shorter than original string's length: an actual substring.
2635  __ j(below, &not_original_string, Label::kNear);
2636  // Longer than original string's length or negative: unsafe arguments.
2637  __ j(above, &runtime);
2638  // Return original string.
2639  Counters* counters = isolate()->counters();
2640  __ IncrementCounter(counters->sub_string_native(), 1);
2641  __ ret(3 * kPointerSize);
2642  __ bind(&not_original_string);
2643 
2644  Label single_char;
2645  __ cmp(ecx, Immediate(Smi::FromInt(1)));
2646  __ j(equal, &single_char);
2647 
2648  // eax: string
2649  // ebx: instance type
2650  // ecx: sub string length (smi)
2651  // edx: from index (smi)
2652  // Deal with different string types: update the index if necessary
2653  // and put the underlying string into edi.
2654  Label underlying_unpacked, sliced_string, seq_or_external_string;
2655  // If the string is not indirect, it can only be sequential or external.
2656  STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
2657  STATIC_ASSERT(kIsIndirectStringMask != 0);
2658  __ test(ebx, Immediate(kIsIndirectStringMask));
2659  __ j(zero, &seq_or_external_string, Label::kNear);
2660 
2661  Factory* factory = isolate()->factory();
2662  __ test(ebx, Immediate(kSlicedNotConsMask));
2663  __ j(not_zero, &sliced_string, Label::kNear);
2664  // Cons string. Check whether it is flat, then fetch first part.
2665  // Flat cons strings have an empty second part.
2666  __ cmp(FieldOperand(eax, ConsString::kSecondOffset),
2667  factory->empty_string());
2668  __ j(not_equal, &runtime);
2669  __ mov(edi, FieldOperand(eax, ConsString::kFirstOffset));
2670  // Update instance type.
2671  __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
2672  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
2673  __ jmp(&underlying_unpacked, Label::kNear);
2674 
2675  __ bind(&sliced_string);
2676  // Sliced string. Fetch parent and adjust start index by offset.
2677  __ add(edx, FieldOperand(eax, SlicedString::kOffsetOffset));
2678  __ mov(edi, FieldOperand(eax, SlicedString::kParentOffset));
2679  // Update instance type.
2680  __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
2681  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
2682  __ jmp(&underlying_unpacked, Label::kNear);
2683 
2684  __ bind(&seq_or_external_string);
2685  // Sequential or external string. Just move string to the expected register.
2686  __ mov(edi, eax);
2687 
2688  __ bind(&underlying_unpacked);
2689 
2690  if (FLAG_string_slices) {
2691  Label copy_routine;
2692  // edi: underlying subject string
2693  // ebx: instance type of underlying subject string
2694  // edx: adjusted start index (smi)
2695  // ecx: length (smi)
2696  __ cmp(ecx, Immediate(Smi::FromInt(SlicedString::kMinLength)));
2697  // Short slice. Copy instead of slicing.
2698  __ j(less, &copy_routine);
2699  // Allocate new sliced string. At this point we do not reload the instance
2700  // type including the string encoding because we simply rely on the info
2701  // provided by the original string. It does not matter if the original
2702  // string's encoding is wrong because we always have to recheck encoding of
2703  // the newly created string's parent anyway due to externalized strings.
2704  Label two_byte_slice, set_slice_header;
2705  STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
2706  STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
2707  __ test(ebx, Immediate(kStringEncodingMask));
2708  __ j(zero, &two_byte_slice, Label::kNear);
2709  __ AllocateOneByteSlicedString(eax, ebx, no_reg, &runtime);
2710  __ jmp(&set_slice_header, Label::kNear);
2711  __ bind(&two_byte_slice);
2712  __ AllocateTwoByteSlicedString(eax, ebx, no_reg, &runtime);
2713  __ bind(&set_slice_header);
2714  __ mov(FieldOperand(eax, SlicedString::kLengthOffset), ecx);
2715  __ mov(FieldOperand(eax, SlicedString::kHashFieldOffset),
2716  Immediate(String::kEmptyHashField));
2717  __ mov(FieldOperand(eax, SlicedString::kParentOffset), edi);
2718  __ mov(FieldOperand(eax, SlicedString::kOffsetOffset), edx);
2719  __ IncrementCounter(counters->sub_string_native(), 1);
2720  __ ret(3 * kPointerSize);
2721 
2722  __ bind(&copy_routine);
2723  }
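  // Reaching here means a slice was not used (slices disabled or the result
  // is shorter than SlicedString::kMinLength), so the characters are copied
  // into a fresh sequential string below.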
2724 
2725  // edi: underlying subject string
2726  // ebx: instance type of underlying subject string
2727  // edx: adjusted start index (smi)
2728  // ecx: length (smi)
2729  // The subject string can only be external or sequential string of either
2730  // encoding at this point.
2731  Label two_byte_sequential, runtime_drop_two, sequential_string;
2732  STATIC_ASSERT(kExternalStringTag != 0);
2733  STATIC_ASSERT(kSeqStringTag == 0);
2734  __ test_b(ebx, kExternalStringTag);
2735  __ j(zero, &sequential_string);
2736 
2737  // Handle external string.
2738  // Rule out short external strings.
2739  STATIC_ASSERT(kShortExternalStringTag != 0);
2740  __ test_b(ebx, kShortExternalStringMask);
2741  __ j(not_zero, &runtime);
2742  __ mov(edi, FieldOperand(edi, ExternalString::kResourceDataOffset));
2743  // Move the pointer so that offset-wise, it looks like a sequential string.
2744  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
2745  __ sub(edi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
2746 
2747  __ bind(&sequential_string);
2748  // Stash away (adjusted) index and (underlying) string.
2749  __ push(edx);
2750  __ push(edi);
2751  __ SmiUntag(ecx);
2752  STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
2753  __ test_b(ebx, kStringEncodingMask);
2754  __ j(zero, &two_byte_sequential);
2755 
2756  // Sequential one byte string. Allocate the result.
2757  __ AllocateOneByteString(eax, ecx, ebx, edx, edi, &runtime_drop_two);
2758 
2759  // eax: result string
2760  // ecx: result string length
2761  // Locate first character of result.
2762  __ mov(edi, eax);
2763  __ add(edi, Immediate(SeqOneByteString::kHeaderSize - kHeapObjectTag));
2764  // Load string argument and locate character of sub string start.
2765  __ pop(edx);
2766  __ pop(ebx);
2767  __ SmiUntag(ebx);
2768  __ lea(edx, FieldOperand(edx, ebx, times_1, SeqOneByteString::kHeaderSize));
2769 
2770  // eax: result string
2771  // ecx: result length
2772  // edi: first character of result
2773  // edx: character of sub string start
2774  StringHelper::GenerateCopyCharacters(
2775  masm, edi, edx, ecx, ebx, String::ONE_BYTE_ENCODING);
2776  __ IncrementCounter(counters->sub_string_native(), 1);
2777  __ ret(3 * kPointerSize);
2778 
2779  __ bind(&two_byte_sequential);
2780  // Sequential two-byte string. Allocate the result.
2781  __ AllocateTwoByteString(eax, ecx, ebx, edx, edi, &runtime_drop_two);
2782 
2783  // eax: result string
2784  // ecx: result string length
2785  // Locate first character of result.
2786  __ mov(edi, eax);
2787  __ add(edi,
2788  Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
2789  // Load string argument and locate character of sub string start.
2790  __ pop(edx);
2791  __ pop(ebx);
2792  // As from is a smi it is already twice the index, which matches the
2793  // size of a two-byte character.
2794  STATIC_ASSERT(kSmiTag == 0);
2795  STATIC_ASSERT(kSmiTagSize == 1 && kSmiShiftSize == 0);
2796  __ lea(edx, FieldOperand(edx, ebx, times_1, SeqTwoByteString::kHeaderSize));
2797 
2798  // eax: result string
2799  // ecx: result length
2800  // edi: first character of result
2801  // edx: character of sub string start
2802  StringHelper::GenerateCopyCharacters(
2803  masm, edi, edx, ecx, ebx, String::TWO_BYTE_ENCODING);
2804  __ IncrementCounter(counters->sub_string_native(), 1);
2805  __ ret(3 * kPointerSize);
2806 
2807  // Drop pushed values on the stack before tail call.
2808  __ bind(&runtime_drop_two);
2809  __ Drop(2);
2810 
2811  // Just jump to runtime to create the sub string.
2812  __ bind(&runtime);
2813  __ TailCallRuntime(Runtime::kSubString, 3, 1);
2814 
2815  __ bind(&single_char);
2816  // eax: string
2817  // ebx: instance type
2818  // ecx: sub string length (smi)
2819  // edx: from index (smi)
2820  StringCharAtGenerator generator(
2821  eax, edx, ecx, eax, &runtime, &runtime, &runtime, STRING_INDEX_IS_NUMBER);
2822  generator.GenerateFast(masm);
2823  __ ret(3 * kPointerSize);
2824  generator.SkipSlow(masm, &runtime);
2825 }
2826 
2827 
2828 void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
2829  Register left,
2830  Register right,
2831  Register scratch1,
2832  Register scratch2) {
2833  Register length = scratch1;
2834 
2835  // Compare lengths.
2836  Label strings_not_equal, check_zero_length;
2837  __ mov(length, FieldOperand(left, String::kLengthOffset));
2838  __ cmp(length, FieldOperand(right, String::kLengthOffset));
2839  __ j(equal, &check_zero_length, Label::kNear);
2840  __ bind(&strings_not_equal);
2841  __ Move(eax, Immediate(Smi::FromInt(NOT_EQUAL)));
2842  __ ret(0);
2843 
2844  // Check if the length is zero.
2845  Label compare_chars;
2846  __ bind(&check_zero_length);
2847  STATIC_ASSERT(kSmiTag == 0);
2848  __ test(length, length);
2849  __ j(not_zero, &compare_chars, Label::kNear);
2850  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
2851  __ ret(0);
2852 
2853  // Compare characters.
2854  __ bind(&compare_chars);
2855  GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2,
2856  &strings_not_equal, Label::kNear);
2857 
2858  // Characters are equal.
2859  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
2860  __ ret(0);
2861 }
2862 
2863 
2864 void StringHelper::GenerateCompareFlatOneByteStrings(
2865  MacroAssembler* masm, Register left, Register right, Register scratch1,
2866  Register scratch2, Register scratch3) {
2867  Counters* counters = masm->isolate()->counters();
2868  __ IncrementCounter(counters->string_compare_native(), 1);
2869 
2870  // Find minimum length.
2871  Label left_shorter;
2872  __ mov(scratch1, FieldOperand(left, String::kLengthOffset));
2873  __ mov(scratch3, scratch1);
2874  __ sub(scratch3, FieldOperand(right, String::kLengthOffset));
2875 
2876  Register length_delta = scratch3;
2877 
2878  __ j(less_equal, &left_shorter, Label::kNear);
2879  // Right string is shorter. Change scratch1 to be length of right string.
2880  __ sub(scratch1, length_delta);
2881  __ bind(&left_shorter);
2882 
2883  Register min_length = scratch1;
2884 
2885  // If either length is zero, just compare lengths.
2886  Label compare_lengths;
2887  __ test(min_length, min_length);
2888  __ j(zero, &compare_lengths, Label::kNear);
2889 
2890  // Compare characters.
2891  Label result_not_equal;
2892  GenerateOneByteCharsCompareLoop(masm, left, right, min_length, scratch2,
2893  &result_not_equal, Label::kNear);
2894 
2895  // Compare lengths - strings up to min-length are equal.
2896  __ bind(&compare_lengths);
2897  __ test(length_delta, length_delta);
2898  Label length_not_equal;
2899  __ j(not_zero, &length_not_equal, Label::kNear);
2900 
2901  // Result is EQUAL.
2902  STATIC_ASSERT(EQUAL == 0);
2903  STATIC_ASSERT(kSmiTag == 0);
2904  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
2905  __ ret(0);
2906 
2907  Label result_greater;
2908  Label result_less;
2909  __ bind(&length_not_equal);
2910  __ j(greater, &result_greater, Label::kNear);
2911  __ jmp(&result_less, Label::kNear);
2912  __ bind(&result_not_equal);
2913  __ j(above, &result_greater, Label::kNear);
2914  __ bind(&result_less);
2915 
2916  // Result is LESS.
2917  __ Move(eax, Immediate(Smi::FromInt(LESS)));
2918  __ ret(0);
2919 
2920  // Result is GREATER.
2921  __ bind(&result_greater);
2922  __ Move(eax, Immediate(Smi::FromInt(GREATER)));
2923  __ ret(0);
2924 }
2925 
2926 
2927 void StringHelper::GenerateOneByteCharsCompareLoop(
2928  MacroAssembler* masm, Register left, Register right, Register length,
2929  Register scratch, Label* chars_not_equal,
2930  Label::Distance chars_not_equal_near) {
2931  // Change index to run from -length to -1 by adding length to string
2932  // start. This means that loop ends when index reaches zero, which
2933  // doesn't need an additional compare.
2934  __ SmiUntag(length);
2935  __ lea(left,
2936  FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
2937  __ lea(right,
2938  FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize));
2939  __ neg(length);
2940  Register index = length; // index = -length;
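  // For example, with a 3-character string the index runs through -3, -2, -1
  // and the inc below makes it hit zero exactly when the string ends.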
2941 
2942  // Compare loop.
2943  Label loop;
2944  __ bind(&loop);
2945  __ mov_b(scratch, Operand(left, index, times_1, 0));
2946  __ cmpb(scratch, Operand(right, index, times_1, 0));
2947  __ j(not_equal, chars_not_equal, chars_not_equal_near);
2948  __ inc(index);
2949  __ j(not_zero, &loop);
2950 }
2951 
2952 
2953 void StringCompareStub::Generate(MacroAssembler* masm) {
2954  Label runtime;
2955 
2956  // Stack frame on entry.
2957  // esp[0]: return address
2958  // esp[4]: right string
2959  // esp[8]: left string
2960 
2961  __ mov(edx, Operand(esp, 2 * kPointerSize)); // left
2962  __ mov(eax, Operand(esp, 1 * kPointerSize)); // right
2963 
2964  Label not_same;
2965  __ cmp(edx, eax);
2966  __ j(not_equal, &not_same, Label::kNear);
2967  STATIC_ASSERT(EQUAL == 0);
2968  STATIC_ASSERT(kSmiTag == 0);
2969  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
2970  __ IncrementCounter(isolate()->counters()->string_compare_native(), 1);
2971  __ ret(2 * kPointerSize);
2972 
2973  __ bind(&not_same);
2974 
2975  // Check that both objects are sequential one-byte strings.
2976  __ JumpIfNotBothSequentialOneByteStrings(edx, eax, ecx, ebx, &runtime);
2977 
2978  // Compare flat one-byte strings.
2979  // Drop arguments from the stack.
2980  __ pop(ecx);
2981  __ add(esp, Immediate(2 * kPointerSize));
2982  __ push(ecx);
2983  StringHelper::GenerateCompareFlatOneByteStrings(masm, edx, eax, ecx, ebx,
2984  edi);
2985 
2986  // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
2987  // tagged as a small integer.
2988  __ bind(&runtime);
2989  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
2990 }
2991 
2992 
2993 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
2994  // ----------- S t a t e -------------
2995  // -- edx : left
2996  // -- eax : right
2997  // -- esp[0] : return address
2998  // -----------------------------------
2999 
3000  // Load ecx with the allocation site. We stick an undefined dummy value here
3001  // and replace it with the real allocation site later when we instantiate this
3002  // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
3003  __ mov(ecx, handle(isolate()->heap()->undefined_value()));
3004 
3005  // Make sure that we actually patched the allocation site.
3006  if (FLAG_debug_code) {
3007  __ test(ecx, Immediate(kSmiTagMask));
3008  __ Assert(not_equal, kExpectedAllocationSite);
3009  __ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
3010  isolate()->factory()->allocation_site_map());
3011  __ Assert(equal, kExpectedAllocationSite);
3012  }
3013 
3014  // Tail call into the stub that handles binary operations with allocation
3015  // sites.
3016  BinaryOpWithAllocationSiteStub stub(isolate(), state());
3017  __ TailCallStub(&stub);
3018 }
3019 
3020 
3021 void CompareICStub::GenerateSmis(MacroAssembler* masm) {
3022  DCHECK(state() == CompareICState::SMI);
3023  Label miss;
3024  __ mov(ecx, edx);
3025  __ or_(ecx, eax);
3026  __ JumpIfNotSmi(ecx, &miss, Label::kNear);
3027 
3028  if (GetCondition() == equal) {
3029  // For equality we do not care about the sign of the result.
3030  __ sub(eax, edx);
3031  } else {
3032  Label done;
3033  __ sub(edx, eax);
3034  __ j(no_overflow, &done, Label::kNear);
3035  // Correct sign of result in case of overflow.
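  // (Smi subtraction only overflows for operands of opposite sign, and the
  // overflowed value has the wrong sign; not_(edx) flips the sign while
  // keeping the value non-zero, which is all the comparison needs.)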
3036  __ not_(edx);
3037  __ bind(&done);
3038  __ mov(eax, edx);
3039  }
3040  __ ret(0);
3041 
3042  __ bind(&miss);
3043  GenerateMiss(masm);
3044 }
3045 
3046 
3047 void CompareICStub::GenerateNumbers(MacroAssembler* masm) {
3048  DCHECK(state() == CompareICState::NUMBER);
3049 
3050  Label generic_stub;
3051  Label unordered, maybe_undefined1, maybe_undefined2;
3052  Label miss;
3053 
3054  if (left() == CompareICState::SMI) {
3055  __ JumpIfNotSmi(edx, &miss);
3056  }
3057  if (right() == CompareICState::SMI) {
3058  __ JumpIfNotSmi(eax, &miss);
3059  }
3060 
3061  // Inlining the double comparison and falling back to the general compare
3062  // stub if NaN is involved or SSE2 or CMOV is unsupported.
3063  __ mov(ecx, edx);
3064  __ and_(ecx, eax);
3065  __ JumpIfSmi(ecx, &generic_stub, Label::kNear);
3066 
3067  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
3068  isolate()->factory()->heap_number_map());
3069  __ j(not_equal, &maybe_undefined1, Label::kNear);
3070  __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
3071  isolate()->factory()->heap_number_map());
3072  __ j(not_equal, &maybe_undefined2, Label::kNear);
3073 
3074  __ bind(&unordered);
3075  __ bind(&generic_stub);
3076  CompareICStub stub(isolate(), op(), CompareICState::GENERIC,
3077  CompareICState::GENERIC, CompareICState::GENERIC);
3078  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
3079 
3080  __ bind(&maybe_undefined1);
3081  if (Token::IsOrderedRelationalCompareOp(op())) {
3082  __ cmp(eax, Immediate(isolate()->factory()->undefined_value()));
3083  __ j(not_equal, &miss);
3084  __ JumpIfSmi(edx, &unordered);
3085  __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
3086  __ j(not_equal, &maybe_undefined2, Label::kNear);
3087  __ jmp(&unordered);
3088  }
3089 
3090  __ bind(&maybe_undefined2);
3091  if (Token::IsOrderedRelationalCompareOp(op())) {
3092  __ cmp(edx, Immediate(isolate()->factory()->undefined_value()));
3093  __ j(equal, &unordered);
3094  }
3095 
3096  __ bind(&miss);
3097  GenerateMiss(masm);
3098 }
3099 
3100 
3101 void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) {
3102  DCHECK(state() == CompareICState::INTERNALIZED_STRING);
3103  DCHECK(GetCondition() == equal);
3104 
3105  // Registers containing left and right operands respectively.
3106  Register left = edx;
3107  Register right = eax;
3108  Register tmp1 = ecx;
3109  Register tmp2 = ebx;
3110 
3111  // Check that both operands are heap objects.
3112  Label miss;
3113  __ mov(tmp1, left);
3114  STATIC_ASSERT(kSmiTag == 0);
3115  __ and_(tmp1, right);
3116  __ JumpIfSmi(tmp1, &miss, Label::kNear);
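  // (With kSmiTag == 0 a smi has a clear low bit, so the low bit of
  // left & right is set only when both operands are heap objects; the
  // JumpIfSmi above therefore rejects "either operand is a smi" in one test.)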
3117 
3118  // Check that both operands are internalized strings.
3119  __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
3120  __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
3121  __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
3122  __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
3123  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
3124  __ or_(tmp1, tmp2);
3125  __ test(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
3126  __ j(not_zero, &miss, Label::kNear);
3127 
3128  // Internalized strings are compared by identity.
3129  Label done;
3130  __ cmp(left, right);
3131  // Make sure eax is non-zero. At this point input operands are
3132  // guaranteed to be non-zero.
3133  DCHECK(right.is(eax));
3134  __ j(not_equal, &done, Label::kNear);
3135  STATIC_ASSERT(EQUAL == 0);
3136  STATIC_ASSERT(kSmiTag == 0);
3137  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
3138  __ bind(&done);
3139  __ ret(0);
3140 
3141  __ bind(&miss);
3142  GenerateMiss(masm);
3143 }
3144 
3145 
3146 void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) {
3147  DCHECK(state() == CompareICState::UNIQUE_NAME);
3148  DCHECK(GetCondition() == equal);
3149 
3150  // Registers containing left and right operands respectively.
3151  Register left = edx;
3152  Register right = eax;
3153  Register tmp1 = ecx;
3154  Register tmp2 = ebx;
3155 
3156  // Check that both operands are heap objects.
3157  Label miss;
3158  __ mov(tmp1, left);
3159  STATIC_ASSERT(kSmiTag == 0);
3160  __ and_(tmp1, right);
3161  __ JumpIfSmi(tmp1, &miss, Label::kNear);
3162 
3163  // Check that both operands are unique names. This leaves the instance
3164  // types loaded in tmp1 and tmp2.
3165  __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
3166  __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
3167  __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
3168  __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
3169 
3170  __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear);
3171  __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear);
3172 
3173  // Unique names are compared by identity.
3174  Label done;
3175  __ cmp(left, right);
3176  // Make sure eax is non-zero. At this point input operands are
3177  // guaranteed to be non-zero.
3178  DCHECK(right.is(eax));
3179  __ j(not_equal, &done, Label::kNear);
3180  STATIC_ASSERT(EQUAL == 0);
3181  STATIC_ASSERT(kSmiTag == 0);
3182  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
3183  __ bind(&done);
3184  __ ret(0);
3185 
3186  __ bind(&miss);
3187  GenerateMiss(masm);
3188 }
3189 
3190 
3191 void CompareICStub::GenerateStrings(MacroAssembler* masm) {
3192  DCHECK(state() == CompareICState::STRING);
3193  Label miss;
3194 
3195  bool equality = Token::IsEqualityOp(op());
3196 
3197  // Registers containing left and right operands respectively.
3198  Register left = edx;
3199  Register right = eax;
3200  Register tmp1 = ecx;
3201  Register tmp2 = ebx;
3202  Register tmp3 = edi;
3203 
3204  // Check that both operands are heap objects.
3205  __ mov(tmp1, left);
3206  STATIC_ASSERT(kSmiTag == 0);
3207  __ and_(tmp1, right);
3208  __ JumpIfSmi(tmp1, &miss);
3209 
3210  // Check that both operands are strings. This leaves the instance
3211  // types loaded in tmp1 and tmp2.
3212  __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
3213  __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
3214  __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
3215  __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
3216  __ mov(tmp3, tmp1);
3217  STATIC_ASSERT(kNotStringTag != 0);
3218  __ or_(tmp3, tmp2);
3219  __ test(tmp3, Immediate(kIsNotStringMask));
3220  __ j(not_zero, &miss);
3221 
3222  // Fast check for identical strings.
3223  Label not_same;
3224  __ cmp(left, right);
3225  __ j(not_equal, &not_same, Label::kNear);
3226  STATIC_ASSERT(EQUAL == 0);
3227  STATIC_ASSERT(kSmiTag == 0);
3228  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
3229  __ ret(0);
3230 
3231  // Handle not identical strings.
3232  __ bind(&not_same);
3233 
3234  // Check that both strings are internalized. If they are, we're done
3235  // because we already know they are not identical. But in the case of
3236  // non-equality compare, we still need to determine the order. We
3237  // also know they are both strings.
3238  if (equality) {
3239  Label do_compare;
3240  STATIC_ASSERT(kInternalizedTag == 0);
3241  __ or_(tmp1, tmp2);
3242  __ test(tmp1, Immediate(kIsNotInternalizedMask));
3243  __ j(not_zero, &do_compare, Label::kNear);
3244  // Make sure eax is non-zero. At this point input operands are
3245  // guaranteed to be non-zero.
3246  DCHECK(right.is(eax));
3247  __ ret(0);
3248  __ bind(&do_compare);
3249  }
3250 
3251  // Check that both strings are sequential one-byte.
3252  Label runtime;
3253  __ JumpIfNotBothSequentialOneByteStrings(left, right, tmp1, tmp2, &runtime);
3254 
3255  // Compare flat one byte strings. Returns when done.
3256  if (equality) {
3257  StringHelper::GenerateFlatOneByteStringEquals(masm, left, right, tmp1,
3258  tmp2);
3259  } else {
3260  StringHelper::GenerateCompareFlatOneByteStrings(masm, left, right, tmp1,
3261  tmp2, tmp3);
3262  }
3263 
3264  // Handle more complex cases in runtime.
3265  __ bind(&runtime);
3266  __ pop(tmp1); // Return address.
3267  __ push(left);
3268  __ push(right);
3269  __ push(tmp1);
3270  if (equality) {
3271  __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
3272  } else {
3273  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
3274  }
3275 
3276  __ bind(&miss);
3277  GenerateMiss(masm);
3278 }
3279 
3280 
3281 void CompareICStub::GenerateObjects(MacroAssembler* masm) {
3282  DCHECK(state() == CompareICState::OBJECT);
3283  Label miss;
3284  __ mov(ecx, edx);
3285  __ and_(ecx, eax);
3286  __ JumpIfSmi(ecx, &miss, Label::kNear);
3287 
3288  __ CmpObjectType(eax, JS_OBJECT_TYPE, ecx);
3289  __ j(not_equal, &miss, Label::kNear);
3290  __ CmpObjectType(edx, JS_OBJECT_TYPE, ecx);
3291  __ j(not_equal, &miss, Label::kNear);
3292 
3293  DCHECK(GetCondition() == equal);
3294  __ sub(eax, edx);
3295  __ ret(0);
3296 
3297  __ bind(&miss);
3298  GenerateMiss(masm);
3299 }
3300 
3301 
3302 void CompareICStub::GenerateKnownObjects(MacroAssembler* masm) {
3303  Label miss;
3304  __ mov(ecx, edx);
3305  __ and_(ecx, eax);
3306  __ JumpIfSmi(ecx, &miss, Label::kNear);
3307 
3308  __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
3309  __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
3310  __ cmp(ecx, known_map_);
3311  __ j(not_equal, &miss, Label::kNear);
3312  __ cmp(ebx, known_map_);
3313  __ j(not_equal, &miss, Label::kNear);
3314 
3315  __ sub(eax, edx);
3316  __ ret(0);
3317 
3318  __ bind(&miss);
3319  GenerateMiss(masm);
3320 }
3321 
3322 
3323 void CompareICStub::GenerateMiss(MacroAssembler* masm) {
3324  {
3325  // Call the runtime system in a fresh internal frame.
3326  ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss),
3327  isolate());
3328  FrameScope scope(masm, StackFrame::INTERNAL);
3329  __ push(edx); // Preserve edx and eax.
3330  __ push(eax);
3331  __ push(edx); // And also use them as the arguments.
3332  __ push(eax);
3333  __ push(Immediate(Smi::FromInt(op())));
3334  __ CallExternalReference(miss, 3);
3335  // Compute the entry point of the rewritten stub.
3336  __ lea(edi, FieldOperand(eax, Code::kHeaderSize));
3337  __ pop(eax);
3338  __ pop(edx);
3339  }
3340 
3341  // Do a tail call to the rewritten stub.
3342  __ jmp(edi);
3343 }
3344 
3345 
3346 // Helper function used to check that the dictionary doesn't contain
3347 // the property. This function may return false negatives, so miss_label
3348 // must always call a backup property check that is complete.
3349 // This function is safe to call if the receiver has fast properties.
3350 // Name must be a unique name and receiver must be a heap object.
3351 void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
3352  Label* miss,
3353  Label* done,
3354  Register properties,
3355  Handle<Name> name,
3356  Register r0) {
3357  DCHECK(name->IsUniqueName());
3358 
3359  // If names of slots in range from 1 to kProbes - 1 for the hash value are
3360  // not equal to the name and kProbes-th slot is not used (its name is the
3361  // undefined value), it guarantees the hash table doesn't contain the
3362  // property. It's true even if some slots represent deleted properties
3363  // (their names are the hole value).
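// Sketch of the inlined probe sequence generated below, in C-like pseudocode
// with mask = capacity - 1 (capacity is a power of two):
//
//   for (int i = 0; i < kInlinedProbes; i++) {
//     index = (hash + GetProbeOffset(i)) & mask;
//     if (dict[index] == undefined) goto done;   // name cannot be present
//     if (dict[index] == name) goto miss;        // name found
//     // hole entries are skipped; other non-unique names go to miss
//   }
//   // otherwise fall back to the out-of-line NameDictionaryLookupStub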
3364  for (int i = 0; i < kInlinedProbes; i++) {
3365  // Compute the masked index: (hash + i + i * i) & mask.
3366  Register index = r0;
3367  // Capacity is smi 2^n.
3368  __ mov(index, FieldOperand(properties, kCapacityOffset));
3369  __ dec(index);
3370  __ and_(index,
3371  Immediate(Smi::FromInt(name->Hash() +
3372  NameDictionary::GetProbeOffset(i))));
3373 
3374  // Scale the index by multiplying by the entry size.
3375  DCHECK(NameDictionary::kEntrySize == 3);
3376  __ lea(index, Operand(index, index, times_2, 0)); // index *= 3.
3377  Register entity_name = r0;
3378  // Having undefined at this place means the name is not contained.
3379  DCHECK_EQ(kSmiTagSize, 1);
3380  __ mov(entity_name, Operand(properties, index, times_half_pointer_size,
3381  kElementsStartOffset - kHeapObjectTag));
3382  __ cmp(entity_name, masm->isolate()->factory()->undefined_value());
3383  __ j(equal, done);
3384 
3385  // Stop if found the property.
3386  __ cmp(entity_name, Handle<Name>(name));
3387  __ j(equal, miss);
3388 
3389  Label good;
3390  // Check for the hole and skip.
3391  __ cmp(entity_name, masm->isolate()->factory()->the_hole_value());
3392  __ j(equal, &good, Label::kNear);
3393 
3394  // Check if the entry name is not a unique name.
3395  __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
3396  __ JumpIfNotUniqueNameInstanceType(
3397  FieldOperand(entity_name, Map::kInstanceTypeOffset), miss);
3398  __ bind(&good);
3399  }
3400 
3401  NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0,
3402  NEGATIVE_LOOKUP);
3403  __ push(Immediate(Handle<Object>(name)));
3404  __ push(Immediate(name->Hash()));
3405  __ CallStub(&stub);
3406  __ test(r0, r0);
3407  __ j(not_zero, miss);
3408  __ jmp(done);
3409 }
3410 
3411 
3412 // Probe the name dictionary in the |elements| register. Jump to the
3413 // |done| label if a property with the given name is found leaving the
3414 // index into the dictionary in |r0|. Jump to the |miss| label
3415 // otherwise.
3416 void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
3417  Label* miss,
3418  Label* done,
3419  Register elements,
3420  Register name,
3421  Register r0,
3422  Register r1) {
3423  DCHECK(!elements.is(r0));
3424  DCHECK(!elements.is(r1));
3425  DCHECK(!name.is(r0));
3426  DCHECK(!name.is(r1));
3427 
3428  __ AssertName(name);
3429 
3430  __ mov(r1, FieldOperand(elements, kCapacityOffset));
3431  __ shr(r1, kSmiTagSize); // convert smi to int
3432  __ dec(r1);
3433 
3434  // Generate an unrolled loop that performs a few probes before
3435  // giving up. Measurements done on Gmail indicate that 2 probes
3436  // cover ~93% of loads from dictionaries.
3437  for (int i = 0; i < kInlinedProbes; i++) {
3438  // Compute the masked index: (hash + i + i * i) & mask.
3439  __ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
3440  __ shr(r0, Name::kHashShift);
3441  if (i > 0) {
3442  __ add(r0, Immediate(NameDictionary::GetProbeOffset(i)));
3443  }
3444  __ and_(r0, r1);
3445 
3446  // Scale the index by multiplying by the entry size.
3447  DCHECK(NameDictionary::kEntrySize == 3);
3448  __ lea(r0, Operand(r0, r0, times_2, 0)); // r0 = r0 * 3
3449 
3450  // Check if the key is identical to the name.
3451  __ cmp(name, Operand(elements,
3452  r0,
3453  times_4,
3454  kElementsStartOffset - kHeapObjectTag));
3455  __ j(equal, done);
3456  }
3457 
3458  NameDictionaryLookupStub stub(masm->isolate(), elements, r1, r0,
3459  POSITIVE_LOOKUP);
3460  __ push(name);
3461  __ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
3462  __ shr(r0, Name::kHashShift);
3463  __ push(r0);
3464  __ CallStub(&stub);
3465 
3466  __ test(r1, r1);
3467  __ j(zero, miss);
3468  __ jmp(done);
3469 }
3470 
3471 
3472 void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
3473  // This stub overrides SometimesSetsUpAFrame() to return false. That means
3474  // we cannot call anything that could cause a GC from this stub.
3475  // Stack frame on entry:
3476  // esp[0 * kPointerSize]: return address.
3477  // esp[1 * kPointerSize]: key's hash.
3478  // esp[2 * kPointerSize]: key.
3479  // Registers:
3480  // dictionary_: NameDictionary to probe.
3481  // result_: used as scratch.
3482  // index_: will hold an index of entry if lookup is successful.
3483  // might alias with result_.
3484  // Returns:
3485  // result_ is zero if the lookup failed, non-zero otherwise.
3486 
3487  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
3488 
3489  Register scratch = result();
3490 
3491  __ mov(scratch, FieldOperand(dictionary(), kCapacityOffset));
3492  __ dec(scratch);
3493  __ SmiUntag(scratch);
3494  __ push(scratch);
3495 
3496  // If names of slots in range from 1 to kProbes - 1 for the hash value are
3497  // not equal to the name and kProbes-th slot is not used (its name is the
3498  // undefined value), it guarantees the hash table doesn't contain the
3499  // property. It's true even if some slots represent deleted properties
3500  // (their names are the hole value).
3501  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
3502  // Compute the masked index: (hash + i + i * i) & mask.
3503  __ mov(scratch, Operand(esp, 2 * kPointerSize));
3504  if (i > 0) {
3505  __ add(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
3506  }
3507  __ and_(scratch, Operand(esp, 0));
3508 
3509  // Scale the index by multiplying by the entry size.
3510  DCHECK(NameDictionary::kEntrySize == 3);
3511  __ lea(index(), Operand(scratch, scratch, times_2, 0)); // index *= 3.
3512 
3513  // Having undefined at this place means the name is not contained.
3514  DCHECK_EQ(kSmiTagSize, 1);
3515  __ mov(scratch, Operand(dictionary(), index(), times_pointer_size,
3516  kElementsStartOffset - kHeapObjectTag));
3517  __ cmp(scratch, isolate()->factory()->undefined_value());
3518  __ j(equal, &not_in_dictionary);
3519 
3520  // Stop if the property is found.
3521  __ cmp(scratch, Operand(esp, 3 * kPointerSize));
3522  __ j(equal, &in_dictionary);
3523 
3524  if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
3525  // If we hit a key that is not a unique name during negative
3526  // lookup we have to bailout as this key might be equal to the
3527  // key we are looking for.
3528 
3529  // Check if the entry name is not a unique name.
3530  __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
3531  __ JumpIfNotUniqueNameInstanceType(
3532  FieldOperand(scratch, Map::kInstanceTypeOffset),
3533  &maybe_in_dictionary);
3534  }
3535  }
3536 
3537  __ bind(&maybe_in_dictionary);
3538  // If we are doing negative lookup then probing failure should be
3539  // treated as a lookup success. For positive lookup probing failure
3540  // should be treated as lookup failure.
3541  if (mode() == POSITIVE_LOOKUP) {
3542  __ mov(result(), Immediate(0));
3543  __ Drop(1);
3544  __ ret(2 * kPointerSize);
3545  }
3546 
3547  __ bind(&in_dictionary);
3548  __ mov(result(), Immediate(1));
3549  __ Drop(1);
3550  __ ret(2 * kPointerSize);
3551 
3552  __ bind(&not_in_dictionary);
3553  __ mov(result(), Immediate(0));
3554  __ Drop(1);
3555  __ ret(2 * kPointerSize);
3556 }
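// Editorial sketch, not part of the original file: all three lookup paths
// above probe with the masked quadratic sequence described in the comments,
// (hash + i + i*i) & mask, where the capacity is a power of two and
// mask == capacity - 1. A standalone model (ModelProbe is hypothetical, and
// the offset formula follows the in-file comments rather than the exact
// NameDictionary::GetProbeOffset definition):
static inline unsigned ModelProbe(unsigned hash, unsigned i,
                                  unsigned capacity) {
  return (hash + i + i * i) & (capacity - 1);
}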
3557 
3558 
3559 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
3560  Isolate* isolate) {
3561  StoreBufferOverflowStub stub(isolate, kDontSaveFPRegs);
3562  stub.GetCode();
3563  StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
3564  stub2.GetCode();
3565 }
3566 
3567 
3568 // Takes the input in 3 registers: address_, value_, and object_. A pointer to
3569 // the value has just been written into the object, now this stub makes sure
3570 // we keep the GC informed. The word in the object where the value has been
3571 // written is in the address register.
3572 void RecordWriteStub::Generate(MacroAssembler* masm) {
3573  Label skip_to_incremental_noncompacting;
3574  Label skip_to_incremental_compacting;
3575 
3576  // The first two instructions are generated with labels so as to get the
3577  // offset fixed up correctly by the bind(Label*) call. We patch it back and
3578 // forth between a compare instruction (a nop in this position) and the
3579  // real branch when we start and stop incremental heap marking.
3580  __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
3581  __ jmp(&skip_to_incremental_compacting, Label::kFar);
3582 
3583  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
3584  __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
3585  MacroAssembler::kReturnAtEnd);
3586  } else {
3587  __ ret(0);
3588  }
3589 
3590  __ bind(&skip_to_incremental_noncompacting);
3591  GenerateIncremental(masm, INCREMENTAL);
3592 
3593  __ bind(&skip_to_incremental_compacting);
3594  GenerateIncremental(masm, INCREMENTAL_COMPACTION);
3595 
3596  // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
3597  // Will be checked in IncrementalMarking::ActivateGeneratedStub.
3598  masm->set_byte_at(0, kTwoByteNopInstruction);
3599  masm->set_byte_at(2, kFiveByteNopInstruction);
3600 }
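// Editorial sketch, not part of the original file: a model of how the mode
// is read back from the two patchable instructions emitted above. The opcode
// values are assumptions carried over from the ia32 port, where 0x3c
// ("cmp al, imm8") serves as the two-byte nop and 0xeb ("jmp rel8") as its
// jump twin, while 0x3d ("cmp eax, imm32") pairs with 0xe9 ("jmp rel32"):
static int ModelGetMode(const unsigned char* stub_start) {
  const unsigned char kModelTwoByteJump = 0xeb;   // assumed opcode
  const unsigned char kModelFiveByteJump = 0xe9;  // assumed opcode
  if (stub_start[0] == kModelTwoByteJump) return 1;   // INCREMENTAL
  if (stub_start[2] == kModelFiveByteJump) return 2;  // INCREMENTAL_COMPACTION
  return 0;                                           // STORE_BUFFER_ONLY
}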
3601 
3602 
3603 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
3604  regs_.Save(masm);
3605 
3606  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
3607  Label dont_need_remembered_set;
3608 
3609  __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
3610  __ JumpIfNotInNewSpace(regs_.scratch0(), // Value.
3611  regs_.scratch0(),
3612  &dont_need_remembered_set);
3613 
3614  __ CheckPageFlag(regs_.object(),
3615  regs_.scratch0(),
3616  1 << MemoryChunk::SCAN_ON_SCAVENGE,
3617  not_zero,
3618  &dont_need_remembered_set);
3619 
3620  // First notify the incremental marker if necessary, then update the
3621  // remembered set.
3622  CheckNeedsToInformIncrementalMarker(
3623  masm,
3624  RecordWriteStub::kUpdateRememberedSetOnNoNeedToInformIncrementalMarker,
3625  mode);
3626  InformIncrementalMarker(masm);
3627  regs_.Restore(masm);
3628  __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
3629  MacroAssembler::kReturnAtEnd);
3630 
3631  __ bind(&dont_need_remembered_set);
3632  }
3633 
3634  CheckNeedsToInformIncrementalMarker(
3635  masm,
3636  RecordWriteStub::kReturnOnNoNeedToInformIncrementalMarker,
3637  mode);
3638  InformIncrementalMarker(masm);
3639  regs_.Restore(masm);
3640  __ ret(0);
3641 }
3642 
3643 
3644 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
3645  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
3646  int argument_count = 3;
3647  __ PrepareCallCFunction(argument_count, regs_.scratch0());
3648  __ mov(Operand(esp, 0 * kPointerSize), regs_.object());
3649  __ mov(Operand(esp, 1 * kPointerSize), regs_.address()); // Slot.
3650  __ mov(Operand(esp, 2 * kPointerSize),
3651  Immediate(ExternalReference::isolate_address(isolate())));
3652 
3653  AllowExternalCallThatCantCauseGC scope(masm);
3654  __ CallCFunction(
3655  ExternalReference::incremental_marking_record_write_function(isolate()),
3656  argument_count);
3657 
3658  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
3659 }
3660 
3661 
3662 void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
3663  MacroAssembler* masm,
3664  OnNoNeedToInformIncrementalMarker on_no_need,
3665  Mode mode) {
3666  Label object_is_black, need_incremental, need_incremental_pop_object;
3667 
3668  __ mov(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
3669  __ and_(regs_.scratch0(), regs_.object());
3670  __ mov(regs_.scratch1(),
3671  Operand(regs_.scratch0(),
3672  MemoryChunk::kWriteBarrierCounterOffset));
3673  __ sub(regs_.scratch1(), Immediate(1));
3674  __ mov(Operand(regs_.scratch0(),
3675  MemoryChunk::kWriteBarrierCounterOffset),
3676  regs_.scratch1());
3677  __ j(negative, &need_incremental);
3678 
3679  // Let's look at the color of the object: If it is not black we don't have
3680  // to inform the incremental marker.
3681  __ JumpIfBlack(regs_.object(),
3682  regs_.scratch0(),
3683  regs_.scratch1(),
3684  &object_is_black,
3685  Label::kNear);
3686 
3687  regs_.Restore(masm);
3688  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
3689  __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
3690  MacroAssembler::kReturnAtEnd);
3691  } else {
3692  __ ret(0);
3693  }
3694 
3695  __ bind(&object_is_black);
3696 
3697  // Get the value from the slot.
3698  __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
3699 
3700  if (mode == INCREMENTAL_COMPACTION) {
3701  Label ensure_not_white;
3702 
3703  __ CheckPageFlag(regs_.scratch0(), // Contains value.
3704  regs_.scratch1(), // Scratch.
3705  MemoryChunk::kEvacuationCandidateMask,
3706  zero,
3707  &ensure_not_white,
3708  Label::kNear);
3709 
3710  __ CheckPageFlag(regs_.object(),
3711  regs_.scratch1(), // Scratch.
3712  MemoryChunk::kSkipEvacuationSlotsRecordingMask,
3713  not_zero,
3714  &ensure_not_white,
3715  Label::kNear);
3716 
3717  __ jmp(&need_incremental);
3718 
3719  __ bind(&ensure_not_white);
3720  }
3721 
3722  // We need an extra register for this, so we push the object register
3723  // temporarily.
3724  __ push(regs_.object());
3725  __ EnsureNotWhite(regs_.scratch0(), // The value.
3726  regs_.scratch1(), // Scratch.
3727  regs_.object(), // Scratch.
3728  &need_incremental_pop_object,
3729  Label::kNear);
3730  __ pop(regs_.object());
3731 
3732  regs_.Restore(masm);
3733  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
3734  __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
3735  MacroAssembler::kReturnAtEnd);
3736  } else {
3737  __ ret(0);
3738  }
3739 
3740  __ bind(&need_incremental_pop_object);
3741  __ pop(regs_.object());
3742 
3743  __ bind(&need_incremental);
3744 
3745  // Fall through when we need to inform the incremental marker.
3746 }
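// Editorial sketch, not part of the original file: the masking at the top of
// CheckNeedsToInformIncrementalMarker finds the page header holding the
// write-barrier counter. Heap pages are power-of-two aligned, so clearing
// the low bits of any interior pointer yields the chunk
// (ModelChunkFromAddress is hypothetical; page_size stands in for
// Page::kPageAlignmentMask + 1):
static inline unsigned long ModelChunkFromAddress(unsigned long addr,
                                                  unsigned long page_size) {
  return addr & ~(page_size - 1);  // page_size must be a power of two
}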
3747 
3748 
3749 void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
3750  // ----------- S t a t e -------------
3751  // -- eax : element value to store
3752  // -- ecx : element index as smi
3753  // -- esp[0] : return address
3754  // -- esp[4] : array literal index in function
3755  // -- esp[8] : array literal
3756  // clobbers ebx, edx, edi
3757  // -----------------------------------
3758 
3759  Label element_done;
3760  Label double_elements;
3761  Label smi_element;
3762  Label slow_elements;
3763  Label slow_elements_from_double;
3764  Label fast_elements;
3765 
3766  // Get array literal index, array literal and its map.
3767  __ mov(edx, Operand(esp, 1 * kPointerSize));
3768  __ mov(ebx, Operand(esp, 2 * kPointerSize));
3769  __ mov(edi, FieldOperand(ebx, JSObject::kMapOffset));
3770 
3771  __ CheckFastElements(edi, &double_elements);
3772 
3773  // Check for FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS elements
3774  __ JumpIfSmi(eax, &smi_element);
3775  __ CheckFastSmiElements(edi, &fast_elements, Label::kNear);
3776 
3777  // Storing into the array literal requires an elements transition. Call
3778  // into the runtime.
3779 
3780  __ bind(&slow_elements);
3781  __ pop(edi); // Pop return address and remember to put back later for tail
3782  // call.
3783  __ push(ebx);
3784  __ push(ecx);
3785  __ push(eax);
3786  __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
3787  __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
3788  __ push(edx);
3789  __ push(edi); // Return return address so that tail call returns to right
3790  // place.
3791  __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
3792 
3793  __ bind(&slow_elements_from_double);
3794  __ pop(edx);
3795  __ jmp(&slow_elements);
3796 
3797  // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object.
3798  __ bind(&fast_elements);
3799  __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
3800  __ lea(ecx, FieldOperand(ebx, ecx, times_half_pointer_size,
3801  FixedArrayBase::kHeaderSize));
3802  __ mov(Operand(ecx, 0), eax);
3803  // Update the write barrier for the array store.
3804  __ RecordWrite(ebx, ecx, eax, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
3805  OMIT_SMI_CHECK);
3806  __ ret(0);
3807 
3808  // Array literal has ElementsKind of FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS,
3809  // and value is Smi.
3810  __ bind(&smi_element);
3811  __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
3812  __ mov(FieldOperand(ebx, ecx, times_half_pointer_size,
3813  FixedArrayBase::kHeaderSize), eax);
3814  __ ret(0);
3815 
3816  // Array literal has ElementsKind of FAST_*_DOUBLE_ELEMENTS.
3817  __ bind(&double_elements);
3818 
3819  __ push(edx);
3820  __ mov(edx, FieldOperand(ebx, JSObject::kElementsOffset));
3821  __ StoreNumberToDoubleElements(eax,
3822  edx,
3823  ecx,
3824  edi,
3825  &slow_elements_from_double,
3826  false);
3827  __ pop(edx);
3828  __ ret(0);
3829 }
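// Editorial sketch, not part of the original file: in the fast and smi cases
// above, ecx still holds the element index as a smi (index << 1), so scaling
// by times_half_pointer_size (x2) instead of times_pointer_size (x4) yields
// index * kPointerSize without untagging (ModelSmiIndexToByteOffset is a
// hypothetical restatement, assuming 32-bit smis and 4-byte pointers):
static inline unsigned ModelSmiIndexToByteOffset(unsigned smi_index) {
  return smi_index * 2;  // == (smi_index >> 1) * 4
}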
3830 
3831 
3832 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
3833  CEntryStub ces(isolate(), 1, kSaveFPRegs);
3834  __ call(ces.GetCode(), RelocInfo::CODE_TARGET);
3835  int parameter_count_offset =
3836  StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
3837  __ mov(ebx, MemOperand(ebp, parameter_count_offset));
3838  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
3839  __ pop(ecx);
3840  int additional_offset =
3841  function_mode() == JS_FUNCTION_STUB_MODE ? kPointerSize : 0;
3842  __ lea(esp, MemOperand(esp, ebx, times_pointer_size, additional_offset));
3843  __ jmp(ecx); // Return to IC Miss stub, continuation still on stack.
3844 }
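// Editorial sketch, not part of the original file: a model of the final lea
// above, which pops the caller's parameters after the trampoline frame is
// left; in JS_FUNCTION_STUB_MODE one extra slot (the receiver) is dropped as
// well (ModelUnwoundSp is hypothetical; 4-byte pointers assumed):
static inline unsigned ModelUnwoundSp(unsigned sp, unsigned parameter_count,
                                      bool js_function_stub_mode) {
  const unsigned kModelPointerSize = 4;
  return sp + parameter_count * kModelPointerSize +
         (js_function_stub_mode ? kModelPointerSize : 0);
}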
3845 
3846 
3847 void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
3848  EmitLoadTypeFeedbackVector(masm, VectorLoadICDescriptor::VectorRegister());
3849  VectorLoadStub stub(isolate(), state());
3850  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
3851 }
3852 
3853 
3854 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
3855  EmitLoadTypeFeedbackVector(masm, VectorLoadICDescriptor::VectorRegister());
3856  VectorKeyedLoadStub stub(isolate());
3857  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
3858 }
3859 
3860 
3861 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
3862  if (masm->isolate()->function_entry_hook() != NULL) {
3863  ProfileEntryHookStub stub(masm->isolate());
3864  masm->CallStub(&stub);
3865  }
3866 }
3867 
3868 
3869 void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
3870  // Save volatile registers.
3871  const int kNumSavedRegisters = 3;
3872  __ push(eax);
3873  __ push(ecx);
3874  __ push(edx);
3875 
3876  // Calculate and push the original stack pointer.
3877  __ lea(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
3878  __ push(eax);
3879 
3880  // Retrieve our return address and use it to calculate the calling
3881  // function's address.
3882  __ mov(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
3883  __ sub(eax, Immediate(Assembler::kCallInstructionLength));
3884  __ push(eax);
3885 
3886  // Call the entry hook.
3887  DCHECK(isolate()->function_entry_hook() != NULL);
3888  __ call(FUNCTION_ADDR(isolate()->function_entry_hook()),
3889  RelocInfo::RUNTIME_ENTRY);
3890  __ add(esp, Immediate(2 * kPointerSize));
3891 
3892  // Restore the saved volatile registers.
3893  __ pop(edx);
3894  __ pop(ecx);
3895  __ pop(eax);
3896 
3897  __ ret(0);
3898 }
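// Editorial sketch, not part of the original file: the entry hook stub
// recovers the calling function's call site by stepping back over the call
// instruction that produced the return address (5 bytes for an ia32
// "call rel32"). ModelCallSite is a hypothetical restatement:
static inline unsigned long ModelCallSite(unsigned long return_address,
                                          int call_instruction_length) {
  return return_address - call_instruction_length;
}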
3899 
3900 
3901 template<class T>
3902 static void CreateArrayDispatch(MacroAssembler* masm,
3903  AllocationSiteOverrideMode mode) {
3904  if (mode == DISABLE_ALLOCATION_SITES) {
3905  T stub(masm->isolate(),
3906  GetInitialFastElementsKind(),
3907  mode);
3908  __ TailCallStub(&stub);
3909  } else if (mode == DONT_OVERRIDE) {
3910  int last_index = GetSequenceIndexFromFastElementsKind(
3911  TERMINAL_FAST_ELEMENTS_KIND);
3912  for (int i = 0; i <= last_index; ++i) {
3913  Label next;
3914  ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
3915  __ cmp(edx, kind);
3916  __ j(not_equal, &next);
3917  T stub(masm->isolate(), kind);
3918  __ TailCallStub(&stub);
3919  __ bind(&next);
3920  }
3921 
3922  // If we reached this point there is a problem.
3923  __ Abort(kUnexpectedElementsKindInArrayConstructor);
3924  } else {
3925  UNREACHABLE();
3926  }
3927 }
3928 
3929 
3930 static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
3931  AllocationSiteOverrideMode mode) {
3932  // ebx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
3933  // edx - kind (if mode != DISABLE_ALLOCATION_SITES)
3934  // eax - number of arguments
3935  // edi - constructor?
3936  // esp[0] - return address
3937  // esp[4] - last argument
3938  Label normal_sequence;
3939  if (mode == DONT_OVERRIDE) {
3940  DCHECK(FAST_SMI_ELEMENTS == 0);
3941  DCHECK(FAST_HOLEY_SMI_ELEMENTS == 1);
3942  DCHECK(FAST_ELEMENTS == 2);
3943  DCHECK(FAST_HOLEY_ELEMENTS == 3);
3944  DCHECK(FAST_DOUBLE_ELEMENTS == 4);
3945  DCHECK(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
3946 
3947  // Is the low bit set? If so, we are holey and that is good.
3948  __ test_b(edx, 1);
3949  __ j(not_zero, &normal_sequence);
3950  }
3951 
3952  // look at the first argument
3953  __ mov(ecx, Operand(esp, kPointerSize));
3954  __ test(ecx, ecx);
3955  __ j(zero, &normal_sequence);
3956 
3957  if (mode == DISABLE_ALLOCATION_SITES) {
3958  ElementsKind initial = GetInitialFastElementsKind();
3959  ElementsKind holey_initial = GetHoleyElementsKind(initial);
3960 
3961  ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
3962  holey_initial,
3963  DISABLE_ALLOCATION_SITES);
3964  __ TailCallStub(&stub_holey);
3965 
3966  __ bind(&normal_sequence);
3967  ArraySingleArgumentConstructorStub stub(masm->isolate(),
3968  initial,
3969  DISABLE_ALLOCATION_SITES);
3970  __ TailCallStub(&stub);
3971  } else if (mode == DONT_OVERRIDE) {
3972  // We are going to create a holey array, but our kind is non-holey.
3973  // Fix kind and retry.
3974  __ inc(edx);
3975 
3976  if (FLAG_debug_code) {
3977  Handle<Map> allocation_site_map =
3978  masm->isolate()->factory()->allocation_site_map();
3979  __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
3980  __ Assert(equal, kExpectedAllocationSite);
3981  }
3982 
3983  // Save the resulting elements kind in type info. We can't just store edx
3984  // in the AllocationSite::transition_info field because elements kind is
3985  // restricted to a portion of the field; upper bits need to be left alone.
3986  __ mov(ecx, FieldOperand(ebx, AllocationSite::kTransitionInfoOffset));
3987  __ add(ecx, Immediate(Smi::FromInt(kFastElementsKindPackedToHoley)));
3988  __ mov(FieldOperand(ebx, AllocationSite::kTransitionInfoOffset), ecx);
3989 
3990  __ bind(&normal_sequence);
3991  int last_index = GetSequenceIndexFromFastElementsKind(
3992  TERMINAL_FAST_ELEMENTS_KIND);
3993  for (int i = 0; i <= last_index; ++i) {
3994  Label next;
3995  ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
3996  __ cmp(edx, kind);
3997  __ j(not_equal, &next);
3998  ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
3999  __ TailCallStub(&stub);
4000  __ bind(&next);
4001  }
4002 
4003  // If we reached this point there is a problem.
4004  __ Abort(kUnexpectedElementsKindInArrayConstructor);
4005  } else {
4006  UNREACHABLE();
4007  }
4008 }
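// Editorial sketch, not part of the original file: the DCHECKed enum layout
// above pairs each packed kind with its holey twin at the next value, which
// is what lets the stub use inc(edx) to go holey and test_b(edx, 1) to ask
// "already holey?". A hypothetical restatement of that arithmetic:
static inline int ModelToHoleyKind(int packed_kind) {
  return packed_kind | 1;  // +1 for packed inputs, identity for holey ones
}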
4009 
4010 
4011 template<class T>
4012 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
4013  int to_index = GetSequenceIndexFromFastElementsKind(
4014  TERMINAL_FAST_ELEMENTS_KIND);
4015  for (int i = 0; i <= to_index; ++i) {
4016  ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4017  T stub(isolate, kind);
4018  stub.GetCode();
4019  if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
4020  T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
4021  stub1.GetCode();
4022  }
4023  }
4024 }
4025 
4026 
4027 void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
4028  ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
4029  isolate);
4030  ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
4031  isolate);
4032  ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
4033  isolate);
4034 }
4035 
4036 
4037 void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
4038  Isolate* isolate) {
4039  ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
4040  for (int i = 0; i < 2; i++) {
4041  // For internal arrays we only need a few things
4042  InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
4043  stubh1.GetCode();
4044  InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
4045  stubh2.GetCode();
4046  InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
4047  stubh3.GetCode();
4048  }
4049 }
4050 
4051 
4052 void ArrayConstructorStub::GenerateDispatchToArrayStub(
4053  MacroAssembler* masm,
4054  AllocationSiteOverrideMode mode) {
4055  if (argument_count() == ANY) {
4056  Label not_zero_case, not_one_case;
4057  __ test(eax, eax);
4058  __ j(not_zero, &not_zero_case);
4059  CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
4060 
4061  __ bind(&not_zero_case);
4062  __ cmp(eax, 1);
4063  __ j(greater, &not_one_case);
4064  CreateArrayDispatchOneArgument(masm, mode);
4065 
4066  __ bind(&not_one_case);
4067  CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
4068  } else if (argument_count() == NONE) {
4069  CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
4070  } else if (argument_count() == ONE) {
4071  CreateArrayDispatchOneArgument(masm, mode);
4072  } else if (argument_count() == MORE_THAN_ONE) {
4073  CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
4074  } else {
4075  UNREACHABLE();
4076  }
4077 }
4078 
4079 
4080 void ArrayConstructorStub::Generate(MacroAssembler* masm) {
4081  // ----------- S t a t e -------------
4082  // -- eax : argc (only if argument_count() == ANY)
4083  // -- ebx : AllocationSite or undefined
4084  // -- edi : constructor
4085  // -- esp[0] : return address
4086  // -- esp[4] : last argument
4087  // -----------------------------------
4088  if (FLAG_debug_code) {
4089  // The array construct code is only set for the global and natives
4090  // builtin Array functions which always have maps.
4091 
4092  // Initial map for the builtin Array function should be a map.
4093  __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
4094  // Will both indicate a NULL and a Smi.
4095  __ test(ecx, Immediate(kSmiTagMask));
4096  __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
4097  __ CmpObjectType(ecx, MAP_TYPE, ecx);
4098  __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
4099 
4100  // We should either have undefined in ebx or a valid AllocationSite
4101  __ AssertUndefinedOrAllocationSite(ebx);
4102  }
4103 
4104  Label no_info;
4105  // If the feedback vector is the undefined value call an array constructor
4106  // that doesn't use AllocationSites.
4107  __ cmp(ebx, isolate()->factory()->undefined_value());
4108  __ j(equal, &no_info);
4109 
4110  // Only look at the lower 16 bits of the transition info.
4111  __ mov(edx, FieldOperand(ebx, AllocationSite::kTransitionInfoOffset));
4112  __ SmiUntag(edx);
4113  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
4114  __ and_(edx, Immediate(AllocationSite::ElementsKindBits::kMask));
4115  GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
4116 
4117  __ bind(&no_info);
4118  GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
4119 }
4120 
4121 
4122 void InternalArrayConstructorStub::GenerateCase(
4123  MacroAssembler* masm, ElementsKind kind) {
4124  Label not_zero_case, not_one_case;
4125  Label normal_sequence;
4126 
4127  __ test(eax, eax);
4128  __ j(not_zero, &not_zero_case);
4129  InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
4130  __ TailCallStub(&stub0);
4131 
4132  __ bind(&not_zero_case);
4133  __ cmp(eax, 1);
4134  __ j(greater, &not_one_case);
4135 
4136  if (IsFastPackedElementsKind(kind)) {
4137  // We might need to create a holey array
4138  // look at the first argument
4139  __ mov(ecx, Operand(esp, kPointerSize));
4140  __ test(ecx, ecx);
4141  __ j(zero, &normal_sequence);
4142 
4143  InternalArraySingleArgumentConstructorStub
4144  stub1_holey(isolate(), GetHoleyElementsKind(kind));
4145  __ TailCallStub(&stub1_holey);
4146  }
4147 
4148  __ bind(&normal_sequence);
4149  InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
4150  __ TailCallStub(&stub1);
4151 
4152  __ bind(&not_one_case);
4153  InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
4154  __ TailCallStub(&stubN);
4155 }
4156 
4157 
4158 void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
4159  // ----------- S t a t e -------------
4160  // -- eax : argc
4161  // -- edi : constructor
4162  // -- esp[0] : return address
4163  // -- esp[4] : last argument
4164  // -----------------------------------
4165 
4166  if (FLAG_debug_code) {
4167  // The array construct code is only set for the global and natives
4168  // builtin Array functions which always have maps.
4169 
4170  // Initial map for the builtin Array function should be a map.
4171  __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
4172  // Will both indicate a NULL and a Smi.
4173  __ test(ecx, Immediate(kSmiTagMask));
4174  __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
4175  __ CmpObjectType(ecx, MAP_TYPE, ecx);
4176  __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
4177  }
4178 
4179  // Figure out the right elements kind
4180  __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
4181 
4182  // Load the map's "bit field 2" into |result|. We only need the first byte,
4183  // but the following masking takes care of that anyway.
4184  __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
4185  // Retrieve elements_kind from bit field 2.
4186  __ DecodeField<Map::ElementsKindBits>(ecx);
4187 
4188  if (FLAG_debug_code) {
4189  Label done;
4190  __ cmp(ecx, Immediate(FAST_ELEMENTS));
4191  __ j(equal, &done);
4192  __ cmp(ecx, Immediate(FAST_HOLEY_ELEMENTS));
4193  __ Assert(equal,
4194  kInvalidElementsKindForInternalArrayOrInternalPackedArray);
4195  __ bind(&done);
4196  }
4197 
4198  Label fast_elements_case;
4199  __ cmp(ecx, Immediate(FAST_ELEMENTS));
4200  __ j(equal, &fast_elements_case);
4201  GenerateCase(masm, FAST_HOLEY_ELEMENTS);
4202 
4203  __ bind(&fast_elements_case);
4204  GenerateCase(masm, FAST_ELEMENTS);
4205 }
4206 
4207 
4208 void CallApiFunctionStub::Generate(MacroAssembler* masm) {
4209  // ----------- S t a t e -------------
4210  // -- eax : callee
4211  // -- ebx : call_data
4212  // -- ecx : holder
4213  // -- edx : api_function_address
4214  // -- esi : context
4215  // --
4216  // -- esp[0] : return address
4217  // -- esp[4] : last argument
4218  // -- ...
4219  // -- esp[argc * 4] : first argument
4220  // -- esp[(argc + 1) * 4] : receiver
4221  // -----------------------------------
4222 
4223  Register callee = eax;
4224  Register call_data = ebx;
4225  Register holder = ecx;
4226  Register api_function_address = edx;
4227  Register return_address = edi;
4228  Register context = esi;
4229 
4230  int argc = this->argc();
4231  bool is_store = this->is_store();
4232  bool call_data_undefined = this->call_data_undefined();
4233 
4234  typedef FunctionCallbackArguments FCA;
4235 
4236  STATIC_ASSERT(FCA::kContextSaveIndex == 6);
4237  STATIC_ASSERT(FCA::kCalleeIndex == 5);
4238  STATIC_ASSERT(FCA::kDataIndex == 4);
4239  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
4240  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
4241  STATIC_ASSERT(FCA::kIsolateIndex == 1);
4242  STATIC_ASSERT(FCA::kHolderIndex == 0);
4243  STATIC_ASSERT(FCA::kArgsLength == 7);
4244 
4245  __ pop(return_address);
4246 
4247  // context save
4248  __ push(context);
4249  // load context from callee
4250  __ mov(context, FieldOperand(callee, JSFunction::kContextOffset));
4251 
4252  // callee
4253  __ push(callee);
4254 
4255  // call data
4256  __ push(call_data);
4257 
4258  Register scratch = call_data;
4259  if (!call_data_undefined) {
4260  // return value
4261  __ push(Immediate(isolate()->factory()->undefined_value()));
4262  // return value default
4263  __ push(Immediate(isolate()->factory()->undefined_value()));
4264  } else {
4265  // return value
4266  __ push(scratch);
4267  // return value default
4268  __ push(scratch);
4269  }
4270  // isolate
4271  __ push(Immediate(reinterpret_cast<int>(isolate())));
4272  // holder
4273  __ push(holder);
4274 
4275  __ mov(scratch, esp);
4276 
4277  // return address
4278  __ push(return_address);
4279 
4280  // The API function gets a reference to the v8::Arguments. If the CPU
4281  // profiler is enabled, a wrapper function is called instead and the
4282  // address of the callback must be passed as an additional parameter,
4283  // so always allocate space for it.
4284  const int kApiArgc = 1 + 1;
4285 
4286  // Allocate the v8::Arguments structure in the arguments' space since
4287  // it's not controlled by GC.
4288  const int kApiStackSpace = 4;
4289 
4290  __ PrepareCallApiFunction(kApiArgc + kApiStackSpace);
4291 
4292  // FunctionCallbackInfo::implicit_args_.
4293  __ mov(ApiParameterOperand(2), scratch);
4294  __ add(scratch, Immediate((argc + FCA::kArgsLength - 1) * kPointerSize));
4295  // FunctionCallbackInfo::values_.
4296  __ mov(ApiParameterOperand(3), scratch);
4297  // FunctionCallbackInfo::length_.
4298  __ Move(ApiParameterOperand(4), Immediate(argc));
4299  // FunctionCallbackInfo::is_construct_call_.
4300  __ Move(ApiParameterOperand(5), Immediate(0));
4301 
4302  // v8::InvocationCallback's argument.
4303  __ lea(scratch, ApiParameterOperand(2));
4304  __ mov(ApiParameterOperand(0), scratch);
4305 
4306  ExternalReference thunk_ref =
4307  ExternalReference::invoke_function_callback(isolate());
4308 
4309  Operand context_restore_operand(ebp,
4310  (2 + FCA::kContextSaveIndex) * kPointerSize);
4311  // Stores return the first JS argument.
4312  int return_value_offset = 0;
4313  if (is_store) {
4314  return_value_offset = 2 + FCA::kArgsLength;
4315  } else {
4316  return_value_offset = 2 + FCA::kReturnValueOffset;
4317  }
4318  Operand return_value_operand(ebp, return_value_offset * kPointerSize);
4319  __ CallApiFunctionAndReturn(api_function_address,
4320  thunk_ref,
4321  ApiParameterOperand(1),
4322  argc + FCA::kArgsLength + 1,
4323  return_value_operand,
4324  &context_restore_operand);
4325 }
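// Editorial sketch, not part of the original file: the pushes above build
// the implicit FunctionCallbackArguments block bottom-up, so the last push
// (holder) lands at the lowest address and the indices match the
// STATIC_ASSERTs. A hypothetical mirror of the resulting 7-slot layout:
struct ModelImplicitArgs {
  void* holder;                // index 0 -- esp points here after the pushes
  void* isolate;               // index 1
  void* return_value_default;  // index 2
  void* return_value;          // index 3
  void* call_data;             // index 4
  void* callee;                // index 5
  void* context_save;          // index 6
};                             // kArgsLength == 7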
4326 
4327 
4328 void CallApiGetterStub::Generate(MacroAssembler* masm) {
4329  // ----------- S t a t e -------------
4330  // -- esp[0] : return address
4331  // -- esp[4] : name
4332  // -- esp[8 - kArgsLength*4] : PropertyCallbackArguments object
4333  // -- ...
4334  // -- edx : api_function_address
4335  // -----------------------------------
4336  DCHECK(edx.is(ApiGetterDescriptor::function_address()));
4337 
4338  // Array for v8::Arguments::values_, a handle for the name, and a
4339  // pointer to the values (treated as a smi by the GC).
4340  const int kStackSpace = PropertyCallbackArguments::kArgsLength + 2;
4341  // Allocate space for the optional callback address parameter in case
4342  // the CPU profiler is active.
4343  const int kApiArgc = 2 + 1;
4344 
4345  Register api_function_address = edx;
4346  Register scratch = ebx;
4347 
4348  // load address of name
4349  __ lea(scratch, Operand(esp, 1 * kPointerSize));
4350 
4351  __ PrepareCallApiFunction(kApiArgc);
4352  __ mov(ApiParameterOperand(0), scratch); // name.
4353  __ add(scratch, Immediate(kPointerSize));
4354  __ mov(ApiParameterOperand(1), scratch); // arguments pointer.
4355 
4356  ExternalReference thunk_ref =
4357  ExternalReference::invoke_accessor_getter_callback(isolate());
4358 
4359  __ CallApiFunctionAndReturn(api_function_address,
4360  thunk_ref,
4361  ApiParameterOperand(2),
4362  kStackSpace,
4363  Operand(ebp, 7 * kPointerSize),
4364  NULL);
4365 }
4366 
4367 
4368 #undef __
4369 
4370 } } // namespace v8::internal
4371 
4372 #endif // V8_TARGET_ARCH_X87