code-stubs-ia32.cc
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_IA32

#include "src/base/bits.h"
#include "src/bootstrapper.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/ic/handler-compiler.h"
#include "src/ic/ic.h"
#include "src/isolate.h"
#include "src/jsregexp.h"
#include "src/regexp-macro-assembler.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

static void InitializeArrayConstructorDescriptor(
    Isolate* isolate, CodeStubDescriptor* descriptor,
    int constant_stack_parameter_count) {
  // register state
  // eax -- number of arguments
  // edi -- function
  // ebx -- allocation site with elements kind
  Address deopt_handler = Runtime::FunctionForId(
      Runtime::kArrayConstructor)->entry;

  if (constant_stack_parameter_count == 0) {
    descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  } else {
    descriptor->Initialize(eax, deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE, PASS_ARGUMENTS);
  }
}

static void InitializeInternalArrayConstructorDescriptor(
    Isolate* isolate, CodeStubDescriptor* descriptor,
    int constant_stack_parameter_count) {
  // register state
  // eax -- number of arguments
  // edi -- constructor function
  Address deopt_handler = Runtime::FunctionForId(
      Runtime::kInternalArrayConstructor)->entry;

  if (constant_stack_parameter_count == 0) {
    descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  } else {
    descriptor->Initialize(eax, deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE, PASS_ARGUMENTS);
  }
}

void ArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 0);
}


void ArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 1);
}


void ArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, -1);
}


void InternalArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 0);
}


void InternalArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 1);
}


void InternalArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, -1);
}

#define __ ACCESS_MASM(masm)


void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
                                               ExternalReference miss) {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
  int param_count = descriptor.GetEnvironmentParameterCount();
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    DCHECK(param_count == 0 ||
           eax.is(descriptor.GetEnvironmentParameterRegister(param_count - 1)));
    // Push arguments
    for (int i = 0; i < param_count; ++i) {
      __ push(descriptor.GetEnvironmentParameterRegister(i));
    }
    __ CallExternalReference(miss, param_count);
  }

  __ ret(0);
}

void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  __ pushad();
  if (save_doubles()) {
    __ sub(esp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      __ movsd(Operand(esp, i * kDoubleSize), reg);
    }
  }
  const int argument_count = 1;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count, ecx);
  __ mov(Operand(esp, 0 * kPointerSize),
         Immediate(ExternalReference::isolate_address(isolate())));
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(isolate()),
      argument_count);
  if (save_doubles()) {
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      __ movsd(reg, Operand(esp, i * kDoubleSize));
    }
    __ add(esp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
  }
  __ popad();
  __ ret(0);
}

class FloatingPointHelper : public AllStatic {
 public:
  enum ArgLocation {
    ARGS_ON_STACK,
    ARGS_IN_REGISTERS
  };

  // Code pattern for loading a floating point value. Input value must
  // be either a smi or a heap number object (fp value). Requirements:
  // operand in register number. Returns operand as floating point number
  // on FPU stack.
  static void LoadFloatOperand(MacroAssembler* masm, Register number);

  // Test if operands are smi or number objects (fp). Requirements:
  // operand_1 in eax, operand_2 in edx; falls through on float
  // operands, jumps to the non_float label otherwise.
  static void CheckFloatOperands(MacroAssembler* masm,
                                 Label* non_float,
                                 Register scratch);

  // Test if operands are numbers (smi or HeapNumber objects), and load
  // them into xmm0 and xmm1 if they are. Jump to label not_numbers if
  // either operand is not a number. Operands are in edx and eax.
  // Leaves operands unchanged.
  static void LoadSSE2Operands(MacroAssembler* masm, Label* not_numbers);
};

void DoubleToIStub::Generate(MacroAssembler* masm) {
  Register input_reg = this->source();
  Register final_result_reg = this->destination();
  DCHECK(is_truncating());

  Label check_negative, process_64_bits, done, done_no_stash;

  int double_offset = offset();

  // Account for return address and saved regs if input is esp.
  if (input_reg.is(esp)) double_offset += 3 * kPointerSize;
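  // Added commentary (not in the original source): the 3 * kPointerSize
  // covers the return address plus the two registers (scratch1 and save_reg)
  // pushed below; the operands built here are only dereferenced after those
  // pushes, so an esp-relative offset must anticipate them.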
197 
198  MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
199  MemOperand exponent_operand(MemOperand(input_reg,
200  double_offset + kDoubleSize / 2));
201 
202  Register scratch1;
203  {
204  Register scratch_candidates[3] = { ebx, edx, edi };
205  for (int i = 0; i < 3; i++) {
206  scratch1 = scratch_candidates[i];
207  if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
208  }
209  }
210  // Since we must use ecx for shifts below, use some other register (eax)
211  // to calculate the result if ecx is the requested return register.
212  Register result_reg = final_result_reg.is(ecx) ? eax : final_result_reg;
213  // Save ecx if it isn't the return register and therefore volatile, or if it
214  // is the return register, then save the temp register we use in its stead for
215  // the result.
216  Register save_reg = final_result_reg.is(ecx) ? eax : ecx;
217  __ push(scratch1);
218  __ push(save_reg);
219 
220  bool stash_exponent_copy = !input_reg.is(esp);
221  __ mov(scratch1, mantissa_operand);
223  CpuFeatureScope scope(masm, SSE3);
224  // Load x87 register with heap number.
225  __ fld_d(mantissa_operand);
226  }
227  __ mov(ecx, exponent_operand);
228  if (stash_exponent_copy) __ push(ecx);
229 
232  __ lea(result_reg, MemOperand(ecx, -HeapNumber::kExponentBias));
233  __ cmp(result_reg, Immediate(HeapNumber::kMantissaBits));
234  __ j(below, &process_64_bits);
235 
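  // Added commentary (not in the original source): at this point ecx holds
  // the biased IEEE-754 exponent and result_reg the unbiased one. For the
  // double 2^60, for example, result_reg is 60 >= kMantissaBits (52): the
  // value is already an integer and the fall-through path only has to shift
  // the mantissa into place. Exponents below 52 may leave fractional bits,
  // so those take the truncating 64-bit conversion at process_64_bits.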
  // Result is entirely in lower 32-bits of mantissa
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  if (CpuFeatures::IsSupported(SSE3)) {
    __ fstp(0);
  }
  __ sub(ecx, Immediate(delta));
  __ xor_(result_reg, result_reg);
  __ cmp(ecx, Immediate(31));
  __ j(above, &done);
  __ shl_cl(scratch1);
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(masm, SSE3);
    if (stash_exponent_copy) {
      // Already a copy of the exponent on the stack, overwrite it.
      STATIC_ASSERT(kDoubleSize == 2 * kPointerSize);
      __ sub(esp, Immediate(kDoubleSize / 2));
    } else {
      // Reserve space for 64 bit answer.
      __ sub(esp, Immediate(kDoubleSize));  // Nolint.
    }
    // Do conversion, which cannot fail because we checked the exponent.
    __ fisttp_d(Operand(esp, 0));
    __ mov(result_reg, Operand(esp, 0));  // Load low word of answer as result
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&done_no_stash);
  } else {
    // Result must be extracted from shifted 32-bit mantissa
    __ sub(ecx, Immediate(delta));
    __ neg(ecx);
    if (stash_exponent_copy) {
      __ mov(result_reg, MemOperand(esp, 0));
    } else {
      __ mov(result_reg, exponent_operand);
    }
    __ and_(result_reg,
            Immediate(static_cast<uint32_t>(Double::kSignificandMask >> 32)));
    __ add(result_reg,
           Immediate(static_cast<uint32_t>(Double::kHiddenBit >> 32)));
    __ shrd(result_reg, scratch1);
    __ shr_cl(result_reg);
    __ test(ecx, Immediate(32));
    __ cmov(not_equal, scratch1, result_reg);
  }

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ mov(result_reg, scratch1);
  __ neg(result_reg);
  if (stash_exponent_copy) {
    __ cmp(MemOperand(esp, 0), Immediate(0));
  } else {
    __ cmp(exponent_operand, Immediate(0));
  }
  __ cmov(greater, result_reg, scratch1);

  // Restore registers
  __ bind(&done);
  if (stash_exponent_copy) {
    __ add(esp, Immediate(kDoubleSize / 2));
  }
  __ bind(&done_no_stash);
  if (!final_result_reg.is(result_reg)) {
    DCHECK(final_result_reg.is(ecx));
    __ mov(final_result_reg, result_reg);
  }
  __ pop(save_reg);
  __ pop(scratch1);
  __ ret(0);
}

void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
                                           Register number) {
  Label load_smi, done;

  __ JumpIfSmi(number, &load_smi, Label::kNear);
  __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
  __ jmp(&done, Label::kNear);

  __ bind(&load_smi);
  __ SmiUntag(number);
  __ push(number);
  __ fild_s(Operand(esp, 0));
  __ pop(number);

  __ bind(&done);
}

void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
                                           Label* not_numbers) {
  Label load_smi_edx, load_eax, load_smi_eax, load_float_eax, done;
  // Load operand in edx into xmm0, or branch to not_numbers.
  __ JumpIfSmi(edx, &load_smi_edx, Label::kNear);
  Factory* factory = masm->isolate()->factory();
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset), factory->heap_number_map());
  __ j(not_equal, not_numbers);  // Argument in edx is not a number.
  __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
  __ bind(&load_eax);
  // Load operand in eax into xmm1, or branch to not_numbers.
  __ JumpIfSmi(eax, &load_smi_eax, Label::kNear);
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset), factory->heap_number_map());
  __ j(equal, &load_float_eax, Label::kNear);
  __ jmp(not_numbers);  // Argument in eax is not a number.
  __ bind(&load_smi_edx);
  __ SmiUntag(edx);  // Untag smi before converting to float.
  __ Cvtsi2sd(xmm0, edx);
  __ SmiTag(edx);  // Retag smi for heap number overwriting test.
  __ jmp(&load_eax);
  __ bind(&load_smi_eax);
  __ SmiUntag(eax);  // Untag smi before converting to float.
  __ Cvtsi2sd(xmm1, eax);
  __ SmiTag(eax);  // Retag smi for heap number overwriting test.
  __ jmp(&done, Label::kNear);
  __ bind(&load_float_eax);
  __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
  __ bind(&done);
}

void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
                                             Label* non_float,
                                             Register scratch) {
  Label test_other, done;
  // Test if both operands are floats or smi -> scratch=k_is_float;
  // Otherwise scratch = k_not_float.
  __ JumpIfSmi(edx, &test_other, Label::kNear);
  __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
  Factory* factory = masm->isolate()->factory();
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // argument in edx is not a number -> NaN

  __ bind(&test_other);
  __ JumpIfSmi(eax, &done, Label::kNear);
  __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // argument in eax is not a number -> NaN

  // Fall-through: Both operands are numbers.
  __ bind(&done);
}

void MathPowStub::Generate(MacroAssembler* masm) {
  Factory* factory = isolate()->factory();
  const Register exponent = MathPowTaggedDescriptor::exponent();
  DCHECK(exponent.is(eax));
  const Register base = edx;
  const Register scratch = ecx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  // Save 1 in double_result - we need this several times later on.
  __ mov(scratch, Immediate(1));
  __ Cvtsi2sd(double_result, scratch);

  if (exponent_type() == ON_STACK) {
    Label base_is_smi, unpack_exponent;
    // The exponent and base are supplied as arguments on the stack.
    // This can only happen if the stub is called from non-optimized code.
    // Load input parameters from stack.
    __ mov(base, Operand(esp, 2 * kPointerSize));
    __ mov(exponent, Operand(esp, 1 * kPointerSize));

    __ JumpIfSmi(base, &base_is_smi, Label::kNear);
    __ cmp(FieldOperand(base, HeapObject::kMapOffset),
           factory->heap_number_map());
    __ j(not_equal, &call_runtime);

    __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset));
    __ jmp(&unpack_exponent, Label::kNear);

    __ bind(&base_is_smi);
    __ SmiUntag(base);
    __ Cvtsi2sd(double_base, base);

    __ bind(&unpack_exponent);
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiUntag(exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ cmp(FieldOperand(exponent, HeapObject::kMapOffset),
           factory->heap_number_map());
    __ j(not_equal, &call_runtime);
    __ movsd(double_exponent,
             FieldOperand(exponent, HeapNumber::kValueOffset));
  } else if (exponent_type() == TAGGED) {
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiUntag(exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ movsd(double_exponent,
             FieldOperand(exponent, HeapNumber::kValueOffset));
  }

  if (exponent_type() != INTEGER) {
    Label fast_power, try_arithmetic_simplification;
    __ DoubleToI(exponent, double_exponent, double_scratch,
                 TREAT_MINUS_ZERO_AS_ZERO, &try_arithmetic_simplification,
                 &try_arithmetic_simplification,
                 &try_arithmetic_simplification);
    __ jmp(&int_exponent);

    __ bind(&try_arithmetic_simplification);
    // Skip to runtime if possibly NaN (indicated by the indefinite integer).
    __ cvttsd2si(exponent, Operand(double_exponent));
    __ cmp(exponent, Immediate(0x1));
    __ j(overflow, &call_runtime);

    if (exponent_type() == ON_STACK) {
      // Detect square root case. Crankshaft detects constant +/-0.5 at
      // compile time and uses DoMathPowHalf instead. We then skip this check
      // for non-constant cases of +/-0.5 as these hardly occur.
      Label continue_sqrt, continue_rsqrt, not_plus_half;
      // Test for 0.5.
      // Load double_scratch with 0.5.
      __ mov(scratch, Immediate(0x3F000000u));
      __ movd(double_scratch, scratch);
      __ cvtss2sd(double_scratch, double_scratch);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &not_plus_half, Label::kNear);

      // Calculates square root of base. Check for the special case of
      // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
      // According to IEEE-754, single-precision -Infinity has the highest
      // 9 bits set and the lowest 23 bits cleared.
      __ mov(scratch, 0xFF800000u);
      __ movd(double_scratch, scratch);
      __ cvtss2sd(double_scratch, double_scratch);
      __ ucomisd(double_base, double_scratch);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal. However, it also sets the carry flag.
      __ j(not_equal, &continue_sqrt, Label::kNear);
      __ j(carry, &continue_sqrt, Label::kNear);

      // Set result to Infinity in the special case.
      __ xorps(double_result, double_result);
      __ subsd(double_result, double_scratch);
      __ jmp(&done);

      __ bind(&continue_sqrt);
      // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
      __ xorps(double_scratch, double_scratch);
      __ addsd(double_scratch, double_base);  // Convert -0 to +0.
      __ sqrtsd(double_result, double_scratch);
      __ jmp(&done);

      // Test for -0.5.
      __ bind(&not_plus_half);
      // Load double_exponent with -0.5 by subtracting 1.
      __ subsd(double_scratch, double_result);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &fast_power, Label::kNear);

      // Calculates reciprocal of square root of base. Check for the special
      // case of Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
      // According to IEEE-754, single-precision -Infinity has the highest
      // 9 bits set and the lowest 23 bits cleared.
      __ mov(scratch, 0xFF800000u);
      __ movd(double_scratch, scratch);
      __ cvtss2sd(double_scratch, double_scratch);
      __ ucomisd(double_base, double_scratch);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal. However, it also sets the carry flag.
      __ j(not_equal, &continue_rsqrt, Label::kNear);
      __ j(carry, &continue_rsqrt, Label::kNear);

      // Set result to 0 in the special case.
      __ xorps(double_result, double_result);
      __ jmp(&done);

      __ bind(&continue_rsqrt);
      // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
      __ xorps(double_exponent, double_exponent);
      __ addsd(double_exponent, double_base);  // Convert -0 to +0.
      __ sqrtsd(double_exponent, double_exponent);
      __ divsd(double_result, double_exponent);
      __ jmp(&done);
    }

    // Using FPU instructions to calculate power.
    Label fast_power_failed;
    __ bind(&fast_power);
    __ fnclex();  // Clear flags to catch exceptions later.
    // Transfer (B)ase and (E)xponent onto the FPU register stack.
    __ sub(esp, Immediate(kDoubleSize));
    __ movsd(Operand(esp, 0), double_exponent);
    __ fld_d(Operand(esp, 0));  // E
    __ movsd(Operand(esp, 0), double_base);
    __ fld_d(Operand(esp, 0));  // B, E

    // Exponent is in st(1) and base is in st(0)
    // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
    // FYL2X calculates st(1) * log2(st(0))
    __ fyl2x();    // X
    __ fld(0);     // X, X
    __ frndint();  // rnd(X), X
    __ fsub(1);    // rnd(X), X-rnd(X)
    __ fxch(1);    // X - rnd(X), rnd(X)
    // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
    __ f2xm1();    // 2^(X-rnd(X)) - 1, rnd(X)
    __ fld1();     // 1, 2^(X-rnd(X)) - 1, rnd(X)
    __ faddp(1);   // 2^(X-rnd(X)), rnd(X)
    // FSCALE calculates st(0) * 2^st(1)
    __ fscale();   // 2^X, rnd(X)
    __ fstp(1);    // 2^X
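    // Added commentary (not in the original source): the split of X into
    // rnd(X) + (X - rnd(X)) exists because F2XM1 is only defined for
    // arguments in (-1, 1). For X = 10.3, for instance, F2XM1 computes
    // 2^0.3 - 1, the FLD1/FADDP pair adds the 1 back, and FSCALE multiplies
    // by 2^10 using the integer part left in st(1), yielding 2^10.3.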
    // Bail out to runtime in case of exceptions in the status word.
    __ fnstsw_ax();
    __ test_b(eax, 0x5F);  // We check for all but precision exception.
    __ j(not_zero, &fast_power_failed, Label::kNear);
    __ fstp_d(Operand(esp, 0));
    __ movsd(double_result, Operand(esp, 0));
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&done);

    __ bind(&fast_power_failed);
    __ fninit();
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&call_runtime);
  }

  // Calculate power with integer exponent.
  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  __ mov(scratch, exponent);                 // Back up exponent.
  __ movsd(double_scratch, double_base);     // Back up base.
  __ movsd(double_scratch2, double_result);  // Load double_exponent with 1.

  // Get absolute value of exponent.
  Label no_neg, while_true, while_false;
  __ test(scratch, scratch);
  __ j(positive, &no_neg, Label::kNear);
  __ neg(scratch);
  __ bind(&no_neg);

  __ j(zero, &while_false, Label::kNear);
  __ shr(scratch, 1);
  // Above condition means CF==0 && ZF==0. This means that the
  // bit that has been shifted out is 0 and the result is not 0.
  __ j(above, &while_true, Label::kNear);
  __ movsd(double_result, double_scratch);
  __ j(zero, &while_false, Label::kNear);

  __ bind(&while_true);
  __ shr(scratch, 1);
  __ mulsd(double_scratch, double_scratch);
  __ j(above, &while_true, Label::kNear);
  __ mulsd(double_result, double_scratch);
  __ j(not_zero, &while_true);
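  // Added commentary (not in the original source): this loop is binary
  // exponentiation (square-and-multiply). For exponent 5 (binary 101),
  // double_scratch runs through base, base^2, base^4 while the shifted-out
  // bits 1, 0, 1 decide which factors get multiplied into double_result,
  // producing base^1 * base^4 = base^5 in O(log n) multiplications.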

  __ bind(&while_false);
  // scratch has the original value of the exponent - if the exponent is
  // negative, return 1/result.
  __ test(exponent, exponent);
  __ j(positive, &done);
  __ divsd(double_scratch2, double_result);
  __ movsd(double_result, double_scratch2);
  // Test whether result is zero. Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ xorps(double_scratch2, double_scratch2);
  __ ucomisd(double_scratch2, double_result);  // Result cannot be NaN.
  // double_exponent aliased as double_scratch2 has already been overwritten
  // and may not have contained the exponent value in the first place when the
  // exponent is a smi. We reset it with exponent value before bailing out.
  __ j(not_equal, &done);
  __ Cvtsi2sd(double_exponent, exponent);

  // Returning or bailing out.
  Counters* counters = isolate()->counters();
  if (exponent_type() == ON_STACK) {
    // The arguments are still on the stack.
    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kMathPowRT, 2, 1);

    // The stub is called from non-optimized code, which expects the result
    // as heap number in exponent.
    __ bind(&done);
    __ AllocateHeapNumber(eax, scratch, base, &call_runtime);
    __ movsd(FieldOperand(eax, HeapNumber::kValueOffset), double_result);
    __ IncrementCounter(counters->math_pow(), 1);
    __ ret(2 * kPointerSize);
  } else {
    __ bind(&call_runtime);
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(4, scratch);
      __ movsd(Operand(esp, 0 * kDoubleSize), double_base);
      __ movsd(Operand(esp, 1 * kDoubleSize), double_exponent);
      __ CallCFunction(
          ExternalReference::power_double_double_function(isolate()), 4);
    }
    // Return value is in st(0) on ia32.
    // Store it into the (fixed) result register.
    __ sub(esp, Immediate(kDoubleSize));
    __ fstp_d(Operand(esp, 0));
    __ movsd(double_result, Operand(esp, 0));
    __ add(esp, Immediate(kDoubleSize));

    __ bind(&done);
    __ IncrementCounter(counters->math_pow(), 1);
    __ ret(0);
  }
}

void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
  Label miss;
  Register receiver = LoadDescriptor::ReceiverRegister();

  NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, eax,
                                                          ebx, &miss);
  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
}

void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
  // Return address is on the stack.
  Label slow;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register key = LoadDescriptor::NameRegister();
  Register scratch = eax;
  DCHECK(!scratch.is(receiver) && !scratch.is(key));

  // Check that the key is an array index, that is Uint32.
  __ test(key, Immediate(kSmiTagMask | kSmiSignMask));
  __ j(not_zero, &slow);

  // Everything is fine, call runtime.
  __ pop(scratch);
  __ push(receiver);  // receiver
  __ push(key);       // key
  __ push(scratch);   // return address

  // Perform tail call to the entry.
  ExternalReference ref = ExternalReference(
      IC_Utility(IC::kLoadElementWithInterceptor), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);

  __ bind(&slow);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}

void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  // The key is in edx and the parameter count is in eax.
  DCHECK(edx.is(ArgumentsAccessReadDescriptor::index()));
  DCHECK(eax.is(ArgumentsAccessReadDescriptor::parameter_count()));

  // The displacement is used for skipping the frame pointer on the
  // stack. It is the offset of the last parameter (if any) relative
  // to the frame pointer.
  static const int kDisplacement = 1 * kPointerSize;

  // Check that the key is a smi.
  Label slow;
  __ JumpIfNotSmi(edx, &slow, Label::kNear);

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor;
  __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(ebx, StandardFrameConstants::kContextOffset));
  __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor, Label::kNear);

  // Check index against formal parameters count limit passed in
  // through register eax. Use unsigned comparison to get negative
  // check for free.
  __ cmp(edx, eax);
  __ j(above_equal, &slow, Label::kNear);

  // Read the argument from the stack and return it.
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);  // Shifting code depends on these.
  __ lea(ebx, Operand(ebp, eax, times_2, 0));
  __ neg(edx);
  __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
  __ ret(0);
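  // Added commentary (not in the original source): eax and edx are still
  // smis here, i.e. already shifted left by one, so the times_2 scale yields
  // index * kPointerSize. ebx ends up at ebp + parameter_count *
  // kPointerSize, and subtracting key * kPointerSize plus the kDisplacement
  // that skips the saved frame pointer addresses the requested argument.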

  // Arguments adaptor case: Check index against actual arguments
  // limit found in the arguments adaptor frame. Use unsigned
  // comparison to get negative check for free.
  __ bind(&adaptor);
  __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ cmp(edx, ecx);
  __ j(above_equal, &slow, Label::kNear);

  // Read the argument from the stack and return it.
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);  // Shifting code depends on these.
  __ lea(ebx, Operand(ebx, ecx, times_2, 0));
  __ neg(edx);
  __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
  __ ret(0);

  // Slow-case: Handle non-smi or out-of-bounds access to arguments
  // by calling the runtime system.
  __ bind(&slow);
  __ pop(ebx);  // Return address.
  __ push(edx);
  __ push(ebx);
  __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
}

void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
  // esp[0] : return address
  // esp[4] : number of parameters
  // esp[8] : receiver displacement
  // esp[12] : function

  // Check if the calling frame is an arguments adaptor frame.
  Label runtime;
  __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
  __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &runtime, Label::kNear);

  // Patch the arguments.length and the parameters pointer.
  __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ mov(Operand(esp, 1 * kPointerSize), ecx);
  __ lea(edx, Operand(edx, ecx, times_2,
                      StandardFrameConstants::kCallerSPOffset));
  __ mov(Operand(esp, 2 * kPointerSize), edx);

  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
}

void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
  // esp[0] : return address
  // esp[4] : number of parameters (tagged)
  // esp[8] : receiver displacement
  // esp[12] : function

  // ebx = parameter count (tagged)
  __ mov(ebx, Operand(esp, 1 * kPointerSize));

  // Check if the calling frame is an arguments adaptor frame.
  // TODO(rossberg): Factor out some of the bits that are shared with the other
  // Generate* functions.
  Label runtime;
  Label adaptor_frame, try_allocate;
  __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
  __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor_frame, Label::kNear);

  // No adaptor, parameter count = argument count.
  __ mov(ecx, ebx);
  __ jmp(&try_allocate, Label::kNear);

  // We have an adaptor frame. Patch the parameters pointer.
  __ bind(&adaptor_frame);
  __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ lea(edx, Operand(edx, ecx, times_2,
                      StandardFrameConstants::kCallerSPOffset));
  __ mov(Operand(esp, 2 * kPointerSize), edx);

  // ebx = parameter count (tagged)
  // ecx = argument count (smi-tagged)
  // esp[4] = parameter count (tagged)
  // esp[8] = address of receiver argument
  // Compute the mapped parameter count = min(ebx, ecx) in ebx.
  __ cmp(ebx, ecx);
  __ j(less_equal, &try_allocate, Label::kNear);
  __ mov(ebx, ecx);

  __ bind(&try_allocate);

  // Save mapped parameter count.
  __ push(ebx);

  // Compute the sizes of backing store, parameter map, and arguments object.
  // 1. Parameter map, has 2 extra words containing context and backing store.
  const int kParameterMapHeaderSize =
      FixedArray::kHeaderSize + 2 * kPointerSize;
  Label no_parameter_map;
  __ test(ebx, ebx);
  __ j(zero, &no_parameter_map, Label::kNear);
  __ lea(ebx, Operand(ebx, times_2, kParameterMapHeaderSize));
  __ bind(&no_parameter_map);

  // 2. Backing store.
  __ lea(ebx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize));

  // 3. Arguments object.
  __ add(ebx, Immediate(Heap::kSloppyArgumentsObjectSize));

  // Do the allocation of all three objects in one go.
  __ Allocate(ebx, eax, edx, edi, &runtime, TAG_OBJECT);

  // eax = address of new object(s) (tagged)
  // ecx = argument count (smi-tagged)
  // esp[0] = mapped parameter count (tagged)
  // esp[8] = parameter count (tagged)
  // esp[12] = address of receiver argument
  // Get the arguments map from the current native context into edi.
  Label has_mapped_parameters, instantiate;
  __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ mov(edi, FieldOperand(edi, GlobalObject::kNativeContextOffset));
  __ mov(ebx, Operand(esp, 0 * kPointerSize));
  __ test(ebx, ebx);
  __ j(not_zero, &has_mapped_parameters, Label::kNear);
  __ mov(
      edi,
      Operand(edi, Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX)));
  __ jmp(&instantiate, Label::kNear);

  __ bind(&has_mapped_parameters);
  __ mov(
      edi,
      Operand(edi, Context::SlotOffset(Context::ALIASED_ARGUMENTS_MAP_INDEX)));
  __ bind(&instantiate);

  // eax = address of new object (tagged)
  // ebx = mapped parameter count (tagged)
  // ecx = argument count (smi-tagged)
  // edi = address of arguments map (tagged)
  // esp[0] = mapped parameter count (tagged)
  // esp[8] = parameter count (tagged)
  // esp[12] = address of receiver argument
  // Copy the JS object part.
  __ mov(FieldOperand(eax, JSObject::kMapOffset), edi);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         masm->isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         masm->isolate()->factory()->empty_fixed_array());

  // Set up the callee in-object property.
  STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
  __ mov(edx, Operand(esp, 4 * kPointerSize));
  __ AssertNotSmi(edx);
  __ mov(FieldOperand(eax, JSObject::kHeaderSize +
                               Heap::kArgumentsCalleeIndex * kPointerSize),
         edx);

  // Use the length (smi tagged) and set that as an in-object property too.
  __ AssertSmi(ecx);
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
  __ mov(FieldOperand(eax, JSObject::kHeaderSize +
                               Heap::kArgumentsLengthIndex * kPointerSize),
         ecx);

  // Set up the elements pointer in the allocated arguments object.
  // If we allocated a parameter map, edi will point there, otherwise to the
  // backing store.
  __ lea(edi, Operand(eax, Heap::kSloppyArgumentsObjectSize));
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);

  // eax = address of new object (tagged)
  // ebx = mapped parameter count (tagged)
  // ecx = argument count (tagged)
  // edi = address of parameter map or backing store (tagged)
  // esp[0] = mapped parameter count (tagged)
  // esp[8] = parameter count (tagged)
  // esp[12] = address of receiver argument
  // Free a register.
  __ push(eax);

  // Initialize parameter map. If there are no mapped arguments, we're done.
  Label skip_parameter_map;
  __ test(ebx, ebx);
  __ j(zero, &skip_parameter_map);

  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(isolate()->factory()->sloppy_arguments_elements_map()));
  __ lea(eax, Operand(ebx, reinterpret_cast<intptr_t>(Smi::FromInt(2))));
  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), eax);
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 0 * kPointerSize), esi);
  __ lea(eax, Operand(edi, ebx, times_2, kParameterMapHeaderSize));
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 1 * kPointerSize), eax);

  // Copy the parameter slots and the holes in the arguments.
  // We need to fill in mapped_parameter_count slots. They index the context,
  // where parameters are stored in reverse order, at
  //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
  // The mapped parameter thus need to get indices
  //   MIN_CONTEXT_SLOTS+parameter_count-1 ..
  //       MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
  // We loop from right to left.
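  // Added commentary (not in the original source): with parameter_count = 3
  // and mapped_parameter_count = 2, the loop below writes context indices
  // MIN_CONTEXT_SLOTS + 2 and MIN_CONTEXT_SLOTS + 1 into the parameter map
  // and a hole into each corresponding backing-store slot, so reads of the
  // mapped arguments go through the context rather than the backing store.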
  Label parameters_loop, parameters_test;
  __ push(ecx);
  __ mov(eax, Operand(esp, 2 * kPointerSize));
  __ mov(ebx, Immediate(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
  __ add(ebx, Operand(esp, 4 * kPointerSize));
  __ sub(ebx, eax);
  __ mov(ecx, isolate()->factory()->the_hole_value());
  __ mov(edx, edi);
  __ lea(edi, Operand(edi, eax, times_2, kParameterMapHeaderSize));
  // eax = loop variable (tagged)
  // ebx = mapping index (tagged)
  // ecx = the hole value
  // edx = address of parameter map (tagged)
  // edi = address of backing store (tagged)
  // esp[0] = argument count (tagged)
  // esp[4] = address of new object (tagged)
  // esp[8] = mapped parameter count (tagged)
  // esp[16] = parameter count (tagged)
  // esp[20] = address of receiver argument
  __ jmp(&parameters_test, Label::kNear);

  __ bind(&parameters_loop);
  __ sub(eax, Immediate(Smi::FromInt(1)));
  __ mov(FieldOperand(edx, eax, times_2, kParameterMapHeaderSize), ebx);
  __ mov(FieldOperand(edi, eax, times_2, FixedArray::kHeaderSize), ecx);
  __ add(ebx, Immediate(Smi::FromInt(1)));
  __ bind(&parameters_test);
  __ test(eax, eax);
  __ j(not_zero, &parameters_loop, Label::kNear);
  __ pop(ecx);

  __ bind(&skip_parameter_map);

  // ecx = argument count (tagged)
  // edi = address of backing store (tagged)
  // esp[0] = address of new object (tagged)
  // esp[4] = mapped parameter count (tagged)
  // esp[12] = parameter count (tagged)
  // esp[16] = address of receiver argument
  // Copy arguments header and remaining slots (if there are any).
  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(isolate()->factory()->fixed_array_map()));
  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);

  Label arguments_loop, arguments_test;
  __ mov(ebx, Operand(esp, 1 * kPointerSize));
  __ mov(edx, Operand(esp, 4 * kPointerSize));
  __ sub(edx, ebx);  // Is there a smarter way to do negative scaling?
  __ sub(edx, ebx);
  __ jmp(&arguments_test, Label::kNear);

  __ bind(&arguments_loop);
  __ sub(edx, Immediate(kPointerSize));
  __ mov(eax, Operand(edx, 0));
  __ mov(FieldOperand(edi, ebx, times_2, FixedArray::kHeaderSize), eax);
  __ add(ebx, Immediate(Smi::FromInt(1)));

  __ bind(&arguments_test);
  __ cmp(ebx, ecx);
  __ j(less, &arguments_loop, Label::kNear);

  // Restore.
  __ pop(eax);  // Address of arguments object.
  __ pop(ebx);  // Parameter count.

  // Return and remove the on-stack parameters.
  __ ret(3 * kPointerSize);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ pop(eax);  // Remove saved parameter count.
  __ mov(Operand(esp, 1 * kPointerSize), ecx);  // Patch argument count.
  __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
}

void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
  // esp[0] : return address
  // esp[4] : number of parameters
  // esp[8] : receiver displacement
  // esp[12] : function

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
  __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor_frame, Label::kNear);

  // Get the length from the frame.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ jmp(&try_allocate, Label::kNear);

  // Patch the arguments.length and the parameters pointer.
  __ bind(&adaptor_frame);
  __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ mov(Operand(esp, 1 * kPointerSize), ecx);
  __ lea(edx, Operand(edx, ecx, times_2,
                      StandardFrameConstants::kCallerSPOffset));
  __ mov(Operand(esp, 2 * kPointerSize), edx);

  // Try the new space allocation. Start out with computing the size of
  // the arguments object and the elements array.
  Label add_arguments_object;
  __ bind(&try_allocate);
  __ test(ecx, ecx);
  __ j(zero, &add_arguments_object, Label::kNear);
  __ lea(ecx, Operand(ecx, times_2, FixedArray::kHeaderSize));
  __ bind(&add_arguments_object);
  __ add(ecx, Immediate(Heap::kStrictArgumentsObjectSize));

  // Do the allocation of both objects in one go.
  __ Allocate(ecx, eax, edx, ebx, &runtime, TAG_OBJECT);

  // Get the arguments map from the current native context.
  __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ mov(edi, FieldOperand(edi, GlobalObject::kNativeContextOffset));
  const int offset = Context::SlotOffset(Context::STRICT_ARGUMENTS_MAP_INDEX);
  __ mov(edi, Operand(edi, offset));

  __ mov(FieldOperand(eax, JSObject::kMapOffset), edi);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         masm->isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         masm->isolate()->factory()->empty_fixed_array());

  // Get the length (smi tagged) and set that as an in-object property too.
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ AssertSmi(ecx);
  __ mov(FieldOperand(eax, JSObject::kHeaderSize +
                               Heap::kArgumentsLengthIndex * kPointerSize),
         ecx);

  // If there are no actual arguments, we're done.
  Label done;
  __ test(ecx, ecx);
  __ j(zero, &done, Label::kNear);

  // Get the parameters pointer from the stack.
  __ mov(edx, Operand(esp, 2 * kPointerSize));

  // Set up the elements pointer in the allocated arguments object and
  // initialize the header in the elements fixed array.
  __ lea(edi, Operand(eax, Heap::kStrictArgumentsObjectSize));
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(isolate()->factory()->fixed_array_map()));
  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);

  // Untag the length for the loop below.
  __ SmiUntag(ecx);

  // Copy the fixed array slots.
  Label loop;
  __ bind(&loop);
  __ mov(ebx, Operand(edx, -1 * kPointerSize));  // Skip receiver.
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx);
  __ add(edi, Immediate(kPointerSize));
  __ sub(edx, Immediate(kPointerSize));
  __ dec(ecx);
  __ j(not_zero, &loop);

  // Return and remove the on-stack parameters.
  __ bind(&done);
  __ ret(3 * kPointerSize);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kNewStrictArguments, 3, 1);
}

void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Just jump directly to runtime if native RegExp is not selected at compile
  // time or if regexp entry in generated code is turned off by a runtime
  // switch or at compilation.
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExecRT, 4, 1);
#else  // V8_INTERPRETED_REGEXP

  // Stack frame on entry.
  //  esp[0]: return address
  //  esp[4]: last_match_info (expected JSArray)
  //  esp[8]: previous index
  //  esp[12]: subject string
  //  esp[16]: JSRegExp object

  static const int kLastMatchInfoOffset = 1 * kPointerSize;
  static const int kPreviousIndexOffset = 2 * kPointerSize;
  static const int kSubjectOffset = 3 * kPointerSize;
  static const int kJSRegExpOffset = 4 * kPointerSize;

  Label runtime;
  Factory* factory = isolate()->factory();

  // Ensure that a RegExp stack is allocated.
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(isolate());
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(isolate());
  __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ test(ebx, ebx);
  __ j(zero, &runtime);

  // Check that the first argument is a JSRegExp object.
  __ mov(eax, Operand(esp, kJSRegExpOffset));
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfSmi(eax, &runtime);
  __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx);
  __ j(not_equal, &runtime);

  // Check that the RegExp has been compiled (data contains a fixed array).
  __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
  if (FLAG_debug_code) {
    __ test(ecx, Immediate(kSmiTagMask));
    __ Check(not_zero, kUnexpectedTypeForRegExpDataFixedArrayExpected);
    __ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx);
    __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
  }

  // ecx: RegExp data (FixedArray)
  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
  __ mov(ebx, FieldOperand(ecx, JSRegExp::kDataTagOffset));
  __ cmp(ebx, Immediate(Smi::FromInt(JSRegExp::IRREGEXP)));
  __ j(not_equal, &runtime);

  // ecx: RegExp data (FixedArray)
  // Check that the number of captures fit in the static offsets vector buffer.
  __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
  // Check (number_of_captures + 1) * 2 <= offsets vector size
  // Or    number_of_captures * 2 <= offsets vector size - 2
  // Multiplying by 2 comes for free since edx is smi-tagged.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
  __ cmp(edx, Isolate::kJSRegexpStaticOffsetsVectorSize - 2);
  __ j(above, &runtime);

  // Reset offset for possibly sliced string.
  __ Move(edi, Immediate(0));
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ JumpIfSmi(eax, &runtime);
  __ mov(edx, eax);  // Make a copy of the original subject string.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));

  // eax: subject string
  // edx: subject string
  // ebx: subject string instance type
  // ecx: RegExp data (FixedArray)
  // Handle subject string according to its encoding and representation:
  // (1) Sequential two byte? If yes, go to (9).
  // (2) Sequential one byte? If yes, go to (6).
  // (3) Anything but sequential or cons? If yes, go to (7).
  // (4) Cons string. If the string is flat, replace subject with first string.
  //     Otherwise bailout.
  // (5a) Is subject sequential two byte? If yes, go to (9).
  // (5b) Is subject external? If yes, go to (8).
  // (6) One byte sequential. Load regexp code for one byte.
  // (E) Carry on.
  /// [...]

  // Deferred code at the end of the stub:
  // (7) Not a long external string? If yes, go to (10).
  // (8) External string. Make it, offset-wise, look like a sequential string.
  // (8a) Is the external string one byte? If yes, go to (6).
  // (9) Two byte sequential. Load regexp code for two byte. Go to (E).
  // (10) Short external string or not a string? If yes, bail out to runtime.
  // (11) Sliced string. Replace subject with parent. Go to (5a).

  Label seq_one_byte_string /* 6 */, seq_two_byte_string /* 9 */,
      external_string /* 8 */, check_underlying /* 5a */,
      not_seq_nor_cons /* 7 */, check_code /* E */,
      not_long_external /* 10 */;

  // (1) Sequential two byte? If yes, go to (9).
  __ and_(ebx, kIsNotStringMask |
               kStringRepresentationMask |
               kStringEncodingMask |
               kShortExternalStringMask);
  STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);  // Go to (9).

  // (2) Sequential one byte? If yes, go to (6).
  // Any other sequential string must be one byte.
  __ and_(ebx, Immediate(kIsNotStringMask |
                         kStringRepresentationMask |
                         kShortExternalStringMask));
  __ j(zero, &seq_one_byte_string, Label::kNear);  // Go to (6).

  // (3) Anything but sequential or cons? If yes, go to (7).
  // We check whether the subject string is a cons, since sequential strings
  // have already been covered.
  STATIC_ASSERT(kConsStringTag < kExternalStringTag);
  STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
  STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
  STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
  __ cmp(ebx, Immediate(kExternalStringTag));
  __ j(greater_equal, &not_seq_nor_cons);  // Go to (7).

  // (4) Cons string. Check that it's flat.
  // Replace subject with first string and reload instance type.
  __ cmp(FieldOperand(eax, ConsString::kSecondOffset), factory->empty_string());
  __ j(not_equal, &runtime);
  __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset));
  __ bind(&check_underlying);
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));

  // (5a) Is subject sequential two byte? If yes, go to (9).
  __ test_b(ebx, kStringRepresentationMask | kStringEncodingMask);
  STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);  // Go to (9).
  // (5b) Is subject external? If yes, go to (8).
  __ test_b(ebx, kStringRepresentationMask);
  // The underlying external string is never a short external string.
  STATIC_ASSERT(ExternalString::kMaxShortLength < ConsString::kMinLength);
  STATIC_ASSERT(ExternalString::kMaxShortLength < SlicedString::kMinLength);
  __ j(not_zero, &external_string);  // Go to (8).

  // eax: sequential subject string (or look-alike, external string)
  // edx: original subject string
  // ecx: RegExp data (FixedArray)
  // (6) One byte sequential. Load regexp code for one byte.
  __ bind(&seq_one_byte_string);
  // Load previous index and check range before edx is overwritten. We have
  // to use edx instead of eax here because it might have been only made to
  // look like a sequential string when it actually is an external string.
  __ mov(ebx, Operand(esp, kPreviousIndexOffset));
  __ JumpIfNotSmi(ebx, &runtime);
  __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
  __ j(above_equal, &runtime);
  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataOneByteCodeOffset));
  __ Move(ecx, Immediate(1));  // Type is one byte.

  // (E) Carry on. String handling is done.
  __ bind(&check_code);
  // edx: irregexp code
  // Check that the irregexp code has been generated for the actual string
  // encoding. If it has, the field contains a code object otherwise it contains
  // a smi (code flushing support).
  __ JumpIfSmi(edx, &runtime);

  // eax: subject string
  // ebx: previous index (smi)
  // edx: code
  // ecx: encoding of subject string (1 if one_byte, 0 if two_byte);
  // All checks done. Now push arguments for native regexp code.
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->regexp_entry_native(), 1);

  // Isolates: note we add an additional parameter here (isolate pointer).
  static const int kRegExpExecuteArguments = 9;
  __ EnterApiExitFrame(kRegExpExecuteArguments);

  // Argument 9: Pass current isolate address.
  __ mov(Operand(esp, 8 * kPointerSize),
         Immediate(ExternalReference::isolate_address(isolate())));

  // Argument 8: Indicate that this is a direct call from JavaScript.
  __ mov(Operand(esp, 7 * kPointerSize), Immediate(1));

  // Argument 7: Start (high end) of backtracking stack memory area.
  __ mov(esi, Operand::StaticVariable(address_of_regexp_stack_memory_address));
  __ add(esi, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ mov(Operand(esp, 6 * kPointerSize), esi);

  // Argument 6: Set the number of capture registers to zero to force global
  // regexps to behave as non-global. This does not affect non-global regexps.
  __ mov(Operand(esp, 5 * kPointerSize), Immediate(0));

  // Argument 5: static offsets vector buffer.
  __ mov(Operand(esp, 4 * kPointerSize),
         Immediate(ExternalReference::address_of_static_offsets_vector(
             isolate())));

  // Argument 2: Previous index.
  __ SmiUntag(ebx);
  __ mov(Operand(esp, 1 * kPointerSize), ebx);

  // Argument 1: Original subject string.
  // The original subject is in the previous stack frame. Therefore we have to
  // use ebp, which points exactly to one pointer size below the previous esp.
  // (Because creating a new stack frame pushes the previous ebp onto the stack
  // and thereby moves up esp by one kPointerSize.)
  __ mov(esi, Operand(ebp, kSubjectOffset + kPointerSize));
  __ mov(Operand(esp, 0 * kPointerSize), esi);

  // esi: original subject string
  // eax: underlying subject string
  // ebx: previous index
  // ecx: encoding of subject string (1 if one_byte 0 if two_byte);
  // edx: code
  // Argument 4: End of string data
  // Argument 3: Start of string data
  // Prepare start and end index of the input.
  // Load the length from the original sliced string if that is the case.
  __ mov(esi, FieldOperand(esi, String::kLengthOffset));
  __ add(esi, edi);  // Calculate input end wrt offset.
  __ SmiUntag(edi);
  __ add(ebx, edi);  // Calculate input start wrt offset.

  // ebx: start index of the input string
  // esi: end index of the input string
  Label setup_two_byte, setup_rest;
  __ test(ecx, ecx);
  __ j(zero, &setup_two_byte, Label::kNear);
  __ SmiUntag(esi);
  __ lea(ecx, FieldOperand(eax, esi, times_1, SeqOneByteString::kHeaderSize));
  __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
  __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqOneByteString::kHeaderSize));
  __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.
  __ jmp(&setup_rest, Label::kNear);

  __ bind(&setup_two_byte);
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);  // esi is smi (powered by 2).
  __ lea(ecx, FieldOperand(eax, esi, times_1, SeqTwoByteString::kHeaderSize));
  __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
  __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize));
  __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.

  __ bind(&setup_rest);

  // Locate the code entry and call it.
  __ add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ call(edx);

  // Drop arguments and come back to JS mode.
  __ LeaveApiExitFrame(true);

  // Check the result.
  Label success;
  __ cmp(eax, 1);
  // We expect exactly one result since we force the called regexp to behave
  // as non-global.
  __ j(equal, &success);
  Label failure;
  __ cmp(eax, NativeRegExpMacroAssembler::FAILURE);
  __ j(equal, &failure);
  __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION);
  // If not exception it can only be retry. Handle that in the runtime system.
  __ j(not_equal, &runtime);
  // Result must now be exception. If there is no pending exception already a
  // stack overflow (on the backtrack stack) was detected in RegExp code but
  // haven't created the exception yet. Handle that in the runtime system.
  // TODO(592): Rerunning the RegExp to get the stack overflow exception.
  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
                                      isolate());
  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
  __ mov(eax, Operand::StaticVariable(pending_exception));
  __ cmp(edx, eax);
  __ j(equal, &runtime);
  // For exception, throw the exception again.

  // Clear the pending exception variable.
  __ mov(Operand::StaticVariable(pending_exception), edx);

  // Special handling of termination exceptions which are uncatchable
  // by javascript code.
  __ cmp(eax, factory->termination_exception());
  Label throw_termination_exception;
  __ j(equal, &throw_termination_exception, Label::kNear);

  // Handle normal exception by following handler chain.
  __ Throw(eax);

  __ bind(&throw_termination_exception);
  __ ThrowUncatchable(eax);

  __ bind(&failure);
  // For failure to match, return null.
  __ mov(eax, factory->null_value());
  __ ret(4 * kPointerSize);

  // Load RegExp data.
  __ bind(&success);
  __ mov(eax, Operand(esp, kJSRegExpOffset));
  __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
  __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate number of capture registers (number_of_captures + 1) * 2.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  __ add(edx, Immediate(2));  // edx was a smi.
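  // Added commentary (not in the original source): each capture needs two
  // registers (start and end offset), plus two for the match itself. Because
  // edx is still smi-tagged it is already doubled, so adding 2 yields
  // (number_of_captures + 1) * 2; for /(a)(b)/ with two captures this gives
  // six capture registers.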

  // edx: Number of capture registers
  // Load last_match_info which is still known to be a fast case JSArray.
  // Check that the fourth object is a JSArray object.
  __ mov(eax, Operand(esp, kLastMatchInfoOffset));
  __ JumpIfSmi(eax, &runtime);
  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
  __ j(not_equal, &runtime);
  // Check that the JSArray is in fast case.
  __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
  __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset));
  __ cmp(eax, factory->fixed_array_map());
  __ j(not_equal, &runtime);
  // Check that the last match info has space for the capture registers and the
  // additional information.
  __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset));
  __ SmiUntag(eax);
  __ sub(eax, Immediate(RegExpImpl::kLastMatchOverhead));
  __ cmp(edx, eax);
  __ j(greater, &runtime);

  // ebx: last_match_info backing store (FixedArray)
  // edx: number of capture registers
  // Store the capture count.
  __ SmiTag(edx);  // Number of capture registers to smi.
  __ mov(FieldOperand(ebx, RegExpImpl::kLastCaptureCountOffset), edx);
  __ SmiUntag(edx);  // Number of capture registers back from smi.
  // Store last subject and last input.
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ mov(ecx, eax);
  __ mov(FieldOperand(ebx, RegExpImpl::kLastSubjectOffset), eax);
  __ RecordWriteField(ebx,
                      RegExpImpl::kLastSubjectOffset,
                      eax,
                      edi,
                      kDontSaveFPRegs);
  __ mov(eax, ecx);
  __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax);
  __ RecordWriteField(ebx,
                      RegExpImpl::kLastInputOffset,
                      eax,
                      edi,
                      kDontSaveFPRegs);

  // Get the static offsets vector filled by the native regexp code.
  ExternalReference address_of_static_offsets_vector =
      ExternalReference::address_of_static_offsets_vector(isolate());
  __ mov(ecx, Immediate(address_of_static_offsets_vector));

  // ebx: last_match_info backing store (FixedArray)
  // ecx: offsets vector
  // edx: number of capture registers
  Label next_capture, done;
  // Capture register counter starts from number of capture registers and
  // counts down until wrapping after zero.
  __ bind(&next_capture);
  __ sub(edx, Immediate(1));
  __ j(negative, &done, Label::kNear);
  // Read the value from the static offsets vector buffer.
  __ mov(edi, Operand(ecx, edx, times_int_size, 0));
  __ SmiTag(edi);
  // Store the smi value in the last match info.
  __ mov(FieldOperand(ebx,
                      edx,
                      times_pointer_size,
                      RegExpImpl::kFirstCaptureOffset),
         edi);
  __ jmp(&next_capture);
  __ bind(&done);

  // Return last match info.
  __ mov(eax, Operand(esp, kLastMatchInfoOffset));
  __ ret(4 * kPointerSize);

  // Do the runtime call to execute the regexp.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kRegExpExecRT, 4, 1);

  // Deferred code for string handling.
  // (7) Not a long external string? If yes, go to (10).
  __ bind(&not_seq_nor_cons);
  // Compare flags are still set from (3).
  __ j(greater, &not_long_external, Label::kNear);  // Go to (10).

  // (8) External string. Short external strings have been ruled out.
  __ bind(&external_string);
  // Reload instance type.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    // Sequential strings have already been ruled out.
    __ test_b(ebx, kIsIndirectStringMask);
    __ Assert(zero, kExternalStringExpectedButNotFound);
  }
  __ mov(eax, FieldOperand(eax, ExternalString::kResourceDataOffset));
  // Move the pointer so that offset-wise, it looks like a sequential string.
  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
  __ sub(eax, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  STATIC_ASSERT(kTwoByteStringTag == 0);
  // (8a) Is the external string one byte? If yes, go to (6).
  __ test_b(ebx, kStringEncodingMask);
  __ j(not_zero, &seq_one_byte_string);  // Go to (6).

  // eax: sequential subject string (or look-alike, external string)
  // edx: original subject string
  // ecx: RegExp data (FixedArray)
  // (9) Two byte sequential. Load regexp code for two byte. Go to (E).
  __ bind(&seq_two_byte_string);
  // Load previous index and check range before edx is overwritten. We have
  // to use edx instead of eax here because it might have been only made to
  // look like a sequential string when it actually is an external string.
  __ mov(ebx, Operand(esp, kPreviousIndexOffset));
  __ JumpIfNotSmi(ebx, &runtime);
  __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
  __ j(above_equal, &runtime);
  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset));
  __ Move(ecx, Immediate(0));  // Type is two byte.
  __ jmp(&check_code);  // Go to (E).

  // (10) Not a string or a short external string? If yes, bail out to runtime.
  __ bind(&not_long_external);
  // Catch non-string subject or short external string.
  STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0);
  __ test(ebx, Immediate(kIsNotStringMask | kShortExternalStringTag));
  __ j(not_zero, &runtime);

  // (11) Sliced string. Replace subject with parent. Go to (5a).
  // Load offset into edi and replace subject string with parent.
  __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset));
  __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset));
  __ jmp(&check_underlying);  // Go to (5a).
#endif  // V8_INTERPRETED_REGEXP
}

static int NegativeComparisonResult(Condition cc) {
  DCHECK(cc != equal);
  DCHECK((cc == less) || (cc == less_equal)
      || (cc == greater) || (cc == greater_equal));
  return (cc == greater || cc == greater_equal) ? LESS : GREATER;
}


static void CheckInputType(MacroAssembler* masm, Register input,
                           CompareICState::State expected, Label* fail) {
  Label ok;
  if (expected == CompareICState::SMI) {
    __ JumpIfNotSmi(input, fail);
  } else if (expected == CompareICState::NUMBER) {
    __ JumpIfSmi(input, &ok);
    __ cmp(FieldOperand(input, HeapObject::kMapOffset),
           Immediate(masm->isolate()->factory()->heap_number_map()));
    __ j(not_equal, fail);
  }
  // We could be strict about internalized/non-internalized here, but as long as
  // hydrogen doesn't care, the stub doesn't have to care either.
  __ bind(&ok);
}


static void BranchIfNotInternalizedString(MacroAssembler* masm,
                                          Label* label,
                                          Register object,
                                          Register scratch) {
  __ JumpIfSmi(object, label);
  __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ test(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, label);
}

void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
  Label check_unequal_objects;

  Label miss;
  CheckInputType(masm, edx, left(), &miss);
  CheckInputType(masm, eax, right(), &miss);

  // Compare two smis.
  Label non_smi, smi_done;
  __ mov(ecx, edx);
  __ or_(ecx, eax);
  __ JumpIfNotSmi(ecx, &non_smi, Label::kNear);
  __ sub(edx, eax);  // Return on the result of the subtraction.
  __ j(no_overflow, &smi_done, Label::kNear);
  __ not_(edx);  // Correct sign in case of overflow. edx is never 0 here.
1607  __ bind(&smi_done);
1608  __ mov(eax, edx);
1609  __ ret(0);
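// The smi fast path above computes, in effect, the sign of (edx - eax).
// If the subtraction overflows, the computed value has the wrong sign;
// NOT flips the sign bit while the value stays non-zero (the difference of
// two distinct smis is even), so the returned sign is still correct.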
1610  __ bind(&non_smi);
1611 
1612  // NOTICE! This code is only reached after a smi-fast-case check, so
1613  // it is certain that at least one operand isn't a smi.
1614 
1615  // Identical objects can be compared fast, but there are some tricky cases
1616  // for NaN and undefined.
1617  Label generic_heap_number_comparison;
1618  {
1619  Label not_identical;
1620  __ cmp(eax, edx);
1621  __ j(not_equal, &not_identical);
1622 
1623  if (cc != equal) {
1624  // Check for undefined. undefined OP undefined is false even though
1625  // undefined == undefined.
1626  Label check_for_nan;
1627  __ cmp(edx, isolate()->factory()->undefined_value());
1628  __ j(not_equal, &check_for_nan, Label::kNear);
1629  __ Move(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
1630  __ ret(0);
1631  __ bind(&check_for_nan);
1632  }
1633 
1634  // Test for NaN. Compare heap numbers in a general way,
1635  // to handle NaNs correctly.
1636  __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
1637  Immediate(isolate()->factory()->heap_number_map()));
1638  __ j(equal, &generic_heap_number_comparison, Label::kNear);
1639  if (cc != equal) {
1640  // Call runtime on identical JSObjects. Otherwise return equal.
1641  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
1642  __ j(above_equal, &not_identical);
1643  }
1644  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
1645  __ ret(0);
1646 
1647 
1648  __ bind(&not_identical);
1649  }
1650 
1651  // Strict equality can quickly decide whether objects are equal.
1652  // Non-strict object equality is slower, so it is handled later in the stub.
1653  if (cc == equal && strict()) {
1654  Label slow; // Fallthrough label.
1655  Label not_smis;
1656  // If we're doing a strict equality comparison, we don't have to do
1657  // type conversion, so we generate code to do fast comparison for objects
1658  // and oddballs. Non-smi numbers and strings still go through the usual
1659  // slow-case code.
1660  // If either is a Smi (we know that not both are), then they can only
1661  // be equal if the other is a HeapNumber. If so, use the slow case.
1662  STATIC_ASSERT(kSmiTag == 0);
1663  DCHECK_EQ(0, Smi::FromInt(0));
1664  __ mov(ecx, Immediate(kSmiTagMask));
1665  __ and_(ecx, eax);
1666  __ test(ecx, edx);
1667  __ j(not_zero, &not_smis, Label::kNear);
1668  // One operand is a smi.
1669 
1670  // Check whether the non-smi is a heap number.
1671  STATIC_ASSERT(kSmiTagMask == 1);
1672  // ecx still holds eax & kSmiTag, which is either zero or one.
1673  __ sub(ecx, Immediate(0x01));
1674  __ mov(ebx, edx);
1675  __ xor_(ebx, eax);
1676  __ and_(ebx, ecx); // ebx holds either 0 or eax ^ edx.
1677  __ xor_(ebx, eax);
1678  // if eax was smi, ebx is now edx, else eax.
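// Branch-free select: ecx = (eax & kSmiTagMask) - 1 is ~0 when eax is a smi
// and 0 when it is a heap object, so ((edx ^ eax) & ecx) ^ eax yields edx
// when eax is the smi and eax otherwise -- i.e. ebx now holds the non-smi
// operand, without a branch.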
1679 
1680  // Check if the non-smi operand is a heap number.
1681  __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
1682  Immediate(isolate()->factory()->heap_number_map()));
1683  // If heap number, handle it in the slow case.
1684  __ j(equal, &slow, Label::kNear);
1685  // Return non-equal (ebx is not zero)
1686  __ mov(eax, ebx);
1687  __ ret(0);
1688 
1689  __ bind(&not_smis);
1690  // If either operand is a JSObject or an oddball value, then they are not
1691  // equal since their pointers are different
1692  // There is no test for undetectability in strict equality.
1693 
1694  // Get the type of the first operand.
1695  // If the first object is a JS object, we have done pointer comparison.
1696  Label first_non_object;
1697  STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
1698  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
1699  __ j(below, &first_non_object, Label::kNear);
1700 
1701  // Return non-zero (eax is not zero)
1702  Label return_not_equal;
1703  STATIC_ASSERT(kHeapObjectTag != 0);
1704  __ bind(&return_not_equal);
1705  __ ret(0);
1706 
1707  __ bind(&first_non_object);
1708  // Check for oddballs: true, false, null, undefined.
1709  __ CmpInstanceType(ecx, ODDBALL_TYPE);
1710  __ j(equal, &return_not_equal);
1711 
1712  __ CmpObjectType(edx, FIRST_SPEC_OBJECT_TYPE, ecx);
1713  __ j(above_equal, &return_not_equal);
1714 
1715  // Check for oddballs: true, false, null, undefined.
1716  __ CmpInstanceType(ecx, ODDBALL_TYPE);
1717  __ j(equal, &return_not_equal);
1718 
1719  // Fall through to the general case.
1720  __ bind(&slow);
1721  }
1722 
1723  // Generate the number comparison code.
1724  Label non_number_comparison;
1725  Label unordered;
1726  __ bind(&generic_heap_number_comparison);
1727 
1728  FloatingPointHelper::LoadSSE2Operands(masm, &non_number_comparison);
1729  __ ucomisd(xmm0, xmm1);
1730  // Don't base result on EFLAGS when a NaN is involved.
1731  __ j(parity_even, &unordered, Label::kNear);
1732 
1733  __ mov(eax, 0); // equal
1734  __ mov(ecx, Immediate(Smi::FromInt(1)));
1735  __ cmov(above, eax, ecx);
1736  __ mov(ecx, Immediate(Smi::FromInt(-1)));
1737  __ cmov(below, eax, ecx);
1738  __ ret(0);
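// ucomisd sets only CF/ZF/PF, like an unsigned compare, so the unsigned
// conditions "above" and "below" pick out greater/less here; the NaN case
// (PF set) was already routed to &unordered.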
1739 
1740  // If one of the numbers was NaN, then the result is always false.
1741  // The cc is never not-equal.
1742  __ bind(&unordered);
1743  DCHECK(cc != not_equal);
1744  if (cc == less || cc == less_equal) {
1745  __ mov(eax, Immediate(Smi::FromInt(1)));
1746  } else {
1747  __ mov(eax, Immediate(Smi::FromInt(-1)));
1748  }
1749  __ ret(0);
1750 
1751  // The number comparison code did not provide a valid result.
1752  __ bind(&non_number_comparison);
1753 
1754  // Fast negative check for internalized-to-internalized equality.
1755  Label check_for_strings;
1756  if (cc == equal) {
1757  BranchIfNotInternalizedString(masm, &check_for_strings, eax, ecx);
1758  BranchIfNotInternalizedString(masm, &check_for_strings, edx, ecx);
1759 
1760  // We've already checked for object identity, so if both operands
1761  // are internalized they aren't equal. Register eax already holds a
1762  // non-zero value, which indicates not equal, so just return.
1763  __ ret(0);
1764  }
1765 
1766  __ bind(&check_for_strings);
1767 
1768  __ JumpIfNotBothSequentialOneByteStrings(edx, eax, ecx, ebx,
1769  &check_unequal_objects);
1770 
1771  // Inline comparison of one-byte strings.
1772  if (cc == equal) {
1773  StringHelper::GenerateFlatOneByteStringEquals(masm, edx, eax, ecx, ebx);
1774  } else {
1775  StringHelper::GenerateCompareFlatOneByteStrings(masm, edx, eax, ecx, ebx,
1776  edi);
1777  }
1778 #ifdef DEBUG
1779  __ Abort(kUnexpectedFallThroughFromStringComparison);
1780 #endif
1781 
1782  __ bind(&check_unequal_objects);
1783  if (cc == equal && !strict()) {
1784  // Non-strict equality. Objects are unequal if
1785  // they are both JSObjects and not undetectable,
1786  // and their pointers are different.
1787  Label not_both_objects;
1788  Label return_unequal;
1789  // At most one is a smi, so we can test for smi by adding the two.
1790  // A smi plus a heap object has the low bit set, a heap object plus
1791  // a heap object has the low bit clear.
1792  STATIC_ASSERT(kSmiTag == 0);
1793  STATIC_ASSERT(kSmiTagMask == 1);
1794  __ lea(ecx, Operand(eax, edx, times_1, 0));
1795  __ test(ecx, Immediate(kSmiTagMask));
1796  __ j(not_zero, &not_both_objects, Label::kNear);
1797  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
1798  __ j(below, &not_both_objects, Label::kNear);
1799  __ CmpObjectType(edx, FIRST_SPEC_OBJECT_TYPE, ebx);
1800  __ j(below, &not_both_objects, Label::kNear);
1801  // We do not bail out after this point. Both are JSObjects, and
1802  // they are equal if and only if both are undetectable.
1803  // The and of the undetectable flags is 1 if and only if they are equal.
1804  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
1805  1 << Map::kIsUndetectable);
1806  __ j(zero, &return_unequal, Label::kNear);
1807  __ test_b(FieldOperand(ebx, Map::kBitFieldOffset),
1808  1 << Map::kIsUndetectable);
1809  __ j(zero, &return_unequal, Label::kNear);
1810  // The objects are both undetectable, so they both compare as the value
1811  // undefined, and are equal.
1812  __ Move(eax, Immediate(EQUAL));
1813  __ bind(&return_unequal);
1814  // Return non-equal by returning the non-zero object pointer in eax,
1815  // or return equal if we fell through to here.
1816  __ ret(0);
1817  __ bind(&not_both_objects);
1818  }
1819 
1820  // Push arguments below the return address.
1821  __ pop(ecx);
1822  __ push(edx);
1823  __ push(eax);
1824 
1825  // Figure out which native to call and setup the arguments.
1826  Builtins::JavaScript builtin;
1827  if (cc == equal) {
1828  builtin = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
1829  } else {
1830  builtin = Builtins::COMPARE;
1831  __ push(Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
1832  }
1833 
1834  // Restore return address on the stack.
1835  __ push(ecx);
1836 
1837  // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
1838  // tagged as a small integer.
1839  __ InvokeBuiltin(builtin, JUMP_FUNCTION);
1840 
1841  __ bind(&miss);
1842  GenerateMiss(masm);
1843 }
1844 
1845 
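// The feedback slot written below acts as a small state machine (a sketch;
// sentinels as used in this file):
//   uninitialized --> monomorphic: the JSFunction callee, or an
//                     AllocationSite when the callee is the Array function
//   monomorphic --different callee--> megamorphic (an immortal immovable
//                     sentinel, so that transition needs no write barrier)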
1846 static void GenerateRecordCallTarget(MacroAssembler* masm) {
1847  // Cache the called function in a feedback vector slot. Cache states
1848  // are uninitialized, monomorphic (indicated by a JSFunction), and
1849  // megamorphic.
1850  // eax : number of arguments to the construct function
1851  // ebx : Feedback vector
1852  // edx : slot in feedback vector (Smi)
1853  // edi : the function to call
1854  Isolate* isolate = masm->isolate();
1855  Label initialize, done, miss, megamorphic, not_array_function;
1856 
1857  // Load the cache state into ecx.
1858  __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
1859  FixedArray::kHeaderSize));
1860
1861  // A monomorphic cache hit or an already megamorphic state: invoke the
1862  // function without changing the state.
1863  __ cmp(ecx, edi);
1864  __ j(equal, &done, Label::kFar);
1865  __ cmp(ecx, Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
1866  __ j(equal, &done, Label::kFar);
1867 
1868  if (!FLAG_pretenuring_call_new) {
1869  // If we came here, we need to see if we are the array function.
1870  // If we didn't have a matching function, and we didn't find the
1871  // megamorphic sentinel, then we have in the slot either some other
1872  // function or an AllocationSite. Do a map check on the object in ecx.
1873  Handle<Map> allocation_site_map = isolate->factory()->allocation_site_map();
1874  __ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map));
1875  __ j(not_equal, &miss);
1876 
1877  // Make sure the function is the Array() function
1878  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
1879  __ cmp(edi, ecx);
1880  __ j(not_equal, &megamorphic);
1881  __ jmp(&done, Label::kFar);
1882  }
1883 
1884  __ bind(&miss);
1885 
1886  // A monomorphic miss (i.e., the cache is not uninitialized here) goes
1887  // megamorphic.
1888  __ cmp(ecx, Immediate(TypeFeedbackVector::UninitializedSentinel(isolate)));
1889  __ j(equal, &initialize);
1890  // MegamorphicSentinel is an immortal immovable object (undefined) so no
1891  // write-barrier is needed.
1892  __ bind(&megamorphic);
1893  __ mov(
1894  FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize),
1895  Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
1896  __ jmp(&done, Label::kFar);
1897 
1898  // An uninitialized cache is patched with the function or sentinel to
1899  // indicate the ElementsKind if function is the Array constructor.
1900  __ bind(&initialize);
1901  if (!FLAG_pretenuring_call_new) {
1902  // Make sure the function is the Array() function
1903  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
1904  __ cmp(edi, ecx);
1905  __ j(not_equal, &not_array_function);
1906 
1907  // The target function is the Array constructor.
1908  // Create an AllocationSite if we don't already have it, store it in the
1909  // slot.
1910  {
1911  FrameScope scope(masm, StackFrame::INTERNAL);
1912 
1913  // Arguments register must be smi-tagged to call out.
1914  __ SmiTag(eax);
1915  __ push(eax);
1916  __ push(edi);
1917  __ push(edx);
1918  __ push(ebx);
1919 
1920  CreateAllocationSiteStub create_stub(isolate);
1921  __ CallStub(&create_stub);
1922 
1923  __ pop(ebx);
1924  __ pop(edx);
1925  __ pop(edi);
1926  __ pop(eax);
1927  __ SmiUntag(eax);
1928  }
1929  __ jmp(&done);
1930 
1931  __ bind(&not_array_function);
1932  }
1933 
1934  __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
1935  FixedArray::kHeaderSize),
1936  edi);
1937  // We won't need edx or ebx anymore, just save edi
1938  __ push(edi);
1939  __ push(ebx);
1940  __ push(edx);
1941  __ RecordWriteArray(ebx, edi, edx, kDontSaveFPRegs,
1942  EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
1943  __ pop(edx);
1944  __ pop(ebx);
1945  __ pop(edi);
1946 
1947  __ bind(&done);
1948 }
1949 
1950 
1951 static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) {
1952  // Do not transform the receiver for strict mode functions.
1953  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
1954  __ test_b(FieldOperand(ecx, SharedFunctionInfo::kStrictModeByteOffset),
1955  1 << SharedFunctionInfo::kStrictModeBitWithinByte);
1956  __ j(not_equal, cont);
1957 
1958  // Do not transform the receiver for natives (shared already in ecx).
1959  __ test_b(FieldOperand(ecx, SharedFunctionInfo::kNativeByteOffset),
1960  1 << SharedFunctionInfo::kNativeBitWithinByte);
1961  __ j(not_equal, cont);
1962 }
1963 
1964 
1965 static void EmitSlowCase(Isolate* isolate,
1966  MacroAssembler* masm,
1967  int argc,
1968  Label* non_function) {
1969  // Check for function proxy.
1970  __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
1971  __ j(not_equal, non_function);
1972  __ pop(ecx);
1973  __ push(edi); // put proxy as additional argument under return address
1974  __ push(ecx);
1975  __ Move(eax, Immediate(argc + 1));
1976  __ Move(ebx, Immediate(0));
1977  __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
1978  {
1979  Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
1980  __ jmp(adaptor, RelocInfo::CODE_TARGET);
1981  }
1982 
1983  // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
1984  // of the original receiver from the call site).
1985  __ bind(non_function);
1986  __ mov(Operand(esp, (argc + 1) * kPointerSize), edi);
1987  __ Move(eax, Immediate(argc));
1988  __ Move(ebx, Immediate(0));
1989  __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
1990  Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
1991  __ jmp(adaptor, RelocInfo::CODE_TARGET);
1992 }
1993 
1994 
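// For sloppy-mode calls a primitive receiver is boxed first; EmitWrapCase
// below does this via Builtins::TO_OBJECT, in effect:
//   if (!IS_SPEC_OBJECT(receiver)) receiver = ToObject(receiver);
// Strict-mode functions and natives skip the wrap (see
// EmitContinueIfStrictOrNative above).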
1995 static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) {
1996  // Wrap the receiver and patch it back onto the stack.
1997  { FrameScope frame_scope(masm, StackFrame::INTERNAL);
1998  __ push(edi);
1999  __ push(eax);
2000  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
2001  __ pop(edi);
2002  }
2003  __ mov(Operand(esp, (argc + 1) * kPointerSize), eax);
2004  __ jmp(cont);
2005 }
2006 
2007 
2008 static void CallFunctionNoFeedback(MacroAssembler* masm,
2009  int argc, bool needs_checks,
2010  bool call_as_method) {
2011  // edi : the function to call
2012  Label slow, non_function, wrap, cont;
2013 
2014  if (needs_checks) {
2015  // Check that the function really is a JavaScript function.
2016  __ JumpIfSmi(edi, &non_function);
2017 
2018  // Goto slow case if we do not have a function.
2019  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
2020  __ j(not_equal, &slow);
2021  }
2022 
2023  // Fast-case: Just invoke the function.
2024  ParameterCount actual(argc);
2025 
2026  if (call_as_method) {
2027  if (needs_checks) {
2028  EmitContinueIfStrictOrNative(masm, &cont);
2029  }
2030 
2031  // Load the receiver from the stack.
2032  __ mov(eax, Operand(esp, (argc + 1) * kPointerSize));
2033 
2034  if (needs_checks) {
2035  __ JumpIfSmi(eax, &wrap);
2036 
2037  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
2038  __ j(below, &wrap);
2039  } else {
2040  __ jmp(&wrap);
2041  }
2042 
2043  __ bind(&cont);
2044  }
2045 
2046  __ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper());
2047 
2048  if (needs_checks) {
2049  // Slow-case: Non-function called.
2050  __ bind(&slow);
2051  // (non_function is bound in EmitSlowCase)
2052  EmitSlowCase(masm->isolate(), masm, argc, &non_function);
2053  }
2054 
2055  if (call_as_method) {
2056  __ bind(&wrap);
2057  EmitWrapCase(masm, argc, &cont);
2058  }
2059 }
2060 
2061 
2062 void CallFunctionStub::Generate(MacroAssembler* masm) {
2063  CallFunctionNoFeedback(masm, argc(), NeedsChecks(), CallAsMethod());
2064 }
2065 
2066 
2067 void CallConstructStub::Generate(MacroAssembler* masm) {
2068  // eax : number of arguments
2069  // ebx : feedback vector
2070  // edx : (only if ebx is not the megamorphic symbol) slot in feedback
2071  // vector (Smi)
2072  // edi : constructor function
2073  Label slow, non_function_call;
2074 
2075  // Check that function is not a smi.
2076  __ JumpIfSmi(edi, &non_function_call);
2077  // Check that function is a JSFunction.
2078  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
2079  __ j(not_equal, &slow);
2080 
2081  if (RecordCallTarget()) {
2082  GenerateRecordCallTarget(masm);
2083 
2084  if (FLAG_pretenuring_call_new) {
2085  // Put the AllocationSite from the feedback vector into ebx.
2086  // By adding kPointerSize we encode that we know the AllocationSite
2087  // entry is at the feedback vector slot given by edx + 1.
2088  __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
2089  FixedArray::kHeaderSize + kPointerSize));
2090  } else {
2091  Label feedback_register_initialized;
2092  // Put the AllocationSite from the feedback vector into ebx, or undefined.
2093  __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
2094  FixedArray::kHeaderSize));
2095  Handle<Map> allocation_site_map =
2096  isolate()->factory()->allocation_site_map();
2097  __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
2098  __ j(equal, &feedback_register_initialized);
2099  __ mov(ebx, isolate()->factory()->undefined_value());
2100  __ bind(&feedback_register_initialized);
2101  }
2102 
2103  __ AssertUndefinedOrAllocationSite(ebx);
2104  }
2105 
2106  // Jump to the function-specific construct stub.
2107  Register jmp_reg = ecx;
2108  __ mov(jmp_reg, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2109  __ mov(jmp_reg, FieldOperand(jmp_reg,
2110  SharedFunctionInfo::kConstructStubOffset));
2111  __ lea(jmp_reg, FieldOperand(jmp_reg, Code::kHeaderSize));
2112  __ jmp(jmp_reg);
2113 
2114  // edi: called object
2115  // eax: number of arguments
2116  // ecx: object map
2117  Label do_call;
2118  __ bind(&slow);
2119  __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
2120  __ j(not_equal, &non_function_call);
2121  __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
2122  __ jmp(&do_call);
2123 
2124  __ bind(&non_function_call);
2125  __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
2126  __ bind(&do_call);
2127  // Set expected number of arguments to zero (not changing eax).
2128  __ Move(ebx, Immediate(0));
2129  Handle<Code> arguments_adaptor =
2130  isolate()->builtins()->ArgumentsAdaptorTrampoline();
2131  __ jmp(arguments_adaptor, RelocInfo::CODE_TARGET);
2132 }
2133 
2134 
2135 static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
2136  __ mov(vector, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
2137  __ mov(vector, FieldOperand(vector, JSFunction::kSharedFunctionInfoOffset));
2138  __ mov(vector, FieldOperand(vector,
2139  SharedFunctionInfo::kFeedbackVectorOffset));
2140 }
2141 
2142 
2143 void CallIC_ArrayStub::Generate(MacroAssembler* masm) {
2144  // edi - function
2145  // edx - slot id
2146  Label miss;
2147  int argc = arg_count();
2148  ParameterCount actual(argc);
2149 
2150  EmitLoadTypeFeedbackVector(masm, ebx);
2151 
2152  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
2153  __ cmp(edi, ecx);
2154  __ j(not_equal, &miss);
2155 
2156  __ mov(eax, arg_count());
2157  __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
2158  FixedArray::kHeaderSize));
2159
2160  // Verify that ecx contains an AllocationSite
2161  Factory* factory = masm->isolate()->factory();
2162  __ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
2163  factory->allocation_site_map());
2164  __ j(not_equal, &miss);
2165 
2166  __ mov(ebx, ecx);
2167  ArrayConstructorStub stub(masm->isolate(), arg_count());
2168  __ TailCallStub(&stub);
2169 
2170  __ bind(&miss);
2171  GenerateMiss(masm);
2172 
2173  // The slow case. We need this no matter what, to complete a call after a miss.
2174  CallFunctionNoFeedback(masm,
2175  arg_count(),
2176  true,
2177  CallAsMethod());
2178 
2179  // Unreachable.
2180  __ int3();
2181 }
2182 
2183 
2184 void CallICStub::Generate(MacroAssembler* masm) {
2185  // edi - function
2186  // edx - slot id
2187  Isolate* isolate = masm->isolate();
2188  Label extra_checks_or_miss, slow_start;
2189  Label slow, non_function, wrap, cont;
2190  Label have_js_function;
2191  int argc = arg_count();
2192  ParameterCount actual(argc);
2193 
2194  EmitLoadTypeFeedbackVector(masm, ebx);
2195 
2196  // The checks. First, does edi match the recorded monomorphic target?
2197  __ cmp(edi, FieldOperand(ebx, edx, times_half_pointer_size,
2198  FixedArray::kHeaderSize));
2199  __ j(not_equal, &extra_checks_or_miss);
2200 
2201  __ bind(&have_js_function);
2202  if (CallAsMethod()) {
2203  EmitContinueIfStrictOrNative(masm, &cont);
2204 
2205  // Load the receiver from the stack.
2206  __ mov(eax, Operand(esp, (argc + 1) * kPointerSize));
2207 
2208  __ JumpIfSmi(eax, &wrap);
2209 
2210  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
2211  __ j(below, &wrap);
2212 
2213  __ bind(&cont);
2214  }
2215 
2216  __ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper());
2217 
2218  __ bind(&slow);
2219  EmitSlowCase(isolate, masm, argc, &non_function);
2220 
2221  if (CallAsMethod()) {
2222  __ bind(&wrap);
2223  EmitWrapCase(masm, argc, &cont);
2224  }
2225 
2226  __ bind(&extra_checks_or_miss);
2227  Label miss;
2228 
2229  __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
2230  FixedArray::kHeaderSize));
2231  __ cmp(ecx, Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
2232  __ j(equal, &slow_start);
2233  __ cmp(ecx, Immediate(TypeFeedbackVector::UninitializedSentinel(isolate)));
2234  __ j(equal, &miss);
2235 
2236  if (!FLAG_trace_ic) {
2237  // We are going megamorphic. If the feedback is a JSFunction, it is fine
2238  // to handle it here. More complex cases are dealt with in the runtime.
2239  __ AssertNotSmi(ecx);
2240  __ CmpObjectType(ecx, JS_FUNCTION_TYPE, ecx);
2241  __ j(not_equal, &miss);
2242  __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
2243  FixedArray::kHeaderSize),
2244  Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
2245  __ jmp(&slow_start);
2246  }
2247 
2248  // We are here because tracing is on or we are going monomorphic.
2249  __ bind(&miss);
2250  GenerateMiss(masm);
2251 
2252  // the slow case
2253  __ bind(&slow_start);
2254 
2255  // Check that the function really is a JavaScript function.
2256  __ JumpIfSmi(edi, &non_function);
2257 
2258  // Goto slow case if we do not have a function.
2259  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
2260  __ j(not_equal, &slow);
2261  __ jmp(&have_js_function);
2262 
2263  // Unreachable
2264  __ int3();
2265 }
2266 
2267 
2268 void CallICStub::GenerateMiss(MacroAssembler* masm) {
2269  // Get the receiver of the function from the stack; 1 ~ return address.
2270  __ mov(ecx, Operand(esp, (arg_count() + 1) * kPointerSize));
2271 
2272  {
2273  FrameScope scope(masm, StackFrame::INTERNAL);
2274 
2275  // Push the receiver and the function and feedback info.
2276  __ push(ecx);
2277  __ push(edi);
2278  __ push(ebx);
2279  __ push(edx);
2280 
2281  // Call the entry.
2282  IC::UtilityId id = GetICState() == DEFAULT ? IC::kCallIC_Miss
2283  : IC::kCallIC_Customization_Miss;
2284 
2285  ExternalReference miss = ExternalReference(IC_Utility(id),
2286  masm->isolate());
2287  __ CallExternalReference(miss, 4);
2288 
2289  // Move result to edi and exit the internal frame.
2290  __ mov(edi, eax);
2291  }
2292 }
2293 
2294 
2295 bool CEntryStub::NeedsImmovableCode() {
2296  return false;
2297 }
2298 
2299 
2300 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
2301  CEntryStub::GenerateAheadOfTime(isolate);
2302  StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
2303  isolate);
2304  // It is important that the store buffer overflow stubs are generated first.
2305  ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
2306  CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
2307  BinaryOpICStub::GenerateAheadOfTime(isolate);
2308  BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
2309 }
2310 
2311 
2312 void CodeStub::GenerateFPStubs(Isolate* isolate) {
2313  // Generate if not already in cache.
2314  CEntryStub(isolate, 1, kSaveFPRegs).GetCode();
2315  isolate->set_fp_stubs_generated(true);
2316 }
2317 
2318 
2319 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
2320  CEntryStub stub(isolate, 1, kDontSaveFPRegs);
2321  stub.GetCode();
2322 }
2323 
2324 
2325 void CEntryStub::Generate(MacroAssembler* masm) {
2326  // eax: number of arguments including receiver
2327  // ebx: pointer to C function (C callee-saved)
2328  // ebp: frame pointer (restored after C call)
2329  // esp: stack pointer (restored after C call)
2330  // esi: current context (C callee-saved)
2331  // edi: JS function of the caller (C callee-saved)
2332 
2333  ProfileEntryHookStub::MaybeCallEntryHook(masm);
2334
2335  // Enter the exit frame that transitions from JavaScript to C++.
2336  __ EnterExitFrame(save_doubles());
2337 
2338  // ebx: pointer to C function (C callee-saved)
2339  // ebp: frame pointer (restored after C call)
2340  // esp: stack pointer (restored after C call)
2341  // edi: number of arguments including receiver (C callee-saved)
2342  // esi: pointer to the first argument (C callee-saved)
2343 
2344  // Result returned in eax, or eax+edx if result size is 2.
2345 
2346  // Check stack alignment.
2347  if (FLAG_debug_code) {
2348  __ CheckStackAlignment();
2349  }
2350 
2351  // Call C function.
2352  __ mov(Operand(esp, 0 * kPointerSize), edi); // argc.
2353  __ mov(Operand(esp, 1 * kPointerSize), esi); // argv.
2354  __ mov(Operand(esp, 2 * kPointerSize),
2355  Immediate(ExternalReference::isolate_address(isolate())));
2356  __ call(ebx);
2357  // Result is in eax or edx:eax - do not destroy these registers!
2358 
2359  // Runtime functions should not return 'the hole'. Allowing it to escape may
2360  // lead to crashes in the IC code later.
2361  if (FLAG_debug_code) {
2362  Label okay;
2363  __ cmp(eax, isolate()->factory()->the_hole_value());
2364  __ j(not_equal, &okay, Label::kNear);
2365  __ int3();
2366  __ bind(&okay);
2367  }
2368 
2369  // Check result for exception sentinel.
2370  Label exception_returned;
2371  __ cmp(eax, isolate()->factory()->exception());
2372  __ j(equal, &exception_returned);
2373 
2374  ExternalReference pending_exception_address(
2375  Isolate::kPendingExceptionAddress, isolate());
2376 
2377  // Check that there is no pending exception, otherwise we
2378  // should have returned the exception sentinel.
2379  if (FLAG_debug_code) {
2380  __ push(edx);
2381  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
2382  Label okay;
2383  __ cmp(edx, Operand::StaticVariable(pending_exception_address));
2384  // Cannot use check here as it attempts to generate call into runtime.
2385  __ j(equal, &okay, Label::kNear);
2386  __ int3();
2387  __ bind(&okay);
2388  __ pop(edx);
2389  }
2390 
2391  // Exit the JavaScript to C++ exit frame.
2392  __ LeaveExitFrame(save_doubles());
2393  __ ret(0);
2394 
2395  // Handling of exception.
2396  __ bind(&exception_returned);
2397 
2398  // Retrieve the pending exception.
2399  __ mov(eax, Operand::StaticVariable(pending_exception_address));
2400 
2401  // Clear the pending exception.
2402  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
2403  __ mov(Operand::StaticVariable(pending_exception_address), edx);
2404 
2405  // Special handling of termination exceptions, which are uncatchable
2406  // by JavaScript code.
2407  Label throw_termination_exception;
2408  __ cmp(eax, isolate()->factory()->termination_exception());
2409  __ j(equal, &throw_termination_exception);
2410 
2411  // Handle normal exception.
2412  __ Throw(eax);
2413 
2414  __ bind(&throw_termination_exception);
2415  __ ThrowUncatchable(eax);
2416 }
2417 
2418 
2419 void JSEntryStub::Generate(MacroAssembler* masm) {
2420  Label invoke, handler_entry, exit;
2421  Label not_outermost_js, not_outermost_js_2;
2422 
2423  ProfileEntryHookStub::MaybeCallEntryHook(masm);
2424
2425  // Set up frame.
2426  __ push(ebp);
2427  __ mov(ebp, esp);
2428 
2429  // Push marker in two places.
2430  int marker = type();
2431  __ push(Immediate(Smi::FromInt(marker))); // context slot
2432  __ push(Immediate(Smi::FromInt(marker))); // function slot
2433  // Save callee-saved registers (C calling conventions).
2434  __ push(edi);
2435  __ push(esi);
2436  __ push(ebx);
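// Sketch of the entry frame built so far (stack grows down):
//   ebp+4 : return address
//   ebp+0 : saved ebp
//   ebp-4 : frame marker (context slot)
//   ebp-8 : frame marker (function slot)
//   ebp-12/-16/-20 : saved edi, esi, ebx (C callee-saved)
// The saved c_entry_fp is pushed next, below ebx.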
2437 
2438  // Save copies of the top frame descriptor on the stack.
2439  ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate());
2440  __ push(Operand::StaticVariable(c_entry_fp));
2441 
2442  // If this is the outermost JS call, set js_entry_sp value.
2443  ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
2444  __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
2445  __ j(not_equal, &not_outermost_js, Label::kNear);
2446  __ mov(Operand::StaticVariable(js_entry_sp), ebp);
2447  __ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
2448  __ jmp(&invoke, Label::kNear);
2449  __ bind(&not_outermost_js);
2450  __ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
2451 
2452  // Jump to a faked try block that does the invoke, with a faked catch
2453  // block that sets the pending exception.
2454  __ jmp(&invoke);
2455  __ bind(&handler_entry);
2456  handler_offset_ = handler_entry.pos();
2457  // Caught exception: Store result (exception) in the pending exception
2458  // field in the JSEnv and return a failure sentinel.
2459  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
2460  isolate());
2461  __ mov(Operand::StaticVariable(pending_exception), eax);
2462  __ mov(eax, Immediate(isolate()->factory()->exception()));
2463  __ jmp(&exit);
2464 
2465  // Invoke: Link this frame into the handler chain. There's only one
2466  // handler block in this code object, so its index is 0.
2467  __ bind(&invoke);
2468  __ PushTryHandler(StackHandler::JS_ENTRY, 0);
2469 
2470  // Clear any pending exceptions.
2471  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
2472  __ mov(Operand::StaticVariable(pending_exception), edx);
2473 
2474  // Fake a receiver (NULL).
2475  __ push(Immediate(0)); // receiver
2476 
2477  // Invoke the function by calling through JS entry trampoline builtin and
2478  // pop the faked function when we return. Notice that we cannot store a
2479  // reference to the trampoline code directly in this stub, because the
2480  // builtin stubs may not have been generated yet.
2481  if (type() == StackFrame::ENTRY_CONSTRUCT) {
2482  ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
2483  isolate());
2484  __ mov(edx, Immediate(construct_entry));
2485  } else {
2486  ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
2487  __ mov(edx, Immediate(entry));
2488  }
2489  __ mov(edx, Operand(edx, 0)); // deref address
2490  __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
2491  __ call(edx);
2492 
2493  // Unlink this frame from the handler chain.
2494  __ PopTryHandler();
2495 
2496  __ bind(&exit);
2497  // Check if the current stack frame is marked as the outermost JS frame.
2498  __ pop(ebx);
2499  __ cmp(ebx, Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
2500  __ j(not_equal, &not_outermost_js_2);
2501  __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
2502  __ bind(&not_outermost_js_2);
2503 
2504  // Restore the top frame descriptor from the stack.
2505  __ pop(Operand::StaticVariable(ExternalReference(
2506  Isolate::kCEntryFPAddress, isolate())));
2507 
2508  // Restore callee-saved registers (C calling conventions).
2509  __ pop(ebx);
2510  __ pop(esi);
2511  __ pop(edi);
2512  __ add(esp, Immediate(2 * kPointerSize)); // remove markers
2513 
2514  // Restore frame pointer and return.
2515  __ pop(ebp);
2516  __ ret(0);
2517 }
2518 
2519 
2520 // Generate stub code for instanceof.
2521 // This code can patch a call site inline cache of the instanceof check,
2522 // which looks like this.
2523 //
2524 // 81 ff XX XX XX XX cmp edi, <the hole, patched to a map>
2525 // 75 0a jne <some near label>
2526 // b8 XX XX XX XX mov eax, <the hole, patched to either true or false>
2527 //
2528 // If call site patching is requested the stack will have the delta from the
2529 // return address to the cmp instruction just below the return address. This
2530 // also means that call site patching can only take place with arguments in
2531 // registers. TOS looks like this when call site patching is requested
2532 //
2533 // esp[0] : return address
2534 // esp[4] : delta from return address to cmp instruction
2535 //
2536 void InstanceofStub::Generate(MacroAssembler* masm) {
2537  // Call site inlining and patching implies arguments in registers.
2538  DCHECK(HasArgsInRegisters() || !HasCallSiteInlineCheck());
2539
2540  // Fixed register usage throughout the stub.
2541  Register object = eax; // Object (lhs).
2542  Register map = ebx; // Map of the object.
2543  Register function = edx; // Function (rhs).
2544  Register prototype = edi; // Prototype of the function.
2545  Register scratch = ecx;
2546 
2547  // Constants describing the call site code to patch.
2548  static const int kDeltaToCmpImmediate = 2;
2549  static const int kDeltaToMov = 8;
2550  static const int kDeltaToMovImmediate = 9;
2551  static const int8_t kCmpEdiOperandByte1 = bit_cast<int8_t, uint8_t>(0x3b);
2552  static const int8_t kCmpEdiOperandByte2 = bit_cast<int8_t, uint8_t>(0x3d);
2553  static const int8_t kMovEaxImmediateByte = bit_cast<int8_t, uint8_t>(0xb8);
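// These deltas follow from the IA32 encoding of the patched sequence (a
// sketch): the asserted bytes 0x3b 0x3d decode as "cmp edi, [disp32]", so
// the embedded cell address sits 2 bytes in (opcode + ModRM). That 6-byte
// cmp plus the 2-byte short jne put the 0xb8 ("mov eax, imm32") opcode at
// offset 8 and its 32-bit immediate at offset 9.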
2554 
2555  DCHECK_EQ(object.code(), InstanceofStub::left().code());
2556  DCHECK_EQ(function.code(), InstanceofStub::right().code());
2557 
2558  // Get the object and function - they are always both needed.
2559  Label slow, not_js_object;
2560  if (!HasArgsInRegisters()) {
2561  __ mov(object, Operand(esp, 2 * kPointerSize));
2562  __ mov(function, Operand(esp, 1 * kPointerSize));
2563  }
2564 
2565  // Check that the left hand is a JS object.
2566  __ JumpIfSmi(object, &not_js_object);
2567  __ IsObjectJSObjectType(object, map, scratch, &not_js_object);
2568 
2569  // If there is a call site cache don't look in the global cache, but do the
2570  // real lookup and update the call site cache.
2571  if (!HasCallSiteInlineCheck() && !ReturnTrueFalseObject()) {
2572  // Look up the function and the map in the instanceof cache.
2573  Label miss;
2574  __ CompareRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
2575  __ j(not_equal, &miss, Label::kNear);
2576  __ CompareRoot(map, scratch, Heap::kInstanceofCacheMapRootIndex);
2577  __ j(not_equal, &miss, Label::kNear);
2578  __ LoadRoot(eax, Heap::kInstanceofCacheAnswerRootIndex);
2579  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
2580  __ bind(&miss);
2581  }
2582 
2583  // Get the prototype of the function.
2584  __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true);
2585 
2586  // Check that the function prototype is a JS object.
2587  __ JumpIfSmi(prototype, &slow);
2588  __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
2589 
2590  // Update the global instanceof or call site inlined cache with the current
2591  // map and function. The cached answer will be set when it is known below.
2592  if (!HasCallSiteInlineCheck()) {
2593  __ StoreRoot(map, scratch, Heap::kInstanceofCacheMapRootIndex);
2594  __ StoreRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
2595  } else {
2596  // The constants for the code patching are based on no push instructions
2597  // at the call site.
2598  DCHECK(HasArgsInRegisters());
2599  // Get return address and delta to inlined map check.
2600  __ mov(scratch, Operand(esp, 0 * kPointerSize));
2601  __ sub(scratch, Operand(esp, 1 * kPointerSize));
2602  if (FLAG_debug_code) {
2603  __ cmpb(Operand(scratch, 0), kCmpEdiOperandByte1);
2604  __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp1);
2605  __ cmpb(Operand(scratch, 1), kCmpEdiOperandByte2);
2606  __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp2);
2607  }
2608  __ mov(scratch, Operand(scratch, kDeltaToCmpImmediate));
2609  __ mov(Operand(scratch, 0), map);
2610  }
2611 
2612  // Loop through the prototype chain of the object looking for the function
2613  // prototype.
2614  __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset));
2615  Label loop, is_instance, is_not_instance;
2616  __ bind(&loop);
2617  __ cmp(scratch, prototype);
2618  __ j(equal, &is_instance, Label::kNear);
2619  Factory* factory = isolate()->factory();
2620  __ cmp(scratch, Immediate(factory->null_value()));
2621  __ j(equal, &is_not_instance, Label::kNear);
2622  __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
2623  __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset));
2624  __ jmp(&loop);
2625 
2626  __ bind(&is_instance);
2627  if (!HasCallSiteInlineCheck()) {
2628  __ mov(eax, Immediate(0));
2629  __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
2630  if (ReturnTrueFalseObject()) {
2631  __ mov(eax, factory->true_value());
2632  }
2633  } else {
2634  // Get return address and delta to inlined map check.
2635  __ mov(eax, factory->true_value());
2636  __ mov(scratch, Operand(esp, 0 * kPointerSize));
2637  __ sub(scratch, Operand(esp, 1 * kPointerSize));
2638  if (FLAG_debug_code) {
2639  __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
2640  __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
2641  }
2642  __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
2643  if (!ReturnTrueFalseObject()) {
2644  __ Move(eax, Immediate(0));
2645  }
2646  }
2647  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
2648 
2649  __ bind(&is_not_instance);
2650  if (!HasCallSiteInlineCheck()) {
2651  __ mov(eax, Immediate(Smi::FromInt(1)));
2652  __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
2653  if (ReturnTrueFalseObject()) {
2654  __ mov(eax, factory->false_value());
2655  }
2656  } else {
2657  // Get return address and delta to inlined map check.
2658  __ mov(eax, factory->false_value());
2659  __ mov(scratch, Operand(esp, 0 * kPointerSize));
2660  __ sub(scratch, Operand(esp, 1 * kPointerSize));
2661  if (FLAG_debug_code) {
2662  __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
2663  __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
2664  }
2665  __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
2666  if (!ReturnTrueFalseObject()) {
2667  __ Move(eax, Immediate(Smi::FromInt(1)));
2668  }
2669  }
2670  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
2671 
2672  Label object_not_null, object_not_null_or_smi;
2673  __ bind(&not_js_object);
2674  // Before null, smi and string value checks, check that the rhs is a function
2675  // as for a non-function rhs an exception needs to be thrown.
2676  __ JumpIfSmi(function, &slow, Label::kNear);
2677  __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch);
2678  __ j(not_equal, &slow, Label::kNear);
2679 
2680  // Null is not an instance of anything.
2681  __ cmp(object, factory->null_value());
2682  __ j(not_equal, &object_not_null, Label::kNear);
2683  if (ReturnTrueFalseObject()) {
2684  __ mov(eax, factory->false_value());
2685  } else {
2686  __ Move(eax, Immediate(Smi::FromInt(1)));
2687  }
2688  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
2689 
2690  __ bind(&object_not_null);
2691  // Smi values are not instances of anything.
2692  __ JumpIfNotSmi(object, &object_not_null_or_smi, Label::kNear);
2693  if (ReturnTrueFalseObject()) {
2694  __ mov(eax, factory->false_value());
2695  } else {
2696  __ Move(eax, Immediate(Smi::FromInt(1)));
2697  }
2698  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
2699 
2700  __ bind(&object_not_null_or_smi);
2701  // String values are not instances of anything.
2702  Condition is_string = masm->IsObjectStringType(object, scratch, scratch);
2703  __ j(NegateCondition(is_string), &slow, Label::kNear);
2704  if (ReturnTrueFalseObject()) {
2705  __ mov(eax, factory->false_value());
2706  } else {
2707  __ Move(eax, Immediate(Smi::FromInt(1)));
2708  }
2709  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
2710 
2711  // Slow-case: Go through the JavaScript implementation.
2712  __ bind(&slow);
2713  if (!ReturnTrueFalseObject()) {
2714  // Tail call the builtin which returns 0 or 1.
2715  if (HasArgsInRegisters()) {
2716  // Push arguments below return address.
2717  __ pop(scratch);
2718  __ push(object);
2719  __ push(function);
2720  __ push(scratch);
2721  }
2722  __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
2723  } else {
2724  // Call the builtin and convert 0/1 to true/false.
2725  {
2726  FrameScope scope(masm, StackFrame::INTERNAL);
2727  __ push(object);
2728  __ push(function);
2729  __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
2730  }
2731  Label true_value, done;
2732  __ test(eax, eax);
2733  __ j(zero, &true_value, Label::kNear);
2734  __ mov(eax, factory->false_value());
2735  __ jmp(&done, Label::kNear);
2736  __ bind(&true_value);
2737  __ mov(eax, factory->true_value());
2738  __ bind(&done);
2739  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
2740  }
2741 }
2742 
2743 
2744 // -------------------------------------------------------------------------
2745 // StringCharCodeAtGenerator
2746 
2747 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
2748  // If the receiver is a smi trigger the non-string case.
2749  STATIC_ASSERT(kSmiTag == 0);
2750  __ JumpIfSmi(object_, receiver_not_string_);
2751 
2752  // Fetch the instance type of the receiver into result register.
2753  __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
2754  __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
2755  // If the receiver is not a string trigger the non-string case.
2756  __ test(result_, Immediate(kIsNotStringMask));
2757  __ j(not_zero, receiver_not_string_);
2758
2759  // If the index is non-smi trigger the non-smi case.
2760  STATIC_ASSERT(kSmiTag == 0);
2761  __ JumpIfNotSmi(index_, &index_not_smi_);
2762  __ bind(&got_smi_index_);
2763 
2764  // Check for index out of range.
2765  __ cmp(index_, FieldOperand(object_, String::kLengthOffset));
2766  __ j(above_equal, index_out_of_range_);
2767
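// Note: above_equal is an unsigned condition, so the bounds check above
// also rejects negative smi indices (as unsigned values they exceed any
// valid string length).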
2768  __ SmiUntag(index_);
2769 
2770  Factory* factory = masm->isolate()->factory();
2771  StringCharLoadGenerator::Generate(
2772  masm, factory, object_, index_, result_, &call_runtime_);
2773 
2774  __ SmiTag(result_);
2775  __ bind(&exit_);
2776 }
2777 
2778 
2779 void StringCharCodeAtGenerator::GenerateSlow(
2780  MacroAssembler* masm,
2781  const RuntimeCallHelper& call_helper) {
2782  __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
2783 
2784  // Index is not a smi.
2785  __ bind(&index_not_smi_);
2786  // If index is a heap number, try converting it to an integer.
2787  __ CheckMap(index_,
2788  masm->isolate()->factory()->heap_number_map(),
2789  index_not_number_,
2790  DONT_DO_SMI_CHECK);
2791  call_helper.BeforeCall(masm);
2792  __ push(object_);
2793  __ push(index_); // Consumed by runtime conversion function.
2794  if (index_flags_ == STRING_INDEX_IS_NUMBER) {
2795  __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
2796  } else {
2797  DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
2798  // NumberToSmi discards numbers that are not exact integers.
2799  __ CallRuntime(Runtime::kNumberToSmi, 1);
2800  }
2801  if (!index_.is(eax)) {
2802  // Save the conversion result before the pop instructions below
2803  // have a chance to overwrite it.
2804  __ mov(index_, eax);
2805  }
2806  __ pop(object_);
2807  // Reload the instance type.
2808  __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
2809  __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
2810  call_helper.AfterCall(masm);
2811  // If index is still not a smi, it must be out of range.
2812  STATIC_ASSERT(kSmiTag == 0);
2813  __ JumpIfNotSmi(index_, index_out_of_range_);
2814  // Otherwise, return to the fast path.
2815  __ jmp(&got_smi_index_);
2816 
2817  // Call runtime. We get here when the receiver is a string and the
2818  // index is a number, but the code of getting the actual character
2819  // is too complex (e.g., when the string needs to be flattened).
2820  __ bind(&call_runtime_);
2821  call_helper.BeforeCall(masm);
2822  __ push(object_);
2823  __ SmiTag(index_);
2824  __ push(index_);
2825  __ CallRuntime(Runtime::kStringCharCodeAtRT, 2);
2826  if (!result_.is(eax)) {
2827  __ mov(result_, eax);
2828  }
2829  call_helper.AfterCall(masm);
2830  __ jmp(&exit_);
2831 
2832  __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
2833 }
2834 
2835 
2836 // -------------------------------------------------------------------------
2837 // StringCharFromCodeGenerator
2838 
2839 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
2840  // Fast case of Heap::LookupSingleCharacterStringFromCode.
2841  STATIC_ASSERT(kSmiTag == 0);
2842  STATIC_ASSERT(kSmiShiftSize == 0);
2843  DCHECK(base::bits::IsPowerOfTwo32(String::kMaxOneByteCharCode + 1));
2844  __ test(code_,
2845  Immediate(kSmiTagMask |
2846  ((~String::kMaxOneByteCharCode) << kSmiTagSize)));
2847  __ j(not_zero, &slow_case_);
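// One test covers both checks, in effect:
//   (code & (kSmiTagMask | ((~kMaxOneByteCharCode) << kSmiTagSize))) == 0
// e.g. with kMaxOneByteCharCode == 0xFF the mask is 0xfffffe01, which is
// zero only for an even (smi-tagged) value encoding a char code <= 0xff.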
2848 
2849  Factory* factory = masm->isolate()->factory();
2850  __ Move(result_, Immediate(factory->single_character_string_cache()));
2851  STATIC_ASSERT(kSmiTag == 0);
2852  STATIC_ASSERT(kSmiTagSize == 1);
2853  STATIC_ASSERT(kSmiShiftSize == 0);
2854  // At this point code register contains smi tagged one byte char code.
2855  __ mov(result_, FieldOperand(result_,
2856  code_, times_half_pointer_size,
2857  FixedArray::kHeaderSize));
2858  __ cmp(result_, factory->undefined_value());
2859  __ j(equal, &slow_case_);
2860  __ bind(&exit_);
2861 }
2862 
2863 
2864 void StringCharFromCodeGenerator::GenerateSlow(
2865  MacroAssembler* masm,
2866  const RuntimeCallHelper& call_helper) {
2867  __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
2868 
2869  __ bind(&slow_case_);
2870  call_helper.BeforeCall(masm);
2871  __ push(code_);
2872  __ CallRuntime(Runtime::kCharFromCode, 1);
2873  if (!result_.is(eax)) {
2874  __ mov(result_, eax);
2875  }
2876  call_helper.AfterCall(masm);
2877  __ jmp(&exit_);
2878 
2879  __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
2880 }
2881 
2882 
2883 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
2884  Register dest,
2885  Register src,
2886  Register count,
2887  Register scratch,
2888  String::Encoding encoding) {
2889  DCHECK(!scratch.is(dest));
2890  DCHECK(!scratch.is(src));
2891  DCHECK(!scratch.is(count));
2892 
2893  // Nothing to do for zero characters.
2894  Label done;
2895  __ test(count, count);
2896  __ j(zero, &done);
2897 
2898  // Make count the number of bytes to copy.
2899  if (encoding == String::TWO_BYTE_ENCODING) {
2900  __ shl(count, 1);
2901  }
2902 
2903  Label loop;
2904  __ bind(&loop);
2905  __ mov_b(scratch, Operand(src, 0));
2906  __ mov_b(Operand(dest, 0), scratch);
2907  __ inc(src);
2908  __ inc(dest);
2909  __ dec(count);
2910  __ j(not_zero, &loop);
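// Equivalent C sketch of the byte loop above (count is already in bytes):
//   do { *dest++ = *src++; } while (--count != 0);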
2911 
2912  __ bind(&done);
2913 }
2914 
2915 
2916 void SubStringStub::Generate(MacroAssembler* masm) {
2917  Label runtime;
2918 
2919  // Stack frame on entry.
2920  // esp[0]: return address
2921  // esp[4]: to
2922  // esp[8]: from
2923  // esp[12]: string
2924 
2925  // Make sure first argument is a string.
2926  __ mov(eax, Operand(esp, 3 * kPointerSize));
2927  STATIC_ASSERT(kSmiTag == 0);
2928  __ JumpIfSmi(eax, &runtime);
2929  Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
2930  __ j(NegateCondition(is_string), &runtime);
2931 
2932  // eax: string
2933  // ebx: instance type
2934 
2935  // Calculate length of sub string using the smi values.
2936  __ mov(ecx, Operand(esp, 1 * kPointerSize)); // To index.
2937  __ JumpIfNotSmi(ecx, &runtime);
2938  __ mov(edx, Operand(esp, 2 * kPointerSize)); // From index.
2939  __ JumpIfNotSmi(edx, &runtime);
2940  __ sub(ecx, edx);
2941  __ cmp(ecx, FieldOperand(eax, String::kLengthOffset));
2942  Label not_original_string;
2943  // Shorter than original string's length: an actual substring.
2944  __ j(below, &not_original_string, Label::kNear);
2945  // Longer than original string's length or negative: unsafe arguments.
2946  __ j(above, &runtime);
2947  // Return original string.
2948  Counters* counters = isolate()->counters();
2949  __ IncrementCounter(counters->sub_string_native(), 1);
2950  __ ret(3 * kPointerSize);
2951  __ bind(&not_original_string);
2952 
2953  Label single_char;
2954  __ cmp(ecx, Immediate(Smi::FromInt(1)));
2955  __ j(equal, &single_char);
2956 
2957  // eax: string
2958  // ebx: instance type
2959  // ecx: sub string length (smi)
2960  // edx: from index (smi)
2961  // Deal with different string types: update the index if necessary
2962  // and put the underlying string into edi.
2963  Label underlying_unpacked, sliced_string, seq_or_external_string;
2964  // If the string is not indirect, it can only be sequential or external.
2965  STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
2966  STATIC_ASSERT(kIsIndirectStringMask != 0);
2967  __ test(ebx, Immediate(kIsIndirectStringMask));
2968  __ j(zero, &seq_or_external_string, Label::kNear);
2969 
2970  Factory* factory = isolate()->factory();
2971  __ test(ebx, Immediate(kSlicedNotConsMask));
2972  __ j(not_zero, &sliced_string, Label::kNear);
2973  // Cons string. Check whether it is flat, then fetch first part.
2974  // Flat cons strings have an empty second part.
2975  __ cmp(FieldOperand(eax, ConsString::kSecondOffset),
2976  factory->empty_string());
2977  __ j(not_equal, &runtime);
2978  __ mov(edi, FieldOperand(eax, ConsString::kFirstOffset));
2979  // Update instance type.
2980  __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
2981  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
2982  __ jmp(&underlying_unpacked, Label::kNear);
2983 
2984  __ bind(&sliced_string);
2985  // Sliced string. Fetch parent and adjust start index by offset.
2986  __ add(edx, FieldOperand(eax, SlicedString::kOffsetOffset));
2987  __ mov(edi, FieldOperand(eax, SlicedString::kParentOffset));
2988  // Update instance type.
2989  __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
2990  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
2991  __ jmp(&underlying_unpacked, Label::kNear);
2992 
2993  __ bind(&seq_or_external_string);
2994  // Sequential or external string. Just move string to the expected register.
2995  __ mov(edi, eax);
2996 
2997  __ bind(&underlying_unpacked);
2998 
2999  if (FLAG_string_slices) {
3000  Label copy_routine;
3001  // edi: underlying subject string
3002  // ebx: instance type of underlying subject string
3003  // edx: adjusted start index (smi)
3004  // ecx: length (smi)
3005  __ cmp(ecx, Immediate(Smi::FromInt(SlicedString::kMinLength)));
3006  // Short slice. Copy instead of slicing.
3007  __ j(less, &copy_routine);
3008  // Allocate new sliced string. At this point we do not reload the instance
3009  // type including the string encoding because we simply rely on the info
3010  // provided by the original string. It does not matter if the original
3011  // string's encoding is wrong because we always have to recheck encoding of
3012  // the newly created string's parent anyway due to externalized strings.
3013  Label two_byte_slice, set_slice_header;
3014  STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
3015  STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
3016  __ test(ebx, Immediate(kStringEncodingMask));
3017  __ j(zero, &two_byte_slice, Label::kNear);
3018  __ AllocateOneByteSlicedString(eax, ebx, no_reg, &runtime);
3019  __ jmp(&set_slice_header, Label::kNear);
3020  __ bind(&two_byte_slice);
3021  __ AllocateTwoByteSlicedString(eax, ebx, no_reg, &runtime);
3022  __ bind(&set_slice_header);
3023  __ mov(FieldOperand(eax, SlicedString::kLengthOffset), ecx);
3024  __ mov(FieldOperand(eax, SlicedString::kHashFieldOffset),
3025  Immediate(String::kEmptyHashField));
3026  __ mov(FieldOperand(eax, SlicedString::kParentOffset), edi);
3027  __ mov(FieldOperand(eax, SlicedString::kOffsetOffset), edx);
3028  __ IncrementCounter(counters->sub_string_native(), 1);
3029  __ ret(3 * kPointerSize);
3030 
3031  __ bind(&copy_routine);
3032  }
3033 
3034  // edi: underlying subject string
3035  // ebx: instance type of underlying subject string
3036  // edx: adjusted start index (smi)
3037  // ecx: length (smi)
3038  // The subject string can only be external or sequential string of either
3039  // encoding at this point.
3040  Label two_byte_sequential, runtime_drop_two, sequential_string;
3041  STATIC_ASSERT(kExternalStringTag != 0);
3042  STATIC_ASSERT(kSeqStringTag == 0);
3043  __ test_b(ebx, kExternalStringTag);
3044  __ j(zero, &sequential_string);
3045 
3046  // Handle external string.
3047  // Rule out short external strings.
3048  STATIC_ASSERT(kShortExternalStringTag != 0);
3049  __ test_b(ebx, kShortExternalStringMask);
3050  __ j(not_zero, &runtime);
3051  __ mov(edi, FieldOperand(edi, ExternalString::kResourceDataOffset));
3052  // Move the pointer so that offset-wise, it looks like a sequential string.
3053  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
3054  __ sub(edi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3055
3056  __ bind(&sequential_string);
3057  // Stash away (adjusted) index and (underlying) string.
3058  __ push(edx);
3059  __ push(edi);
3060  __ SmiUntag(ecx);
3061  STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
3062  __ test_b(ebx, kStringEncodingMask);
3063  __ j(zero, &two_byte_sequential);
3064 
3065  // Sequential one byte string. Allocate the result.
3066  __ AllocateOneByteString(eax, ecx, ebx, edx, edi, &runtime_drop_two);
3067 
3068  // eax: result string
3069  // ecx: result string length
3070  // Locate first character of result.
3071  __ mov(edi, eax);
3072  __ add(edi, Immediate(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3073  // Load string argument and locate character of sub string start.
3074  __ pop(edx);
3075  __ pop(ebx);
3076  __ SmiUntag(ebx);
3077  __ lea(edx, FieldOperand(edx, ebx, times_1, SeqOneByteString::kHeaderSize));
3078
3079  // eax: result string
3080  // ecx: result length
3081  // edi: first character of result
3082  // edx: character of sub string start
3083  StringHelper::GenerateCopyCharacters(masm, edi, edx, ecx, ebx,
3084  String::ONE_BYTE_ENCODING);
3085  __ IncrementCounter(counters->sub_string_native(), 1);
3086  __ ret(3 * kPointerSize);
3087 
3088  __ bind(&two_byte_sequential);
3089  // Sequential two-byte string. Allocate the result.
3090  __ AllocateTwoByteString(eax, ecx, ebx, edx, edi, &runtime_drop_two);
3091 
3092  // eax: result string
3093  // ecx: result string length
3094  // Locate first character of result.
3095  __ mov(edi, eax);
3096  __ add(edi,
3097  Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3098  // Load string argument and locate character of sub string start.
3099  __ pop(edx);
3100  __ pop(ebx);
3101  // As from is a smi it is already multiplied by 2, which matches the size
3102  // of a two-byte character.
3103  STATIC_ASSERT(kSmiTag == 0);
3104  STATIC_ASSERT(kSmiTagSize == 1 && kSmiShiftSize == 0);
3105  __ lea(edx, FieldOperand(edx, ebx, times_1, SeqTwoByteString::kHeaderSize));
3106
3107  // eax: result string
3108  // ecx: result length
3109  // edi: first character of result
3110  // edx: character of sub string start
3111  StringHelper::GenerateCopyCharacters(masm, edi, edx, ecx, ebx,
3112  String::TWO_BYTE_ENCODING);
3113  __ IncrementCounter(counters->sub_string_native(), 1);
3114  __ ret(3 * kPointerSize);
3115 
3116  // Drop pushed values on the stack before tail call.
3117  __ bind(&runtime_drop_two);
3118  __ Drop(2);
3119 
3120  // Just jump to runtime to create the sub string.
3121  __ bind(&runtime);
3122  __ TailCallRuntime(Runtime::kSubString, 3, 1);
3123 
3124  __ bind(&single_char);
3125  // eax: string
3126  // ebx: instance type
3127  // ecx: sub string length (smi)
3128  // edx: from index (smi)
3129  StringCharAtGenerator generator(
3130  eax, edx, ecx, eax, &runtime, &runtime, &runtime, STRING_INDEX_IS_NUMBER);
3131  generator.GenerateFast(masm);
3132  __ ret(3 * kPointerSize);
3133  generator.SkipSlow(masm, &runtime);
3134 }
3135 
3136 
3137 void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
3138  Register left,
3139  Register right,
3140  Register scratch1,
3141  Register scratch2) {
3142  Register length = scratch1;
3143 
3144  // Compare lengths.
3145  Label strings_not_equal, check_zero_length;
3146  __ mov(length, FieldOperand(left, String::kLengthOffset));
3147  __ cmp(length, FieldOperand(right, String::kLengthOffset));
3148  __ j(equal, &check_zero_length, Label::kNear);
3149  __ bind(&strings_not_equal);
3150  __ Move(eax, Immediate(Smi::FromInt(NOT_EQUAL)));
3151  __ ret(0);
3152 
3153  // Check if the length is zero.
3154  Label compare_chars;
3155  __ bind(&check_zero_length);
3156  STATIC_ASSERT(kSmiTag == 0);
3157  __ test(length, length);
3158  __ j(not_zero, &compare_chars, Label::kNear);
3159  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
3160  __ ret(0);
3161 
3162  // Compare characters.
3163  __ bind(&compare_chars);
3164  GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2,
3165  &strings_not_equal, Label::kNear);
3166 
3167  // Characters are equal.
3168  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
3169  __ ret(0);
3170 }
3171 
3172 
3173 void StringHelper::GenerateCompareFlatOneByteStrings(
3174  MacroAssembler* masm, Register left, Register right, Register scratch1,
3175  Register scratch2, Register scratch3) {
3176  Counters* counters = masm->isolate()->counters();
3177  __ IncrementCounter(counters->string_compare_native(), 1);
3178 
3179  // Find minimum length.
3180  Label left_shorter;
3181  __ mov(scratch1, FieldOperand(left, String::kLengthOffset));
3182  __ mov(scratch3, scratch1);
3183  __ sub(scratch3, FieldOperand(right, String::kLengthOffset));
3184 
3185  Register length_delta = scratch3;
3186 
3187  __ j(less_equal, &left_shorter, Label::kNear);
3188  // Right string is shorter. Change scratch1 to be length of right string.
3189  __ sub(scratch1, length_delta);
3190  __ bind(&left_shorter);
3191 
3192  Register min_length = scratch1;
3193 
3194  // If either length is zero, just compare lengths.
3195  Label compare_lengths;
3196  __ test(min_length, min_length);
3197  __ j(zero, &compare_lengths, Label::kNear);
3198 
3199  // Compare characters.
3200  Label result_not_equal;
3201  GenerateOneByteCharsCompareLoop(masm, left, right, min_length, scratch2,
3202  &result_not_equal, Label::kNear);
3203 
3204  // Compare lengths - strings up to min-length are equal.
3205  __ bind(&compare_lengths);
3206  __ test(length_delta, length_delta);
3207  Label length_not_equal;
3208  __ j(not_zero, &length_not_equal, Label::kNear);
3209 
3210  // Result is EQUAL.
3211  STATIC_ASSERT(EQUAL == 0);
3212  STATIC_ASSERT(kSmiTag == 0);
3213  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
3214  __ ret(0);
3215 
3216  Label result_greater;
3217  Label result_less;
3218  __ bind(&length_not_equal);
3219  __ j(greater, &result_greater, Label::kNear);
3220  __ jmp(&result_less, Label::kNear);
3221  __ bind(&result_not_equal);
3222  __ j(above, &result_greater, Label::kNear);
3223  __ bind(&result_less);
3224 
3225  // Result is LESS.
3226  __ Move(eax, Immediate(Smi::FromInt(LESS)));
3227  __ ret(0);
3228 
3229  // Result is GREATER.
3230  __ bind(&result_greater);
3231  __ Move(eax, Immediate(Smi::FromInt(GREATER)));
3232  __ ret(0);
3233 }
3234 
3235 
3236 void StringHelper::GenerateOneByteCharsCompareLoop(
3237  MacroAssembler* masm, Register left, Register right, Register length,
3238  Register scratch, Label* chars_not_equal,
3239  Label::Distance chars_not_equal_near) {
3240  // Change index to run from -length to -1 by adding length to string
3241  // start. This means that loop ends when index reaches zero, which
3242  // doesn't need an additional compare.
3243  __ SmiUntag(length);
3244  __ lea(left,
3245  FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
3246  __ lea(right,
3247  FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize));
3248  __ neg(length);
3249  Register index = length; // index = -length;
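// Equivalent C sketch: with both pointers moved past the last character,
//   for (int i = -length; i != 0; i++)
//     if (left[i] != right[i]) goto chars_not_equal;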
3250 
3251  // Compare loop.
3252  Label loop;
3253  __ bind(&loop);
3254  __ mov_b(scratch, Operand(left, index, times_1, 0));
3255  __ cmpb(scratch, Operand(right, index, times_1, 0));
3256  __ j(not_equal, chars_not_equal, chars_not_equal_near);
3257  __ inc(index);
3258  __ j(not_zero, &loop);
3259 }
3260 
3261 
3262 void StringCompareStub::Generate(MacroAssembler* masm) {
3263  Label runtime;
3264 
3265  // Stack frame on entry.
3266  // esp[0]: return address
3267  // esp[4]: right string
3268  // esp[8]: left string
3269 
3270  __ mov(edx, Operand(esp, 2 * kPointerSize)); // left
3271  __ mov(eax, Operand(esp, 1 * kPointerSize)); // right
3272 
3273  Label not_same;
3274  __ cmp(edx, eax);
3275  __ j(not_equal, &not_same, Label::kNear);
3276  STATIC_ASSERT(EQUAL == 0);
3277  STATIC_ASSERT(kSmiTag == 0);
3278  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
3279  __ IncrementCounter(isolate()->counters()->string_compare_native(), 1);
3280  __ ret(2 * kPointerSize);
3281 
3282  __ bind(&not_same);
3283 
3284  // Check that both objects are sequential one-byte strings.
3285  __ JumpIfNotBothSequentialOneByteStrings(edx, eax, ecx, ebx, &runtime);
3286 
3287  // Compare flat one-byte strings.
3288  // Drop arguments from the stack.
3289  __ pop(ecx);
3290  __ add(esp, Immediate(2 * kPointerSize));
3291  __ push(ecx);
3292  StringHelper::GenerateCompareFlatOneByteStrings(masm, edx, eax, ecx, ebx,
3293  edi);
3294 
3295  // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
3296  // tagged as a small integer.
3297  __ bind(&runtime);
3298  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
3299 }
3300 
3301 
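// --- Editorial note (not part of code-stubs-ia32.cc) ------------------------
// The pop(ecx) / add(esp, ...) / push(ecx) sequence above is the ia32 idiom
// for dropping stack arguments while keeping the return address on top: the
// return address is popped into a scratch register, the two string slots are
// discarded, and the return address is pushed back, so the tail-called
// compare helper returns straight to the original caller.
// ----------------------------------------------------------------------------
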
3302 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
3303  // ----------- S t a t e -------------
3304  // -- edx : left
3305  // -- eax : right
3306  // -- esp[0] : return address
3307  // -----------------------------------
3308 
3309  // Load ecx with the allocation site. We stick an undefined dummy value here
3310  // and replace it with the real allocation site later when we instantiate this
3311  // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
3312  __ mov(ecx, handle(isolate()->heap()->undefined_value()));
3313 
3314  // Make sure that we actually patched the allocation site.
3315  if (FLAG_debug_code) {
3316  __ test(ecx, Immediate(kSmiTagMask));
3317  __ Assert(not_equal, kExpectedAllocationSite);
3318  __ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
3319  isolate()->factory()->allocation_site_map());
3320  __ Assert(equal, kExpectedAllocationSite);
3321  }
3322 
3323  // Tail call into the stub that handles binary operations with allocation
3324  // sites.
3325  BinaryOpWithAllocationSiteStub stub(isolate(), state());
3326  __ TailCallStub(&stub);
3327 }
3328 
3329 
3330 void CompareICStub::GenerateSmis(MacroAssembler* masm) {
3331  DCHECK(state() == CompareICState::SMI);
3332  Label miss;
3333  __ mov(ecx, edx);
3334  __ or_(ecx, eax);
3335  __ JumpIfNotSmi(ecx, &miss, Label::kNear);
3336 
3337  if (GetCondition() == equal) {
3338  // For equality we do not care about the sign of the result.
3339  __ sub(eax, edx);
3340  } else {
3341  Label done;
3342  __ sub(edx, eax);
3343  __ j(no_overflow, &done, Label::kNear);
3344  // Correct sign of result in case of overflow.
3345  __ not_(edx);
3346  __ bind(&done);
3347  __ mov(eax, edx);
3348  }
3349  __ ret(0);
3350 
3351  __ bind(&miss);
3352  GenerateMiss(masm);
3353 }
3354 
3355 
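// --- Editorial sketch (not part of code-stubs-ia32.cc) ----------------------
// In GenerateSmis above, an ordered compare computes left - right and, on
// overflow, takes the bitwise complement: an overflowed difference has the
// wrong sign bit, and not_ flips it while keeping the value non-zero. A C++
// model of the fix-up (uses the GCC/Clang __builtin_sub_overflow intrinsic;
// illustration only):
static inline int SmiCompareModel(int left, int right) {
  int diff;
  if (__builtin_sub_overflow(left, right, &diff)) {
    return ~diff;  // Overflow: the complement restores the correct sign.
  }
  return diff;  // No overflow: the difference already has the right sign.
}
// ----------------------------------------------------------------------------
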
3356 void CompareICStub::GenerateNumbers(MacroAssembler* masm) {
3357  DCHECK(state() == CompareICState::NUMBER);
3358 
3359  Label generic_stub;
3360  Label unordered, maybe_undefined1, maybe_undefined2;
3361  Label miss;
3362 
3363  if (left() == CompareICState::SMI) {
3364  __ JumpIfNotSmi(edx, &miss);
3365  }
3366  if (right() == CompareICState::SMI) {
3367  __ JumpIfNotSmi(eax, &miss);
3368  }
3369 
3370  // Load left and right operand.
3371  Label done, left, left_smi, right_smi;
3372  __ JumpIfSmi(eax, &right_smi, Label::kNear);
3373  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
3374  isolate()->factory()->heap_number_map());
3375  __ j(not_equal, &maybe_undefined1, Label::kNear);
3376  __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
3377  __ jmp(&left, Label::kNear);
3378  __ bind(&right_smi);
3379  __ mov(ecx, eax); // Can't clobber eax because we can still jump away.
3380  __ SmiUntag(ecx);
3381  __ Cvtsi2sd(xmm1, ecx);
3382 
3383  __ bind(&left);
3384  __ JumpIfSmi(edx, &left_smi, Label::kNear);
3385  __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
3386  isolate()->factory()->heap_number_map());
3387  __ j(not_equal, &maybe_undefined2, Label::kNear);
3388  __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
3389  __ jmp(&done);
3390  __ bind(&left_smi);
3391  __ mov(ecx, edx); // Can't clobber edx because we can still jump away.
3392  __ SmiUntag(ecx);
3393  __ Cvtsi2sd(xmm0, ecx);
3394 
3395  __ bind(&done);
3396  // Compare operands.
3397  __ ucomisd(xmm0, xmm1);
3398 
3399  // Don't base result on EFLAGS when a NaN is involved.
3400  __ j(parity_even, &unordered, Label::kNear);
3401 
3402  // Return a result of -1, 0, or 1, based on EFLAGS.
3403  // Performing mov, because xor would destroy the flag register.
3404  __ mov(eax, 0); // equal
3405  __ mov(ecx, Immediate(Smi::FromInt(1)));
3406  __ cmov(above, eax, ecx);
3407  __ mov(ecx, Immediate(Smi::FromInt(-1)));
3408  __ cmov(below, eax, ecx);
3409  __ ret(0);
3410 
3411  __ bind(&unordered);
3412  __ bind(&generic_stub);
3413  CompareICStub stub(isolate(), op(), CompareICState::GENERIC,
3414  CompareICState::GENERIC, CompareICState::GENERIC);
3415  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
3416 
3417  __ bind(&maybe_undefined1);
3418  if (Token::IsOrderedRelationalCompareOp(op())) {
3419  __ cmp(eax, Immediate(isolate()->factory()->undefined_value()));
3420  __ j(not_equal, &miss);
3421  __ JumpIfSmi(edx, &unordered);
3422  __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
3423  __ j(not_equal, &maybe_undefined2, Label::kNear);
3424  __ jmp(&unordered);
3425  }
3426 
3427  __ bind(&maybe_undefined2);
3428  if (Token::IsOrderedRelationalCompareOp(op())) {
3429  __ cmp(edx, Immediate(isolate()->factory()->undefined_value()));
3430  __ j(equal, &unordered);
3431  }
3432 
3433  __ bind(&miss);
3434  GenerateMiss(masm);
3435 }
3436 
3437 
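// --- Editorial note (not part of code-stubs-ia32.cc) ------------------------
// ucomisd raises the parity flag for an unordered result, i.e. when either
// operand is a NaN, which is why GenerateNumbers branches on parity_even
// before trusting EFLAGS. The two cmov instructions then fold the flags
// into the Smi-tagged ternary result: eax starts at 0 (equal) and becomes
// 1 on above (left > right) or -1 on below (left < right).
// ----------------------------------------------------------------------------
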
3438 void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) {
3439  DCHECK(state() == CompareICState::INTERNALIZED_STRING);
3440  DCHECK(GetCondition() == equal);
3441 
3442  // Registers containing left and right operands respectively.
3443  Register left = edx;
3444  Register right = eax;
3445  Register tmp1 = ecx;
3446  Register tmp2 = ebx;
3447 
3448  // Check that both operands are heap objects.
3449  Label miss;
3450  __ mov(tmp1, left);
3451  STATIC_ASSERT(kSmiTag == 0);
3452  __ and_(tmp1, right);
3453  __ JumpIfSmi(tmp1, &miss, Label::kNear);
3454 
3455  // Check that both operands are internalized strings.
3456  __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
3457  __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
3458  __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
3459  __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
3460  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
3461  __ or_(tmp1, tmp2);
3462  __ test(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
3463  __ j(not_zero, &miss, Label::kNear);
3464 
3465  // Internalized strings are compared by identity.
3466  Label done;
3467  __ cmp(left, right);
3468  // Make sure eax is non-zero. At this point input operands are
3469  // guaranteed to be non-zero.
3470  DCHECK(right.is(eax));
3471  __ j(not_equal, &done, Label::kNear);
3472  STATIC_ASSERT(EQUAL == 0);
3473  STATIC_ASSERT(kSmiTag == 0);
3474  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
3475  __ bind(&done);
3476  __ ret(0);
3477 
3478  __ bind(&miss);
3479  GenerateMiss(masm);
3480 }
3481 
3482 
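// --- Editorial sketch (not part of code-stubs-ia32.cc) ----------------------
// Internalized strings are unique per content, so the identity compare above
// is sufficient. The preceding type check covers both operands with a single
// test: internalized-string instance types have zero bits under both masks,
// so OR-ing the two types and masking catches a violation in either operand.
// A C++ model with the masks passed in (illustration only):
static inline bool BothInternalizedStrings(unsigned type1, unsigned type2,
                                           unsigned not_string_mask,
                                           unsigned not_internalized_mask) {
  return ((type1 | type2) & (not_string_mask | not_internalized_mask)) == 0;
}
// ----------------------------------------------------------------------------
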
3483 void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) {
3484  DCHECK(state() == CompareICState::UNIQUE_NAME);
3485  DCHECK(GetCondition() == equal);
3486 
3487  // Registers containing left and right operands respectively.
3488  Register left = edx;
3489  Register right = eax;
3490  Register tmp1 = ecx;
3491  Register tmp2 = ebx;
3492 
3493  // Check that both operands are heap objects.
3494  Label miss;
3495  __ mov(tmp1, left);
3496  STATIC_ASSERT(kSmiTag == 0);
3497  __ and_(tmp1, right);
3498  __ JumpIfSmi(tmp1, &miss, Label::kNear);
3499 
3500  // Check that both operands are unique names. This leaves the instance
3501  // types loaded in tmp1 and tmp2.
3502  __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
3503  __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
3504  __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
3505  __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
3506 
3507  __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear);
3508  __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear);
3509 
3510  // Unique names are compared by identity.
3511  Label done;
3512  __ cmp(left, right);
3513  // Make sure eax is non-zero. At this point input operands are
3514  // guaranteed to be non-zero.
3515  DCHECK(right.is(eax));
3516  __ j(not_equal, &done, Label::kNear);
3517  STATIC_ASSERT(EQUAL == 0);
3518  STATIC_ASSERT(kSmiTag == 0);
3519  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
3520  __ bind(&done);
3521  __ ret(0);
3522 
3523  __ bind(&miss);
3524  GenerateMiss(masm);
3525 }
3526 
3527 
3528 void CompareICStub::GenerateStrings(MacroAssembler* masm) {
3529  DCHECK(state() == CompareICState::STRING);
3530  Label miss;
3531 
3532  bool equality = Token::IsEqualityOp(op());
3533 
3534  // Registers containing left and right operands respectively.
3535  Register left = edx;
3536  Register right = eax;
3537  Register tmp1 = ecx;
3538  Register tmp2 = ebx;
3539  Register tmp3 = edi;
3540 
3541  // Check that both operands are heap objects.
3542  __ mov(tmp1, left);
3543  STATIC_ASSERT(kSmiTag == 0);
3544  __ and_(tmp1, right);
3545  __ JumpIfSmi(tmp1, &miss);
3546 
3547  // Check that both operands are strings. This leaves the instance
3548  // types loaded in tmp1 and tmp2.
3549  __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
3550  __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
3551  __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
3552  __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
3553  __ mov(tmp3, tmp1);
3554  STATIC_ASSERT(kNotStringTag != 0);
3555  __ or_(tmp3, tmp2);
3556  __ test(tmp3, Immediate(kIsNotStringMask));
3557  __ j(not_zero, &miss);
3558 
3559  // Fast check for identical strings.
3560  Label not_same;
3561  __ cmp(left, right);
3562  __ j(not_equal, &not_same, Label::kNear);
3563  STATIC_ASSERT(EQUAL == 0);
3564  STATIC_ASSERT(kSmiTag == 0);
3565  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
3566  __ ret(0);
3567 
3568  // Handle not identical strings.
3569  __ bind(&not_same);
3570 
3571  // Check that both strings are internalized. If they are, we're done
3572  // because we already know they are not identical. But in the case of
3573  // non-equality compare, we still need to determine the order. We
3574  // also know they are both strings.
3575  if (equality) {
3576  Label do_compare;
3577  STATIC_ASSERT(kInternalizedTag == 0);
3578  __ or_(tmp1, tmp2);
3579  __ test(tmp1, Immediate(kIsNotInternalizedMask));
3580  __ j(not_zero, &do_compare, Label::kNear);
3581  // Make sure eax is non-zero. At this point input operands are
3582  // guaranteed to be non-zero.
3583  DCHECK(right.is(eax));
3584  __ ret(0);
3585  __ bind(&do_compare);
3586  }
3587 
3588  // Check that both strings are sequential one-byte.
3589  Label runtime;
3590  __ JumpIfNotBothSequentialOneByteStrings(left, right, tmp1, tmp2, &runtime);
3591 
3592  // Compare flat one-byte strings. Returns when done.
3593  if (equality) {
3594  StringHelper::GenerateFlatOneByteStringEquals(masm, left, right, tmp1,
3595  tmp2);
3596  } else {
3597  StringHelper::GenerateCompareFlatOneByteStrings(masm, left, right, tmp1,
3598  tmp2, tmp3);
3599  }
3600 
3601  // Handle more complex cases in runtime.
3602  __ bind(&runtime);
3603  __ pop(tmp1); // Return address.
3604  __ push(left);
3605  __ push(right);
3606  __ push(tmp1);
3607  if (equality) {
3608  __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
3609  } else {
3610  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
3611  }
3612 
3613  __ bind(&miss);
3614  GenerateMiss(masm);
3615 }
3616 
3617 
3618 void CompareICStub::GenerateObjects(MacroAssembler* masm) {
3619  DCHECK(state() == CompareICState::OBJECT);
3620  Label miss;
3621  __ mov(ecx, edx);
3622  __ and_(ecx, eax);
3623  __ JumpIfSmi(ecx, &miss, Label::kNear);
3624 
3625  __ CmpObjectType(eax, JS_OBJECT_TYPE, ecx);
3626  __ j(not_equal, &miss, Label::kNear);
3627  __ CmpObjectType(edx, JS_OBJECT_TYPE, ecx);
3628  __ j(not_equal, &miss, Label::kNear);
3629 
3630  DCHECK(GetCondition() == equal);
3631  __ sub(eax, edx);
3632  __ ret(0);
3633 
3634  __ bind(&miss);
3635  GenerateMiss(masm);
3636 }
3637 
3638 
3639 void CompareICStub::GenerateKnownObjects(MacroAssembler* masm) {
3640  Label miss;
3641  __ mov(ecx, edx);
3642  __ and_(ecx, eax);
3643  __ JumpIfSmi(ecx, &miss, Label::kNear);
3644 
3645  __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
3646  __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
3647  __ cmp(ecx, known_map_);
3648  __ j(not_equal, &miss, Label::kNear);
3649  __ cmp(ebx, known_map_);
3650  __ j(not_equal, &miss, Label::kNear);
3651 
3652  __ sub(eax, edx);
3653  __ ret(0);
3654 
3655  __ bind(&miss);
3656  GenerateMiss(masm);
3657 }
3658 
3659 
3660 void CompareICStub::GenerateMiss(MacroAssembler* masm) {
3661  {
3662  // Call the runtime system in a fresh internal frame.
3663  ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss),
3664  isolate());
3665  FrameScope scope(masm, StackFrame::INTERNAL);
3666  __ push(edx); // Preserve edx and eax.
3667  __ push(eax);
3668  __ push(edx); // And also use them as the arguments.
3669  __ push(eax);
3670  __ push(Immediate(Smi::FromInt(op())));
3671  __ CallExternalReference(miss, 3);
3672  // Compute the entry point of the rewritten stub.
3673  __ lea(edi, FieldOperand(eax, Code::kHeaderSize));
3674  __ pop(eax);
3675  __ pop(edx);
3676  }
3677 
3678  // Do a tail call to the rewritten stub.
3679  __ jmp(edi);
3680 }
3681 
3682 
3683 // Helper function used to check that the dictionary doesn't contain
3684 // the property. This function may return false negatives, so miss_label
3685 // must always call a backup property check that is complete.
3686 // This function is safe to call if the receiver has fast properties.
3687 // Name must be a unique name and receiver must be a heap object.
3688 void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
3689  Label* miss,
3690  Label* done,
3691  Register properties,
3692  Handle<Name> name,
3693  Register r0) {
3694  DCHECK(name->IsUniqueName());
3695 
3696  // If the names in the slots probed for the hash value (probes 1 through
3697  // kProbes - 1) are not equal to the name, and the kProbes-th slot is not
3698  // used (its name is the undefined value), the hash table is guaranteed
3699  // not to contain the property. This holds even if some slots represent
3700  // deleted properties (their names are the hole value).
3701  for (int i = 0; i < kInlinedProbes; i++) {
3702  // Compute the masked index: (hash + i + i * i) & mask.
3703  Register index = r0;
3704  // Capacity is smi 2^n.
3705  __ mov(index, FieldOperand(properties, kCapacityOffset));
3706  __ dec(index);
3707  __ and_(index,
3708  Immediate(Smi::FromInt(name->Hash() +
3709  NameDictionary::GetProbeOffset(i))));
3710 
3711  // Scale the index by multiplying by the entry size.
3712  DCHECK(NameDictionary::kEntrySize == 3);
3713  __ lea(index, Operand(index, index, times_2, 0)); // index *= 3.
3714  Register entity_name = r0;
3715  // Having undefined at this place means the name is not contained.
3716  DCHECK_EQ(kSmiTagSize, 1);
3717  __ mov(entity_name, Operand(properties, index, times_half_pointer_size,
3718  kElementsStartOffset - kHeapObjectTag));
3719  __ cmp(entity_name, masm->isolate()->factory()->undefined_value());
3720  __ j(equal, done);
3721 
3722  // Stop if found the property.
3723  __ cmp(entity_name, Handle<Name>(name));
3724  __ j(equal, miss);
3725 
3726  Label good;
3727  // Check for the hole and skip.
3728  __ cmp(entity_name, masm->isolate()->factory()->the_hole_value());
3729  __ j(equal, &good, Label::kNear);
3730 
3731  // Check if the entry name is not a unique name.
3732  __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
3733  __ JumpIfNotUniqueNameInstanceType(
3734  FieldOperand(entity_name, Map::kInstanceTypeOffset), miss);
3735  __ bind(&good);
3736  }
3737 
3738  NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0,
3739  NEGATIVE_LOOKUP);
3740  __ push(Immediate(Handle<Object>(name)));
3741  __ push(Immediate(name->Hash()));
3742  __ CallStub(&stub);
3743  __ test(r0, r0);
3744  __ j(not_zero, miss);
3745  __ jmp(done);
3746 }
3747 
3748 
3749 // Probe the name dictionary in the |elements| register. Jump to the
3750 // |done| label if a property with the given name is found, leaving the
3751 // index into the dictionary in |r0|. Jump to the |miss| label
3752 // otherwise.
3753 void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
3754  Label* miss,
3755  Label* done,
3756  Register elements,
3757  Register name,
3758  Register r0,
3759  Register r1) {
3760  DCHECK(!elements.is(r0));
3761  DCHECK(!elements.is(r1));
3762  DCHECK(!name.is(r0));
3763  DCHECK(!name.is(r1));
3764 
3765  __ AssertName(name);
3766 
3767  __ mov(r1, FieldOperand(elements, kCapacityOffset));
3768  __ shr(r1, kSmiTagSize); // convert smi to int
3769  __ dec(r1);
3770 
3771  // Generate an unrolled loop that performs a few probes before
3772  // giving up. Measurements done on Gmail indicate that 2 probes
3773  // cover ~93% of loads from dictionaries.
3774  for (int i = 0; i < kInlinedProbes; i++) {
3775  // Compute the masked index: (hash + i + i * i) & mask.
3776  __ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
3777  __ shr(r0, Name::kHashShift);
3778  if (i > 0) {
3779  __ add(r0, Immediate(NameDictionary::GetProbeOffset(i)));
3780  }
3781  __ and_(r0, r1);
3782 
3783  // Scale the index by multiplying by the entry size.
3784  DCHECK(NameDictionary::kEntrySize == 3);
3785  __ lea(r0, Operand(r0, r0, times_2, 0)); // r0 = r0 * 3
3786 
3787  // Check if the key is identical to the name.
3788  __ cmp(name, Operand(elements,
3789  r0,
3790  times_4,
3791  kElementsStartOffset - kHeapObjectTag));
3792  __ j(equal, done);
3793  }
3794 
3795  NameDictionaryLookupStub stub(masm->isolate(), elements, r1, r0,
3796  POSITIVE_LOOKUP);
3797  __ push(name);
3798  __ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
3799  __ shr(r0, Name::kHashShift);
3800  __ push(r0);
3801  __ CallStub(&stub);
3802 
3803  __ test(r1, r1);
3804  __ j(zero, miss);
3805  __ jmp(done);
3806 }
3807 
3808 
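// --- Editorial sketch (not part of code-stubs-ia32.cc) ----------------------
// Both lookup helpers above probe the dictionary the same way: probe i
// inspects slot (hash + probe offset) & mask, where the capacity is a power
// of two so the mask is capacity - 1, and the slot index is then scaled by
// the three-word entry size. A C++ model of one probe, following the
// in-file comment "(hash + i + i * i) & mask" (illustration only):
static inline unsigned ProbeSlot(unsigned hash, unsigned i, unsigned mask) {
  return (hash + i + i * i) & mask;  // Masked, quadratically probed index.
}
// ----------------------------------------------------------------------------
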
3809 void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
3810  // This stub overrides SometimesSetsUpAFrame() to return false. That means
3811  // we cannot call anything that could cause a GC from this stub.
3812  // Stack frame on entry:
3813  // esp[0 * kPointerSize]: return address.
3814  // esp[1 * kPointerSize]: key's hash.
3815  // esp[2 * kPointerSize]: key.
3816  // Registers:
3817  // dictionary_: NameDictionary to probe.
3818  // result_: used as scratch.
3819  // index_: will hold the index of the entry if the lookup is successful.
3820  // It might alias with result_.
3821  // Returns:
3822  // result_ is zero if lookup failed, non-zero otherwise.
3823 
3824  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
3825 
3826  Register scratch = result();
3827 
3828  __ mov(scratch, FieldOperand(dictionary(), kCapacityOffset));
3829  __ dec(scratch);
3830  __ SmiUntag(scratch);
3831  __ push(scratch);
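// [Editorial note] The push above leaves the untagged capacity mask at
// esp[0] and shifts the incoming operands one slot down, so the probe loop
// below reads the key's hash from esp[2 * kPointerSize] and the key itself
// from esp[3 * kPointerSize].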
3832 
3833  // If the names in the slots probed for the hash value (probes 1 through
3834  // kProbes - 1) are not equal to the name, and the kProbes-th slot is not
3835  // used (its name is the undefined value), the hash table is guaranteed
3836  // not to contain the property. This holds even if some slots represent
3837  // deleted properties (their names are the hole value).
3838  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
3839  // Compute the masked index: (hash + i + i * i) & mask.
3840  __ mov(scratch, Operand(esp, 2 * kPointerSize));
3841  if (i > 0) {
3842  __ add(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
3843  }
3844  __ and_(scratch, Operand(esp, 0));
3845 
3846  // Scale the index by multiplying by the entry size.
3847  DCHECK(NameDictionary::kEntrySize == 3);
3848  __ lea(index(), Operand(scratch, scratch, times_2, 0)); // index *= 3.
3849 
3850  // Having undefined at this place means the name is not contained.
3851  DCHECK_EQ(kSmiTagSize, 1);
3852  __ mov(scratch, Operand(dictionary(), index(), times_pointer_size,
3853  kElementsStartOffset - kHeapObjectTag));
3854  __ cmp(scratch, isolate()->factory()->undefined_value());
3855  __ j(equal, &not_in_dictionary);
3856 
3857  // Stop if found the property.
3858  __ cmp(scratch, Operand(esp, 3 * kPointerSize));
3859  __ j(equal, &in_dictionary);
3860 
3861  if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
3862  // If we hit a key that is not a unique name during negative
3863  // lookup we have to bailout as this key might be equal to the
3864  // key we are looking for.
3865 
3866  // Check if the entry name is not a unique name.
3867  __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
3868  __ JumpIfNotUniqueNameInstanceType(
3869  FieldOperand(scratch, Map::kInstanceTypeOffset),
3870  &maybe_in_dictionary);
3871  }
3872  }
3873 
3874  __ bind(&maybe_in_dictionary);
3875  // If we are doing negative lookup then probing failure should be
3876  // treated as a lookup success. For positive lookup probing failure
3877  // should be treated as lookup failure.
3878  if (mode() == POSITIVE_LOOKUP) {
3879  __ mov(result(), Immediate(0));
3880  __ Drop(1);
3881  __ ret(2 * kPointerSize);
3882  }
3883 
3884  __ bind(&in_dictionary);
3885  __ mov(result(), Immediate(1));
3886  __ Drop(1);
3887  __ ret(2 * kPointerSize);
3888 
3889  __ bind(&not_in_dictionary);
3890  __ mov(result(), Immediate(0));
3891  __ Drop(1);
3892  __ ret(2 * kPointerSize);
3893 }
3894 
3895 
3896 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
3897  Isolate* isolate) {
3898  StoreBufferOverflowStub stub(isolate, kDontSaveFPRegs);
3899  stub.GetCode();
3900  StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
3901  stub2.GetCode();
3902 }
3903 
3904 
3905 // Takes the input in 3 registers: address_, value_, and object_. A pointer to
3906 // the value has just been written into the object, now this stub makes sure
3907 // we keep the GC informed. The word in the object where the value has been
3908 // written is in the address register.
3909 void RecordWriteStub::Generate(MacroAssembler* masm) {
3910  Label skip_to_incremental_noncompacting;
3911  Label skip_to_incremental_compacting;
3912 
3913  // The first two instructions are generated with labels so as to get the
3914  // offsets fixed up correctly by the bind(Label*) calls. We patch them back
3915  // and forth between compare instructions (nops in this position) and the
3916  // real branches when we start and stop incremental heap marking.
3917  __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
3918  __ jmp(&skip_to_incremental_compacting, Label::kFar);
3919 
3920  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
3921  __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
3922  MacroAssembler::kReturnAtEnd);
3923  } else {
3924  __ ret(0);
3925  }
3926 
3927  __ bind(&skip_to_incremental_noncompacting);
3928  GenerateIncremental(masm, INCREMENTAL);
3929 
3930  __ bind(&skip_to_incremental_compacting);
3931  GenerateIncremental(masm, INCREMENTAL_COMPACTION);
3932 
3933  // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
3934  // Will be checked in IncrementalMarking::ActivateGeneratedStub.
3935  masm->set_byte_at(0, kTwoByteNopInstruction);
3936  masm->set_byte_at(2, kFiveByteNopInstruction);
3937 }
3938 
3939 
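// --- Editorial note (not part of code-stubs-ia32.cc) ------------------------
// The patching above relies on a short jmp and a near jmp each having a
// same-length compare-with-immediate counterpart (the kTwoByteNopInstruction
// and kFiveByteNopInstruction opcode bytes written by set_byte_at): with the
// compare opcode in place, the former jump offset is consumed as an
// immediate operand, so the instruction only clobbers flags and execution
// falls through while the stub is in STORE_BUFFER_ONLY mode. Activating
// incremental marking patches the opcode bytes back into real jumps.
// ----------------------------------------------------------------------------
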
3940 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
3941  regs_.Save(masm);
3942 
3943  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
3944  Label dont_need_remembered_set;
3945 
3946  __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
3947  __ JumpIfNotInNewSpace(regs_.scratch0(), // Value.
3948  regs_.scratch0(),
3949  &dont_need_remembered_set);
3950 
3951  __ CheckPageFlag(regs_.object(),
3952  regs_.scratch0(),
3953  1 << MemoryChunk::SCAN_ON_SCAVENGE,
3954  not_zero,
3955  &dont_need_remembered_set);
3956 
3957  // First notify the incremental marker if necessary, then update the
3958  // remembered set.
3959  CheckNeedsToInformIncrementalMarker(
3960  masm,
3961  kUpdateRememberedSetOnNoNeedToInformIncrementalMarker,
3962  mode);
3963  InformIncrementalMarker(masm);
3964  regs_.Restore(masm);
3965  __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
3966  MacroAssembler::kReturnAtEnd);
3967 
3968  __ bind(&dont_need_remembered_set);
3969  }
3970 
3971  CheckNeedsToInformIncrementalMarker(
3972  masm,
3973  kReturnOnNoNeedToInformIncrementalMarker,
3974  mode);
3975  InformIncrementalMarker(masm);
3976  regs_.Restore(masm);
3977  __ ret(0);
3978 }
3979 
3980 
3981 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
3982  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
3983  int argument_count = 3;
3984  __ PrepareCallCFunction(argument_count, regs_.scratch0());
3985  __ mov(Operand(esp, 0 * kPointerSize), regs_.object());
3986  __ mov(Operand(esp, 1 * kPointerSize), regs_.address()); // Slot.
3987  __ mov(Operand(esp, 2 * kPointerSize),
3988  Immediate(ExternalReference::isolate_address(isolate())));
3989 
3990  AllowExternalCallThatCantCauseGC scope(masm);
3991  __ CallCFunction(
3992  ExternalReference::incremental_marking_record_write_function(isolate()),
3993  argument_count);
3994 
3995  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
3996 }
3997 
3998 
3999 void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
4000  MacroAssembler* masm,
4001  OnNoNeedToInformIncrementalMarker on_no_need,
4002  Mode mode) {
4003  Label object_is_black, need_incremental, need_incremental_pop_object;
4004 
4005  __ mov(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
4006  __ and_(regs_.scratch0(), regs_.object());
4007  __ mov(regs_.scratch1(),
4008  Operand(regs_.scratch0(),
4009  MemoryChunk::kWriteBarrierCounterOffset));
4010  __ sub(regs_.scratch1(), Immediate(1));
4011  __ mov(Operand(regs_.scratch0(),
4012  MemoryChunk::kWriteBarrierCounterOffset),
4013  regs_.scratch1());
4014  __ j(negative, &need_incremental);
4015 
4016  // Let's look at the color of the object: If it is not black we don't have
4017  // to inform the incremental marker.
4018  __ JumpIfBlack(regs_.object(),
4019  regs_.scratch0(),
4020  regs_.scratch1(),
4021  &object_is_black,
4022  Label::kNear);
4023 
4024  regs_.Restore(masm);
4025  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
4026  __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
4027  MacroAssembler::kReturnAtEnd);
4028  } else {
4029  __ ret(0);
4030  }
4031 
4032  __ bind(&object_is_black);
4033 
4034  // Get the value from the slot.
4035  __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
4036 
4037  if (mode == INCREMENTAL_COMPACTION) {
4038  Label ensure_not_white;
4039 
4040  __ CheckPageFlag(regs_.scratch0(), // Contains value.
4041  regs_.scratch1(), // Scratch.
4042  MemoryChunk::kEvacuationCandidateMask,
4043  zero,
4044  &ensure_not_white,
4045  Label::kNear);
4046 
4047  __ CheckPageFlag(regs_.object(),
4048  regs_.scratch1(), // Scratch.
4049  MemoryChunk::kSkipEvacuationSlotsRecordingMask,
4050  not_zero,
4051  &ensure_not_white,
4052  Label::kNear);
4053 
4054  __ jmp(&need_incremental);
4055 
4056  __ bind(&ensure_not_white);
4057  }
4058 
4059  // We need an extra register for this, so we push the object register
4060  // temporarily.
4061  __ push(regs_.object());
4062  __ EnsureNotWhite(regs_.scratch0(), // The value.
4063  regs_.scratch1(), // Scratch.
4064  regs_.object(), // Scratch.
4065  &need_incremental_pop_object,
4066  Label::kNear);
4067  __ pop(regs_.object());
4068 
4069  regs_.Restore(masm);
4070  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
4071  __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
4072  MacroAssembler::kReturnAtEnd);
4073  } else {
4074  __ ret(0);
4075  }
4076 
4077  __ bind(&need_incremental_pop_object);
4078  __ pop(regs_.object());
4079 
4080  __ bind(&need_incremental);
4081 
4082  // Fall through when we need to inform the incremental marker.
4083 }
4084 
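// --- Editorial note (not part of code-stubs-ia32.cc) ------------------------
// The counter logic at the top of CheckNeedsToInformIncrementalMarker masks
// the object's address down to its page header (~Page::kPageAlignmentMask),
// decrements the page's write barrier counter in place, and only takes the
// need_incremental path once that counter goes negative; otherwise the
// black-object and evacuation-candidate checks decide whether the marker
// needs to be informed at all.
// ----------------------------------------------------------------------------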
4085 
4086 void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
4087  // ----------- S t a t e -------------
4088  // -- eax : element value to store
4089  // -- ecx : element index as smi
4090  // -- esp[0] : return address
4091  // -- esp[4] : array literal index in function
4092  // -- esp[8] : array literal
4093  // clobbers ebx, edx, edi
4094  // -----------------------------------
4095 
4096  Label element_done;
4097  Label double_elements;
4098  Label smi_element;
4099  Label slow_elements;
4100  Label slow_elements_from_double;
4101  Label fast_elements;
4102 
4103  // Get array literal index, array literal and its map.
4104  __ mov(edx, Operand(esp, 1 * kPointerSize));
4105  __ mov(ebx, Operand(esp, 2 * kPointerSize));
4106  __ mov(edi, FieldOperand(ebx, JSObject::kMapOffset));
4107 
4108  __ CheckFastElements(edi, &double_elements);
4109 
4110  // Check for FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS elements
4111  __ JumpIfSmi(eax, &smi_element);
4112  __ CheckFastSmiElements(edi, &fast_elements, Label::kNear);
4113 
4114  // A store into the array literal requires an elements transition. Call
4115  // into the runtime.
4116 
4117  __ bind(&slow_elements);
4118  __ pop(edi); // Pop return address and remember to put back later for tail
4119  // call.
4120  __ push(ebx);
4121  __ push(ecx);
4122  __ push(eax);
4123  __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
4124  __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
4125  __ push(edx);
4126  __ push(edi); // Return return address so that tail call returns to right
4127  // place.
4128  __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
4129 
4130  __ bind(&slow_elements_from_double);
4131  __ pop(edx);
4132  __ jmp(&slow_elements);
4133 
4134  // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object.
4135  __ bind(&fast_elements);
4136  __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
4137  __ lea(ecx, FieldOperand(ebx, ecx, times_half_pointer_size,
4138  FixedArrayBase::kHeaderSize));
4139  __ mov(Operand(ecx, 0), eax);
4140  // Update the write barrier for the array store.
4141  __ RecordWrite(ebx, ecx, eax,
4142  kDontSaveFPRegs,
4143  EMIT_REMEMBERED_SET,
4144  OMIT_SMI_CHECK);
4145  __ ret(0);
4146 
4147  // Array literal has ElementsKind of FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS,
4148  // and value is Smi.
4149  __ bind(&smi_element);
4150  __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
4151  __ mov(FieldOperand(ebx, ecx, times_half_pointer_size,
4152  FixedArrayBase::kHeaderSize), eax);
4153  __ ret(0);
4154 
4155  // Array literal has ElementsKind of FAST_*_DOUBLE_ELEMENTS.
4156  __ bind(&double_elements);
4157 
4158  __ push(edx);
4159  __ mov(edx, FieldOperand(ebx, JSObject::kElementsOffset));
4160  __ StoreNumberToDoubleElements(eax,
4161  edx,
4162  ecx,
4163  edi,
4164  xmm0,
4165  &slow_elements_from_double);
4166  __ pop(edx);
4167  __ ret(0);
4168 }
4169 
4170 
4171 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
4172  CEntryStub ces(isolate(), 1, kSaveFPRegs);
4173  __ call(ces.GetCode(), RelocInfo::CODE_TARGET);
4174  int parameter_count_offset =
4175  StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
4176  __ mov(ebx, MemOperand(ebp, parameter_count_offset));
4177  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
4178  __ pop(ecx);
4179  int additional_offset =
4180  function_mode() == JS_FUNCTION_STUB_MODE ? kPointerSize : 0;
4181  __ lea(esp, MemOperand(esp, ebx, times_pointer_size, additional_offset));
4182  __ jmp(ecx); // Return to IC Miss stub, continuation still on stack.
4183 }
4184 
4185 
4186 void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
4187  EmitLoadTypeFeedbackVector(masm, VectorLoadICDescriptor::VectorRegister());
4188  VectorLoadStub stub(isolate(), state());
4189  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
4190 }
4191 
4192 
4193 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
4194  EmitLoadTypeFeedbackVector(masm, VectorLoadICDescriptor::VectorRegister());
4195  VectorKeyedLoadStub stub(isolate());
4196  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
4197 }
4198 
4199 
4200 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
4201  if (masm->isolate()->function_entry_hook() != NULL) {
4202  ProfileEntryHookStub stub(masm->isolate());
4203  masm->CallStub(&stub);
4204  }
4205 }
4206 
4207 
4208 void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
4209  // Save volatile registers.
4210  const int kNumSavedRegisters = 3;
4211  __ push(eax);
4212  __ push(ecx);
4213  __ push(edx);
4214 
4215  // Calculate and push the original stack pointer.
4216  __ lea(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
4217  __ push(eax);
4218 
4219  // Retrieve our return address and use it to calculate the calling
4220  // function's address.
4221  __ mov(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
4222  __ sub(eax, Immediate(Assembler::kCallInstructionLength));
4223  __ push(eax);
4224 
4225  // Call the entry hook.
4226  DCHECK(isolate()->function_entry_hook() != NULL);
4227  __ call(FUNCTION_ADDR(isolate()->function_entry_hook()),
4228  RelocInfo::RUNTIME_ENTRY);
4229  __ add(esp, Immediate(2 * kPointerSize));
4230 
4231  // Restore the volatile registers.
4232  __ pop(edx);
4233  __ pop(ecx);
4234  __ pop(eax);
4235 
4236  __ ret(0);
4237 }
4238 
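// --- Editorial note (not part of code-stubs-ia32.cc) ------------------------
// The entry hook receives the two values pushed above: the address of the
// call site in the instrumented function, recovered by subtracting
// Assembler::kCallInstructionLength from the stub's return address, and the
// caller's original stack pointer, reconstructed by stepping over the three
// saved registers plus the return address.
// ----------------------------------------------------------------------------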
4239 
4240 template<class T>
4241 static void CreateArrayDispatch(MacroAssembler* masm,
4242  AllocationSiteOverrideMode mode) {
4243  if (mode == DISABLE_ALLOCATION_SITES) {
4244  T stub(masm->isolate(),
4245  GetInitialFastElementsKind(),
4246  mode);
4247  __ TailCallStub(&stub);
4248  } else if (mode == DONT_OVERRIDE) {
4249  int last_index = GetSequenceIndexFromFastElementsKind(
4250  TERMINAL_FAST_ELEMENTS_KIND);
4251  for (int i = 0; i <= last_index; ++i) {
4252  Label next;
4253  ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4254  __ cmp(edx, kind);
4255  __ j(not_equal, &next);
4256  T stub(masm->isolate(), kind);
4257  __ TailCallStub(&stub);
4258  __ bind(&next);
4259  }
4260 
4261  // If we reached this point there is a problem.
4262  __ Abort(kUnexpectedElementsKindInArrayConstructor);
4263  } else {
4264  UNREACHABLE();
4265  }
4266 }
4267 
4268 
4269 static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
4270  AllocationSiteOverrideMode mode) {
4271  // ebx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
4272  // edx - kind (if mode != DISABLE_ALLOCATION_SITES)
4273  // eax - number of arguments
4274  // edi - constructor?
4275  // esp[0] - return address
4276  // esp[4] - last argument
4277  Label normal_sequence;
4278  if (mode == DONT_OVERRIDE) {
4279  DCHECK(FAST_SMI_ELEMENTS == 0);
4280  DCHECK(FAST_HOLEY_SMI_ELEMENTS == 1);
4281  DCHECK(FAST_ELEMENTS == 2);
4282  DCHECK(FAST_HOLEY_ELEMENTS == 3);
4283  DCHECK(FAST_DOUBLE_ELEMENTS == 4);
4284  DCHECK(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
4285 
4286  // Is the low bit set? If so, we are holey and that is good.
4287  __ test_b(edx, 1);
4288  __ j(not_zero, &normal_sequence);
4289  }
4290 
4291  // look at the first argument
4292  __ mov(ecx, Operand(esp, kPointerSize));
4293  __ test(ecx, ecx);
4294  __ j(zero, &normal_sequence);
4295 
4296  if (mode == DISABLE_ALLOCATION_SITES) {
4297  ElementsKind initial = GetInitialFastElementsKind();
4298  ElementsKind holey_initial = GetHoleyElementsKind(initial);
4299 
4300  ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
4301  holey_initial,
4302  DISABLE_ALLOCATION_SITES);
4303  __ TailCallStub(&stub_holey);
4304 
4305  __ bind(&normal_sequence);
4306  ArraySingleArgumentConstructorStub stub(masm->isolate(),
4307  initial,
4308  DISABLE_ALLOCATION_SITES);
4309  __ TailCallStub(&stub);
4310  } else if (mode == DONT_OVERRIDE) {
4311  // We are going to create a holey array, but our kind is non-holey.
4312  // Fix kind and retry.
4313  __ inc(edx);
4314 
4315  if (FLAG_debug_code) {
4316  Handle<Map> allocation_site_map =
4317  masm->isolate()->factory()->allocation_site_map();
4318  __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
4319  __ Assert(equal, kExpectedAllocationSite);
4320  }
4321 
4322  // Save the resulting elements kind in type info. We can't just store edx
4323  // in the AllocationSite::transition_info field because elements kind is
4324  // restricted to a portion of the field; the upper bits must be left alone.
4325  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
4326  __ add(FieldOperand(ebx, AllocationSite::kTransitionInfoOffset),
4327  Immediate(Smi::FromInt(kFastElementsKindPackedToHoley)));
4328 
4329  __ bind(&normal_sequence);
4330  int last_index = GetSequenceIndexFromFastElementsKind(
4331  TERMINAL_FAST_ELEMENTS_KIND);
4332  for (int i = 0; i <= last_index; ++i) {
4333  Label next;
4334  ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4335  __ cmp(edx, kind);
4336  __ j(not_equal, &next);
4337  ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
4338  __ TailCallStub(&stub);
4339  __ bind(&next);
4340  }
4341 
4342  // If we reached this point there is a problem.
4343  __ Abort(kUnexpectedElementsKindInArrayConstructor);
4344  } else {
4345  UNREACHABLE();
4346  }
4347 }
4348 
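// --- Editorial sketch (not part of code-stubs-ia32.cc) ----------------------
// The DCHECKs in CreateArrayDispatchOneArgument pin down the fast
// ElementsKind numbering: each packed kind sits on an even value with its
// holey variant one above it. That layout is what makes the test_b(edx, 1)
// holeyness check and the inc(edx) packed-to-holey transition valid. A C++
// model (illustration only):
static inline int ToHoleyKindModel(int packed_kind) {
  // Packed kinds are even, so setting the low bit selects the matching
  // holey kind (e.g. FAST_ELEMENTS(2) -> FAST_HOLEY_ELEMENTS(3)).
  return packed_kind | 1;
}
// ----------------------------------------------------------------------------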
4349 
4350 template<class T>
4351 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
4352  int to_index = GetSequenceIndexFromFastElementsKind(
4353  TERMINAL_FAST_ELEMENTS_KIND);
4354  for (int i = 0; i <= to_index; ++i) {
4355  ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4356  T stub(isolate, kind);
4357  stub.GetCode();
4358  if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
4359  T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
4360  stub1.GetCode();
4361  }
4362  }
4363 }
4364 
4365 
4366 void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
4367  ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
4368  isolate);
4369  ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
4370  isolate);
4371  ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
4372  isolate);
4373 }
4374 
4375 
4376 void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
4377  Isolate* isolate) {
4378  ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
4379  for (int i = 0; i < 2; i++) {
4380  // For internal arrays we only need a few things
4381  InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
4382  stubh1.GetCode();
4383  InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
4384  stubh2.GetCode();
4385  InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
4386  stubh3.GetCode();
4387  }
4388 }
4389 
4390 
4391 void ArrayConstructorStub::GenerateDispatchToArrayStub(
4392  MacroAssembler* masm,
4393  AllocationSiteOverrideMode mode) {
4394  if (argument_count() == ANY) {
4395  Label not_zero_case, not_one_case;
4396  __ test(eax, eax);
4397  __ j(not_zero, &not_zero_case);
4398  CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
4399 
4400  __ bind(&not_zero_case);
4401  __ cmp(eax, 1);
4402  __ j(greater, &not_one_case);
4403  CreateArrayDispatchOneArgument(masm, mode);
4404 
4405  __ bind(&not_one_case);
4406  CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
4407  } else if (argument_count() == NONE) {
4408  CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
4409  } else if (argument_count() == ONE) {
4410  CreateArrayDispatchOneArgument(masm, mode);
4411  } else if (argument_count() == MORE_THAN_ONE) {
4412  CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
4413  } else {
4414  UNREACHABLE();
4415  }
4416 }
4417 
4418 
4419 void ArrayConstructorStub::Generate(MacroAssembler* masm) {
4420  // ----------- S t a t e -------------
4421  // -- eax : argc (only if argument_count() == ANY)
4422  // -- ebx : AllocationSite or undefined
4423  // -- edi : constructor
4424  // -- esp[0] : return address
4425  // -- esp[4] : last argument
4426  // -----------------------------------
4427  if (FLAG_debug_code) {
4428  // The array construct code is only set for the global and natives
4429  // builtin Array functions which always have maps.
4430 
4431  // Initial map for the builtin Array function should be a map.
4432  __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
4433  // Will both indicate a NULL and a Smi.
4434  __ test(ecx, Immediate(kSmiTagMask));
4435  __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
4436  __ CmpObjectType(ecx, MAP_TYPE, ecx);
4437  __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
4438 
4439  // We should either have undefined in ebx or a valid AllocationSite
4440  __ AssertUndefinedOrAllocationSite(ebx);
4441  }
4442 
4443  Label no_info;
4444  // If the feedback vector is the undefined value, call an array
4445  // constructor that doesn't use AllocationSites.
4446  __ cmp(ebx, isolate()->factory()->undefined_value());
4447  __ j(equal, &no_info);
4448 
4449  // Only look at the lower 16 bits of the transition info.
4450  __ mov(edx, FieldOperand(ebx, AllocationSite::kTransitionInfoOffset));
4451  __ SmiUntag(edx);
4452  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
4453  __ and_(edx, Immediate(AllocationSite::ElementsKindBits::kMask));
4454  GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
4455 
4456  __ bind(&no_info);
4457  GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
4458 }
4459 
4460 
4461 void InternalArrayConstructorStub::GenerateCase(
4462  MacroAssembler* masm, ElementsKind kind) {
4463  Label not_zero_case, not_one_case;
4464  Label normal_sequence;
4465 
4466  __ test(eax, eax);
4467  __ j(not_zero, &not_zero_case);
4468  InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
4469  __ TailCallStub(&stub0);
4470 
4471  __ bind(&not_zero_case);
4472  __ cmp(eax, 1);
4473  __ j(greater, &not_one_case);
4474 
4475  if (IsFastPackedElementsKind(kind)) {
4476  // We might need to create a holey array
4477  // look at the first argument
4478  __ mov(ecx, Operand(esp, kPointerSize));
4479  __ test(ecx, ecx);
4480  __ j(zero, &normal_sequence);
4481 
4482  InternalArraySingleArgumentConstructorStub
4483  stub1_holey(isolate(), GetHoleyElementsKind(kind));
4484  __ TailCallStub(&stub1_holey);
4485  }
4486 
4487  __ bind(&normal_sequence);
4488  InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
4489  __ TailCallStub(&stub1);
4490 
4491  __ bind(&not_one_case);
4492  InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
4493  __ TailCallStub(&stubN);
4494 }
4495 
4496 
4497 void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
4498  // ----------- S t a t e -------------
4499  // -- eax : argc
4500  // -- edi : constructor
4501  // -- esp[0] : return address
4502  // -- esp[4] : last argument
4503  // -----------------------------------
4504 
4505  if (FLAG_debug_code) {
4506  // The array construct code is only set for the global and natives
4507  // builtin Array functions which always have maps.
4508 
4509  // Initial map for the builtin Array function should be a map.
4510  __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
4511  // Will both indicate a NULL and a Smi.
4512  __ test(ecx, Immediate(kSmiTagMask));
4513  __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
4514  __ CmpObjectType(ecx, MAP_TYPE, ecx);
4515  __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
4516  }
4517 
4518  // Figure out the right elements kind.
4519  __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
4520 
4521  // Load the map's "bit field 2" into |result|. We only need the first byte,
4522  // but the following masking takes care of that anyway.
4523  __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
4524  // Retrieve elements_kind from bit field 2.
4525  __ DecodeField<Map::ElementsKindBits>(ecx);
4526 
4527  if (FLAG_debug_code) {
4528  Label done;
4529  __ cmp(ecx, Immediate(FAST_ELEMENTS));
4530  __ j(equal, &done);
4531  __ cmp(ecx, Immediate(FAST_HOLEY_ELEMENTS));
4532  __ Assert(equal,
4533  kInvalidElementsKindForInternalArrayOrInternalPackedArray);
4534  __ bind(&done);
4535  }
4536 
4537  Label fast_elements_case;
4538  __ cmp(ecx, Immediate(FAST_ELEMENTS));
4539  __ j(equal, &fast_elements_case);
4540  GenerateCase(masm, FAST_HOLEY_ELEMENTS);
4541 
4542  __ bind(&fast_elements_case);
4543  GenerateCase(masm, FAST_ELEMENTS);
4544 }
4545 
4546 
4547 void CallApiFunctionStub::Generate(MacroAssembler* masm) {
4548  // ----------- S t a t e -------------
4549  // -- eax : callee
4550  // -- ebx : call_data
4551  // -- ecx : holder
4552  // -- edx : api_function_address
4553  // -- esi : context
4554  // --
4555  // -- esp[0] : return address
4556  // -- esp[4] : last argument
4557  // -- ...
4558  // -- esp[argc * 4] : first argument
4559  // -- esp[(argc + 1) * 4] : receiver
4560  // -----------------------------------
4561 
4562  Register callee = eax;
4563  Register call_data = ebx;
4564  Register holder = ecx;
4565  Register api_function_address = edx;
4566  Register return_address = edi;
4567  Register context = esi;
4568 
4569  int argc = this->argc();
4570  bool is_store = this->is_store();
4571  bool call_data_undefined = this->call_data_undefined();
4572 
4573  typedef FunctionCallbackArguments FCA;
4574 
4575  STATIC_ASSERT(FCA::kContextSaveIndex == 6);
4576  STATIC_ASSERT(FCA::kCalleeIndex == 5);
4577  STATIC_ASSERT(FCA::kDataIndex == 4);
4578  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
4579  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
4580  STATIC_ASSERT(FCA::kIsolateIndex == 1);
4581  STATIC_ASSERT(FCA::kHolderIndex == 0);
4582  STATIC_ASSERT(FCA::kArgsLength == 7);
4583 
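// --- Editorial note (not part of code-stubs-ia32.cc) ------------------------
// The pushes below build the FunctionCallbackArguments block so that, once
// the holder goes on last, the stack matches the indices asserted above:
// esp[0] holder, esp[4] isolate, esp[8] return value default,
// esp[12] return value, esp[16] call data, esp[20] callee,
// esp[24] saved context.
// ----------------------------------------------------------------------------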
4584  __ pop(return_address);
4585 
4586  // context save
4587  __ push(context);
4588  // load context from callee
4589  __ mov(context, FieldOperand(callee, JSFunction::kContextOffset));
4590 
4591  // callee
4592  __ push(callee);
4593 
4594  // call data
4595  __ push(call_data);
4596 
4597  Register scratch = call_data;
4598  if (!call_data_undefined) {
4599  // return value
4600  __ push(Immediate(isolate()->factory()->undefined_value()));
4601  // return value default
4602  __ push(Immediate(isolate()->factory()->undefined_value()));
4603  } else {
4604  // return value
4605  __ push(scratch);
4606  // return value default
4607  __ push(scratch);
4608  }
4609  // isolate
4610  __ push(Immediate(reinterpret_cast<int>(isolate())));
4611  // holder
4612  __ push(holder);
4613 
4614  __ mov(scratch, esp);
4615 
4616  // return address
4617  __ push(return_address);
4618 
4619  // The API function gets a reference to the v8::Arguments. If the CPU
4620  // profiler is enabled, a wrapper function is called instead and the
4621  // address of the callback must be passed as an additional parameter, so
4622  // always allocate space for it.
4623  const int kApiArgc = 1 + 1;
4624 
4625  // Allocate the v8::Arguments structure in the arguments' space since
4626  // it's not controlled by GC.
4627  const int kApiStackSpace = 4;
4628 
4629  __ PrepareCallApiFunction(kApiArgc + kApiStackSpace);
4630 
4631  // FunctionCallbackInfo::implicit_args_.
4632  __ mov(ApiParameterOperand(2), scratch);
4633  __ add(scratch, Immediate((argc + FCA::kArgsLength - 1) * kPointerSize));
4634  // FunctionCallbackInfo::values_.
4635  __ mov(ApiParameterOperand(3), scratch);
4636  // FunctionCallbackInfo::length_.
4637  __ Move(ApiParameterOperand(4), Immediate(argc));
4638  // FunctionCallbackInfo::is_construct_call_.
4639  __ Move(ApiParameterOperand(5), Immediate(0));
4640 
4641  // v8::InvocationCallback's argument.
4642  __ lea(scratch, ApiParameterOperand(2));
4643  __ mov(ApiParameterOperand(0), scratch);
4644 
4645  ExternalReference thunk_ref =
4646  ExternalReference::invoke_function_callback(isolate());
4647 
4648  Operand context_restore_operand(ebp,
4649  (2 + FCA::kContextSaveIndex) * kPointerSize);
4650  // Stores return the first JS argument.
4651  int return_value_offset = 0;
4652  if (is_store) {
4653  return_value_offset = 2 + FCA::kArgsLength;
4654  } else {
4655  return_value_offset = 2 + FCA::kReturnValueOffset;
4656  }
4657  Operand return_value_operand(ebp, return_value_offset * kPointerSize);
4658  __ CallApiFunctionAndReturn(api_function_address,
4659  thunk_ref,
4660  ApiParameterOperand(1),
4661  argc + FCA::kArgsLength + 1,
4662  return_value_operand,
4663  &context_restore_operand);
4664 }
4665 
4666 
4667 void CallApiGetterStub::Generate(MacroAssembler* masm) {
4668  // ----------- S t a t e -------------
4669  // -- esp[0] : return address
4670  // -- esp[4] : name
4671  // -- esp[8 - kArgsLength*4] : PropertyCallbackArguments object
4672  // -- ...
4673  // -- edx : api_function_address
4674  // -----------------------------------
4675  DCHECK(edx.is(ApiGetterDescriptor::function_address()));
4676 
4677  // Array for v8::Arguments::values_, handler for the name, and pointer
4678  // to the values (treated as a smi by the GC).
4679  const int kStackSpace = PropertyCallbackArguments::kArgsLength + 2;
4680  // Allocate space for the optional callback address parameter in case
4681  // the CPU profiler is active.
4682  const int kApiArgc = 2 + 1;
4683 
4684  Register api_function_address = edx;
4685  Register scratch = ebx;
4686 
4687  // load address of name
4688  __ lea(scratch, Operand(esp, 1 * kPointerSize));
4689 
4690  __ PrepareCallApiFunction(kApiArgc);
4691  __ mov(ApiParameterOperand(0), scratch); // name.
4692  __ add(scratch, Immediate(kPointerSize));
4693  __ mov(ApiParameterOperand(1), scratch); // arguments pointer.
4694 
4695  ExternalReference thunk_ref =
4696  ExternalReference::invoke_accessor_getter_callback(isolate());
4697 
4698  __ CallApiFunctionAndReturn(api_function_address,
4699  thunk_ref,
4700  ApiParameterOperand(2),
4701  kStackSpace,
4702  Operand(ebp, 7 * kPointerSize),
4703  NULL);
4704 }
4705 
4706 
4707 #undef __
4708 
4709 } } // namespace v8::internal
4710 
4711 #endif // V8_TARGET_ARCH_IA32
static AllocationSiteMode GetMode(ElementsKind boilerplate_elements_kind)
Definition: objects-inl.h:1591
static const int kTransitionInfoOffset
Definition: objects.h:8254
static const Register function_address()
void GenerateReadElement(MacroAssembler *masm)
void GenerateNewSloppySlow(MacroAssembler *masm)
void GenerateNewStrict(MacroAssembler *masm)
void GenerateNewSloppyFast(MacroAssembler *masm)
static void GenerateStubsAheadOfTime(Isolate *isolate)
void GenerateDispatchToArrayStub(MacroAssembler *masm, AllocationSiteOverrideMode mode)
ArgumentCountKey argument_count() const
Definition: code-stubs.h:732
static const int kCallInstructionLength
static void GenerateAheadOfTime(Isolate *isolate)
Definition: code-stubs.cc:266
static const U kShift
Definition: utils.h:204
static const U kMask
Definition: utils.h:203
bool save_doubles() const
Definition: code-stubs.h:1423
static void GenerateAheadOfTime(Isolate *isolate)
CEntryStub(Isolate *isolate, int result_size, SaveFPRegsMode save_doubles=kDontSaveFPRegs)
Definition: code-stubs.h:1406
STATIC_ASSERT(Code::kArgumentsBits+2<=kStubMinorKeyBits)
bool CallAsMethod() const
Definition: code-stubs.h:811
void GenerateMiss(MacroAssembler *masm)
virtual InlineCacheState GetICState() const OVERRIDE
Definition: code-stubs.h:804
static const int kHeaderSize
Definition: objects.h:5373
Condition GetCondition() const
Definition: code-stubs.cc:354
void GenerateInternalizedStrings(MacroAssembler *masm)
void GenerateStrings(MacroAssembler *masm)
CompareICState::State state() const
Definition: code-stubs.h:1278
Token::Value op() const
Definition: code-stubs.h:1268
void GenerateMiss(MacroAssembler *masm)
CompareICState::State left() const
Definition: code-stubs.h:1272
void GenerateGeneric(MacroAssembler *masm)
CompareICState::State right() const
Definition: code-stubs.h:1275
void GenerateObjects(MacroAssembler *masm)
CompareICStub(Isolate *isolate, Token::Value op, CompareICState::State left, CompareICState::State right, CompareICState::State state)
Definition: code-stubs.h:1256
void GenerateNumbers(MacroAssembler *masm)
void GenerateUniqueNames(MacroAssembler *masm)
void GenerateKnownObjects(MacroAssembler *masm)
void GenerateSmis(MacroAssembler *masm)
static const int kFirstOffset
Definition: objects.h:9061
static const int kMinLength
Definition: objects.h:9066
static const int kSecondOffset
Definition: objects.h:9062
static int SlotOffset(int index)
Definition: contexts.h:552
static bool IsSupported(CpuFeature f)
Definition: assembler.h:184
static void GenerateAheadOfTime(Isolate *isolate)
Definition: code-stubs.cc:725
Register source() const
Definition: code-stubs.h:1901
STATIC_ASSERT((1L<< kBitsPerRegisterNumber) >=Register::kNumRegisters)
Register destination() const
Definition: code-stubs.h:1904
static const uint64_t kSignificandMask
Definition: double.h:22
static const uint64_t kHiddenBit
Definition: double.h:24
static const int kPhysicalSignificandSize
Definition: double.h:25
static const int kMaxShortLength
Definition: objects.h:9141
static const int kResourceDataOffset
Definition: objects.h:9138
static const int kLengthOffset
Definition: objects.h:2392
static const int kHeaderSize
Definition: objects.h:2393
static const int kNativeContextOffset
Definition: objects.h:7459
static const int kEntrySize
Definition: objects.h:3276
static const int kMantissaBits
Definition: objects.h:1525
static const int kValueOffset
Definition: objects.h:1506
static const uint32_t kExponentMask
Definition: objects.h:1523
static const int kExponentBias
Definition: objects.h:1527
static const int kExponentShift
Definition: objects.h:1528
static const int kMapOffset
Definition: objects.h:1427
static const int kStrictArgumentsObjectSize
Definition: heap.h:674
static const int kSloppyArgumentsObjectSize
Definition: heap.h:671
static const int kArgumentsCalleeIndex
Definition: heap.h:679
static const int kArgumentsLengthIndex
Definition: heap.h:677
void GenerateLightweightMiss(MacroAssembler *masm, ExternalReference miss)
bool HasCallSiteInlineCheck() const
Definition: code-stubs.h:700
static Register right()
Definition: code-stubs.h:686
bool HasArgsInRegisters() const
Definition: code-stubs.h:698
static Register left()
Definition: code-stubs.h:685
bool ReturnTrueFalseObject() const
Definition: code-stubs.h:704
static void GenerateStubsAheadOfTime(Isolate *isolate)
void GenerateCase(MacroAssembler *masm, ElementsKind kind)
static const int kJSRegexpStaticOffsetsVectorSize
Definition: isolate.h:984
StackFrame::Type type() const
Definition: code-stubs.h:1454
static const int kSharedFunctionInfoOffset
Definition: objects.h:7379
static const int kContextOffset
Definition: objects.h:7381
static const int kLiteralsOffset
Definition: objects.h:7382
static const int kPrototypeOrInitialMapOffset
Definition: objects.h:7377
static const int kHeaderSize
Definition: objects.h:2195
static const int kPropertiesOffset
Definition: objects.h:2193
static const int kElementsOffset
Definition: objects.h:2194
static const int kDataOneByteCodeOffset
Definition: objects.h:7813
static const int kIrregexpCaptureCountOffset
Definition: objects.h:7817
static const int kDataTagOffset
Definition: objects.h:7811
static const int kDataOffset
Definition: objects.h:7771
static const int kDataUC16CodeOffset
Definition: objects.h:7815
static const Register ReceiverRegister()
static const Register NameRegister()
static const int kIsUndetectable
Definition: objects.h:6244
static const int kBitFieldOffset
Definition: objects.h:6228
static const int kInstanceTypeOffset
Definition: objects.h:6229
static const int kBitField2Offset
Definition: objects.h:6233
static const int kPrototypeOffset
Definition: objects.h:6190
ExponentType exponent_type() const
Definition: code-stubs.h:780
static const Register exponent()
static const size_t kWriteBarrierCounterOffset
Definition: spaces.h:536
static const int kEvacuationCandidateMask
Definition: spaces.h:398
static const int kSkipEvacuationSlotsRecordingMask
Definition: spaces.h:400
NameDictionaryLookupStub(Isolate *isolate, LookupMode mode)
static void GeneratePositiveLookup(MacroAssembler *masm, Label *miss, Label *done, Register elements, Register name, Register r0, Register r1)
static void GenerateNegativeLookup(MacroAssembler *masm, Label *miss, Label *done, Register receiver, Register properties, Handle< Name > name, Register scratch0)
static const int kHashShift
Definition: objects.h:8499
static const int kEmptyHashField
Definition: objects.h:8534
static const int kHashFieldOffset
Definition: objects.h:8486
static void GenerateLoadFunctionPrototype(MacroAssembler *masm, Register receiver, Register scratch1, Register scratch2, Label *miss_label)
static const intptr_t kPageAlignmentMask
Definition: spaces.h:757
virtual void Generate(MacroAssembler *masm)=0
ProfileEntryHookStub(Isolate *isolate)
Definition: code-stubs.h:2373
static void MaybeCallEntryHook(MacroAssembler *masm)
void SaveCallerSaveRegisters(MacroAssembler *masm, SaveFPRegsMode mode)
void RestoreCallerSaveRegisters(MacroAssembler *masm, SaveFPRegsMode mode)
void GenerateIncremental(MacroAssembler *masm, Mode mode)
void InformIncrementalMarker(MacroAssembler *masm)
RememberedSetAction remembered_set_action() const
SaveFPRegsMode save_fp_regs_mode() const
static const byte kTwoByteNopInstruction
void CheckNeedsToInformIncrementalMarker(MacroAssembler *masm, OnNoNeedToInformIncrementalMarker on_no_need, Mode mode)
static const byte kFiveByteNopInstruction
virtual void Generate(MacroAssembler *masm) OVERRIDE
static const int kLastCaptureCountOffset
Definition: jsregexp.h:168
static const int kLastSubjectOffset
Definition: jsregexp.h:170
static const int kLastMatchOverhead
Definition: jsregexp.h:165
static const int kLastInputOffset
Definition: jsregexp.h:172
static const int kFirstCaptureOffset
Definition: jsregexp.h:174
static const Function * FunctionForId(FunctionId id)
Definition: runtime.cc:9312
static const int kHeaderSize
Definition: objects.h:8941
static const int kConstructStubOffset
Definition: objects.h:6896
static const int kFeedbackVectorOffset
Definition: objects.h:6904
static const int kNativeBitWithinByte
Definition: objects.h:7046
static const int kStrictModeBitWithinByte
Definition: objects.h:7043
static const int kMinLength
Definition: objects.h:9109
static const int kParentOffset
Definition: objects.h:9104
static const int kOffsetOffset
Definition: objects.h:9105
static Smi * FromInt(int value)
Definition: objects-inl.h:1321
static const int kContextOffset
Definition: frames.h:162
static const int kCallerSPOffset
Definition: frames.h:167
static const int kCallerFPOffset
Definition: frames.h:165
static void GenerateFixedRegStubsAheadOfTime(Isolate *isolate)
StoreBufferOverflowStub(Isolate *isolate, SaveFPRegsMode save_fp)
Definition: code-stubs.h:2395
void GenerateFast(MacroAssembler *masm)
void GenerateSlow(MacroAssembler *masm, const RuntimeCallHelper &call_helper)
void GenerateFast(MacroAssembler *masm)
void GenerateSlow(MacroAssembler *masm, const RuntimeCallHelper &call_helper)
static void Generate(MacroAssembler *masm, Register string, Register index, Register result, Label *call_runtime)
static void GenerateOneByteCharsCompareLoop(MacroAssembler *masm, Register left, Register right, Register length, Register scratch1, Register scratch2, Label *chars_not_equal)
static void GenerateCompareFlatOneByteStrings(MacroAssembler *masm, Register left, Register right, Register scratch1, Register scratch2, Register scratch3, Register scratch4)
static void GenerateCopyCharacters(MacroAssembler *masm, Register dest, Register src, Register count, Register scratch, String::Encoding encoding)
static void GenerateFlatOneByteStringEquals(MacroAssembler *masm, Register left, Register right, Register scratch1, Register scratch2, Register scratch3)
static const int32_t kMaxOneByteCharCode
Definition: objects.h:8811
static const int kLengthOffset
Definition: objects.h:8802
static const int kCallerStackParameterCountFrameOffset
Definition: frames.h:755
StubFunctionMode function_mode() const
Definition: code-stubs.h:2360
static void GenerateAheadOfTime(Isolate *isolate)
Definition: code-stubs.cc:917
static bool IsOrderedRelationalCompareOp(Value op)
Definition: token.h:206
static bool IsEqualityOp(Value op)
Definition: token.h:210
static Handle< Object > UninitializedSentinel(Isolate *isolate)
static Handle< Object > MegamorphicSentinel(Isolate *isolate)
static const Register VectorRegister()
#define __
#define FUNCTION_ADDR(f)
Definition: globals.h:195
#define UNREACHABLE()
Definition: logging.h:30
#define DCHECK(condition)
Definition: logging.h:205
#define DCHECK_EQ(v1, v2)
Definition: logging.h:206
@ JUMP_FUNCTION
@ CALL_FUNCTION
@ TAG_OBJECT
bool IsPowerOfTwo32(uint32_t value)
Definition: bits.h:77
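IsPowerOfTwo32 relies on the classic bit trick that clearing the lowest set bit of a power of two leaves zero. A minimal self-contained sketch (the shipped definition is at bits.h:77):

#include <stdint.h>

// Sketch of the standard check: a power of two has exactly one bit set,
// so value & (value - 1) clears it; zero is rejected explicitly.
bool IsPowerOfTwo32(uint32_t value) {
  return value && !(value & (value - 1));
}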
const int kPointerSize
Definition: globals.h:129
const Register edx
const uint32_t kStringEncodingMask
Definition: objects.h:555
const Register edi
@ DONT_DO_SMI_CHECK
Definition: globals.h:640
ElementsKind GetFastElementsKindFromSequenceIndex(int sequence_number)
@ DONT_TRACK_ALLOCATION_SITE
Definition: objects.h:8084
@ kSeqStringTag
Definition: objects.h:563
@ kConsStringTag
Definition: objects.h:564
@ kSlicedStringTag
Definition: objects.h:566
@ kExternalStringTag
Definition: objects.h:565
const XMMRegister xmm1
const Register esp
const intptr_t kSmiSignMask
Definition: globals.h:223
const uint32_t kTwoByteStringTag
Definition: objects.h:556
const Register r0
const uint32_t kShortExternalStringTag
Definition: objects.h:590
const int kSmiTagSize
Definition: v8.h:5743
const int kFastElementsKindPackedToHoley
Definition: elements-kind.h:71
const XMMRegister xmm2
const int kDoubleSize
Definition: globals.h:127
const uint32_t kNotStringTag
Definition: objects.h:545
Operand FieldOperand(Register object, int offset)
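FieldOperand folds the heap-object tag into the addressing displacement: tagged pointers are off by kHeapObjectTag, so a field at offset is reached at object + offset - kHeapObjectTag with no separate untag step. A minimal self-contained sketch of the arithmetic (ReadTaggedField is a hypothetical helper for illustration only):

#include <stdint.h>

const int kHeapObjectTag = 1;  // low-bit tag on heap pointers (32-bit layout)

// Hypothetical illustration: the tag correction is folded into the
// displacement, which is what Operand(object, offset - kHeapObjectTag) does.
uint32_t ReadTaggedField(uintptr_t tagged_object, int offset) {
  return *reinterpret_cast<const uint32_t*>(
      tagged_object + offset - kHeapObjectTag);
}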
@ JS_FUNCTION_STUB_MODE
Definition: code-stubs.h:350
const uint32_t kStringTag
Definition: objects.h:544
@ JS_REGEXP_TYPE
Definition: objects.h:748
@ JS_ARRAY_TYPE
Definition: objects.h:738
@ FIXED_ARRAY_TYPE
Definition: objects.h:717
@ JS_OBJECT_TYPE
Definition: objects.h:731
@ ODDBALL_TYPE
Definition: objects.h:663
@ FIRST_SPEC_OBJECT_TYPE
Definition: objects.h:781
@ LAST_SPEC_OBJECT_TYPE
Definition: objects.h:782
@ HEAP_NUMBER_TYPE
Definition: objects.h:669
@ JS_FUNCTION_TYPE
Definition: objects.h:749
@ JS_FUNCTION_PROXY_TYPE
Definition: objects.h:726
@ FAST_HOLEY_DOUBLE_ELEMENTS
Definition: elements-kind.h:27
@ TERMINAL_FAST_ELEMENTS_KIND
Definition: elements-kind.h:63
@ FAST_HOLEY_SMI_ELEMENTS
Definition: elements-kind.h:17
const XMMRegister xmm3
Handle< T > handle(T *t, Isolate *isolate)
Definition: handles.h:146
const uint32_t kOneByteStringTag
Definition: objects.h:557
const Register esi
@ TREAT_MINUS_ZERO_AS_ZERO
Definition: globals.h:767
const Register eax
int GetSequenceIndexFromFastElementsKind(ElementsKind elements_kind)
bool IsFastPackedElementsKind(ElementsKind kind)
const Register ebx
const uint32_t kShortExternalStringMask
Definition: objects.h:589
ElementsKind GetHoleyElementsKind(ElementsKind packed_kind)
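The fast ElementsKind constants are ordered so each packed kind sits exactly kFastElementsKindPackedToHoley (1) below its holey counterpart, which is what lets stub code derive the holey kind arithmetically. A sketch under that assumption (the shipped helper may be written as a switch, but it must respect the same layout):

// Assumed ordering from elements-kind.h: packed and holey kinds alternate.
enum ElementsKind {
  FAST_SMI_ELEMENTS,
  FAST_HOLEY_SMI_ELEMENTS,
  FAST_ELEMENTS,
  FAST_HOLEY_ELEMENTS,
  FAST_DOUBLE_ELEMENTS,
  FAST_HOLEY_DOUBLE_ELEMENTS
};

const int kFastElementsKindPackedToHoley =
    FAST_HOLEY_SMI_ELEMENTS - FAST_SMI_ELEMENTS;  // == 1

// Sketch: packed -> holey is a constant offset within each pair.
ElementsKind GetHoleyElementsKind(ElementsKind packed_kind) {
  return static_cast<ElementsKind>(packed_kind +
                                   kFastElementsKindPackedToHoley);
}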
AllocationSiteOverrideMode
Definition: code-stubs.h:716
@ DISABLE_ALLOCATION_SITES
Definition: code-stubs.h:718
const XMMRegister xmm0
Condition NegateCondition(Condition cond)
Definition: constants-arm.h:86
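Both the ia32 and ARM condition encodings pair every predicate with its complement at an adjacent code (e.g. equal == 4, not_equal == 5 on ia32), so NegateCondition reduces to flipping the low bit. A minimal sketch of that encoding trick:

// Abbreviated ia32 condition-code encoding (equal/not_equal pair).
enum Condition { equal = 4, not_equal = 5 };

// Sketch: each condition and its negation differ only in bit 0.
Condition NegateCondition(Condition cond) {
  return static_cast<Condition>(cond ^ 1);
}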
const uint32_t kStringRepresentationMask
Definition: objects.h:561
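A string's instance type packs its representation (sequential, cons, sliced, external) and its character encoding into disjoint bit fields, so the flat-string paths above classify a string with one mask-and-compare per question. A minimal sketch, assuming the objects.h layout (kStringRepresentationMask == 0x3, kStringEncodingMask == 0x4, kSeqStringTag == 0x0, kOneByteStringTag == 0x4):

#include <stdint.h>

// Assumed bit layout from objects.h for this era of V8.
const uint32_t kStringRepresentationMask = 0x3;
const uint32_t kStringEncodingMask = 0x4;
const uint32_t kSeqStringTag = 0x0;
const uint32_t kOneByteStringTag = 0x4;

// Each property is answered by masking the same instance-type byte.
bool IsSeqString(uint32_t instance_type) {
  return (instance_type & kStringRepresentationMask) == kSeqStringTag;
}

bool IsOneByteString(uint32_t instance_type) {
  return (instance_type & kStringEncodingMask) == kOneByteStringTag;
}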
byte * Address
Definition: globals.h:101
const uint32_t kSlicedNotConsMask
Definition: objects.h:579
const Register r1
OStream & dec(OStream &os)
Definition: ostreams.cc:122
const XMMRegister xmm4
const int kHeapObjectTag
Definition: v8.h:5737
const int kSmiShiftSize
Definition: v8.h:5805
const Register no_reg
const uint32_t kInternalizedTag
Definition: objects.h:551
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
const intptr_t kSmiTagMask
Definition: v8.h:5744
const uint32_t kIsNotInternalizedMask
Definition: objects.h:549
const Register ebp
Operand ApiParameterOperand(int index)
const int kSmiTag
Definition: v8.h:5742
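These constants implement 32-bit Smi tagging: a small integer is stored shifted left by kSmiTagSize with a 0 tag bit, while heap pointers carry kHeapObjectTag, so a single AND classifies a value. A minimal self-contained sketch, assuming the 32-bit values kSmiTag == 0, kSmiTagSize == 1, kSmiShiftSize == 0:

#include <stdint.h>

const int kSmiTag = 0;  // assumed 32-bit values from v8.h
const int kSmiTagSize = 1;
const intptr_t kSmiTagMask = (1 << kSmiTagSize) - 1;

bool IsSmi(intptr_t value) {
  return (value & kSmiTagMask) == kSmiTag;
}

// Tagging shifts the payload into the upper 31 bits; untagging reverses it.
intptr_t SmiTag(int32_t value) {
  return static_cast<intptr_t>(value) << kSmiTagSize;
}
int32_t SmiUntag(intptr_t value) {
  return static_cast<int32_t>(value >> kSmiTagSize);
}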
const uint32_t kIsNotStringMask
Definition: objects.h:543
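Bit 7 of the instance type says "not a string" and bit 6 says "not internalized", with both tags chosen as zero for the positive case; the compare stubs exploit this to test "internalized string" with one combined mask. A minimal sketch, assuming the objects.h values kIsNotStringMask == 0x80 and kIsNotInternalizedMask == 0x40:

#include <stdint.h>

const uint32_t kIsNotStringMask = 0x80;  // assumed objects.h values
const uint32_t kStringTag = 0x00;
const uint32_t kIsNotInternalizedMask = 0x40;
const uint32_t kInternalizedTag = 0x00;

bool IsString(uint32_t instance_type) {
  return (instance_type & kIsNotStringMask) == kStringTag;
}

// One combined mask answers "internalized string?" in a single test.
bool IsInternalizedString(uint32_t instance_type) {
  return (instance_type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
         (kStringTag | kInternalizedTag);
}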
ElementsKind GetInitialFastElementsKind()
Definition: elements-kind.h:78
@ STRING_INDEX_IS_NUMBER
Definition: code-stubs.h:1590
@ STRING_INDEX_IS_ARRAY_INDEX
Definition: code-stubs.h:1595
const uint32_t kIsIndirectStringMask
Definition: objects.h:568
const Register ecx
Debugger support for the V8 JavaScript engine.
Definition: accessors.cc:20
static Handle< Value > Throw(Isolate *isolate, const char *message)
Definition: d8.cc:72
bool is(Register reg) const
static const int kMaxNumRegisters
static XMMRegister from_code(int code)
#define T(name, string, precedence)
Definition: token.cc:25