V8 Project
builtins-arm.cc
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_ARM

#include "src/codegen.h"
#include "src/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)
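// The `__` shorthand expands (via ACCESS_MASM) to `masm->`, so each
// generator line below reads like a single ARM assembly instruction.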


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- r0                 : number of arguments excluding receiver
  //  -- r1                 : called function (only guaranteed when
  //                          extra_args requires it)
  //  -- cp                 : context
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument (argc == r0)
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ push(r1);
  } else {
    DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects r0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ add(r0, r0, Operand(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the native context.

  __ ldr(result,
         MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ ldr(result,
         FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the InternalArray function from the native context.
  __ ldr(result,
         MemOperand(result,
                    Context::SlotOffset(
                        Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the native context.

  __ ldr(result,
         MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ ldr(result,
         FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the Array function from the native context.
  __ ldr(result,
         MemOperand(result,
                    Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}
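
// Both loaders above walk the same chain: the current context (cp) holds
// the global object, the global object holds the native context, and the
// native context holds the canonical Array/InternalArray functions in
// fixed slots.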


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(r2);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // Tail call a stub.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(r2);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function.
  // Tail call a stub.
  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1, r2, r3);

  Register function = r1;
  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, r2);
    __ cmp(function, Operand(r2));
    __ Assert(eq, kUnexpectedStringFunction);
  }

  // Load the first argument into r0 and get rid of the rest.
  Label no_arguments;
  __ cmp(r0, Operand::Zero());
  __ b(eq, &no_arguments);
  // First argument = sp[(argc - 1) * 4].
  __ sub(r0, r0, Operand(1));
  __ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex));
  // sp now points to args[0]; drop args[0] and the receiver.
  __ Drop(2);
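  // Note: the PreIndex load above also writes the scaled address back into
  // sp, so the following Drop(2) is enough to pop both args[0] and the
  // receiver.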

  Register argument = r2;
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(r0,        // Input.
                             argument,  // Result.
                             r3,        // Scratch.
                             r4,        // Scratch.
                             r5,        // Scratch.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1, r3, r4);
  __ bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- r2     : argument converted to string
  //  -- r1     : constructor function
  //  -- lr     : return address
  // -----------------------------------

  Label gc_required;
  __ Allocate(JSValue::kSize,
              r0,  // Result.
              r3,  // Scratch.
              r4,  // Scratch.
              &gc_required,
              TAG_OBJECT);

  // Initialising the String Object.
  Register map = r3;
  __ LoadGlobalFunctionInitialMap(function, map, r4);
  if (FLAG_debug_code) {
    __ ldrb(r4, FieldMemOperand(map, Map::kInstanceSizeOffset));
    __ cmp(r4, Operand(JSValue::kSize >> kPointerSizeLog2));
    __ Assert(eq, kUnexpectedStringWrapperInstanceSize);
    __ ldrb(r4, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
    __ cmp(r4, Operand::Zero());
    __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper);
  }
  __ str(map, FieldMemOperand(r0, HeapObject::kMapOffset));

  __ LoadRoot(r3, Heap::kEmptyFixedArrayRootIndex);
  __ str(r3, FieldMemOperand(r0, JSObject::kPropertiesOffset));
  __ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset));

  __ str(argument, FieldMemOperand(r0, JSValue::kValueOffset));

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);

  __ Ret();

  // The argument was not found in the number to string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ bind(&not_cached);
  __ JumpIfSmi(r0, &convert_argument);

  // Is it a String?
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  __ tst(r3, Operand(kIsNotStringMask));
  __ b(ne, &convert_argument);
  __ mov(argument, r0);
  __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
  __ b(&argument_is_string);

  // Invoke the conversion builtin and put the result into r2.
  __ bind(&convert_argument);
  __ push(function);  // Preserve the function.
  __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ push(r0);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
  }
  __ pop(function);
  __ mov(argument, r0);
  __ b(&argument_is_string);

  // Load the empty string into r2, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ bind(&no_arguments);
  __ LoadRoot(argument, Heap::kempty_stringRootIndex);
  __ Drop(1);
  __ b(&argument_is_string);

  // At this point the argument is already a string. Call runtime to
  // create a string wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1, r3, r4);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ push(argument);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ Ret();
}


static void CallRuntimePassFunction(
    MacroAssembler* masm, Runtime::FunctionId function_id) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  __ push(r1);
  // Push function as parameter to the runtime call.
  __ Push(r1);

  __ CallRuntime(function_id, 1);
  // Restore the function.
  __ pop(r1);
}


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r2);
}


static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
  __ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r0);
}
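
// In both tail-call helpers, `Code::kHeaderSize - kHeapObjectTag` turns a
// tagged Code pointer into the raw address of its first instruction: add
// the header size to skip past the Code object's header, and subtract the
// tag bit carried by every tagged heap pointer.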


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmp(sp, Operand(ip));
  __ b(hs, &ok);

  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_memento) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- r1     : constructor function
  //  -- r2     : allocation site or undefined
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Should never create mementos for api functions.
  DCHECK(!is_api_function || !create_memento);
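  // An AllocationMemento is a small struct (a map plus a pointer back to
  // the AllocationSite) appended directly after the new object, letting the
  // GC attribute the allocation to its site for pretenuring decisions.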

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    if (create_memento) {
      __ AssertUndefinedOrAllocationSite(r2, r3);
      __ push(r2);
    }

    // Preserve the two incoming parameters on the stack.
    __ SmiTag(r0);
    __ push(r0);  // Smi-tagged arguments count.
    __ push(r1);  // Constructor function.

    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    Label rt_call, allocated;
    if (FLAG_inline_new) {
      Label undo_allocation;
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(isolate);
      __ mov(r2, Operand(debug_step_in_fp));
      __ ldr(r2, MemOperand(r2));
      __ tst(r2, r2);
      __ b(ne, &rt_call);

      // Load the initial map and verify that it is in fact a map.
      // r1: constructor function
      __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
      __ JumpIfSmi(r2, &rt_call);
      __ CompareObjectType(r2, r3, r4, MAP_TYPE);
      __ b(ne, &rt_call);

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc). In which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // r1: constructor function
      // r2: initial map
      __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
      __ b(eq, &rt_call);
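
      // Slack tracking: fresh initial maps are given generous in-object
      // space, and Map::ConstructionCount is decremented on each
      // construction; once it reaches kFinishSlackTracking, the runtime
      // shrinks the instance size to what is actually used (see the
      // Runtime::kFinalizeInstanceSize call below).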
      if (!is_api_function) {
        Label allocate;
        MemOperand bit_field3 = FieldMemOperand(r2, Map::kBitField3Offset);
        // Check if slack tracking is enabled.
        __ ldr(r4, bit_field3);
        __ DecodeField<Map::ConstructionCount>(r3, r4);
        __ cmp(r3, Operand(JSFunction::kNoSlackTracking));
        __ b(eq, &allocate);
        // Decrease generous allocation count.
        __ sub(r4, r4, Operand(1 << Map::ConstructionCount::kShift));
        __ str(r4, bit_field3);
        __ cmp(r3, Operand(JSFunction::kFinishSlackTracking));
        __ b(ne, &allocate);

        __ push(r1);

        __ Push(r2, r1);  // r1 = constructor
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

        __ pop(r2);
        __ pop(r1);

        __ bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      // r1: constructor function
      // r2: initial map
      __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
      if (create_memento) {
        __ add(r3, r3, Operand(AllocationMemento::kSize / kPointerSize));
      }

      __ Allocate(r3, r4, r5, r6, &rt_call, SIZE_IN_WORDS);

      // Allocated the JSObject, now initialize the fields. Map is set to
      // initial map and properties and elements are set to empty fixed array.
      // r1: constructor function
      // r2: initial map
      // r3: object size (not including memento if create_memento)
      // r4: JSObject (not tagged)
      __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
      __ mov(r5, r4);
      DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset);
      __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
      DCHECK_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
      __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
      DCHECK_EQ(2 * kPointerSize, JSObject::kElementsOffset);
      __ str(r6, MemOperand(r5, kPointerSize, PostIndex));

      // Fill all the in-object properties with the appropriate filler.
      // r1: constructor function
      // r2: initial map
      // r3: object size (in words, including memento if create_memento)
      // r4: JSObject (not tagged)
      // r5: First in-object property of JSObject (not tagged)
      DCHECK_EQ(3 * kPointerSize, JSObject::kHeaderSize);
      __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);

      if (!is_api_function) {
        Label no_inobject_slack_tracking;

        // Check if slack tracking is enabled.
        __ ldr(ip, FieldMemOperand(r2, Map::kBitField3Offset));
        __ DecodeField<Map::ConstructionCount>(ip);
        __ cmp(ip, Operand(JSFunction::kNoSlackTracking));
        __ b(eq, &no_inobject_slack_tracking);

        // Allocate object with a slack.
        __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
        __ Ubfx(r0, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
                kBitsPerByte);
        __ add(r0, r5, Operand(r0, LSL, kPointerSizeLog2));
        // r0: offset of first field after pre-allocated fields
        if (FLAG_debug_code) {
          __ add(ip, r4, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
          __ cmp(r0, ip);
          __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields);
        }
        __ InitializeFieldsWithFiller(r5, r0, r6);
        // To allow for truncation.
        __ LoadRoot(r6, Heap::kOnePointerFillerMapRootIndex);
        // Fill the remaining fields with one pointer filler map.

        __ bind(&no_inobject_slack_tracking);
      }

      if (create_memento) {
        __ sub(ip, r3, Operand(AllocationMemento::kSize / kPointerSize));
        __ add(r0, r4, Operand(ip, LSL, kPointerSizeLog2));  // End of object.
        __ InitializeFieldsWithFiller(r5, r0, r6);

        // Fill in memento fields.
        // r5: points to the allocated but uninitialized memento.
        __ LoadRoot(r6, Heap::kAllocationMementoMapRootIndex);
        DCHECK_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
        __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
        // Load the AllocationSite.
        __ ldr(r6, MemOperand(sp, 2 * kPointerSize));
        DCHECK_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
        __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
      } else {
        __ add(r0, r4, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
        __ InitializeFieldsWithFiller(r5, r0, r6);
      }

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on. Any
      // failures need to undo the allocation, so that the heap is in a
      // consistent state and verifiable.
      __ add(r4, r4, Operand(kHeapObjectTag));

      // Check if a non-empty properties array is needed. Continue with the
      // allocated object if not; fall through to the runtime call if it is.
      // r1: constructor function
      // r4: JSObject
      // r5: start of next object (not tagged)
      __ ldrb(r3, FieldMemOperand(r2, Map::kUnusedPropertyFieldsOffset));
      // The field instance sizes contains both pre-allocated property fields
      // and in-object properties.
      __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
      __ Ubfx(r6, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
              kBitsPerByte);
      __ add(r3, r3, Operand(r6));
      __ Ubfx(r6, r0, Map::kInObjectPropertiesByte * kBitsPerByte,
              kBitsPerByte);
      __ sub(r3, r3, Operand(r6), SetCC);

      // Done if no extra properties are to be allocated.
      __ b(eq, &allocated);
      __ Assert(pl, kPropertyAllocationCountFailed);

      // Scale the number of elements by pointer size and add the header for
      // FixedArrays to the start of the next object calculation from above.
      // r1: constructor
      // r3: number of elements in properties array
      // r4: JSObject
      // r5: start of next object
      __ add(r0, r3, Operand(FixedArray::kHeaderSize / kPointerSize));
      __ Allocate(
          r0,
          r5,
          r6,
          r2,
          &undo_allocation,
          static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));

      // Initialize the FixedArray.
      // r1: constructor
      // r3: number of elements in properties array
      // r4: JSObject
      // r5: FixedArray (not tagged)
      __ LoadRoot(r6, Heap::kFixedArrayMapRootIndex);
      __ mov(r2, r5);
      DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset);
      __ str(r6, MemOperand(r2, kPointerSize, PostIndex));
      DCHECK_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
      __ SmiTag(r0, r3);
      __ str(r0, MemOperand(r2, kPointerSize, PostIndex));

      // Initialize the fields to undefined.
      // r1: constructor function
      // r2: First element of FixedArray (not tagged)
      // r3: number of elements in properties array
      // r4: JSObject
      // r5: FixedArray (not tagged)
      __ add(r6, r2, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
      DCHECK_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
      { Label loop, entry;
        __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
        __ b(&entry);
        __ bind(&loop);
        __ str(r0, MemOperand(r2, kPointerSize, PostIndex));
        __ bind(&entry);
        __ cmp(r2, r6);
        __ b(lt, &loop);
      }

      // Store the initialized FixedArray into the properties field of
      // the JSObject.
      // r1: constructor function
      // r4: JSObject
      // r5: FixedArray (not tagged)
      __ add(r5, r5, Operand(kHeapObjectTag));  // Add the heap tag.
      __ str(r5, FieldMemOperand(r4, JSObject::kPropertiesOffset));

      // Continue with JSObject being successfully allocated.
      // r1: constructor function
      // r4: JSObject
      __ jmp(&allocated);

      // Undo the setting of the new top so that the heap is verifiable. For
      // example, the map's unused properties potentially do not match the
      // allocated objects unused properties.
      // r4: JSObject (previous new top)
      __ bind(&undo_allocation);
      __ UndoAllocationInNewSpace(r4, r5);
    }

    // Allocate the new receiver object using the runtime call.
    // r1: constructor function
    __ bind(&rt_call);
    if (create_memento) {
      // Get the cell or allocation site.
      __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
      __ push(r2);
    }

    __ push(r1);  // argument for Runtime_NewObject
    if (create_memento) {
      __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2);
    } else {
      __ CallRuntime(Runtime::kNewObject, 1);
    }
    __ mov(r4, r0);

    // If we ended up using the runtime, and we want a memento, then the
    // runtime call made it for us, and we shouldn't do create count
    // increment.
    Label count_incremented;
    if (create_memento) {
      __ jmp(&count_incremented);
    }

    // Receiver for constructor call allocated.
    // r4: JSObject
    __ bind(&allocated);

    if (create_memento) {
      __ ldr(r2, MemOperand(sp, kPointerSize * 2));
      __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
      __ cmp(r2, r5);
      __ b(eq, &count_incremented);
      // r2 is an AllocationSite. We are creating a memento from it, so we
      // need to increment the memento create count.
      __ ldr(r3, FieldMemOperand(r2,
                                 AllocationSite::kPretenureCreateCountOffset));
      __ add(r3, r3, Operand(Smi::FromInt(1)));
      __ str(r3, FieldMemOperand(r2,
                                 AllocationSite::kPretenureCreateCountOffset));
      __ bind(&count_incremented);
    }

    __ push(r4);
    __ push(r4);
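    // The receiver is pushed twice deliberately: the lower copy survives the
    // constructor call and is returned if the constructor produces a
    // non-object result (see use_receiver below), while the upper copy is
    // consumed as the receiver of the call itself.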

    // Reload the number of arguments and the constructor from the stack.
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
    __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
    __ ldr(r3, MemOperand(sp, 3 * kPointerSize));

    // Set up pointer to last argument.
    __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Set up number of arguments for function call below.
    __ SmiUntag(r0, r3);

    // Copy arguments and receiver to the expression stack.
    // r0: number of arguments
    // r1: constructor function
    // r2: address of last argument (caller sp)
    // r3: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
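    // r3 holds the count as a smi (value << 1), so scaling by
    // LSL (kPointerSizeLog2 - 1) turns it into a byte offset, and
    // subtracting 2 per iteration decrements the untagged count by one.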
    Label loop, entry;
    __ b(&entry);
    __ bind(&loop);
    __ ldr(ip, MemOperand(r2, r3, LSL, kPointerSizeLog2 - 1));
    __ push(ip);
    __ bind(&entry);
    __ sub(r3, r3, Operand(2), SetCC);
    __ b(ge, &loop);

    // Call the function.
    // r0: number of arguments
    // r1: constructor function
    if (is_api_function) {
      __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(r0);
      __ InvokeFunction(r1, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    // r0: result
    // sp[0]: receiver
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;

    // If the result is a smi, it is *not* an object in the ECMA sense.
    // r0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ JumpIfSmi(r0, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    __ CompareObjectType(r0, r1, r3, FIRST_SPEC_OBJECT_TYPE);
    __ b(ge, &exit);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ ldr(r0, MemOperand(sp));

    // Remove receiver from the stack, remove caller arguments, and
    // return.
    __ bind(&exit);
    // r0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ ldr(r1, MemOperand(sp, 2 * kPointerSize));

    // Leave construct frame.
  }

  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
  __ add(sp, sp, Operand(kPointerSize));
  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2);
  __ Jump(lr);
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r0: code entry
  // r1: function
  // r2: receiver
  // r3: argc
  // r4: argv
  // r5-r6, r8 (if not FLAG_enable_ool_constant_pool) and cp may be clobbered
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Clear the context before we push it when entering the internal frame.
  __ mov(cp, Operand::Zero());

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context from the function argument.
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ push(r1);
    __ push(r2);

    // Copy arguments to the stack in a loop.
    // r1: function
    // r3: argc
    // r4: argv, i.e. points to first arg
    Label loop, entry;
    __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
    // r2 points past last arg.
    __ b(&entry);
    __ bind(&loop);
    __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex));  // read next parameter
    __ ldr(r0, MemOperand(r0));  // dereference handle
    __ push(r0);  // push parameter
    __ bind(&entry);
    __ cmp(r4, r2);
    __ b(ne, &loop);

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ mov(r5, Operand(r4));
    __ mov(r6, Operand(r4));
    if (!FLAG_enable_ool_constant_pool) {
      __ mov(r8, Operand(r4));
    }
    if (kR9Available == 1) {
      __ mov(r9, Operand(r4));
    }

    // Invoke the code and pass argc as r0.
    __ mov(r0, Operand(r3));
    if (is_construct) {
      // No type feedback cell is available
      __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
      CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(r0);
      __ InvokeFunction(r1, actual, CALL_FUNCTION, NullCallWrapper());
    }
    // Exit the JS frame and remove the parameters (except function), and
    // return.
    // Respect ABI stack constraint.
  }
  __ Jump(lr);

  // r0: result
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileLazy);
  GenerateTailCallToReturnedCode(masm);
}


static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  __ push(r1);
  // Push function as parameter to the runtime call.
  __ Push(r1);
  // Whether to compile in a background thread.
  __ Push(masm->isolate()->factory()->ToBoolean(concurrent));

  __ CallRuntime(Runtime::kCompileOptimized, 2);
  // Restore the function.
  __ pop(r1);
}


void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r0 - contains return address (beginning of patch sequence)
  //   r1 - isolate
  FrameScope scope(masm, StackFrame::MANUAL);
  __ stm(db_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
  __ PrepareCallCFunction(2, 0, r2);
  __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ ldm(ia_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
  __ mov(pc, r0);
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
  void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }                                                           \
  void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r0 - contains return address (beginning of patch sequence)
  //   r1 - isolate
  FrameScope scope(masm, StackFrame::MANUAL);
  __ stm(db_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
  __ PrepareCallCFunction(2, 0, r2);
  __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(ExternalReference::get_mark_code_as_executed_function(
      masm->isolate()), 2);
  __ ldm(ia_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());

  // Perform prologue operations usually performed by the young code stub.
  __ PushFixedFrame(r1);
  __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));

  // Jump to point after the code-age stub.
  __ add(r0, r0, Operand(kNoCodeAgeSequenceLength));
  __ mov(pc, r0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
    __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved);
  }

  __ add(sp, sp, Operand(kPointerSize));  // Ignore state
  __ mov(pc, lr);  // Jump to miss handler
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(r0);
    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
  }

  // Get the full codegen state from the stack and untag it -> r6.
  __ ldr(r6, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(r6);
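  // The deoptimizer pushes FullCodeGenerator::NO_REGISTERS when nothing else
  // is live, or TOS_REG when the value of the top-of-stack register (r0) was
  // saved as well and must be reloaded before returning.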
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ cmp(r6, Operand(FullCodeGenerator::NO_REGISTERS));
  __ b(ne, &with_tos_register);
  __ add(sp, sp, Operand(1 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&with_tos_register);
  __ ldr(r0, MemOperand(sp, 1 * kPointerSize));
  __ cmp(r6, Operand(FullCodeGenerator::TOS_REG));
  __ b(ne, &unknown_state);
  __ add(sp, sp, Operand(2 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&unknown_state);
  __ stop("no cases left");
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(r0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  // If the code object is null, just return to the unoptimized code.
  Label skip;
  __ cmp(r0, Operand(Smi::FromInt(0)));
  __ b(ne, &skip);
  __ Ret();

  __ bind(&skip);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ ldr(r1, FieldMemOperand(r0, Code::kDeoptimizationDataOffset));

  { ConstantPoolUnavailableScope constant_pool_unavailable(masm);
    if (FLAG_enable_ool_constant_pool) {
      __ ldr(pp, FieldMemOperand(r0, Code::kConstantPoolOffset));
    }

    // Load the OSR entrypoint offset from the deoptimization data.
    // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
    __ ldr(r1, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(
        DeoptimizationInputData::kOsrPcOffsetIndex)));

    // Compute the target address = code_obj + header_size + osr_offset
    // <entry_addr> = <code_obj> + #header_size + <osr_offset>
    __ add(r0, r0, Operand::SmiUntag(r1));
    __ add(lr, r0, Operand(Code::kHeaderSize - kHeapObjectTag));

    // And "return" to the OSR entry point of the function.
    __ Ret();
  }
}


void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as indicator that recompilation might be done.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmp(sp, Operand(ip));
  __ b(hs, &ok);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard, 0);
  }
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ Ret();
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // r0: actual number of arguments
  { Label done;
    __ cmp(r0, Operand::Zero());
    __ b(ne, &done);
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    __ push(r2);
    __ add(r0, r0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  // r0: actual number of arguments
  Label slow, non_function;
  __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  __ JumpIfSmi(r1, &non_function);
  __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
  __ b(ne, &slow);

  // 3a. Patch the first argument if necessary when calling a function.
  // r0: actual number of arguments
  // r1: function
  Label shift_arguments;
  __ mov(r4, Operand::Zero());  // indicate regular JS_FUNCTION
  { Label convert_to_object, use_global_proxy, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
    __ ldr(r3, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset));
    __ tst(r3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                             kSmiTagSize)));
    __ b(ne, &shift_arguments);

    // Do not transform the receiver for native (Compilerhints already in r3).
    __ tst(r3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ b(ne, &shift_arguments);

    // Compute the receiver in sloppy mode.
    __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ ldr(r2, MemOperand(r2, -kPointerSize));
    // r0: actual number of arguments
    // r1: function
    // r2: first argument
    __ JumpIfSmi(r2, &convert_to_object);

    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
    __ cmp(r2, r3);
    __ b(eq, &use_global_proxy);
    __ LoadRoot(r3, Heap::kNullValueRootIndex);
    __ cmp(r2, r3);
    __ b(eq, &use_global_proxy);

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CompareObjectType(r2, r3, r3, FIRST_SPEC_OBJECT_TYPE);
    __ b(ge, &shift_arguments);

    __ bind(&convert_to_object);

    {
      // Enter an internal frame in order to preserve argument count.
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(r0);
      __ push(r0);

      __ push(r2);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ mov(r2, r0);

      __ pop(r0);
      __ SmiUntag(r0);

      // Exit the internal frame.
    }

    // Restore the function to r1, and the flag to r4.
    __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
    __ mov(r4, Operand::Zero());
    __ jmp(&patch_receiver);

    __ bind(&use_global_proxy);
    __ ldr(r2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalProxyOffset));

    __ bind(&patch_receiver);
    __ add(r3, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ str(r2, MemOperand(r3, -kPointerSize));

    __ jmp(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ bind(&slow);
  __ mov(r4, Operand(1, RelocInfo::NONE32));  // indicate function proxy
  __ cmp(r2, Operand(JS_FUNCTION_PROXY_TYPE));
  __ b(eq, &shift_arguments);
  __ bind(&non_function);
  __ mov(r4, Operand(2, RelocInfo::NONE32));  // indicate non-function

  // 3c. Patch the first argument when calling a non-function. The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  // r0: actual number of arguments
  // r1: function
  // r4: call type (0: JS function, 1: function proxy, 2: non-function)
  __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
  __ str(r1, MemOperand(r2, -kPointerSize));

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  // r0: actual number of arguments
  // r1: function
  // r4: call type (0: JS function, 1: function proxy, 2: non-function)
  __ bind(&shift_arguments);
  { Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));

    __ bind(&loop);
    __ ldr(ip, MemOperand(r2, -kPointerSize));
    __ str(ip, MemOperand(r2));
    __ sub(r2, r2, Operand(kPointerSize));
    __ cmp(r2, sp);
    __ b(ne, &loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ sub(r0, r0, Operand(1));
    __ pop();
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  // r0: actual number of arguments
  // r1: function
  // r4: call type (0: JS function, 1: function proxy, 2: non-function)
  { Label function, non_proxy;
    __ tst(r4, r4);
    __ b(eq, &function);
    // Expected number of arguments is 0 for CALL_NON_FUNCTION.
    __ mov(r2, Operand::Zero());
    __ cmp(r4, Operand(1));
    __ b(ne, &non_proxy);

    __ push(r1);  // re-add proxy object as additional argument
    __ add(r0, r0, Operand(1));
    __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinFunction(r1, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing. If so, jump
  //     (tail-call) to the code in register r3 without checking arguments.
  // r0: actual number of arguments
  // r1: function
  __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2,
         FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ SmiUntag(r2);
  __ cmp(r2, r0);  // Check formal and actual parameter counts.
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET,
          ne);

  __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  ParameterCount expected(0);
  __ InvokeCode(r3, expected, expected, JUMP_FUNCTION, NullCallWrapper());
}


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  const int kIndexOffset =
      StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
  const int kLimitOffset =
      StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
  const int kArgsOffset = 2 * kPointerSize;
  const int kRecvOffset = 3 * kPointerSize;
  const int kFunctionOffset = 4 * kPointerSize;
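
  // All offsets are relative to the internal frame's fp: the caller-pushed
  // function, receiver and arguments object live above the frame (positive
  // offsets), while the loop's current index and limit occupy the first two
  // expression-stack slots below it.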

  {
    FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);

    __ ldr(r0, MemOperand(fp, kFunctionOffset));  // get the function
    __ push(r0);
    __ ldr(r0, MemOperand(fp, kArgsOffset));  // get the args array
    __ push(r0);
    __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

    // Check the stack for overflow. We are not trying to catch
    // interruptions (e.g. debug break and preemption) here, so the "real stack
    // limit" is checked.
    Label okay;
    __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
    // Make r2 the space we have left. The stack might already be overflowed
    // here which will cause r2 to become negative.
    __ sub(r2, sp, r2);
    // Check if the arguments will overflow the stack.
    __ cmp(r2, Operand::PointerOffsetFromSmiKey(r0));
    __ b(gt, &okay);  // Signed comparison.

    // Out of stack space.
    __ ldr(r1, MemOperand(fp, kFunctionOffset));
    __ Push(r1, r0);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    // End of stack check.

    // Push current limit and index.
    __ bind(&okay);
    __ push(r0);  // limit
    __ mov(r1, Operand::Zero());  // initial index
    __ push(r1);

    // Get the receiver.
    __ ldr(r0, MemOperand(fp, kRecvOffset));

    // Check that the function is a JS function (otherwise it must be a proxy).
    Label push_receiver;
    __ ldr(r1, MemOperand(fp, kFunctionOffset));
    __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
    __ b(ne, &push_receiver);

    // Change context eagerly to get the right global object if necessary.
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
    // Load the shared function info while the function is still in r1.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));

    // Compute the receiver.
    // Do not transform the receiver for strict mode functions.
    Label call_to_object, use_global_proxy;
    __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset));
    __ tst(r2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                             kSmiTagSize)));
    __ b(ne, &push_receiver);

    // Do not transform the receiver for native functions.
    __ tst(r2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ b(ne, &push_receiver);

    // Compute the receiver in sloppy mode.
    __ JumpIfSmi(r0, &call_to_object);
    __ LoadRoot(r1, Heap::kNullValueRootIndex);
    __ cmp(r0, r1);
    __ b(eq, &use_global_proxy);
    __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
    __ cmp(r0, r1);
    __ b(eq, &use_global_proxy);

    // Check if the receiver is already a JavaScript object.
    // r0: receiver
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
    __ b(ge, &push_receiver);

    // Convert the receiver to a regular object.
    // r0: receiver
    __ bind(&call_to_object);
    __ push(r0);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ b(&push_receiver);

    __ bind(&use_global_proxy);
    __ ldr(r0, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalProxyOffset));

    // Push the receiver.
    // r0: receiver
    __ bind(&push_receiver);
    __ push(r0);
1327  // Copy all arguments from the array to the stack.
1328  Label entry, loop;
1329  __ ldr(r0, MemOperand(fp, kIndexOffset));
1330  __ b(&entry);
1331 
1332  // Load the current argument from the arguments array and push it to the
1333  // stack.
1334  // r0: current argument index
1335  __ bind(&loop);
1336  __ ldr(r1, MemOperand(fp, kArgsOffset));
1337  __ Push(r1, r0);
1338 
1339  // Call the runtime to access the property in the arguments array.
1340  __ CallRuntime(Runtime::kGetProperty, 2);
1341  __ push(r0);
1342 
1343  // Use inline caching to access the arguments.
1344  __ ldr(r0, MemOperand(fp, kIndexOffset));
1345  __ add(r0, r0, Operand(1 << kSmiTagSize));
1346  __ str(r0, MemOperand(fp, kIndexOffset));
1347 
1348  // Test if the copy loop has finished copying all the elements from the
1349  // arguments object.
1350  __ bind(&entry);
1351  __ ldr(r1, MemOperand(fp, kLimitOffset));
1352  __ cmp(r0, r1);
1353  __ b(ne, &loop);
1354 
1355  // Call the function.
1356  Label call_proxy;
1357  ParameterCount actual(r0);
1358  __ SmiUntag(r0);
1359  __ ldr(r1, MemOperand(fp, kFunctionOffset));
1360  __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
1361  __ b(ne, &call_proxy);
1362  __ InvokeFunction(r1, actual, CALL_FUNCTION, NullCallWrapper());
1363 
1364  frame_scope.GenerateLeaveFrame();
1365  __ add(sp, sp, Operand(3 * kPointerSize));
1366  __ Jump(lr);
1367 
1368  // Call the function proxy.
1369  __ bind(&call_proxy);
1370  __ push(r1); // add function proxy as last argument
1371  __ add(r0, r0, Operand(1));
1372  __ mov(r2, Operand::Zero());
1373  __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY);
1374  __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1376 
1377  // Tear down the internal frame and remove function, receiver and args.
1378  }
1379  __ add(sp, sp, Operand(3 * kPointerSize));
1380  __ Jump(lr);
1381 }


static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- r0 : actual number of arguments
  //  -- r1 : function (passed through to callee)
  //  -- r2 : expected number of arguments
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
  // Make r5 the space we have left. The stack might already be overflowed
  // here which will cause r5 to become negative.
  __ sub(r5, sp, r5);
  // Check if the arguments will overflow the stack.
  __ cmp(r5, Operand(r2, LSL, kPointerSizeLog2));
  __ b(le, stack_overflow);  // Signed comparison.
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(r0);
  __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() |
                   (FLAG_enable_ool_constant_pool ? pp.bit() : 0) |
                   fp.bit() | lr.bit());
  __ add(fp, sp,
         Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ ldr(r1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                              kPointerSize)));

  __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR);
  __ add(sp, sp, Operand::PointerOffsetFromSmiKey(r1));
  __ add(sp, sp, Operand(kPointerSize));  // adjust for receiver
}
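
// The adaptor frame keeps the smi-tagged actual argument count in the slot
// pushed just below the fixed frame, which is why LeaveArgumentsAdaptorFrame
// can reload it at a fixed negative offset from fp before dropping the
// parameters.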


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : actual number of arguments
  //  -- r1 : function (passed through to callee)
  //  -- r2 : expected number of arguments
  // -----------------------------------

  Label stack_overflow;
  ArgumentAdaptorStackCheck(masm, &stack_overflow);
  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ cmp(r0, r2);
  __ b(lt, &too_few);
  __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ b(eq, &dont_adapt_arguments);
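  // kDontAdaptArgumentsSentinel marks functions that handle any actual
  // argument count themselves (e.g. certain builtins), so for those the
  // adaptor frame is skipped entirely.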

  {  // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into r0 and copy end address into r2.
    // r0: actual number of arguments as a smi
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));
    // adjust for return address and receiver
    __ add(r0, r0, Operand(2 * kPointerSize));
    __ sub(r2, r0, Operand(r2, LSL, kPointerSizeLog2));

    // Copy the arguments (including the receiver) to the new stack frame.
    // r0: copy start address
    // r1: function
    // r2: copy end address
    // r3: code entry to call

    Label copy;
    __ bind(&copy);
    __ ldr(ip, MemOperand(r0, 0));
    __ push(ip);
    __ cmp(r0, r2);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    __ b(&invoke);
  }

  {  // Too few parameters: actual < expected
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into r0 and copy end address is fp.
    // r0: actual number of arguments as a smi
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));

    // Copy the arguments (including the receiver) to the new stack frame.
    // r0: copy start address
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ ldr(ip, MemOperand(r0, 2 * kPointerSize));
    __ push(ip);
    __ cmp(r0, fp);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    // Fill the remaining expected arguments with undefined.
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ sub(r2, fp, Operand(r2, LSL, kPointerSizeLog2));
    // Adjust for frame.
    __ sub(r2, r2, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                           2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ push(ip);
    __ cmp(sp, r2);
    __ b(ne, &fill);
  }

  // Call the entry point.
  __ bind(&invoke);
  __ Call(r3);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Jump(lr);


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ Jump(r3);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    EnterArgumentsAdaptorFrame(masm);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    __ bkpt(0);
  }
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM