builtins-mips64.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 
6 
7 #include "src/v8.h"
8 
9 #if V8_TARGET_ARCH_MIPS64
10 
11 #include "src/codegen.h"
12 #include "src/debug.h"
13 #include "src/deoptimizer.h"
14 #include "src/full-codegen.h"
15 #include "src/runtime/runtime.h"
16 
17 namespace v8 {
18 namespace internal {
19 
20 
21 #define __ ACCESS_MASM(masm)
22 
23 
24 void Builtins::Generate_Adaptor(MacroAssembler* masm,
25  CFunctionId id,
26  BuiltinExtraArguments extra_args) {
27  // ----------- S t a t e -------------
28  // -- a0 : number of arguments excluding receiver
29  // -- a1 : called function (only guaranteed when
30  // -- extra_args requires it)
31  // -- cp : context
32  // -- sp[0] : last argument
33  // -- ...
34  // -- sp[8 * (argc - 1)] : first argument
35  // -- sp[8 * argc] : receiver
36  // -----------------------------------
37 
38  // Insert extra arguments.
39  int num_extra_args = 0;
40  if (extra_args == NEEDS_CALLED_FUNCTION) {
41  num_extra_args = 1;
42  __ push(a1);
43  } else {
44  DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
45  }
46 
47  // JumpToExternalReference expects s0 to contain the number of arguments
48  // including the receiver and the extra arguments.
49  __ Daddu(s0, a0, num_extra_args + 1);
50  __ dsll(s1, s0, kPointerSizeLog2);
51  __ Dsubu(s1, s1, kPointerSize);
52  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
53 }
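
The s0/s1 setup above computes the argument count including the receiver and extra arguments, and the byte offset of the first argument. A minimal C++ sketch of that arithmetic, assuming 8-byte pointers (kPointerSizeLog2 == 3 on MIPS64; ComputeAdaptorArgs is an illustrative name, not V8 API):

  #include <cstdint>

  constexpr int kPointerSizeLog2 = 3;  // 8-byte pointers on MIPS64.
  constexpr int kPointerSize = 1 << kPointerSizeLog2;

  struct AdaptorState { int64_t s0; int64_t s1; };

  AdaptorState ComputeAdaptorArgs(int64_t a0 /* argc excl. receiver */,
                                  int num_extra_args) {
    int64_t s0 = a0 + num_extra_args + 1;                  // __ Daddu(s0, a0, ...)
    int64_t s1 = (s0 << kPointerSizeLog2) - kPointerSize;  // dsll + Dsubu
    return {s0, s1};  // s1: byte offset of the first argument from sp.
  }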
54 
55 
56 // Load the built-in InternalArray function from the current context.
57 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
58  Register result) {
59  // Load the native context.
60 
61  __ ld(result,
62  MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
63  __ ld(result,
64  FieldMemOperand(result, GlobalObject::kNativeContextOffset));
65  // Load the InternalArray function from the native context.
66  __ ld(result,
67  MemOperand(result,
68  Context::SlotOffset(
69  Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
70 }
71 
72 
73 // Load the built-in Array function from the current context.
74 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
75  // Load the native context.
76 
77  __ ld(result,
78  MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
79  __ ld(result,
80  FieldMemOperand(result, GlobalObject::kNativeContextOffset));
81  // Load the Array function from the native context.
82  __ ld(result,
83  MemOperand(result,
84  Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
85 }
86 
87 
88 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
89  // ----------- S t a t e -------------
90  // -- a0 : number of arguments
91  // -- ra : return address
92  // -- sp[...]: constructor arguments
93  // -----------------------------------
94  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
95 
96  // Get the InternalArray function.
97  GenerateLoadInternalArrayFunction(masm, a1);
98 
99  if (FLAG_debug_code) {
100  // Initial map for the builtin InternalArray functions should be maps.
101  __ ld(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
102  __ SmiTst(a2, a4);
103  __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
104  a4, Operand(zero_reg));
105  __ GetObjectType(a2, a3, a4);
106  __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction,
107  a4, Operand(MAP_TYPE));
108  }
109 
110  // Run the native code for the InternalArray function called as a normal
111  // function.
112  // Tail call a stub.
113  InternalArrayConstructorStub stub(masm->isolate());
114  __ TailCallStub(&stub);
115 }
116 
117 
118 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
119  // ----------- S t a t e -------------
120  // -- a0 : number of arguments
121  // -- ra : return address
122  // -- sp[...]: constructor arguments
123  // -----------------------------------
124  Label generic_array_code;
125 
126  // Get the Array function.
127  GenerateLoadArrayFunction(masm, a1);
128 
129  if (FLAG_debug_code) {
130  // Initial map for the builtin Array functions should be maps.
131  __ ld(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
132  __ SmiTst(a2, a4);
133  __ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
134  a4, Operand(zero_reg));
135  __ GetObjectType(a2, a3, a4);
136  __ Assert(eq, kUnexpectedInitialMapForArrayFunction2,
137  a4, Operand(MAP_TYPE));
138  }
139 
140  // Run the native code for the Array function called as a normal function.
141  // Tail call a stub.
142  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
143  ArrayConstructorStub stub(masm->isolate());
144  __ TailCallStub(&stub);
145 }
146 
147 
148 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
149  // ----------- S t a t e -------------
150  // -- a0 : number of arguments
151  // -- a1 : constructor function
152  // -- ra : return address
153  // -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
154  // -- sp[argc * 8] : receiver
155  // -----------------------------------
156  Counters* counters = masm->isolate()->counters();
157  __ IncrementCounter(counters->string_ctor_calls(), 1, a2, a3);
158 
159  Register function = a1;
160  if (FLAG_debug_code) {
161  __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, a2);
162  __ Assert(eq, kUnexpectedStringFunction, function, Operand(a2));
163  }
164 
165  // Load the first argument in a0 and get rid of the rest.
166  Label no_arguments;
167  __ Branch(&no_arguments, eq, a0, Operand(zero_reg));
168  // First arg = sp[(argc - 1) * 8].
169  __ Dsubu(a0, a0, Operand(1));
170  __ dsll(a0, a0, kPointerSizeLog2);
171  __ Daddu(sp, a0, sp);
172  __ ld(a0, MemOperand(sp));
173  // sp now points to args[0]; drop args[0] + receiver.
174  __ Drop(2);
175 
176  Register argument = a2;
177  Label not_cached, argument_is_string;
178  __ LookupNumberStringCache(a0, // Input.
179  argument, // Result.
180  a3, // Scratch.
181  a4, // Scratch.
182  a5, // Scratch.
183  &not_cached);
184  __ IncrementCounter(counters->string_ctor_cached_number(), 1, a3, a4);
185  __ bind(&argument_is_string);
186 
187  // ----------- S t a t e -------------
188  // -- a2 : argument converted to string
189  // -- a1 : constructor function
190  // -- ra : return address
191  // -----------------------------------
192 
193  Label gc_required;
194  __ Allocate(JSValue::kSize,
195  v0, // Result.
196  a3, // Scratch.
197  a4, // Scratch.
198  &gc_required,
199  TAG_OBJECT);
200 
201  // Initialising the String Object.
202  Register map = a3;
203  __ LoadGlobalFunctionInitialMap(function, map, a4);
204  if (FLAG_debug_code) {
205  __ lbu(a4, FieldMemOperand(map, Map::kInstanceSizeOffset));
206  __ Assert(eq, kUnexpectedStringWrapperInstanceSize,
207  a4, Operand(JSValue::kSize >> kPointerSizeLog2));
208  __ lbu(a4, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
209  __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper,
210  a4, Operand(zero_reg));
211  }
212  __ sd(map, FieldMemOperand(v0, HeapObject::kMapOffset));
213 
214  __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex);
215  __ sd(a3, FieldMemOperand(v0, JSObject::kPropertiesOffset));
216  __ sd(a3, FieldMemOperand(v0, JSObject::kElementsOffset));
217 
218  __ sd(argument, FieldMemOperand(v0, JSValue::kValueOffset));
219 
220  // Ensure the object is fully initialized.
221  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
222 
223  __ Ret();
224 
225  // The argument was not found in the number to string cache. Check
226  // if it's a string already before calling the conversion builtin.
227  Label convert_argument;
228  __ bind(&not_cached);
229  __ JumpIfSmi(a0, &convert_argument);
230 
231  // Is it a String?
232  __ ld(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
233  __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
234  STATIC_ASSERT(kNotStringTag != 0);
235  __ And(a4, a3, Operand(kIsNotStringMask));
236  __ Branch(&convert_argument, ne, a4, Operand(zero_reg));
237  __ mov(argument, a0);
238  __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, a4);
239  __ Branch(&argument_is_string);
240 
241  // Invoke the conversion builtin and put the result into a2.
242  __ bind(&convert_argument);
243  __ push(function); // Preserve the function.
244  __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, a4);
245  {
246  FrameScope scope(masm, StackFrame::INTERNAL);
247  __ push(a0);
248  __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
249  }
250  __ pop(function);
251  __ mov(argument, v0);
252  __ Branch(&argument_is_string);
253 
254  // Load the empty string into a2, remove the receiver from the
255  // stack, and jump back to the case where the argument is a string.
256  __ bind(&no_arguments);
257  __ LoadRoot(argument, Heap::kempty_stringRootIndex);
258  __ Drop(1);
259  __ Branch(&argument_is_string);
260 
261  // At this point the argument is already a string. Call runtime to
262  // create a string wrapper.
263  __ bind(&gc_required);
264  __ IncrementCounter(counters->string_ctor_gc_required(), 1, a3, a4);
265  {
266  FrameScope scope(masm, StackFrame::INTERNAL);
267  __ push(argument);
268  __ CallRuntime(Runtime::kNewStringWrapper, 1);
269  }
270  __ Ret();
271 }
272 
273 
274 static void CallRuntimePassFunction(
275  MacroAssembler* masm, Runtime::FunctionId function_id) {
276  FrameScope scope(masm, StackFrame::INTERNAL);
277  // Push a copy of the function onto the stack.
278  // Push call kind information and function as parameter to the runtime call.
279  __ Push(a1, a1);
280 
281  __ CallRuntime(function_id, 1);
282  // Restore call kind information and receiver.
283  __ Pop(a1);
284 }
285 
286 
287 static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
288  __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
289  __ ld(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
290  __ Daddu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
291  __ Jump(at);
292 }
293 
294 
295 static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
296  __ Daddu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
297  __ Jump(at);
298 }
299 
300 
301 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
302  // Checking whether the queued function is ready for install is optional,
303  // since we come across interrupts and stack checks elsewhere. However,
304  // not checking may delay installing ready functions, and always checking
305  // would be quite expensive. A good compromise is to first check against
306  // stack limit as a cue for an interrupt signal.
307  Label ok;
308  __ LoadRoot(a4, Heap::kStackLimitRootIndex);
309  __ Branch(&ok, hs, sp, Operand(a4));
310 
311  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
312  GenerateTailCallToReturnedCode(masm);
313 
314  __ bind(&ok);
315  GenerateTailCallToSharedCode(masm);
316 }
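
The compromise described above boils down to one unsigned compare of sp against the stack limit; the runtime path is taken only when sp has dropped below the limit. A sketch of the predicate, assuming a downward-growing stack (ShouldTryInstallOptimizedCode is an illustrative name):

  #include <cstdint>

  // True when the &ok branch above is not taken and kTryInstallOptimizedCode runs.
  bool ShouldTryInstallOptimizedCode(uintptr_t sp, uintptr_t stack_limit) {
    return sp < stack_limit;  // __ Branch(&ok, hs, sp, Operand(a4))
  }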
317 
318 
319 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
320  bool is_api_function,
321  bool create_memento) {
322  // ----------- S t a t e -------------
323  // -- a0 : number of arguments
324  // -- a1 : constructor function
325  // -- a2 : allocation site or undefined
326  // -- ra : return address
327  // -- sp[...]: constructor arguments
328  // -----------------------------------
329 
330  // Should never create mementos for api functions.
331  DCHECK(!is_api_function || !create_memento);
332 
333  Isolate* isolate = masm->isolate();
334 
335  // ----------- S t a t e -------------
336  // -- a0 : number of arguments
337  // -- a1 : constructor function
338  // -- ra : return address
339  // -- sp[...]: constructor arguments
340  // -----------------------------------
341 
342  // Enter a construct frame.
343  {
344  FrameScope scope(masm, StackFrame::CONSTRUCT);
345 
346  if (create_memento) {
347  __ AssertUndefinedOrAllocationSite(a2, a3);
348  __ push(a2);
349  }
350 
351  // Preserve the two incoming parameters on the stack.
352  // Tag arguments count.
353  __ dsll32(a0, a0, 0);
354  __ MultiPushReversed(a0.bit() | a1.bit());
355 
356  Label rt_call, allocated;
357  // Try to allocate the object without transitioning into C code. If any of
358  // the preconditions is not met, the code bails out to the runtime call.
359  if (FLAG_inline_new) {
360  Label undo_allocation;
361  ExternalReference debug_step_in_fp =
362  ExternalReference::debug_step_in_fp_address(isolate);
363  __ li(a2, Operand(debug_step_in_fp));
364  __ ld(a2, MemOperand(a2));
365  __ Branch(&rt_call, ne, a2, Operand(zero_reg));
366 
367  // Load the initial map and verify that it is in fact a map.
368  // a1: constructor function
369  __ ld(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
370  __ JumpIfSmi(a2, &rt_call);
371  __ GetObjectType(a2, a3, t0);
372  __ Branch(&rt_call, ne, t0, Operand(MAP_TYPE));
373 
374  // Check that the constructor is not constructing a JSFunction (see
375  // comments in Runtime_NewObject in runtime.cc), in which case the
376  // initial map's instance type would be JS_FUNCTION_TYPE.
377  // a1: constructor function
378  // a2: initial map
379  __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
380  __ Branch(&rt_call, eq, a3, Operand(JS_FUNCTION_TYPE));
381 
382  if (!is_api_function) {
383  Label allocate;
384  MemOperand bit_field3 = FieldMemOperand(a2, Map::kBitField3Offset);
385  // Check if slack tracking is enabled.
386  __ lwu(a4, bit_field3);
387  __ DecodeField<Map::ConstructionCount>(a6, a4);
388  __ Branch(&allocate,
389  eq,
390  a6,
391  Operand(static_cast<int64_t>(JSFunction::kNoSlackTracking)));
392  // Decrease generous allocation count.
393  __ Dsubu(a4, a4, Operand(1 << Map::ConstructionCount::kShift));
394  __ Branch(USE_DELAY_SLOT,
395  &allocate, ne, a6, Operand(JSFunction::kFinishSlackTracking));
396  __ sw(a4, bit_field3); // In delay slot.
397 
398  __ Push(a1, a2, a1); // a1 = Constructor.
399  __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
400 
401  __ Pop(a1, a2);
402  // Slack tracking counter is kNoSlackTracking after runtime call.
403  DCHECK(JSFunction::kNoSlackTracking == 0);
404  __ mov(a6, zero_reg);
405 
406  __ bind(&allocate);
407  }
408 
409  // Now allocate the JSObject on the heap.
410  // a1: constructor function
411  // a2: initial map
412  __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
413  if (create_memento) {
414  __ Daddu(a3, a3, Operand(AllocationMemento::kSize / kPointerSize));
415  }
416 
417  __ Allocate(a3, t0, t1, t2, &rt_call, SIZE_IN_WORDS);
418 
419  // Allocated the JSObject, now initialize the fields. Map is set to
420  // initial map and properties and elements are set to empty fixed array.
421  // a1: constructor function
422  // a2: initial map
423  // a3: object size (not including memento if create_memento)
424  // t0: JSObject (not tagged)
425  __ LoadRoot(t2, Heap::kEmptyFixedArrayRootIndex);
426  __ mov(t1, t0);
427  __ sd(a2, MemOperand(t1, JSObject::kMapOffset));
428  __ sd(t2, MemOperand(t1, JSObject::kPropertiesOffset));
429  __ sd(t2, MemOperand(t1, JSObject::kElementsOffset));
430  __ Daddu(t1, t1, Operand(3*kPointerSize));
431  DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset);
432  DCHECK_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
433  DCHECK_EQ(2 * kPointerSize, JSObject::kElementsOffset);
434 
435  // Fill all the in-object properties with appropriate filler.
436  // a1: constructor function
437  // a2: initial map
438  // a3: object size (in words, including memento if create_memento)
439  // t0: JSObject (not tagged)
440  // t1: First in-object property of JSObject (not tagged)
441  // a6: slack tracking counter (non-API function case)
442  DCHECK_EQ(3 * kPointerSize, JSObject::kHeaderSize);
443 
444  // Use t3 to hold undefined, which is used in several places below.
445  __ LoadRoot(t3, Heap::kUndefinedValueRootIndex);
446 
447  if (!is_api_function) {
448  Label no_inobject_slack_tracking;
449 
450  // Check if slack tracking is enabled.
451  __ Branch(&no_inobject_slack_tracking,
452  eq,
453  a6,
454  Operand(static_cast<int64_t>(JSFunction::kNoSlackTracking)));
455 
456  // Allocate object with a slack.
457  __ lwu(a0, FieldMemOperand(a2, Map::kInstanceSizesOffset));
458  __ Ext(a0, a0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
459  kBitsPerByte);
460  __ dsll(at, a0, kPointerSizeLog2);
461  __ daddu(a0, t1, at);
462  // a0: offset of first field after pre-allocated fields
463  if (FLAG_debug_code) {
464  __ dsll(at, a3, kPointerSizeLog2);
465  __ Daddu(t2, t0, Operand(at)); // End of object.
466  __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields,
467  a0, Operand(t2));
468  }
469  __ InitializeFieldsWithFiller(t1, a0, t3);
470  // To allow for truncation.
471  __ LoadRoot(t3, Heap::kOnePointerFillerMapRootIndex);
472  // Fill the remaining fields with one pointer filler map.
473 
474  __ bind(&no_inobject_slack_tracking);
475  }
476 
477  if (create_memento) {
478  __ Dsubu(a0, a3, Operand(AllocationMemento::kSize / kPointerSize));
479  __ dsll(a0, a0, kPointerSizeLog2);
480  __ Daddu(a0, t0, Operand(a0)); // End of object.
481  __ InitializeFieldsWithFiller(t1, a0, t3);
482 
483  // Fill in memento fields.
484  // t1: points to the allocated but uninitialized memento.
485  __ LoadRoot(t3, Heap::kAllocationMementoMapRootIndex);
486  DCHECK_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
487  __ sd(t3, MemOperand(t1));
488  __ Daddu(t1, t1, kPointerSize);
489  // Load the AllocationSite.
490  __ ld(t3, MemOperand(sp, 2 * kPointerSize));
491  DCHECK_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
492  __ sd(t3, MemOperand(t1));
493  __ Daddu(t1, t1, kPointerSize);
494  } else {
495  __ dsll(at, a3, kPointerSizeLog2);
496  __ Daddu(a0, t0, Operand(at)); // End of object.
497  __ InitializeFieldsWithFiller(t1, a0, t3);
498  }
499 
500  // Add the object tag to make the JSObject real, so that we can continue
501  // and jump into the continuation code at any time from now on. Any
502  // failures need to undo the allocation, so that the heap is in a
503  // consistent state and verifiable.
504  __ Daddu(t0, t0, Operand(kHeapObjectTag));
505 
506  // Check if a non-empty properties array is needed. Continue with
507  // allocated object if not; fall through to runtime call if it is.
508  // a1: constructor function
509  // t0: JSObject
510  // t1: start of next object (not tagged)
511  __ lbu(a3, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset));
512  // The field instance sizes contains both pre-allocated property fields
513  // and in-object properties.
514  __ lwu(a0, FieldMemOperand(a2, Map::kInstanceSizesOffset));
515  __ Ext(t2, a0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
516  kBitsPerByte);
517  __ Daddu(a3, a3, Operand(t2));
518  __ Ext(t2, a0, Map::kInObjectPropertiesByte * kBitsPerByte,
519  kBitsPerByte);
520  __ dsubu(a3, a3, t2);
521 
522  // Done if no extra properties are to be allocated.
523  __ Branch(&allocated, eq, a3, Operand(zero_reg));
524  __ Assert(greater_equal, kPropertyAllocationCountFailed,
525  a3, Operand(zero_reg));
526 
527  // Scale the number of elements by pointer size and add the header for
528  // FixedArrays to the start of the next object calculation from above.
529  // a1: constructor
530  // a3: number of elements in properties array
531  // t0: JSObject
532  // t1: start of next object
533  __ Daddu(a0, a3, Operand(FixedArray::kHeaderSize / kPointerSize));
534  __ Allocate(
535  a0,
536  t1,
537  t2,
538  a2,
539  &undo_allocation,
540  static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));
541 
542  // Initialize the FixedArray.
543  // a1: constructor
544  // a3: number of elements in properties array (untagged)
545  // t0: JSObject
546  // t1: start of next object
547  __ LoadRoot(t2, Heap::kFixedArrayMapRootIndex);
548  __ mov(a2, t1);
549  __ sd(t2, MemOperand(a2, JSObject::kMapOffset));
550  // Tag number of elements.
551  __ dsll32(a0, a3, 0);
552  __ sd(a0, MemOperand(a2, FixedArray::kLengthOffset));
553  __ Daddu(a2, a2, Operand(2 * kPointerSize));
554 
555  DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset);
556  DCHECK_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
557 
558  // Initialize the fields to undefined.
559  // a1: constructor
560  // a2: First element of FixedArray (not tagged)
561  // a3: number of elements in properties array
562  // t0: JSObject
563  // t1: FixedArray (not tagged)
564  __ dsll(a7, a3, kPointerSizeLog2);
565  __ daddu(t2, a2, a7); // End of object.
566  DCHECK_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
567  { Label loop, entry;
568  if (!is_api_function || create_memento) {
569  __ LoadRoot(t3, Heap::kUndefinedValueRootIndex);
570  } else if (FLAG_debug_code) {
571  __ LoadRoot(a6, Heap::kUndefinedValueRootIndex);
572  __ Assert(eq, kUndefinedValueNotLoaded, t3, Operand(a6));
573  }
574  __ jmp(&entry);
575  __ bind(&loop);
576  __ sd(t3, MemOperand(a2));
577  __ daddiu(a2, a2, kPointerSize);
578  __ bind(&entry);
579  __ Branch(&loop, less, a2, Operand(t2));
580  }
581 
582  // Store the initialized FixedArray into the properties field of
583  // the JSObject.
584  // a1: constructor function
585  // t0: JSObject
586  // t1: FixedArray (not tagged)
587  __ Daddu(t1, t1, Operand(kHeapObjectTag)); // Add the heap tag.
588  __ sd(t1, FieldMemOperand(t0, JSObject::kPropertiesOffset));
589 
590  // Continue with JSObject being successfully allocated.
591  // a1: constructor function
592  // t0: JSObject
593  __ jmp(&allocated);
594 
595  // Undo the setting of the new top so that the heap is verifiable. For
596  // example, the map's unused properties potentially do not match the
597  // allocated object's unused properties.
598  // t0: JSObject (previous new top)
599  __ bind(&undo_allocation);
600  __ UndoAllocationInNewSpace(t0, t1);
601  }
602 
603  // Allocate the new receiver object using the runtime call.
604  // a1: constructor function
605  __ bind(&rt_call);
606  if (create_memento) {
607  // Get the cell or allocation site.
608  __ ld(a2, MemOperand(sp, 2 * kPointerSize));
609  __ push(a2);
610  }
611 
612  __ push(a1); // Argument for Runtime_NewObject.
613  if (create_memento) {
614  __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2);
615  } else {
616  __ CallRuntime(Runtime::kNewObject, 1);
617  }
618  __ mov(t0, v0);
619 
620  // If we ended up using the runtime, and we want a memento, then the
621  // runtime call made it for us, and we shouldn't do create count
622  // increment.
623  Label count_incremented;
624  if (create_memento) {
625  __ jmp(&count_incremented);
626  }
627 
628  // Receiver for constructor call allocated.
629  // t0: JSObject
630  __ bind(&allocated);
631 
632  if (create_memento) {
633  __ ld(a2, MemOperand(sp, kPointerSize * 2));
634  __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
635  __ Branch(&count_incremented, eq, a2, Operand(t1));
636  // a2 is an AllocationSite. We are creating a memento from it, so we
637  // need to increment the memento create count.
638  __ ld(a3, FieldMemOperand(a2,
639  AllocationSite::kPretenureCreateCountOffset));
640  __ Daddu(a3, a3, Operand(Smi::FromInt(1)));
641  __ sd(a3, FieldMemOperand(a2,
642  AllocationSite::kPretenureCreateCountOffset));
643  __ bind(&count_incremented);
644  }
645 
646  __ Push(t0, t0);
647 
648  // Reload the number of arguments from the stack.
649  // sp[0]: receiver
650  // sp[1]: receiver
651  // sp[2]: constructor function
652  // sp[3]: number of arguments (smi-tagged)
653  __ ld(a1, MemOperand(sp, 2 * kPointerSize));
654  __ ld(a3, MemOperand(sp, 3 * kPointerSize));
655 
656  // Set up pointer to last argument.
657  __ Daddu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
658 
659  // Set up number of arguments for function call below.
660  __ SmiUntag(a0, a3);
661 
662  // Copy arguments and receiver to the expression stack.
663  // a0: number of arguments
664  // a1: constructor function
665  // a2: address of last argument (caller sp)
666  // a3: number of arguments (smi-tagged)
667  // sp[0]: receiver
668  // sp[1]: receiver
669  // sp[2]: constructor function
670  // sp[3]: number of arguments (smi-tagged)
671  Label loop, entry;
672  __ SmiUntag(a3);
673  __ jmp(&entry);
674  __ bind(&loop);
675  __ dsll(a4, a3, kPointerSizeLog2);
676  __ Daddu(a4, a2, Operand(a4));
677  __ ld(a5, MemOperand(a4));
678  __ push(a5);
679  __ bind(&entry);
680  __ Daddu(a3, a3, Operand(-1));
681  __ Branch(&loop, greater_equal, a3, Operand(zero_reg));
682 
683  // Call the function.
684  // a0: number of arguments
685  // a1: constructor function
686  if (is_api_function) {
687  __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
688  Handle<Code> code =
689  masm->isolate()->builtins()->HandleApiCallConstruct();
690  __ Call(code, RelocInfo::CODE_TARGET);
691  } else {
692  ParameterCount actual(a0);
693  __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
694  }
695 
696  // Store offset of return address for deoptimizer.
697  if (!is_api_function) {
698  masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
699  }
700 
701  // Restore context from the frame.
702  __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
703 
704  // If the result is an object (in the ECMA sense), we should get rid
705  // of the receiver and use the result; see ECMA-262 section 13.2.2-7
706  // on page 74.
707  Label use_receiver, exit;
708 
709  // If the result is a smi, it is *not* an object in the ECMA sense.
710  // v0: result
711  // sp[0]: receiver (newly allocated object)
712  // sp[1]: constructor function
713  // sp[2]: number of arguments (smi-tagged)
714  __ JumpIfSmi(v0, &use_receiver);
715 
716  // If the type of the result (stored in its map) is less than
717  // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
718  __ GetObjectType(v0, a1, a3);
719  __ Branch(&exit, greater_equal, a3, Operand(FIRST_SPEC_OBJECT_TYPE));
720 
721  // Throw away the result of the constructor invocation and use the
722  // on-stack receiver as the result.
723  __ bind(&use_receiver);
724  __ ld(v0, MemOperand(sp));
725 
726  // Remove receiver from the stack, remove caller arguments, and
727  // return.
728  __ bind(&exit);
729  // v0: result
730  // sp[0]: receiver (newly allocated object)
731  // sp[1]: constructor function
732  // sp[2]: number of arguments (smi-tagged)
733  __ ld(a1, MemOperand(sp, 2 * kPointerSize));
734 
735  // Leave construct frame.
736  }
737 
738  __ SmiScale(a4, a1, kPointerSizeLog2);
739  __ Daddu(sp, sp, a4);
740  __ Daddu(sp, sp, kPointerSize);
741  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
742  __ Ret();
743 }
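
Several steps in this stub lean on the 64-bit smi encoding: dsll32(a0, a0, 0) tags the argument count and SmiScale(a4, a1, kPointerSizeLog2) turns a tagged count back into a byte offset. A sketch of that encoding (standalone helpers, assuming the payload lives in the upper 32 bits as on MIPS64):

  #include <cstdint>

  int64_t SmiTag(int64_t value) {   // __ dsll32(reg, reg, 0)
    return value << 32;             // payload in the upper word, zero tag below
  }
  int64_t SmiUntag(int64_t smi) {   // arithmetic shift restores the value
    return smi >> 32;
  }
  int64_t SmiScale(int64_t smi, int shift) {  // untag and scale in one step
    return (smi >> 32) << shift;              // e.g. shift == kPointerSizeLog2
  }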
744 
745 
746 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
747  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
748 }
749 
750 
751 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
752  Generate_JSConstructStubHelper(masm, true, false);
753 }
754 
755 
756 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
757  bool is_construct) {
758  // Called from JSEntryStub::GenerateBody
759 
760  // ----------- S t a t e -------------
761  // -- a0: code entry
762  // -- a1: function
763  // -- a2: receiver_pointer
764  // -- a3: argc
765  // -- s0: argv
766  // -----------------------------------
768  // Clear the context before we push it when entering the JS frame.
769  __ mov(cp, zero_reg);
770 
771  // Enter an internal frame.
772  {
773  FrameScope scope(masm, StackFrame::INTERNAL);
774 
775  // Set up the context from the function argument.
776  __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
777 
778  // Push the function and the receiver onto the stack.
779  __ Push(a1, a2);
780 
781  // Copy arguments to the stack in a loop.
782  // a3: argc
783  // s0: argv, i.e. points to first arg
784  Label loop, entry;
785  // TODO(plind): At least on simulator, argc in a3 is an int32_t with junk
786  // in upper bits. Should fix the root cause, rather than use below
787  // workaround to clear upper bits.
788  __ dsll32(a3, a3, 0); // int32_t -> int64_t.
789  __ dsrl32(a3, a3, 0);
790  __ dsll(a4, a3, kPointerSizeLog2);
791  __ daddu(a6, s0, a4);
792  __ b(&entry);
793  __ nop(); // Branch delay slot nop.
794  // a6 points past last arg.
795  __ bind(&loop);
796  __ ld(a4, MemOperand(s0)); // Read next parameter.
797  __ daddiu(s0, s0, kPointerSize);
798  __ ld(a4, MemOperand(a4)); // Dereference handle.
799  __ push(a4); // Push parameter.
800  __ bind(&entry);
801  __ Branch(&loop, ne, s0, Operand(a6));
802 
803  // Initialize all JavaScript callee-saved registers, since they will be seen
804  // by the garbage collector as part of handlers.
805  __ LoadRoot(a4, Heap::kUndefinedValueRootIndex);
806  __ mov(s1, a4);
807  __ mov(s2, a4);
808  __ mov(s3, a4);
809  __ mov(s4, a4);
810  __ mov(s5, a4);
811  // s6 holds the root address. Do not clobber.
812  // s7 is cp. Do not init.
813 
814  // Invoke the code and pass argc as a0.
815  __ mov(a0, a3);
816  if (is_construct) {
817  // No type feedback cell is available
818  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
819  CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
820  __ CallStub(&stub);
821  } else {
822  ParameterCount actual(a0);
823  __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
824  }
825 
826  // Leave internal frame.
827  }
828  __ Jump(ra);
829 }
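
The copy loop above loads each argv slot twice because argv holds handle locations rather than objects: the first ld fetches the handle, the second dereferences it. A C++ model of the loop (types and names are illustrative):

  #include <cstdint>
  #include <vector>

  using Address = uint64_t;

  void PushArguments(Address* const* argv, int argc,
                     std::vector<Address>* js_stack) {
    for (int i = 0; i < argc; i++) {
      Address* handle = argv[i];     // __ ld(a4, MemOperand(s0))
      js_stack->push_back(*handle);  // __ ld(a4, MemOperand(a4)); __ push(a4)
    }
  }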
830 
831 
832 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
833  Generate_JSEntryTrampolineHelper(masm, false);
834 }
835 
836 
837 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
838  Generate_JSEntryTrampolineHelper(masm, true);
839 }
840 
841 
842 void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
843  CallRuntimePassFunction(masm, Runtime::kCompileLazy);
844  GenerateTailCallToReturnedCode(masm);
845 }
846 
847 
848 static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
849  FrameScope scope(masm, StackFrame::INTERNAL);
850  // Push a copy of the function onto the stack.
851  // Push function as parameter to the runtime call.
852  __ Push(a1, a1);
853  // Whether to compile in a background thread.
854  __ Push(masm->isolate()->factory()->ToBoolean(concurrent));
855 
856  __ CallRuntime(Runtime::kCompileOptimized, 2);
857  // Restore receiver.
858  __ Pop(a1);
859 }
860 
861 
862 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
863  CallCompileOptimized(masm, false);
864  GenerateTailCallToReturnedCode(masm);
865 }
866 
867 
868 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
869  CallCompileOptimized(masm, true);
870  GenerateTailCallToReturnedCode(masm);
871 }
872 
873 
874 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
875  // For now, we are relying on the fact that make_code_young doesn't do any
876  // garbage collection which allows us to save/restore the registers without
877  // worrying about which of them contain pointers. We also don't build an
878  // internal frame to make the code faster, since we shouldn't have to do stack
879  // crawls in MakeCodeYoung. This seems a bit fragile.
880 
881  // Set a0 to point to the head of the PlatformCodeAge sequence.
882  __ Dsubu(a0, a0,
883  Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));
884 
885  // The following registers must be saved and restored when calling through to
886  // the runtime:
887  // a0 - contains return address (beginning of patch sequence)
888  // a1 - isolate
889  RegList saved_regs =
890  (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
891  FrameScope scope(masm, StackFrame::MANUAL);
892  __ MultiPush(saved_regs);
893  __ PrepareCallCFunction(2, 0, a2);
894  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
895  __ CallCFunction(
896  ExternalReference::get_make_code_young_function(masm->isolate()), 2);
897  __ MultiPop(saved_regs);
898  __ Jump(a0);
899 }
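
The saved_regs expression is plain bit arithmetic over register codes. A compilable sketch using the MIPS integer register numbers (a0=4, a1=5, sp=29, fp=30, ra=31; the constants are spelled out here only for illustration):

  #include <cstdint>

  typedef uint32_t RegList;
  constexpr RegList Bit(int code) { return 1u << code; }

  constexpr int kA0 = 4, kA1 = 5, kSp = 29, kFp = 30, kRa = 31;
  constexpr RegList saved_regs =
      (Bit(kA0) | Bit(kA1) | Bit(kRa) | Bit(kFp)) & ~Bit(kSp);
  static_assert((saved_regs & Bit(kSp)) == 0, "sp is never multi-pushed");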
900 
901 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
902 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
903  MacroAssembler* masm) { \
904  GenerateMakeCodeYoungAgainCommon(masm); \
905 } \
906 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
907  MacroAssembler* masm) { \
908  GenerateMakeCodeYoungAgainCommon(masm); \
909 }
910 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
911 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
912 
913 
914 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
915  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
916  // that make_code_young doesn't do any garbage collection which allows us to
917  // save/restore the registers without worrying about which of them contain
918  // pointers.
919 
920  // Set a0 to point to the head of the PlatformCodeAge sequence.
921  __ Dsubu(a0, a0,
922  Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));
923 
924  // The following registers must be saved and restored when calling through to
925  // the runtime:
926  // a0 - contains return address (beginning of patch sequence)
927  // a1 - isolate
928  RegList saved_regs =
929  (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
930  FrameScope scope(masm, StackFrame::MANUAL);
931  __ MultiPush(saved_regs);
932  __ PrepareCallCFunction(2, 0, a2);
933  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
934  __ CallCFunction(
935  ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
936  2);
937  __ MultiPop(saved_regs);
938 
939  // Perform prologue operations usually performed by the young code stub.
940  __ Push(ra, fp, cp, a1);
941  __ Daddu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
942 
943  // Jump to point after the code-age stub.
944  __ Daddu(a0, a0, Operand((kNoCodeAgeSequenceLength)));
945  __ Jump(a0);
946 }
947 
948 
949 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
950  GenerateMakeCodeYoungAgainCommon(masm);
951 }
952 
953 
954 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
955  SaveFPRegsMode save_doubles) {
956  {
957  FrameScope scope(masm, StackFrame::INTERNAL);
958 
959  // Preserve registers across notification, this is important for compiled
960  // stubs that tail call the runtime on deopts passing their parameters in
961  // registers.
962  __ MultiPush(kJSCallerSaved | kCalleeSaved);
963  // Pass the function and deoptimization type to the runtime system.
964  __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
965  __ MultiPop(kJSCallerSaved | kCalleeSaved);
966  }
967 
968  __ Daddu(sp, sp, Operand(kPointerSize)); // Ignore state
969  __ Jump(ra); // Jump to miss handler
970 }
971 
972 
973 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
974  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
975 }
976 
977 
978 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
979  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
980 }
981 
982 
983 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
984  Deoptimizer::BailoutType type) {
985  {
986  FrameScope scope(masm, StackFrame::INTERNAL);
987  // Pass the function and deoptimization type to the runtime system.
988  __ li(a0, Operand(Smi::FromInt(static_cast<int>(type))));
989  __ push(a0);
990  __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
991  }
992 
993  // Get the full codegen state from the stack and untag it -> a6.
994  __ ld(a6, MemOperand(sp, 0 * kPointerSize));
995  __ SmiUntag(a6);
996  // Switch on the state.
997  Label with_tos_register, unknown_state;
998  __ Branch(&with_tos_register,
999  ne, a6, Operand(FullCodeGenerator::NO_REGISTERS));
1000  __ Ret(USE_DELAY_SLOT);
1001  // Safe to fill delay slot: Daddu will emit one instruction.
1002  __ Daddu(sp, sp, Operand(1 * kPointerSize)); // Remove state.
1003 
1004  __ bind(&with_tos_register);
1005  __ ld(v0, MemOperand(sp, 1 * kPointerSize));
1006  __ Branch(&unknown_state, ne, a6, Operand(FullCodeGenerator::TOS_REG));
1007 
1008  __ Ret(USE_DELAY_SLOT);
1009  // Safe to fill delay slot: Daddu will emit one instruction.
1010  __ Daddu(sp, sp, Operand(2 * kPointerSize)); // Remove state.
1011 
1012  __ bind(&unknown_state);
1013  __ stop("no cases left");
1014 }
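
The dispatch above pops the full-codegen state from the stack and, in the TOS_REG case, also reloads the saved top-of-stack value into v0. A C++ model of the same control flow (a sketch; the state is assumed already untagged):

  #include <cstdint>
  #include <vector>

  enum State { NO_REGISTERS, TOS_REG };  // mirrors FullCodeGenerator's states

  // stack.back() models sp[0]; returns the value left in v0.
  uint64_t FinishNotifyDeoptimized(std::vector<uint64_t>& stack, uint64_t v0) {
    State state = static_cast<State>(stack.back());
    stack.pop_back();     // Remove state.
    if (state == TOS_REG) {
      v0 = stack.back();  // __ ld(v0, MemOperand(sp, 1 * kPointerSize))
      stack.pop_back();
    }
    return v0;
  }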
1015 
1016 
1017 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1018  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
1019 }
1020 
1021 
1022 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
1023  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
1024 }
1025 
1026 
1027 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
1028  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
1029 }
1030 
1031 
1032 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1033  // Lookup the function in the JavaScript frame.
1034  __ ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1035  {
1036  FrameScope scope(masm, StackFrame::INTERNAL);
1037  // Pass function as argument.
1038  __ push(a0);
1039  __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
1040  }
1041 
1042  // If the code object is null, just return to the unoptimized code.
1043  __ Ret(eq, v0, Operand(Smi::FromInt(0)));
1044 
1045  // Load deoptimization data from the code object.
1046  // <deopt_data> = <code>[#deoptimization_data_offset]
1047  __ ld(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));
1048 
1049  // Load the OSR entrypoint offset from the deoptimization data.
1050  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
1051  __ ld(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
1052  DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
1053  __ SmiUntag(a1);
1054 
1055  // Compute the target address = code_obj + header_size + osr_offset
1056  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
1057  __ daddu(v0, v0, a1);
1058  __ daddiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);
1059 
1060  // And "return" to the OSR entry point of the function.
1061  __ Ret();
1062 }
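
The final arithmetic folds the heap-object tag into the header-size constant. A sketch with V8's tag value of 1 (OsrEntryAddress is an illustrative name):

  #include <cstdint>

  constexpr int kHeapObjectTag = 1;

  // <entry_addr> = <code_obj> + #header_size + <osr_offset>, untagged.
  uint64_t OsrEntryAddress(uint64_t tagged_code_obj, int64_t osr_offset,
                           int code_header_size /* Code::kHeaderSize */) {
    return tagged_code_obj + osr_offset + code_header_size - kHeapObjectTag;
  }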
1063 
1064 
1065 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
1066  // We check the stack limit as an indicator that recompilation might be done.
1067  Label ok;
1068  __ LoadRoot(at, Heap::kStackLimitRootIndex);
1069  __ Branch(&ok, hs, sp, Operand(at));
1070  {
1071  FrameScope scope(masm, StackFrame::INTERNAL);
1072  __ CallRuntime(Runtime::kStackGuard, 0);
1073  }
1074  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
1075  RelocInfo::CODE_TARGET);
1076 
1077  __ bind(&ok);
1078  __ Ret();
1079 }
1080 
1081 
1082 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
1083  // 1. Make sure we have at least one argument.
1084  // a0: actual number of arguments
1085  { Label done;
1086  __ Branch(&done, ne, a0, Operand(zero_reg));
1087  __ LoadRoot(a6, Heap::kUndefinedValueRootIndex);
1088  __ push(a6);
1089  __ Daddu(a0, a0, Operand(1));
1090  __ bind(&done);
1091  }
1092 
1093  // 2. Get the function to call (passed as receiver) from the stack, check
1094  // if it is a function.
1095  // a0: actual number of arguments
1096  Label slow, non_function;
1097  __ dsll(at, a0, kPointerSizeLog2);
1098  __ daddu(at, sp, at);
1099  __ ld(a1, MemOperand(at));
1100  __ JumpIfSmi(a1, &non_function);
1101  __ GetObjectType(a1, a2, a2);
1102  __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE));
1103 
1104  // 3a. Patch the first argument if necessary when calling a function.
1105  // a0: actual number of arguments
1106  // a1: function
1107  Label shift_arguments;
1108  __ li(a4, Operand(0, RelocInfo::NONE32)); // Indicate regular JS_FUNCTION.
1109  { Label convert_to_object, use_global_proxy, patch_receiver;
1110  // Change context eagerly in case we need the global receiver.
1111  __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
1112 
1113  // Do not transform the receiver for strict mode functions.
1114  __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1115  __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kStrictModeByteOffset));
1116  __ And(a7, a3, Operand(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
1117  __ Branch(&shift_arguments, ne, a7, Operand(zero_reg));
1118 
1119  // Do not transform the receiver for native (Compilerhints already in a3).
1120  __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kNativeByteOffset));
1121  __ And(a7, a3, Operand(1 << SharedFunctionInfo::kNativeBitWithinByte));
1122  __ Branch(&shift_arguments, ne, a7, Operand(zero_reg));
1123 
1124  // Compute the receiver in sloppy mode.
1125  // Load first argument in a2. a2 = -kPointerSize(sp + n_args << 2).
1126  __ dsll(at, a0, kPointerSizeLog2);
1127  __ daddu(a2, sp, at);
1128  __ ld(a2, MemOperand(a2, -kPointerSize));
1129  // a0: actual number of arguments
1130  // a1: function
1131  // a2: first argument
1132  __ JumpIfSmi(a2, &convert_to_object, a6);
1133 
1134  __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
1135  __ Branch(&use_global_proxy, eq, a2, Operand(a3));
1136  __ LoadRoot(a3, Heap::kNullValueRootIndex);
1137  __ Branch(&use_global_proxy, eq, a2, Operand(a3));
1138 
1140  __ GetObjectType(a2, a3, a3);
1141  __ Branch(&shift_arguments, ge, a3, Operand(FIRST_SPEC_OBJECT_TYPE));
1142 
1143  __ bind(&convert_to_object);
1144  // Enter an internal frame in order to preserve argument count.
1145  {
1146  FrameScope scope(masm, StackFrame::INTERNAL);
1147  __ SmiTag(a0);
1148  __ Push(a0, a2);
1149  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1150  __ mov(a2, v0);
1151 
1152  __ pop(a0);
1153  __ SmiUntag(a0);
1154  // Leave internal frame.
1155  }
1156  // Restore the function to a1, and the flag to a4.
1157  __ dsll(at, a0, kPointerSizeLog2);
1158  __ daddu(at, sp, at);
1159  __ ld(a1, MemOperand(at));
1160  __ Branch(USE_DELAY_SLOT, &patch_receiver);
1161  __ li(a4, Operand(0, RelocInfo::NONE32));
1162 
1163  __ bind(&use_global_proxy);
1164  __ ld(a2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
1165  __ ld(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset));
1166 
1167  __ bind(&patch_receiver);
1168  __ dsll(at, a0, kPointerSizeLog2);
1169  __ daddu(a3, sp, at);
1170  __ sd(a2, MemOperand(a3, -kPointerSize));
1171 
1172  __ Branch(&shift_arguments);
1173  }
1174 
1175  // 3b. Check for function proxy.
1176  __ bind(&slow);
1177  __ li(a4, Operand(1, RelocInfo::NONE32)); // Indicate function proxy.
1178  __ Branch(&shift_arguments, eq, a2, Operand(JS_FUNCTION_PROXY_TYPE));
1179 
1180  __ bind(&non_function);
1181  __ li(a4, Operand(2, RelocInfo::NONE32)); // Indicate non-function.
1182 
1183  // 3c. Patch the first argument when calling a non-function. The
1184  // CALL_NON_FUNCTION builtin expects the non-function callee as
1185  // receiver, so overwrite the first argument which will ultimately
1186  // become the receiver.
1187  // a0: actual number of arguments
1188  // a1: function
1189  // a4: call type (0: JS function, 1: function proxy, 2: non-function)
1190  __ dsll(at, a0, kPointerSizeLog2);
1191  __ daddu(a2, sp, at);
1192  __ sd(a1, MemOperand(a2, -kPointerSize));
1193 
1194  // 4. Shift arguments and return address one slot down on the stack
1195  // (overwriting the original receiver). Adjust argument count to make
1196  // the original first argument the new receiver.
1197  // a0: actual number of arguments
1198  // a1: function
1199  // a4: call type (0: JS function, 1: function proxy, 2: non-function)
1200  __ bind(&shift_arguments);
1201  { Label loop;
1202  // Calculate the copy start address (destination). Copy end address is sp.
1203  __ dsll(at, a0, kPointerSizeLog2);
1204  __ daddu(a2, sp, at);
1205 
1206  __ bind(&loop);
1207  __ ld(at, MemOperand(a2, -kPointerSize));
1208  __ sd(at, MemOperand(a2));
1209  __ Dsubu(a2, a2, Operand(kPointerSize));
1210  __ Branch(&loop, ne, a2, Operand(sp));
1211  // Adjust the actual number of arguments and remove the top element
1212  // (which is a copy of the last argument).
1213  __ Dsubu(a0, a0, Operand(1));
1214  __ Pop();
1215  }
1216 
1217  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
1218  // or a function proxy via CALL_FUNCTION_PROXY.
1219  // a0: actual number of arguments
1220  // a1: function
1221  // a4: call type (0: JS function, 1: function proxy, 2: non-function)
1222  { Label function, non_proxy;
1223  __ Branch(&function, eq, a4, Operand(zero_reg));
1224  // Expected number of arguments is 0 for CALL_NON_FUNCTION.
1225  __ mov(a2, zero_reg);
1226  __ Branch(&non_proxy, ne, a4, Operand(1));
1227 
1228  __ push(a1); // Re-add proxy object as additional argument.
1229  __ Daddu(a0, a0, Operand(1));
1230  __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
1231  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1232  RelocInfo::CODE_TARGET);
1233 
1234  __ bind(&non_proxy);
1235  __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION);
1236  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1237  RelocInfo::CODE_TARGET);
1238  __ bind(&function);
1239  }
1240 
1241  // 5b. Get the code to call from the function and check that the number of
1242  // expected arguments matches what we're providing. If so, jump
1243  // (tail-call) to the code in register a3 without checking arguments.
1244  // a0: actual number of arguments
1245  // a1: function
1246  __ ld(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1247  // The argument count is stored as int32_t on 64-bit platforms.
1248  // TODO(plind): Smi on 32-bit platforms.
1249  __ lw(a2,
1250  FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
1251  // Check formal and actual parameter counts.
1252  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1253  RelocInfo::CODE_TARGET, ne, a2, Operand(a0));
1254 
1255  __ ld(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
1256  ParameterCount expected(0);
1257  __ InvokeCode(a3, expected, expected, JUMP_FUNCTION, NullCallWrapper());
1258 }
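
Step 4 above slides every slot down by one so that the duplicated slot disappears and the original first argument becomes the receiver. A C++ model with v[0] as the stack top (sp) and v[argc] as the receiver slot (a sketch, not V8 code):

  #include <cstdint>
  #include <vector>

  void ShiftArguments(std::vector<uint64_t>& v, int& argc) {
    for (int i = argc; i >= 1; i--) {
      v[i] = v[i - 1];   // the ld/sd copy loop, highest slot first
    }
    v.erase(v.begin());  // __ Pop(): drop the duplicated top element
    argc -= 1;           // __ Dsubu(a0, a0, Operand(1))
  }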
1259 
1260 
1261 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
1262  const int kIndexOffset =
1263  StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
1264  const int kLimitOffset =
1265  StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
1266  const int kArgsOffset = 2 * kPointerSize;
1267  const int kRecvOffset = 3 * kPointerSize;
1268  const int kFunctionOffset = 4 * kPointerSize;
1269 
1270  {
1271  FrameScope frame_scope(masm, StackFrame::INTERNAL);
1272  __ ld(a0, MemOperand(fp, kFunctionOffset)); // Get the function.
1273  __ push(a0);
1274  __ ld(a0, MemOperand(fp, kArgsOffset)); // Get the args array.
1275  __ push(a0);
1276  // Returns (in v0) number of arguments to copy to stack as Smi.
1277  __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
1278 
1279  // Check the stack for overflow. We are not trying to catch
1280  // interruptions (e.g. debug break and preemption) here, so the "real stack
1281  // limit" is checked.
1282  Label okay;
1283  __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
1284  // Make a2 the space we have left. The stack might already be overflowed
1285  // here which will cause a2 to become negative.
1286  __ dsubu(a2, sp, a2);
1287  // Check if the arguments will overflow the stack.
1288  __ SmiScale(a7, v0, kPointerSizeLog2);
1289  __ Branch(&okay, gt, a2, Operand(a7)); // Signed comparison.
1290 
1291  // Out of stack space.
1292  __ ld(a1, MemOperand(fp, kFunctionOffset));
1293  __ Push(a1, v0);
1294  __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
1295  // End of stack check.
1296 
1297  // Push current limit and index.
1298  __ bind(&okay);
1299  __ mov(a1, zero_reg);
1300  __ Push(v0, a1); // Limit and initial index.
1301 
1302  // Get the receiver.
1303  __ ld(a0, MemOperand(fp, kRecvOffset));
1304 
1305  // Check that the function is a JS function (otherwise it must be a proxy).
1306  Label push_receiver;
1307  __ ld(a1, MemOperand(fp, kFunctionOffset));
1308  __ GetObjectType(a1, a2, a2);
1309  __ Branch(&push_receiver, ne, a2, Operand(JS_FUNCTION_TYPE));
1310 
1311  // Change context eagerly to get the right global object if necessary.
1312  __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
1313  // Load the shared function info while the function is still in a1.
1314  __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1315 
1316  // Compute the receiver.
1317  // Do not transform the receiver for strict mode functions.
1318  Label call_to_object, use_global_proxy;
1319  __ lbu(a7, FieldMemOperand(a2, SharedFunctionInfo::kStrictModeByteOffset));
1320  __ And(a7, a7, Operand(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
1321  __ Branch(&push_receiver, ne, a7, Operand(zero_reg));
1322 
1323  // Do not transform the receiver for native (Compilerhints already in a2).
1324  __ lbu(a7, FieldMemOperand(a2, SharedFunctionInfo::kNativeByteOffset));
1325  __ And(a7, a7, Operand(1 << SharedFunctionInfo::kNativeBitWithinByte));
1326  __ Branch(&push_receiver, ne, a7, Operand(zero_reg));
1327 
1328  // Compute the receiver in sloppy mode.
1329  __ JumpIfSmi(a0, &call_to_object);
1330  __ LoadRoot(a1, Heap::kNullValueRootIndex);
1331  __ Branch(&use_global_proxy, eq, a0, Operand(a1));
1332  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
1333  __ Branch(&use_global_proxy, eq, a0, Operand(a2));
1334 
1335  // Check if the receiver is already a JavaScript object.
1336  // a0: receiver
1337  STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1338  __ GetObjectType(a0, a1, a1);
1339  __ Branch(&push_receiver, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
1340 
1341  // Convert the receiver to a regular object.
1342  // a0: receiver
1343  __ bind(&call_to_object);
1344  __ push(a0);
1345  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1346  __ mov(a0, v0); // Put object in a0 to match other paths to push_receiver.
1347  __ Branch(&push_receiver);
1348 
1349  __ bind(&use_global_proxy);
1350  __ ld(a0, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
1351  __ ld(a0, FieldMemOperand(a0, GlobalObject::kGlobalProxyOffset));
1352 
1353  // Push the receiver.
1354  // a0: receiver
1355  __ bind(&push_receiver);
1356  __ push(a0);
1357 
1358  // Copy all arguments from the array to the stack.
1359  Label entry, loop;
1360  __ ld(a0, MemOperand(fp, kIndexOffset));
1361  __ Branch(&entry);
1362 
1363  // Load the current argument from the arguments array and push it to the
1364  // stack.
1365  // a0: current argument index
1366  __ bind(&loop);
1367  __ ld(a1, MemOperand(fp, kArgsOffset));
1368  __ Push(a1, a0);
1369 
1370  // Call the runtime to access the property in the arguments array.
1371  __ CallRuntime(Runtime::kGetProperty, 2);
1372  __ push(v0);
1373 
1374  // Use inline caching to access the arguments.
1375  __ ld(a0, MemOperand(fp, kIndexOffset));
1376  __ Daddu(a0, a0, Operand(Smi::FromInt(1)));
1377  __ sd(a0, MemOperand(fp, kIndexOffset));
1378 
1379  // Test if the copy loop has finished copying all the elements from the
1380  // arguments object.
1381  __ bind(&entry);
1382  __ ld(a1, MemOperand(fp, kLimitOffset));
1383  __ Branch(&loop, ne, a0, Operand(a1));
1384 
1385  // Call the function.
1386  Label call_proxy;
1387  ParameterCount actual(a0);
1388  __ SmiUntag(a0);
1389  __ ld(a1, MemOperand(fp, kFunctionOffset));
1390  __ GetObjectType(a1, a2, a2);
1391  __ Branch(&call_proxy, ne, a2, Operand(JS_FUNCTION_TYPE));
1392 
1393  __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
1394 
1395  frame_scope.GenerateLeaveFrame();
1396  __ Ret(USE_DELAY_SLOT);
1397  __ Daddu(sp, sp, Operand(3 * kPointerSize)); // In delay slot.
1398 
1399  // Call the function proxy.
1400  __ bind(&call_proxy);
1401  __ push(a1); // Add function proxy as last argument.
1402  __ Daddu(a0, a0, Operand(1));
1403  __ li(a2, Operand(0, RelocInfo::NONE32));
1404  __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
1405  __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1406  RelocInfo::CODE_TARGET);
1407  // Tear down the internal frame and remove function, receiver and args.
1408  }
1409 
1410  __ Ret(USE_DELAY_SLOT);
1411  __ Daddu(sp, sp, Operand(3 * kPointerSize)); // In delay slot.
1412 }
1413 
1414 
1415 static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
1416  Label* stack_overflow) {
1417  // ----------- S t a t e -------------
1418  // -- a0 : actual number of arguments
1419  // -- a1 : function (passed through to callee)
1420  // -- a2 : expected number of arguments
1421  // -----------------------------------
1422  // Check the stack for overflow. We are not trying to catch
1423  // interruptions (e.g. debug break and preemption) here, so the "real stack
1424  // limit" is checked.
1425  __ LoadRoot(a5, Heap::kRealStackLimitRootIndex);
1426  // Make a5 the space we have left. The stack might already be overflowed
1427  // here which will cause a5 to become negative.
1428  __ dsubu(a5, sp, a5);
1429  // Check if the arguments will overflow the stack.
1430  __ dsll(at, a2, kPointerSizeLog2);
1431  // Signed comparison.
1432  __ Branch(stack_overflow, le, a5, Operand(at));
1433 }
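
The comparison is signed so that an already-overflowed stack (sp below the real limit, making the difference negative) also takes the overflow branch. A sketch of the predicate (AdaptorWouldOverflow is an illustrative name):

  #include <cstdint>

  constexpr int kPointerSizeLog2 = 3;

  bool AdaptorWouldOverflow(uintptr_t sp, uintptr_t real_stack_limit,
                            int64_t expected_arg_count) {
    int64_t space_left =
        static_cast<int64_t>(sp) - static_cast<int64_t>(real_stack_limit);
    return space_left <= (expected_arg_count << kPointerSizeLog2);  // branch taken
  }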
1434 
1435 
1436 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1437  // __ sll(a0, a0, kSmiTagSize);
1438  __ dsll32(a0, a0, 0);
1439  __ li(a4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1440  __ MultiPush(a0.bit() | a1.bit() | a4.bit() | fp.bit() | ra.bit());
1441  __ Daddu(fp, sp,
1442  Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
1443 }
1444 
1445 
1446 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1447  // ----------- S t a t e -------------
1448  // -- v0 : result being passed through
1449  // -----------------------------------
1450  // Get the number of arguments passed (as a smi), tear down the frame and
1451  // then tear down the parameters.
1452  __ ld(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
1453  kPointerSize)));
1454  __ mov(sp, fp);
1455  __ MultiPop(fp.bit() | ra.bit());
1456  __ SmiScale(a4, a1, kPointerSizeLog2);
1457  __ Daddu(sp, sp, a4);
1458  // Adjust for the receiver.
1459  __ Daddu(sp, sp, Operand(kPointerSize));
1460 }
1461 
1462 
1463 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1464  // State setup as expected by MacroAssembler::InvokePrologue.
1465  // ----------- S t a t e -------------
1466  // -- a0: actual arguments count
1467  // -- a1: function (passed through to callee)
1468  // -- a2: expected arguments count
1469  // -----------------------------------
1470 
1471  Label stack_overflow;
1472  ArgumentAdaptorStackCheck(masm, &stack_overflow);
1473  Label invoke, dont_adapt_arguments;
1474 
1475  Label enough, too_few;
1476  __ ld(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
1477  __ Branch(&dont_adapt_arguments, eq,
1478  a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
1479  // We use Uless as the number of arguments should always be greater than 0.
1480  __ Branch(&too_few, Uless, a0, Operand(a2));
1481 
1482  { // Enough parameters: actual >= expected.
1483  // a0: actual number of arguments as a smi
1484  // a1: function
1485  // a2: expected number of arguments
1486  // a3: code entry to call
1487  __ bind(&enough);
1488  EnterArgumentsAdaptorFrame(masm);
1489 
1490  // Calculate copy start address into a0 and copy end address into a2.
1491  __ SmiScale(a0, a0, kPointerSizeLog2);
1492  __ Daddu(a0, fp, a0);
1493  // Adjust for return address and receiver.
1494  __ Daddu(a0, a0, Operand(2 * kPointerSize));
1495  // Compute copy end address.
1496  __ dsll(a2, a2, kPointerSizeLog2);
1497  __ dsubu(a2, a0, a2);
1498 
1499  // Copy the arguments (including the receiver) to the new stack frame.
1500  // a0: copy start address
1501  // a1: function
1502  // a2: copy end address
1503  // a3: code entry to call
1504 
1505  Label copy;
1506  __ bind(&copy);
1507  __ ld(a4, MemOperand(a0));
1508  __ push(a4);
1509  __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a2));
1510  __ daddiu(a0, a0, -kPointerSize); // In delay slot.
1511 
1512  __ jmp(&invoke);
1513  }
1514 
1515  { // Too few parameters: Actual < expected.
1516  __ bind(&too_few);
1517  EnterArgumentsAdaptorFrame(masm);
1518 
1519  // Calculate copy start address into a0 and copy end address is fp.
1520  // a0: actual number of arguments as a smi
1521  // a1: function
1522  // a2: expected number of arguments
1523  // a3: code entry to call
1524  __ SmiScale(a0, a0, kPointerSizeLog2);
1525  __ Daddu(a0, fp, a0);
1526  // Adjust for return address and receiver.
1527  __ Daddu(a0, a0, Operand(2 * kPointerSize));
1528  // Compute copy end address. Also adjust for return address.
1529  __ Daddu(a7, fp, kPointerSize);
1530 
1531  // Copy the arguments (including the receiver) to the new stack frame.
1532  // a0: copy start address
1533  // a1: function
1534  // a2: expected number of arguments
1535  // a3: code entry to call
1536  // a7: copy end address
1537  Label copy;
1538  __ bind(&copy);
1539  __ ld(a4, MemOperand(a0)); // Adjusted above for return addr and receiver.
1540  __ Dsubu(sp, sp, kPointerSize);
1541  __ Dsubu(a0, a0, kPointerSize);
1542  __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a7));
1543  __ sd(a4, MemOperand(sp)); // In the delay slot.
1544 
1545  // Fill the remaining expected arguments with undefined.
1546  // a1: function
1547  // a2: expected number of arguments
1548  // a3: code entry to call
1549  __ LoadRoot(a4, Heap::kUndefinedValueRootIndex);
1550  __ dsll(a6, a2, kPointerSizeLog2);
1551  __ Dsubu(a2, fp, Operand(a6));
1552  // Adjust for frame.
1553  __ Dsubu(a2, a2, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
1554  2 * kPointerSize));
1555 
1556  Label fill;
1557  __ bind(&fill);
1558  __ Dsubu(sp, sp, kPointerSize);
1559  __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a2));
1560  __ sd(a4, MemOperand(sp));
1561  }
1562 
1563  // Call the entry point.
1564  __ bind(&invoke);
1565 
1566  __ Call(a3);
1567 
1568  // Store offset of return address for deoptimizer.
1569  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
1570 
1571  // Exit frame and return.
1572  LeaveArgumentsAdaptorFrame(masm);
1573  __ Ret();
1574 
1575 
1576  // -------------------------------------------
1577  // Don't adapt arguments.
1578  // -------------------------------------------
1579  __ bind(&dont_adapt_arguments);
1580  __ Jump(a3);
1581 
1582  __ bind(&stack_overflow);
1583  {
1584  FrameScope frame(masm, StackFrame::MANUAL);
1585  EnterArgumentsAdaptorFrame(masm);
1586  __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
1587  __ break_(0xCC);
1588  }
1589 }
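
Taken together, the two copy paths build a frame holding exactly the expected number of arguments: surplus actuals are not copied, and missing ones are filled with undefined. A C++ model of that invariant (a sketch; receiver and frame bookkeeping omitted):

  #include <algorithm>
  #include <cstdint>
  #include <vector>

  std::vector<uint64_t> AdaptArguments(const std::vector<uint64_t>& actual,
                                       size_t expected, uint64_t undefined) {
    std::vector<uint64_t> adapted(
        actual.begin(),
        actual.begin() + std::min(actual.size(), expected));
    while (adapted.size() < expected) {
      adapted.push_back(undefined);  // models the &fill loop above
    }
    return adapted;
  }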
1590 
1591 
1592 #undef __
1593 
1594 } } // namespace v8::internal
1595 
1596 #endif // V8_TARGET_ARCH_MIPS64