builtins-mips.cc
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.



#include "src/v8.h"

#if V8_TARGET_ARCH_MIPS

#include "src/codegen.h"
#include "src/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen.h"
#include "src/runtime/runtime.h"


namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- a0                 : number of arguments excluding receiver
  //  -- a1                 : called function (only guaranteed when
  //  --                      extra_args requires it)
  //  -- cp                 : context
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ push(a1);
  } else {
    DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects a0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ Addu(a0, a0, num_extra_args + 1);
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the native context.
  __ lw(result,
        MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ lw(result,
        FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the InternalArray function from the native context.
  __ lw(result,
        MemOperand(result,
                   Context::SlotOffset(
                       Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the native context.
  __ lw(result,
        MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ lw(result,
        FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the Array function from the native context.
  __ lw(result,
        MemOperand(result,
                   Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}
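// Note: both loaders above need two dereferences because cp may point at an
// arbitrary function context; the chain is
//   cp -> global object -> native context -> builtin function slot,
// which yields the canonical (Internal)Array function regardless of the
// currently executing context.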


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, t0);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction,
              t0, Operand(MAP_TYPE));
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // Tail call a stub.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, t0);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction2,
              t0, Operand(MAP_TYPE));
  }

  // Run the native code for the Array function called as a normal function.
  // Tail call a stub.
  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
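// Note: loading undefined into a2 appears to act as the "no AllocationSite"
// sentinel for ArrayConstructorStub, so this path allocates without
// allocation-site feedback.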


void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1, a2, a3);

  Register function = a1;
  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, a2);
    __ Assert(eq, kUnexpectedStringFunction, function, Operand(a2));
  }

  // Load the first argument into a0 and get rid of the rest.
  Label no_arguments;
  __ Branch(&no_arguments, eq, a0, Operand(zero_reg));
  // First argument is at sp[(argc - 1) * 4].
  __ Subu(a0, a0, Operand(1));
  __ sll(a0, a0, kPointerSizeLog2);
  __ Addu(sp, a0, sp);
  __ lw(a0, MemOperand(sp));
  // sp now points to args[0]; drop args[0] and the receiver.
  __ Drop(2);

  Register argument = a2;
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(a0,        // Input.
                             argument,  // Result.
                             a3,        // Scratch.
                             t0,        // Scratch.
                             t1,        // Scratch.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1, a3, t0);
  __ bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- a2     : argument converted to string
  //  -- a1     : constructor function
  //  -- ra     : return address
  // -----------------------------------

  Label gc_required;
  __ Allocate(JSValue::kSize,
              v0,  // Result.
              a3,  // Scratch.
              t0,  // Scratch.
              &gc_required,
              TAG_OBJECT);

  // Initialising the String Object.
  Register map = a3;
  __ LoadGlobalFunctionInitialMap(function, map, t0);
  if (FLAG_debug_code) {
    __ lbu(t0, FieldMemOperand(map, Map::kInstanceSizeOffset));
    __ Assert(eq, kUnexpectedStringWrapperInstanceSize,
              t0, Operand(JSValue::kSize >> kPointerSizeLog2));
    __ lbu(t0, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
    __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper,
              t0, Operand(zero_reg));
  }
  __ sw(map, FieldMemOperand(v0, HeapObject::kMapOffset));

  __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex);
  __ sw(a3, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ sw(a3, FieldMemOperand(v0, JSObject::kElementsOffset));

  __ sw(argument, FieldMemOperand(v0, JSValue::kValueOffset));

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);

  __ Ret();

  // The argument was not found in the number to string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ bind(&not_cached);
  __ JumpIfSmi(a0, &convert_argument);

  // Is it a String?
  __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  __ And(t0, a3, Operand(kIsNotStringMask));
  __ Branch(&convert_argument, ne, t0, Operand(zero_reg));
  __ mov(argument, a0);
  __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
  __ Branch(&argument_is_string);

  // Invoke the conversion builtin and put the result into a2.
  __ bind(&convert_argument);
  __ push(function);  // Preserve the function.
  __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(a0);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
  }
  __ pop(function);
  __ mov(argument, v0);
  __ Branch(&argument_is_string);

  // Load the empty string into a2, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ bind(&no_arguments);
  __ LoadRoot(argument, Heap::kempty_stringRootIndex);
  __ Drop(1);
  __ Branch(&argument_is_string);

  // At this point the argument is already a string. Call runtime to
  // create a string wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1, a3, t0);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(argument);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ Ret();
}
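// In JS terms this implements the [[Construct]] path of the String
// constructor: for example, new String(12.3) yields a JSValue whose
// kValueOffset field holds the string "12.3" (obtained via the number-string
// cache when possible, and via the TO_STRING builtin otherwise).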


static void CallRuntimePassFunction(
    MacroAssembler* masm, Runtime::FunctionId function_id) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack to survive the runtime call,
  // and push the function again as the parameter to the runtime call.
  __ Push(a1, a1);

  __ CallRuntime(function_id, 1);
  // Restore the function.
  __ Pop(a1);
}
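// The double push matters: the runtime call may allocate and trigger GC, so
// the only safe place to keep the JSFunction across it is the stack; the
// second copy is consumed as the runtime argument.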


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
  __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}


static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
  __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(t0, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(t0));

  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}
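// If sp is above the stack limit there is no pending interrupt, so the
// function tail-calls its unoptimized shared code directly; otherwise it
// asks the runtime whether optimized code is ready and jumps to whichever
// code object the runtime returns.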


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_memento) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- a1     : constructor function
  //  -- a2     : allocation site or undefined
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Should never create mementos for api functions.
  DCHECK(!is_api_function || !create_memento);

  Isolate* isolate = masm->isolate();

  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- a1     : constructor function
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    if (create_memento) {
      __ AssertUndefinedOrAllocationSite(a2, a3);
      __ push(a2);
    }

    // Preserve the two incoming parameters on the stack.
    __ sll(a0, a0, kSmiTagSize);  // Tag arguments count.
    __ MultiPushReversed(a0.bit() | a1.bit());

    Label rt_call, allocated;
    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    if (FLAG_inline_new) {
      Label undo_allocation;
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(isolate);
      __ li(a2, Operand(debug_step_in_fp));
      __ lw(a2, MemOperand(a2));
      __ Branch(&rt_call, ne, a2, Operand(zero_reg));

      // Load the initial map and verify that it is in fact a map.
      // a1: constructor function
      __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
      __ JumpIfSmi(a2, &rt_call);
      __ GetObjectType(a2, a3, t4);
      __ Branch(&rt_call, ne, t4, Operand(MAP_TYPE));

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc). In which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // a1: constructor function
      // a2: initial map
      __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
      __ Branch(&rt_call, eq, a3, Operand(JS_FUNCTION_TYPE));

      if (!is_api_function) {
        Label allocate;
        MemOperand bit_field3 = FieldMemOperand(a2, Map::kBitField3Offset);
        // Check if slack tracking is enabled.
        __ lw(t0, bit_field3);
        __ DecodeField<Map::ConstructionCount>(t2, t0);
        __ Branch(&allocate, eq, t2, Operand(JSFunction::kNoSlackTracking));
        // Decrease generous allocation count.
        __ Subu(t0, t0, Operand(1 << Map::ConstructionCount::kShift));
        __ Branch(USE_DELAY_SLOT, &allocate, ne, t2,
                  Operand(JSFunction::kFinishSlackTracking));
        __ sw(t0, bit_field3);  // In delay slot.

        __ Push(a1, a2, a1);  // a1 = Constructor.
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

        __ Pop(a1, a2);
        // Slack tracking counter is kNoSlackTracking after runtime call.
        DCHECK(JSFunction::kNoSlackTracking == 0);
        __ mov(t2, zero_reg);

        __ bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      // a1: constructor function
      // a2: initial map
      __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
      if (create_memento) {
        __ Addu(a3, a3, Operand(AllocationMemento::kSize / kPointerSize));
      }

      __ Allocate(a3, t4, t5, t6, &rt_call, SIZE_IN_WORDS);

      // Allocated the JSObject, now initialize the fields. Map is set to
      // initial map and properties and elements are set to empty fixed array.
      // a1: constructor function
      // a2: initial map
      // a3: object size (not including memento if create_memento)
      // t4: JSObject (not tagged)
      __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);
      __ mov(t5, t4);
      __ sw(a2, MemOperand(t5, JSObject::kMapOffset));
      __ sw(t6, MemOperand(t5, JSObject::kPropertiesOffset));
      __ sw(t6, MemOperand(t5, JSObject::kElementsOffset));
      __ Addu(t5, t5, Operand(3*kPointerSize));
      DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset);
      DCHECK_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
      DCHECK_EQ(2 * kPointerSize, JSObject::kElementsOffset);

      // Fill all the in-object properties with appropriate filler.
      // a1: constructor function
      // a2: initial map
      // a3: object size (in words, including memento if create_memento)
      // t4: JSObject (not tagged)
      // t5: First in-object property of JSObject (not tagged)
      // t2: slack tracking counter (non-API function case)
      DCHECK_EQ(3 * kPointerSize, JSObject::kHeaderSize);

      // Use t7 to hold undefined, which is used in several places below.
      __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);

      if (!is_api_function) {
        Label no_inobject_slack_tracking;

        // Check if slack tracking is enabled.
        __ Branch(&no_inobject_slack_tracking,
                  eq, t2, Operand(JSFunction::kNoSlackTracking));

        // Allocate object with a slack.
        __ lbu(a0, FieldMemOperand(a2, Map::kPreAllocatedPropertyFieldsOffset));
        __ sll(at, a0, kPointerSizeLog2);
        __ addu(a0, t5, at);
        // a0: offset of first field after pre-allocated fields
        if (FLAG_debug_code) {
          __ sll(at, a3, kPointerSizeLog2);
          __ Addu(t6, t4, Operand(at));  // End of object.
          __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields,
                    a0, Operand(t6));
        }
        __ InitializeFieldsWithFiller(t5, a0, t7);
        // To allow for truncation.
        __ LoadRoot(t7, Heap::kOnePointerFillerMapRootIndex);
        // Fill the remaining fields with one pointer filler map.

        __ bind(&no_inobject_slack_tracking);
      }

      if (create_memento) {
        __ Subu(a0, a3, Operand(AllocationMemento::kSize / kPointerSize));
        __ sll(a0, a0, kPointerSizeLog2);
        __ Addu(a0, t4, Operand(a0));  // End of object.
        __ InitializeFieldsWithFiller(t5, a0, t7);

        // Fill in memento fields.
        // t5: points to the allocated but uninitialized memento.
        __ LoadRoot(t7, Heap::kAllocationMementoMapRootIndex);
        DCHECK_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
        __ sw(t7, MemOperand(t5));
        __ Addu(t5, t5, kPointerSize);
        // Load the AllocationSite.
        __ lw(t7, MemOperand(sp, 2 * kPointerSize));
        DCHECK_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
        __ sw(t7, MemOperand(t5));
        __ Addu(t5, t5, kPointerSize);
      } else {
        __ sll(at, a3, kPointerSizeLog2);
        __ Addu(a0, t4, Operand(at));  // End of object.
        __ InitializeFieldsWithFiller(t5, a0, t7);
      }

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on. Any
      // failures need to undo the allocation, so that the heap is in a
      // consistent state and verifiable.
      __ Addu(t4, t4, Operand(kHeapObjectTag));

      // Check if a non-empty properties array is needed. Continue with the
      // allocated object if not; fall through to the runtime call if it is.
      // a1: constructor function
      // t4: JSObject
      // t5: start of next object (not tagged)
      __ lbu(a3, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset));
      // The field instance sizes contains both pre-allocated property fields
      // and in-object properties.
      __ lbu(t6, FieldMemOperand(a2, Map::kPreAllocatedPropertyFieldsOffset));
      __ Addu(a3, a3, Operand(t6));
      __ lbu(t6, FieldMemOperand(a2, Map::kInObjectPropertiesOffset));
      __ subu(a3, a3, t6);

      // Done if no extra properties are to be allocated.
      __ Branch(&allocated, eq, a3, Operand(zero_reg));
      __ Assert(greater_equal, kPropertyAllocationCountFailed,
                a3, Operand(zero_reg));

      // Scale the number of elements by pointer size and add the header for
      // FixedArrays to the start of the next object calculation from above.
      // a1: constructor
      // a3: number of elements in properties array
      // t4: JSObject
      // t5: start of next object
      __ Addu(a0, a3, Operand(FixedArray::kHeaderSize / kPointerSize));
      __ Allocate(
          a0,
          t5,
          t6,
          a2,
          &undo_allocation,
          static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));

      // Initialize the FixedArray.
      // a1: constructor
      // a3: number of elements in properties array (untagged)
      // t4: JSObject
      // t5: start of next object
      __ LoadRoot(t6, Heap::kFixedArrayMapRootIndex);
      __ mov(a2, t5);
      __ sw(t6, MemOperand(a2, JSObject::kMapOffset));
      __ sll(a0, a3, kSmiTagSize);
      __ sw(a0, MemOperand(a2, FixedArray::kLengthOffset));
      __ Addu(a2, a2, Operand(2 * kPointerSize));

      DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset);
      DCHECK_EQ(1 * kPointerSize, FixedArray::kLengthOffset);

      // Initialize the fields to undefined.
      // a1: constructor
      // a2: First element of FixedArray (not tagged)
      // a3: number of elements in properties array
      // t4: JSObject
      // t5: FixedArray (not tagged)
      __ sll(t3, a3, kPointerSizeLog2);
      __ addu(t6, a2, t3);  // End of object.
      DCHECK_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
      { Label loop, entry;
        if (!is_api_function || create_memento) {
          __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
        } else if (FLAG_debug_code) {
          __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
          __ Assert(eq, kUndefinedValueNotLoaded, t7, Operand(t2));
        }
        __ jmp(&entry);
        __ bind(&loop);
        __ sw(t7, MemOperand(a2));
        __ addiu(a2, a2, kPointerSize);
        __ bind(&entry);
        __ Branch(&loop, less, a2, Operand(t6));
      }

      // Store the initialized FixedArray into the properties field of
      // the JSObject.
      // a1: constructor function
      // t4: JSObject
      // t5: FixedArray (not tagged)
      __ Addu(t5, t5, Operand(kHeapObjectTag));  // Add the heap tag.
      __ sw(t5, FieldMemOperand(t4, JSObject::kPropertiesOffset));

      // Continue with JSObject being successfully allocated.
      // a1: constructor function
      // t4: JSObject
      __ jmp(&allocated);

      // Undo the setting of the new top so that the heap is verifiable. For
      // example, the map's unused properties potentially do not match the
      // allocated objects unused properties.
      // t4: JSObject (previous new top)
      __ bind(&undo_allocation);
      __ UndoAllocationInNewSpace(t4, t5);
    }

    // Allocate the new receiver object using the runtime call.
    // a1: constructor function
    __ bind(&rt_call);
    if (create_memento) {
      // Get the cell or allocation site.
      __ lw(a2, MemOperand(sp, 2 * kPointerSize));
      __ push(a2);
    }

    __ push(a1);  // Argument for Runtime_NewObject.
    if (create_memento) {
      __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2);
    } else {
      __ CallRuntime(Runtime::kNewObject, 1);
    }
    __ mov(t4, v0);

    // If we ended up using the runtime, and we want a memento, then the
    // runtime call made it for us, and we shouldn't do create count
    // increment.
    Label count_incremented;
    if (create_memento) {
      __ jmp(&count_incremented);
    }

    // Receiver for constructor call allocated.
    // t4: JSObject
    __ bind(&allocated);

    if (create_memento) {
      __ lw(a2, MemOperand(sp, kPointerSize * 2));
      __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
      __ Branch(&count_incremented, eq, a2, Operand(t5));
      // a2 is an AllocationSite. We are creating a memento from it, so we
      // need to increment the memento create count.
      __ lw(a3, FieldMemOperand(a2,
                                AllocationSite::kPretenureCreateCountOffset));
      __ Addu(a3, a3, Operand(Smi::FromInt(1)));
      __ sw(a3, FieldMemOperand(a2,
                                AllocationSite::kPretenureCreateCountOffset));
      __ bind(&count_incremented);
    }

    __ Push(t4, t4);

    // Reload the number of arguments from the stack.
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
    __ lw(a1, MemOperand(sp, 2 * kPointerSize));
    __ lw(a3, MemOperand(sp, 3 * kPointerSize));

    // Set up pointer to last argument.
    __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Set up number of arguments for function call below.
    __ srl(a0, a3, kSmiTagSize);

    // Copy arguments and receiver to the expression stack.
    // a0: number of arguments
    // a1: constructor function
    // a2: address of last argument (caller sp)
    // a3: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
    Label loop, entry;
    __ jmp(&entry);
    __ bind(&loop);
    __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(t0, a2, Operand(t0));
    __ lw(t1, MemOperand(t0));
    __ push(t1);
    __ bind(&entry);
    __ Addu(a3, a3, Operand(-2));
    __ Branch(&loop, greater_equal, a3, Operand(zero_reg));

    // Call the function.
    // a0: number of arguments
    // a1: constructor function
    if (is_api_function) {
      __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(a0);
      __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;

    // If the result is a smi, it is *not* an object in the ECMA sense.
    // v0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ JumpIfSmi(v0, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    __ GetObjectType(v0, a1, a3);
    __ Branch(&exit, greater_equal, a3, Operand(FIRST_SPEC_OBJECT_TYPE));

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ lw(v0, MemOperand(sp));

    // Remove receiver from the stack, remove caller arguments, and
    // return.
    __ bind(&exit);
    // v0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ lw(a1, MemOperand(sp, 2 * kPointerSize));

    // Leave construct frame.
  }

  __ sll(t0, a1, kPointerSizeLog2 - 1);
  __ Addu(sp, sp, t0);
  __ Addu(sp, sp, kPointerSize);
  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
  __ Ret();
}
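// Note on the slack-tracking path above: Map::ConstructionCount is
// decremented on each inline allocation, and once it reaches
// kFinishSlackTracking the stub calls Runtime::kFinalizeInstanceSize so the
// map can drop its unused in-object slack; until then the unused fields are
// written with the one-pointer filler map so the heap stays iterable.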


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody

  // ----------- S t a t e -------------
  //  -- a0: code entry
  //  -- a1: function
  //  -- a2: receiver_pointer
  //  -- a3: argc
  //  -- s0: argv
  // -----------------------------------
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Clear the context before we push it when entering the JS frame.
  __ mov(cp, zero_reg);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context from the function argument.
    __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

    // Push the function and the receiver onto the stack.
    __ Push(a1, a2);

    // Copy arguments to the stack in a loop.
    // a3: argc
    // s0: argv, i.e. points to first arg
    Label loop, entry;
    __ sll(t0, a3, kPointerSizeLog2);
    __ addu(t2, s0, t0);
    __ b(&entry);
    __ nop();  // Branch delay slot nop.
    // t2 points past last arg.
    __ bind(&loop);
    __ lw(t0, MemOperand(s0));  // Read next parameter.
    __ addiu(s0, s0, kPointerSize);
    __ lw(t0, MemOperand(t0));  // Dereference handle.
    __ push(t0);  // Push parameter.
    __ bind(&entry);
    __ Branch(&loop, ne, s0, Operand(t2));

    // Initialize all JavaScript callee-saved registers, since they will be
    // seen by the garbage collector as part of handlers.
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ mov(s1, t0);
    __ mov(s2, t0);
    __ mov(s3, t0);
    __ mov(s4, t0);
    __ mov(s5, t0);
    // s6 holds the root address. Do not clobber.
    // s7 is cp. Do not init.

    // Invoke the code and pass argc as a0.
    __ mov(a0, a3);
    if (is_construct) {
      // No type feedback cell is available.
      __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
      CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(a0);
      __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Leave internal frame.
  }

  __ Jump(ra);
}
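// Each argv slot holds a Handle (a pointer to a heap slot), not the object
// itself, which is why the copy loop above loads twice: once to fetch the
// handle location and once to dereference it before pushing.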


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileLazy);
  GenerateTailCallToReturnedCode(masm);
}


static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  // Push function as parameter to the runtime call.
  __ Push(a1, a1);
  // Whether to compile in a background thread.
  __ Push(masm->isolate()->factory()->ToBoolean(concurrent));

  __ CallRuntime(Runtime::kCompileOptimized, 2);
  // Restore the function.
  __ Pop(a1);
}


void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
}



static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do
  // stack crawls in MakeCodeYoung. This seems a bit fragile.

  // Set a0 to point to the head of the PlatformCodeAge sequence.
  __ Subu(a0, a0,
      Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));

  // The following registers must be saved and restored when calling through
  // to the runtime:
  //   a0 - contains return address (beginning of patch sequence)
  //   a1 - isolate
  RegList saved_regs =
      (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(saved_regs);
  __ Jump(a0);
}
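// Note: a0 holds the return address of the call emitted by the patched
// code-age sequence, so stepping it back by
// kNoCodeAgeSequenceLength - Assembler::kInstrSize recovers the start of
// the sequence that get_make_code_young_function re-patches.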

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the
  // fact that make_code_young doesn't do any garbage collection which allows
  // us to save/restore the registers without worrying about which of them
  // contain pointers.

  // Set a0 to point to the head of the PlatformCodeAge sequence.
  __ Subu(a0, a0,
      Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));

  // The following registers must be saved and restored when calling through
  // to the runtime:
  //   a0 - contains return address (beginning of patch sequence)
  //   a1 - isolate
  RegList saved_regs =
      (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ MultiPop(saved_regs);

  // Perform prologue operations usually performed by the young code stub.
  __ Push(ra, fp, cp, a1);
  __ Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));

  // Jump to point after the code-age stub.
  __ Addu(a0, a0, Operand(kNoCodeAgeSequenceLength));
  __ Jump(a0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ MultiPush(kJSCallerSaved | kCalleeSaved);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
    __ MultiPop(kJSCallerSaved | kCalleeSaved);
  }

  __ Addu(sp, sp, Operand(kPointerSize));  // Ignore state.
  __ Jump(ra);  // Jump to miss handler.
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ li(a0, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(a0);
    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
  }

  // Get the full codegen state from the stack and untag it -> t2.
  __ lw(t2, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(t2);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ Branch(&with_tos_register,
            ne, t2, Operand(FullCodeGenerator::NO_REGISTERS));
  __ Ret(USE_DELAY_SLOT);
  // Safe to fill delay slot: Addu will emit one instruction.
  __ Addu(sp, sp, Operand(1 * kPointerSize));  // Remove state.

  __ bind(&with_tos_register);
  __ lw(v0, MemOperand(sp, 1 * kPointerSize));
  __ Branch(&unknown_state, ne, t2, Operand(FullCodeGenerator::TOS_REG));

  __ Ret(USE_DELAY_SLOT);
  // Safe to fill delay slot: Addu will emit one instruction.
  __ Addu(sp, sp, Operand(2 * kPointerSize));  // Remove state.

  __ bind(&unknown_state);
  __ stop("no cases left");
}
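// The state word pushed by the deoptimizer tells this helper how much of the
// expression stack to discard: NO_REGISTERS means only the state slot itself,
// while TOS_REG means the caller also expects the top-of-stack value to be
// reloaded into v0 before both slots are dropped.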


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(a0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  // If the code object is null, just return to the unoptimized code.
  __ Ret(eq, v0, Operand(Smi::FromInt(0)));

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ lw(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ lw(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
  __ SmiUntag(a1);

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ addu(v0, v0, a1);
  __ addiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}
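// Rather than jumping, the stub "returns" into the optimized code: it forges
// ra to point at the OSR entry (code start + header + osr_offset) and then
// executes Ret, so the unoptimized activation resumes directly inside the
// optimized function.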


void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as indicator that recompilation might be done.
  Label ok;
  __ LoadRoot(at, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(at));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard, 0);
  }
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ Ret();
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // a0: actual number of arguments
  { Label done;
    __ Branch(&done, ne, a0, Operand(zero_reg));
    __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
    __ push(t2);
    __ Addu(a0, a0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  // a0: actual number of arguments
  Label slow, non_function;
  __ sll(at, a0, kPointerSizeLog2);
  __ addu(at, sp, at);
  __ lw(a1, MemOperand(at));
  __ JumpIfSmi(a1, &non_function);
  __ GetObjectType(a1, a2, a2);
  __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE));

  // 3a. Patch the first argument if necessary when calling a function.
  // a0: actual number of arguments
  // a1: function
  Label shift_arguments;
  __ li(t0, Operand(0, RelocInfo::NONE32));  // Indicate regular JS_FUNCTION.
  { Label convert_to_object, use_global_proxy, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
    __ lw(a3, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset));
    __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                                 kSmiTagSize)));
    __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));

    // Do not transform the receiver for native (Compilerhints already in a3).
    __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));

    // Compute the receiver in sloppy mode.
    // Load first argument in a2. a2 = -kPointerSize(sp + n_args << 2).
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a2, sp, at);
    __ lw(a2, MemOperand(a2, -kPointerSize));
    // a0: actual number of arguments
    // a1: function
    // a2: first argument
    __ JumpIfSmi(a2, &convert_to_object, t2);

    __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
    __ Branch(&use_global_proxy, eq, a2, Operand(a3));
    __ LoadRoot(a3, Heap::kNullValueRootIndex);
    __ Branch(&use_global_proxy, eq, a2, Operand(a3));

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ GetObjectType(a2, a3, a3);
    __ Branch(&shift_arguments, ge, a3, Operand(FIRST_SPEC_OBJECT_TYPE));

    __ bind(&convert_to_object);
    // Enter an internal frame in order to preserve argument count.
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ sll(a0, a0, kSmiTagSize);  // Smi tagged.
      __ Push(a0, a2);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ mov(a2, v0);

      __ pop(a0);
      __ sra(a0, a0, kSmiTagSize);  // Un-tag.
      // Leave internal frame.
    }

    // Restore the function to a1, and the flag to t0.
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(at, sp, at);
    __ lw(a1, MemOperand(at));
    __ Branch(USE_DELAY_SLOT, &patch_receiver);
    __ li(t0, Operand(0, RelocInfo::NONE32));  // In delay slot.

    __ bind(&use_global_proxy);
    __ lw(a2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset));

    __ bind(&patch_receiver);
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a3, sp, at);
    __ sw(a2, MemOperand(a3, -kPointerSize));

    __ Branch(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ bind(&slow);
  __ li(t0, Operand(1, RelocInfo::NONE32));  // Indicate function proxy.
  __ Branch(&shift_arguments, eq, a2, Operand(JS_FUNCTION_PROXY_TYPE));

  __ bind(&non_function);
  __ li(t0, Operand(2, RelocInfo::NONE32));  // Indicate non-function.

  // 3c. Patch the first argument when calling a non-function. The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  // a0: actual number of arguments
  // a1: function
  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
  __ sll(at, a0, kPointerSizeLog2);
  __ addu(a2, sp, at);
  __ sw(a1, MemOperand(a2, -kPointerSize));

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  // a0: actual number of arguments
  // a1: function
  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
  __ bind(&shift_arguments);
  { Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a2, sp, at);

    __ bind(&loop);
    __ lw(at, MemOperand(a2, -kPointerSize));
    __ sw(at, MemOperand(a2));
    __ Subu(a2, a2, Operand(kPointerSize));
    __ Branch(&loop, ne, a2, Operand(sp));
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Subu(a0, a0, Operand(1));
    __ Pop();
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  // a0: actual number of arguments
  // a1: function
  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
  { Label function, non_proxy;
    __ Branch(&function, eq, t0, Operand(zero_reg));
    // Expected number of arguments is 0 for CALL_NON_FUNCTION.
    __ mov(a2, zero_reg);
    __ Branch(&non_proxy, ne, t0, Operand(1));

    __ push(a1);  // Re-add proxy object as additional argument.
    __ Addu(a0, a0, Operand(1));
    __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing. If so, jump
  //     (tail-call) to the code in register a3 without checking arguments.
  // a0: actual number of arguments
  // a1: function
  __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a2,
         FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ sra(a2, a2, kSmiTagSize);
  // Check formal and actual parameter counts.
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET, ne, a2, Operand(a0));

  __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  ParameterCount expected(0);
  __ InvokeCode(a3, expected, expected, JUMP_FUNCTION, NullCallWrapper());
}
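// t0 carries the callee classification through the whole builtin: 0 routes
// to the fast JSFunction path in 5b, while 1 (proxy) and 2 (non-function)
// are redirected through the arguments adaptor to CALL_FUNCTION_PROXY and
// CALL_NON_FUNCTION respectively.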


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  const int kIndexOffset =
      StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
  const int kLimitOffset =
      StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
  const int kArgsOffset = 2 * kPointerSize;
  const int kRecvOffset = 3 * kPointerSize;
  const int kFunctionOffset = 4 * kPointerSize;

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    __ lw(a0, MemOperand(fp, kFunctionOffset));  // Get the function.
    __ push(a0);
    __ lw(a0, MemOperand(fp, kArgsOffset));  // Get the args array.
    __ push(a0);
    // Returns (in v0) number of arguments to copy to stack as Smi.
    __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

    // Check the stack for overflow. We are not trying to catch
    // interruptions (e.g. debug break and preemption) here, so the "real
    // stack limit" is checked.
    Label okay;
    __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
    // Make a2 the space we have left. The stack might already be overflowed
    // here which will cause a2 to become negative.
    __ subu(a2, sp, a2);
    // Check if the arguments will overflow the stack.
    __ sll(t3, v0, kPointerSizeLog2 - kSmiTagSize);
    __ Branch(&okay, gt, a2, Operand(t3));  // Signed comparison.

    // Out of stack space.
    __ lw(a1, MemOperand(fp, kFunctionOffset));
    __ Push(a1, v0);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    // End of stack check.

    // Push current limit and index.
    __ bind(&okay);
    __ mov(a1, zero_reg);
    __ Push(v0, a1);  // Limit and initial index.

    // Get the receiver.
    __ lw(a0, MemOperand(fp, kRecvOffset));

    // Check that the function is a JS function (otherwise it must be a
    // proxy).
    Label push_receiver;
    __ lw(a1, MemOperand(fp, kFunctionOffset));
    __ GetObjectType(a1, a2, a2);
    __ Branch(&push_receiver, ne, a2, Operand(JS_FUNCTION_TYPE));

    // Change context eagerly to get the right global object if necessary.
    __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
    // Load the shared function info while the function is still in a1.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));

    // Compute the receiver.
    // Do not transform the receiver for strict mode functions.
    Label call_to_object, use_global_proxy;
    __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset));
    __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                                 kSmiTagSize)));
    __ Branch(&push_receiver, ne, t3, Operand(zero_reg));

    // Do not transform the receiver for native (Compilerhints already in a2).
    __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ Branch(&push_receiver, ne, t3, Operand(zero_reg));

    // Compute the receiver in sloppy mode.
    __ JumpIfSmi(a0, &call_to_object);
    __ LoadRoot(a1, Heap::kNullValueRootIndex);
    __ Branch(&use_global_proxy, eq, a0, Operand(a1));
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ Branch(&use_global_proxy, eq, a0, Operand(a2));

    // Check if the receiver is already a JavaScript object.
    // a0: receiver
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ GetObjectType(a0, a1, a1);
    __ Branch(&push_receiver, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));

    // Convert the receiver to a regular object.
    // a0: receiver
    __ bind(&call_to_object);
    __ push(a0);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ mov(a0, v0);  // Put object in a0 to match other paths to push_receiver.
    __ Branch(&push_receiver);

    __ bind(&use_global_proxy);
    __ lw(a0, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalProxyOffset));

    // Push the receiver.
    // a0: receiver
    __ bind(&push_receiver);
    __ push(a0);

    // Copy all arguments from the array to the stack.
    Label entry, loop;
    __ lw(a0, MemOperand(fp, kIndexOffset));
    __ Branch(&entry);

    // Load the current argument from the arguments array and push it to the
    // stack.
    // a0: current argument index
    __ bind(&loop);
    __ lw(a1, MemOperand(fp, kArgsOffset));
    __ Push(a1, a0);

    // Call the runtime to access the property in the arguments array.
    __ CallRuntime(Runtime::kGetProperty, 2);
    __ push(v0);

    // Advance the smi-tagged index.
    __ lw(a0, MemOperand(fp, kIndexOffset));
    __ Addu(a0, a0, Operand(1 << kSmiTagSize));
    __ sw(a0, MemOperand(fp, kIndexOffset));

    // Test if the copy loop has finished copying all the elements from the
    // arguments object.
    __ bind(&entry);
    __ lw(a1, MemOperand(fp, kLimitOffset));
    __ Branch(&loop, ne, a0, Operand(a1));

    // Call the function.
    Label call_proxy;
    ParameterCount actual(a0);
    __ sra(a0, a0, kSmiTagSize);
    __ lw(a1, MemOperand(fp, kFunctionOffset));
    __ GetObjectType(a1, a2, a2);
    __ Branch(&call_proxy, ne, a2, Operand(JS_FUNCTION_TYPE));

    __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());

    frame_scope.GenerateLeaveFrame();
    __ Ret(USE_DELAY_SLOT);
    __ Addu(sp, sp, Operand(3 * kPointerSize));  // In delay slot.

    // Call the function proxy.
    __ bind(&call_proxy);
    __ push(a1);  // Add function proxy as last argument.
    __ Addu(a0, a0, Operand(1));
    __ li(a2, Operand(0, RelocInfo::NONE32));
    __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
    __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    // Tear down the internal frame and remove function, receiver and args.
  }

  __ Ret(USE_DELAY_SLOT);
  __ Addu(sp, sp, Operand(3 * kPointerSize));  // In delay slot.
}
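// kIndexOffset and kLimitOffset address two expression-stack slots of the
// internal frame (pushed as "Limit and initial index" above); both values
// stay smi-tagged throughout the copy loop, which is why the index advances
// by 1 << kSmiTagSize per element.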


static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- a0 : actual number of arguments
  //  -- a1 : function (passed through to callee)
  //  -- a2 : expected number of arguments
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(t1, Heap::kRealStackLimitRootIndex);
  // Make t1 the space we have left. The stack might already be overflowed
  // here which will cause t1 to become negative.
  __ subu(t1, sp, t1);
  // Check if the arguments will overflow the stack.
  __ sll(at, a2, kPointerSizeLog2);
  // Signed comparison.
  __ Branch(stack_overflow, le, t1, Operand(at));
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ sll(a0, a0, kSmiTagSize);
  __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit());
  __ Addu(fp, sp,
      Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- v0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ lw(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                             kPointerSize)));
  __ mov(sp, fp);
  __ MultiPop(fp.bit() | ra.bit());
  __ sll(t0, a1, kPointerSizeLog2 - kSmiTagSize);
  __ Addu(sp, sp, t0);
  // Adjust for the receiver.
  __ Addu(sp, sp, Operand(kPointerSize));
}


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // State setup as expected by MacroAssembler::InvokePrologue.
  // ----------- S t a t e -------------
  //  -- a0: actual arguments count
  //  -- a1: function (passed through to callee)
  //  -- a2: expected arguments count
  // -----------------------------------

  Label stack_overflow;
  ArgumentAdaptorStackCheck(masm, &stack_overflow);
  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ Branch(&dont_adapt_arguments, eq,
      a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  // We use Uless as the number of arguments should always be greater than 0.
  __ Branch(&too_few, Uless, a0, Operand(a2));

  {  // Enough parameters: actual >= expected.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into a0 and copy end address into a2.
    __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address.
    __ sll(a2, a2, kPointerSizeLog2);
    __ subu(a2, a0, a2);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: copy end address
    // a3: code entry to call

    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));
    __ push(t0);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a2));
    __ addiu(a0, a0, -kPointerSize);  // In delay slot.

    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into a0 and copy end address is fp.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address. Also adjust for return address.
    __ Addu(t3, fp, kPointerSize);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    // t3: copy end address
    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));  // Adjusted above for return addr and receiver.
    __ Subu(sp, sp, kPointerSize);
    __ Subu(a0, a0, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t3));
    __ sw(t0, MemOperand(sp));  // In the delay slot.

    // Fill the remaining expected arguments with undefined.
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ sll(t2, a2, kPointerSizeLog2);
    __ Subu(a2, fp, Operand(t2));
    // Adjust for frame.
    __ Subu(a2, a2, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                            2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ Subu(sp, sp, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a2));
    __ sw(t0, MemOperand(sp));
  }

  // Call the entry point.
  __ bind(&invoke);

  __ Call(a3);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ Jump(a3);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    EnterArgumentsAdaptorFrame(masm);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    __ break_(0xCC);
  }
}
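// Both copy loops above exploit the MIPS branch delay slot: the pointer
// decrement (or the store) is placed after the branch so it executes on
// every iteration, including the final one, without costing an extra
// instruction.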


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_MIPS