#if V8_TARGET_ARCH_MIPS64
#define __ ACCESS_MASM(masm)
// Helper: jump to |global_object| if |type| is one of the JS global object
// instance types.
static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm, Register type,
                                            Label* global_object) {
  // ...
}
// Helper for LoadIC::GenerateNormal: probe |elements| (a NameDictionary) for
// |name| and, on success, load the property value into |result|.
static void GenerateDictionaryLoad(MacroAssembler* masm, Label* miss,
                                   Register elements, Register name,
                                   Register result, Register scratch1,
                                   Register scratch2) {
  // ...
  NameDictionaryLookupStub::GeneratePositiveLookup(masm, miss, &done, elements,
                                                   name, scratch1, scratch2);

  // If probing finds an entry, check that the value is a normal property.
  // ...
  const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  __ ld(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
  __ And(at, scratch1,
         Operand(Smi::FromInt(PropertyDetails::TypeField::kMask)));
  __ Branch(miss, ne, at, Operand(zero_reg));

  // Get the value at the masked, scaled index and return.
  // ...
}
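// --- Illustration (not part of the original source) -----------------------
// A minimal sketch of the entry layout the offsets above assume: a
// NameDictionary keeps (key, value, details) triples inline in its backing
// array, so a probe result at |index| has its fields at fixed distances from
// kElementsStartOffset. The constants below are stand-ins for illustration.
static inline int DictionaryFieldOffset(int index, int field) {
  const int kPointerSize = 8;           // MIPS64 word size.
  const int kEntrySize = 3;             // key, value, details.
  const int kElementsStartOffset = 40;  // hypothetical header size.
  return kElementsStartOffset + (index * kEntrySize + field) * kPointerSize;
}
// Field 1 is the value slot and field 2 the details slot, matching
// kDetailsOffset = kElementsStartOffset + 2 * kPointerSize above.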
// Helper for StoreIC::GenerateNormal: probe |elements| for |name| and store
// |value|, jumping to |miss| for anything but a writable normal property.
static void GenerateDictionaryStore(MacroAssembler* masm, Label* miss,
                                    Register elements, Register name,
                                    Register value, Register scratch1,
                                    Register scratch2) {
  // ...
  NameDictionaryLookupStub::GeneratePositiveLookup(masm, miss, &done, elements,
                                                   name, scratch1, scratch2);

  // If probing finds an entry, check that the value is a normal property
  // that is not read-only.
  // ...
  const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  const int kTypeAndReadOnlyMask =
      (PropertyDetails::TypeField::kMask |
       PropertyDetails::AttributesField::encode(READ_ONLY));
  __ ld(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
  __ And(at, scratch1, Operand(Smi::FromInt(kTypeAndReadOnlyMask)));
  __ Branch(miss, ne, at, Operand(zero_reg));

  // Store the value at the masked, scaled index and update the write barrier.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  // ...
  __ mov(scratch1, value);
  // ...
}
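// --- Illustration (not part of the original source) -----------------------
// The Smi mask test above, as plain C++: the dictionary store proceeds only
// for normal, writable data properties, i.e. both the type bits and the
// READ_ONLY attribute bit in the details word must be clear. Bit positions
// here are assumptions for illustration.
static inline bool IsWritableNormalProperty(int details) {
  const int kTypeMask = 0x3;        // assumed PropertyDetails::TypeField::kMask
  const int kReadOnlyBit = 1 << 3;  // assumed READ_ONLY attribute bit
  return (details & (kTypeMask | kReadOnlyBit)) == 0;
}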
// Checks the receiver for special cases (value type, slow case bits).
// Falls through for regular JS objects.
static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
                                           Register receiver, Register map,
                                           Register scratch,
                                           int interceptor_bit, Label* slow) {
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, slow);
  // ...
  // Check the bit field: bail out to the slow case if access checks or
  // interceptors are involved.
  __ Branch(slow, ne, at, Operand(zero_reg));
  // ...
}
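// --- Illustration (not part of the original source) -----------------------
// The single branch above folds two checks into one AND: the receiver's map
// may have neither kIsAccessCheckNeeded nor the requested interceptor bit
// set in its bit field. A sketch with a hypothetical bit position:
static inline bool NeedsSlowPath(uint8_t map_bit_field, int interceptor_bit) {
  const int kIsAccessCheckNeeded = 1;  // assumed bit position
  return (map_bit_field &
          ((1 << kIsAccessCheckNeeded) | (1 << interceptor_bit))) != 0;
}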
// Loads an indexed element from a fast-case array into |result|.
static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver,
                                  Register key, Register elements,
                                  Register scratch1, Register scratch2,
                                  Register result, Label* not_fast_array,
                                  Label* out_of_range) {
  // ...
  if (not_fast_array != NULL) {
    // Check that the object is in fast mode (not dictionary).
    // ...
    __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
    __ Branch(not_fast_array, ne, scratch1, Operand(at));
  } else {
    __ AssertFastElements(elements);
  }

  // Check that the key (index) is within bounds.
  // ...
  __ Branch(out_of_range, hs, key, Operand(scratch1));

  // Fast case: do the load.
  __ Daddu(scratch1, elements,
           Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // ...
  __ daddu(at, at, scratch1);
  // ...
  // In case the loaded value is the hole, consult the runtime so the
  // prototype chain is searched.
  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
  __ Branch(out_of_range, eq, scratch2, Operand(at));
  __ mov(result, scratch2);
}
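// --- Illustration (not part of the original source) -----------------------
// The address arithmetic above, spelled out: |elements| is a tagged pointer
// to a FixedArray, so the slot for |key| lives at
//   elements + (FixedArray::kHeaderSize - kHeapObjectTag) + key * kPointerSize
// A sketch with assumed constants (kHeapObjectTag is 1 on all V8 targets):
static inline intptr_t FastElementAddress(intptr_t tagged_elements, int key) {
  const int kHeapObjectTag = 1;
  const int kPointerSize = 8;  // MIPS64
  const int kHeaderSize = 16;  // assumed: map word + length word
  return tagged_elements + (kHeaderSize - kHeapObjectTag) +
         static_cast<intptr_t>(key) * kPointerSize;
}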
// Checks whether |key| is a unique name; jumps to |index_string| when the
// string caches an array index, and to |not_unique| when the key cannot be
// used as a unique property name.
static void GenerateKeyNameCheck(MacroAssembler* masm, Register key,
                                 Register map, Register hash,
                                 Label* index_string, Label* not_unique) {
  // Is the key a name?
  // ...
  __ GetObjectType(key, map, hash);
  // ...
  // Is the string an array index with a cached numeric value?
  __ Branch(index_string, eq, at, Operand(zero_reg));
  // ...
  // Is the string internalized? We already know it is a string, so a single
  // bit test is enough.
  __ Branch(not_unique, ne, at, Operand(zero_reg));
  // ...
}
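// --- Illustration (not part of the original source) -----------------------
// Both branches above test bits of the name's hash field: a clear
// "contains cached array index" mask means the string caches a numeric index
// (go to index_string); a set "is not internalized" bit means the key cannot
// serve as a unique name (go to not_unique). The mask value is assumed:
static inline bool HasCachedArrayIndex(uint32_t hash_field) {
  const uint32_t kContainsCachedArrayIndexMask = 1u << 31;  // assumed
  return (hash_field & kContainsCachedArrayIndexMask) == 0;
}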
void LoadIC::GenerateNormal(MacroAssembler* masm) {
  Register dictionary = a0;
  // ...
  GenerateDictionaryLoad(masm, &slow, dictionary, NameRegister(), v0, a3, a4);
  // ...
}
static const Register LoadIC_TempRegister() { return a3; }
  Isolate* isolate = masm->isolate();
  // ...
  // Perform a tail call to the miss entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kLoadIC_Miss), isolate);
  __ TailCallExternalReference(ref, 2, 1);
  // ...
  __ TailCallRuntime(Runtime::kGetProperty, 2, 1);
static MemOperand GenerateMappedArgumentsLookup(
    MacroAssembler* masm, Register object, Register key, Register scratch1,
    Register scratch2, Register scratch3, Label* unmapped_case,
    Label* slow_case) {
  Heap* heap = masm->isolate()->heap();

  // Check that the receiver is a JSObject. Because of the elements map check
  // later, we do not need to check for interceptors or whether it requires
  // access checks.
  __ JumpIfSmi(object, slow_case);
  // ...
  __ GetObjectType(object, scratch1, scratch2);
  // ...
  // Check that the key is a positive smi.
  __ NonNegativeSmiTst(key, scratch1);
  __ Branch(slow_case, ne, scratch1, Operand(zero_reg));

  // Load the elements into scratch1 and check its map.
  Handle<Map> arguments_map(heap->sloppy_arguments_elements_map());
  // ...
  // Check if the element is in the parameter map: the first two elements
  // are the context and the backing store.
  const int kOffset =
      FixedArray::kHeaderSize + 2 * kPointerSize - kHeapObjectTag;
  __ SmiUntag(scratch3, key);
  // ...
  __ Daddu(scratch3, scratch3, Operand(kOffset));
  // ...
  __ Daddu(scratch2, scratch1, scratch3);
  // ...
  __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
  __ Branch(unmapped_case, eq, scratch2, Operand(scratch3));

  // Load the mapped value from the context.
  // ...
  __ SmiUntag(scratch3, scratch2);
  // ...
  __ Daddu(scratch2, scratch1, scratch3);
  // ...
}
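// --- Illustration (not part of the original source) -----------------------
// What the lookup above implements: sloppy-arguments elements form a
// parameter map of the shape [context, backing_store, slot_0, slot_1, ...].
// If the entry for |key| is not the hole, the argument aliases a context
// slot; otherwise it lives in the plain backing store. A sketch (the header
// layout constant is an assumption):
static inline bool IsMappedParameter(const intptr_t* parameter_map, int key,
                                     intptr_t the_hole) {
  const int kHeaderSlots = 2;  // context + backing store
  return parameter_map[kHeaderSlots + key] != the_hole;
}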
static MemOperand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
                                                  Register key,
                                                  Register parameter_map,
                                                  Register scratch,
                                                  Label* slow_case) {
  // Element is in the backing store of the arguments object, which is
  // always an (unmapped) fixed array.
  Register backing_store = parameter_map;
  __ CheckMap(backing_store, scratch, Heap::kFixedArrayMapRootIndex, slow_case,
              DONT_DO_SMI_CHECK);
  // ...
  __ SmiUntag(scratch, key);
  // ...
  __ Daddu(scratch, backing_store, scratch);
  // ...
}
void KeyedStoreIC::GenerateSloppyArguments(MacroAssembler* masm) {
  // ...
  MemOperand mapped_location = GenerateMappedArgumentsLookup(
      masm, receiver, key, a3, a4, a5, &notin, &slow);
  __ sd(value, mapped_location);
  // ...
  MemOperand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, key, a3, a4, &slow);
  __ sd(value, unmapped_location);
  // The write barrier relies on the returned operand having a zero offset.
  DCHECK_EQ(unmapped_location.offset(), 0);
  // ...
}
  Isolate* isolate = masm->isolate();
  // ...
  // Perform a tail call to the miss entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate);
  __ TailCallExternalReference(ref, 2, 1);
  // ...
  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
  Label slow, check_name, index_smi, index_name, property_array_property;
  Label probe_dictionary, check_number_dictionary;
  // ...
  Isolate* isolate = masm->isolate();

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &check_name);
  __ bind(&index_smi);
  // Now the key is known to be a smi. This place is also jumped to from
  // below, where a numeric string is converted to a smi.
  GenerateKeyedLoadReceiverCheck(masm, receiver, a0, a3,
                                 Map::kHasIndexedInterceptor, &slow);
  // ...
  // Check the receiver's map to see if it has fast elements.
  __ CheckFastElements(a0, a3, &check_number_dictionary);
  // ...
  GenerateFastArrayLoad(masm, receiver, key, a0, a3, a4, v0, NULL, &slow);
  // ...
  __ bind(&check_number_dictionary);
  // ...
  // Check whether the elements form a number dictionary.
  __ LoadRoot(at, Heap::kHashTableMapRootIndex);
  __ Branch(&slow, ne, a3, Operand(at));
  __ dsra32(a0, key, 0);  // Untag the smi key (upper 32 bits on MIPS64).
  __ LoadFromNumberDictionary(&slow, a4, key, v0, a0, a3, a5);
  // ...
  __ bind(&check_name);
  GenerateKeyNameCheck(masm, key, a0, a3, &index_name, &slow);
  // ...
  GenerateKeyedLoadReceiverCheck(masm, receiver, a0, a3,
                                 Map::kHasNamedInterceptor, &slow);
  // ...
  // If the receiver is a fast-case object, check the keyed lookup cache;
  // otherwise probe the dictionary.
  __ LoadRoot(at, Heap::kHashTableMapRootIndex);
  __ Branch(&probe_dictionary, eq, a4, Operand(at));

  // Load the map of the receiver, compute the keyed lookup cache hash
  // based on 32 bits of the map pointer and the name hash.
  __ dsll32(a3, a0, 0);
  __ dsrl32(a3, a3, 0);  // Zero-extend the low 32 bits of the map pointer.
  // ...
  int mask = KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask;
  __ And(a3, a3, Operand(mask));
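// --- Illustration (not part of the original source) -----------------------
// The shift/xor/mask sequence above computes the keyed-lookup-cache bucket
// from the low 32 bits of the map pointer and the name's hash, roughly:
static inline uint32_t LookupCacheBucket(uint32_t map_low32,
                                         uint32_t name_hash) {
  const int kMapHashShift = 5;      // assumed
  const uint32_t kHashMask = 0xFF;  // assumed bucket-index mask
  return ((map_low32 >> kMapHashShift) ^ name_hash) & kHashMask;
}
// Each bucket holds kEntriesPerBucket (map, name) pairs; the unrolled loops
// below probe them in order and fall through to the slow path on a miss.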
  // Load the key (consisting of map and unique name) from the cache and
  // check for a match.
  Label load_in_object_property;
  static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
  Label hit_on_nth_entry[kEntriesPerBucket];
  // ...
  ExternalReference cache_keys =
      ExternalReference::keyed_lookup_cache_keys(isolate);
  __ li(a4, Operand(cache_keys));
  // ...
  __ daddu(a4, a4, at);

  for (int i = 0; i < kEntriesPerBucket - 1; i++) {
    Label try_next_entry;
    // ...
    __ Branch(&try_next_entry, ne, a0, Operand(a5));
    // ...
    __ Branch(&hit_on_nth_entry[i], eq, key, Operand(a5));
    __ bind(&try_next_entry);
  }

  // Last entry: the key must match, otherwise fall through to the slow case.
  // ...
  __ Branch(&slow, ne, a0, Operand(a5));
  // ...
  __ Branch(&slow, ne, key, Operand(a5));

  // Get the field offset.
  // ...
  ExternalReference cache_field_offsets =
      ExternalReference::keyed_lookup_cache_field_offsets(isolate);

  // Hit on nth entry.
  for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
    __ bind(&hit_on_nth_entry[i]);
    __ li(a4, Operand(cache_field_offsets));
    // ...
    __ daddu(at, a4, at);
    // ...
    __ Dsubu(a5, a5, a6);
    __ Branch(&property_array_property, ge, a5, Operand(zero_reg));
    if (i != 0) {
      __ Branch(&load_in_object_property);
    }
  }

  // Load the in-object property.
  __ bind(&load_in_object_property);
  // ...
  __ daddu(a6, a6, a5);  // Index from start of object.
  // ...
  __ daddu(at, receiver, at);
  // ...

  // Load the property out of the properties array.
  __ bind(&property_array_property);
  // ...
  __ Daddu(v0, v0, a1);
  // ...
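// --- Illustration (not part of the original source) -----------------------
// The cached field offset is biased by the number of in-object properties:
// after the Dsubu above, a negative result selects the in-object load path
// and a non-negative one indexes the out-of-line properties array. Sketch:
static inline bool LoadsFromPropertyArray(int cached_offset_words,
                                          int inobject_properties) {
  return cached_offset_words - inobject_properties >= 0;
}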
  // Do a quick inline probe of the receiver's dictionary, if it exists.
  __ bind(&probe_dictionary);
  // ...
  GenerateGlobalInstanceTypeCheck(masm, a0, &slow);
  // Load the property into v0.
  GenerateDictionaryLoad(masm, &slow, a3, key, v0, a5, a4);
  // ...
  __ bind(&index_name);
  __ IndexFromHash(a3, key);
  // Now jump to the place where smi keys are handled.
  __ Branch(&index_smi);
}
void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
  // ...
  Register scratch = a3;
  Register result = v0;
  DCHECK(!scratch.is(receiver) && !scratch.is(index));
  StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          STRING_INDEX_IS_ARRAY_INDEX);
  char_at_generator.GenerateFast(masm);
  // ...
  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, call_helper);
  // ...
}
// Generates the fast (object/double) store paths shared by
// KeyedStoreIC::GenerateGeneric, plus the elements-kind transition cases.
static void KeyedStoreGenerateGenericHelper(
    MacroAssembler* masm, Label* fast_object, Label* fast_double, Label* slow,
    KeyedStoreCheckMap check_map, KeyedStoreIncrementLength increment_length,
    Register value, Register key, Register receiver, Register receiver_map,
    Register elements_map, Register elements) {
  Label transition_smi_elements;
  Label finish_object_store, non_double_value, transition_double_elements;
  Label fast_double_without_map_check;

  // Fast case: Do the store, could be either Object or double.
  __ bind(fast_object);
  Register scratch_value = a4;
  Register address = a5;
  // ...
  __ Branch(fast_double, ne, elements_map,
            Operand(masm->isolate()->factory()->fixed_array_map()));

  // HOLECHECK: guards "A[i] = V". We have to go to the runtime if the
  // current value is the hole because there may be a callback on the element.
  Label holecheck_passed1;
  // ...
  __ daddu(address, address, at);
  // ...
  __ Branch(&holecheck_passed1, ne, scratch_value,
            Operand(masm->isolate()->factory()->the_hole_value()));
  __ JumpIfDictionaryInPrototypeChain(receiver, elements_map, scratch_value,
                                      slow);
  __ bind(&holecheck_passed1);

  // Smi stores don't require further checks.
  Label non_smi_value;
  __ JumpIfNotSmi(value, &non_smi_value);
  // ...
  // It's irrelevant whether the array is smi-only or not when writing a smi.
  __ Daddu(address, elements,
           Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // ...
  __ Daddu(address, address, scratch_value);
  // ...

  __ bind(&non_smi_value);
  // Escape to the elements-kind transition case.
  __ CheckFastObjectElements(receiver_map, scratch_value,
                             &transition_smi_elements);

  // Fast elements array: store the value in the elements backing store.
  __ bind(&finish_object_store);
  // ...
  __ Daddu(address, elements,
           Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // ...
  __ Daddu(address, address, scratch_value);
  // ...
  // Update the write barrier for the elements array address.
  __ mov(scratch_value, value);
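// --- Illustration (not part of the original source) -----------------------
// Why smi stores skip the write barrier above: only tagged heap pointers
// create references the GC must track. A sketch of the predicate (V8 tags
// heap objects by setting the low bit):
static inline bool NeedsWriteBarrier(intptr_t tagged_value) {
  const intptr_t kHeapObjectTag = 1;
  return (tagged_value & kHeapObjectTag) == kHeapObjectTag;
}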
  // ...
  __ bind(fast_double);
  if (check_map == kCheckMap) {
    // Check for fast double array case. If this fails, go to the runtime.
    __ LoadRoot(at, Heap::kFixedDoubleArrayMapRootIndex);
    __ Branch(slow, ne, elements_map, Operand(at));
  }

  // HOLECHECK: only the upper 32 bits of a hole NaN need to be inspected.
  __ Daddu(address, elements,
           Operand(FixedDoubleArray::kHeaderSize + kHoleNanUpper32Offset -
                   kHeapObjectTag));
  // ...
  __ daddu(address, address, at);
  // ...
  __ Branch(&fast_double_without_map_check, ne, scratch_value,
            Operand(kHoleNanUpper32));
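// --- Illustration (not part of the original source) -----------------------
// FixedDoubleArray marks holes with a NaN carrying a distinguished payload,
// so comparing just the upper 32 bits against kHoleNanUpper32 suffices; no
// ordinary double produced by the VM uses that pattern. Sketch (the constant
// value is an assumption):
static inline bool IsHoleNan(uint64_t double_bits) {
  const uint32_t kHoleNanUpper32 = 0x7FF7FFFF;  // assumed hole-NaN pattern
  return static_cast<uint32_t>(double_bits >> 32) == kHoleNanUpper32;
}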
  __ JumpIfDictionaryInPrototypeChain(receiver, elements_map, scratch_value,
                                      slow);
  __ bind(&fast_double_without_map_check);
  __ StoreNumberToDoubleElements(value, key,
                                 elements,  // Overwritten.
                                 a3,        // Scratch regs...
                                 a4, a5, &transition_double_elements);
  // ...

  __ bind(&transition_smi_elements);
  // Transition the array appropriately depending on the value type.
  __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
  __ Branch(&non_double_value, ne, a4, Operand(at));

  // Value is a double: transition FAST_SMI_ELEMENTS -> FAST_DOUBLE_ELEMENTS
  // and complete the store.
  __ LoadTransitionedArrayMapConditional(
      FAST_SMI_ELEMENTS, FAST_DOUBLE_ELEMENTS, receiver_map, a4, slow);
  AllocationSiteMode mode =
      AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_DOUBLE_ELEMENTS);
  ElementsTransitionGenerator::GenerateSmiToDouble(masm, receiver, key, value,
                                                   receiver_map, mode, slow);
  // ...
  __ jmp(&fast_double_without_map_check);

  __ bind(&non_double_value);
  // Value is not a double: FAST_SMI_ELEMENTS -> FAST_ELEMENTS.
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, FAST_ELEMENTS,
                                         receiver_map, a4, slow);
  mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS);
  ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
      masm, receiver, key, value, receiver_map, mode, slow);
  // ...
  __ jmp(&finish_object_store);

  __ bind(&transition_double_elements);
  // Elements are double, but the value is an object that is not a
  // HeapNumber: FAST_DOUBLE_ELEMENTS -> FAST_ELEMENTS.
  __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS,
                                         receiver_map, a4, slow);
  mode = AllocationSite::GetMode(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS);
  ElementsTransitionGenerator::GenerateDoubleToObject(
      masm, receiver, key, value, receiver_map, mode, slow);
  // ...
  __ jmp(&finish_object_store);
}
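// --- Illustration (not part of the original source) -----------------------
// The three transition blocks above each walk one edge of the elements-kind
// lattice; the store retries on the fast path after migrating the backing
// store:
//   FAST_SMI_ELEMENTS    --(store heap number)--> FAST_DOUBLE_ELEMENTS
//   FAST_SMI_ELEMENTS    --(store other object)-> FAST_ELEMENTS
//   FAST_DOUBLE_ELEMENTS --(store non-number)---> FAST_ELEMENTS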
void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
                                   StrictMode strict_mode) {
  Label slow, fast_object, fast_object_grow;
  Label fast_double, fast_double_grow;
  Label array, extra, check_if_double_array;
  // ...
  Register receiver_map = a3;
  Register elements_map = a6;
  Register elements = a7;  // Elements array of the receiver.

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &slow);
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, &slow);
  // ...
  __ Branch(&slow, ne, a4, Operand(zero_reg));
  // ...
  // Check array bounds. Both the key and the length of FixedArray are smis.
  __ Branch(&fast_object, lo, key, Operand(a4));

  // Slow case, handle jump to runtime.
  __ bind(&slow);
  GenerateRuntimeSetProperty(masm, strict_mode);

  // Extra capacity case: check if there is extra capacity to
  // perform the store and update the length.
  __ bind(&extra);
  // Condition code from comparing key and array length is still available.
  __ Branch(&slow, ne, key, Operand(a4));  // Only support writing to length.
  // ...
  __ Branch(&slow, hs, key, Operand(a4));
  // ...
  __ Branch(&check_if_double_array, ne, elements_map,
            Heap::kFixedArrayMapRootIndex);
  // ...
  __ jmp(&fast_object_grow);

  __ bind(&check_if_double_array);
  __ Branch(&slow, ne, elements_map, Heap::kFixedDoubleArrayMapRootIndex);
  // ...
  __ jmp(&fast_double_grow);

  // Array case: get the length and the elements array from the JS array.
  // Check that the array is in fast mode (and writable).
  __ bind(&array);
  // ...
  __ Branch(&extra, hs, key, Operand(a4));

  KeyedStoreGenerateGenericHelper(
      masm, &fast_object, &fast_double, &slow, kCheckMap, kDontIncrementLength,
      value, key, receiver, receiver_map, elements_map, elements);
  KeyedStoreGenerateGenericHelper(masm, &fast_object_grow, &fast_double_grow,
                                  &slow, kDontCheckMap, kIncrementLength,
                                  value, key, receiver, receiver_map,
                                  elements_map, elements);
}
  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
  masm->isolate()->stub_cache()->GenerateProbe(masm, flags, false, receiver,
                                               name, a3, a4, a5, a6);
  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
void StoreIC::GenerateNormal(MacroAssembler* masm) {
  Label miss;
  Register dictionary = a3;
  // ...
  GenerateDictionaryStore(masm, &miss, a3, name, value, a4, a5);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->store_normal_hit(), 1, a4, a5);
  __ Ret();

  __ bind(&miss);
  __ IncrementCounter(counters->store_normal_miss(), 1, a4, a5);
  GenerateMiss(masm);
}
Condition CompareIC::ComputeCondition(Token::Value op) {
  switch (op) {
    case Token::EQ_STRICT:
    case Token::EQ:
      return eq;
    // ...
  }
}
bool CompareIC::HasInlinedSmiCode(Address address) {
  // The address of the instruction following the call.
  Address andi_instruction_address =
      address + Assembler::kCallTargetAddressOffset;
  // ...
}
void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
  Address andi_instruction_address =
      address + Assembler::kCallTargetAddressOffset;
  // ...
  if (FLAG_trace_ic) {
    PrintF("[ patching ic at %p, andi=%p, delta=%d\n", address,
           andi_instruction_address, delta);
  }
  // ...
  CodePatcher patcher(patch_address, 2);
  // ...
  // DISABLE_INLINED_SMI_CHECK: make the andi always yield zero (the ENABLE
  // case restores the kSmiTagMask immediate instead).
  patcher.masm()->andi(at, reg, 0);
  // Flip the branch so the now-constant test takes the intended path.
  if (Assembler::IsBeq(branch_instr)) {
    patcher.ChangeBranchCondition(ne);
  } else {
    DCHECK(Assembler::IsBne(branch_instr));
    patcher.ChangeBranchCondition(eq);
  }
}
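// --- Illustration (not part of the original source) -----------------------
// Why the patch works: the inlined check is "andi at, reg, kSmiTagMask"
// followed by a beq/bne against zero_reg. Rewriting the immediate to 0
// forces at == 0 regardless of reg, and flipping beq <-> bne then fixes the
// branch outcome, switching the inlined smi test on or off in place. Sketch:
static inline bool InlinedCheckBranchTaken(intptr_t reg_value, bool enabled,
                                           bool branch_on_equal) {
  const intptr_t kSmiTagMask = 1;  // low bit clear => smi
  intptr_t at_value = enabled ? (reg_value & kSmiTagMask) : 0;
  return branch_on_equal ? (at_value == 0) : (at_value != 0);
}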
#endif  // V8_TARGET_ARCH_MIPS64