#if V8_TARGET_ARCH_MIPS

#define __ ACCESS_MASM(masm)
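
// Jumps to |global_object| if the instance type in |type| is one of the
// global object types; falls through otherwise.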
static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm, Register type,
                                            Label* global_object) {
  // Register usage:
  //   type: holds the receiver instance type on entry.
  __ Branch(global_object, eq, type, Operand(JS_GLOBAL_OBJECT_TYPE));
  __ Branch(global_object, eq, type, Operand(JS_BUILTINS_OBJECT_TYPE));
  __ Branch(global_object, eq, type, Operand(JS_GLOBAL_PROXY_TYPE));
}
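
// Helper function used from LoadIC::GenerateNormal.
//
// elements: Property dictionary. It is not clobbered if a jump to the miss
//           label is done.
// name:     Property name. It is not clobbered if a jump to the miss label
//           is done.
// result:   Register for the result; only written on the non-miss path.
// The two scratch registers need to be different from elements, name and
// result. The generated code assumes the receiver has slow properties, is
// not a global object and does not have interceptors.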
static void GenerateDictionaryLoad(MacroAssembler* masm, Label* miss,
                                   Register elements, Register name,
                                   Register result, Register scratch1,
                                   Register scratch2) {
  Label done;

  // Probe the dictionary.
  NameDictionaryLookupStub::GeneratePositiveLookup(masm, miss, &done, elements,
                                                   name, scratch1, scratch2);

  // If probing finds an entry, check that the value is a normal property.
  __ bind(&done);  // scratch2 == elements + 4 * index.
  const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  __ lw(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
  __ And(at, scratch1,
         Operand(PropertyDetails::TypeField::kMask << kSmiTagSize));
  __ Branch(miss, ne, at, Operand(zero_reg));

  // Get the value at the masked, scaled index and return.
  __ lw(result,
        FieldMemOperand(scratch2, kElementsStartOffset + 1 * kPointerSize));
}
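
// Helper function used from StoreIC::GenerateNormal: same probing scheme as
// GenerateDictionaryLoad, but writes |value| into the found entry after
// checking that the property is a writable, normal field.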
static void GenerateDictionaryStore(MacroAssembler* masm, Label* miss,
                                    Register elements, Register name,
                                    Register value, Register scratch1,
                                    Register scratch2) {
  Label done;

  // Probe the dictionary.
  NameDictionaryLookupStub::GeneratePositiveLookup(masm, miss, &done, elements,
                                                   name, scratch1, scratch2);

  // If probing finds an entry, check that the value is a normal property
  // that is not read only.
  __ bind(&done);  // scratch2 == elements + 4 * index.
  const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  const int kTypeAndReadOnlyMask =
      (PropertyDetails::TypeField::kMask |
       PropertyDetails::AttributesField::encode(READ_ONLY))
      << kSmiTagSize;
  __ lw(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
  __ And(at, scratch1, Operand(kTypeAndReadOnlyMask));
  __ Branch(miss, ne, at, Operand(zero_reg));

  // Store the value at the masked, scaled index and return.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ Addu(scratch2, scratch2, Operand(kValueOffset - kHeapObjectTag));
  __ sw(value, MemOperand(scratch2));

  // Update the write barrier. Make sure not to clobber the value.
  __ mov(scratch1, value);
  __ RecordWrite(elements, scratch2, scratch1, kRAHasNotBeenSaved,
                 kDontSaveFPRegs);
}
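
// Checks the receiver for special cases (value type, slow-case bits);
// falls through for regular JS objects.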
static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
                                           Register receiver, Register map,
                                           Register scratch,
                                           int interceptor_bit, Label* slow) {
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, slow);
  // Get the map of the receiver and check the bit field: bail out to the
  // slow path if an access check or an interceptor is required.
  __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ lbu(scratch, FieldMemOperand(map, Map::kBitFieldOffset));
  __ And(at, scratch,
         Operand((1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit)));
  __ Branch(slow, ne, at, Operand(zero_reg));
  // Check that the object is some kind of JS object EXCEPT JS Value type. In
  // the case that the object is a value-wrapper object, we enter the runtime
  // system to make sure that indexing into string objects works as intended.
  __ lbu(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ Branch(slow, lt, scratch, Operand(JS_OBJECT_TYPE));
}
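
// Loads an indexed element from a fast-case array; if |not_fast_array| is
// NULL, the elements map check is replaced by an assertion.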
static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver,
                                  Register key, Register elements,
                                  Register scratch1, Register scratch2,
                                  Register result, Label* not_fast_array,
                                  Label* out_of_range) {
  __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  if (not_fast_array != NULL) {
    // Check that the object is in fast mode (not dictionary).
    __ lw(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset));
    __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
    __ Branch(not_fast_array, ne, scratch1, Operand(at));
  } else {
    __ AssertFastElements(elements);
  }

  // Check that the key (index) is within bounds.
  __ lw(scratch1, FieldMemOperand(elements, FixedArray::kLengthOffset));
  __ Branch(out_of_range, hs, key, Operand(scratch1));

  // Fast case: Do the load. The key is a smi, so scale it to a byte offset.
  __ Addu(scratch1, elements,
          Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(at, key, kPointerSizeLog2 - kSmiTagSize);
  __ addu(at, at, scratch1);
  __ lw(scratch2, MemOperand(at));

  // In case the loaded value is the_hole we have to consult GetProperty
  // to ensure the prototype chain is searched.
  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
  __ Branch(out_of_range, eq, scratch2, Operand(at));
  __ mov(result, scratch2);
}
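
// Checks whether a key is an array-index string or a unique name; falls
// through if the key is a unique name.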
static void GenerateKeyNameCheck(MacroAssembler* masm, Register key,
                                 Register map, Register hash,
                                 Label* index_string, Label* not_unique) {
  // The key is not a smi. Is it a name?
  Label unique;
  __ GetObjectType(key, map, hash);
  __ Branch(not_unique, hi, hash, Operand(LAST_UNIQUE_NAME_TYPE));
  __ Branch(&unique, eq, hash, Operand(LAST_UNIQUE_NAME_TYPE));

  // Is the string an array index, with cached numeric value?
  __ lw(hash, FieldMemOperand(key, Name::kHashFieldOffset));
  __ And(at, hash, Operand(Name::kContainsCachedArrayIndexMask));
  __ Branch(index_string, eq, at, Operand(zero_reg));

  // Is the string internalized? We know it's a string, so a single bit test
  // is enough.
  __ lbu(hash, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ And(at, hash, Operand(kIsNotInternalizedMask));
  __ Branch(not_unique, ne, at, Operand(zero_reg));

  __ bind(&unique);
}
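
// Dictionary-mode (slow-properties) named load: probe the receiver's
// property dictionary, falling back to the runtime on any miss.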
void LoadIC::GenerateNormal(MacroAssembler* masm) {
  Register dictionary = a0;
  Label slow;

  __ lw(dictionary,
        FieldMemOperand(ReceiverRegister(), JSObject::kPropertiesOffset));
  GenerateDictionaryLoad(masm, &slow, dictionary, NameRegister(), v0, a3, t0);
  __ Ret();

  // Dictionary load failed, go slow (but don't miss).
  __ bind(&slow);
  GenerateRuntimeGetProperty(masm);
}


// A register that isn't one of the parameters to the load IC.
static const Register LoadIC_TempRegister() { return a3; }
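
// Miss and runtime-fallback paths for the named load IC.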
void LoadIC::GenerateMiss(MacroAssembler* masm) {
  // The return address is in ra.
  Isolate* isolate = masm->isolate();

  __ mov(LoadIC_TempRegister(), ReceiverRegister());
  __ Push(LoadIC_TempRegister(), NameRegister());

  // Perform a tail call to the runtime miss handler.
  ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss), isolate);
  __ TailCallExternalReference(ref, 2, 1);
}


void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  __ mov(LoadIC_TempRegister(), ReceiverRegister());
  __ Push(LoadIC_TempRegister(), NameRegister());
  __ TailCallRuntime(Runtime::kGetProperty, 2, 1);
}
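
// Computes the address of a mapped (context-aliased) sloppy-arguments
// element, jumping to |unmapped_case| when |key| is not context-mapped and
// to |slow_case| when the receiver is not a sloppy arguments object.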
static MemOperand GenerateMappedArgumentsLookup(
    MacroAssembler* masm, Register object, Register key, Register scratch1,
    Register scratch2, Register scratch3, Label* unmapped_case,
    Label* slow_case) {
  Heap* heap = masm->isolate()->heap();

  // Check that the receiver is a JSObject. Because of the map check later,
  // we do not need to check for interceptors or whether it requires access
  // checks.
  __ JumpIfSmi(object, slow_case);
  __ GetObjectType(object, scratch1, scratch2);
  __ Branch(slow_case, lt, scratch2, Operand(FIRST_JS_RECEIVER_TYPE));

  // Check that the key is a positive smi.
  __ And(scratch1, key, Operand(0x80000001));
  __ Branch(slow_case, ne, scratch1, Operand(zero_reg));

  // Load the elements into scratch1 and check its map.
  Handle<Map> arguments_map(heap->sloppy_arguments_elements_map());
  __ lw(scratch1, FieldMemOperand(object, JSObject::kElementsOffset));
  __ CheckMap(scratch1, scratch2, arguments_map, slow_case, DONT_DO_SMI_CHECK);

  // Check if the element is in the range of mapped arguments. If not, jump
  // to the unmapped lookup with the parameter map in scratch1.
  __ lw(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset));
  __ Subu(scratch2, scratch2, Operand(Smi::FromInt(2)));
  __ Branch(unmapped_case, Ugreater_equal, key, Operand(scratch2));

  // Load the element index and check whether it is the hole.
  const int kOffset =
      FixedArray::kHeaderSize + 2 * kPointerSize - kHeapObjectTag;

  __ li(scratch3, Operand(kPointerSize >> 1));
  __ Mul(scratch3, key, scratch3);
  __ Addu(scratch3, scratch3, Operand(kOffset));

  __ Addu(scratch2, scratch1, scratch3);
  __ lw(scratch2, MemOperand(scratch2));
  __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
  __ Branch(unmapped_case, eq, scratch2, Operand(scratch3));

  // Load the value from the context and return its address. scratch1 can be
  // reused because the unmapped lookup (which needs the parameter map in
  // scratch1) is not reached from here.
  __ lw(scratch1, FieldMemOperand(scratch1, FixedArray::kHeaderSize));
  __ li(scratch3, Operand(kPointerSize >> 1));
  __ Mul(scratch3, scratch2, scratch3);
  __ Addu(scratch3, scratch3, Operand(Context::kHeaderSize - kHeapObjectTag));
  __ Addu(scratch2, scratch1, scratch3);
  return MemOperand(scratch2);
}
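
// Computes the address of |key| in the arguments backing store, which is
// found in the second element of |parameter_map|; |parameter_map| is
// clobbered.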
static MemOperand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
                                                  Register key,
                                                  Register parameter_map,
                                                  Register scratch,
                                                  Label* slow_case) {
  // The element is in the arguments backing store, referenced by the second
  // element of the parameter map.
  const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
  Register backing_store = parameter_map;
  __ lw(backing_store, FieldMemOperand(parameter_map, kBackingStoreOffset));
  __ CheckMap(backing_store, scratch, Heap::kFixedArrayMapRootIndex, slow_case,
              DONT_DO_SMI_CHECK);
  __ lw(scratch, FieldMemOperand(backing_store, FixedArray::kLengthOffset));
  __ Branch(slow_case, Ugreater_equal, key, Operand(scratch));
  __ li(scratch, Operand(kPointerSize >> 1));
  __ Mul(scratch, key, scratch);
  __ Addu(scratch, scratch, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ Addu(scratch, backing_store, scratch);
  return MemOperand(scratch);
}
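
// Keyed store into a sloppy arguments object: write through the mapped
// (context) slot when the parameter is aliased, otherwise through the
// unmapped backing store; both paths update the write barrier.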
void KeyedStoreIC::GenerateSloppyArguments(MacroAssembler* masm) {
  Register receiver = ReceiverRegister();
  Register key = NameRegister();
  Register value = ValueRegister();

  Label slow, notin;
  // The store address is returned in the register of the MemOperand.
  MemOperand mapped_location = GenerateMappedArgumentsLookup(
      masm, receiver, key, a3, t0, t1, &notin, &slow);
  __ sw(value, mapped_location);
  // ... (write barrier and return, analogous to the unmapped case below)

  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in a3.
  MemOperand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, key, a3, t0, &slow);
  __ sw(value, unmapped_location);
  __ mov(t5, value);
  DCHECK_EQ(unmapped_location.offset(), 0);
  __ RecordWrite(a3, unmapped_location.rm(), t5, kRAHasNotBeenSaved,
                 kDontSaveFPRegs);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, value);  // (In delay slot) return the stored value.

  __ bind(&slow);
  GenerateMiss(masm);
}
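
// Miss and runtime-fallback paths for the keyed load IC.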
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
  // The return address is in ra.
  Isolate* isolate = masm->isolate();

  __ Push(ReceiverRegister(), NameRegister());

  // Perform a tail call to the runtime miss handler.
  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate);

  __ TailCallExternalReference(ref, 2, 1);
}


void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  __ Push(ReceiverRegister(), NameRegister());
  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
}
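
// The generic keyed load: smi keys go through fast elements and then the
// number dictionary; name keys go through the keyed lookup cache and then
// the property dictionary; index strings are converted to smis and retried.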
void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
  // The return address is in ra.
  Label slow, check_name, index_smi, index_name, property_array_property;
  Label probe_dictionary, check_number_dictionary;

  Register key = NameRegister();
  Register receiver = ReceiverRegister();

  Isolate* isolate = masm->isolate();

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &check_name);
  __ bind(&index_smi);
  // Now the key is known to be a smi. This place is also jumped to from
  // below, where a numeric string is converted to a smi.

  GenerateKeyedLoadReceiverCheck(masm, receiver, a0, a3,
                                 Map::kHasIndexedInterceptor, &slow);

  // Check the receiver's map to see if it has fast elements.
  __ CheckFastElements(a0, a3, &check_number_dictionary);

  GenerateFastArrayLoad(masm, receiver, key, a0, a3, t0, v0, NULL, &slow);
  __ Ret();

  __ bind(&check_number_dictionary);
  __ lw(t0, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ lw(a3, FieldMemOperand(t0, HeapObject::kMapOffset));

  // Check whether the elements array is a number dictionary.
  // a3: elements map, t0: elements.
  __ LoadRoot(at, Heap::kHashTableMapRootIndex);
  __ Branch(&slow, ne, a3, Operand(at));
  // ...
  __ LoadFromNumberDictionary(&slow, t0, key, v0, a0, a3, t1);
  __ Ret();

  // Slow case: key and receiver are still intact.
  __ bind(&slow);
  GenerateRuntimeGetProperty(masm);

  __ bind(&check_name);
  GenerateKeyNameCheck(masm, key, a0, a3, &index_name, &slow);

  GenerateKeyedLoadReceiverCheck(masm, receiver, a0, a3,
                                 Map::kHasNamedInterceptor, &slow);

  // If the receiver is a fast-case object, check the keyed lookup cache.
  // Otherwise probe the dictionary.
  __ lw(a3, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  __ lw(t0, FieldMemOperand(a3, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kHashTableMapRootIndex);
  __ Branch(&probe_dictionary, eq, t0, Operand(at));

  // Load the map of the receiver and compute the keyed lookup cache hash
  // from the map and the name hash.
  __ lw(a0, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ sra(a3, a0, KeyedLookupCache::kMapHashShift);
  __ lw(t0, FieldMemOperand(key, Name::kHashFieldOffset));
  __ sra(at, t0, Name::kHashShift);
  __ xor_(a3, a3, at);
  int mask = KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask;
  __ And(a3, a3, Operand(mask));

  // Load the key (consisting of map and unique name) from the cache and
  // check for a match.
  Label load_in_object_property;
  static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
  Label hit_on_nth_entry[kEntriesPerBucket];
  ExternalReference cache_keys =
      ExternalReference::keyed_lookup_cache_keys(isolate);
  __ li(t0, Operand(cache_keys));
  __ sll(at, a3, kPointerSizeLog2 + 1);
  __ addu(t0, t0, at);

  for (int i = 0; i < kEntriesPerBucket - 1; i++) {
    Label try_next_entry;
    __ lw(t1, MemOperand(t0, kPointerSize * i * 2));
    __ Branch(&try_next_entry, ne, a0, Operand(t1));
    __ lw(t1, MemOperand(t0, kPointerSize * (i * 2 + 1)));
    __ Branch(&hit_on_nth_entry[i], eq, key, Operand(t1));
    __ bind(&try_next_entry);
  }

  // Last entry: bail out to the slow case on any mismatch.
  __ lw(t1, MemOperand(t0, kPointerSize * (kEntriesPerBucket - 1) * 2));
  __ Branch(&slow, ne, a0, Operand(t1));
  __ lw(t1, MemOperand(t0, kPointerSize * ((kEntriesPerBucket - 1) * 2 + 1)));
  __ Branch(&slow, ne, key, Operand(t1));

  // Get the field offset.
  // a0: receiver's map, a3: lookup cache index.
  ExternalReference cache_field_offsets =
      ExternalReference::keyed_lookup_cache_field_offsets(isolate);

  // Hit on nth entry.
  for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
    __ bind(&hit_on_nth_entry[i]);
    __ li(t0, Operand(cache_field_offsets));
    // ...
    __ Branch(&property_array_property, ge, t1, Operand(zero_reg));
    if (i != 0) {
      __ Branch(&load_in_object_property);
    }
  }

  // Load the in-object property.
  __ bind(&load_in_object_property);
  // ...
  __ addu(at, receiver, at);
  __ lw(v0, MemOperand(at));
  __ Ret();

  // Load the property out of the property array.
  __ bind(&property_array_property);
  // ...
  __ Addu(v0, v0, receiver);
  __ lw(v0, MemOperand(v0));
  __ Ret();

  // Do a quick inline probe of the receiver's dictionary, if it exists.
  __ bind(&probe_dictionary);
  // a3: elements
  __ lw(a0, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset));
  GenerateGlobalInstanceTypeCheck(masm, a0, &slow);
  // Load the property into v0.
  GenerateDictionaryLoad(masm, &slow, a3, key, v0, t1, t0);
  __ Ret();

  __ bind(&index_name);
  __ IndexFromHash(a3, key);
  // Now jump to the place where smi keys are handled.
  __ Branch(&index_smi);
}
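
// Keyed load specialized for string receivers: returns the character at the
// given index via StringCharAtGenerator, missing out to the IC otherwise.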
void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
  // Return address is in ra.
  Label miss;

  Register receiver = ReceiverRegister();
  Register index = NameRegister();
  Register scratch = a3;
  Register result = v0;
  DCHECK(!scratch.is(receiver) && !scratch.is(index));

  StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          STRING_INDEX_IS_ARRAY_INDEX);
  char_at_generator.GenerateFast(masm);
  __ Ret();

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, call_helper);

  __ bind(&miss);
  GenerateMiss(masm);
}
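
// Shared tail of the generic keyed store: stores into fast object or fast
// double elements, transitioning the elements kind (smi -> double,
// smi -> object, double -> object) when the incoming value requires it.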
static void KeyedStoreGenerateGenericHelper(
    MacroAssembler* masm, Label* fast_object, Label* fast_double, Label* slow,
    KeyedStoreCheckMap check_map, KeyedStoreIncrementLength increment_length,
    Register value, Register key, Register receiver, Register receiver_map,
    Register elements_map, Register elements) {
  Label transition_smi_elements;
  Label finish_object_store, non_double_value, transition_double_elements;
  Label fast_double_without_map_check;

  // Fast case: store into a FixedArray backing store.
  __ bind(fast_object);
  Register scratch_value = t0;
  Register address = t1;
  if (check_map == kCheckMap) {
    __ lw(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
    __ Branch(fast_double, ne, elements_map,
              Operand(masm->isolate()->factory()->fixed_array_map()));
  }

  // HOLECHECK: guards "A[i] = V". We have to go to the runtime if the
  // current slot is the hole because there may be a setter on the prototype
  // chain.
  Label holecheck_passed1;
  __ Addu(address, elements,
          Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(at, key, kPointerSizeLog2 - kSmiTagSize);
  __ addu(address, address, at);
  __ lw(scratch_value, MemOperand(address));
  __ Branch(&holecheck_passed1, ne, scratch_value,
            Operand(masm->isolate()->factory()->the_hole_value()));
  __ JumpIfDictionaryInPrototypeChain(receiver, elements_map, scratch_value,
                                      slow);
  __ bind(&holecheck_passed1);

  // Smi stores don't require further checks.
  Label non_smi_value;
  __ JumpIfNotSmi(value, &non_smi_value);
  // ...
  // It's irrelevant whether the array is smi-only when writing a smi.
  __ Addu(address, elements,
          Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(scratch_value, key, kPointerSizeLog2 - kSmiTagSize);
  __ Addu(address, address, scratch_value);
  __ sw(value, MemOperand(address));
  __ Ret();

  __ bind(&non_smi_value);
  // Escape to the elements-kind transition case.
  __ CheckFastObjectElements(receiver_map, scratch_value,
                             &transition_smi_elements);

  // Fast elements array: store the value in the elements backing store.
  __ bind(&finish_object_store);
  // ...
  __ Addu(address, elements,
          Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(scratch_value, key, kPointerSizeLog2 - kSmiTagSize);
  __ Addu(address, address, scratch_value);
  __ sw(value, MemOperand(address));
  // Update the write barrier for the elements array address.
  __ mov(scratch_value, value);  // Preserve the value which is returned.
  __ RecordWrite(elements, address, scratch_value, kRAHasNotBeenSaved,
                 kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  __ Ret();

  __ bind(fast_double);
  if (check_map == kCheckMap) {
    // Check for fast double array case. If this fails, call through to the
    // runtime.
    __ LoadRoot(at, Heap::kFixedDoubleArrayMapRootIndex);
    __ Branch(slow, ne, elements_map, Operand(at));
  }

  // HOLECHECK: guards "A[i] double hole?". We have to see if the double
  // version of the hole is present, and if so go to the runtime.
  // ...
  __ sll(at, key, kPointerSizeLog2);
  __ addu(address, address, at);
  __ lw(scratch_value, MemOperand(address));
  __ Branch(&fast_double_without_map_check, ne, scratch_value,
            Operand(kHoleNanUpper32));
  __ JumpIfDictionaryInPrototypeChain(receiver, elements_map, scratch_value,
                                      slow);

  __ bind(&fast_double_without_map_check);
  __ StoreNumberToDoubleElements(value, key,
                                 elements,  // Overwritten.
                                 a3,        // Scratch regs...
                                 t0, t1, &transition_double_elements);
  // ...
  __ Ret();

  __ bind(&transition_smi_elements);
  // Transition the array appropriately depending on the value type.
  __ lw(t0, FieldMemOperand(value, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
  __ Branch(&non_double_value, ne, t0, Operand(at));

  // Value is a double: transition FAST_SMI_ELEMENTS -> FAST_DOUBLE_ELEMENTS
  // and complete the store.
  __ LoadTransitionedArrayMapConditional(
      FAST_SMI_ELEMENTS, FAST_DOUBLE_ELEMENTS, receiver_map, t0, slow);
  AllocationSiteMode mode =
      AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_DOUBLE_ELEMENTS);
  ElementsTransitionGenerator::GenerateSmiToDouble(masm, receiver, key, value,
                                                   receiver_map, mode, slow);
  __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&fast_double_without_map_check);

  __ bind(&non_double_value);
  // Value is not a double: transition FAST_SMI_ELEMENTS -> FAST_ELEMENTS.
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, FAST_ELEMENTS,
                                         receiver_map, t0, slow);
  mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS);
  ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
      masm, receiver, key, value, receiver_map, mode, slow);
  __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);

  __ bind(&transition_double_elements);
  // Elements are FAST_DOUBLE_ELEMENTS, but the value is an object that is
  // not a HeapNumber: transition FAST_DOUBLE_ELEMENTS -> FAST_ELEMENTS.
  __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS,
                                         receiver_map, t0, slow);
  mode = AllocationSite::GetMode(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS);
  ElementsTransitionGenerator::GenerateDoubleToObject(
      masm, receiver, key, value, receiver_map, mode, slow);
  __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);
}
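
// The generic keyed store: checks receiver and key, stores in-bounds
// elements directly, grows the array for writes to array[array.length], and
// defers everything else to the runtime.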
void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
                                   StrictMode strict_mode) {
  Label slow, fast_object, fast_object_grow;
  Label fast_double, fast_double_grow;
  Label array, extra, check_if_double_array;

  // Register usage.
  Register value = ValueRegister();
  Register key = NameRegister();
  Register receiver = ReceiverRegister();
  Register receiver_map = a3;
  Register elements_map = t2;
  Register elements = t3;  // Elements array of the receiver.
  // t0 and t1 are used as general scratch registers.

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &slow);
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, &slow);
  // Get the map of the object.
  __ lw(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  // Check that the receiver does not require access checks and is not
  // observed; the generic stub does not handle either case.
  __ lbu(t0, FieldMemOperand(receiver_map, Map::kBitFieldOffset));
  __ And(t0, t0,
         Operand(1 << Map::kIsAccessCheckNeeded | 1 << Map::kIsObserved));
  __ Branch(&slow, ne, t0, Operand(zero_reg));
  // Check if the object is a JS array or not.
  __ lbu(t0, FieldMemOperand(receiver_map, Map::kInstanceTypeOffset));
  __ Branch(&array, eq, t0, Operand(JS_ARRAY_TYPE));
  // Check that the object is some kind of JSObject.
  __ Branch(&slow, lt, t0, Operand(FIRST_JS_OBJECT_TYPE));

  // Object case: check the key against the length in the elements array.
  __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
  __ Branch(&fast_object, lo, key, Operand(t0));

  // Slow case: hand over to the runtime. The entry registers are intact.
  __ bind(&slow);
  PropertyICCompiler::GenerateRuntimeSetProperty(masm, strict_mode);

  // Extra capacity case: check if there is extra capacity to perform the
  // store and update the length; used when writing to array[array.length].
  __ bind(&extra);
  // Only support writing to array[array.length].
  __ Branch(&slow, ne, key, Operand(t0));
  // Check for room in the elements backing store.
  __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
  __ Branch(&slow, hs, key, Operand(t0));
  __ lw(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
  __ Branch(&check_if_double_array, ne, elements_map,
            Heap::kFixedArrayMapRootIndex);

  __ jmp(&fast_object_grow);

  __ bind(&check_if_double_array);
  __ Branch(&slow, ne, elements_map, Heap::kFixedDoubleArrayMapRootIndex);
  __ jmp(&fast_double_grow);

  // Array case: get the length and the elements array from the JS array and
  // check the key against the length.
  __ bind(&array);
  __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ lw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ Branch(&extra, hs, key, Operand(t0));

  KeyedStoreGenerateGenericHelper(
      masm, &fast_object, &fast_double, &slow, kCheckMap, kDontIncrementLength,
      value, key, receiver, receiver_map, elements_map, elements);
  KeyedStoreGenerateGenericHelper(masm, &fast_object_grow, &fast_double_grow,
                                  &slow, kDontCheckMap, kIncrementLength, value,
                                  key, receiver, receiver_map, elements_map,
                                  elements);
}

void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
  // Push receiver, key and value for the runtime call.
  __ Push(ReceiverRegister(), NameRegister(), ValueRegister());

  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
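
// Megamorphic named store: probe the stub cache for a handler and miss out
// to the runtime when nothing matches.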
void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
  Register receiver = ReceiverRegister();
  Register name = NameRegister();

  // Probe the stub cache.
  Code::Flags flags = Code::RemoveTypeAndHolderFromFlags(
      Code::ComputeHandlerFlags(Code::STORE_IC));
  masm->isolate()->stub_cache()->GenerateProbe(masm, flags, false, receiver,
                                               name, a3, t0, t1, t2);

  // Cache miss: jump to runtime.
  GenerateMiss(masm);
}

void StoreIC::GenerateMiss(MacroAssembler* masm) {
  // Push receiver, name and value for the runtime call.
  __ Push(ReceiverRegister(), NameRegister(), ValueRegister());

  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
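
// Dictionary-mode (slow-properties) named store, with hit/miss counters.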
void StoreIC::GenerateNormal(MacroAssembler* masm) {
  Label miss;
  Register receiver = ReceiverRegister();
  Register name = NameRegister();
  Register value = ValueRegister();
  Register dictionary = a3;

  __ lw(dictionary, FieldMemOperand(receiver, JSObject::kPropertiesOffset));

  GenerateDictionaryStore(masm, &miss, dictionary, name, value, t0, t1);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->store_normal_hit(), 1, t0, t1);
  __ Ret();

  __ bind(&miss);
  __ IncrementCounter(counters->store_normal_miss(), 1, t0, t1);
  GenerateMiss(masm);
}
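
// Maps a comparison token onto the MIPS branch condition used by CompareIC.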
Condition CompareIC::ComputeCondition(Token::Value op) {
  switch (op) {
    case Token::EQ_STRICT:
    case Token::EQ:
      return eq;
    // ...
    default:
      UNREACHABLE();
      return kNoCondition;
  }
}
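
// Support for patching inlined smi checks: each patchable call site is
// followed by an "andi at, rx, #imm" marker whose immediate encodes the
// distance back to the check, so the IC can later toggle the check between
// its smi and non-smi variants.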
bool CompareIC::HasInlinedSmiCode(Address address) {
  // The address of the instruction following the call.
  Address andi_instruction_address =
      address + Assembler::kCallTargetAddressOffset;

  // If the instruction following the call is not an andi at, rx, #yyy,
  // nothing was inlined.
  Instr instr = Assembler::instr_at(andi_instruction_address);
  return Assembler::IsAndImmediate(instr) &&
         Assembler::GetRt(instr) == static_cast<uint32_t>(at.code());
}


void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
  Address andi_instruction_address =
      address + Assembler::kCallTargetAddressOffset;

  // If the instruction following the call is not an andi at, rx, #yyy,
  // nothing was inlined.
  Instr instr = Assembler::instr_at(andi_instruction_address);
  if (!(Assembler::IsAndImmediate(instr) &&
        Assembler::GetRt(instr) == static_cast<uint32_t>(at.code()))) {
    return;
  }

  // The delta to the start of the map check instruction and the condition
  // code to use at the patched jump.
  int delta = Assembler::GetImmediate16(instr);
  delta += Assembler::GetRs(instr) * kImm16Mask;
  // If the delta is 0 the instruction is andi at, zero_reg, #0, which also
  // signals that nothing was inlined.
  if (delta == 0) {
    return;
  }

  if (FLAG_trace_ic) {
    PrintF("[ patching ic at %p, andi=%p, delta=%d\n", address,
           andi_instruction_address, delta);
  }

  Address patch_address =
      andi_instruction_address - delta * Instruction::kInstrSize;
  Instr instr_at_patch = Assembler::instr_at(patch_address);
  // Patch a conditional "jump if not smi / jump if smi" site by rewriting
  // the andi immediate (0 vs. kSmiTagMask) and flipping the following branch
  // condition.
  CodePatcher patcher(patch_address, 2);
  Register reg = Register::from_code(Assembler::GetRs(instr_at_patch));
  if (check == ENABLE_INLINED_SMI_CHECK) {
    DCHECK(Assembler::IsAndImmediate(instr_at_patch));
    DCHECK_EQ(0, Assembler::GetImmediate16(instr_at_patch));
    patcher.masm()->andi(at, reg, kSmiTagMask);
  } else {
    DCHECK(check == DISABLE_INLINED_SMI_CHECK);
    DCHECK(Assembler::IsAndImmediate(instr_at_patch));
    patcher.masm()->andi(at, reg, 0);
  }
  Instr branch_instr =
      Assembler::instr_at(patch_address + Instruction::kInstrSize);
  DCHECK(Assembler::IsBranch(branch_instr));
  if (Assembler::IsBeq(branch_instr)) {
    patcher.ChangeBranchCondition(ne);
  } else {
    DCHECK(Assembler::IsBne(branch_instr));
    patcher.ChangeBranchCondition(eq);
  }
}

#endif  // V8_TARGET_ARCH_MIPS