#define __ ACCESS_MASM(masm)


static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm, Register type,
                                            Label* global_object) {
  // Jumps to global_object when type is one of the global object instance
  // types (the comparisons themselves are elided in this excerpt).
  // ...
}
// Helper function used to load a property from a dictionary backing store.
// This function may fail to load a property even though it is present in the
// dictionary, so code at miss_label must always call a backup property load
// that is complete.
static void GenerateDictionaryLoad(MacroAssembler* masm, Label* miss_label,
                                   Register elements, Register name,
                                   Register r0, Register r1, Register result) {
  // ...
  // If probing finds an entry in the dictionary, r1 contains the index into
  // the dictionary. Check that the value is a normal property.
  const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  // ... (test the details word against PropertyDetails::TypeField::kMask,
  //      jumping to miss_label on failure)

  // Get the value at the masked, scaled index.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ movp(result, Operand(elements, r1, times_pointer_size,
                          kValueOffset - kHeapObjectTag));
}
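// Layout note (a sketch inferred from the offsets above): a NameDictionary
// stores each entry as a (key, value, details) triplet immediately after the
// header, so for the entry whose scaled index is in r1:
//   key     -> kElementsStartOffset + 0 * kPointerSize
//   value   -> kElementsStartOffset + 1 * kPointerSize  (kValueOffset)
//   details -> kElementsStartOffset + 2 * kPointerSize  (kDetailsOffset)
// The details word is a smi of PropertyDetails bits; masking it with
// PropertyDetails::TypeField::kMask filters out anything that is not a
// normal data property before the value is loaded.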
static void GenerateDictionaryStore(MacroAssembler* masm, Label* miss_label,
                                    Register elements, Register name,
                                    Register value, Register scratch0,
                                    Register scratch1) {
  // ...
  Label done;

  // Probe the dictionary.
  NameDictionaryLookupStub::GeneratePositiveLookup(
      masm, miss_label, &done, elements, name, scratch0, scratch1);

  // If probing finds an entry in the dictionary, scratch1 contains the index
  // into the dictionary. Check that the value is a normal property that is
  // not read only.
  __ bind(&done);
  const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  const int kTypeAndReadOnlyMask =
      (PropertyDetails::TypeField::kMask |
       PropertyDetails::AttributesField::encode(READ_ONLY));
  // ... (test the details word against kTypeAndReadOnlyMask, miss on failure)

  // Store the value at the masked, scaled index.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ leap(scratch1, Operand(elements, scratch1, times_pointer_size,
                            kValueOffset - kHeapObjectTag));
  __ movp(Operand(scratch1, 0), value);

  // Update the write barrier. Make sure not to clobber the value.
  __ movp(scratch0, value);
  // ... (RecordWrite using elements, scratch1 and scratch0)
}
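// Write-barrier note: the store above writes a tagged pointer into the
// dictionary, so the GC must be told about it. scratch1 already holds the
// absolute slot address (computed with leap), and value is copied into
// scratch0 first so the barrier can clobber its scratch operands without
// destroying the stored value. This reading is a sketch based on the
// register moves visible here, not on the elided barrier call itself.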
static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
                                           Register receiver, Register map,
                                           int interceptor_bit, Label* slow) {
  // ...
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, slow);
  // ...
}
static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver,
                                  Register key, Register elements,
                                  Register scratch, Register result,
                                  Label* not_fast_array, Label* out_of_range) {
  // ...
  if (not_fast_array != NULL) {
    // Check that the object is in fast mode and writable.
    __ CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                   Heap::kFixedArrayMapRootIndex);
    __ j(not_equal, not_fast_array);
  } else {
    __ AssertFastElements(elements);
  }
  // ...
  // Check for the hole.
  __ CompareRoot(scratch, Heap::kTheHoleValueRootIndex);
  // ...
  if (!result.is(scratch)) {
    __ movp(result, scratch);
  }
}
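// Hole-check note: fast elements use the "hole" sentinel for deleted or
// never-initialized slots. If the loaded element equals the hole, the
// generic path must take over (presumably via out_of_range here, as in the
// other fast-load helpers), because a hole may be shadowed by an accessor
// or a prototype property; the raw sentinel must never escape to
// JavaScript code.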
static void GenerateKeyNameCheck(MacroAssembler* masm, Register key,
                                 Register map, Register hash,
                                 Label* index_string, Label* not_unique) {
  // ...
}
void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
  // The return address is on the stack.
  Label slow, check_name, index_smi, index_name, property_array_property;
  Label probe_dictionary, check_number_dictionary;

  Register receiver = ReceiverRegister();
  Register key = NameRegister();
  // ...

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &check_name);
  __ bind(&index_smi);
  // Now the key is known to be a smi.

  GenerateKeyedLoadReceiverCheck(masm, receiver, rax,
                                 Map::kHasIndexedInterceptor, &slow);

  // Check the receiver's map to see if it has fast elements.
  __ CheckFastElements(rax, &check_number_dictionary);

  GenerateFastArrayLoad(masm, receiver, key, rax, rbx, rax, NULL, &slow);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_generic_smi(), 1);
  __ ret(0);

  __ bind(&check_number_dictionary);
  __ SmiToInteger32(rbx, key);
  // ...
  // Check whether the elements object is a number dictionary.
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(not_equal, &slow);
  // ...

  __ bind(&slow);
  // Slow case: jump to runtime.
  __ IncrementCounter(counters->keyed_load_generic_slow(), 1);
  GenerateRuntimeGetProperty(masm);

  __ bind(&check_name);
  GenerateKeyNameCheck(masm, key, rax, rbx, &index_name, &slow);
  // ...

  // If the receiver is a fast-case object, check the keyed lookup cache;
  // otherwise probe the property dictionary.
  __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(equal, &probe_dictionary);

  // Compute the hash of the map and the name, then mask it down to a
  // bucket index in the keyed lookup cache.
  // ...
  int mask = KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask;
  __ andp(rax, Immediate(mask));

  // Load the key (consisting of map and internalized name) from the cache
  // and check for a hit, probing each entry of the bucket in turn.
  Label load_in_object_property;
  static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
  Label hit_on_nth_entry[kEntriesPerBucket];
  ExternalReference cache_keys =
      ExternalReference::keyed_lookup_cache_keys(masm->isolate());

  for (int i = 0; i < kEntriesPerBucket - 1; i++) {
    Label try_next_entry;
    // ...
    __ bind(&try_next_entry);
  }
  // ...

  // Get the field offset for the matching entry.
  ExternalReference cache_field_offsets =
      ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());

  // Hit on nth entry.
  for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
    __ bind(&hit_on_nth_entry[i]);
    if (i != 0) {
      __ addl(rax, Immediate(i));
    }
    // ...
    __ jmp(&load_in_object_property);
    // ...
  }

  // Load in-object property.
  __ bind(&load_in_object_property);
  // ...
  __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
  __ ret(0);

  // Load property array property.
  __ bind(&property_array_property);
  // ...
  __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
  __ ret(0);

  // Do a quick inline probe of the receiver's dictionary, if it exists.
  __ bind(&probe_dictionary);
  // ...
  GenerateGlobalInstanceTypeCheck(masm, rax, &slow);

  GenerateDictionaryLoad(masm, &slow, rbx, key, rax, rdi, rax);
  __ IncrementCounter(counters->keyed_load_generic_symbol(), 1);
  __ ret(0);

  __ bind(&index_name);
  __ IndexFromHash(rbx, key);
  // Now jump to the place where smi keys are handled.
  __ jmp(&index_smi);
}
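// Lookup-cache note (sketch): the KeyedLookupCache maps a (receiver map,
// internalized name) pair to a field offset. The hash is derived from the
// map pointer and the name's hash field, masked down to a bucket index by
// the andp above; each bucket holds kEntriesPerBucket entries, probed by
// the unrolled loops. A hit yields the cached field offset, which is then
// classified as either an in-object property or a slot in the external
// property array, the two load paths bound above.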
void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
  // Return address is on the stack.
  Label miss;

  Register receiver = ReceiverRegister();
  Register index = NameRegister();
  Register scratch = rbx;
  Register result = rax;
  DCHECK(!scratch.is(receiver) && !scratch.is(index));

  StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          STRING_INDEX_IS_ARRAY_INDEX);
  char_at_generator.GenerateFast(masm);
  __ ret(0);

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, call_helper);

  __ bind(&miss);
  GenerateMiss(masm);
}
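// StringCharAtGenerator splits the work in two: GenerateFast emits the
// inline path (flat strings with an in-range smi index), while GenerateSlow
// emits the out-of-line path that falls back to the runtime through the
// provided call helper. The three &miss targets cover the not-a-string,
// not-a-number, and index-out-of-range cases. This description is a sketch
// of the generator's contract, not code from this file.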
static void KeyedStoreGenerateGenericHelper(
    MacroAssembler* masm, Label* fast_object, Label* fast_double, Label* slow,
    KeyedStoreCheckMap check_map, KeyedStoreIncrementLength increment_length) {
  Label transition_smi_elements;
  Label finish_object_store, non_double_value, transition_double_elements;
  Label fast_double_without_map_check;
  Register receiver = KeyedStoreIC::ReceiverRegister();
  Register key = KeyedStoreIC::NameRegister();
  Register value = KeyedStoreIC::ValueRegister();

  // Fast case: Do the store, could be either Object or double.
  __ bind(fast_object);
  // ...
  __ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, fast_double);

  // HOLECHECK: guards "A[i] = V". We have to go to the runtime if there is
  // a hole at the index we are storing into, since the prototype chain
  // could observe the store.
  Label holecheck_passed1;
  // ...
  __ bind(&holecheck_passed1);

  // Smi stores don't require further checks.
  Label non_smi_value;
  __ JumpIfNotSmi(value, &non_smi_value);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ leal(rdi, Operand(key, 1));
    // ...
  }
  // ...

  __ bind(&non_smi_value);
  // Writing a non-smi: check whether the array allows non-smi elements.
  __ CheckFastObjectElements(r9, &transition_smi_elements);

  __ bind(&finish_object_store);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ leal(rdi, Operand(key, 1));
    // ...
  }
  // ... (store the value and update the write barrier)

  __ bind(fast_double);
  // ...
  __ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
  __ j(not_equal, slow);
  // ...

  __ bind(&fast_double_without_map_check);
  __ StoreNumberToDoubleElements(value, rbx, key, xmm0,
                                 &transition_double_elements);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ leal(rdi, Operand(key, 1));
    // ...
  }
  // ...

  __ bind(&transition_smi_elements);
  // Transition the array appropriately depending on the value type.
  // ...
  __ CompareRoot(r9, Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &non_double_value);

  // Value is a double: transition FAST_SMI_ELEMENTS -> FAST_DOUBLE_ELEMENTS.
  // ...
  __ jmp(&fast_double_without_map_check);

  __ bind(&non_double_value);
  // Value is not a double: transition FAST_SMI_ELEMENTS -> FAST_ELEMENTS.
  AllocationSiteMode mode =
      AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS);
  // ...
  ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
      masm, receiver, key, value, rbx, mode, slow);
  __ jmp(&finish_object_store);

  __ bind(&transition_double_elements);
  // Transition FAST_DOUBLE_ELEMENTS -> FAST_ELEMENTS.
  // ...
  __ jmp(&finish_object_store);
}
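// Transition note: stores move an array monotonically up the elements-kind
// lattice:
//   FAST_SMI_ELEMENTS --(heap number)---> FAST_DOUBLE_ELEMENTS
//   FAST_SMI_ELEMENTS --(other object)--> FAST_ELEMENTS
//   FAST_DOUBLE_ELEMENTS --(non-number)-> FAST_ELEMENTS
// which is exactly the three transition labels bound above; arrays never
// transition back to a more specialized kind on this path.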
void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
                                   StrictMode strict_mode) {
  // Return address is on the stack.
  Label slow, slow_with_tagged_index, fast_object, fast_object_grow;
  Label fast_double, fast_double_grow;
  Label array, extra, check_if_double_array;
  // ...
  __ JumpIfSmi(receiver, &slow_with_tagged_index);
  // ...
  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &slow_with_tagged_index);
  __ SmiToInteger32(key, key);
  // ...

  // Slow case: the key is re-tagged before calling into the runtime.
  __ Integer32ToSmi(key, key);
  __ bind(&slow_with_tagged_index);
  GenerateRuntimeSetProperty(masm, strict_mode);
  // ...

  __ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &check_if_double_array);
  __ jmp(&fast_object_grow);

  __ bind(&check_if_double_array);
  __ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
  __ j(not_equal, &slow);
  __ jmp(&fast_double_grow);
  // ...

  KeyedStoreGenerateGenericHelper(masm, &fast_object, &fast_double, &slow,
                                  kCheckMap, kDontIncrementLength);
  KeyedStoreGenerateGenericHelper(masm, &fast_object_grow, &fast_double_grow,
                                  &slow, kDontCheckMap, kIncrementLength);
}
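// The helper is instantiated twice because the two entry conditions differ:
// the in-bounds path (&fast_object/&fast_double) arrives with the elements
// map already validated and must not touch the length, while the grow path
// (&fast_object_grow/&fast_double_grow) skips the map re-check and bumps
// receiver->length by one (kIncrementLength, the leal over key above) after
// a successful store.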
static Operand GenerateMappedArgumentsLookup(
    MacroAssembler* masm, Register object, Register key, Register scratch1,
    Register scratch2, Register scratch3, Label* unmapped_case,
    Label* slow_case) {
  Heap* heap = masm->isolate()->heap();

  // Check that the receiver is a JSObject.
  __ JumpIfSmi(object, slow_case);
  // ...

  // Check that the key is a positive smi.
  Condition check = masm->CheckNonNegativeSmi(key);
  __ j(NegateCondition(check), slow_case);

  // Load the elements into scratch1 and check its map.
  Handle<Map> arguments_map(heap->sloppy_arguments_elements_map());
  // ...

  // Check if the element is in the mapped part of the arguments.
  __ cmpp(key, scratch2);
  __ j(greater_equal, unmapped_case);
  // ...
  __ SmiToInteger64(scratch3, key);
  // ...
  __ CompareRoot(scratch2, Heap::kTheHoleValueRootIndex);
  __ j(equal, unmapped_case);

  // Load the value from the arguments context.
  __ SmiToInteger64(scratch3, scratch2);
  // ... (returns an Operand addressing the context slot)
}
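// Sloppy-arguments layout (sketch): the elements of a sloppy-arguments
// object form a two-part structure. Indices below the mapped-parameter
// count resolve through a parameter map into the function's context, unless
// the map slot holds the hole, which means the mapping was severed (e.g. by
// deleting the argument); everything else lives in a plain fixed-array
// backing store handled by GenerateUnmappedArgumentsLookup below. The
// the-hole comparison above is exactly that severed-mapping test.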
static Operand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
                                               Register key,
                                               Register parameter_map,
                                               Register scratch,
                                               Label* slow_case) {
  // The element is in the arguments backing store, the fixed array stored
  // alongside the parameter map.
  const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
  Register backing_store = parameter_map;
  __ movp(backing_store, FieldOperand(parameter_map, kBackingStoreOffset));
  Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
  // ...
  __ cmpp(key, scratch);
  __ j(greater_equal, slow_case);
  __ SmiToInteger64(scratch, key);
  return FieldOperand(backing_store, scratch, times_pointer_size,
                      FixedArray::kHeaderSize);
}
void KeyedStoreIC::GenerateSloppyArguments(MacroAssembler* masm) {
  // The return address is on the stack.
  Label slow, notin;
  Register receiver = ReceiverRegister();
  Register name = NameRegister();
  Register value = ValueRegister();
  // ...
  Operand mapped_location = GenerateMappedArgumentsLookup(
      masm, receiver, name, rbx, rdi, r8, &notin, &slow);
  __ movp(mapped_location, value);
  __ leap(r9, mapped_location);
  // ... (write barrier for the mapped slot)
  __ Ret();

  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in rbx.
  Operand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, name, rbx, rdi, &slow);
  __ movp(unmapped_location, value);
  __ leap(r9, unmapped_location);
  // ... (write barrier for the unmapped slot)
  __ Ret();

  __ bind(&slow);
  GenerateMiss(masm);
}
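// Both stores above write a tagged value into a heap slot, so each is
// followed by computing the slot address into r9 (the leap) and, in the
// full source, a write-barrier call to keep the GC's remembered set
// consistent; the barrier calls themselves are elided in this excerpt.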
void LoadIC::GenerateNormal(MacroAssembler* masm) {
  Register dictionary = rax;
  Label slow;
  // ...
  GenerateDictionaryLoad(masm, &slow, dictionary, NameRegister(), rbx, rdi,
                         rax);
  __ ret(0);

  // Dictionary load failed, go slow (but don't miss).
  __ bind(&slow);
  GenerateRuntimeGetProperty(masm);
}
static const Register LoadIC_TempRegister() { return rbx; }


static const Register KeyedLoadIC_TempRegister() { return rbx; }
void LoadIC::GenerateMiss(MacroAssembler* masm) {
  // The return address is on the stack.
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->load_miss(), 1);

  __ PopReturnAddressTo(LoadIC_TempRegister());
  __ Push(ReceiverRegister());
  __ Push(NameRegister());
  __ PushReturnAddressFrom(LoadIC_TempRegister());

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kLoadIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}


void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  __ PopReturnAddressTo(LoadIC_TempRegister());
  __ Push(ReceiverRegister());
  __ Push(NameRegister());
  __ PushReturnAddressFrom(LoadIC_TempRegister());

  __ TailCallRuntime(Runtime::kGetProperty, 2, 1);
}
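// Calling-convention note: these stubs are reached by a call, so the return
// address sits on top of the stack, above where the runtime expects its
// arguments. PopReturnAddressTo/PushReturnAddressFrom temporarily park it
// in a scratch register (rbx here) so that receiver and name can be pushed
// underneath it, letting TailCallExternalReference/TailCallRuntime jump to
// the C++ entry with a well-formed argument frame.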
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
  // The return address is on the stack.
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_miss(), 1);

  __ PopReturnAddressTo(KeyedLoadIC_TempRegister());
  __ Push(ReceiverRegister());
  __ Push(NameRegister());
  __ PushReturnAddressFrom(KeyedLoadIC_TempRegister());

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}


void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  __ PopReturnAddressTo(KeyedLoadIC_TempRegister());
  __ Push(ReceiverRegister());
  __ Push(NameRegister());
  __ PushReturnAddressFrom(KeyedLoadIC_TempRegister());

  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
}
void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
  // ...
  masm->isolate()->stub_cache()->GenerateProbe(/* ... */);

  // Cache miss: jump to runtime.
  GenerateMiss(masm);
}
static void StoreIC_PushArgs(MacroAssembler* masm) {
  Register receiver = StoreIC::ReceiverRegister();
  Register name = StoreIC::NameRegister();
  Register value = StoreIC::ValueRegister();

  DCHECK(!rbx.is(receiver) && !rbx.is(name) && !rbx.is(value));

  __ PopReturnAddressTo(rbx);
  __ Push(receiver);
  __ Push(name);
  __ Push(value);
  __ PushReturnAddressFrom(rbx);
}
void StoreIC::GenerateMiss(MacroAssembler* masm) {
  // Return address is on the stack.
  StoreIC_PushArgs(masm);

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
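// StoreIC_PushArgs factors out the same return-address dance for the
// three-argument store case (receiver, name, value), which is why both
// StoreIC::GenerateMiss above and KeyedStoreIC::GenerateMiss below can
// tail call their miss handlers with TailCallExternalReference(ref, 3, 1).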
void StoreIC::GenerateNormal(MacroAssembler* masm) {
  Register receiver = ReceiverRegister();
  Register name = NameRegister();
  Register value = ValueRegister();
  Register dictionary = rbx;

  Label miss;

  __ movp(dictionary, FieldOperand(receiver, JSObject::kPropertiesOffset));
  GenerateDictionaryStore(masm, &miss, dictionary, name, value, r8, r9);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->store_normal_hit(), 1);
  __ ret(0);

  __ bind(&miss);
  __ IncrementCounter(counters->store_normal_miss(), 1);
  GenerateMiss(masm);
}
void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
  // Return address is on the stack.
  StoreIC_PushArgs(masm);

  // Do tail-call to runtime routine.
  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
Condition CompareIC::ComputeCondition(Token::Value op) {
  switch (op) {
    case Token::EQ_STRICT:
    case Token::EQ:
      return equal;
    // ...
  }
}
bool CompareIC::HasInlinedSmiCode(Address address) {
  // The address of the instruction following the call.
  Address test_instruction_address =
      address + Assembler::kCallTargetAddressOffset;

  // If the instruction following the call is not a test al, nothing was
  // inlined.
  return *test_instruction_address == Assembler::kTestAlByte;
}


void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
  // The address of the instruction following the call.
  Address test_instruction_address =
      address + Assembler::kCallTargetAddressOffset;
  // ...

  // Extract the encoded delta from the test instruction.
  Address delta_address = test_instruction_address + 1;
  uint8_t delta = *reinterpret_cast<uint8_t*>(delta_address);
  if (FLAG_trace_ic) {
    PrintF("[ patching ic at %p, test=%p, delta=%d\n", address,
           test_instruction_address, delta);
  }

  // Patch with a short conditional jump. Enabling means switching from a
  // short jump-if-carry/not-carry to jump-if-zero/not-zero, whereas
  // disabling is the reverse operation of that.
  Address jmp_address = test_instruction_address - delta;
  // ...
}
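// Patching scheme (sketch): at an inlined smi-check site the generated code
// looks like
//   call <ic stub>       ; patch target identified by 'address'
//   test al, <delta>     ; kTestAlByte marker, immediate = distance
//   ...
// and delta bytes before the test sits a short conditional jump (jc/jnc
// while the inlined check is disabled, jz/jnz once enabled). Patching
// rewrites just that one opcode byte, kJccShortPrefix | cc, flipping the
// site between the smi fast path and the IC call without moving any code.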