#if V8_TARGET_ARCH_ARM64

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)
#if defined(USE_SIMULATOR)
byte* fast_exp_arm64_machine_code = NULL;

// Under the simulator the generated stub cannot be called directly; route
// the call through Simulator::CallDouble instead.
double fast_exp_simulator(double x) {
  Simulator* simulator = Simulator::current(Isolate::Current());
  Simulator::CallArgument args[] = {
      Simulator::CallArgument(x),
      Simulator::CallArgument::End()
  };
  return simulator->CallDouble(fast_exp_arm64_machine_code, args);
}
#endif
UnaryMathFunction CreateExpFunction() {
  if (!FLAG_fast_math) return &std::exp;

  // Allocate a writable, executable buffer for the generated code.
  size_t actual_size;
  byte* buffer =
      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
  if (buffer == NULL) return &std::exp;

  ExternalReference::InitializeMathExpData();
  MacroAssembler masm(NULL, buffer, static_cast<int>(actual_size));
  masm.SetStackPointer(csp);

  // ... (the input arrives in d0; the generator uses d1-d3 and x10-x12 as
  // result and temporary registers)
  MathExpGenerator::EmitMathExp(&masm, input, result,
                                double_temp1, double_temp2,
                                temp1, temp2, temp3);
  // Move the result to the return register.
  masm.Fmov(d0, result);
  masm.Ret();

  CodeDesc desc;
  masm.GetCode(&desc);
  DCHECK(!RelocInfo::RequiresRelocation(desc));

  CpuFeatures::FlushICache(buffer, actual_size);
  base::OS::ProtectCode(buffer, actual_size);

#if !defined(USE_SIMULATOR)
  return FUNCTION_CAST<UnaryMathFunction>(buffer);
#else
  fast_exp_arm64_machine_code = buffer;
  return &fast_exp_simulator;
#endif
}
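
// A minimal usage sketch (hypothetical caller, not part of this file):
// UnaryMathFunction is a plain function pointer of type double (*)(double),
// so the generated stub is invoked like any C function.
//
//   UnaryMathFunction fast_exp = CreateExpFunction();
//   double e = fast_exp(1.0);  // exp(1.0); this is simply std::exp when
//                              // --fast-math is disabled.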
void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  masm->EnterFrame(StackFrame::INTERNAL);
  DCHECK(!masm->has_frame());
  masm->set_has_frame(true);
}


void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  masm->LeaveFrame(StackFrame::INTERNAL);
  DCHECK(masm->has_frame());
  masm->set_has_frame(false);
}
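
// Pairing sketch (illustrative, not part of this file): a RuntimeCallHelper
// brackets a runtime call made from a stub that has no frame of its own.
//
//   helper.BeforeCall(masm);   // enter an INTERNAL frame, has_frame = true
//   __ CallRuntime(...);
//   helper.AfterCall(masm);    // leave the frame again, has_frame = false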
void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
    MacroAssembler* masm, Register receiver, Register key, Register value,
    Register target_map, AllocationSiteMode mode,
    Label* allocation_memento_found) {
  ASM_LOCATION(
      "ElementsTransitionGenerator::GenerateMapChangeElementsTransition");
  // ...
  if (mode == TRACK_ALLOCATION_SITE) {
    __ JumpIfJSArrayHasAllocationMemento(receiver, x10, x11,
                                         allocation_memento_found);
  }

  // Set the transitioned map, with a write barrier for the map field.
  __ Str(target_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ RecordWriteField(receiver, HeapObject::kMapOffset, target_map, x10,
                      kLRHasNotBeenSaved, kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
}
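
// Conceptually the transition above is just (illustrative pseudo-C++, not
// part of this file):
//
//   receiver->map = target_map;                         // new ElementsKind
//   RecordWrite(receiver, &receiver->map, target_map);  // GC write barrier
//
// No backing-store changes are needed because the source and target element
// layouts are identical; only the map changes.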
void ElementsTransitionGenerator::GenerateSmiToDouble(
    MacroAssembler* masm, Register receiver, Register key, Register value,
    Register target_map, AllocationSiteMode mode, Label* fail) {
  ASM_LOCATION("ElementsTransitionGenerator::GenerateSmiToDouble");
  Label gc_required, only_change_map;
  Register elements = x4;
  Register length = x5;
  Register array_size = x6;
  Register array = x7;

  Register scratch = x6;

  // Verify input registers don't conflict with locals.
  DCHECK(!AreAliased(receiver, key, value, target_map,
                     elements, length, array_size, array));

  if (mode == TRACK_ALLOCATION_SITE) {
    __ JumpIfJSArrayHasAllocationMemento(receiver, x10, x11, fail);
  }

  // Empty arrays need only a map transition, not a new backing store.
  __ Ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ JumpIfRoot(elements, Heap::kEmptyFixedArrayRootIndex, &only_change_map);

  // ... (allocate the new FixedDoubleArray, bailing out to gc_required)

  // Set the destination FixedDoubleArray's length and map.
  Register map_root = array_size;
  __ LoadRoot(map_root, Heap::kFixedDoubleArrayMapRootIndex);
  __ SmiTag(x11, length);
  // ... (store length and map, install the new backing store, and record
  // the writes)

  // Prepare for the conversion loop.
  Register src_elements = x10;
  Register dst_elements = x11;
  Register dst_end = x12;
  // ...
  FPRegister nan_d = d1;
  __ Fmov(nan_d, rawbits_to_double(kHoleNanInt64));

  __ Bind(&only_change_map);
  // ... (store target_map with a write barrier, then branch to done)

  __ Bind(&gc_required);
  // ... (restore lr and branch to fail)

  // Conversion loop: untag each smi to a double; write the canonical hole
  // NaN (nan_d) for non-smi entries.
  // ...
  __ Cmp(dst_elements, dst_end);
  // ... (loop while dst_elements < dst_end)
}
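
// One iteration of the conversion loop above, modeled in plain C++
// (illustrative sketch; ConvertSmiElement and UntagSmi are hypothetical
// names, and the real code operates on raw heap words with post-indexed
// loads/stores):
//
//   uint64_t ConvertSmiElement(uint64_t raw) {
//     if (raw & kSmiTagMask) return kHoleNanInt64;    // hole -> hole NaN
//     double d = static_cast<double>(UntagSmi(raw));  // smi -> double
//     return bit_cast<uint64_t>(d);
//   }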
void ElementsTransitionGenerator::GenerateDoubleToObject(
    MacroAssembler* masm, Register receiver, Register key, Register value,
    Register target_map, AllocationSiteMode mode, Label* fail) {
  ASM_LOCATION("ElementsTransitionGenerator::GenerateDoubleToObject");
  Register elements = x4;
  Register array_size = x6;
  Register array = x7;
  Register length = x5;

  // Verify input registers don't conflict with locals.
  DCHECK(!AreAliased(receiver, key, value, target_map,
                     elements, array_size, array, length));

  if (mode == TRACK_ALLOCATION_SITE) {
    __ JumpIfJSArrayHasAllocationMemento(receiver, x10, x11, fail);
  }

  // Empty arrays need only a map transition, not a new backing store.
  Label only_change_map;
  __ Ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ JumpIfRoot(elements, Heap::kEmptyFixedArrayRootIndex, &only_change_map);

  __ Push(lr);
  __ Push(target_map, receiver, key, value);

  // Allocate the new FixedArray.
  Label gc_required;
  // ...

  // Set the destination FixedArray's length and map.
  Register map_root = array_size;
  __ LoadRoot(map_root, Heap::kFixedArrayMapRootIndex);
  __ SmiTag(x11, length);
  // ...

  // Prepare for the conversion loop.
  Register src_elements = x10;
  Register dst_elements = x11;
  Register dst_end = x12;
  __ Add(src_elements, elements,
         FixedDoubleArray::kHeaderSize - kHeapObjectTag);
  // ...

  Register the_hole = x14;
  Register heap_num_map = x15;
  __ LoadRoot(the_hole, Heap::kTheHoleValueRootIndex);
  __ LoadRoot(heap_num_map, Heap::kHeapNumberMapRootIndex);

  // Call into the runtime if GC is required.
  __ Bind(&gc_required);
  __ Pop(value, key, receiver, target_map);
  __ Pop(lr);
  __ B(fail);

  Label loop, convert_hole;
  __ Bind(&loop);
  // Load the next source element and compare against the hole NaN pattern.
  // ...
  __ B(eq, &convert_hole);

  // Non-hole double: box the value in a newly allocated HeapNumber.
  Register heap_num = length;
  Register scratch = array_size;
  Register scratch2 = elements;
  __ AllocateHeapNumber(heap_num, &gc_required, scratch, scratch2,
                        x13, heap_num_map);
  __ Mov(x13, dst_elements);
  // ... (store heap_num and record the write, then continue the loop)

  // Replace the hole NaN with the hole object pointer.
  __ Bind(&convert_hole);
  // ...

  __ Cmp(dst_elements, dst_end);
  // ... (loop while dst_elements < dst_end)

  __ Pop(value, key, receiver, target_map);
  // ...

  __ Bind(&only_change_map);
  // ... (store target_map with a write barrier)
}
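
// The reverse of the smi-to-double sketch above (illustrative pseudo-C++,
// not part of this file): each FixedDoubleArray slot holds either the
// canonical hole NaN bit pattern or a number that must be boxed:
//
//   if (bits == kHoleNanInt64) dst[i] = the_hole;            // keep hole
//   else                       dst[i] = NewHeapNumber(bits); // box double
//
// NewHeapNumber is a hypothetical stand-in for the AllocateHeapNumber +
// RecordWrite pair in the generated loop.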
CodeAgingHelper::CodeAgingHelper() {
  // ... (emit the young frame-setup sequence into young_sequence_)
#ifdef DEBUG
  // Also emit a reference "old" sequence for IsOld() to compare against.
  const int length = kCodeAgeStubEntryOffset / kInstructionSize;
  PatchingAssembler patcher_old(old_sequence_.start(), length);
  MacroAssembler::EmitCodeAgeSequence(&patcher_old, NULL);
#endif
}


#ifdef DEBUG
bool CodeAgingHelper::IsOld(byte* candidate) const {
  return memcmp(candidate, old_sequence_.start(),
                kCodeAgeStubEntryOffset) == 0;
}
#endif


void Code::PatchPlatformCodeAge(Isolate* isolate, byte* sequence, Age age,
                                MarkingParity parity) {
  PatchingAssembler patcher(sequence,
                            kNoCodeAgeSequenceLength / kInstructionSize);
  // ... (re-emit either the young frame-setup sequence, or the code age
  // sequence pointing at the stub for 'age')
}
void StringCharLoadGenerator::Generate(MacroAssembler* masm,
                                       Register string,
                                       Register index,
                                       Register result,
                                       Label* call_runtime) {
  DCHECK(string.Is64Bits() && index.Is32Bits() && result.Is64Bits());
  // Fetch the instance type of the receiver into the result register.
  __ Ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
  __ Ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));

  // We need special handling for indirect strings.
  Label check_sequential;
  // ... (branch to check_sequential unless kIsIndirectStringMask is set)

  // Dispatch on the indirect string shape: slice or cons.
  Label cons_string;
  // ... (branch to cons_string unless kSlicedNotConsMask is set)

  // Handle slices: adjust the index by the slice offset and continue with
  // the parent string.
  Label indirect_string_loaded;
  // ... (load the slice offset into result and the parent into string)
  __ Add(index, index, result.W());
  __ B(&indirect_string_loaded);

  // Handle cons strings: only flat cons strings (whose second part is the
  // empty string) can be handled here; otherwise defer to the runtime.
  __ Bind(&cons_string);
  // ... (load the second part into result)
  __ JumpIfNotRoot(result, Heap::kempty_stringRootIndex, call_runtime);
  // ... (continue with the first part in string)

  __ Bind(&indirect_string_loaded);
  // ... (reload the instance type of the underlying string)

  // Distinguish sequential and external strings; only these two
  // representations can reach here.
  Label external_string, check_encoding;
  __ Bind(&check_sequential);
  // ... (branch to external_string if kStringRepresentationMask is set,
  // otherwise compute the address of the sequential string data)
  __ B(&check_encoding);

  // Handle external strings.
  __ Bind(&external_string);
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect string) here.
    __ Tst(result, kIsIndirectStringMask);
    __ Assert(eq, kExternalStringExpectedButNotFound);
  }
  // Rule out short external strings, which are never used in generated code.
  __ Tst(result, kShortExternalStringMask);
  __ B(ne, call_runtime);
  // ... (load the resource data pointer into string)

  Label one_byte, done;
  __ Bind(&check_encoding);
  // ... (test kStringEncodingMask and load a one- or two-byte character
  // into result)
}
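
// The final load that check_encoding dispatches to, modeled in plain C++
// (illustrative sketch; 'data' stands for the sequential payload or external
// resource data the code has just computed):
//
//   uint16_t LoadChar(const void* data, int index, bool is_one_byte) {
//     return is_one_byte ? static_cast<const uint8_t*>(data)[index]
//                        : static_cast<const uint16_t*>(data)[index];
//   }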
static MemOperand ExpConstant(Register base, int index) {
  return MemOperand(base, index * kDoubleSize);
}


void MathExpGenerator::EmitMathExp(MacroAssembler* masm,
                                   DoubleRegister input,
                                   DoubleRegister result,
                                   DoubleRegister double_temp1,
                                   DoubleRegister double_temp2,
                                   Register temp1,
                                   Register temp2,
                                   Register temp3) {
  DCHECK(!AreAliased(input, result,
                     double_temp1, double_temp2,
                     temp1, temp2, temp3));
  DCHECK(ExternalReference::math_exp_constants(0).address() != NULL);
  DCHECK(!masm->serializer_enabled());  // External refs aren't serializable.

  Label done;
  DoubleRegister double_temp3 = result;
  Register constants = temp3;

  // The algorithm relies on constants initialized by
  // ExternalReference::InitializeMathExpData(). Load their base address.
  __ Mov(constants, ExternalReference::math_exp_constants(0));

  // Four-way split:
  //  - If input <= about -708.4, the result always rounds to zero.
  //  - If input >= about 709.8, the result always rounds to +infinity.
  //  - If the input is NaN, the result is NaN.
  //  - Otherwise, the result may be finite and non-zero.

  Label result_is_finite_non_zero;
  // Assert that offsets 0 (the small-input threshold) and 1 (the large-input
  // threshold) can be loaded with a single ldp.
  DCHECK(kDRegSize == (ExpConstant(constants, 1).offset() -
                       ExpConstant(constants, 0).offset()));
  __ Ldp(double_temp1, double_temp2, ExpConstant(constants, 0));

  __ Fcmp(input, double_temp1);
  __ Fccmp(input, double_temp2, NoFlag, hi);
  __ B(&result_is_finite_non_zero, mi);

  // The input is NaN, or the result saturates to +infinity or +0.0.
  __ Ldr(double_temp2, ExpConstant(constants, 2));  // +infinity.
  __ Fcsel(result, fp_zero, double_temp2, lo);      // +0.0 or +infinity.
  __ Fcsel(result, result, input, vc);              // NaN propagates.
  __ B(&done);

  __ Bind(&result_is_finite_non_zero);

  DCHECK(kDRegSize == (ExpConstant(constants, 4).offset() -
                       ExpConstant(constants, 3).offset()));
  __ Ldp(double_temp1, double_temp3, ExpConstant(constants, 3));
  __ Fmadd(double_temp1, double_temp1, input, double_temp3);
  __ Fmov(temp2.W(), double_temp1.S());
  __ Fsub(double_temp1, double_temp1, double_temp3);

  DCHECK(kDRegSize == (ExpConstant(constants, 6).offset() -
                       ExpConstant(constants, 5).offset()));
  __ Ldp(double_temp2, double_temp3, ExpConstant(constants, 5));
  __ Fmul(double_temp1, double_temp1, double_temp2);
  __ Fsub(double_temp1, double_temp1, input);

  __ Fmul(double_temp2, double_temp1, double_temp1);
  __ Fsub(double_temp3, double_temp3, double_temp1);
  __ Fmul(double_temp3, double_temp3, double_temp2);

  __ Mov(temp1.W(), Operand(temp2.W(), LSR, 11));

  __ Ldr(double_temp2, ExpConstant(constants, 7));
  __ Fmul(double_temp3, double_temp3, double_temp2);
  __ Fsub(double_temp3, double_temp3, double_temp1);

  // The 8th constant is 1.0, so use an immediate move rather than a load.
  __ Fmov(double_temp2, 1.0);
  __ Fadd(double_temp3, double_temp3, double_temp2);

  __ And(temp2, temp2, 0x7ff);
  __ Add(temp1, temp1, 0x3ff);

  // Do the final table lookup and assemble the result exponent.
  __ Mov(temp3, ExternalReference::math_exp_log_table());
  __ Add(temp3, temp3, Operand(temp2, LSL, kDRegSizeLog2));
  __ Ldp(temp2.W(), temp3.W(), MemOperand(temp3));
  __ Orr(temp1.W(), temp3.W(), Operand(temp1.W(), LSL, 20));
  __ Bfi(temp2, temp1, 32, 32);
  __ Fmov(double_temp1, temp2);

  __ Fmul(result, double_temp3, double_temp1);

  __ Bind(&done);
}
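
// For reference, the table-driven scheme that the generated code above
// implements, modeled in portable C++. This is an illustrative sketch only:
// TableExpModel is a hypothetical helper, not used anywhere in V8, and the
// constants in math_exp_constants() / math_exp_log_table() encode the same
// idea with different rounding and bit-manipulation details. It also omits
// the NaN/overflow/underflow split handled above. Requires <cmath> and
// <cstdint>.
//
// exp(x) = 2^m * 2^(j/2048) * exp(r), where n = m*2048 + j is the nearest
// integer to x*2048/ln(2) and r = x - n*ln(2)/2048, so |r| <= ln(2)/4096
// and a short polynomial suffices for exp(r).
static double TableExpModel(double x) {
  static const int kTableBits = 11;
  static const int kTableSize = 1 << kTableBits;  // 2048 entries.
  static double table[kTableSize];
  static bool initialized = false;
  if (!initialized) {
    for (int j = 0; j < kTableSize; j++) {
      table[j] = std::pow(2.0, static_cast<double>(j) / kTableSize);
    }
    initialized = true;
  }
  // Range reduction: n = round(x * 2048 / ln(2)), r = the remainder.
  double t = x * (kTableSize / std::log(2.0));
  int64_t n = static_cast<int64_t>(t >= 0 ? t + 0.5 : t - 0.5);
  double r = x - static_cast<double>(n) * (std::log(2.0) / kTableSize);
  int64_t m = n >> kTableBits;                     // Exponent part.
  int j = static_cast<int>(n & (kTableSize - 1));  // Table index.
  // exp(r) ~= 1 + r + r^2/2 is ample for |r| <= ln(2)/4096.
  double exp_r = 1.0 + r + 0.5 * r * r;
  return std::ldexp(table[j] * exp_r, static_cast<int>(m));
}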
#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM64