#ifndef V8_ARM64_LITHIUM_CODEGEN_ARM64_H_
#define V8_ARM64_LITHIUM_CODEGEN_ARM64_H_

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;
class BranchGenerator;
// LCodeGen member declarations.

  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool NeedsEagerFrame() const {
    return GetStackSlotCount() > 0 ||
           info()->is_non_deferred_calling() ||
           !info()->IsStub() ||
           info()->requires_frame();
  }

  // JavaScript shifts only use the lowest five bits of the shift amount.
  int JSShiftAmountFromLConstant(LOperand* constant) {
    return ToInteger32(LConstantOperand::cast(constant)) & 0x1f;
  }

  bool IsSmi(LConstantOperand* op) const;

  // Declare one handler per concrete lithium instruction.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO
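// For reference: LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO) stamps out one
// handler declaration per concrete lithium instruction. A sketch of the
// expansion (the instruction names shown are illustrative examples):
//
//   void DoAddI(LAddI* node);
//   void DoBranch(LBranch* node);
//   void DoCallRuntime(LCallRuntime* node);
//   // ... one Do##type(L##type* node) per V(type) entry in the list.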
  // Support for emitting branches on the result of a lithium comparison.
  template <class InstrType>
  void EmitBranchGeneric(InstrType instr, const BranchGenerator& branch);
  template <class InstrType>
  void EmitBranch(InstrType instr, Condition condition);
  template <class InstrType>
  void EmitCompareAndBranch(InstrType instr,
                            Condition condition,
                            const Register& lhs,
                            const Operand& rhs);
  template <class InstrType>
  void EmitTestAndBranch(InstrType instr,
                         Condition condition,
                         const Register& value,
                         uint64_t mask);
  template <class InstrType>
  void EmitBranchIfNonZeroNumber(InstrType instr,
                                 const FPRegister& value,
                                 const FPRegister& scratch);
  template <class InstrType>
  void EmitBranchIfHeapNumber(InstrType instr, const Register& value);
  template <class InstrType>
  void EmitBranchIfRoot(InstrType instr,
                        const Register& value,
                        Heap::RootListIndex index);
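// A minimal sketch of how the emitters above are used from an instruction
// handler (DoBranchExample and its accessors are hypothetical; `__` stands
// for masm()-> as in the .cc files):
//
//   void LCodeGen::DoBranchExample(LBranchExample* instr) {
//     Register value = ToRegister(instr->value());
//     __ Cmp(value, 0);
//     EmitBranch(instr, eq);  // Branch to instr's true/false blocks on eq.
//   }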
  MemOperand PrepareKeyedExternalArrayOperand(Register key,
                                              Register base,
                                              Register scratch,
                                              bool key_is_smi,
                                              bool key_is_constant,
                                              int constant_key,
                                              ElementsKind elements_kind,
                                              int base_offset);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);

  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count,
                         int arity,
                         LInstruction* instr,
                         R1State r1_state);

  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);
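// Calls emitted by the codegen are followed by a safepoint record describing
// the pointer state at that position; a sketch of the common pairing
// (RECORD_SIMPLE_SAFEPOINT is the SafepointMode declared below):
//
//   RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);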
  // Scope that pushes the safepoint registers around a call and restores
  // them afterwards.
  class PushSafepointRegistersScope BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
        : codegen_(codegen) {
      DCHECK(codegen_->info()->is_calling());
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;

      UseScratchRegisterScope temps(codegen_->masm_);
      // Preserve the value of lr which must be saved on the stack (the call
      // to the stub will clobber it).
      Register to_be_pushed_lr =
          temps.UnsafeAcquire(StoreRegistersStateStub::to_be_pushed_lr());
      codegen_->masm_->Mov(to_be_pushed_lr, lr);
      StoreRegistersStateStub stub(codegen_->isolate());
      codegen_->masm_->CallStub(&stub);
    }

    ~PushSafepointRegistersScope() {
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
      RestoreRegistersStateStub stub(codegen_->isolate());
      codegen_->masm_->CallStub(&stub);
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };
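// A sketch of the intended RAII usage from a deferred-code handler (the
// specific runtime function is illustrative):
//
//   {
//     PushSafepointRegistersScope scope(this);
//     LoadContextFromDeferred(instr->context());
//     CallRuntimeFromDeferred(Runtime::kStackGuard, 0, instr,
//                             instr->context());
//   }  // Register state and expected_safepoint_kind_ are restored here.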
// The BranchGenerator class is used by EmitBranchGeneric to emit code for
// conditional branching. Emit() emits code to branch when the condition
// holds; EmitInverted() emits code to branch when it does not.
class BranchGenerator BASE_EMBEDDED {
 public:
  explicit BranchGenerator(LCodeGen* codegen)
      : codegen_(codegen) { }

  virtual ~BranchGenerator() { }

  virtual void Emit(Label* label) const = 0;
  virtual void EmitInverted(Label* label) const = 0;

 protected:
  MacroAssembler* masm() const { return codegen_->masm(); }

  LCodeGen* codegen_;
};
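// A concrete generator, modeled on the BranchOnCondition helper in the
// corresponding .cc file (`__` stands for masm()->):
//
//   class BranchOnCondition : public BranchGenerator {
//    public:
//     BranchOnCondition(LCodeGen* codegen, Condition cond)
//         : BranchGenerator(codegen), cond_(cond) { }
//     virtual void Emit(Label* label) const {
//       __ B(cond_, label);
//     }
//     virtual void EmitInverted(Label* label) const {
//       if (cond_ != al) __ B(NegateCondition(cond_), label);
//     }
//    private:
//     Condition cond_;
//   };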
  // LCodeGen members (continued).

  // Operand conversions.
  DoubleRegister ToDoubleRegister(LOperand* op) const;
  Operand ToOperand32(LOperand* op);
  int32_t ToInteger32(LConstantOperand* op) const;
  template <class LI>
  Operand ToShiftedRightOperand32(LOperand* right, LI* shift_info);

  LPlatformChunk* chunk() const;
  bool IsNextEmittedBlock(int block_id) const;
  LinkRegisterStatus GetLinkRegisterState() const;
  void RestoreCallerDoubles();

  // Emit helpers.
  Condition EmitIsString(Register input, Register temp1,
                         Label* is_not_string, SmiCheck check_needed);
  void EmitDeepCopy(Handle<JSObject> object, Register result,
                    Register source, Register scratch, int* offset,
                    AllocationSiteMode mode);
  template <class T>
  void EmitVectorLoadICRegisters(T* instr);
  MemOperand BuildSeqStringOperand(Register string, LOperand* index,
                                   String::Encoding encoding);
  MemOperand PrepareKeyedArrayOperand(Register base, Register elements,
                                      Register key, bool key_is_tagged,
                                      ElementsKind elements_kind,
                                      Representation representation,
                                      int base_offset);

  // Safepoint and deferred-code bookkeeping.
  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void AddDeferredCode(LDeferredCode* code);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                       Label* map_check, Label* bool_load);
  void CallRuntimeFromDeferred(Runtime::FunctionId id, int argc,
                               LInstruction* instr, LOperand* context);
  void PopulateDeoptimizationLiteralsWithInlinedFunctions();

  TranslationBuffer translations_;
  SafepointTableBuilder safepoints_;
  ZoneList<Handle<Object> > deoptimization_literals_;
  ZoneList<LEnvironment*> deoptimizations_;
  int inlined_function_count_;
  bool after_push_argument_;

  friend class SafepointGenerator;
  friend class LGapResolver;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);

  // Deoptimization support.
  void DeoptimizeBranch(LInstruction* instr, const char* detail,
                        BranchType branch_type, Register reg = NoReg,
                        int bit = -1,
                        Deoptimizer::BailoutType* override_bailout_type = NULL);
  void DeoptimizeIfZero(Register rt, LInstruction* instr, const char* detail);
  void DeoptimizeIfNotZero(Register rt, LInstruction* instr,
                           const char* detail);
  void DeoptimizeIfSmi(Register rt, LInstruction* instr, const char* detail);
  void DeoptimizeIfMinusZero(DoubleRegister input, LInstruction* instr,
                             const char* detail);
  void DeoptimizeIfBitSet(Register rt, int bit, LInstruction* instr,
                          const char* detail);
  void DeoptimizeIfNotHeapNumber(Register object, LInstruction* instr);
  void DeoptimizeIfRoot(Register rt, Heap::RootListIndex index,
                        LInstruction* instr, const char* detail);
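// The DeoptimizeIf* helpers above bundle a check with a bailout; a sketch of
// typical use from a handler (the register and detail strings are
// illustrative):
//
//   Register input = ToRegister(instr->value());
//   DeoptimizeIfSmi(input, instr, "Smi");
//   DeoptimizeIfRoot(input, Heap::kUndefinedValueRootIndex, instr,
//                    "undefined");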
  // Code-generation phases and frame queries.
  void FinishCode(Handle<Code> code);
  bool GenerateDeferredCode();
  void GenerateOsrPrologue();
  int GetStackSlotCount() const;
  bool NeedsDeferredFrame() const;
  DoubleRegister double_scratch();

  // Deoptimization support (continued).
  void DeoptimizeIf(Condition cond, LInstruction* instr, const char* detail);
  void DeoptimizeIfNotSmi(Register rt, LInstruction* instr,
                          const char* detail);
  void DeoptimizeIfNegative(Register rt, LInstruction* instr,
                            const char* detail);
  int DefineDeoptimizationLiteral(Handle<Object> literal);

  // Deferred-code handlers.
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredTaggedToI(LTaggedToI* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredNumberTagU(LInstruction* instr, LOperand* value,
                            LOperand* temp1, LOperand* temp2);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, Register result,
                                   Register object, Register index);
  void LoadContextFromDeferred(LOperand* context);

  // Calls and safepoints.
  void CallCode(Handle<Code> code, RelocInfo::Mode mode, LInstruction* instr,
                TargetAddressStorageMode storage_mode =
                    CAN_INLINE_TARGET_ADDRESS);
  void CallCodeGeneric(Handle<Code> code, RelocInfo::Mode mode,
                       LInstruction* instr, SafepointMode safepoint_mode,
                       TargetAddressStorageMode storage_mode =
                           CAN_INLINE_TARGET_ADDRESS);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);

  Safepoint::Kind expected_safepoint_kind_;
  ZoneList<LDeferredCode*> deferred_;

  // Operand conversions (continued).
  Handle<Object> ToHandle(LConstantOperand* op) const;
  Register ToRegister32(LOperand* op) const;
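// A sketch of how a handler turns lithium operands into machine operands via
// the To* conversions (a simplified add; the real DoAddI also handles
// overflow checks):
//
//   void LCodeGen::DoAddI(LAddI* instr) {
//     Register result = ToRegister32(instr->result());
//     Register left = ToRegister32(instr->left());
//     Operand right = ToOperand32(instr->right());
//     __ Add(result, left, right);
//   }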
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info);

  // Constant and operand queries.
  bool IsInteger32Constant(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  Register ToRegister(LOperand* op) const;
  Operand ToOperand(LOperand* op);
  MemOperand ToMemOperand(LOperand* op) const;

  // Code-generation phases (continued).
  void GenerateBodyInstructionPre(LInstruction* instr) OVERRIDE;
  bool GenerateSafepointTable();
  void EnsureSpaceForLazyDeopt(int space_needed) OVERRIDE;
  void RecordAndWritePosition(int position) OVERRIDE;

  // Deoptimization support (continued).
  void Deoptimize(LInstruction* instr, const char* detail,
                  Deoptimizer::BailoutType* override_bailout_type = NULL);
  void DeoptimizeIfBitClear(Register rt, int bit, LInstruction* instr,
                            const char* detail);
  void DeoptimizeIfNotRoot(Register rt, Heap::RootListIndex index,
                           LInstruction* instr, const char* detail);
  void WriteTranslation(LEnvironment* environment, Translation* translation);
  void PopulateDeoptimizationData(Handle<Code> code);
  ZoneList<Deoptimizer::JumpTableEntry*> jump_table_;

  // Deferred-code handlers (continued).
  void DoDeferredMathAbsTagged(LMathAbsTagged* instr, Label* exit,
                               Label* allocation_entry);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredNumberTagD(LNumberTagD* instr);

  // Runtime calls and safepoints.
  void CallRuntime(const Runtime::Function* function, int num_arguments,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);
  void CallRuntime(Runtime::FunctionId id, int num_arguments,
                   LInstruction* instr);
  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  friend class LEnvironment;

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
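// TokenToCondition maps a comparison token onto the flags condition consumed
// by the branch emitters; a sketch (the surrounding handler is omitted):
//
//   Condition cond = TokenToCondition(Token::LT, /* is_unsigned */ false);
//   // cond == lt here, ready for EmitCompareAndBranch(instr, cond, lhs, rhs).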
// Deferred code is a block of code generated after the main body, used for
// slow paths that jump back into the instruction stream.
class LDeferredCode : public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen);

  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit);
  LCodeGen* codegen() const;
  MacroAssembler* masm() const;
  int instruction_index() const;
};
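// Handlers create small LDeferredCode subclasses that trampoline back into
// LCodeGen, e.g. (mirroring the DeferredNumberTagD helper in the
// corresponding .cc file):
//
//   class DeferredNumberTagD : public LDeferredCode {
//    public:
//     DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
//         : LDeferredCode(codegen), instr_(instr) { }
//     virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
//     virtual LInstruction* instr() { return instr_; }
//    private:
//     LNumberTagD* instr_;
//   };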
#endif  // V8_ARM64_LITHIUM_CODEGEN_ARM64_H_