V8 Project
v8::internal::LCodeGen Class Reference

#include <lithium-codegen-arm.h>

+ Inheritance diagram for v8::internal::LCodeGen:
+ Collaboration diagram for v8::internal::LCodeGen:

Classes

class  BASE_EMBEDDED
 
class  X87Stack
 

Public Types

enum  IntegerSignedness {
  SIGNED_INT32 , UNSIGNED_INT32 , SIGNED_INT32 , UNSIGNED_INT32 ,
  SIGNED_INT32 , UNSIGNED_INT32 , SIGNED_INT32 , UNSIGNED_INT32 ,
  SIGNED_INT32 , UNSIGNED_INT32 , SIGNED_INT32 , UNSIGNED_INT32 ,
  SIGNED_INT32 , UNSIGNED_INT32
}
 
enum  IntegerSignedness {
  SIGNED_INT32 , UNSIGNED_INT32 , SIGNED_INT32 , UNSIGNED_INT32 ,
  SIGNED_INT32 , UNSIGNED_INT32 , SIGNED_INT32 , UNSIGNED_INT32 ,
  SIGNED_INT32 , UNSIGNED_INT32 , SIGNED_INT32 , UNSIGNED_INT32 ,
  SIGNED_INT32 , UNSIGNED_INT32
}
 
enum  StackMode { kMustUseFramePointer , kCanUseStackPointer }
 
enum  IntegerSignedness {
  SIGNED_INT32 , UNSIGNED_INT32 , SIGNED_INT32 , UNSIGNED_INT32 ,
  SIGNED_INT32 , UNSIGNED_INT32 , SIGNED_INT32 , UNSIGNED_INT32 ,
  SIGNED_INT32 , UNSIGNED_INT32 , SIGNED_INT32 , UNSIGNED_INT32 ,
  SIGNED_INT32 , UNSIGNED_INT32
}
 
enum  IntegerSignedness {
  SIGNED_INT32 , UNSIGNED_INT32 , SIGNED_INT32 , UNSIGNED_INT32 ,
  SIGNED_INT32 , UNSIGNED_INT32 , SIGNED_INT32 , UNSIGNED_INT32 ,
  SIGNED_INT32 , UNSIGNED_INT32 , SIGNED_INT32 , UNSIGNED_INT32 ,
  SIGNED_INT32 , UNSIGNED_INT32
}
 
enum  IntegerSignedness {
  SIGNED_INT32 , UNSIGNED_INT32 , SIGNED_INT32 , UNSIGNED_INT32 ,
  SIGNED_INT32 , UNSIGNED_INT32 , SIGNED_INT32 , UNSIGNED_INT32 ,
  SIGNED_INT32 , UNSIGNED_INT32 , SIGNED_INT32 , UNSIGNED_INT32 ,
  SIGNED_INT32 , UNSIGNED_INT32
}
 
enum  IntegerSignedness {
  SIGNED_INT32 , UNSIGNED_INT32 , SIGNED_INT32 , UNSIGNED_INT32 ,
  SIGNED_INT32 , UNSIGNED_INT32 , SIGNED_INT32 , UNSIGNED_INT32 ,
  SIGNED_INT32 , UNSIGNED_INT32 , SIGNED_INT32 , UNSIGNED_INT32 ,
  SIGNED_INT32 , UNSIGNED_INT32
}
 
enum  X87OperandType { kX87DoubleOperand , kX87FloatOperand , kX87IntOperand }
 
enum  IntegerSignedness {
  SIGNED_INT32 , UNSIGNED_INT32 , SIGNED_INT32 , UNSIGNED_INT32 ,
  SIGNED_INT32 , UNSIGNED_INT32 , SIGNED_INT32 , UNSIGNED_INT32 ,
  SIGNED_INT32 , UNSIGNED_INT32 , SIGNED_INT32 , UNSIGNED_INT32 ,
  SIGNED_INT32 , UNSIGNED_INT32
}
 

Public Member Functions

 LCodeGen (LChunk *chunk, MacroAssembler *assembler, CompilationInfo *info)
 
int LookupDestination (int block_id) const
 
bool IsNextEmittedBlock (int block_id) const
 
bool NeedsEagerFrame () const
 
bool NeedsDeferredFrame () const
 
LinkRegisterStatus GetLinkRegisterState () const
 
Register ToRegister (LOperand *op) const
 
Register EmitLoadRegister (LOperand *op, Register scratch)
 
DwVfpRegister ToDoubleRegister (LOperand *op) const
 
DwVfpRegister EmitLoadDoubleRegister (LOperand *op, SwVfpRegister flt_scratch, DwVfpRegister dbl_scratch)
 
int32_t ToRepresentation (LConstantOperand *op, const Representation &r) const
 
int32_t ToInteger32 (LConstantOperand *op) const
 
Smi ToSmi (LConstantOperand *op) const
 
double ToDouble (LConstantOperand *op) const
 
Operand ToOperand (LOperand *op)
 
MemOperand ToMemOperand (LOperand *op) const
 
MemOperand ToHighMemOperand (LOperand *op) const
 
bool IsInteger32 (LConstantOperand *op) const
 
bool IsSmi (LConstantOperand *op) const
 
Handle< Object > ToHandle (LConstantOperand *op) const
 
bool GenerateCode ()
 
void FinishCode (Handle< Code > code)
 
void DoDeferredNumberTagD (LNumberTagD *instr)
 
void DoDeferredNumberTagIU (LInstruction *instr, LOperand *value, LOperand *temp1, LOperand *temp2, IntegerSignedness signedness)
 
void DoDeferredTaggedToI (LTaggedToI *instr)
 
void DoDeferredMathAbsTaggedHeapNumber (LMathAbs *instr)
 
void DoDeferredStackCheck (LStackCheck *instr)
 
void DoDeferredStringCharCodeAt (LStringCharCodeAt *instr)
 
void DoDeferredStringCharFromCode (LStringCharFromCode *instr)
 
void DoDeferredAllocate (LAllocate *instr)
 
void DoDeferredInstanceOfKnownGlobal (LInstanceOfKnownGlobal *instr, Label *map_check, Label *bool_load)
 
void DoDeferredInstanceMigration (LCheckMaps *instr, Register object)
 
void DoDeferredLoadMutableDouble (LLoadFieldByIndex *instr, Register result, Register object, Register index)
 
void DoParallelMove (LParallelMove *move)
 
void DoGap (LGap *instr)
 
MemOperand PrepareKeyedOperand (Register key, Register base, bool key_is_constant, int constant_key, int element_size, int shift_size, int base_offset)
 
void WriteTranslation (LEnvironment *environment, Translation *translation)
 
 LCodeGen (LChunk *chunk, MacroAssembler *assembler, CompilationInfo *info)
 
 ~LCodeGen ()
 
Scope * scope () const
 
int LookupDestination (int block_id) const
 
bool IsNextEmittedBlock (int block_id) const
 
bool NeedsEagerFrame () const
 
bool NeedsDeferredFrame () const
 
LinkRegisterStatus GetLinkRegisterState () const
 
bool GenerateCode ()
 
void FinishCode (Handle< Code > code)
 
Register ToRegister (LOperand *op) const
 
Register ToRegister32 (LOperand *op) const
 
Operand ToOperand (LOperand *op)
 
Operand ToOperand32 (LOperand *op)
 
MemOperand ToMemOperand (LOperand *op, StackMode stack_mode=kCanUseStackPointer) const
 
Handle< Object > ToHandle (LConstantOperand *op) const
 
template<class LI >
Operand ToShiftedRightOperand32 (LOperand *right, LI *shift_info)
 
int JSShiftAmountFromLConstant (LOperand *constant)
 
bool IsInteger32Constant (LConstantOperand *op) const
 
bool IsSmi (LConstantOperand *op) const
 
int32_t ToInteger32 (LConstantOperand *op) const
 
Smi ToSmi (LConstantOperand *op) const
 
double ToDouble (LConstantOperand *op) const
 
DoubleRegister ToDoubleRegister (LOperand *op) const
 
 LCodeGen (LChunk *chunk, MacroAssembler *assembler, CompilationInfo *info)
 
int LookupDestination (int block_id) const
 
bool IsNextEmittedBlock (int block_id) const
 
bool NeedsEagerFrame () const
 
bool NeedsDeferredFrame () const
 
Operand ToOperand (LOperand *op) const
 
Register ToRegister (LOperand *op) const
 
XMMRegister ToDoubleRegister (LOperand *op) const
 
bool IsInteger32 (LConstantOperand *op) const
 
bool IsSmi (LConstantOperand *op) const
 
Immediate ToImmediate (LOperand *op, const Representation &r) const
 
double ToDouble (LConstantOperand *op) const
 
Handle< Object > ToHandle (LConstantOperand *op) const
 
Operand HighOperand (LOperand *op)
 
bool GenerateCode ()
 
void FinishCode (Handle< Code > code)
 
void DoDeferredNumberTagD (LNumberTagD *instr)
 
void DoDeferredNumberTagIU (LInstruction *instr, LOperand *value, LOperand *temp, IntegerSignedness signedness)
 
void DoDeferredTaggedToI (LTaggedToI *instr, Label *done)
 
void DoDeferredMathAbsTaggedHeapNumber (LMathAbs *instr)
 
void DoDeferredStackCheck (LStackCheck *instr)
 
void DoDeferredStringCharCodeAt (LStringCharCodeAt *instr)
 
void DoDeferredStringCharFromCode (LStringCharFromCode *instr)
 
void DoDeferredAllocate (LAllocate *instr)
 
void DoDeferredInstanceOfKnownGlobal (LInstanceOfKnownGlobal *instr, Label *map_check)
 
void DoDeferredInstanceMigration (LCheckMaps *instr, Register object)
 
void DoDeferredLoadMutableDouble (LLoadFieldByIndex *instr, Register object, Register index)
 
void DoParallelMove (LParallelMove *move)
 
void DoGap (LGap *instr)
 
void WriteTranslation (LEnvironment *environment, Translation *translation)
 
void EnsureRelocSpaceForDeoptimization ()
 
 LCodeGen (LChunk *chunk, MacroAssembler *assembler, CompilationInfo *info)
 
int LookupDestination (int block_id) const
 
bool IsNextEmittedBlock (int block_id) const
 
bool NeedsEagerFrame () const
 
bool NeedsDeferredFrame () const
 
RAStatus GetRAState () const
 
Register ToRegister (LOperand *op) const
 
Register EmitLoadRegister (LOperand *op, Register scratch)
 
DoubleRegister ToDoubleRegister (LOperand *op) const
 
DoubleRegister EmitLoadDoubleRegister (LOperand *op, FloatRegister flt_scratch, DoubleRegister dbl_scratch)
 
int32_t ToRepresentation (LConstantOperand *op, const Representation &r) const
 
int32_t ToInteger32 (LConstantOperand *op) const
 
Smi ToSmi (LConstantOperand *op) const
 
double ToDouble (LConstantOperand *op) const
 
Operand ToOperand (LOperand *op)
 
MemOperand ToMemOperand (LOperand *op) const
 
MemOperand ToHighMemOperand (LOperand *op) const
 
bool IsInteger32 (LConstantOperand *op) const
 
bool IsSmi (LConstantOperand *op) const
 
Handle< Object > ToHandle (LConstantOperand *op) const
 
bool GenerateCode ()
 
void FinishCode (Handle< Code > code)
 
void DoDeferredNumberTagD (LNumberTagD *instr)
 
void DoDeferredNumberTagIU (LInstruction *instr, LOperand *value, LOperand *temp1, LOperand *temp2, IntegerSignedness signedness)
 
void DoDeferredTaggedToI (LTaggedToI *instr)
 
void DoDeferredMathAbsTaggedHeapNumber (LMathAbs *instr)
 
void DoDeferredStackCheck (LStackCheck *instr)
 
void DoDeferredStringCharCodeAt (LStringCharCodeAt *instr)
 
void DoDeferredStringCharFromCode (LStringCharFromCode *instr)
 
void DoDeferredAllocate (LAllocate *instr)
 
void DoDeferredInstanceOfKnownGlobal (LInstanceOfKnownGlobal *instr, Label *map_check)
 
void DoDeferredInstanceMigration (LCheckMaps *instr, Register object)
 
void DoDeferredLoadMutableDouble (LLoadFieldByIndex *instr, Register result, Register object, Register index)
 
void DoParallelMove (LParallelMove *move)
 
void DoGap (LGap *instr)
 
MemOperand PrepareKeyedOperand (Register key, Register base, bool key_is_constant, int constant_key, int element_size, int shift_size, int base_offset)
 
void WriteTranslation (LEnvironment *environment, Translation *translation)
 
 LCodeGen (LChunk *chunk, MacroAssembler *assembler, CompilationInfo *info)
 
int LookupDestination (int block_id) const
 
bool IsNextEmittedBlock (int block_id) const
 
bool NeedsEagerFrame () const
 
bool NeedsDeferredFrame () const
 
RAStatus GetRAState () const
 
Register ToRegister (LOperand *op) const
 
Register EmitLoadRegister (LOperand *op, Register scratch)
 
DoubleRegister ToDoubleRegister (LOperand *op) const
 
DoubleRegister EmitLoadDoubleRegister (LOperand *op, FloatRegister flt_scratch, DoubleRegister dbl_scratch)
 
int32_t ToRepresentation_donotuse (LConstantOperand *op, const Representation &r) const
 
int32_t ToInteger32 (LConstantOperand *op) const
 
Smi ToSmi (LConstantOperand *op) const
 
double ToDouble (LConstantOperand *op) const
 
Operand ToOperand (LOperand *op)
 
MemOperand ToMemOperand (LOperand *op) const
 
MemOperand ToHighMemOperand (LOperand *op) const
 
bool IsInteger32 (LConstantOperand *op) const
 
bool IsSmi (LConstantOperand *op) const
 
Handle< Object > ToHandle (LConstantOperand *op) const
 
bool GenerateCode ()
 
void FinishCode (Handle< Code > code)
 
void DoDeferredNumberTagD (LNumberTagD *instr)
 
void DoDeferredNumberTagIU (LInstruction *instr, LOperand *value, LOperand *temp1, LOperand *temp2, IntegerSignedness signedness)
 
void DoDeferredTaggedToI (LTaggedToI *instr)
 
void DoDeferredMathAbsTaggedHeapNumber (LMathAbs *instr)
 
void DoDeferredStackCheck (LStackCheck *instr)
 
void DoDeferredStringCharCodeAt (LStringCharCodeAt *instr)
 
void DoDeferredStringCharFromCode (LStringCharFromCode *instr)
 
void DoDeferredAllocate (LAllocate *instr)
 
void DoDeferredInstanceOfKnownGlobal (LInstanceOfKnownGlobal *instr, Label *map_check)
 
void DoDeferredInstanceMigration (LCheckMaps *instr, Register object)
 
void DoDeferredLoadMutableDouble (LLoadFieldByIndex *instr, Register result, Register object, Register index)
 
void DoParallelMove (LParallelMove *move)
 
void DoGap (LGap *instr)
 
MemOperand PrepareKeyedOperand (Register key, Register base, bool key_is_constant, int constant_key, int element_size, int shift_size, int base_offset)
 
void WriteTranslation (LEnvironment *environment, Translation *translation)
 
 LCodeGen (LChunk *chunk, MacroAssembler *assembler, CompilationInfo *info)
 
int LookupDestination (int block_id) const
 
bool IsNextEmittedBlock (int block_id) const
 
bool NeedsEagerFrame () const
 
bool NeedsDeferredFrame () const
 
Register ToRegister (LOperand *op) const
 
XMMRegister ToDoubleRegister (LOperand *op) const
 
bool IsInteger32Constant (LConstantOperand *op) const
 
bool IsDehoistedKeyConstant (LConstantOperand *op) const
 
bool IsSmiConstant (LConstantOperand *op) const
 
int32_t ToRepresentation (LConstantOperand *op, const Representation &r) const
 
int32_t ToInteger32 (LConstantOperand *op) const
 
Smi ToSmi (LConstantOperand *op) const
 
double ToDouble (LConstantOperand *op) const
 
ExternalReference ToExternalReference (LConstantOperand *op) const
 
Handle< Object > ToHandle (LConstantOperand *op) const
 
Operand ToOperand (LOperand *op) const
 
bool GenerateCode ()
 
void FinishCode (Handle< Code > code)
 
void DoDeferredNumberTagD (LNumberTagD *instr)
 
void DoDeferredNumberTagIU (LInstruction *instr, LOperand *value, LOperand *temp1, LOperand *temp2, IntegerSignedness signedness)
 
void DoDeferredTaggedToI (LTaggedToI *instr, Label *done)
 
void DoDeferredMathAbsTaggedHeapNumber (LMathAbs *instr)
 
void DoDeferredStackCheck (LStackCheck *instr)
 
void DoDeferredStringCharCodeAt (LStringCharCodeAt *instr)
 
void DoDeferredStringCharFromCode (LStringCharFromCode *instr)
 
void DoDeferredAllocate (LAllocate *instr)
 
void DoDeferredInstanceOfKnownGlobal (LInstanceOfKnownGlobal *instr, Label *map_check)
 
void DoDeferredInstanceMigration (LCheckMaps *instr, Register object)
 
void DoDeferredLoadMutableDouble (LLoadFieldByIndex *instr, Register object, Register index)
 
void DoParallelMove (LParallelMove *move)
 
void DoGap (LGap *instr)
 
void WriteTranslation (LEnvironment *environment, Translation *translation)
 
 LCodeGen (LChunk *chunk, MacroAssembler *assembler, CompilationInfo *info)
 
int LookupDestination (int block_id) const
 
bool IsNextEmittedBlock (int block_id) const
 
bool NeedsEagerFrame () const
 
bool NeedsDeferredFrame () const
 
Operand ToOperand (LOperand *op) const
 
Register ToRegister (LOperand *op) const
 
X87Register ToX87Register (LOperand *op) const
 
bool IsInteger32 (LConstantOperand *op) const
 
bool IsSmi (LConstantOperand *op) const
 
Immediate ToImmediate (LOperand *op, const Representation &r) const
 
double ToDouble (LConstantOperand *op) const
 
void X87Mov (X87Register reg, Operand src, X87OperandType operand=kX87DoubleOperand)
 
void X87Mov (Operand src, X87Register reg, X87OperandType operand=kX87DoubleOperand)
 
void X87Mov (X87Register reg, X87Register src, X87OperandType operand=kX87DoubleOperand)
 
void X87PrepareBinaryOp (X87Register left, X87Register right, X87Register result)
 
void X87LoadForUsage (X87Register reg)
 
void X87LoadForUsage (X87Register reg1, X87Register reg2)
 
void X87PrepareToWrite (X87Register reg)
 
void X87CommitWrite (X87Register reg)
 
void X87Fxch (X87Register reg, int other_slot=0)
 
void X87Free (X87Register reg)
 
bool X87StackEmpty ()
 
Handle< Object > ToHandle (LConstantOperand *op) const
 
Operand HighOperand (LOperand *op)
 
bool GenerateCode ()
 
void FinishCode (Handle< Code > code)
 
void DoDeferredNumberTagD (LNumberTagD *instr)
 
void DoDeferredNumberTagIU (LInstruction *instr, LOperand *value, LOperand *temp, IntegerSignedness signedness)
 
void DoDeferredTaggedToI (LTaggedToI *instr, Label *done)
 
void DoDeferredMathAbsTaggedHeapNumber (LMathAbs *instr)
 
void DoDeferredStackCheck (LStackCheck *instr)
 
void DoDeferredStringCharCodeAt (LStringCharCodeAt *instr)
 
void DoDeferredStringCharFromCode (LStringCharFromCode *instr)
 
void DoDeferredAllocate (LAllocate *instr)
 
void DoDeferredInstanceOfKnownGlobal (LInstanceOfKnownGlobal *instr, Label *map_check)
 
void DoDeferredInstanceMigration (LCheckMaps *instr, Register object)
 
void DoDeferredLoadMutableDouble (LLoadFieldByIndex *instr, Register object, Register index)
 
void DoParallelMove (LParallelMove *move)
 
void DoGap (LGap *instr)
 
void WriteTranslation (LEnvironment *environment, Translation *translation)
 
void EnsureRelocSpaceForDeoptimization ()
 

Private Types

enum  SafepointMode {
  RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS , RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS ,
  RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS , RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS ,
  RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS , RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS ,
  RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
}
 
enum  R1State { R1_UNINITIALIZED , R1_CONTAINS_TARGET }
 
enum  SafepointMode {
  RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS , RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS ,
  RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS , RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS ,
  RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS , RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS ,
  RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
}
 
enum  SafepointMode {
  RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS , RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS ,
  RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS , RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS ,
  RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS , RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS ,
  RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
}
 
enum  EDIState { EDI_UNINITIALIZED , EDI_CONTAINS_TARGET , EDI_UNINITIALIZED , EDI_CONTAINS_TARGET }
 
enum  SafepointMode {
  RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS , RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS ,
  RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS , RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS ,
  RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS , RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS ,
  RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
}
 
enum  A1State { A1_UNINITIALIZED , A1_CONTAINS_TARGET , A1_UNINITIALIZED , A1_CONTAINS_TARGET }
 
enum  SafepointMode {
  RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS , RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS ,
  RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS , RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS ,
  RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS , RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS ,
  RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
}
 
enum  A1State { A1_UNINITIALIZED , A1_CONTAINS_TARGET , A1_UNINITIALIZED , A1_CONTAINS_TARGET }
 
enum  SafepointMode {
  RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS , RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS ,
  RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS , RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS ,
  RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS , RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS ,
  RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
}
 
enum  RDIState { RDI_UNINITIALIZED , RDI_CONTAINS_TARGET }
 
enum  SafepointMode {
  RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS , RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS ,
  RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS , RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS ,
  RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS , RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS ,
  RECORD_SIMPLE_SAFEPOINT , RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
}
 
enum  EDIState { EDI_UNINITIALIZED , EDI_CONTAINS_TARGET , EDI_UNINITIALIZED , EDI_CONTAINS_TARGET }
 
typedef std::map< int, X87Stack * > X87StackMap
 

Private Member Functions

StrictMode strict_mode () const
 
Scope * scope () const
 
Register scratch0 ()
 
LowDwVfpRegister double_scratch0 ()
 
LInstruction * GetNextInstruction ()
 
void EmitClassOfTest (Label *if_true, Label *if_false, Handle< String > class_name, Register input, Register temporary, Register temporary2)
 
int GetStackSlotCount () const
 
void AddDeferredCode (LDeferredCode *code)
 
void SaveCallerDoubles ()
 
void RestoreCallerDoubles ()
 
void GenerateBodyInstructionPre (LInstruction *instr) OVERRIDE
 
bool GeneratePrologue ()
 
bool GenerateDeferredCode ()
 
bool GenerateJumpTable ()
 
bool GenerateSafepointTable ()
 
void GenerateOsrPrologue ()
 
int CallCodeSize (Handle< Code > code, RelocInfo::Mode mode)
 
void CallCode (Handle< Code > code, RelocInfo::Mode mode, LInstruction *instr, TargetAddressStorageMode storage_mode=CAN_INLINE_TARGET_ADDRESS)
 
void CallCodeGeneric (Handle< Code > code, RelocInfo::Mode mode, LInstruction *instr, SafepointMode safepoint_mode, TargetAddressStorageMode storage_mode=CAN_INLINE_TARGET_ADDRESS)
 
void CallRuntime (const Runtime::Function *function, int num_arguments, LInstruction *instr, SaveFPRegsMode save_doubles=kDontSaveFPRegs)
 
void CallRuntime (Runtime::FunctionId id, int num_arguments, LInstruction *instr)
 
void LoadContextFromDeferred (LOperand *context)
 
void CallRuntimeFromDeferred (Runtime::FunctionId id, int argc, LInstruction *instr, LOperand *context)
 
void CallKnownFunction (Handle< JSFunction > function, int formal_parameter_count, int arity, LInstruction *instr, R1State r1_state)
 
void RecordSafepointWithLazyDeopt (LInstruction *instr, SafepointMode safepoint_mode)
 
void RegisterEnvironmentForDeoptimization (LEnvironment *environment, Safepoint::DeoptMode mode)
 
void DeoptimizeIf (Condition condition, LInstruction *instr, const char *detail, Deoptimizer::BailoutType bailout_type)
 
void DeoptimizeIf (Condition condition, LInstruction *instr, const char *detail)
 
void AddToTranslation (LEnvironment *environment, Translation *translation, LOperand *op, bool is_tagged, bool is_uint32, int *object_index_pointer, int *dematerialized_index_pointer)
 
void PopulateDeoptimizationData (Handle< Code > code)
 
int DefineDeoptimizationLiteral (Handle< Object > literal)
 
void PopulateDeoptimizationLiteralsWithInlinedFunctions ()
 
Register ToRegister (int index) const
 
DwVfpRegister ToDoubleRegister (int index) const
 
MemOperand BuildSeqStringOperand (Register string, LOperand *index, String::Encoding encoding)
 
void EmitIntegerMathAbs (LMathAbs *instr)
 
void RecordSafepoint (LPointerMap *pointers, Safepoint::Kind kind, int arguments, Safepoint::DeoptMode mode)
 
void RecordSafepoint (LPointerMap *pointers, Safepoint::DeoptMode mode)
 
void RecordSafepoint (Safepoint::DeoptMode mode)
 
void RecordSafepointWithRegisters (LPointerMap *pointers, int arguments, Safepoint::DeoptMode mode)
 
void RecordAndWritePosition (int position) OVERRIDE
 
void EmitGoto (int block)
 
template<class InstrType >
void EmitBranch (InstrType instr, Condition condition)
 
template<class InstrType >
void EmitFalseBranch (InstrType instr, Condition condition)
 
void EmitNumberUntagD (LNumberUntagD *instr, Register input, DwVfpRegister result, NumberUntagDMode mode)
 
Condition EmitTypeofIs (Label *true_label, Label *false_label, Register input, Handle< String > type_name)
 
Condition EmitIsObject (Register input, Register temp1, Label *is_not_object, Label *is_object)
 
Condition EmitIsString (Register input, Register temp1, Label *is_not_string, SmiCheck check_needed)
 
void EmitIsConstructCall (Register temp1, Register temp2)
 
void EmitDeepCopy (Handle< JSObject > object, Register result, Register source, int *offset, AllocationSiteMode mode)
 
void EnsureSpaceForLazyDeopt (int space_needed) OVERRIDE
 
void DoLoadKeyedExternalArray (LLoadKeyed *instr)
 
void DoLoadKeyedFixedDoubleArray (LLoadKeyed *instr)
 
void DoLoadKeyedFixedArray (LLoadKeyed *instr)
 
void DoStoreKeyedExternalArray (LStoreKeyed *instr)
 
void DoStoreKeyedFixedDoubleArray (LStoreKeyed *instr)
 
void DoStoreKeyedFixedArray (LStoreKeyed *instr)
 
template<class T >
void EmitVectorLoadICRegisters (T *instr)
 
 DISALLOW_COPY_AND_ASSIGN (LCodeGen)
 
DoubleRegister double_scratch ()
 
void DoDeferredNumberTagD (LNumberTagD *instr)
 
void DoDeferredStackCheck (LStackCheck *instr)
 
void DoDeferredStringCharCodeAt (LStringCharCodeAt *instr)
 
void DoDeferredStringCharFromCode (LStringCharFromCode *instr)
 
void DoDeferredMathAbsTagged (LMathAbsTagged *instr, Label *exit, Label *allocation_entry)
 
void DoDeferredNumberTagU (LInstruction *instr, LOperand *value, LOperand *temp1, LOperand *temp2)
 
void DoDeferredTaggedToI (LTaggedToI *instr, LOperand *value, LOperand *temp1, LOperand *temp2)
 
void DoDeferredAllocate (LAllocate *instr)
 
void DoDeferredInstanceOfKnownGlobal (LInstanceOfKnownGlobal *instr)
 
void DoDeferredInstanceMigration (LCheckMaps *instr, Register object)
 
void DoDeferredLoadMutableDouble (LLoadFieldByIndex *instr, Register result, Register object, Register index)
 
void EmitGoto (int block)
 
void DoGap (LGap *instr)
 
template<class InstrType >
void EmitBranchGeneric (InstrType instr, const BranchGenerator &branch)
 
template<class InstrType >
void EmitBranch (InstrType instr, Condition condition)
 
template<class InstrType >
void EmitCompareAndBranch (InstrType instr, Condition condition, const Register &lhs, const Operand &rhs)
 
template<class InstrType >
void EmitTestAndBranch (InstrType instr, Condition condition, const Register &value, uint64_t mask)
 
template<class InstrType >
void EmitBranchIfNonZeroNumber (InstrType instr, const FPRegister &value, const FPRegister &scratch)
 
template<class InstrType >
void EmitBranchIfHeapNumber (InstrType instr, const Register &value)
 
template<class InstrType >
void EmitBranchIfRoot (InstrType instr, const Register &value, Heap::RootListIndex index)
 
void EmitDeepCopy (Handle< JSObject > object, Register result, Register source, Register scratch, int *offset, AllocationSiteMode mode)
 
template<class T >
void EmitVectorLoadICRegisters (T *instr)
 
Condition EmitIsString (Register input, Register temp1, Label *is_not_string, SmiCheck check_needed)
 
int DefineDeoptimizationLiteral (Handle< Object > literal)
 
void PopulateDeoptimizationData (Handle< Code > code)
 
void PopulateDeoptimizationLiteralsWithInlinedFunctions ()
 
MemOperand BuildSeqStringOperand (Register string, Register temp, LOperand *index, String::Encoding encoding)
 
void DeoptimizeBranch (LInstruction *instr, const char *detail, BranchType branch_type, Register reg=NoReg, int bit=-1, Deoptimizer::BailoutType *override_bailout_type=NULL)
 
void Deoptimize (LInstruction *instr, const char *detail, Deoptimizer::BailoutType *override_bailout_type=NULL)
 
void DeoptimizeIf (Condition cond, LInstruction *instr, const char *detail)
 
void DeoptimizeIfZero (Register rt, LInstruction *instr, const char *detail)
 
void DeoptimizeIfNotZero (Register rt, LInstruction *instr, const char *detail)
 
void DeoptimizeIfNegative (Register rt, LInstruction *instr, const char *detail)
 
void DeoptimizeIfSmi (Register rt, LInstruction *instr, const char *detail)
 
void DeoptimizeIfNotSmi (Register rt, LInstruction *instr, const char *detail)
 
void DeoptimizeIfRoot (Register rt, Heap::RootListIndex index, LInstruction *instr, const char *detail)
 
void DeoptimizeIfNotRoot (Register rt, Heap::RootListIndex index, LInstruction *instr, const char *detail)
 
void DeoptimizeIfNotHeapNumber (Register object, LInstruction *instr)
 
void DeoptimizeIfMinusZero (DoubleRegister input, LInstruction *instr, const char *detail)
 
void DeoptimizeIfBitSet (Register rt, int bit, LInstruction *instr, const char *detail)
 
void DeoptimizeIfBitClear (Register rt, int bit, LInstruction *instr, const char *detail)
 
MemOperand PrepareKeyedExternalArrayOperand (Register key, Register base, Register scratch, bool key_is_smi, bool key_is_constant, int constant_key, ElementsKind elements_kind, int base_offset)
 
MemOperand PrepareKeyedArrayOperand (Register base, Register elements, Register key, bool key_is_tagged, ElementsKind elements_kind, Representation representation, int base_offset)
 
void RegisterEnvironmentForDeoptimization (LEnvironment *environment, Safepoint::DeoptMode mode)
 
int GetStackSlotCount () const
 
void AddDeferredCode (LDeferredCode *code)
 
void WriteTranslation (LEnvironment *environment, Translation *translation)
 
void AddToTranslation (LEnvironment *environment, Translation *translation, LOperand *op, bool is_tagged, bool is_uint32, int *object_index_pointer, int *dematerialized_index_pointer)
 
void SaveCallerDoubles ()
 
void RestoreCallerDoubles ()
 
void GenerateBodyInstructionPre (LInstruction *instr) OVERRIDE
 
bool GeneratePrologue ()
 
bool GenerateDeferredCode ()
 
bool GenerateJumpTable ()
 
bool GenerateSafepointTable ()
 
void GenerateOsrPrologue ()
 
void CallCode (Handle< Code > code, RelocInfo::Mode mode, LInstruction *instr)
 
void CallCodeGeneric (Handle< Code > code, RelocInfo::Mode mode, LInstruction *instr, SafepointMode safepoint_mode)
 
void CallRuntime (const Runtime::Function *function, int num_arguments, LInstruction *instr, SaveFPRegsMode save_doubles=kDontSaveFPRegs)
 
void CallRuntime (Runtime::FunctionId id, int num_arguments, LInstruction *instr)
 
void LoadContextFromDeferred (LOperand *context)
 
void CallRuntimeFromDeferred (Runtime::FunctionId id, int argc, LInstruction *instr, LOperand *context)
 
void CallKnownFunction (Handle< JSFunction > function, int formal_parameter_count, int arity, LInstruction *instr, Register function_reg=NoReg)
 
void RecordAndWritePosition (int position) OVERRIDE
 
void RecordSafepoint (LPointerMap *pointers, Safepoint::Kind kind, int arguments, Safepoint::DeoptMode mode)
 
void RecordSafepoint (LPointerMap *pointers, Safepoint::DeoptMode mode)
 
void RecordSafepoint (Safepoint::DeoptMode mode)
 
void RecordSafepointWithRegisters (LPointerMap *pointers, int arguments, Safepoint::DeoptMode mode)
 
void RecordSafepointWithLazyDeopt (LInstruction *instr, SafepointMode safepoint_mode)
 
void EnsureSpaceForLazyDeopt (int space_needed) OVERRIDE
 
 DISALLOW_COPY_AND_ASSIGN (LCodeGen)
 
StrictMode strict_mode () const
 
Scope * scope () const
 
XMMRegister double_scratch0 () const
 
void EmitClassOfTest (Label *if_true, Label *if_false, Handle< String > class_name, Register input, Register temporary, Register temporary2)
 
int GetStackSlotCount () const
 
void AddDeferredCode (LDeferredCode *code)
 
void SaveCallerDoubles ()
 
void RestoreCallerDoubles ()
 
void GenerateBodyInstructionPre (LInstruction *instr) OVERRIDE
 
void GenerateBodyInstructionPost (LInstruction *instr) OVERRIDE
 
bool GeneratePrologue ()
 
bool GenerateDeferredCode ()
 
bool GenerateJumpTable ()
 
bool GenerateSafepointTable ()
 
void GenerateOsrPrologue ()
 
void CallCode (Handle< Code > code, RelocInfo::Mode mode, LInstruction *instr)
 
void CallCodeGeneric (Handle< Code > code, RelocInfo::Mode mode, LInstruction *instr, SafepointMode safepoint_mode)
 
void CallRuntime (const Runtime::Function *fun, int argc, LInstruction *instr, SaveFPRegsMode save_doubles=kDontSaveFPRegs)
 
void CallRuntime (Runtime::FunctionId id, int argc, LInstruction *instr)
 
void CallRuntimeFromDeferred (Runtime::FunctionId id, int argc, LInstruction *instr, LOperand *context)
 
void LoadContextFromDeferred (LOperand *context)
 
void CallKnownFunction (Handle< JSFunction > function, int formal_parameter_count, int arity, LInstruction *instr, EDIState edi_state)
 
void RecordSafepointWithLazyDeopt (LInstruction *instr, SafepointMode safepoint_mode)
 
void RegisterEnvironmentForDeoptimization (LEnvironment *environment, Safepoint::DeoptMode mode)
 
void DeoptimizeIf (Condition cc, LInstruction *instr, const char *detail, Deoptimizer::BailoutType bailout_type)
 
void DeoptimizeIf (Condition cc, LInstruction *instr, const char *detail)
 
bool DeoptEveryNTimes ()
 
void AddToTranslation (LEnvironment *environment, Translation *translation, LOperand *op, bool is_tagged, bool is_uint32, int *object_index_pointer, int *dematerialized_index_pointer)
 
void PopulateDeoptimizationData (Handle< Code > code)
 
int DefineDeoptimizationLiteral (Handle< Object > literal)
 
void PopulateDeoptimizationLiteralsWithInlinedFunctions ()
 
Register ToRegister (int index) const
 
XMMRegister ToDoubleRegister (int index) const
 
int32_t ToRepresentation (LConstantOperand *op, const Representation &r) const
 
int32_t ToInteger32 (LConstantOperand *op) const
 
ExternalReference ToExternalReference (LConstantOperand *op) const
 
Operand BuildFastArrayOperand (LOperand *elements_pointer, LOperand *key, Representation key_representation, ElementsKind elements_kind, uint32_t base_offset)
 
Operand BuildSeqStringOperand (Register string, LOperand *index, String::Encoding encoding)
 
void EmitIntegerMathAbs (LMathAbs *instr)
 
void RecordSafepoint (LPointerMap *pointers, Safepoint::Kind kind, int arguments, Safepoint::DeoptMode mode)
 
void RecordSafepoint (LPointerMap *pointers, Safepoint::DeoptMode mode)
 
void RecordSafepoint (Safepoint::DeoptMode mode)
 
void RecordSafepointWithRegisters (LPointerMap *pointers, int arguments, Safepoint::DeoptMode mode)
 
void RecordAndWritePosition (int position) OVERRIDE
 
void EmitGoto (int block)
 
template<class InstrType >
void EmitBranch (InstrType instr, Condition cc)
 
template<class InstrType >
void EmitFalseBranch (InstrType instr, Condition cc)
 
void EmitNumberUntagD (LNumberUntagD *instr, Register input, Register temp, XMMRegister result, NumberUntagDMode mode)
 
Condition EmitTypeofIs (LTypeofIsAndBranch *instr, Register input)
 
Condition EmitIsObject (Register input, Register temp1, Label *is_not_object, Label *is_object)
 
Condition EmitIsString (Register input, Register temp1, Label *is_not_string, SmiCheck check_needed)
 
void EmitIsConstructCall (Register temp)
 
void EmitDeepCopy (Handle< JSObject > object, Register result, Register source, int *offset, AllocationSiteMode mode)
 
void EnsureSpaceForLazyDeopt (int space_needed) OVERRIDE
 
void DoLoadKeyedExternalArray (LLoadKeyed *instr)
 
void DoLoadKeyedFixedDoubleArray (LLoadKeyed *instr)
 
void DoLoadKeyedFixedArray (LLoadKeyed *instr)
 
void DoStoreKeyedExternalArray (LStoreKeyed *instr)
 
void DoStoreKeyedFixedDoubleArray (LStoreKeyed *instr)
 
void DoStoreKeyedFixedArray (LStoreKeyed *instr)
 
template<class T >
void EmitVectorLoadICRegisters (T *instr)
 
void EmitReturn (LReturn *instr, bool dynamic_frame_alignment)
 
void EmitPushTaggedOperand (LOperand *operand)
 
 DISALLOW_COPY_AND_ASSIGN (LCodeGen)
 
StrictMode strict_mode () const
 
Scope * scope () const
 
Register scratch0 ()
 
Register scratch1 ()
 
DoubleRegister double_scratch0 ()
 
LInstruction * GetNextInstruction ()
 
void EmitClassOfTest (Label *if_true, Label *if_false, Handle< String > class_name, Register input, Register temporary, Register temporary2)
 
int GetStackSlotCount () const
 
void AddDeferredCode (LDeferredCode *code)
 
void SaveCallerDoubles ()
 
void RestoreCallerDoubles ()
 
void GenerateBodyInstructionPre (LInstruction *instr) OVERRIDE
 
bool GeneratePrologue ()
 
bool GenerateDeferredCode ()
 
bool GenerateJumpTable ()
 
bool GenerateSafepointTable ()
 
void GenerateOsrPrologue ()
 
void CallCode (Handle< Code > code, RelocInfo::Mode mode, LInstruction *instr)
 
void CallCodeGeneric (Handle< Code > code, RelocInfo::Mode mode, LInstruction *instr, SafepointMode safepoint_mode)
 
void CallRuntime (const Runtime::Function *function, int num_arguments, LInstruction *instr, SaveFPRegsMode save_doubles=kDontSaveFPRegs)
 
void CallRuntime (Runtime::FunctionId id, int num_arguments, LInstruction *instr)
 
void LoadContextFromDeferred (LOperand *context)
 
void CallRuntimeFromDeferred (Runtime::FunctionId id, int argc, LInstruction *instr, LOperand *context)
 
void CallKnownFunction (Handle< JSFunction > function, int formal_parameter_count, int arity, LInstruction *instr, A1State a1_state)
 
void RecordSafepointWithLazyDeopt (LInstruction *instr, SafepointMode safepoint_mode)
 
void RegisterEnvironmentForDeoptimization (LEnvironment *environment, Safepoint::DeoptMode mode)
 
void DeoptimizeIf (Condition condition, LInstruction *instr, Deoptimizer::BailoutType bailout_type, const char *detail, Register src1=zero_reg, const Operand &src2=Operand(zero_reg))
 
void DeoptimizeIf (Condition condition, LInstruction *instr, const char *detail=NULL, Register src1=zero_reg, const Operand &src2=Operand(zero_reg))
 
void AddToTranslation (LEnvironment *environment, Translation *translation, LOperand *op, bool is_tagged, bool is_uint32, int *object_index_pointer, int *dematerialized_index_pointer)
 
void PopulateDeoptimizationData (Handle< Code > code)
 
int DefineDeoptimizationLiteral (Handle< Object > literal)
 
void PopulateDeoptimizationLiteralsWithInlinedFunctions ()
 
Register ToRegister (int index) const
 
DoubleRegister ToDoubleRegister (int index) const
 
MemOperand BuildSeqStringOperand (Register string, LOperand *index, String::Encoding encoding)
 
void EmitIntegerMathAbs (LMathAbs *instr)
 
void RecordSafepoint (LPointerMap *pointers, Safepoint::Kind kind, int arguments, Safepoint::DeoptMode mode)
 
void RecordSafepoint (LPointerMap *pointers, Safepoint::DeoptMode mode)
 
void RecordSafepoint (Safepoint::DeoptMode mode)
 
void RecordSafepointWithRegisters (LPointerMap *pointers, int arguments, Safepoint::DeoptMode mode)
 
void RecordAndWritePosition (int position) OVERRIDE
 
void EmitGoto (int block)
 
template<class InstrType >
void EmitBranch (InstrType instr, Condition condition, Register src1, const Operand &src2)
 
template<class InstrType >
void EmitBranchF (InstrType instr, Condition condition, FPURegister src1, FPURegister src2)
 
template<class InstrType >
void EmitFalseBranch (InstrType instr, Condition condition, Register src1, const Operand &src2)
 
template<class InstrType >
void EmitFalseBranchF (InstrType instr, Condition condition, FPURegister src1, FPURegister src2)
 
void EmitCmpI (LOperand *left, LOperand *right)
 
void EmitNumberUntagD (LNumberUntagD *instr, Register input, DoubleRegister result, NumberUntagDMode mode)
 
Condition EmitTypeofIs (Label *true_label, Label *false_label, Register input, Handle< String > type_name, Register *cmp1, Operand *cmp2)
 
Condition EmitIsObject (Register input, Register temp1, Register temp2, Label *is_not_object, Label *is_object)
 
Condition EmitIsString (Register input, Register temp1, Label *is_not_string, SmiCheck check_needed)
 
void EmitIsConstructCall (Register temp1, Register temp2)
 
void EmitDeepCopy (Handle< JSObject > object, Register result, Register source, int *offset, AllocationSiteMode mode)
 
void EmitSignedIntegerDivisionByConstant (Register result, Register dividend, int32_t divisor, Register remainder, Register scratch, LEnvironment *environment)
 
void EnsureSpaceForLazyDeopt (int space_needed) OVERRIDE
 
void DoLoadKeyedExternalArray (LLoadKeyed *instr)
 
void DoLoadKeyedFixedDoubleArray (LLoadKeyed *instr)
 
void DoLoadKeyedFixedArray (LLoadKeyed *instr)
 
void DoStoreKeyedExternalArray (LStoreKeyed *instr)
 
void DoStoreKeyedFixedDoubleArray (LStoreKeyed *instr)
 
void DoStoreKeyedFixedArray (LStoreKeyed *instr)
 
template<class T >
void EmitVectorLoadICRegisters (T *instr)
 
 DISALLOW_COPY_AND_ASSIGN (LCodeGen)
 
StrictMode strict_mode () const
 
Scope * scope () const
 
Register scratch0 ()
 
Register scratch1 ()
 
DoubleRegister double_scratch0 ()
 
LInstruction * GetNextInstruction ()
 
void EmitClassOfTest (Label *if_true, Label *if_false, Handle< String > class_name, Register input, Register temporary, Register temporary2)
 
int GetStackSlotCount () const
 
void AddDeferredCode (LDeferredCode *code)
 
void SaveCallerDoubles ()
 
void RestoreCallerDoubles ()
 
void GenerateBodyInstructionPre (LInstruction *instr) OVERRIDE
 
bool GeneratePrologue ()
 
bool GenerateDeferredCode ()
 
bool GenerateJumpTable ()
 
bool GenerateSafepointTable ()
 
void GenerateOsrPrologue ()
 
void CallCode (Handle< Code > code, RelocInfo::Mode mode, LInstruction *instr)
 
void CallCodeGeneric (Handle< Code > code, RelocInfo::Mode mode, LInstruction *instr, SafepointMode safepoint_mode)
 
void CallRuntime (const Runtime::Function *function, int num_arguments, LInstruction *instr, SaveFPRegsMode save_doubles=kDontSaveFPRegs)
 
void CallRuntime (Runtime::FunctionId id, int num_arguments, LInstruction *instr)
 
void LoadContextFromDeferred (LOperand *context)
 
void CallRuntimeFromDeferred (Runtime::FunctionId id, int argc, LInstruction *instr, LOperand *context)
 
void CallKnownFunction (Handle< JSFunction > function, int formal_parameter_count, int arity, LInstruction *instr, A1State a1_state)
 
void RecordSafepointWithLazyDeopt (LInstruction *instr, SafepointMode safepoint_mode)
 
void RegisterEnvironmentForDeoptimization (LEnvironment *environment, Safepoint::DeoptMode mode)
 
void DeoptimizeIf (Condition condition, LInstruction *instr, Deoptimizer::BailoutType bailout_type, const char *detail, Register src1=zero_reg, const Operand &src2=Operand(zero_reg))
 
void DeoptimizeIf (Condition condition, LInstruction *instr, const char *detail, Register src1=zero_reg, const Operand &src2=Operand(zero_reg))
 
void AddToTranslation (LEnvironment *environment, Translation *translation, LOperand *op, bool is_tagged, bool is_uint32, int *object_index_pointer, int *dematerialized_index_pointer)
 
void PopulateDeoptimizationData (Handle< Code > code)
 
int DefineDeoptimizationLiteral (Handle< Object > literal)
 
void PopulateDeoptimizationLiteralsWithInlinedFunctions ()
 
Register ToRegister (int index) const
 
DoubleRegister ToDoubleRegister (int index) const
 
MemOperand BuildSeqStringOperand (Register string, LOperand *index, String::Encoding encoding)
 
void EmitIntegerMathAbs (LMathAbs *instr)
 
void RecordSafepoint (LPointerMap *pointers, Safepoint::Kind kind, int arguments, Safepoint::DeoptMode mode)
 
void RecordSafepoint (LPointerMap *pointers, Safepoint::DeoptMode mode)
 
void RecordSafepoint (Safepoint::DeoptMode mode)
 
void RecordSafepointWithRegisters (LPointerMap *pointers, int arguments, Safepoint::DeoptMode mode)
 
void RecordAndWritePosition (int position) OVERRIDE
 
void EmitGoto (int block)
 
template<class InstrType >
void EmitBranch (InstrType instr, Condition condition, Register src1, const Operand &src2)
 
template<class InstrType >
void EmitBranchF (InstrType instr, Condition condition, FPURegister src1, FPURegister src2)
 
template<class InstrType >
void EmitFalseBranch (InstrType instr, Condition condition, Register src1, const Operand &src2)
 
template<class InstrType >
void EmitFalseBranchF (InstrType instr, Condition condition, FPURegister src1, FPURegister src2)
 
void EmitCmpI (LOperand *left, LOperand *right)
 
void EmitNumberUntagD (LNumberUntagD *instr, Register input, DoubleRegister result, NumberUntagDMode mode)
 
Condition EmitTypeofIs (Label *true_label, Label *false_label, Register input, Handle< String > type_name, Register *cmp1, Operand *cmp2)
 
Condition EmitIsObject (Register input, Register temp1, Register temp2, Label *is_not_object, Label *is_object)
 
Condition EmitIsString (Register input, Register temp1, Label *is_not_string, SmiCheck check_needed)
 
void EmitIsConstructCall (Register temp1, Register temp2)
 
void EmitDeepCopy (Handle< JSObject > object, Register result, Register source, int *offset, AllocationSiteMode mode)
 
void EmitSignedIntegerDivisionByConstant (Register result, Register dividend, int32_t divisor, Register remainder, Register scratch, LEnvironment *environment)
 
void EnsureSpaceForLazyDeopt (int space_needed) OVERRIDE
 
void DoLoadKeyedExternalArray (LLoadKeyed *instr)
 
void DoLoadKeyedFixedDoubleArray (LLoadKeyed *instr)
 
void DoLoadKeyedFixedArray (LLoadKeyed *instr)
 
void DoStoreKeyedExternalArray (LStoreKeyed *instr)
 
void DoStoreKeyedFixedDoubleArray (LStoreKeyed *instr)
 
void DoStoreKeyedFixedArray (LStoreKeyed *instr)
 
template<class T >
void EmitVectorLoadICRegisters (T *instr)
 
 DISALLOW_COPY_AND_ASSIGN (LCodeGen)
 
StrictMode strict_mode () const
 
LPlatformChunk * chunk () const
 
Scope * scope () const
 
HGraph * graph () const
 
XMMRegister double_scratch0 () const
 
void EmitClassOfTest (Label *if_true, Label *if_false, Handle< String > class_name, Register input, Register temporary, Register scratch)
 
int GetStackSlotCount () const
 
void AddDeferredCode (LDeferredCode *code)
 
void SaveCallerDoubles ()
 
void RestoreCallerDoubles ()
 
void GenerateBodyInstructionPre (LInstruction *instr) OVERRIDE
 
void GenerateBodyInstructionPost (LInstruction *instr) OVERRIDE
 
bool GeneratePrologue ()
 
bool GenerateDeferredCode ()
 
bool GenerateJumpTable ()
 
bool GenerateSafepointTable ()
 
void GenerateOsrPrologue ()
 
void CallCodeGeneric (Handle< Code > code, RelocInfo::Mode mode, LInstruction *instr, SafepointMode safepoint_mode, int argc)
 
void CallCode (Handle< Code > code, RelocInfo::Mode mode, LInstruction *instr)
 
void CallRuntime (const Runtime::Function *function, int num_arguments, LInstruction *instr, SaveFPRegsMode save_doubles=kDontSaveFPRegs)
 
void CallRuntime (Runtime::FunctionId id, int num_arguments, LInstruction *instr)
 
void CallRuntimeFromDeferred (Runtime::FunctionId id, int argc, LInstruction *instr, LOperand *context)
 
void LoadContextFromDeferred (LOperand *context)
 
void CallKnownFunction (Handle< JSFunction > function, int formal_parameter_count, int arity, LInstruction *instr, RDIState rdi_state)
 
void RecordSafepointWithLazyDeopt (LInstruction *instr, SafepointMode safepoint_mode, int argc)
 
void RegisterEnvironmentForDeoptimization (LEnvironment *environment, Safepoint::DeoptMode mode)
 
void DeoptimizeIf (Condition cc, LInstruction *instr, const char *detail, Deoptimizer::BailoutType bailout_type)
 
void DeoptimizeIf (Condition cc, LInstruction *instr, const char *detail)
 
bool DeoptEveryNTimes ()
 
void AddToTranslation (LEnvironment *environment, Translation *translation, LOperand *op, bool is_tagged, bool is_uint32, int *object_index_pointer, int *dematerialized_index_pointer)
 
void PopulateDeoptimizationData (Handle< Code > code)
 
int DefineDeoptimizationLiteral (Handle< Object > literal)
 
void PopulateDeoptimizationLiteralsWithInlinedFunctions ()
 
Register ToRegister (int index) const
 
XMMRegister ToDoubleRegister (int index) const
 
Operand BuildFastArrayOperand (LOperand *elements_pointer, LOperand *key, Representation key_representation, ElementsKind elements_kind, uint32_t base_offset)
 
Operand BuildSeqStringOperand (Register string, LOperand *index, String::Encoding encoding)
 
void EmitIntegerMathAbs (LMathAbs *instr)
 
void EmitSmiMathAbs (LMathAbs *instr)
 
void RecordSafepoint (LPointerMap *pointers, Safepoint::Kind kind, int arguments, Safepoint::DeoptMode mode)
 
void RecordSafepoint (LPointerMap *pointers, Safepoint::DeoptMode mode)
 
void RecordSafepoint (Safepoint::DeoptMode mode)
 
void RecordSafepointWithRegisters (LPointerMap *pointers, int arguments, Safepoint::DeoptMode mode)
 
void RecordAndWritePosition (int position) OVERRIDE
 
void EmitGoto (int block)
 
template<class InstrType >
void EmitBranch (InstrType instr, Condition cc)
 
template<class InstrType >
void EmitFalseBranch (InstrType instr, Condition cc)
 
void EmitNumberUntagD (LNumberUntagD *instr, Register input, XMMRegister result, NumberUntagDMode mode)
 
Condition EmitTypeofIs (LTypeofIsAndBranch *instr, Register input)
 
Condition EmitIsObject (Register input, Label *is_not_object, Label *is_object)
 
Condition EmitIsString (Register input, Register temp1, Label *is_not_string, SmiCheck check_needed)
 
void EmitIsConstructCall (Register temp)
 
void EmitPushTaggedOperand (LOperand *operand)
 
void EmitDeepCopy (Handle< JSObject > object, Register result, Register source, int *offset, AllocationSiteMode mode)
 
void EnsureSpaceForLazyDeopt (int space_needed) OVERRIDE
 
void DoLoadKeyedExternalArray (LLoadKeyed *instr)
 
void DoLoadKeyedFixedDoubleArray (LLoadKeyed *instr)
 
void DoLoadKeyedFixedArray (LLoadKeyed *instr)
 
void DoStoreKeyedExternalArray (LStoreKeyed *instr)
 
void DoStoreKeyedFixedDoubleArray (LStoreKeyed *instr)
 
void DoStoreKeyedFixedArray (LStoreKeyed *instr)
 
template<class T >
void EmitVectorLoadICRegisters (T *instr)
 
 DISALLOW_COPY_AND_ASSIGN (LCodeGen)
 
StrictMode strict_mode () const
 
Scope * scope () const
 
void EmitClassOfTest (Label *if_true, Label *if_false, Handle< String > class_name, Register input, Register temporary, Register temporary2)
 
int GetStackSlotCount () const
 
void AddDeferredCode (LDeferredCode *code)
 
void GenerateBodyInstructionPre (LInstruction *instr) OVERRIDE
 
void GenerateBodyInstructionPost (LInstruction *instr) OVERRIDE
 
bool GeneratePrologue ()
 
bool GenerateDeferredCode ()
 
bool GenerateJumpTable ()
 
bool GenerateSafepointTable ()
 
void GenerateOsrPrologue ()
 
void CallCode (Handle< Code > code, RelocInfo::Mode mode, LInstruction *instr)
 
void CallCodeGeneric (Handle< Code > code, RelocInfo::Mode mode, LInstruction *instr, SafepointMode safepoint_mode)
 
void CallRuntime (const Runtime::Function *fun, int argc, LInstruction *instr, SaveFPRegsMode save_doubles=kDontSaveFPRegs)
 
void CallRuntime (Runtime::FunctionId id, int argc, LInstruction *instr)
 
void CallRuntimeFromDeferred (Runtime::FunctionId id, int argc, LInstruction *instr, LOperand *context)
 
void LoadContextFromDeferred (LOperand *context)
 
void CallKnownFunction (Handle< JSFunction > function, int formal_parameter_count, int arity, LInstruction *instr, EDIState edi_state)
 
void RecordSafepointWithLazyDeopt (LInstruction *instr, SafepointMode safepoint_mode)
 
void RegisterEnvironmentForDeoptimization (LEnvironment *environment, Safepoint::DeoptMode mode)
 
void DeoptimizeIf (Condition cc, LInstruction *instr, const char *detail, Deoptimizer::BailoutType bailout_type)
 
void DeoptimizeIf (Condition cc, LInstruction *instr, const char *detail)
 
bool DeoptEveryNTimes ()
 
void AddToTranslation (LEnvironment *environment, Translation *translation, LOperand *op, bool is_tagged, bool is_uint32, int *object_index_pointer, int *dematerialized_index_pointer)
 
void PopulateDeoptimizationData (Handle< Code > code)
 
int DefineDeoptimizationLiteral (Handle< Object > literal)
 
void PopulateDeoptimizationLiteralsWithInlinedFunctions ()
 
Register ToRegister (int index) const
 
X87Register ToX87Register (int index) const
 
int32_t ToRepresentation (LConstantOperand *op, const Representation &r) const
 
int32_t ToInteger32 (LConstantOperand *op) const
 
ExternalReference ToExternalReference (LConstantOperand *op) const
 
Operand BuildFastArrayOperand (LOperand *elements_pointer, LOperand *key, Representation key_representation, ElementsKind elements_kind, uint32_t base_offset)
 
Operand BuildSeqStringOperand (Register string, LOperand *index, String::Encoding encoding)
 
void EmitIntegerMathAbs (LMathAbs *instr)
 
void RecordSafepoint (LPointerMap *pointers, Safepoint::Kind kind, int arguments, Safepoint::DeoptMode mode)
 
void RecordSafepoint (LPointerMap *pointers, Safepoint::DeoptMode mode)
 
void RecordSafepoint (Safepoint::DeoptMode mode)
 
void RecordSafepointWithRegisters (LPointerMap *pointers, int arguments, Safepoint::DeoptMode mode)
 
void RecordAndWritePosition (int position) OVERRIDE
 
void EmitGoto (int block)
 
template<class InstrType >
void EmitBranch (InstrType instr, Condition cc)
 
template<class InstrType >
void EmitFalseBranch (InstrType instr, Condition cc)
 
void EmitNumberUntagDNoSSE2 (LNumberUntagD *instr, Register input, Register temp, X87Register res_reg, NumberUntagDMode mode)
 
Condition EmitTypeofIs (LTypeofIsAndBranch *instr, Register input)
 
Condition EmitIsObject (Register input, Register temp1, Label *is_not_object, Label *is_object)
 
Condition EmitIsString (Register input, Register temp1, Label *is_not_string, SmiCheck check_needed)
 
void EmitIsConstructCall (Register temp)
 
void EmitDeepCopy (Handle< JSObject > object, Register result, Register source, int *offset, AllocationSiteMode mode)
 
void EnsureSpaceForLazyDeopt (int space_needed) OVERRIDE
 
void DoLoadKeyedExternalArray (LLoadKeyed *instr)
 
void DoLoadKeyedFixedDoubleArray (LLoadKeyed *instr)
 
void DoLoadKeyedFixedArray (LLoadKeyed *instr)
 
void DoStoreKeyedExternalArray (LStoreKeyed *instr)
 
void DoStoreKeyedFixedDoubleArray (LStoreKeyed *instr)
 
void DoStoreKeyedFixedArray (LStoreKeyed *instr)
 
template<class T >
void EmitVectorLoadICRegisters (T *instr)
 
void EmitReturn (LReturn *instr, bool dynamic_frame_alignment)
 
void EmitPushTaggedOperand (LOperand *operand)
 
void X87Fld (Operand src, X87OperandType opts)
 
void EmitFlushX87ForDeopt ()
 
void FlushX87StackIfNecessary (LInstruction *instr)
 
 DISALLOW_COPY_AND_ASSIGN (LCodeGen)
 

Static Private Member Functions

static Condition TokenToCondition (Token::Value op, bool is_unsigned)
 
static Condition TokenToCondition (Token::Value op, bool is_unsigned)
 
static Condition TokenToCondition (Token::Value op, bool is_unsigned)
 
static Condition TokenToCondition (Token::Value op, bool is_unsigned)
 
static Condition TokenToCondition (Token::Value op, bool is_unsigned)
 
static Condition TokenToCondition (Token::Value op, bool is_unsigned)
 
static Condition TokenToCondition (Token::Value op, bool is_unsigned)
 

Private Attributes

ZoneList< LEnvironment * > deoptimizations_
 
ZoneList< Deoptimizer::JumpTableEntry > jump_table_
 
ZoneList< Handle< Object > > deoptimization_literals_
 
int inlined_function_count_
 
Scope *const scope_
 
TranslationBuffer translations_
 
ZoneList< LDeferredCode * > deferred_
 
int osr_pc_offset_
 
bool frame_is_built_
 
SafepointTableBuilder safepoints_
 
LGapResolver resolver_
 
Safepoint::Kind expected_safepoint_kind_
 
ZoneList< Deoptimizer::JumpTableEntry * > jump_table_
 
bool after_push_argument_
 
bool inlined_arguments_
 
int old_position_
 
bool dynamic_frame_alignment_
 
bool support_aligned_spilled_doubles_
 
X87Stack x87_stack_
 
X87StackMap x87_stack_map_
 

Friends

class LDeferredCode
 
class LEnvironment
 
class SafepointGenerator
 
class LGapResolver
 
class X87Stack
 

Detailed Description

Definition at line 24 of file lithium-codegen-arm.h.

Member Typedef Documentation

◆ X87StackMap

Definition at line 439 of file lithium-codegen-x87.h.

Member Enumeration Documentation

◆ A1State [1/2]

Enumerator
A1_UNINITIALIZED 
A1_CONTAINS_TARGET 
A1_UNINITIALIZED 
A1_CONTAINS_TARGET 

Definition at line 213 of file lithium-codegen-mips.h.

◆ A1State [2/2]

Enumerator
A1_UNINITIALIZED 
A1_CONTAINS_TARGET 
A1_UNINITIALIZED 
A1_CONTAINS_TARGET 

Definition at line 214 of file lithium-codegen-mips64.h.

214  {
217  };

◆ EDIState [1/2]

Enumerator
EDI_UNINITIALIZED 
EDI_CONTAINS_TARGET 
EDI_UNINITIALIZED 
EDI_CONTAINS_TARGET 

Definition at line 194 of file lithium-codegen-ia32.h.

◆ EDIState [2/2]

Enumerator
EDI_UNINITIALIZED 
EDI_CONTAINS_TARGET 
EDI_UNINITIALIZED 
EDI_CONTAINS_TARGET 

Definition at line 221 of file lithium-codegen-x87.h.

221  {
224  };

◆ IntegerSignedness [1/7]

Enumerator
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 

Definition at line 105 of file lithium-codegen-arm.h.

◆ IntegerSignedness [2/7]

Enumerator
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 

Definition at line 84 of file lithium-codegen-arm64.h.

◆ IntegerSignedness [3/7]

Enumerator
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 

Definition at line 95 of file lithium-codegen-ia32.h.

◆ IntegerSignedness [4/7]

Enumerator
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 

Definition at line 103 of file lithium-codegen-mips.h.

◆ IntegerSignedness [5/7]

Enumerator
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 

Definition at line 104 of file lithium-codegen-mips64.h.

◆ IntegerSignedness [6/7]

Enumerator
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 

Definition at line 88 of file lithium-codegen-x64.h.

◆ IntegerSignedness [7/7]

Enumerator
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 
SIGNED_INT32 
UNSIGNED_INT32 

Definition at line 129 of file lithium-codegen-x87.h.

◆ R1State

Enumerator
R1_UNINITIALIZED 
R1_CONTAINS_TARGET 

Definition at line 219 of file lithium-codegen-arm.h.

◆ RDIState

Enumerator
RDI_UNINITIALIZED 
RDI_CONTAINS_TARGET 

Definition at line 191 of file lithium-codegen-x64.h.

◆ SafepointMode [1/7]

Enumerator
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 

Definition at line 181 of file lithium-codegen-arm.h.

◆ SafepointMode [2/7]

Enumerator
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 

Definition at line 289 of file lithium-codegen-arm64.h.

◆ SafepointMode [3/7]

Enumerator
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 

Definition at line 161 of file lithium-codegen-ia32.h.

◆ SafepointMode [4/7]

Enumerator
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 

Definition at line 181 of file lithium-codegen-mips.h.

◆ SafepointMode [5/7]

Enumerator
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 

Definition at line 182 of file lithium-codegen-mips64.h.

◆ SafepointMode [6/7]

Enumerator
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 

Definition at line 156 of file lithium-codegen-x64.h.

◆ SafepointMode [7/7]

Enumerator
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS 
RECORD_SIMPLE_SAFEPOINT 
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 

Definition at line 190 of file lithium-codegen-x87.h.

◆ StackMode

Enumerator
kMustUseFramePointer 
kCanUseStackPointer 

Definition at line 90 of file lithium-codegen-arm64.h.

◆ X87OperandType

Enumerator
kX87DoubleOperand 
kX87FloatOperand 
kX87IntOperand 

Definition at line 82 of file lithium-codegen-x87.h.

Constructor & Destructor Documentation

◆ LCodeGen() [1/7]

v8::internal::LCodeGen::LCodeGen ( LChunk * chunk,
MacroAssembler * assembler,
CompilationInfo * info 
)
inline

Definition at line 26 of file lithium-codegen-arm.h.

27  : LCodeGenBase(chunk, assembler, info),
28  deoptimizations_(4, info->zone()),
29  jump_table_(4, info->zone()),
30  deoptimization_literals_(8, info->zone()),
32  scope_(info->scope()),
33  translations_(info->zone()),
34  deferred_(8, info->zone()),
35  osr_pc_offset_(-1),
36  frame_is_built_(false),
37  safepoints_(info->zone()),
38  resolver_(this),
39  expected_safepoint_kind_(Safepoint::kSimple) {
41  }
TranslationBuffer translations_
SafepointTableBuilder safepoints_
ZoneList< Handle< Object > > deoptimization_literals_
void PopulateDeoptimizationLiteralsWithInlinedFunctions()
ZoneList< LEnvironment * > deoptimizations_
LPlatformChunk * chunk() const
Safepoint::Kind expected_safepoint_kind_
ZoneList< LDeferredCode * > deferred_
ZoneList< Deoptimizer::JumpTableEntry > jump_table_

References PopulateDeoptimizationLiteralsWithInlinedFunctions().

+ Here is the call graph for this function:

◆ LCodeGen() [2/7]

v8::internal::LCodeGen::LCodeGen ( LChunk * chunk,
MacroAssembler * assembler,
CompilationInfo * info 
)
inline

Definition at line 27 of file lithium-codegen-arm64.h.

28  : LCodeGenBase(chunk, assembler, info),
29  deoptimizations_(4, info->zone()),
30  jump_table_(4, info->zone()),
31  deoptimization_literals_(8, info->zone()),
33  scope_(info->scope()),
34  translations_(info->zone()),
35  deferred_(8, info->zone()),
36  osr_pc_offset_(-1),
37  frame_is_built_(false),
38  safepoints_(info->zone()),
39  resolver_(this),
40  expected_safepoint_kind_(Safepoint::kSimple),
41  after_push_argument_(false),
42  inlined_arguments_(false) {
44  }

References PopulateDeoptimizationLiteralsWithInlinedFunctions().

+ Here is the call graph for this function:

◆ ~LCodeGen()

v8::internal::LCodeGen::~LCodeGen ( )
inline

Definition at line 46 of file lithium-codegen-arm64.h.

46  {
48  }
#define DCHECK(condition)
Definition: logging.h:205

References after_push_argument_, DCHECK, and inlined_arguments_.

◆ LCodeGen() [3/7]

v8::internal::LCodeGen::LCodeGen ( LChunk chunk,
MacroAssembler assembler,
CompilationInfo info 
)
inline

Definition at line 28 of file lithium-codegen-ia32.h.

29  : LCodeGenBase(chunk, assembler, info),
30  deoptimizations_(4, info->zone()),
31  jump_table_(4, info->zone()),
32  deoptimization_literals_(8, info->zone()),
34  scope_(info->scope()),
35  translations_(info->zone()),
36  deferred_(8, info->zone()),
39  osr_pc_offset_(-1),
40  frame_is_built_(false),
41  safepoints_(info->zone()),
42  resolver_(this),
43  expected_safepoint_kind_(Safepoint::kSimple) {
45  }

References PopulateDeoptimizationLiteralsWithInlinedFunctions().

+ Here is the call graph for this function:

◆ LCodeGen() [4/7]

v8::internal::LCodeGen::LCodeGen ( LChunk chunk,
MacroAssembler assembler,
CompilationInfo info 
)
inline

Definition at line 25 of file lithium-codegen-mips.h.

26  : LCodeGenBase(chunk, assembler, info),
27  deoptimizations_(4, info->zone()),
28  jump_table_(4, info->zone()),
29  deoptimization_literals_(8, info->zone()),
31  scope_(info->scope()),
32  translations_(info->zone()),
33  deferred_(8, info->zone()),
34  osr_pc_offset_(-1),
35  frame_is_built_(false),
36  safepoints_(info->zone()),
37  resolver_(this),
38  expected_safepoint_kind_(Safepoint::kSimple) {
40  }

References PopulateDeoptimizationLiteralsWithInlinedFunctions().

+ Here is the call graph for this function:

◆ LCodeGen() [5/7]

v8::internal::LCodeGen::LCodeGen ( LChunk chunk,
MacroAssembler assembler,
CompilationInfo info 
)
inline

Definition at line 25 of file lithium-codegen-mips64.h.

26  : LCodeGenBase(chunk, assembler, info),
27  deoptimizations_(4, info->zone()),
28  jump_table_(4, info->zone()),
29  deoptimization_literals_(8, info->zone()),
31  scope_(info->scope()),
32  translations_(info->zone()),
33  deferred_(8, info->zone()),
34  osr_pc_offset_(-1),
35  frame_is_built_(false),
36  safepoints_(info->zone()),
37  resolver_(this),
38  expected_safepoint_kind_(Safepoint::kSimple) {
40  }

References PopulateDeoptimizationLiteralsWithInlinedFunctions().

+ Here is the call graph for this function:

◆ LCodeGen() [6/7]

v8::internal::LCodeGen::LCodeGen ( LChunk chunk,
MacroAssembler assembler,
CompilationInfo info 
)
inline

Definition at line 27 of file lithium-codegen-x64.h.

28  : LCodeGenBase(chunk, assembler, info),
29  deoptimizations_(4, info->zone()),
30  jump_table_(4, info->zone()),
31  deoptimization_literals_(8, info->zone()),
33  scope_(info->scope()),
34  translations_(info->zone()),
35  deferred_(8, info->zone()),
36  osr_pc_offset_(-1),
37  frame_is_built_(false),
38  safepoints_(info->zone()),
39  resolver_(this),
40  expected_safepoint_kind_(Safepoint::kSimple) {
42  }

References PopulateDeoptimizationLiteralsWithInlinedFunctions().

+ Here is the call graph for this function:

◆ LCodeGen() [7/7]

v8::internal::LCodeGen::LCodeGen ( LChunk chunk,
MacroAssembler assembler,
CompilationInfo info 
)
inline

Definition at line 29 of file lithium-codegen-x87.h.

30  : LCodeGenBase(chunk, assembler, info),
31  deoptimizations_(4, info->zone()),
32  jump_table_(4, info->zone()),
33  deoptimization_literals_(8, info->zone()),
35  scope_(info->scope()),
36  translations_(info->zone()),
37  deferred_(8, info->zone()),
40  osr_pc_offset_(-1),
41  frame_is_built_(false),
42  x87_stack_(assembler),
43  safepoints_(info->zone()),
44  resolver_(this),
45  expected_safepoint_kind_(Safepoint::kSimple) {
47  }

References PopulateDeoptimizationLiteralsWithInlinedFunctions().

+ Here is the call graph for this function:

Member Function Documentation

◆ AddDeferredCode() [1/7]

void v8::internal::LCodeGen::AddDeferredCode ( LDeferredCode* code)
inlineprivate

Definition at line 165 of file lithium-codegen-arm.h.

165 { deferred_.Add(code, zone()); }

References deferred_.

Referenced by v8::internal::LDeferredCode::LDeferredCode().

+ Here is the caller graph for this function:

◆ AddDeferredCode() [2/7]

void v8::internal::LCodeGen::AddDeferredCode ( LDeferredCode code)
inlineprivate

Definition at line 263 of file lithium-codegen-arm64.h.

263 { deferred_.Add(code, zone()); }

References deferred_.

◆ AddDeferredCode() [3/7]

void v8::internal::LCodeGen::AddDeferredCode ( LDeferredCode code)
inlineprivate

Definition at line 144 of file lithium-codegen-ia32.h.

144 { deferred_.Add(code, zone()); }

References deferred_.

◆ AddDeferredCode() [4/7]

void v8::internal::LCodeGen::AddDeferredCode ( LDeferredCode code)
inlineprivate

Definition at line 165 of file lithium-codegen-mips.h.

165 { deferred_.Add(code, zone()); }

References deferred_.

◆ AddDeferredCode() [5/7]

void v8::internal::LCodeGen::AddDeferredCode ( LDeferredCode code)
inlineprivate

Definition at line 166 of file lithium-codegen-mips64.h.

166 { deferred_.Add(code, zone()); }

References deferred_.

◆ AddDeferredCode() [6/7]

void v8::internal::LCodeGen::AddDeferredCode ( LDeferredCode code)
inlineprivate

Definition at line 138 of file lithium-codegen-x64.h.

138 { deferred_.Add(code, zone()); }

References deferred_.

◆ AddDeferredCode() [7/7]

void v8::internal::LCodeGen::AddDeferredCode ( LDeferredCode code)
inlineprivate

Definition at line 176 of file lithium-codegen-x87.h.

176 { deferred_.Add(code, zone()); }

References deferred_.

◆ AddToTranslation() [1/7]

void v8::internal::LCodeGen::AddToTranslation ( LEnvironment* environment,
Translation *  translation,
LOperand* op,
bool  is_tagged,
bool  is_uint32,
int* object_index_pointer,
int* dematerialized_index_pointer 
)
private

Definition at line 662 of file lithium-codegen-arm.cc.

668  {
669  if (op == LEnvironment::materialization_marker()) {
670  int object_index = (*object_index_pointer)++;
671  if (environment->ObjectIsDuplicateAt(object_index)) {
672  int dupe_of = environment->ObjectDuplicateOfAt(object_index);
673  translation->DuplicateObject(dupe_of);
674  return;
675  }
676  int object_length = environment->ObjectLengthAt(object_index);
677  if (environment->ObjectIsArgumentsAt(object_index)) {
678  translation->BeginArgumentsObject(object_length);
679  } else {
680  translation->BeginCapturedObject(object_length);
681  }
682  int dematerialized_index = *dematerialized_index_pointer;
683  int env_offset = environment->translation_size() + dematerialized_index;
684  *dematerialized_index_pointer += object_length;
685  for (int i = 0; i < object_length; ++i) {
686  LOperand* value = environment->values()->at(env_offset + i);
687  AddToTranslation(environment,
688  translation,
689  value,
690  environment->HasTaggedValueAt(env_offset + i),
691  environment->HasUint32ValueAt(env_offset + i),
692  object_index_pointer,
693  dematerialized_index_pointer);
694  }
695  return;
696  }
697 
698  if (op->IsStackSlot()) {
699  if (is_tagged) {
700  translation->StoreStackSlot(op->index());
701  } else if (is_uint32) {
702  translation->StoreUint32StackSlot(op->index());
703  } else {
704  translation->StoreInt32StackSlot(op->index());
705  }
706  } else if (op->IsDoubleStackSlot()) {
707  translation->StoreDoubleStackSlot(op->index());
708  } else if (op->IsRegister()) {
709  Register reg = ToRegister(op);
710  if (is_tagged) {
711  translation->StoreRegister(reg);
712  } else if (is_uint32) {
713  translation->StoreUint32Register(reg);
714  } else {
715  translation->StoreInt32Register(reg);
716  }
717  } else if (op->IsDoubleRegister()) {
719  translation->StoreDoubleRegister(reg);
720  } else if (op->IsConstantOperand()) {
721  HConstant* constant = chunk()->LookupConstant(LConstantOperand::cast(op));
722  int src_index = DefineDeoptimizationLiteral(constant->handle(isolate()));
723  translation->StoreLiteral(src_index);
724  } else {
725  UNREACHABLE();
726  }
727 }
DwVfpRegister ToDoubleRegister(LOperand *op) const
void AddToTranslation(LEnvironment *environment, Translation *translation, LOperand *op, bool is_tagged, bool is_uint32, int *object_index_pointer, int *dematerialized_index_pointer)
int DefineDeoptimizationLiteral(Handle< Object > literal)
Register ToRegister(LOperand *op) const
#define UNREACHABLE()
Definition: logging.h:30
DwVfpRegister DoubleRegister

References chunk(), DefineDeoptimizationLiteral(), v8::internal::LOperand::index(), ToDoubleRegister(), ToRegister(), and UNREACHABLE.

Referenced by WriteTranslation().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AddToTranslation() [2/7]

void v8::internal::LCodeGen::AddToTranslation ( LEnvironment environment,
Translation *  translation,
LOperand op,
bool  is_tagged,
bool  is_uint32,
int object_index_pointer,
int dematerialized_index_pointer 
)
private

◆ AddToTranslation() [3/7]

void v8::internal::LCodeGen::AddToTranslation ( LEnvironment environment,
Translation *  translation,
LOperand op,
bool  is_tagged,
bool  is_uint32,
int object_index_pointer,
int dematerialized_index_pointer 
)
private

◆ AddToTranslation() [4/7]

void v8::internal::LCodeGen::AddToTranslation ( LEnvironment environment,
Translation *  translation,
LOperand op,
bool  is_tagged,
bool  is_uint32,
int object_index_pointer,
int dematerialized_index_pointer 
)
private

◆ AddToTranslation() [5/7]

void v8::internal::LCodeGen::AddToTranslation ( LEnvironment environment,
Translation *  translation,
LOperand op,
bool  is_tagged,
bool  is_uint32,
int object_index_pointer,
int dematerialized_index_pointer 
)
private

◆ AddToTranslation() [6/7]

void v8::internal::LCodeGen::AddToTranslation ( LEnvironment environment,
Translation *  translation,
LOperand op,
bool  is_tagged,
bool  is_uint32,
int object_index_pointer,
int dematerialized_index_pointer 
)
private

◆ AddToTranslation() [7/7]

void v8::internal::LCodeGen::AddToTranslation ( LEnvironment environment,
Translation *  translation,
LOperand op,
bool  is_tagged,
bool  is_uint32,
int object_index_pointer,
int dematerialized_index_pointer 
)
private

◆ BuildFastArrayOperand() [1/3]

Operand v8::internal::LCodeGen::BuildFastArrayOperand ( LOperand elements_pointer,
LOperand key,
Representation  key_representation,
ElementsKind  elements_kind,
uint32_t  base_offset 
)
private

◆ BuildFastArrayOperand() [2/3]

Operand v8::internal::LCodeGen::BuildFastArrayOperand ( LOperand elements_pointer,
LOperand key,
Representation  key_representation,
ElementsKind  elements_kind,
uint32_t  base_offset 
)
private

◆ BuildFastArrayOperand() [3/3]

Operand v8::internal::LCodeGen::BuildFastArrayOperand ( LOperand elements_pointer,
LOperand key,
Representation  key_representation,
ElementsKind  elements_kind,
uint32_t  base_offset 
)
private

◆ BuildSeqStringOperand() [1/7]

MemOperand v8::internal::LCodeGen::BuildSeqStringOperand ( Register  string,
LOperand* index,
String::Encoding  encoding 
)
private

Definition at line 1970 of file lithium-codegen-arm.cc.

1972  {
1973  if (index->IsConstantOperand()) {
1974  int offset = ToInteger32(LConstantOperand::cast(index));
1975  if (encoding == String::TWO_BYTE_ENCODING) {
1976  offset *= kUC16Size;
1977  }
1978  STATIC_ASSERT(kCharSize == 1);
1979  return FieldMemOperand(string, SeqString::kHeaderSize + offset);
1980  }
1981  Register scratch = scratch0();
1982  DCHECK(!scratch.is(string));
1983  DCHECK(!scratch.is(ToRegister(index)));
1984  if (encoding == String::ONE_BYTE_ENCODING) {
1985  __ add(scratch, string, Operand(ToRegister(index)));
1986  } else {
1987  STATIC_ASSERT(kUC16Size == 2);
1988  __ add(scratch, string, Operand(ToRegister(index), LSL, 1));
1989  }
1990  return FieldMemOperand(scratch, SeqString::kHeaderSize);
1991 }
int32_t ToInteger32(LConstantOperand *op) const
static const int kHeaderSize
Definition: objects.h:8941
#define __
MemOperand FieldMemOperand(Register object, int offset)
const int kUC16Size
Definition: globals.h:187
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
const int kCharSize
Definition: globals.h:122

References __, DCHECK, v8::internal::FieldMemOperand(), v8::internal::Register::is(), v8::internal::kCharSize, v8::internal::SeqString::kHeaderSize, v8::internal::kUC16Size, v8::internal::LSL, v8::internal::String::ONE_BYTE_ENCODING, scratch0(), v8::internal::STATIC_ASSERT(), ToInteger32(), ToRegister(), and v8::internal::String::TWO_BYTE_ENCODING.

+ Here is the call graph for this function:

◆ BuildSeqStringOperand() [2/7]

Operand v8::internal::LCodeGen::BuildSeqStringOperand ( Register  string,
LOperand index,
String::Encoding  encoding 
)
private

◆ BuildSeqStringOperand() [3/7]

MemOperand v8::internal::LCodeGen::BuildSeqStringOperand ( Register  string,
LOperand index,
String::Encoding  encoding 
)
private

◆ BuildSeqStringOperand() [4/7]

MemOperand v8::internal::LCodeGen::BuildSeqStringOperand ( Register  string,
LOperand index,
String::Encoding  encoding 
)
private

◆ BuildSeqStringOperand() [5/7]

Operand v8::internal::LCodeGen::BuildSeqStringOperand ( Register  string,
LOperand index,
String::Encoding  encoding 
)
private

◆ BuildSeqStringOperand() [6/7]

Operand v8::internal::LCodeGen::BuildSeqStringOperand ( Register  string,
LOperand index,
String::Encoding  encoding 
)
private

◆ BuildSeqStringOperand() [7/7]

MemOperand v8::internal::LCodeGen::BuildSeqStringOperand ( Register  string,
Register  temp,
LOperand index,
String::Encoding  encoding 
)
private

Definition at line 4783 of file lithium-codegen-arm64.cc.

4786  {
4787  if (index->IsConstantOperand()) {
4788  int offset = ToInteger32(LConstantOperand::cast(index));
4789  if (encoding == String::TWO_BYTE_ENCODING) {
4790  offset *= kUC16Size;
4791  }
4792  STATIC_ASSERT(kCharSize == 1);
4793  return FieldMemOperand(string, SeqString::kHeaderSize + offset);
4794  }
4795 
4796  __ Add(temp, string, SeqString::kHeaderSize - kHeapObjectTag);
4797  if (encoding == String::ONE_BYTE_ENCODING) {
4798  return MemOperand(temp, ToRegister32(index), SXTW);
4799  } else {
4800  STATIC_ASSERT(kUC16Size == 2);
4801  return MemOperand(temp, ToRegister32(index), SXTW, 1);
4802  }
4803 }
Register ToRegister32(LOperand *op) const
#define __
const int kHeapObjectTag
Definition: v8.h:5737

References __, v8::internal::FieldMemOperand(), v8::internal::kCharSize, v8::internal::SeqString::kHeaderSize, v8::internal::kHeapObjectTag, v8::internal::kUC16Size, v8::internal::String::ONE_BYTE_ENCODING, v8::internal::STATIC_ASSERT(), v8::internal::SXTW, ToInteger32(), ToRegister32(), and v8::internal::String::TWO_BYTE_ENCODING.

+ Here is the call graph for this function:

◆ CallCode() [1/7]

void v8::internal::LCodeGen::CallCode ( Handle< Code > code,
RelocInfo::Mode  mode,
LInstruction* instr 
)
private

Definition at line 381 of file lithium-codegen-arm64.cc.

383  {
385 }
void CallCodeGeneric(Handle< Code > code, RelocInfo::Mode mode, LInstruction *instr, SafepointMode safepoint_mode, TargetAddressStorageMode storage_mode=CAN_INLINE_TARGET_ADDRESS)
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long mode(MIPS only)") DEFINE_BOOL(enable_always_align_csp

References CallCodeGeneric(), mode(), and RECORD_SIMPLE_SAFEPOINT.

+ Here is the call graph for this function:

◆ CallCode() [2/7]

void v8::internal::LCodeGen::CallCode ( Handle< Code code,
RelocInfo::Mode  mode,
LInstruction instr 
)
private

◆ CallCode() [3/7]

void v8::internal::LCodeGen::CallCode ( Handle< Code code,
RelocInfo::Mode  mode,
LInstruction instr 
)
private

◆ CallCode() [4/7]

void v8::internal::LCodeGen::CallCode ( Handle< Code code,
RelocInfo::Mode  mode,
LInstruction instr 
)
private

◆ CallCode() [5/7]

void v8::internal::LCodeGen::CallCode ( Handle< Code code,
RelocInfo::Mode  mode,
LInstruction instr 
)
private

◆ CallCode() [6/7]

void v8::internal::LCodeGen::CallCode ( Handle< Code code,
RelocInfo::Mode  mode,
LInstruction instr 
)
private

◆ CallCode() [7/7]

void v8::internal::LCodeGen::CallCode ( Handle< Code code,
RelocInfo::Mode  mode,
LInstruction instr,
TargetAddressStorageMode  storage_mode = CAN_INLINE_TARGET_ADDRESS 
)
private

Definition at line 740 of file lithium-codegen-arm.cc.

743  {
744  CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT, storage_mode);
745 }

References CallCodeGeneric(), mode(), and RECORD_SIMPLE_SAFEPOINT.

+ Here is the call graph for this function:

◆ CallCodeGeneric() [1/7]

void v8::internal::LCodeGen::CallCodeGeneric ( Handle< Code code,
RelocInfo::Mode  mode,
LInstruction instr,
SafepointMode  safepoint_mode 
)
private

Definition at line 388 of file lithium-codegen-arm64.cc.

391  {
392  DCHECK(instr != NULL);
393 
394  Assembler::BlockPoolsScope scope(masm_);
395  __ Call(code, mode);
396  RecordSafepointWithLazyDeopt(instr, safepoint_mode);
397 
398  if ((code->kind() == Code::BINARY_OP_IC) ||
399  (code->kind() == Code::COMPARE_IC)) {
400  // Signal that we don't inline smi code before these stubs in the
401  // optimizing code generator.
403  }
404 }
static void EmitNotInlined(MacroAssembler *masm)
void RecordSafepointWithLazyDeopt(LInstruction *instr, SafepointMode safepoint_mode)
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be NULL

References __, DCHECK, v8::internal::InlineSmiCheckInfo::EmitNotInlined(), mode(), NULL, RecordSafepointWithLazyDeopt(), and scope().

+ Here is the call graph for this function:

◆ CallCodeGeneric() [2/7]

void v8::internal::LCodeGen::CallCodeGeneric ( Handle< Code code,
RelocInfo::Mode  mode,
LInstruction instr,
SafepointMode  safepoint_mode 
)
private

◆ CallCodeGeneric() [3/7]

void v8::internal::LCodeGen::CallCodeGeneric ( Handle< Code code,
RelocInfo::Mode  mode,
LInstruction instr,
SafepointMode  safepoint_mode 
)
private

◆ CallCodeGeneric() [4/7]

void v8::internal::LCodeGen::CallCodeGeneric ( Handle< Code code,
RelocInfo::Mode  mode,
LInstruction instr,
SafepointMode  safepoint_mode 
)
private

◆ CallCodeGeneric() [5/7]

void v8::internal::LCodeGen::CallCodeGeneric ( Handle< Code code,
RelocInfo::Mode  mode,
LInstruction instr,
SafepointMode  safepoint_mode 
)
private

◆ CallCodeGeneric() [6/7]

void v8::internal::LCodeGen::CallCodeGeneric ( Handle< Code code,
RelocInfo::Mode  mode,
LInstruction instr,
SafepointMode  safepoint_mode,
int  argc 
)
private

◆ CallCodeGeneric() [7/7]

void v8::internal::LCodeGen::CallCodeGeneric ( Handle< Code code,
RelocInfo::Mode  mode,
LInstruction instr,
SafepointMode  safepoint_mode,
TargetAddressStorageMode  storage_mode = CAN_INLINE_TARGET_ADDRESS 
)
private

Definition at line 748 of file lithium-codegen-arm.cc.

752  {
753  DCHECK(instr != NULL);
754  // Block literal pool emission to ensure nop indicating no inlined smi code
755  // is in the correct position.
756  Assembler::BlockConstPoolScope block_const_pool(masm());
757  __ Call(code, mode, TypeFeedbackId::None(), al, storage_mode);
758  RecordSafepointWithLazyDeopt(instr, safepoint_mode);
759 
760  // Signal that we don't inline smi code before these stubs in the
761  // optimizing code generator.
762  if (code->kind() == Code::BINARY_OP_IC ||
763  code->kind() == Code::COMPARE_IC) {
764  __ nop();
765  }
766 }
friend class BlockConstPoolScope
static TypeFeedbackId None()
Definition: utils.h:945

References __, v8::internal::al, DCHECK, mode(), v8::internal::TypeFeedbackId::None(), NULL, and RecordSafepointWithLazyDeopt().

Referenced by CallCode(), and DoDeferredInstanceOfKnownGlobal().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CallCodeSize()

int v8::internal::LCodeGen::CallCodeSize ( Handle< Code code,
RelocInfo::Mode  mode 
)
private

Definition at line 730 of file lithium-codegen-arm.cc.

730  {
731  int size = masm()->CallSize(code, mode);
732  if (code->kind() == Code::BINARY_OP_IC ||
733  code->kind() == Code::COMPARE_IC) {
734  size += Assembler::kInstrSize; // extra nop() added in CallCodeGeneric.
735  }
736  return size;
737 }
static const int kInstrSize
enable harmony numeric enable harmony object literal extensions Optimize object size

References v8::internal::Assembler::kInstrSize, mode(), and size.

Referenced by DoDeferredInstanceOfKnownGlobal().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CallKnownFunction() [1/7]

void v8::internal::LCodeGen::CallKnownFunction ( Handle< JSFunction > function,
int  formal_parameter_count,
int  arity,
LInstruction* instr,
A1State  a1_state 
)
private

Definition at line 3548 of file lithium-codegen-mips.cc.

3552  {
3553  bool dont_adapt_arguments =
3554  formal_parameter_count == SharedFunctionInfo::kDontAdaptArgumentsSentinel;
3555  bool can_invoke_directly =
3556  dont_adapt_arguments || formal_parameter_count == arity;
3557 
3558  LPointerMap* pointers = instr->pointer_map();
3559 
3560  if (can_invoke_directly) {
3561  if (a1_state == A1_UNINITIALIZED) {
3562  __ li(a1, function);
3563  }
3564 
3565  // Change context.
3567 
3568  // Set r0 to arguments count if adaption is not needed. Assumes that r0
3569  // is available to write to at this point.
3570  if (dont_adapt_arguments) {
3571  __ li(a0, Operand(arity));
3572  }
3573 
3574  // Invoke function.
3576  __ Call(at);
3577 
3578  // Set up deoptimization.
3580  } else {
3581  SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
3582  ParameterCount count(arity);
3583  ParameterCount expected(formal_parameter_count);
3584  __ InvokeFunction(function, expected, count, CALL_FUNCTION, generator);
3585  }
3586 }
static const int kContextOffset
Definition: objects.h:7381
static const int kCodeEntryOffset
Definition: objects.h:7376
static const int kDontAdaptArgumentsSentinel
Definition: objects.h:6888
#define __
@ CALL_FUNCTION
const Register cp

References __, A1_UNINITIALIZED, CALL_FUNCTION, v8::internal::cp, v8::internal::FieldMemOperand(), v8::internal::JSFunction::kCodeEntryOffset, v8::internal::JSFunction::kContextOffset, v8::internal::SharedFunctionInfo::kDontAdaptArgumentsSentinel, v8::internal::LInstruction::pointer_map(), RECORD_SIMPLE_SAFEPOINT, RecordSafepointWithLazyDeopt(), and SafepointGenerator.

+ Here is the call graph for this function:

◆ CallKnownFunction() [2/7]

void v8::internal::LCodeGen::CallKnownFunction ( Handle< JSFunction function,
int  formal_parameter_count,
int  arity,
LInstruction instr,
A1State  a1_state 
)
private

◆ CallKnownFunction() [3/7]

void v8::internal::LCodeGen::CallKnownFunction ( Handle< JSFunction function,
int  formal_parameter_count,
int  arity,
LInstruction instr,
EDIState  edi_state 
)
private

◆ CallKnownFunction() [4/7]

void v8::internal::LCodeGen::CallKnownFunction ( Handle< JSFunction function,
int  formal_parameter_count,
int  arity,
LInstruction instr,
EDIState  edi_state 
)
private

◆ CallKnownFunction() [5/7]

void v8::internal::LCodeGen::CallKnownFunction ( Handle< JSFunction function,
int  formal_parameter_count,
int  arity,
LInstruction instr,
R1State  r1_state 
)
private

Definition at line 3631 of file lithium-codegen-arm.cc.

3635  {
3636  bool dont_adapt_arguments =
3637  formal_parameter_count == SharedFunctionInfo::kDontAdaptArgumentsSentinel;
3638  bool can_invoke_directly =
3639  dont_adapt_arguments || formal_parameter_count == arity;
3640 
3641  LPointerMap* pointers = instr->pointer_map();
3642 
3643  if (can_invoke_directly) {
3644  if (r1_state == R1_UNINITIALIZED) {
3645  __ Move(r1, function);
3646  }
3647 
3648  // Change context.
3650 
3651  // Set r0 to arguments count if adaption is not needed. Assumes that r0
3652  // is available to write to at this point.
3653  if (dont_adapt_arguments) {
3654  __ mov(r0, Operand(arity));
3655  }
3656 
3657  // Invoke function.
3659  __ Call(ip);
3660 
3661  // Set up deoptimization.
3663  } else {
3664  SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
3665  ParameterCount count(arity);
3666  ParameterCount expected(formal_parameter_count);
3667  __ InvokeFunction(function, expected, count, CALL_FUNCTION, generator);
3668  }
3669 }
const Register r0
const Register ip
const Register r1

References __, CALL_FUNCTION, v8::internal::cp, v8::internal::FieldMemOperand(), v8::internal::ip, v8::internal::JSFunction::kCodeEntryOffset, v8::internal::JSFunction::kContextOffset, v8::internal::SharedFunctionInfo::kDontAdaptArgumentsSentinel, v8::internal::LInstruction::pointer_map(), v8::internal::r0, v8::internal::r1, R1_UNINITIALIZED, RECORD_SIMPLE_SAFEPOINT, RecordSafepointWithLazyDeopt(), and SafepointGenerator.

+ Here is the call graph for this function:

◆ CallKnownFunction() [6/7]

void v8::internal::LCodeGen::CallKnownFunction ( Handle< JSFunction function,
int  formal_parameter_count,
int  arity,
LInstruction instr,
RDIState  rdi_state 
)
private

◆ CallKnownFunction() [7/7]

void v8::internal::LCodeGen::CallKnownFunction ( Handle< JSFunction function,
int  formal_parameter_count,
int  arity,
LInstruction instr,
Register  function_reg = NoReg 
)
private

Definition at line 1987 of file lithium-codegen-arm64.cc.

1991  {
1992  bool dont_adapt_arguments =
1993  formal_parameter_count == SharedFunctionInfo::kDontAdaptArgumentsSentinel;
1994  bool can_invoke_directly =
1995  dont_adapt_arguments || formal_parameter_count == arity;
1996 
1997  // The function interface relies on the following register assignments.
1998  DCHECK(function_reg.Is(x1) || function_reg.IsNone());
1999  Register arity_reg = x0;
2000 
2001  LPointerMap* pointers = instr->pointer_map();
2002 
2003  // If necessary, load the function object.
2004  if (function_reg.IsNone()) {
2005  function_reg = x1;
2006  __ LoadObject(function_reg, function);
2007  }
2008 
2009  if (FLAG_debug_code) {
2010  Label is_not_smi;
2011  // Try to confirm that function_reg (x1) is a tagged pointer.
2012  __ JumpIfNotSmi(function_reg, &is_not_smi);
2013  __ Abort(kExpectedFunctionObject);
2014  __ Bind(&is_not_smi);
2015  }
2016 
2017  if (can_invoke_directly) {
2018  // Change context.
2019  __ Ldr(cp, FieldMemOperand(function_reg, JSFunction::kContextOffset));
2020 
2021  // Set the arguments count if adaption is not needed. Assumes that x0 is
2022  // available to write to at this point.
2023  if (dont_adapt_arguments) {
2024  __ Mov(arity_reg, arity);
2025  }
2026 
2027  // Invoke function.
2028  __ Ldr(x10, FieldMemOperand(function_reg, JSFunction::kCodeEntryOffset));
2029  __ Call(x10);
2030 
2031  // Set up deoptimization.
2033  } else {
2034  SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
2035  ParameterCount count(arity);
2036  ParameterCount expected(formal_parameter_count);
2037  __ InvokeFunction(function_reg, expected, count, CALL_FUNCTION, generator);
2038  }
2039 }

References __, CALL_FUNCTION, v8::internal::cp, DCHECK, v8::internal::FieldMemOperand(), v8::internal::CPURegister::Is(), v8::internal::CPURegister::IsNone(), v8::internal::JSFunction::kCodeEntryOffset, v8::internal::JSFunction::kContextOffset, v8::internal::SharedFunctionInfo::kDontAdaptArgumentsSentinel, v8::internal::LInstruction::pointer_map(), RECORD_SIMPLE_SAFEPOINT, RecordSafepointWithLazyDeopt(), and SafepointGenerator.

+ Here is the call graph for this function:

◆ CallRuntime() [1/14]

void v8::internal::LCodeGen::CallRuntime ( const Runtime::Function* fun,
int  argc,
LInstruction* instr,
SaveFPRegsMode  save_doubles = kDontSaveFPRegs 
)
private

◆ CallRuntime() [2/14]

void v8::internal::LCodeGen::CallRuntime ( const Runtime::Function fun,
int  argc,
LInstruction instr,
SaveFPRegsMode  save_doubles = kDontSaveFPRegs 
)
private

◆ CallRuntime() [3/14]

void v8::internal::LCodeGen::CallRuntime ( const Runtime::Function function,
int  num_arguments,
LInstruction instr,
SaveFPRegsMode  save_doubles = kDontSaveFPRegs 
)
private

Definition at line 769 of file lithium-codegen-arm.cc.

772  {
773  DCHECK(instr != NULL);
774 
775  __ CallRuntime(function, num_arguments, save_doubles);
776 
778 }
void CallRuntime(const Runtime::Function *function, int num_arguments, LInstruction *instr, SaveFPRegsMode save_doubles=kDontSaveFPRegs)

References __, DCHECK, NULL, RECORD_SIMPLE_SAFEPOINT, and RecordSafepointWithLazyDeopt().

Referenced by CallRuntime(), and GeneratePrologue().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CallRuntime() [4/14]

void v8::internal::LCodeGen::CallRuntime ( const Runtime::Function function,
int  num_arguments,
LInstruction instr,
SaveFPRegsMode  save_doubles = kDontSaveFPRegs 
)
private

◆ CallRuntime() [5/14]

void v8::internal::LCodeGen::CallRuntime ( const Runtime::Function function,
int  num_arguments,
LInstruction instr,
SaveFPRegsMode  save_doubles = kDontSaveFPRegs 
)
private

◆ CallRuntime() [6/14]

void v8::internal::LCodeGen::CallRuntime ( const Runtime::Function function,
int  num_arguments,
LInstruction instr,
SaveFPRegsMode  save_doubles = kDontSaveFPRegs 
)
private

◆ CallRuntime() [7/14]

void v8::internal::LCodeGen::CallRuntime ( const Runtime::Function function,
int  num_arguments,
LInstruction instr,
SaveFPRegsMode  save_doubles = kDontSaveFPRegs 
)
private

◆ CallRuntime() [8/14]

void v8::internal::LCodeGen::CallRuntime ( Runtime::FunctionId  id,
int  argc,
LInstruction instr 
)
inlineprivate

Definition at line 180 of file lithium-codegen-ia32.h.

182  {
183  const Runtime::Function* function = Runtime::FunctionForId(id);
184  CallRuntime(function, argc, instr);
185  }
static const Function * FunctionForId(FunctionId id)
Definition: runtime.cc:9312

References CallRuntime(), and v8::internal::Runtime::FunctionForId().

+ Here is the call graph for this function:

◆ CallRuntime() [9/14]

void v8::internal::LCodeGen::CallRuntime ( Runtime::FunctionId  id,
int  argc,
LInstruction instr 
)
inlineprivate

Definition at line 207 of file lithium-codegen-x87.h.

209  {
210  const Runtime::Function* function = Runtime::FunctionForId(id);
211  CallRuntime(function, argc, instr);
212  }

References CallRuntime(), and v8::internal::Runtime::FunctionForId().

+ Here is the call graph for this function:

◆ CallRuntime() [10/14]

void v8::internal::LCodeGen::CallRuntime ( Runtime::FunctionId  id,
int  num_arguments,
LInstruction instr 
)
inlineprivate

Definition at line 206 of file lithium-codegen-arm.h.

208  {
209  const Runtime::Function* function = Runtime::FunctionForId(id);
210  CallRuntime(function, num_arguments, instr);
211  }

References CallRuntime(), and v8::internal::Runtime::FunctionForId().

+ Here is the call graph for this function:

◆ CallRuntime() [11/14]

void v8::internal::LCodeGen::CallRuntime ( Runtime::FunctionId  id,
int  num_arguments,
LInstruction instr 
)
inlineprivate

Definition at line 308 of file lithium-codegen-arm64.h.

310  {
311  const Runtime::Function* function = Runtime::FunctionForId(id);
312  CallRuntime(function, num_arguments, instr);
313  }

References CallRuntime(), and v8::internal::Runtime::FunctionForId().

+ Here is the call graph for this function:

◆ CallRuntime() [12/14]

void v8::internal::LCodeGen::CallRuntime ( Runtime::FunctionId  id,
int  num_arguments,
LInstruction instr 
)
inlineprivate

Definition at line 200 of file lithium-codegen-mips.h.

202  {
203  const Runtime::Function* function = Runtime::FunctionForId(id);
204  CallRuntime(function, num_arguments, instr);
205  }

References CallRuntime(), and v8::internal::Runtime::FunctionForId().

+ Here is the call graph for this function:

◆ CallRuntime() [13/14]

void v8::internal::LCodeGen::CallRuntime ( Runtime::FunctionId  id,
int  num_arguments,
LInstruction instr 
)
inlineprivate

Definition at line 201 of file lithium-codegen-mips64.h.

203  {
204  const Runtime::Function* function = Runtime::FunctionForId(id);
205  CallRuntime(function, num_arguments, instr);
206  }

References CallRuntime(), and v8::internal::Runtime::FunctionForId().

+ Here is the call graph for this function:

◆ CallRuntime() [14/14]

void v8::internal::LCodeGen::CallRuntime ( Runtime::FunctionId  id,
int  num_arguments,
LInstruction instr 
)
inlineprivate

Definition at line 177 of file lithium-codegen-x64.h.

179  {
180  const Runtime::Function* function = Runtime::FunctionForId(id);
181  CallRuntime(function, num_arguments, instr);
182  }

References CallRuntime(), and v8::internal::Runtime::FunctionForId().

+ Here is the call graph for this function:

◆ CallRuntimeFromDeferred() [1/7]

void v8::internal::LCodeGen::CallRuntimeFromDeferred ( Runtime::FunctionId  id,
int  argc,
LInstruction instr,
LOperand context 
)
private

Definition at line 796 of file lithium-codegen-arm.cc.

799  {
800  LoadContextFromDeferred(context);
801  __ CallRuntimeSaveDoubles(id);
802  RecordSafepointWithRegisters(
803  instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);
804 }
void RecordSafepointWithRegisters(LPointerMap *pointers, int arguments, Safepoint::DeoptMode mode)
void LoadContextFromDeferred(LOperand *context)

References __, LoadContextFromDeferred(), v8::internal::LInstruction::pointer_map(), and RecordSafepointWithRegisters().

Referenced by DoDeferredAllocate(), DoDeferredMathAbsTagged(), DoDeferredMathAbsTaggedHeapNumber(), DoDeferredStringCharCodeAt(), and DoDeferredStringCharFromCode().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CallRuntimeFromDeferred() [2/7]

void v8::internal::LCodeGen::CallRuntimeFromDeferred ( Runtime::FunctionId  id,
int  argc,
LInstruction instr,
LOperand context 
)
private

◆ CallRuntimeFromDeferred() [3/7]

void v8::internal::LCodeGen::CallRuntimeFromDeferred ( Runtime::FunctionId  id,
int  argc,
LInstruction instr,
LOperand context 
)
private

◆ CallRuntimeFromDeferred() [4/7]

void v8::internal::LCodeGen::CallRuntimeFromDeferred ( Runtime::FunctionId  id,
int  argc,
LInstruction instr,
LOperand context 
)
private

◆ CallRuntimeFromDeferred() [5/7]

void v8::internal::LCodeGen::CallRuntimeFromDeferred ( Runtime::FunctionId  id,
int  argc,
LInstruction instr,
LOperand context 
)
private

◆ CallRuntimeFromDeferred() [6/7]

void v8::internal::LCodeGen::CallRuntimeFromDeferred ( Runtime::FunctionId  id,
int  argc,
LInstruction instr,
LOperand context 
)
private

◆ CallRuntimeFromDeferred() [7/7]

void v8::internal::LCodeGen::CallRuntimeFromDeferred ( Runtime::FunctionId  id,
int  argc,
LInstruction instr,
LOperand context 
)
private

◆ chunk()

LPlatformChunk* v8::internal::LCodeGen::chunk ( ) const
inlineprivate

Definition at line 123 of file lithium-codegen-x64.h.

123 { return chunk_; }

Referenced by AddToTranslation(), GenerateCode(), GenerateDeferredCode(), GetStackSlotCount(), graph(), LookupDestination(), PopulateDeoptimizationLiteralsWithInlinedFunctions(), RestoreCallerDoubles(), SaveCallerDoubles(), ToOperand(), and ToOperand32().

+ Here is the caller graph for this function:

◆ DefineDeoptimizationLiteral() [1/7]

int v8::internal::LCodeGen::DefineDeoptimizationLiteral ( Handle< Object literal)
private

Definition at line 974 of file lithium-codegen-arm.cc.

974  {
975  int result = deoptimization_literals_.length();
976  for (int i = 0; i < deoptimization_literals_.length(); ++i) {
977  if (deoptimization_literals_[i].is_identical_to(literal)) return i;
978  }
979  deoptimization_literals_.Add(literal, zone());
980  return result;
981 }

References deoptimization_literals_.

Referenced by AddToTranslation(), PopulateDeoptimizationLiteralsWithInlinedFunctions(), and WriteTranslation().

+ Here is the caller graph for this function:

◆ DefineDeoptimizationLiteral() [2/7]

int v8::internal::LCodeGen::DefineDeoptimizationLiteral ( Handle< Object literal)
private

◆ DefineDeoptimizationLiteral() [3/7]

int v8::internal::LCodeGen::DefineDeoptimizationLiteral ( Handle< Object literal)
private

◆ DefineDeoptimizationLiteral() [4/7]

int v8::internal::LCodeGen::DefineDeoptimizationLiteral ( Handle< Object literal)
private

◆ DefineDeoptimizationLiteral() [5/7]

int v8::internal::LCodeGen::DefineDeoptimizationLiteral ( Handle< Object literal)
private

◆ DefineDeoptimizationLiteral() [6/7]

int v8::internal::LCodeGen::DefineDeoptimizationLiteral ( Handle< Object literal)
private

◆ DefineDeoptimizationLiteral() [7/7]

int v8::internal::LCodeGen::DefineDeoptimizationLiteral ( Handle< Object literal)
private

◆ DeoptEveryNTimes() [1/3]

bool v8::internal::LCodeGen::DeoptEveryNTimes ( )
inlineprivate

Definition at line 216 of file lithium-codegen-ia32.h.

216  {
217  return FLAG_deopt_every_n_times != 0 && !info()->IsStub();
218  }

◆ DeoptEveryNTimes() [2/3]

bool v8::internal::LCodeGen::DeoptEveryNTimes ( )
inlineprivate

Definition at line 213 of file lithium-codegen-x64.h.

213  {
214  return FLAG_deopt_every_n_times != 0 && !info()->IsStub();
215  }

◆ DeoptEveryNTimes() [3/3]

bool v8::internal::LCodeGen::DeoptEveryNTimes ( )
inlineprivate

Definition at line 243 of file lithium-codegen-x87.h.

243  {
244  return FLAG_deopt_every_n_times != 0 && !info()->IsStub();
245  }

◆ Deoptimize()

void v8::internal::LCodeGen::Deoptimize ( LInstruction instr,
const char *  detail,
Deoptimizer::BailoutType override_bailout_type = NULL 
)
private

Definition at line 1066 of file lithium-codegen-arm64.cc.

1067  {
1068  DeoptimizeBranch(instr, detail, always, NoReg, -1, override_bailout_type);
1069 }
void DeoptimizeBranch(LInstruction *instr, const char *detail, BranchType branch_type, Register reg=NoReg, int bit=-1, Deoptimizer::BailoutType *override_bailout_type=NULL)

References v8::internal::always, and DeoptimizeBranch().

+ Here is the call graph for this function:

◆ DeoptimizeBranch()

void v8::internal::LCodeGen::DeoptimizeBranch ( LInstruction instr,
const char *  detail,
BranchType  branch_type,
Register  reg = NoReg,
int  bit = -1,
Deoptimizer::BailoutType override_bailout_type = NULL 
)
private

Definition at line 991 of file lithium-codegen-arm64.cc.

993  {
994  LEnvironment* environment = instr->environment();
995  RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
996  Deoptimizer::BailoutType bailout_type =
997  info()->IsStub() ? Deoptimizer::LAZY : Deoptimizer::EAGER;
998 
999  if (override_bailout_type != NULL) {
1000  bailout_type = *override_bailout_type;
1001  }
1002 
1003  DCHECK(environment->HasBeenRegistered());
1004  DCHECK(info()->IsOptimizing() || info()->IsStub());
1005  int id = environment->deoptimization_index();
1006  Address entry =
1007  Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
1008 
1009  if (entry == NULL) {
1010  Abort(kBailoutWasNotPrepared);
1011  }
1012 
1013  if (FLAG_deopt_every_n_times != 0 && !info()->IsStub()) {
1014  Label not_zero;
1015  ExternalReference count = ExternalReference::stress_deopt_count(isolate());
1016 
1017  __ Push(x0, x1, x2);
1018  __ Mrs(x2, NZCV);
1019  __ Mov(x0, count);
1020  __ Ldr(w1, MemOperand(x0));
1021  __ Subs(x1, x1, 1);
1022  __ B(gt, &not_zero);
1023  __ Mov(w1, FLAG_deopt_every_n_times);
1024  __ Str(w1, MemOperand(x0));
1025  __ Pop(x2, x1, x0);
1027  __ Call(entry, RelocInfo::RUNTIME_ENTRY);
1028  __ Unreachable();
1029 
1030  __ Bind(&not_zero);
1031  __ Str(w1, MemOperand(x0));
1032  __ Msr(NZCV, x2);
1033  __ Pop(x2, x1, x0);
1034  }
1035 
1036  if (info()->ShouldTrapOnDeopt()) {
1037  Label dont_trap;
1038  __ B(&dont_trap, InvertBranchType(branch_type), reg, bit);
1039  __ Debug("trap_on_deopt", __LINE__, BREAK);
1040  __ Bind(&dont_trap);
1041  }
1042 
1043  Deoptimizer::Reason reason(instr->hydrogen_value()->position().raw(),
1044  instr->Mnemonic(), detail);
1045  DCHECK(info()->IsStub() || frame_is_built_);
1046  // Go through jump table if we need to build frame, or restore caller doubles.
1047  if (branch_type == always &&
1048  frame_is_built_ && !info()->saves_caller_doubles()) {
1049  DeoptComment(reason);
1050  __ Call(entry, RelocInfo::RUNTIME_ENTRY);
1051  } else {
1052  Deoptimizer::JumpTableEntry* table_entry =
1053  new (zone()) Deoptimizer::JumpTableEntry(entry, reason, bailout_type,
1054  !frame_is_built_);
1055  // We often have several deopts to the same entry, reuse the last
1056  // jump entry if this is the case.
1057  if (jump_table_.is_empty() ||
1058  !table_entry->IsEquivalentTo(*jump_table_.last())) {
1059  jump_table_.Add(table_entry, zone());
1060  }
1061  __ B(&jump_table_.last()->label, branch_type, reg, bit);
1062  }
1063 }
static Address GetDeoptimizationEntry(Isolate *isolate, int id, BailoutType type, GetEntryMode mode=ENSURE_ENTRY_CODE)
Definition: deoptimizer.cc:672
void RegisterEnvironmentForDeoptimization(LEnvironment *environment, Safepoint::DeoptMode mode)
static int Push(SpecialRPOStackFrame *stack, int depth, BasicBlock *child, int unvisited)
Definition: scheduler.cc:773
byte * Address
Definition: globals.h:101
BranchType InvertBranchType(BranchType type)

References __, v8::internal::always, v8::internal::B, v8::internal::BREAK, DCHECK, v8::internal::Deoptimizer::EAGER, v8::internal::LInstruction::environment(), frame_is_built_, v8::internal::Deoptimizer::GetDeoptimizationEntry(), v8::internal::gt, v8::internal::LInstruction::hydrogen_value(), v8::internal::InvertBranchType(), v8::internal::Deoptimizer::JumpTableEntry::IsEquivalentTo(), jump_table_, v8::internal::Deoptimizer::LAZY, LEnvironment, v8::internal::LInstruction::Mnemonic(), v8::internal::not_zero, NULL, v8::internal::NZCV, v8::internal::HValue::position(), v8::internal::compiler::Push(), v8::internal::HSourcePosition::raw(), RegisterEnvironmentForDeoptimization(), and v8::internal::RelocInfo::RUNTIME_ENTRY.

Referenced by Deoptimize(), DeoptimizeIfBitClear(), DeoptimizeIfBitSet(), DeoptimizeIfNotZero(), and DeoptimizeIfZero().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ DeoptimizeIf() [1/13]

void v8::internal::LCodeGen::DeoptimizeIf ( Condition  cc,
LInstruction instr,
const char *  detail 
)
private

◆ DeoptimizeIf() [2/13]

void v8::internal::LCodeGen::DeoptimizeIf ( Condition  cc,
LInstruction instr,
const char *  detail 
)
private

◆ DeoptimizeIf() [3/13]

void v8::internal::LCodeGen::DeoptimizeIf ( Condition  cc,
LInstruction instr,
const char *  detail 
)
private

◆ DeoptimizeIf() [4/13]

void v8::internal::LCodeGen::DeoptimizeIf ( Condition  cc,
LInstruction instr,
const char *  detail,
Deoptimizer::BailoutType  bailout_type 
)
private

◆ DeoptimizeIf() [5/13]

void v8::internal::LCodeGen::DeoptimizeIf ( Condition  cc,
LInstruction instr,
const char *  detail,
Deoptimizer::BailoutType  bailout_type 
)
private

◆ DeoptimizeIf() [6/13]

void v8::internal::LCodeGen::DeoptimizeIf ( Condition  cc,
LInstruction instr,
const char *  detail,
Deoptimizer::BailoutType  bailout_type 
)
private

◆ DeoptimizeIf() [7/13]

void v8::internal::LCodeGen::DeoptimizeIf ( Condition  cond,
LInstruction instr,
const char *  detail 
)
private

◆ DeoptimizeIf() [8/13]

void v8::internal::LCodeGen::DeoptimizeIf ( Condition  condition,
LInstruction instr,
const char *  detail 
)
private

Definition at line 921 of file lithium-codegen-arm.cc.

922  {
923  Deoptimizer::BailoutType bailout_type = info()->IsStub()
924  ? Deoptimizer::LAZY
925  : Deoptimizer::EAGER;
926  DeoptimizeIf(condition, instr, detail, bailout_type);
927 }
void DeoptimizeIf(Condition condition, LInstruction *instr, const char *detail, Deoptimizer::BailoutType bailout_type)

References DeoptimizeIf(), v8::internal::Deoptimizer::EAGER, and v8::internal::Deoptimizer::LAZY.

+ Here is the call graph for this function:

◆ DeoptimizeIf() [9/13]

void v8::internal::LCodeGen::DeoptimizeIf ( Condition  condition,
LInstruction instr,
const char *  detail,
Deoptimizer::BailoutType  bailout_type 
)
private

Definition at line 844 of file lithium-codegen-arm.cc.

846  {
847  LEnvironment* environment = instr->environment();
848  RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
849  DCHECK(environment->HasBeenRegistered());
850  int id = environment->deoptimization_index();
851  DCHECK(info()->IsOptimizing() || info()->IsStub());
852  Address entry =
853  Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
854  if (entry == NULL) {
855  Abort(kBailoutWasNotPrepared);
856  return;
857  }
858 
859  if (FLAG_deopt_every_n_times != 0 && !info()->IsStub()) {
860  Register scratch = scratch0();
861  ExternalReference count = ExternalReference::stress_deopt_count(isolate());
862 
863  // Store the condition on the stack if necessary
864  if (condition != al) {
865  __ mov(scratch, Operand::Zero(), LeaveCC, NegateCondition(condition));
866  __ mov(scratch, Operand(1), LeaveCC, condition);
867  __ push(scratch);
868  }
869 
870  __ push(r1);
871  __ mov(scratch, Operand(count));
872  __ ldr(r1, MemOperand(scratch));
873  __ sub(r1, r1, Operand(1), SetCC);
874  __ mov(r1, Operand(FLAG_deopt_every_n_times), LeaveCC, eq);
875  __ str(r1, MemOperand(scratch));
876  __ pop(r1);
877 
878  if (condition != al) {
879  // Clean up the stack before the deoptimizer call
880  __ pop(scratch);
881  }
882 
883  __ Call(entry, RelocInfo::RUNTIME_ENTRY, eq);
884 
885  // 'Restore' the condition in a slightly hacky way. (It would be better
886  // to use 'msr' and 'mrs' instructions here, but they are not supported by
887  // our ARM simulator).
888  if (condition != al) {
889  condition = ne;
890  __ cmp(scratch, Operand::Zero());
891  }
892  }
893 
894  if (info()->ShouldTrapOnDeopt()) {
895  __ stop("trap_on_deopt", condition);
896  }
897 
898  Deoptimizer::Reason reason(instr->hydrogen_value()->position().raw(),
899  instr->Mnemonic(), detail);
900  DCHECK(info()->IsStub() || frame_is_built_);
901  // Go through jump table if we need to handle condition, build frame, or
902  // restore caller doubles.
903  if (condition == al && frame_is_built_ &&
904  !info()->saves_caller_doubles()) {
905  DeoptComment(reason);
906  __ Call(entry, RelocInfo::RUNTIME_ENTRY);
907  } else {
908  Deoptimizer::JumpTableEntry table_entry(entry, reason, bailout_type,
909  !frame_is_built_);
910  // We often have several deopts to the same entry, reuse the last
911  // jump entry if this is the case.
912  if (jump_table_.is_empty() ||
913  !table_entry.IsEquivalentTo(jump_table_.last())) {
914  jump_table_.Add(table_entry, zone());
915  }
916  __ b(condition, &jump_table_.last().label);
917  }
918 }
Condition NegateCondition(Condition cond)
Definition: constants-arm.h:86

References __, v8::internal::al, DCHECK, v8::internal::LInstruction::environment(), v8::internal::eq, frame_is_built_, v8::internal::Deoptimizer::GetDeoptimizationEntry(), v8::internal::LInstruction::hydrogen_value(), v8::internal::Deoptimizer::JumpTableEntry::IsEquivalentTo(), jump_table_, v8::internal::LeaveCC, LEnvironment, v8::internal::LInstruction::Mnemonic(), v8::internal::ne, v8::internal::NegateCondition(), NULL, v8::internal::HValue::position(), v8::internal::r1, v8::internal::HSourcePosition::raw(), RegisterEnvironmentForDeoptimization(), v8::internal::RelocInfo::RUNTIME_ENTRY, scratch0(), and v8::internal::SetCC.

Referenced by DeoptimizeIf(), DeoptimizeIfMinusZero(), DeoptimizeIfNotHeapNumber(), DeoptimizeIfNotRoot(), DeoptimizeIfRoot(), DoDeferredInstanceMigration(), DoDeferredMathAbsTaggedHeapNumber(), DoDeferredTaggedToI(), DoLoadKeyedExternalArray(), DoLoadKeyedFixedArray(), DoLoadKeyedFixedDoubleArray(), EmitIntegerMathAbs(), and EmitNumberUntagD().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ DeoptimizeIf() [10/13]

void v8::internal::LCodeGen::DeoptimizeIf ( Condition  condition,
LInstruction instr,
const char *  detail,
Register  src1 = zero_reg,
const Operand src2 = Operand(zero_reg) 
)
private

◆ DeoptimizeIf() [11/13]

void v8::internal::LCodeGen::DeoptimizeIf ( Condition  condition,
LInstruction instr,
const char *  detail = NULL,
Register  src1 = zero_reg,
const Operand src2 = Operand(zero_reg) 
)
private

Definition at line 884 of file lithium-codegen-mips.cc.

886  {
887  Deoptimizer::BailoutType bailout_type = info()->IsStub()
888  ? Deoptimizer::LAZY
889  : Deoptimizer::EAGER;
890  DeoptimizeIf(condition, instr, bailout_type, detail, src1, src2);
891 }

References DeoptimizeIf(), v8::internal::Deoptimizer::EAGER, and v8::internal::Deoptimizer::LAZY.

+ Here is the call graph for this function:

◆ DeoptimizeIf() [12/13]

void v8::internal::LCodeGen::DeoptimizeIf ( Condition  condition,
LInstruction instr,
Deoptimizer::BailoutType  bailout_type,
const char *  detail,
Register  src1 = zero_reg,
const Operand src2 = Operand(zero_reg) 
)
private

Definition at line 817 of file lithium-codegen-mips.cc.

820  {
821  LEnvironment* environment = instr->environment();
822  RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
823  DCHECK(environment->HasBeenRegistered());
824  int id = environment->deoptimization_index();
825  DCHECK(info()->IsOptimizing() || info()->IsStub());
826  Address entry =
827  Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
828  if (entry == NULL) {
829  Abort(kBailoutWasNotPrepared);
830  return;
831  }
832 
833  if (FLAG_deopt_every_n_times != 0 && !info()->IsStub()) {
834  Register scratch = scratch0();
835  ExternalReference count = ExternalReference::stress_deopt_count(isolate());
836  Label no_deopt;
837  __ Push(a1, scratch);
838  __ li(scratch, Operand(count));
839  __ lw(a1, MemOperand(scratch));
840  __ Subu(a1, a1, Operand(1));
841  __ Branch(&no_deopt, ne, a1, Operand(zero_reg));
842  __ li(a1, Operand(FLAG_deopt_every_n_times));
843  __ sw(a1, MemOperand(scratch));
844  __ Pop(a1, scratch);
845 
846  __ Call(entry, RelocInfo::RUNTIME_ENTRY);
847  __ bind(&no_deopt);
848  __ sw(a1, MemOperand(scratch));
849  __ Pop(a1, scratch);
850  }
851 
852  if (info()->ShouldTrapOnDeopt()) {
853  Label skip;
854  if (condition != al) {
855  __ Branch(&skip, NegateCondition(condition), src1, src2);
856  }
857  __ stop("trap_on_deopt");
858  __ bind(&skip);
859  }
860 
861  Deoptimizer::Reason reason(instr->hydrogen_value()->position().raw(),
862  instr->Mnemonic(), detail);
863  DCHECK(info()->IsStub() || frame_is_built_);
864  // Go through jump table if we need to handle condition, build frame, or
865  // restore caller doubles.
866  if (condition == al && frame_is_built_ &&
867  !info()->saves_caller_doubles()) {
868  DeoptComment(reason);
869  __ Call(entry, RelocInfo::RUNTIME_ENTRY, condition, src1, src2);
870  } else {
871  Deoptimizer::JumpTableEntry table_entry(entry, reason, bailout_type,
872  !frame_is_built_);
873  // We often have several deopts to the same entry, reuse the last
874  // jump entry if this is the case.
875  if (jump_table_.is_empty() ||
876  !table_entry.IsEquivalentTo(jump_table_.last())) {
877  jump_table_.Add(table_entry, zone());
878  }
879  __ Branch(&jump_table_.last().label, condition, src1, src2);
880  }
881 }

References __, v8::internal::al, DCHECK, v8::internal::LInstruction::environment(), frame_is_built_, v8::internal::Deoptimizer::GetDeoptimizationEntry(), v8::internal::LInstruction::hydrogen_value(), v8::internal::Deoptimizer::JumpTableEntry::IsEquivalentTo(), jump_table_, LEnvironment, v8::internal::LInstruction::Mnemonic(), v8::internal::ne, v8::internal::NegateCondition(), NULL, v8::internal::HValue::position(), v8::internal::compiler::Push(), v8::internal::HSourcePosition::raw(), RegisterEnvironmentForDeoptimization(), v8::internal::RelocInfo::RUNTIME_ENTRY, and scratch0().

+ Here is the call graph for this function:

◆ DeoptimizeIf() [13/13]

void v8::internal::LCodeGen::DeoptimizeIf ( Condition  condition,
LInstruction instr,
Deoptimizer::BailoutType  bailout_type,
const char *  detail,
Register  src1 = zero_reg,
const Operand src2 = Operand(zero_reg) 
)
private

◆ DeoptimizeIfBitClear()

void v8::internal::LCodeGen::DeoptimizeIfBitClear ( Register  rt,
int  bit,
LInstruction instr,
const char *  detail 
)
private

Definition at line 1142 of file lithium-codegen-arm64.cc.

1143  {
1144  DeoptimizeBranch(instr, detail, reg_bit_clear, rt, bit);
1145 }

References DeoptimizeBranch(), and v8::internal::reg_bit_clear.

Referenced by DeoptimizeIfSmi().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ DeoptimizeIfBitSet()

void v8::internal::LCodeGen::DeoptimizeIfBitSet ( Register  rt,
int  bit,
LInstruction instr,
const char *  detail 
)
private

Definition at line 1136 of file lithium-codegen-arm64.cc.

1137  {
1138  DeoptimizeBranch(instr, detail, reg_bit_set, rt, bit);
1139 }

References DeoptimizeBranch(), and v8::internal::reg_bit_set.

Referenced by DeoptimizeIfNegative(), and DeoptimizeIfNotSmi().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ DeoptimizeIfMinusZero()

void v8::internal::LCodeGen::DeoptimizeIfMinusZero ( DoubleRegister  input,
LInstruction instr,
const char *  detail 
)
private

Definition at line 1123 of file lithium-codegen-arm64.cc.

1124  {
1125  __ TestForMinusZero(input);
1126  DeoptimizeIf(vs, instr, detail);
1127 }

References __, DeoptimizeIf(), and v8::internal::vs.

+ Here is the call graph for this function:

◆ DeoptimizeIfNegative()

void v8::internal::LCodeGen::DeoptimizeIfNegative ( Register  rt,
LInstruction instr,
const char *  detail 
)
private

Definition at line 1090 of file lithium-codegen-arm64.cc.

1091  {
1092  int sign_bit = rt.Is64Bits() ? kXSignBit : kWSignBit;
1093  DeoptimizeIfBitSet(rt, sign_bit, instr, detail);
1094 }
void DeoptimizeIfBitSet(Register rt, int bit, LInstruction *instr, const char *detail)
const int64_t kWSignBit
const int64_t kXSignBit

References DeoptimizeIfBitSet(), v8::internal::CPURegister::Is64Bits(), v8::internal::kWSignBit, and v8::internal::kXSignBit.

Referenced by DoDeferredTaggedToI().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ DeoptimizeIfNotHeapNumber()

void v8::internal::LCodeGen::DeoptimizeIfNotHeapNumber ( Register  object,
LInstruction instr 
)
private

Definition at line 1130 of file lithium-codegen-arm64.cc.

1130  {
1131  __ CompareObjectMap(object, Heap::kHeapNumberMapRootIndex);
1132  DeoptimizeIf(ne, instr, "not heap number");
1133 }

References __, DeoptimizeIf(), and v8::internal::ne.

Referenced by DoDeferredMathAbsTagged(), and DoDeferredTaggedToI().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ DeoptimizeIfNotRoot()

void v8::internal::LCodeGen::DeoptimizeIfNotRoot ( Register  rt,
Heap::RootListIndex  index,
LInstruction instr,
const char *  detail 
)
private

Definition at line 1116 of file lithium-codegen-arm64.cc.

1117  {
1118  __ CompareRoot(rt, index);
1119  DeoptimizeIf(ne, instr, detail);
1120 }

References __, DeoptimizeIf(), and v8::internal::ne.

Referenced by DoDeferredTaggedToI().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ DeoptimizeIfNotSmi()

void v8::internal::LCodeGen::DeoptimizeIfNotSmi ( Register  rt,
LInstruction instr,
const char *  detail 
)
private

Definition at line 1103 of file lithium-codegen-arm64.cc.

1104  {
1105  DeoptimizeIfBitSet(rt, MaskToBit(kSmiTagMask), instr, detail);
1106 }
int MaskToBit(uint64_t mask)
const intptr_t kSmiTagMask
Definition: v8.h:5744

References DeoptimizeIfBitSet(), v8::internal::kSmiTagMask, and v8::internal::MaskToBit().

+ Here is the call graph for this function:

◆ DeoptimizeIfNotZero()

void v8::internal::LCodeGen::DeoptimizeIfNotZero ( Register  rt,
LInstruction instr,
const char *  detail 
)
private

Definition at line 1084 of file lithium-codegen-arm64.cc.

1085  {
1086  DeoptimizeBranch(instr, detail, reg_not_zero, rt);
1087 }

References DeoptimizeBranch(), and v8::internal::reg_not_zero.

+ Here is the call graph for this function:

◆ DeoptimizeIfRoot()

void v8::internal::LCodeGen::DeoptimizeIfRoot ( Register  rt,
Heap::RootListIndex  index,
LInstruction instr,
const char *  detail 
)
private

Definition at line 1109 of file lithium-codegen-arm64.cc.

1110  {
1111  __ CompareRoot(rt, index);
1112  DeoptimizeIf(eq, instr, detail);
1113 }

References __, DeoptimizeIf(), and v8::internal::eq.

+ Here is the call graph for this function:

◆ DeoptimizeIfSmi()

void v8::internal::LCodeGen::DeoptimizeIfSmi ( Register  rt,
LInstruction instr,
const char *  detail 
)
private

Definition at line 1097 of file lithium-codegen-arm64.cc.

1098  {
1099  DeoptimizeIfBitClear(rt, MaskToBit(kSmiTagMask), instr, detail);
1100 }
void DeoptimizeIfBitClear(Register rt, int bit, LInstruction *instr, const char *detail)

References DeoptimizeIfBitClear(), v8::internal::kSmiTagMask, and v8::internal::MaskToBit().

+ Here is the call graph for this function:

◆ DeoptimizeIfZero()

void v8::internal::LCodeGen::DeoptimizeIfZero ( Register  rt,
LInstruction instr,
const char *  detail 
)
private

Definition at line 1078 of file lithium-codegen-arm64.cc.

1079  {
1080  DeoptimizeBranch(instr, detail, reg_zero, rt);
1081 }

References DeoptimizeBranch(), and v8::internal::reg_zero.

+ Here is the call graph for this function:

◆ DISALLOW_COPY_AND_ASSIGN() [1/7]

v8::internal::LCodeGen::DISALLOW_COPY_AND_ASSIGN ( LCodeGen  )
private

◆ DISALLOW_COPY_AND_ASSIGN() [2/7]

v8::internal::LCodeGen::DISALLOW_COPY_AND_ASSIGN ( LCodeGen  )
private

◆ DISALLOW_COPY_AND_ASSIGN() [3/7]

v8::internal::LCodeGen::DISALLOW_COPY_AND_ASSIGN ( LCodeGen  )
private

◆ DISALLOW_COPY_AND_ASSIGN() [4/7]

v8::internal::LCodeGen::DISALLOW_COPY_AND_ASSIGN ( LCodeGen  )
private

◆ DISALLOW_COPY_AND_ASSIGN() [5/7]

v8::internal::LCodeGen::DISALLOW_COPY_AND_ASSIGN ( LCodeGen  )
private

◆ DISALLOW_COPY_AND_ASSIGN() [6/7]

v8::internal::LCodeGen::DISALLOW_COPY_AND_ASSIGN ( LCodeGen  )
private

◆ DISALLOW_COPY_AND_ASSIGN() [7/7]

v8::internal::LCodeGen::DISALLOW_COPY_AND_ASSIGN ( LCodeGen  )
private

◆ DoDeferredAllocate() [1/7]

void v8::internal::LCodeGen::DoDeferredAllocate ( LAllocate *  instr)

Definition at line 5389 of file lithium-codegen-arm.cc.

5389  {
5390  Register result = ToRegister(instr->result());
5391 
5392  // TODO(3095996): Get rid of this. For now, we need to make the
5393  // result register contain a valid pointer because it is already
5394  // contained in the register pointer map.
5395  __ mov(result, Operand(Smi::FromInt(0)));
5396 
5397  PushSafepointRegistersScope scope(this);
5398  if (instr->size()->IsRegister()) {
5399  Register size = ToRegister(instr->size());
5400  DCHECK(!size.is(result));
5401  __ SmiTag(size);
5402  __ push(size);
5403  } else {
5404  int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
5405  if (size >= 0 && size <= Smi::kMaxValue) {
5406  __ Push(Smi::FromInt(size));
5407  } else {
5408  // We should never get here at runtime => abort
5409  __ stop("invalid allocation size");
5410  return;
5411  }
5412  }
5413 
5414  int flags = AllocateDoubleAlignFlag::encode(
5415  instr->hydrogen()->MustAllocateDoubleAligned());
5416  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
5417  DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
5418  DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
5419  flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE);
5420  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
5421  DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
5422  flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE);
5423  } else {
5424  flags = AllocateTargetSpace::update(flags, NEW_SPACE);
5425  }
5426  __ Push(Smi::FromInt(flags));
5427 
5428  CallRuntimeFromDeferred(
5429  Runtime::kAllocateInTargetSpace, 2, instr, instr->context());
5430  __ StoreToSafepointRegisterSlot(r0, result);
5431 }
static U update(U previous, T value)
Definition: utils.h:223
static U encode(T value)
Definition: utils.h:217
void CallRuntimeFromDeferred(Runtime::FunctionId id, int argc, LInstruction *instr, LOperand *context)
static const int kMaxValue
Definition: objects.h:1272
static Smi * FromInt(int value)
Definition: objects-inl.h:1321
int int32_t
Definition: unicode.cc:24
@ OLD_DATA_SPACE
Definition: globals.h:361
@ OLD_POINTER_SPACE
Definition: globals.h:360

References __, CallRuntimeFromDeferred(), DCHECK, v8::internal::BitFieldBase< T, shift, size, U >::encode(), v8::internal::anonymous_namespace{flags.cc}::flags, v8::internal::Smi::FromInt(), v8::internal::Smi::kMaxValue, v8::internal::NEW_SPACE, v8::internal::OLD_DATA_SPACE, v8::internal::OLD_POINTER_SPACE, v8::internal::compiler::Push(), v8::internal::r0, scope(), size, ToInteger32(), ToRegister(), and v8::internal::BitFieldBase< T, shift, size, U >::update().

+ Here is the call graph for this function:

◆ DoDeferredAllocate() [2/7]

void v8::internal::LCodeGen::DoDeferredAllocate ( LAllocate *  instr)
private

◆ DoDeferredAllocate() [3/7]

void v8::internal::LCodeGen::DoDeferredAllocate ( LAllocate *  instr)

◆ DoDeferredAllocate() [4/7]

void v8::internal::LCodeGen::DoDeferredAllocate ( LAllocate *  instr)

◆ DoDeferredAllocate() [5/7]

void v8::internal::LCodeGen::DoDeferredAllocate ( LAllocate *  instr)

◆ DoDeferredAllocate() [6/7]

void v8::internal::LCodeGen::DoDeferredAllocate ( LAllocate *  instr)

◆ DoDeferredAllocate() [7/7]

void v8::internal::LCodeGen::DoDeferredAllocate ( LAllocate *  instr)

◆ DoDeferredInstanceMigration() [1/7]

void v8::internal::LCodeGen::DoDeferredInstanceMigration ( LCheckMaps *  instr,
Register  object 
)

Definition at line 5178 of file lithium-codegen-arm.cc.

5178  {
5179  {
5180  PushSafepointRegistersScope scope(this);
5181  __ push(object);
5182  __ mov(cp, Operand::Zero());
5183  __ CallRuntimeSaveDoubles(Runtime::kTryMigrateInstance);
5184  RecordSafepointWithRegisters(
5185  instr->pointer_map(), 1, Safepoint::kNoLazyDeopt);
5186  __ StoreToSafepointRegisterSlot(r0, scratch0());
5187  }
5188  __ tst(scratch0(), Operand(kSmiTagMask));
5189  DeoptimizeIf(eq, instr, "instance migration failed");
5190 }

References __, v8::internal::cp, DeoptimizeIf(), v8::internal::eq, v8::internal::kSmiTagMask, v8::internal::r0, RecordSafepointWithRegisters(), scope(), and scratch0().

+ Here is the call graph for this function:

◆ DoDeferredInstanceMigration() [2/7]

void v8::internal::LCodeGen::DoDeferredInstanceMigration ( LCheckMaps *  instr,
Register  object 
)
private

◆ DoDeferredInstanceMigration() [3/7]

void v8::internal::LCodeGen::DoDeferredInstanceMigration ( LCheckMaps *  instr,
Register  object 
)

◆ DoDeferredInstanceMigration() [4/7]

void v8::internal::LCodeGen::DoDeferredInstanceMigration ( LCheckMaps *  instr,
Register  object 
)

◆ DoDeferredInstanceMigration() [5/7]

void v8::internal::LCodeGen::DoDeferredInstanceMigration ( LCheckMaps *  instr,
Register  object 
)

◆ DoDeferredInstanceMigration() [6/7]

void v8::internal::LCodeGen::DoDeferredInstanceMigration ( LCheckMaps *  instr,
Register  object 
)

◆ DoDeferredInstanceMigration() [7/7]

void v8::internal::LCodeGen::DoDeferredInstanceMigration ( LCheckMaps *  instr,
Register  object 
)

◆ DoDeferredInstanceOfKnownGlobal() [1/7]

void v8::internal::LCodeGen::DoDeferredInstanceOfKnownGlobal ( LInstanceOfKnownGlobal *  instr)
private

Definition at line 3137 of file lithium-codegen-arm64.cc.

3137  {
3138  Register result = ToRegister(instr->result());
3139  DCHECK(result.Is(x0)); // InstanceofStub returns its result in x0.
3141  flags = static_cast<InstanceofStub::Flags>(
3143  flags = static_cast<InstanceofStub::Flags>(
3145  flags = static_cast<InstanceofStub::Flags>(
3147 
3148  PushSafepointRegistersScope scope(this);
3149  LoadContextFromDeferred(instr->context());
3150 
3151  // Prepare InstanceofStub arguments.
3152  DCHECK(ToRegister(instr->value()).Is(InstanceofStub::left()));
3153  __ LoadObject(InstanceofStub::right(), instr->function());
3154 
3155  InstanceofStub stub(isolate(), flags);
3156  CallCodeGeneric(stub.GetCode(),
3158  instr,
3160  LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
3161  safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
3162 
3163  // Put the result value into the result register slot.
3164  __ StoreToSafepointRegisterSlot(result, result);
3165 }
static Register right()
Definition: code-stubs.h:686
static Register left()
Definition: code-stubs.h:685
bool Is(const CPURegister &other) const

References __, CallCodeGeneric(), v8::internal::RelocInfo::CODE_TARGET, DCHECK, v8::internal::anonymous_namespace{flags.cc}::flags, v8::internal::CPURegister::Is(), v8::internal::InstanceofStub::kArgsInRegisters, v8::internal::InstanceofStub::kCallSiteInlineCheck, v8::internal::InstanceofStub::kNoFlags, v8::internal::InstanceofStub::kReturnTrueFalseObject, v8::internal::InstanceofStub::left(), LEnvironment, LoadContextFromDeferred(), RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS, v8::internal::InstanceofStub::right(), safepoints_, scope(), and ToRegister().

+ Here is the call graph for this function:

◆ DoDeferredInstanceOfKnownGlobal() [2/7]

void v8::internal::LCodeGen::DoDeferredInstanceOfKnownGlobal ( LInstanceOfKnownGlobal *  instr,
Label *  map_check 
)

Definition at line 2769 of file lithium-codegen-mips.cc.

2770  {
2771  Register result = ToRegister(instr->result());
2772  DCHECK(result.is(v0));
2773 
2775  flags = static_cast<InstanceofStub::Flags>(
2777  flags = static_cast<InstanceofStub::Flags>(
2779  flags = static_cast<InstanceofStub::Flags>(
2781  InstanceofStub stub(isolate(), flags);
2782 
2783  PushSafepointRegistersScope scope(this);
2784  LoadContextFromDeferred(instr->context());
2785 
2786  // Get the temp register reserved by the instruction. This needs to be t0 as
2787  // its slot of the pushing of safepoint registers is used to communicate the
2788  // offset to the location of the map check.
2789  Register temp = ToRegister(instr->temp());
2790  DCHECK(temp.is(t0));
2791  __ li(InstanceofStub::right(), instr->function());
2792  static const int kAdditionalDelta = 7;
2793  int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta;
2794  Label before_push_delta;
2795  __ bind(&before_push_delta);
2796  {
2797  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
2798  __ li(temp, Operand(delta * kPointerSize), CONSTANT_SIZE);
2799  __ StoreToSafepointRegisterSlot(temp, temp);
2800  }
2801  CallCodeGeneric(stub.GetCode(),
2803  instr,
2805  LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
2806  safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
2807  // Put the result value into the result register slot and
2808  // restore all registers.
2809  __ StoreToSafepointRegisterSlot(result, result);
2810 }
friend class BlockTrampolinePoolScope
const int kPointerSize
Definition: globals.h:129

References __, CallCodeGeneric(), v8::internal::RelocInfo::CODE_TARGET, v8::internal::CONSTANT_SIZE, DCHECK, v8::internal::anonymous_namespace{flags.cc}::flags, v8::internal::Register::is(), v8::internal::InstanceofStub::kArgsInRegisters, v8::internal::InstanceofStub::kCallSiteInlineCheck, v8::internal::InstanceofStub::kNoFlags, v8::internal::kPointerSize, v8::internal::InstanceofStub::kReturnTrueFalseObject, LEnvironment, LoadContextFromDeferred(), RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS, v8::internal::InstanceofStub::right(), safepoints_, scope(), and ToRegister().

+ Here is the call graph for this function:

◆ DoDeferredInstanceOfKnownGlobal() [3/7]

void v8::internal::LCodeGen::DoDeferredInstanceOfKnownGlobal ( LInstanceOfKnownGlobal *  instr,
Label *  map_check 
)

◆ DoDeferredInstanceOfKnownGlobal() [4/7]

void v8::internal::LCodeGen::DoDeferredInstanceOfKnownGlobal ( LInstanceOfKnownGlobal *  instr,
Label *  map_check 
)

◆ DoDeferredInstanceOfKnownGlobal() [5/7]

void v8::internal::LCodeGen::DoDeferredInstanceOfKnownGlobal ( LInstanceOfKnownGlobal *  instr,
Label *  map_check 
)

◆ DoDeferredInstanceOfKnownGlobal() [6/7]

void v8::internal::LCodeGen::DoDeferredInstanceOfKnownGlobal ( LInstanceOfKnownGlobal *  instr,
Label *  map_check 
)

◆ DoDeferredInstanceOfKnownGlobal() [7/7]

void v8::internal::LCodeGen::DoDeferredInstanceOfKnownGlobal ( LInstanceOfKnownGlobal *  instr,
Label *  map_check,
Label *  bool_load 
)

Definition at line 2865 of file lithium-codegen-arm.cc.

2867  {
2869  flags = static_cast<InstanceofStub::Flags>(
2871  flags = static_cast<InstanceofStub::Flags>(
2873  flags = static_cast<InstanceofStub::Flags>(
2875  InstanceofStub stub(isolate(), flags);
2876 
2877  PushSafepointRegistersScope scope(this);
2878  LoadContextFromDeferred(instr->context());
2879 
2880  __ Move(InstanceofStub::right(), instr->function());
2881 
2882  int call_size = CallCodeSize(stub.GetCode(), RelocInfo::CODE_TARGET);
2883  int additional_delta = (call_size / Assembler::kInstrSize) + 4;
2884  // Make sure that code size is predictable, since we use specific constant
2885  // offsets in the code to find embedded values.
2886  PredictableCodeSizeScope predictable(
2887  masm_, (additional_delta + 1) * Assembler::kInstrSize);
2888  // Make sure we don't emit any additional entries in the constant pool before
2889  // the call to ensure that the CallCodeSize() calculated the correct number of
2890  // instructions for the constant pool load.
2891  {
2892  ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
2893  int map_check_delta =
2894  masm_->InstructionsGeneratedSince(map_check) + additional_delta;
2895  int bool_load_delta =
2896  masm_->InstructionsGeneratedSince(bool_load) + additional_delta;
2897  Label before_push_delta;
2898  __ bind(&before_push_delta);
2899  __ BlockConstPoolFor(additional_delta);
2900  // r5 is used to communicate the offset to the location of the map check.
2901  __ mov(r5, Operand(map_check_delta * kPointerSize));
2902  // r6 is used to communicate the offset to the location of the bool load.
2903  __ mov(r6, Operand(bool_load_delta * kPointerSize));
2904  // The mov above can generate one or two instructions. The delta was
2905  // computed for two instructions, so we need to pad here in case of one
2906  // instruction.
2907  while (masm_->InstructionsGeneratedSince(&before_push_delta) != 4) {
2908  __ nop();
2909  }
2910  }
2911  CallCodeGeneric(stub.GetCode(),
2913  instr,
2915  LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
2916  safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
2917  // Put the result value (r0) into the result register slot and
2918  // restore all registers.
2919  __ StoreToSafepointRegisterSlot(r0, ToRegister(instr->result()));
2920 }
int CallCodeSize(Handle< Code > code, RelocInfo::Mode mode)
const Register r6
const Register r5

References __, CallCodeGeneric(), CallCodeSize(), v8::internal::RelocInfo::CODE_TARGET, v8::internal::anonymous_namespace{flags.cc}::flags, v8::internal::InstanceofStub::kArgsInRegisters, v8::internal::InstanceofStub::kCallSiteInlineCheck, v8::internal::Assembler::kInstrSize, v8::internal::InstanceofStub::kNoFlags, v8::internal::kPointerSize, v8::internal::InstanceofStub::kReturnTrueFalseObject, LEnvironment, LoadContextFromDeferred(), v8::internal::r0, v8::internal::r5, v8::internal::r6, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS, v8::internal::InstanceofStub::right(), safepoints_, scope(), and ToRegister().

+ Here is the call graph for this function:

◆ DoDeferredLoadMutableDouble() [1/7]

void v8::internal::LCodeGen::DoDeferredLoadMutableDouble ( LLoadFieldByIndex *  instr,
Register  object,
Register  index 
)

◆ DoDeferredLoadMutableDouble() [2/7]

void v8::internal::LCodeGen::DoDeferredLoadMutableDouble ( LLoadFieldByIndex *  instr,
Register  object,
Register  index 
)

◆ DoDeferredLoadMutableDouble() [3/7]

void v8::internal::LCodeGen::DoDeferredLoadMutableDouble ( LLoadFieldByIndex *  instr,
Register  object,
Register  index 
)

◆ DoDeferredLoadMutableDouble() [4/7]

void v8::internal::LCodeGen::DoDeferredLoadMutableDouble ( LLoadFieldByIndex *  instr,
Register  result,
Register  object,
Register  index 
)

Definition at line 5820 of file lithium-codegen-arm.cc.

5823  {
5824  PushSafepointRegistersScope scope(this);
5825  __ Push(object);
5826  __ Push(index);
5827  __ mov(cp, Operand::Zero());
5828  __ CallRuntimeSaveDoubles(Runtime::kLoadMutableDouble);
5830  instr->pointer_map(), 2, Safepoint::kNoLazyDeopt);
5831  __ StoreToSafepointRegisterSlot(r0, result);
5832 }

References __, v8::internal::cp, v8::internal::compiler::Push(), v8::internal::r0, RecordSafepointWithRegisters(), and scope().

+ Here is the call graph for this function:

◆ DoDeferredLoadMutableDouble() [5/7]

void v8::internal::LCodeGen::DoDeferredLoadMutableDouble ( LLoadFieldByIndex *  instr,
Register  result,
Register  object,
Register  index 
)
private

◆ DoDeferredLoadMutableDouble() [6/7]

void v8::internal::LCodeGen::DoDeferredLoadMutableDouble ( LLoadFieldByIndex *  instr,
Register  result,
Register  object,
Register  index 
)

◆ DoDeferredLoadMutableDouble() [7/7]

void v8::internal::LCodeGen::DoDeferredLoadMutableDouble ( LLoadFieldByIndex *  instr,
Register  result,
Register  object,
Register  index 
)

◆ DoDeferredMathAbsTagged()

void v8::internal::LCodeGen::DoDeferredMathAbsTagged ( LMathAbsTagged instr,
Label *  exit,
Label *  allocation_entry 
)
private

Definition at line 3741 of file lithium-codegen-arm64.cc.

3743  {
3744  // Handle the tricky cases of MathAbsTagged:
3745  // - HeapNumber inputs.
3746  // - Negative inputs produce a positive result, so a new HeapNumber is
3747  // allocated to hold it.
3748  // - Positive inputs are returned as-is, since there is no need to allocate
3749  // a new HeapNumber for the result.
3750  // - The (smi) input -0x80000000, produces +0x80000000, which does not fit
3751  // a smi. In this case, the inline code sets the result and jumps directly
3752  // to the allocation_entry label.
3753  DCHECK(instr->context() != NULL);
3754  DCHECK(ToRegister(instr->context()).is(cp));
3755  Register input = ToRegister(instr->value());
3756  Register temp1 = ToRegister(instr->temp1());
3757  Register temp2 = ToRegister(instr->temp2());
3758  Register result_bits = ToRegister(instr->temp3());
3759  Register result = ToRegister(instr->result());
3760 
3761  Label runtime_allocation;
3762 
3763  // Deoptimize if the input is not a HeapNumber.
3764  DeoptimizeIfNotHeapNumber(input, instr);
3765 
3766  // If the argument is positive, we can return it as-is, without any need to
3767  // allocate a new HeapNumber for the result. We have to do this in integer
3768  // registers (rather than with fabs) because we need to be able to distinguish
3769  // the two zeroes.
3770  __ Ldr(result_bits, FieldMemOperand(input, HeapNumber::kValueOffset));
3771  __ Mov(result, input);
3772  __ Tbz(result_bits, kXSignBit, exit);
3773 
3774  // Calculate abs(input) by clearing the sign bit.
3775  __ Bic(result_bits, result_bits, kXSignMask);
3776 
3777  // Allocate a new HeapNumber to hold the result.
3778  // result_bits The bit representation of the (double) result.
3779  __ Bind(allocation_entry);
3780  __ AllocateHeapNumber(result, &runtime_allocation, temp1, temp2);
3781  // The inline (non-deferred) code will store result_bits into result.
3782  __ B(exit);
3783 
3784  __ Bind(&runtime_allocation);
3785  if (FLAG_debug_code) {
3786  // Because result is in the pointer map, we need to make sure it has a valid
3787  // tagged value before we call the runtime. We speculatively set it to the
3788  // input (for abs(+x)) or to a smi (for abs(-SMI_MIN)), so it should already
3789  // be valid.
3790  Label result_ok;
3791  Register input = ToRegister(instr->value());
3792  __ JumpIfSmi(result, &result_ok);
3793  __ Cmp(input, result);
3794  __ Assert(eq, kUnexpectedValue);
3795  __ Bind(&result_ok);
3796  }
3797 
3798  { PushSafepointRegistersScope scope(this);
3799  CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr,
3800  instr->context());
3801  __ StoreToSafepointRegisterSlot(x0, result);
3802  }
3803  // The inline (non-deferred) code will store result_bits into result.
3804 }
static const int kValueOffset
Definition: objects.h:1506
void DeoptimizeIfNotHeapNumber(Register object, LInstruction *instr)
const int64_t kXSignMask
bool is(Register reg) const

References __, v8::internal::B, CallRuntimeFromDeferred(), v8::internal::LMathAbsTagged::context(), v8::internal::cp, DCHECK, DeoptimizeIfNotHeapNumber(), v8::internal::eq, v8::internal::FieldMemOperand(), v8::internal::Register::is(), v8::internal::HeapNumber::kValueOffset, v8::internal::kXSignBit, v8::internal::kXSignMask, NULL, v8::internal::LTemplateResultInstruction< R >::result(), scope(), v8::internal::LMathAbsTagged::temp1(), v8::internal::LMathAbsTagged::temp2(), v8::internal::LMathAbsTagged::temp3(), ToRegister(), and v8::internal::LMathAbsTagged::value().

+ Here is the call graph for this function:

◆ DoDeferredMathAbsTaggedHeapNumber() [1/6]

void v8::internal::LCodeGen::DoDeferredMathAbsTaggedHeapNumber ( LMathAbs *  instr)

Definition at line 3672 of file lithium-codegen-arm.cc.

3672  {
3673  DCHECK(instr->context() != NULL);
3674  DCHECK(ToRegister(instr->context()).is(cp));
3675  Register input = ToRegister(instr->value());
3676  Register result = ToRegister(instr->result());
3677  Register scratch = scratch0();
3678 
3679  // Deoptimize if not a heap number.
3680  __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
3681  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
3682  __ cmp(scratch, Operand(ip));
3683  DeoptimizeIf(ne, instr, "not a heap number");
3684 
3685  Label done;
3686  Register exponent = scratch0();
3687  scratch = no_reg;
3688  __ ldr(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset));
3689  // Check the sign of the argument. If the argument is positive, just
3690  // return it.
3691  __ tst(exponent, Operand(HeapNumber::kSignMask));
3692  // Move the input to the result if necessary.
3693  __ Move(result, input);
3694  __ b(eq, &done);
3695 
3696  // Input is negative. Reverse its sign.
3697  // Preserve the value of all registers.
3698  {
3699  PushSafepointRegistersScope scope(this);
3700 
3701  // Registers were saved at the safepoint, so we can use
3702  // many scratch registers.
3703  Register tmp1 = input.is(r1) ? r0 : r1;
3704  Register tmp2 = input.is(r2) ? r0 : r2;
3705  Register tmp3 = input.is(r3) ? r0 : r3;
3706  Register tmp4 = input.is(r4) ? r0 : r4;
3707 
3708  // exponent: floating point exponent value.
3709 
3710  Label allocated, slow;
3711  __ LoadRoot(tmp4, Heap::kHeapNumberMapRootIndex);
3712  __ AllocateHeapNumber(tmp1, tmp2, tmp3, tmp4, &slow);
3713  __ b(&allocated);
3714 
3715  // Slow case: Call the runtime system to do the number allocation.
3716  __ bind(&slow);
3717 
3718  CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr,
3719  instr->context());
3720  // Set the pointer to the new heap number in tmp.
3721  if (!tmp1.is(r0)) __ mov(tmp1, Operand(r0));
3722  // Restore input_reg after call to runtime.
3723  __ LoadFromSafepointRegisterSlot(input, input);
3724  __ ldr(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset));
3725 
3726  __ bind(&allocated);
3727  // exponent: floating point exponent value.
3728  // tmp1: allocated heap number.
3729  __ bic(exponent, exponent, Operand(HeapNumber::kSignMask));
3730  __ str(exponent, FieldMemOperand(tmp1, HeapNumber::kExponentOffset));
3731  __ ldr(tmp2, FieldMemOperand(input, HeapNumber::kMantissaOffset));
3732  __ str(tmp2, FieldMemOperand(tmp1, HeapNumber::kMantissaOffset));
3733 
3734  __ StoreToSafepointRegisterSlot(tmp1, result);
3735  }
3736 
3737  __ bind(&done);
3738 }
static const uint32_t kSignMask
Definition: objects.h:1522
static const int kMapOffset
Definition: objects.h:1427
const Register r2
const Register r3
const Register r4
const Register no_reg

References __, CallRuntimeFromDeferred(), v8::internal::cp, DCHECK, DeoptimizeIf(), v8::internal::eq, v8::internal::FieldMemOperand(), v8::internal::ip, v8::internal::Register::is(), v8::internal::HeapObject::kMapOffset, v8::internal::HeapNumber::kSignMask, v8::internal::ne, v8::internal::no_reg, NULL, v8::internal::r0, v8::internal::r1, v8::internal::r2, v8::internal::r3, v8::internal::r4, scope(), scratch0(), and ToRegister().

+ Here is the call graph for this function:

◆ DoDeferredMathAbsTaggedHeapNumber() [2/6]

void v8::internal::LCodeGen::DoDeferredMathAbsTaggedHeapNumber ( LMathAbs *  instr)

◆ DoDeferredMathAbsTaggedHeapNumber() [3/6]

void v8::internal::LCodeGen::DoDeferredMathAbsTaggedHeapNumber ( LMathAbs *  instr)

◆ DoDeferredMathAbsTaggedHeapNumber() [4/6]

void v8::internal::LCodeGen::DoDeferredMathAbsTaggedHeapNumber ( LMathAbs *  instr)

◆ DoDeferredMathAbsTaggedHeapNumber() [5/6]

void v8::internal::LCodeGen::DoDeferredMathAbsTaggedHeapNumber ( LMathAbs *  instr)

◆ DoDeferredMathAbsTaggedHeapNumber() [6/6]

void v8::internal::LCodeGen::DoDeferredMathAbsTaggedHeapNumber ( LMathAbs *  instr)

◆ DoDeferredNumberTagD() [1/7]

void v8::internal::LCodeGen::DoDeferredNumberTagD ( LNumberTagD *  instr)

Definition at line 4814 of file lithium-codegen-arm.cc.

4814  {
4815  // TODO(3095996): Get rid of this. For now, we need to make the
4816  // result register contain a valid pointer because it is already
4817  // contained in the register pointer map.
4818  Register reg = ToRegister(instr->result());
4819  __ mov(reg, Operand::Zero());
4820 
4821  PushSafepointRegistersScope scope(this);
4822  // NumberTagI and NumberTagD use the context from the frame, rather than
4823  // the environment's HContext or HInlinedContext value.
4824  // They only call Runtime::kAllocateHeapNumber.
4825  // The corresponding HChange instructions are added in a phase that does
4826  // not have easy access to the local context.
4828  __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
4830  instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
4831  __ sub(r0, r0, Operand(kHeapObjectTag));
4832  __ StoreToSafepointRegisterSlot(r0, reg);
4833 }
static const int kContextOffset
Definition: frames.h:162
const Register fp

References __, v8::internal::cp, v8::internal::fp, v8::internal::StandardFrameConstants::kContextOffset, v8::internal::kHeapObjectTag, v8::internal::r0, RecordSafepointWithRegisters(), scope(), and ToRegister().

+ Here is the call graph for this function:

◆ DoDeferredNumberTagD() [2/7]

void v8::internal::LCodeGen::DoDeferredNumberTagD ( LNumberTagD *  instr)
private

◆ DoDeferredNumberTagD() [3/7]

void v8::internal::LCodeGen::DoDeferredNumberTagD ( LNumberTagD *  instr)

◆ DoDeferredNumberTagD() [4/7]

void v8::internal::LCodeGen::DoDeferredNumberTagD ( LNumberTagD *  instr)

◆ DoDeferredNumberTagD() [5/7]

void v8::internal::LCodeGen::DoDeferredNumberTagD ( LNumberTagD *  instr)

◆ DoDeferredNumberTagD() [6/7]

void v8::internal::LCodeGen::DoDeferredNumberTagD ( LNumberTagD *  instr)

◆ DoDeferredNumberTagD() [7/7]

void v8::internal::LCodeGen::DoDeferredNumberTagD ( LNumberTagD *  instr)

◆ DoDeferredNumberTagIU() [1/6]

void v8::internal::LCodeGen::DoDeferredNumberTagIU ( LInstruction instr,
LOperand value,
LOperand temp,
IntegerSignedness  signedness 
)

◆ DoDeferredNumberTagIU() [2/6]

void v8::internal::LCodeGen::DoDeferredNumberTagIU ( LInstruction instr,
LOperand value,
LOperand temp,
IntegerSignedness  signedness 
)

◆ DoDeferredNumberTagIU() [3/6]

void v8::internal::LCodeGen::DoDeferredNumberTagIU ( LInstruction instr,
LOperand value,
LOperand temp1,
LOperand temp2,
IntegerSignedness  signedness 
)

Definition at line 4713 of file lithium-codegen-arm.cc.

4717  {
4718  Label done, slow;
4719  Register src = ToRegister(value);
4720  Register dst = ToRegister(instr->result());
4721  Register tmp1 = scratch0();
4722  Register tmp2 = ToRegister(temp1);
4723  Register tmp3 = ToRegister(temp2);
4724  LowDwVfpRegister dbl_scratch = double_scratch0();
4725 
4726  if (signedness == SIGNED_INT32) {
4727  // There was overflow, so bits 30 and 31 of the original integer
4728  // disagree. Try to allocate a heap number in new space and store
4729  // the value in there. If that fails, call the runtime system.
4730  if (dst.is(src)) {
4731  __ SmiUntag(src, dst);
4732  __ eor(src, src, Operand(0x80000000));
4733  }
4734  __ vmov(dbl_scratch.low(), src);
4735  __ vcvt_f64_s32(dbl_scratch, dbl_scratch.low());
4736  } else {
4737  __ vmov(dbl_scratch.low(), src);
4738  __ vcvt_f64_u32(dbl_scratch, dbl_scratch.low());
4739  }
4740 
4741  if (FLAG_inline_new) {
4742  __ LoadRoot(tmp3, Heap::kHeapNumberMapRootIndex);
4743  __ AllocateHeapNumber(dst, tmp1, tmp2, tmp3, &slow, DONT_TAG_RESULT);
4744  __ b(&done);
4745  }
4746 
4747  // Slow case: Call the runtime system to do the number allocation.
4748  __ bind(&slow);
4749  {
4750  // TODO(3095996): Put a valid pointer value in the stack slot where the
4751  // result register is stored, as this register is in the pointer map, but
4752  // contains an integer value.
4753  __ mov(dst, Operand::Zero());
4754 
4755  // Preserve the value of all registers.
4756  PushSafepointRegistersScope scope(this);
4757 
4758  // NumberTagI and NumberTagD use the context from the frame, rather than
4759  // the environment's HContext or HInlinedContext value.
4760  // They only call Runtime::kAllocateHeapNumber.
4761  // The corresponding HChange instructions are added in a phase that does
4762  // not have easy access to the local context.
4764  __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
4766  instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
4767  __ sub(r0, r0, Operand(kHeapObjectTag));
4768  __ StoreToSafepointRegisterSlot(r0, dst);
4769  }
4770 
4771  // Done. Put the value in dbl_scratch into the value of the allocated heap
4772  // number.
4773  __ bind(&done);
4774  __ vstr(dbl_scratch, dst, HeapNumber::kValueOffset);
4775  __ add(dst, dst, Operand(kHeapObjectTag));
4776 }
LowDwVfpRegister double_scratch0()

References __, v8::internal::cp, v8::internal::DONT_TAG_RESULT, double_scratch0(), v8::internal::fp, v8::internal::Register::is(), v8::internal::StandardFrameConstants::kContextOffset, v8::internal::kHeapObjectTag, v8::internal::HeapNumber::kValueOffset, v8::internal::LowDwVfpRegister::low(), v8::internal::LInstruction::pointer_map(), v8::internal::r0, RecordSafepointWithRegisters(), v8::internal::LInstruction::result(), scope(), scratch0(), SIGNED_INT32, and ToRegister().

+ Here is the call graph for this function:

◆ DoDeferredNumberTagIU() [4/6]

void v8::internal::LCodeGen::DoDeferredNumberTagIU ( LInstruction instr,
LOperand value,
LOperand temp1,
LOperand temp2,
IntegerSignedness  signedness 
)

◆ DoDeferredNumberTagIU() [5/6]

void v8::internal::LCodeGen::DoDeferredNumberTagIU ( LInstruction instr,
LOperand value,
LOperand temp1,
LOperand temp2,
IntegerSignedness  signedness 
)

◆ DoDeferredNumberTagIU() [6/6]

void v8::internal::LCodeGen::DoDeferredNumberTagIU ( LInstruction instr,
LOperand value,
LOperand temp1,
LOperand temp2,
IntegerSignedness  signedness 
)

◆ DoDeferredNumberTagU()

void v8::internal::LCodeGen::DoDeferredNumberTagU ( LInstruction instr,
LOperand value,
LOperand temp1,
LOperand temp2 
)
private

Definition at line 4572 of file lithium-codegen-arm64.cc.

4575  {
4576  Label slow, convert_and_store;
4577  Register src = ToRegister32(value);
4578  Register dst = ToRegister(instr->result());
4579  Register scratch1 = ToRegister(temp1);
4580 
4581  if (FLAG_inline_new) {
4582  Register scratch2 = ToRegister(temp2);
4583  __ AllocateHeapNumber(dst, &slow, scratch1, scratch2);
4584  __ B(&convert_and_store);
4585  }
4586 
4587  // Slow case: call the runtime system to do the number allocation.
4588  __ Bind(&slow);
4589  // TODO(3095996): Put a valid pointer value in the stack slot where the result
4590  // register is stored, as this register is in the pointer map, but contains an
4591  // integer value.
4592  __ Mov(dst, 0);
4593  {
4594  // Preserve the value of all registers.
4595  PushSafepointRegistersScope scope(this);
4596 
4597  // NumberTagU and NumberTagD use the context from the frame, rather than
4598  // the environment's HContext or HInlinedContext value.
4599  // They only call Runtime::kAllocateHeapNumber.
4600  // The corresponding HChange instructions are added in a phase that does
4601  // not have easy access to the local context.
4603  __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
4605  instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
4606  __ StoreToSafepointRegisterSlot(x0, dst);
4607  }
4608 
4609  // Convert number to floating point and store in the newly allocated heap
4610  // number.
4611  __ Bind(&convert_and_store);
4612  DoubleRegister dbl_scratch = double_scratch();
4613  __ Ucvtf(dbl_scratch, src);
4614  __ Str(dbl_scratch, FieldMemOperand(dst, HeapNumber::kValueOffset));
4615 }

References __, v8::internal::B, v8::internal::cp, double_scratch(), v8::internal::FieldMemOperand(), v8::internal::fp, v8::internal::StandardFrameConstants::kContextOffset, v8::internal::HeapNumber::kValueOffset, v8::internal::LInstruction::pointer_map(), RecordSafepointWithRegisters(), v8::internal::LInstruction::result(), scope(), scratch1(), ToRegister(), and ToRegister32().

+ Here is the call graph for this function:

◆ DoDeferredStackCheck() [1/7]

void v8::internal::LCodeGen::DoDeferredStackCheck ( LStackCheck *  instr)

Definition at line 5676 of file lithium-codegen-arm.cc.

5676  {
5677  PushSafepointRegistersScope scope(this);
5678  LoadContextFromDeferred(instr->context());
5679  __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
5682  DCHECK(instr->HasEnvironment());
5683  LEnvironment* env = instr->environment();
5684  safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
5685 }

References __, DCHECK, LEnvironment, LoadContextFromDeferred(), RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS, RecordSafepointWithLazyDeopt(), safepoints_, and scope().

+ Here is the call graph for this function:

◆ DoDeferredStackCheck() [2/7]

void v8::internal::LCodeGen::DoDeferredStackCheck ( LStackCheck *  instr)
private

◆ DoDeferredStackCheck() [3/7]

void v8::internal::LCodeGen::DoDeferredStackCheck ( LStackCheck *  instr)

◆ DoDeferredStackCheck() [4/7]

void v8::internal::LCodeGen::DoDeferredStackCheck ( LStackCheck *  instr)

◆ DoDeferredStackCheck() [5/7]

void v8::internal::LCodeGen::DoDeferredStackCheck ( LStackCheck *  instr)

◆ DoDeferredStackCheck() [6/7]

void v8::internal::LCodeGen::DoDeferredStackCheck ( LStackCheck *  instr)

◆ DoDeferredStackCheck() [7/7]

void v8::internal::LCodeGen::DoDeferredStackCheck ( LStackCheck *  instr)

◆ DoDeferredStringCharCodeAt() [1/7]

void v8::internal::LCodeGen::DoDeferredStringCharCodeAt ( LStringCharCodeAt *  instr)

Definition at line 4550 of file lithium-codegen-arm.cc.

4550  {
4551  Register string = ToRegister(instr->string());
4552  Register result = ToRegister(instr->result());
4553  Register scratch = scratch0();
4554 
4555  // TODO(3095996): Get rid of this. For now, we need to make the
4556  // result register contain a valid pointer because it is already
4557  // contained in the register pointer map.
4558  __ mov(result, Operand::Zero());
4559 
4560  PushSafepointRegistersScope scope(this);
4561  __ push(string);
4562  // Push the index as a smi. This is safe because of the checks in
4563  // DoStringCharCodeAt above.
4564  if (instr->index()->IsConstantOperand()) {
4565  int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
4566  __ mov(scratch, Operand(Smi::FromInt(const_index)));
4567  __ push(scratch);
4568  } else {
4569  Register index = ToRegister(instr->index());
4570  __ SmiTag(index);
4571  __ push(index);
4572  }
4573  CallRuntimeFromDeferred(Runtime::kStringCharCodeAtRT, 2, instr,
4574  instr->context());
4575  __ AssertSmi(r0);
4576  __ SmiUntag(r0);
4577  __ StoreToSafepointRegisterSlot(r0, result);
4578 }

References __, CallRuntimeFromDeferred(), v8::internal::Smi::FromInt(), v8::internal::r0, scope(), scratch0(), ToInteger32(), and ToRegister().

+ Here is the call graph for this function:

◆ DoDeferredStringCharCodeAt() [2/7]

void v8::internal::LCodeGen::DoDeferredStringCharCodeAt ( LStringCharCodeAt *  instr)
private

◆ DoDeferredStringCharCodeAt() [3/7]

void v8::internal::LCodeGen::DoDeferredStringCharCodeAt ( LStringCharCodeAt *  instr)

◆ DoDeferredStringCharCodeAt() [4/7]

void v8::internal::LCodeGen::DoDeferredStringCharCodeAt ( LStringCharCodeAt *  instr)

◆ DoDeferredStringCharCodeAt() [5/7]

void v8::internal::LCodeGen::DoDeferredStringCharCodeAt ( LStringCharCodeAt *  instr)

◆ DoDeferredStringCharCodeAt() [6/7]

void v8::internal::LCodeGen::DoDeferredStringCharCodeAt ( LStringCharCodeAt *  instr)

◆ DoDeferredStringCharCodeAt() [7/7]

void v8::internal::LCodeGen::DoDeferredStringCharCodeAt ( LStringCharCodeAt *  instr)

◆ DoDeferredStringCharFromCode() [1/7]

void v8::internal::LCodeGen::DoDeferredStringCharFromCode ( LStringCharFromCode *  instr)

Definition at line 4614 of file lithium-codegen-arm.cc.

4614  {
4615  Register char_code = ToRegister(instr->char_code());
4616  Register result = ToRegister(instr->result());
4617 
4618  // TODO(3095996): Get rid of this. For now, we need to make the
4619  // result register contain a valid pointer because it is already
4620  // contained in the register pointer map.
4621  __ mov(result, Operand::Zero());
4622 
4623  PushSafepointRegistersScope scope(this);
4624  __ SmiTag(char_code);
4625  __ push(char_code);
4626  CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr, instr->context());
4627  __ StoreToSafepointRegisterSlot(r0, result);
4628 }

References __, CallRuntimeFromDeferred(), v8::internal::r0, scope(), and ToRegister().

+ Here is the call graph for this function:

◆ DoDeferredStringCharFromCode() [2/7]

void v8::internal::LCodeGen::DoDeferredStringCharFromCode ( LStringCharFromCode *  instr)
private

◆ DoDeferredStringCharFromCode() [3/7]

void v8::internal::LCodeGen::DoDeferredStringCharFromCode ( LStringCharFromCode *  instr)

◆ DoDeferredStringCharFromCode() [4/7]

void v8::internal::LCodeGen::DoDeferredStringCharFromCode ( LStringCharFromCode *  instr)

◆ DoDeferredStringCharFromCode() [5/7]

void v8::internal::LCodeGen::DoDeferredStringCharFromCode ( LStringCharFromCode *  instr)

◆ DoDeferredStringCharFromCode() [6/7]

void v8::internal::LCodeGen::DoDeferredStringCharFromCode ( LStringCharFromCode *  instr)

◆ DoDeferredStringCharFromCode() [7/7]

void v8::internal::LCodeGen::DoDeferredStringCharFromCode ( LStringCharFromCode *  instr)

◆ DoDeferredTaggedToI() [1/7]

void v8::internal::LCodeGen::DoDeferredTaggedToI ( LTaggedToI *  instr)

Definition at line 4926 of file lithium-codegen-arm.cc.

4926  {
4927  Register input_reg = ToRegister(instr->value());
4928  Register scratch1 = scratch0();
4929  Register scratch2 = ToRegister(instr->temp());
4930  LowDwVfpRegister double_scratch = double_scratch0();
4931  DwVfpRegister double_scratch2 = ToDoubleRegister(instr->temp2());
4932 
4933  DCHECK(!scratch1.is(input_reg) && !scratch1.is(scratch2));
4934  DCHECK(!scratch2.is(input_reg) && !scratch2.is(scratch1));
4935 
4936  Label done;
4937 
4938  // The input was optimistically untagged; revert it.
4939  // The carry flag is set when we reach this deferred code as we just executed
4940  // SmiUntag(heap_object, SetCC)
4941  STATIC_ASSERT(kHeapObjectTag == 1);
4942  __ adc(scratch2, input_reg, Operand(input_reg));
4943 
4944  // Heap number map check.
4945  __ ldr(scratch1, FieldMemOperand(scratch2, HeapObject::kMapOffset));
4946  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
4947  __ cmp(scratch1, Operand(ip));
4948 
4949  if (instr->truncating()) {
4950  // Performs a truncating conversion of a floating point number as used by
4951  // the JS bitwise operations.
4952  Label no_heap_number, check_bools, check_false;
4953  __ b(ne, &no_heap_number);
4954  __ TruncateHeapNumberToI(input_reg, scratch2);
4955  __ b(&done);
4956 
4957  // Check for Oddballs. Undefined/False is converted to zero and True to one
4958  // for truncating conversions.
4959  __ bind(&no_heap_number);
4960  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
4961  __ cmp(scratch2, Operand(ip));
4962  __ b(ne, &check_bools);
4963  __ mov(input_reg, Operand::Zero());
4964  __ b(&done);
4965 
4966  __ bind(&check_bools);
4967  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
4968  __ cmp(scratch2, Operand(ip));
4969  __ b(ne, &check_false);
4970  __ mov(input_reg, Operand(1));
4971  __ b(&done);
4972 
4973  __ bind(&check_false);
4974  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
4975  __ cmp(scratch2, Operand(ip));
4976  DeoptimizeIf(ne, instr, "not a heap number/undefined/true/false");
4977  __ mov(input_reg, Operand::Zero());
4978  } else {
4979  DeoptimizeIf(ne, instr, "not a heap number");
4980 
4981  __ sub(ip, scratch2, Operand(kHeapObjectTag));
4982  __ vldr(double_scratch2, ip, HeapNumber::kValueOffset);
4983  __ TryDoubleToInt32Exact(input_reg, double_scratch2, double_scratch);
4984  DeoptimizeIf(ne, instr, "lost precision or NaN");
4985 
4986  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
4987  __ cmp(input_reg, Operand::Zero());
4988  __ b(ne, &done);
4989  __ VmovHigh(scratch1, double_scratch2);
4990  __ tst(scratch1, Operand(HeapNumber::kSignMask));
4991  DeoptimizeIf(ne, instr, "minus zero");
4992  }
4993  }
4994  __ bind(&done);
4995 }

References __, DCHECK, DeoptimizeIf(), double_scratch(), double_scratch0(), v8::internal::FieldMemOperand(), v8::internal::ip, v8::internal::Register::is(), v8::internal::HValue::kBailoutOnMinusZero, v8::internal::kHeapObjectTag, v8::internal::HeapObject::kMapOffset, v8::internal::HeapNumber::kSignMask, v8::internal::HeapNumber::kValueOffset, v8::internal::ne, scratch0(), scratch1(), v8::internal::STATIC_ASSERT(), ToDoubleRegister(), and ToRegister().

+ Here is the call graph for this function:

◆ DoDeferredTaggedToI() [2/7]

void v8::internal::LCodeGen::DoDeferredTaggedToI ( LTaggedToI *  instr)

◆ DoDeferredTaggedToI() [3/7]

void v8::internal::LCodeGen::DoDeferredTaggedToI ( LTaggedToI *  instr)

◆ DoDeferredTaggedToI() [4/7]

void v8::internal::LCodeGen::DoDeferredTaggedToI ( LTaggedToI *  instr,
Label *  done 
)

◆ DoDeferredTaggedToI() [5/7]

void v8::internal::LCodeGen::DoDeferredTaggedToI ( LTaggedToI *  instr,
Label *  done 
)

◆ DoDeferredTaggedToI() [6/7]

void v8::internal::LCodeGen::DoDeferredTaggedToI ( LTaggedToI *  instr,
Label *  done 
)

◆ DoDeferredTaggedToI() [7/7]

void v8::internal::LCodeGen::DoDeferredTaggedToI ( LTaggedToI *  instr,
LOperand value,
LOperand temp1,
LOperand temp2 
)
private

Definition at line 5589 of file lithium-codegen-arm64.cc.

5592  {
5593  Register input = ToRegister(value);
5594  Register scratch1 = ToRegister(temp1);
5595  DoubleRegister dbl_scratch1 = double_scratch();
5596 
5597  Label done;
5598 
5599  if (instr->truncating()) {
5600  Register output = ToRegister(instr->result());
5601  Label check_bools;
5602 
5603  // If it's not a heap number, jump to undefined check.
5604  __ JumpIfNotHeapNumber(input, &check_bools);
5605 
5606  // A heap number: load value and convert to int32 using truncating function.
5607  __ TruncateHeapNumberToI(output, input);
5608  __ B(&done);
5609 
5610  __ Bind(&check_bools);
5611 
5612  Register true_root = output;
5613  Register false_root = scratch1;
5614  __ LoadTrueFalseRoots(true_root, false_root);
5615  __ Cmp(input, true_root);
5616  __ Cset(output, eq);
5617  __ Ccmp(input, false_root, ZFlag, ne);
5618  __ B(eq, &done);
5619 
5620  // Output contains zero, undefined is converted to zero for truncating
5621  // conversions.
5622  DeoptimizeIfNotRoot(input, Heap::kUndefinedValueRootIndex, instr,
5623  "not a heap number/undefined/true/false");
5624  } else {
5625  Register output = ToRegister32(instr->result());
5626  DoubleRegister dbl_scratch2 = ToDoubleRegister(temp2);
5627 
5628  DeoptimizeIfNotHeapNumber(input, instr);
5629 
5630  // A heap number: load value and convert to int32 using non-truncating
5631  // function. If the result is out of range, branch to deoptimize.
5632  __ Ldr(dbl_scratch1, FieldMemOperand(input, HeapNumber::kValueOffset));
5633  __ TryRepresentDoubleAsInt32(output, dbl_scratch1, dbl_scratch2);
5634  DeoptimizeIf(ne, instr, "lost precision or NaN");
5635 
5636  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
5637  __ Cmp(output, 0);
5638  __ B(ne, &done);
5639  __ Fmov(scratch1, dbl_scratch1);
5640  DeoptimizeIfNegative(scratch1, instr, "minus zero");
5641  }
5642  }
5643  __ Bind(&done);
5644 }
void DeoptimizeIfNegative(Register rt, LInstruction *instr, const char *detail)
void DeoptimizeIfNotRoot(Register rt, Heap::RootListIndex index, LInstruction *instr, const char *detail)

References __, v8::internal::B, DeoptimizeIf(), DeoptimizeIfNegative(), DeoptimizeIfNotHeapNumber(), DeoptimizeIfNotRoot(), double_scratch(), v8::internal::eq, v8::internal::FieldMemOperand(), v8::internal::HValue::kBailoutOnMinusZero, v8::internal::HeapNumber::kValueOffset, v8::internal::ne, scratch1(), ToDoubleRegister(), ToRegister(), ToRegister32(), and v8::internal::ZFlag.

+ Here is the call graph for this function:

◆ DoGap() [1/7]

void v8::internal::LCodeGen::DoGap ( LGap instr)

Definition at line 1088 of file lithium-codegen-arm.cc.

1088  {
1089  for (int i = LGap::FIRST_INNER_POSITION;
1090  i <= LGap::LAST_INNER_POSITION;
1091  i++) {
1092  LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
1093  LParallelMove* move = gap->GetParallelMove(inner_pos);
1094  if (move != NULL) DoParallelMove(move);
1095  }
1096 }
void DoParallelMove(LParallelMove *move)

References DoParallelMove(), v8::internal::LGap::FIRST_INNER_POSITION, v8::internal::LGap::GetParallelMove(), v8::internal::LGap::LAST_INNER_POSITION, and NULL.

+ Here is the call graph for this function:

◆ DoGap() [2/7]

void v8::internal::LCodeGen::DoGap ( LGap instr)
private

◆ DoGap() [3/7]

void v8::internal::LCodeGen::DoGap ( LGap instr)

◆ DoGap() [4/7]

void v8::internal::LCodeGen::DoGap ( LGap instr)

◆ DoGap() [5/7]

void v8::internal::LCodeGen::DoGap ( LGap instr)

◆ DoGap() [6/7]

void v8::internal::LCodeGen::DoGap ( LGap instr)

◆ DoGap() [7/7]

void v8::internal::LCodeGen::DoGap ( LGap instr)

◆ DoLoadKeyedExternalArray() [1/6]

void v8::internal::LCodeGen::DoLoadKeyedExternalArray ( LLoadKeyed instr)
private

Definition at line 3214 of file lithium-codegen-arm.cc.

3214  {
3215  Register external_pointer = ToRegister(instr->elements());
3216  Register key = no_reg;
3217  ElementsKind elements_kind = instr->elements_kind();
3218  bool key_is_constant = instr->key()->IsConstantOperand();
3219  int constant_key = 0;
3220  if (key_is_constant) {
3221  constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
3222  if (constant_key & 0xF0000000) {
3223  Abort(kArrayIndexConstantValueTooBig);
3224  }
3225  } else {
3226  key = ToRegister(instr->key());
3227  }
3228  int element_size_shift = ElementsKindToShiftSize(elements_kind);
3229  int shift_size = (instr->hydrogen()->key()->representation().IsSmi())
3230  ? (element_size_shift - kSmiTagSize) : element_size_shift;
3231  int base_offset = instr->base_offset();
3232 
3233  if (elements_kind == EXTERNAL_FLOAT32_ELEMENTS ||
3234  elements_kind == FLOAT32_ELEMENTS ||
3235  elements_kind == EXTERNAL_FLOAT64_ELEMENTS ||
3236  elements_kind == FLOAT64_ELEMENTS) {
3237  int base_offset = instr->base_offset();
3238  DwVfpRegister result = ToDoubleRegister(instr->result());
3239  Operand operand = key_is_constant
3240  ? Operand(constant_key << element_size_shift)
3241  : Operand(key, LSL, shift_size);
3242  __ add(scratch0(), external_pointer, operand);
3243  if (elements_kind == EXTERNAL_FLOAT32_ELEMENTS ||
3244  elements_kind == FLOAT32_ELEMENTS) {
3245  __ vldr(double_scratch0().low(), scratch0(), base_offset);
3246  __ vcvt_f64_f32(result, double_scratch0().low());
3247  } else { // i.e. elements_kind == EXTERNAL_DOUBLE_ELEMENTS
3248  __ vldr(result, scratch0(), base_offset);
3249  }
3250  } else {
3251  Register result = ToRegister(instr->result());
3252  MemOperand mem_operand = PrepareKeyedOperand(
3253  key, external_pointer, key_is_constant, constant_key,
3254  element_size_shift, shift_size, base_offset);
3255  switch (elements_kind) {
3256  case EXTERNAL_INT8_ELEMENTS:
3257  case INT8_ELEMENTS:
3258  __ ldrsb(result, mem_operand);
3259  break;
3260  case EXTERNAL_UINT8_CLAMPED_ELEMENTS:
3261  case EXTERNAL_UINT8_ELEMENTS:
3262  case UINT8_ELEMENTS:
3263  case UINT8_CLAMPED_ELEMENTS:
3264  __ ldrb(result, mem_operand);
3265  break;
3266  case EXTERNAL_INT16_ELEMENTS:
3267  case INT16_ELEMENTS:
3268  __ ldrsh(result, mem_operand);
3269  break;
3270  case EXTERNAL_UINT16_ELEMENTS:
3271  case UINT16_ELEMENTS:
3272  __ ldrh(result, mem_operand);
3273  break;
3274  case EXTERNAL_INT32_ELEMENTS:
3275  case INT32_ELEMENTS:
3276  __ ldr(result, mem_operand);
3277  break;
3278  case EXTERNAL_UINT32_ELEMENTS:
3279  case UINT32_ELEMENTS:
3280  __ ldr(result, mem_operand);
3281  if (!instr->hydrogen()->CheckFlag(HInstruction::kUint32)) {
3282  __ cmp(result, Operand(0x80000000));
3283  DeoptimizeIf(cs, instr, "negative value");
3284  }
3285  break;
3286  case FLOAT32_ELEMENTS:
3287  case FLOAT64_ELEMENTS:
3288  case EXTERNAL_FLOAT32_ELEMENTS:
3289  case EXTERNAL_FLOAT64_ELEMENTS:
3290  case FAST_HOLEY_DOUBLE_ELEMENTS:
3291  case FAST_HOLEY_ELEMENTS:
3292  case FAST_HOLEY_SMI_ELEMENTS:
3293  case FAST_DOUBLE_ELEMENTS:
3294  case FAST_ELEMENTS:
3295  case FAST_SMI_ELEMENTS:
3296  case DICTIONARY_ELEMENTS:
3297  case SLOPPY_ARGUMENTS_ELEMENTS:
3298  UNREACHABLE();
3299  break;
3300  }
3301  }
3302 }
MemOperand PrepareKeyedOperand(Register key, Register base, bool key_is_constant, int constant_key, int element_size, int shift_size, int base_offset)
const int kSmiTagSize
Definition: v8.h:5743
@ EXTERNAL_UINT16_ELEMENTS
Definition: elements-kind.h:36
@ UINT8_CLAMPED_ELEMENTS
Definition: elements-kind.h:52
@ EXTERNAL_INT16_ELEMENTS
Definition: elements-kind.h:35
@ EXTERNAL_UINT8_ELEMENTS
Definition: elements-kind.h:34
@ EXTERNAL_INT32_ELEMENTS
Definition: elements-kind.h:37
@ FAST_HOLEY_DOUBLE_ELEMENTS
Definition: elements-kind.h:27
@ SLOPPY_ARGUMENTS_ELEMENTS
Definition: elements-kind.h:31
@ EXTERNAL_INT8_ELEMENTS
Definition: elements-kind.h:33
@ EXTERNAL_FLOAT32_ELEMENTS
Definition: elements-kind.h:39
@ EXTERNAL_FLOAT64_ELEMENTS
Definition: elements-kind.h:40
@ FAST_HOLEY_SMI_ELEMENTS
Definition: elements-kind.h:17
@ EXTERNAL_UINT32_ELEMENTS
Definition: elements-kind.h:38
@ EXTERNAL_UINT8_CLAMPED_ELEMENTS
Definition: elements-kind.h:41
int ElementsKindToShiftSize(ElementsKind elements_kind)

References __, v8::internal::LLoadKeyed< T >::base_offset(), v8::internal::cs, DeoptimizeIf(), v8::internal::DICTIONARY_ELEMENTS, double_scratch0(), v8::internal::LLoadKeyed< T >::elements(), v8::internal::LLoadKeyed< T >::elements_kind(), v8::internal::ElementsKindToShiftSize(), v8::internal::EXTERNAL_FLOAT32_ELEMENTS, v8::internal::EXTERNAL_FLOAT64_ELEMENTS, v8::internal::EXTERNAL_INT16_ELEMENTS, v8::internal::EXTERNAL_INT32_ELEMENTS, v8::internal::EXTERNAL_INT8_ELEMENTS, v8::internal::EXTERNAL_UINT16_ELEMENTS, v8::internal::EXTERNAL_UINT32_ELEMENTS, v8::internal::EXTERNAL_UINT8_CLAMPED_ELEMENTS, v8::internal::EXTERNAL_UINT8_ELEMENTS, v8::internal::FAST_DOUBLE_ELEMENTS, v8::internal::FAST_ELEMENTS, v8::internal::FAST_HOLEY_DOUBLE_ELEMENTS, v8::internal::FAST_HOLEY_ELEMENTS, v8::internal::FAST_HOLEY_SMI_ELEMENTS, v8::internal::FAST_SMI_ELEMENTS, v8::internal::FLOAT32_ELEMENTS, v8::internal::FLOAT64_ELEMENTS, v8::internal::INT16_ELEMENTS, v8::internal::INT32_ELEMENTS, v8::internal::INT8_ELEMENTS, v8::internal::LLoadKeyed< T >::key(), v8::internal::kSmiTagSize, v8::internal::HValue::kUint32, v8::internal::LSL, v8::internal::no_reg, PrepareKeyedOperand(), v8::internal::LTemplateResultInstruction< R >::result(), scratch0(), v8::internal::SLOPPY_ARGUMENTS_ELEMENTS, ToDoubleRegister(), ToInteger32(), ToRegister(), v8::internal::UINT16_ELEMENTS, v8::internal::UINT32_ELEMENTS, v8::internal::UINT8_CLAMPED_ELEMENTS, v8::internal::UINT8_ELEMENTS, and UNREACHABLE.

+ Here is the call graph for this function:

◆ DoLoadKeyedExternalArray() [2/6]

void v8::internal::LCodeGen::DoLoadKeyedExternalArray ( LLoadKeyed instr)
private

◆ DoLoadKeyedExternalArray() [3/6]

void v8::internal::LCodeGen::DoLoadKeyedExternalArray ( LLoadKeyed instr)
private

◆ DoLoadKeyedExternalArray() [4/6]

void v8::internal::LCodeGen::DoLoadKeyedExternalArray ( LLoadKeyed instr)
private

◆ DoLoadKeyedExternalArray() [5/6]

void v8::internal::LCodeGen::DoLoadKeyedExternalArray ( LLoadKeyed instr)
private

◆ DoLoadKeyedExternalArray() [6/6]

void v8::internal::LCodeGen::DoLoadKeyedExternalArray ( LLoadKeyed instr)
private

◆ DoLoadKeyedFixedArray() [1/6]

void v8::internal::LCodeGen::DoLoadKeyedFixedArray ( LLoadKeyed instr)
private

Definition at line 3341 of file lithium-codegen-arm.cc.

3341  {
3342  Register elements = ToRegister(instr->elements());
3343  Register result = ToRegister(instr->result());
3344  Register scratch = scratch0();
3345  Register store_base = scratch;
3346  int offset = instr->base_offset();
3347 
3348  if (instr->key()->IsConstantOperand()) {
3349  LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
3350  offset += ToInteger32(const_operand) * kPointerSize;
3351  store_base = elements;
3352  } else {
3353  Register key = ToRegister(instr->key());
3354  // Even though the HLoadKeyed instruction forces the input
3355  // representation for the key to be an integer, the input gets replaced
3356  // during bound check elimination with the index argument to the bounds
3357  // check, which can be tagged, so that case must be handled here, too.
3358  if (instr->hydrogen()->key()->representation().IsSmi()) {
3359  __ add(scratch, elements, Operand::PointerOffsetFromSmiKey(key));
3360  } else {
3361  __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
3362  }
3363  }
3364  __ ldr(result, MemOperand(store_base, offset));
3365 
3366  // Check for the hole value.
3367  if (instr->hydrogen()->RequiresHoleCheck()) {
3368  if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) {
3369  __ SmiTst(result);
3370  DeoptimizeIf(ne, instr, "not a Smi");
3371  } else {
3372  __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
3373  __ cmp(result, scratch);
3374  DeoptimizeIf(eq, instr, "hole");
3375  }
3376  }
3377 }
const int kPointerSizeLog2
Definition: globals.h:147
bool IsFastSmiElementsKind(ElementsKind kind)

References __, v8::internal::LLoadKeyed< T >::base_offset(), DeoptimizeIf(), v8::internal::LLoadKeyed< T >::elements(), v8::internal::LLoadKeyed< T >::elements_kind(), v8::internal::eq, v8::internal::IsFastSmiElementsKind(), v8::internal::LLoadKeyed< T >::key(), v8::internal::kPointerSize, v8::internal::kPointerSizeLog2, v8::internal::LSL, v8::internal::ne, v8::internal::LTemplateResultInstruction< R >::result(), scratch0(), ToInteger32(), and ToRegister().

+ Here is the call graph for this function:

◆ DoLoadKeyedFixedArray() [2/6]

void v8::internal::LCodeGen::DoLoadKeyedFixedArray ( LLoadKeyed instr)
private

◆ DoLoadKeyedFixedArray() [3/6]

void v8::internal::LCodeGen::DoLoadKeyedFixedArray ( LLoadKeyed instr)
private

◆ DoLoadKeyedFixedArray() [4/6]

void v8::internal::LCodeGen::DoLoadKeyedFixedArray ( LLoadKeyed instr)
private

◆ DoLoadKeyedFixedArray() [5/6]

void v8::internal::LCodeGen::DoLoadKeyedFixedArray ( LLoadKeyed instr)
private

◆ DoLoadKeyedFixedArray() [6/6]

void v8::internal::LCodeGen::DoLoadKeyedFixedArray ( LLoadKeyed instr)
private

◆ DoLoadKeyedFixedDoubleArray() [1/6]

void v8::internal::LCodeGen::DoLoadKeyedFixedDoubleArray ( LLoadKeyed instr)
private

Definition at line 3305 of file lithium-codegen-arm.cc.

3305  {
3306  Register elements = ToRegister(instr->elements());
3307  bool key_is_constant = instr->key()->IsConstantOperand();
3308  Register key = no_reg;
3309  DwVfpRegister result = ToDoubleRegister(instr->result());
3310  Register scratch = scratch0();
3311 
3312  int element_size_shift = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
3313 
3314  int base_offset = instr->base_offset();
3315  if (key_is_constant) {
3316  int constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
3317  if (constant_key & 0xF0000000) {
3318  Abort(kArrayIndexConstantValueTooBig);
3319  }
3320  base_offset += constant_key * kDoubleSize;
3321  }
3322  __ add(scratch, elements, Operand(base_offset));
3323 
3324  if (!key_is_constant) {
3325  key = ToRegister(instr->key());
3326  int shift_size = (instr->hydrogen()->key()->representation().IsSmi())
3327  ? (element_size_shift - kSmiTagSize) : element_size_shift;
3328  __ add(scratch, scratch, Operand(key, LSL, shift_size));
3329  }
3330 
3331  __ vldr(result, scratch, 0);
3332 
3333  if (instr->hydrogen()->RequiresHoleCheck()) {
3334  __ ldr(scratch, MemOperand(scratch, sizeof(kHoleNanLower32)));
3335  __ cmp(scratch, Operand(kHoleNanUpper32));
3336  DeoptimizeIf(eq, instr, "hole");
3337  }
3338 }
const int kDoubleSize
Definition: globals.h:127
const uint32_t kHoleNanLower32
Definition: globals.h:657
const uint32_t kHoleNanUpper32
Definition: globals.h:656

References __, v8::internal::LLoadKeyed< T >::base_offset(), DeoptimizeIf(), v8::internal::LLoadKeyed< T >::elements(), v8::internal::ElementsKindToShiftSize(), v8::internal::eq, v8::internal::FAST_DOUBLE_ELEMENTS, v8::internal::kDoubleSize, v8::internal::LLoadKeyed< T >::key(), v8::internal::kHoleNanLower32, v8::internal::kHoleNanUpper32, v8::internal::kSmiTagSize, v8::internal::LSL, v8::internal::no_reg, v8::internal::LTemplateResultInstruction< R >::result(), scratch0(), ToDoubleRegister(), ToInteger32(), and ToRegister().

+ Here is the call graph for this function:

◆ DoLoadKeyedFixedDoubleArray() [2/6]

void v8::internal::LCodeGen::DoLoadKeyedFixedDoubleArray ( LLoadKeyed instr)
private

◆ DoLoadKeyedFixedDoubleArray() [3/6]

void v8::internal::LCodeGen::DoLoadKeyedFixedDoubleArray ( LLoadKeyed instr)
private

◆ DoLoadKeyedFixedDoubleArray() [4/6]

void v8::internal::LCodeGen::DoLoadKeyedFixedDoubleArray ( LLoadKeyed instr)
private

◆ DoLoadKeyedFixedDoubleArray() [5/6]

void v8::internal::LCodeGen::DoLoadKeyedFixedDoubleArray ( LLoadKeyed instr)
private

◆ DoLoadKeyedFixedDoubleArray() [6/6]

void v8::internal::LCodeGen::DoLoadKeyedFixedDoubleArray ( LLoadKeyed instr)
private

◆ DoParallelMove() [1/6]

void v8::internal::LCodeGen::DoParallelMove ( LParallelMove *  move)

Definition at line 1083 of file lithium-codegen-arm.cc.

1083  {
1084  resolver_.Resolve(move);
1085 }

References resolver_.

Referenced by DoGap().

+ Here is the caller graph for this function:

◆ DoParallelMove() [2/6]

void v8::internal::LCodeGen::DoParallelMove ( LParallelMove *  move)

◆ DoParallelMove() [3/6]

void v8::internal::LCodeGen::DoParallelMove ( LParallelMove *  move)

◆ DoParallelMove() [4/6]

void v8::internal::LCodeGen::DoParallelMove ( LParallelMove *  move)

◆ DoParallelMove() [5/6]

void v8::internal::LCodeGen::DoParallelMove ( LParallelMove *  move)

◆ DoParallelMove() [6/6]

void v8::internal::LCodeGen::DoParallelMove ( LParallelMove *  move)

◆ DoStoreKeyedExternalArray() [1/6]

void v8::internal::LCodeGen::DoStoreKeyedExternalArray ( LStoreKeyed instr)
private

Definition at line 4266 of file lithium-codegen-arm.cc.

4266  {
4267  Register external_pointer = ToRegister(instr->elements());
4268  Register key = no_reg;
4269  ElementsKind elements_kind = instr->elements_kind();
4270  bool key_is_constant = instr->key()->IsConstantOperand();
4271  int constant_key = 0;
4272  if (key_is_constant) {
4273  constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
4274  if (constant_key & 0xF0000000) {
4275  Abort(kArrayIndexConstantValueTooBig);
4276  }
4277  } else {
4278  key = ToRegister(instr->key());
4279  }
4280  int element_size_shift = ElementsKindToShiftSize(elements_kind);
4281  int shift_size = (instr->hydrogen()->key()->representation().IsSmi())
4282  ? (element_size_shift - kSmiTagSize) : element_size_shift;
4283  int base_offset = instr->base_offset();
4284 
4285  if (elements_kind == EXTERNAL_FLOAT32_ELEMENTS ||
4286  elements_kind == FLOAT32_ELEMENTS ||
4287  elements_kind == EXTERNAL_FLOAT64_ELEMENTS ||
4288  elements_kind == FLOAT64_ELEMENTS) {
4289  Register address = scratch0();
4290  DwVfpRegister value(ToDoubleRegister(instr->value()));
4291  if (key_is_constant) {
4292  if (constant_key != 0) {
4293  __ add(address, external_pointer,
4294  Operand(constant_key << element_size_shift));
4295  } else {
4296  address = external_pointer;
4297  }
4298  } else {
4299  __ add(address, external_pointer, Operand(key, LSL, shift_size));
4300  }
4301  if (elements_kind == EXTERNAL_FLOAT32_ELEMENTS ||
4302  elements_kind == FLOAT32_ELEMENTS) {
4303  __ vcvt_f32_f64(double_scratch0().low(), value);
4304  __ vstr(double_scratch0().low(), address, base_offset);
4305  } else { // Storing doubles, not floats.
4306  __ vstr(value, address, base_offset);
4307  }
4308  } else {
4309  Register value(ToRegister(instr->value()));
4310  MemOperand mem_operand = PrepareKeyedOperand(
4311  key, external_pointer, key_is_constant, constant_key,
4312  element_size_shift, shift_size,
4313  base_offset);
4314  switch (elements_kind) {
4315  case EXTERNAL_UINT8_CLAMPED_ELEMENTS:
4316  case EXTERNAL_INT8_ELEMENTS:
4317  case EXTERNAL_UINT8_ELEMENTS:
4318  case UINT8_ELEMENTS:
4319  case UINT8_CLAMPED_ELEMENTS:
4320  case INT8_ELEMENTS:
4321  __ strb(value, mem_operand);
4322  break;
4323  case EXTERNAL_INT16_ELEMENTS:
4324  case EXTERNAL_UINT16_ELEMENTS:
4325  case INT16_ELEMENTS:
4326  case UINT16_ELEMENTS:
4327  __ strh(value, mem_operand);
4328  break;
4329  case EXTERNAL_INT32_ELEMENTS:
4330  case EXTERNAL_UINT32_ELEMENTS:
4331  case INT32_ELEMENTS:
4332  case UINT32_ELEMENTS:
4333  __ str(value, mem_operand);
4334  break;
4335  case FLOAT32_ELEMENTS:
4336  case FLOAT64_ELEMENTS:
4337  case EXTERNAL_FLOAT32_ELEMENTS:
4338  case EXTERNAL_FLOAT64_ELEMENTS:
4339  case FAST_DOUBLE_ELEMENTS:
4340  case FAST_ELEMENTS:
4341  case FAST_SMI_ELEMENTS:
4342  case FAST_HOLEY_DOUBLE_ELEMENTS:
4343  case FAST_HOLEY_ELEMENTS:
4344  case FAST_HOLEY_SMI_ELEMENTS:
4345  case DICTIONARY_ELEMENTS:
4346  case SLOPPY_ARGUMENTS_ELEMENTS:
4347  UNREACHABLE();
4348  break;
4349  }
4350  }
4351 }

References __, v8::internal::LStoreKeyed< T >::base_offset(), v8::internal::DICTIONARY_ELEMENTS, double_scratch0(), v8::internal::LStoreKeyed< T >::elements(), v8::internal::LStoreKeyed< T >::elements_kind(), v8::internal::ElementsKindToShiftSize(), v8::internal::EXTERNAL_FLOAT32_ELEMENTS, v8::internal::EXTERNAL_FLOAT64_ELEMENTS, v8::internal::EXTERNAL_INT16_ELEMENTS, v8::internal::EXTERNAL_INT32_ELEMENTS, v8::internal::EXTERNAL_INT8_ELEMENTS, v8::internal::EXTERNAL_UINT16_ELEMENTS, v8::internal::EXTERNAL_UINT32_ELEMENTS, v8::internal::EXTERNAL_UINT8_CLAMPED_ELEMENTS, v8::internal::EXTERNAL_UINT8_ELEMENTS, v8::internal::FAST_DOUBLE_ELEMENTS, v8::internal::FAST_ELEMENTS, v8::internal::FAST_HOLEY_DOUBLE_ELEMENTS, v8::internal::FAST_HOLEY_ELEMENTS, v8::internal::FAST_HOLEY_SMI_ELEMENTS, v8::internal::FAST_SMI_ELEMENTS, v8::internal::FLOAT32_ELEMENTS, v8::internal::FLOAT64_ELEMENTS, v8::internal::INT16_ELEMENTS, v8::internal::INT32_ELEMENTS, v8::internal::INT8_ELEMENTS, v8::internal::LStoreKeyed< T >::key(), v8::internal::kSmiTagSize, v8::internal::LSL, v8::internal::no_reg, PrepareKeyedOperand(), scratch0(), v8::internal::SLOPPY_ARGUMENTS_ELEMENTS, ToDoubleRegister(), ToInteger32(), ToRegister(), v8::internal::UINT16_ELEMENTS, v8::internal::UINT32_ELEMENTS, v8::internal::UINT8_CLAMPED_ELEMENTS, v8::internal::UINT8_ELEMENTS, UNREACHABLE, and v8::internal::LStoreKeyed< T >::value().

+ Here is the call graph for this function:

◆ DoStoreKeyedExternalArray() [2/6]

void v8::internal::LCodeGen::DoStoreKeyedExternalArray ( LStoreKeyed instr)
private

◆ DoStoreKeyedExternalArray() [3/6]

void v8::internal::LCodeGen::DoStoreKeyedExternalArray ( LStoreKeyed instr)
private

◆ DoStoreKeyedExternalArray() [4/6]

void v8::internal::LCodeGen::DoStoreKeyedExternalArray ( LStoreKeyed instr)
private

◆ DoStoreKeyedExternalArray() [5/6]

void v8::internal::LCodeGen::DoStoreKeyedExternalArray ( LStoreKeyed instr)
private

◆ DoStoreKeyedExternalArray() [6/6]

void v8::internal::LCodeGen::DoStoreKeyedExternalArray ( LStoreKeyed instr)
private

◆ DoStoreKeyedFixedArray() [1/6]

void v8::internal::LCodeGen::DoStoreKeyedFixedArray ( LStoreKeyed instr)
private

Definition at line 4395 of file lithium-codegen-arm.cc.

4395  {
4396  Register value = ToRegister(instr->value());
4397  Register elements = ToRegister(instr->elements());
4398  Register key = instr->key()->IsRegister() ? ToRegister(instr->key())
4399  : no_reg;
4400  Register scratch = scratch0();
4401  Register store_base = scratch;
4402  int offset = instr->base_offset();
4403 
4404  // Do the store.
4405  if (instr->key()->IsConstantOperand()) {
4406  DCHECK(!instr->hydrogen()->NeedsWriteBarrier());
4407  LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
4408  offset += ToInteger32(const_operand) * kPointerSize;
4409  store_base = elements;
4410  } else {
4411  // Even though the HLoadKeyed instruction forces the input
4412  // representation for the key to be an integer, the input gets replaced
4413  // during bound check elimination with the index argument to the bounds
4414  // check, which can be tagged, so that case must be handled here, too.
4415  if (instr->hydrogen()->key()->representation().IsSmi()) {
4416  __ add(scratch, elements, Operand::PointerOffsetFromSmiKey(key));
4417  } else {
4418  __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
4419  }
4420  }
4421  __ str(value, MemOperand(store_base, offset));
4422 
4423  if (instr->hydrogen()->NeedsWriteBarrier()) {
4424  SmiCheck check_needed =
4425  instr->hydrogen()->value()->type().IsHeapObject()
4426  ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
4427  // Compute address of modified element and store it into key register.
4428  __ add(key, store_base, Operand(offset));
4429  __ RecordWrite(elements,
4430  key,
4431  value,
4432  GetLinkRegisterState(),
4433  kSaveFPRegs,
4434  EMIT_REMEMBERED_SET,
4435  check_needed,
4436  instr->hydrogen()->PointersToHereCheckForValue());
4437  }
4438 }
LinkRegisterStatus GetLinkRegisterState() const

References __, v8::internal::LStoreKeyed< T >::base_offset(), DCHECK, v8::internal::LStoreKeyed< T >::elements(), v8::internal::EMIT_REMEMBERED_SET, GetLinkRegisterState(), v8::internal::INLINE_SMI_CHECK, v8::internal::LStoreKeyed< T >::key(), v8::internal::kPointerSize, v8::internal::kPointerSizeLog2, v8::internal::kSaveFPRegs, v8::internal::LSL, v8::internal::no_reg, v8::internal::OMIT_SMI_CHECK, scratch0(), ToInteger32(), ToRegister(), and v8::internal::LStoreKeyed< T >::value().

+ Here is the call graph for this function:

◆ DoStoreKeyedFixedArray() [2/6]

void v8::internal::LCodeGen::DoStoreKeyedFixedArray ( LStoreKeyed instr)
private

◆ DoStoreKeyedFixedArray() [3/6]

void v8::internal::LCodeGen::DoStoreKeyedFixedArray ( LStoreKeyed instr)
private

◆ DoStoreKeyedFixedArray() [4/6]

void v8::internal::LCodeGen::DoStoreKeyedFixedArray ( LStoreKeyed instr)
private

◆ DoStoreKeyedFixedArray() [5/6]

void v8::internal::LCodeGen::DoStoreKeyedFixedArray ( LStoreKeyed instr)
private

◆ DoStoreKeyedFixedArray() [6/6]

void v8::internal::LCodeGen::DoStoreKeyedFixedArray ( LStoreKeyed instr)
private

◆ DoStoreKeyedFixedDoubleArray() [1/6]

void v8::internal::LCodeGen::DoStoreKeyedFixedDoubleArray ( LStoreKeyed instr)
private

Definition at line 4354 of file lithium-codegen-arm.cc.

4354  {
4355  DwVfpRegister value = ToDoubleRegister(instr->value());
4356  Register elements = ToRegister(instr->elements());
4357  Register scratch = scratch0();
4358  DwVfpRegister double_scratch = double_scratch0();
4359  bool key_is_constant = instr->key()->IsConstantOperand();
4360  int base_offset = instr->base_offset();
4361 
4362  // Calculate the effective address of the slot in the array to store the
4363  // double value.
4364  int element_size_shift = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
4365  if (key_is_constant) {
4366  int constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
4367  if (constant_key & 0xF0000000) {
4368  Abort(kArrayIndexConstantValueTooBig);
4369  }
4370  __ add(scratch, elements,
4371  Operand((constant_key << element_size_shift) + base_offset));
4372  } else {
4373  int shift_size = (instr->hydrogen()->key()->representation().IsSmi())
4374  ? (element_size_shift - kSmiTagSize) : element_size_shift;
4375  __ add(scratch, elements, Operand(base_offset));
4376  __ add(scratch, scratch,
4377  Operand(ToRegister(instr->key()), LSL, shift_size));
4378  }
4379 
4380  if (instr->NeedsCanonicalization()) {
4381  // Force a canonical NaN.
4382  if (masm()->emit_debug_code()) {
4383  __ vmrs(ip);
4384  __ tst(ip, Operand(kVFPDefaultNaNModeControlBit));
4385  __ Assert(ne, kDefaultNaNModeNotSet);
4386  }
4387  __ VFPCanonicalizeNaN(double_scratch, value);
4388  __ vstr(double_scratch, scratch, 0);
4389  } else {
4390  __ vstr(value, scratch, 0);
4391  }
4392 }
const uint32_t kVFPDefaultNaNModeControlBit

References __, v8::internal::LStoreKeyed< T >::base_offset(), double_scratch(), double_scratch0(), v8::internal::LStoreKeyed< T >::elements(), v8::internal::ElementsKindToShiftSize(), v8::internal::FAST_DOUBLE_ELEMENTS, v8::internal::ip, v8::internal::LStoreKeyed< T >::key(), v8::internal::kSmiTagSize, v8::internal::kVFPDefaultNaNModeControlBit, v8::internal::LSL, v8::internal::ne, v8::internal::LStoreKeyed< T >::NeedsCanonicalization(), scratch0(), ToDoubleRegister(), ToInteger32(), ToRegister(), and v8::internal::LStoreKeyed< T >::value().

+ Here is the call graph for this function:

◆ DoStoreKeyedFixedDoubleArray() [2/6]

void v8::internal::LCodeGen::DoStoreKeyedFixedDoubleArray ( LStoreKeyed instr)
private

◆ DoStoreKeyedFixedDoubleArray() [3/6]

void v8::internal::LCodeGen::DoStoreKeyedFixedDoubleArray ( LStoreKeyed instr)
private

◆ DoStoreKeyedFixedDoubleArray() [4/6]

void v8::internal::LCodeGen::DoStoreKeyedFixedDoubleArray ( LStoreKeyed instr)
private

◆ DoStoreKeyedFixedDoubleArray() [5/6]

void v8::internal::LCodeGen::DoStoreKeyedFixedDoubleArray ( LStoreKeyed instr)
private

◆ DoStoreKeyedFixedDoubleArray() [6/6]

void v8::internal::LCodeGen::DoStoreKeyedFixedDoubleArray ( LStoreKeyed instr)
private

◆ double_scratch()

DoubleRegister v8::internal::LCodeGen::double_scratch ( )
inlineprivate

Definition at line 120 of file lithium-codegen-arm64.h.

120 { return crankshaft_fp_scratch; }

Referenced by DoDeferredNumberTagU(), DoDeferredTaggedToI(), and DoStoreKeyedFixedDoubleArray().

+ Here is the caller graph for this function:

◆ double_scratch0() [1/5]

LowDwVfpRegister v8::internal::LCodeGen::double_scratch0 ( )
inlineprivate

Definition at line 152 of file lithium-codegen-arm.h.

152 { return kScratchDoubleReg; }
#define kScratchDoubleReg

References kScratchDoubleReg.

Referenced by DoDeferredNumberTagIU(), DoDeferredTaggedToI(), DoLoadKeyedExternalArray(), DoStoreKeyedExternalArray(), DoStoreKeyedFixedDoubleArray(), and EmitNumberUntagD().

+ Here is the caller graph for this function:

◆ double_scratch0() [2/5]

DoubleRegister v8::internal::LCodeGen::double_scratch0 ( )
inlineprivate

Definition at line 152 of file lithium-codegen-mips.h.

152 { return kLithiumScratchDouble; }
#define kLithiumScratchDouble

References kLithiumScratchDouble.

◆ double_scratch0() [3/5]

DoubleRegister v8::internal::LCodeGen::double_scratch0 ( )
inlineprivate

Definition at line 153 of file lithium-codegen-mips64.h.

153 { return kLithiumScratchDouble; }

References kLithiumScratchDouble.

◆ double_scratch0() [4/5]

XMMRegister v8::internal::LCodeGen::double_scratch0 ( ) const
inlineprivate

Definition at line 133 of file lithium-codegen-ia32.h.

133 { return xmm0; }
const XMMRegister xmm0

References v8::internal::xmm0.

◆ double_scratch0() [5/5]

XMMRegister v8::internal::LCodeGen::double_scratch0 ( ) const
inlineprivate

Definition at line 127 of file lithium-codegen-x64.h.

127 { return xmm0; }

References v8::internal::xmm0.

◆ EmitBranch() [1/7]

template<class InstrType >
void v8::internal::LCodeGen::EmitBranch ( InstrType  instr,
Condition  cc 
)
private

◆ EmitBranch() [2/7]

template<class InstrType >
void v8::internal::LCodeGen::EmitBranch ( InstrType  instr,
Condition  cc 
)
private

◆ EmitBranch() [3/7]

template<class InstrType >
void v8::internal::LCodeGen::EmitBranch ( InstrType  instr,
Condition  cc 
)
private

◆ EmitBranch() [4/7]

template<class InstrType >
void v8::internal::LCodeGen::EmitBranch ( InstrType  instr,
Condition  condition 
)
private

Definition at line 2184 of file lithium-codegen-arm.cc.

2184  {
2185  int left_block = instr->TrueDestination(chunk_);
2186  int right_block = instr->FalseDestination(chunk_);
2187 
2188  int next_block = GetNextEmittedBlock();
2189 
2190  if (right_block == left_block || condition == al) {
2191  EmitGoto(left_block);
2192  } else if (left_block == next_block) {
2193  __ b(NegateCondition(condition), chunk_->GetAssemblyLabel(right_block));
2194  } else if (right_block == next_block) {
2195  __ b(condition, chunk_->GetAssemblyLabel(left_block));
2196  } else {
2197  __ b(condition, chunk_->GetAssemblyLabel(left_block));
2198  __ b(chunk_->GetAssemblyLabel(right_block));
2199  }
2200 }

References __, v8::internal::al, EmitGoto(), and v8::internal::NegateCondition().

+ Here is the call graph for this function:

◆ EmitBranch() [5/7]

template<class InstrType >
void v8::internal::LCodeGen::EmitBranch ( InstrType  instr,
Condition  condition 
)
private

◆ EmitBranch() [6/7]

template<class InstrType >
void v8::internal::LCodeGen::EmitBranch ( InstrType  instr,
Condition  condition,
Register  src1,
const Operand src2 
)
private

Definition at line 2043 of file lithium-codegen-mips.cc.

2046  {
2047  int left_block = instr->TrueDestination(chunk_);
2048  int right_block = instr->FalseDestination(chunk_);
2049 
2050  int next_block = GetNextEmittedBlock();
2051  if (right_block == left_block || condition == al) {
2052  EmitGoto(left_block);
2053  } else if (left_block == next_block) {
2054  __ Branch(chunk_->GetAssemblyLabel(right_block),
2055  NegateCondition(condition), src1, src2);
2056  } else if (right_block == next_block) {
2057  __ Branch(chunk_->GetAssemblyLabel(left_block), condition, src1, src2);
2058  } else {
2059  __ Branch(chunk_->GetAssemblyLabel(left_block), condition, src1, src2);
2060  __ Branch(chunk_->GetAssemblyLabel(right_block));
2061  }
2062 }

References __, v8::internal::al, EmitGoto(), and v8::internal::NegateCondition().

+ Here is the call graph for this function:

◆ EmitBranch() [7/7]

template<class InstrType >
void v8::internal::LCodeGen::EmitBranch ( InstrType  instr,
Condition  condition,
Register  src1,
const Operand src2 
)
private

◆ EmitBranchF() [1/2]

template<class InstrType >
void v8::internal::LCodeGen::EmitBranchF ( InstrType  instr,
Condition  condition,
FPURegister  src1,
FPURegister  src2 
)
private

Definition at line 2066 of file lithium-codegen-mips.cc.

2069  {
2070  int right_block = instr->FalseDestination(chunk_);
2071  int left_block = instr->TrueDestination(chunk_);
2072 
2073  int next_block = GetNextEmittedBlock();
2074  if (right_block == left_block) {
2075  EmitGoto(left_block);
2076  } else if (left_block == next_block) {
2077  __ BranchF(chunk_->GetAssemblyLabel(right_block), NULL,
2078  NegateCondition(condition), src1, src2);
2079  } else if (right_block == next_block) {
2080  __ BranchF(chunk_->GetAssemblyLabel(left_block), NULL,
2081  condition, src1, src2);
2082  } else {
2083  __ BranchF(chunk_->GetAssemblyLabel(left_block), NULL,
2084  condition, src1, src2);
2085  __ Branch(chunk_->GetAssemblyLabel(right_block));
2086  }
2087 }

References __, EmitGoto(), v8::internal::NegateCondition(), and NULL.

+ Here is the call graph for this function:

◆ EmitBranchF() [2/2]

template<class InstrType >
void v8::internal::LCodeGen::EmitBranchF ( InstrType  instr,
Condition  condition,
FPURegister  src1,
FPURegister  src2 
)
private

◆ EmitBranchGeneric()

template<class InstrType >
void v8::internal::LCodeGen::EmitBranchGeneric ( InstrType  instr,
const BranchGenerator branch 
)
private

Definition at line 1374 of file lithium-codegen-arm64.cc.

1375  {
1376  int left_block = instr->TrueDestination(chunk_);
1377  int right_block = instr->FalseDestination(chunk_);
1378 
1379  int next_block = GetNextEmittedBlock();
1380 
1381  if (right_block == left_block) {
1382  EmitGoto(left_block);
1383  } else if (left_block == next_block) {
1384  branch.EmitInverted(chunk_->GetAssemblyLabel(right_block));
1385  } else {
1386  branch.Emit(chunk_->GetAssemblyLabel(left_block));
1387  if (right_block != next_block) {
1388  __ B(chunk_->GetAssemblyLabel(right_block));
1389  }
1390  }
1391 }

References __, v8::internal::B, and EmitGoto().

Referenced by EmitBranchIfHeapNumber(), EmitBranchIfNonZeroNumber(), EmitBranchIfRoot(), EmitCompareAndBranch(), and EmitTestAndBranch().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ EmitBranchIfHeapNumber()

template<class InstrType >
void v8::internal::LCodeGen::EmitBranchIfHeapNumber ( InstrType  instr,
const Register value 
)
private

Definition at line 1434 of file lithium-codegen-arm64.cc.

1435  {
1436  BranchIfHeapNumber branch(this, value);
1437  EmitBranchGeneric(instr, branch);
1438 }
void EmitBranchGeneric(InstrType instr, const BranchGenerator &branch)

References EmitBranchGeneric().

+ Here is the call graph for this function:

◆ EmitBranchIfNonZeroNumber()

template<class InstrType >
void v8::internal::LCodeGen::EmitBranchIfNonZeroNumber ( InstrType  instr,
const FPRegister value,
const FPRegister scratch 
)
private

Definition at line 1425 of file lithium-codegen-arm64.cc.

1427  {
1428  BranchIfNonZeroNumber branch(this, value, scratch);
1429  EmitBranchGeneric(instr, branch);
1430 }

References EmitBranchGeneric().

+ Here is the call graph for this function:

◆ EmitBranchIfRoot()

template<class InstrType >
void v8::internal::LCodeGen::EmitBranchIfRoot ( InstrType  instr,
const Register value,
Heap::RootListIndex  index 
)
private

Definition at line 1442 of file lithium-codegen-arm64.cc.

1444  {
1445  BranchIfRoot branch(this, value, index);
1446  EmitBranchGeneric(instr, branch);
1447 }

References EmitBranchGeneric().

+ Here is the call graph for this function:

◆ EmitClassOfTest() [1/6]

void v8::internal::LCodeGen::EmitClassOfTest ( Label *  if_true,
Label *  if_false,
Handle< String class_name,
Register  input,
Register  temporary,
Register  scratch 
)
private

◆ EmitClassOfTest() [2/6]

void v8::internal::LCodeGen::EmitClassOfTest ( Label *  if_true,
Label *  if_false,
Handle< String class_name,
Register  input,
Register  temporary,
Register  temporary2 
)
private

Definition at line 2680 of file lithium-codegen-arm.cc.

2685  {
2686  DCHECK(!input.is(temp));
2687  DCHECK(!input.is(temp2));
2688  DCHECK(!temp.is(temp2));
2689 
2690  __ JumpIfSmi(input, is_false);
2691 
2692  if (String::Equals(isolate()->factory()->Function_string(), class_name)) {
2693  // Assuming the following assertions, we can use the same compares to test
2694  // for both being a function type and being in the object type range.
2695  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
2696  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
2697  FIRST_SPEC_OBJECT_TYPE + 1);
2698  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
2699  LAST_SPEC_OBJECT_TYPE - 1);
2700  STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
2701  __ CompareObjectType(input, temp, temp2, FIRST_SPEC_OBJECT_TYPE);
2702  __ b(lt, is_false);
2703  __ b(eq, is_true);
2704  __ cmp(temp2, Operand(LAST_SPEC_OBJECT_TYPE));
2705  __ b(eq, is_true);
2706  } else {
2707  // Faster code path to avoid two compares: subtract lower bound from the
2708  // actual type and do a signed compare with the width of the type range.
2709  __ ldr(temp, FieldMemOperand(input, HeapObject::kMapOffset));
2710  __ ldrb(temp2, FieldMemOperand(temp, Map::kInstanceTypeOffset));
2711  __ sub(temp2, temp2, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2712  __ cmp(temp2, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE -
2713  FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2714  __ b(gt, is_false);
2715  }
2716 
2717  // Now we are in the FIRST-LAST_NONCALLABLE_SPEC_OBJECT_TYPE range.
2718  // Check if the constructor in the map is a function.
2719  __ ldr(temp, FieldMemOperand(temp, Map::kConstructorOffset));
2720 
2721  // Objects with a non-function constructor have class 'Object'.
2722  __ CompareObjectType(temp, temp2, temp2, JS_FUNCTION_TYPE);
2723  if (class_name->IsOneByteEqualTo(STATIC_CHAR_VECTOR("Object"))) {
2724  __ b(ne, is_true);
2725  } else {
2726  __ b(ne, is_false);
2727  }
2728 
2729  // temp now contains the constructor function. Grab the
2730  // instance class name from there.
2731  __ ldr(temp, FieldMemOperand(temp, JSFunction::kSharedFunctionInfoOffset));
2732  __ ldr(temp, FieldMemOperand(temp,
2733  SharedFunctionInfo::kInstanceClassNameOffset));
2734  // The class name we are testing against is internalized since it's a literal.
2735  // The name in the constructor is internalized because of the way the context
2736  // is booted. This routine isn't expected to work for random API-created
2737  // classes and it doesn't have to because you can't access it with natives
2738  // syntax. Since both sides are internalized it is sufficient to use an
2739  // identity comparison.
2740  __ cmp(temp, Operand(class_name));
2741  // End with the answer in flags.
2742 }
static const int kSharedFunctionInfoOffset
Definition: objects.h:7379
static const int kInstanceTypeOffset
Definition: objects.h:6229
static const int kConstructorOffset
Definition: objects.h:6191
static const int kInstanceClassNameOffset
Definition: objects.h:6897
bool Equals(String *other)
Definition: objects-inl.h:3336
@ LAST_NONCALLABLE_SPEC_OBJECT_TYPE
Definition: objects.h:785
@ NUM_OF_CALLABLE_SPEC_OBJECT_TYPES
Definition: objects.h:788
@ FIRST_NONCALLABLE_SPEC_OBJECT_TYPE
Definition: objects.h:784
@ FIRST_SPEC_OBJECT_TYPE
Definition: objects.h:781
@ LAST_SPEC_OBJECT_TYPE
Definition: objects.h:782
@ JS_FUNCTION_TYPE
Definition: objects.h:749
#define STATIC_CHAR_VECTOR(x)
Definition: vector.h:154

References __, DCHECK, v8::internal::eq, v8::internal::String::Equals(), v8::internal::FieldMemOperand(), v8::internal::FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, v8::internal::FIRST_SPEC_OBJECT_TYPE, v8::internal::gt, v8::internal::Register::is(), v8::internal::JS_FUNCTION_TYPE, v8::internal::Map::kConstructorOffset, v8::internal::SharedFunctionInfo::kInstanceClassNameOffset, v8::internal::Map::kInstanceTypeOffset, v8::internal::HeapObject::kMapOffset, v8::internal::JSFunction::kSharedFunctionInfoOffset, v8::internal::LAST_NONCALLABLE_SPEC_OBJECT_TYPE, v8::internal::LAST_SPEC_OBJECT_TYPE, v8::internal::LAST_TYPE, v8::internal::lt, v8::internal::ne, v8::internal::NUM_OF_CALLABLE_SPEC_OBJECT_TYPES, v8::internal::STATIC_ASSERT(), and STATIC_CHAR_VECTOR.

+ Here is the call graph for this function:

◆ EmitClassOfTest() [3/6]

void v8::internal::LCodeGen::EmitClassOfTest ( Label *  if_true,
Label *  if_false,
Handle< String class_name,
Register  input,
Register  temporary,
Register  temporary2 
)
private

◆ EmitClassOfTest() [4/6]

void v8::internal::LCodeGen::EmitClassOfTest ( Label *  if_true,
Label *  if_false,
Handle< String class_name,
Register  input,
Register  temporary,
Register  temporary2 
)
private

◆ EmitClassOfTest() [5/6]

void v8::internal::LCodeGen::EmitClassOfTest ( Label *  if_true,
Label *  if_false,
Handle< String class_name,
Register  input,
Register  temporary,
Register  temporary2 
)
private

◆ EmitClassOfTest() [6/6]

void v8::internal::LCodeGen::EmitClassOfTest ( Label *  if_true,
Label *  if_false,
Handle< String class_name,
Register  input,
Register  temporary,
Register  temporary2 
)
private

◆ EmitCmpI() [1/2]

void v8::internal::LCodeGen::EmitCmpI ( LOperand left,
LOperand right 
)
private

◆ EmitCmpI() [2/2]

void v8::internal::LCodeGen::EmitCmpI ( LOperand left,
LOperand right 
)
private

◆ EmitCompareAndBranch()

template<class InstrType >
void v8::internal::LCodeGen::EmitCompareAndBranch ( InstrType  instr,
Condition  condition,
const Register lhs,
const Operand rhs 
)
private

Definition at line 1403 of file lithium-codegen-arm64.cc.

1406  {
1407  DCHECK((condition != al) && (condition != nv));
1408  CompareAndBranch branch(this, condition, lhs, rhs);
1409  EmitBranchGeneric(instr, branch);
1410 }

References v8::internal::al, DCHECK, EmitBranchGeneric(), and v8::internal::nv.

+ Here is the call graph for this function:

◆ EmitDeepCopy() [1/7]

void v8::internal::LCodeGen::EmitDeepCopy ( Handle< JSObject object,
Register  result,
Register  source,
int offset,
AllocationSiteMode  mode 
)
private

◆ EmitDeepCopy() [2/7]

void v8::internal::LCodeGen::EmitDeepCopy ( Handle< JSObject object,
Register  result,
Register  source,
int offset,
AllocationSiteMode  mode 
)
private

◆ EmitDeepCopy() [3/7]

void v8::internal::LCodeGen::EmitDeepCopy ( Handle< JSObject object,
Register  result,
Register  source,
int offset,
AllocationSiteMode  mode 
)
private

◆ EmitDeepCopy() [4/7]

void v8::internal::LCodeGen::EmitDeepCopy ( Handle< JSObject object,
Register  result,
Register  source,
int offset,
AllocationSiteMode  mode 
)
private

◆ EmitDeepCopy() [5/7]

void v8::internal::LCodeGen::EmitDeepCopy ( Handle< JSObject object,
Register  result,
Register  source,
int offset,
AllocationSiteMode  mode 
)
private

◆ EmitDeepCopy() [6/7]

void v8::internal::LCodeGen::EmitDeepCopy ( Handle< JSObject object,
Register  result,
Register  source,
int offset,
AllocationSiteMode  mode 
)
private

◆ EmitDeepCopy() [7/7]

void v8::internal::LCodeGen::EmitDeepCopy ( Handle< JSObject object,
Register  result,
Register  source,
Register  scratch,
int offset,
AllocationSiteMode  mode 
)
private

◆ EmitFalseBranch() [1/6]

template<class InstrType >
void v8::internal::LCodeGen::EmitFalseBranch ( InstrType  instr,
Condition  cc 
)
private

◆ EmitFalseBranch() [2/6]

template<class InstrType >
void v8::internal::LCodeGen::EmitFalseBranch ( InstrType  instr,
Condition  cc 
)
private

◆ EmitFalseBranch() [3/6]

template<class InstrType >
void v8::internal::LCodeGen::EmitFalseBranch ( InstrType  instr,
Condition  cc 
)
private

◆ EmitFalseBranch() [4/6]

template<class InstrType >
void v8::internal::LCodeGen::EmitFalseBranch ( InstrType  instr,
Condition  condition 
)
private

Definition at line 2204 of file lithium-codegen-arm.cc.

2204  {
2205  int false_block = instr->FalseDestination(chunk_);
2206  __ b(condition, chunk_->GetAssemblyLabel(false_block));
2207 }

References __.

◆ EmitFalseBranch() [5/6]

template<class InstrType >
void v8::internal::LCodeGen::EmitFalseBranch ( InstrType  instr,
Condition  condition,
Register  src1,
const Operand src2 
)
private

Definition at line 2091 of file lithium-codegen-mips.cc.

2094  {
2095  int false_block = instr->FalseDestination(chunk_);
2096  __ Branch(chunk_->GetAssemblyLabel(false_block), condition, src1, src2);
2097 }

References __.

◆ EmitFalseBranch() [6/6]

template<class InstrType >
void v8::internal::LCodeGen::EmitFalseBranch ( InstrType  instr,
Condition  condition,
Register  src1,
const Operand src2 
)
private

◆ EmitFalseBranchF() [1/2]

template<class InstrType >
void v8::internal::LCodeGen::EmitFalseBranchF ( InstrType  instr,
Condition  condition,
FPURegister  src1,
FPURegister  src2 
)
private

Definition at line 2101 of file lithium-codegen-mips.cc.

2104  {
2105  int false_block = instr->FalseDestination(chunk_);
2106  __ BranchF(chunk_->GetAssemblyLabel(false_block), NULL,
2107  condition, src1, src2);
2108 }

References __, and NULL.

◆ EmitFalseBranchF() [2/2]

template<class InstrType >
void v8::internal::LCodeGen::EmitFalseBranchF ( InstrType  instr,
Condition  condition,
FPURegister  src1,
FPURegister  src2 
)
private

◆ EmitFlushX87ForDeopt()

void v8::internal::LCodeGen::EmitFlushX87ForDeopt ( )
private

◆ EmitGoto() [1/7]

void v8::internal::LCodeGen::EmitGoto ( int  block)
private

Definition at line 2351 of file lithium-codegen-arm.cc.

2351  {
2352  if (!IsNextEmittedBlock(block)) {
2353  __ jmp(chunk_->GetAssemblyLabel(LookupDestination(block)));
2354  }
2355 }
bool IsNextEmittedBlock(int block_id) const
int LookupDestination(int block_id) const

References __, IsNextEmittedBlock(), and LookupDestination().

Referenced by EmitBranch(), EmitBranchF(), and EmitBranchGeneric().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ EmitGoto() [2/7]

void v8::internal::LCodeGen::EmitGoto ( int  block)
private

◆ EmitGoto() [3/7]

void v8::internal::LCodeGen::EmitGoto ( int  block)
private

◆ EmitGoto() [4/7]

void v8::internal::LCodeGen::EmitGoto ( int  block)
private

◆ EmitGoto() [5/7]

void v8::internal::LCodeGen::EmitGoto ( int  block)
private

◆ EmitGoto() [6/7]

void v8::internal::LCodeGen::EmitGoto ( int  block)
private

◆ EmitGoto() [7/7]

void v8::internal::LCodeGen::EmitGoto ( int  block)
private

◆ EmitIntegerMathAbs() [1/6]

void v8::internal::LCodeGen::EmitIntegerMathAbs ( LMathAbs *  instr)
private

Definition at line 3741 of file lithium-codegen-arm.cc.

3741  {
3742  Register input = ToRegister(instr->value());
3743  Register result = ToRegister(instr->result());
3744  __ cmp(input, Operand::Zero());
3745  __ Move(result, input, pl);
3746  // We can make rsb conditional because the previous cmp instruction
3747  // will clear the V (overflow) flag and rsb won't set this flag
3748  // if input is positive.
3749  __ rsb(result, input, Operand::Zero(), SetCC, mi);
3750  // Deoptimize on overflow.
3751  DeoptimizeIf(vs, instr, "overflow");
3752 }

References __, DeoptimizeIf(), v8::internal::mi, v8::internal::pl, v8::internal::SetCC, ToRegister(), and v8::internal::vs.

+ Here is the call graph for this function:

◆ EmitIntegerMathAbs() [2/6]

void v8::internal::LCodeGen::EmitIntegerMathAbs ( LMathAbs *  instr)
private

◆ EmitIntegerMathAbs() [3/6]

void v8::internal::LCodeGen::EmitIntegerMathAbs ( LMathAbs *  instr)
private

◆ EmitIntegerMathAbs() [4/6]

void v8::internal::LCodeGen::EmitIntegerMathAbs ( LMathAbs *  instr)
private

◆ EmitIntegerMathAbs() [5/6]

void v8::internal::LCodeGen::EmitIntegerMathAbs ( LMathAbs *  instr)
private

◆ EmitIntegerMathAbs() [6/6]

void v8::internal::LCodeGen::EmitIntegerMathAbs ( LMathAbs *  instr)
private

◆ EmitIsConstructCall() [1/6]

void v8::internal::LCodeGen::EmitIsConstructCall ( Register  temp)
private

◆ EmitIsConstructCall() [2/6]

void v8::internal::LCodeGen::EmitIsConstructCall ( Register  temp)
private

◆ EmitIsConstructCall() [3/6]

void v8::internal::LCodeGen::EmitIsConstructCall ( Register  temp)
private

◆ EmitIsConstructCall() [4/6]

void v8::internal::LCodeGen::EmitIsConstructCall ( Register  temp1,
Register  temp2 
)
private

Definition at line 5607 of file lithium-codegen-arm.cc.

5607  {
5608  DCHECK(!temp1.is(temp2));
5609  // Get the frame pointer for the calling frame.
5610  __ ldr(temp1, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
5611 
5612  // Skip the arguments adaptor frame if it exists.
5613  __ ldr(temp2, MemOperand(temp1, StandardFrameConstants::kContextOffset));
5614  __ cmp(temp2, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
5615  __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset), eq);
5616 
5617  // Check the marker in the calling frame.
5618  __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset));
5619  __ cmp(temp1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
5620 }
static const int kMarkerOffset
Definition: frames.h:161
static const int kCallerFPOffset
Definition: frames.h:165

References __, v8::internal::compiler::ARGUMENTS_ADAPTOR, DCHECK, v8::internal::eq, v8::internal::fp, v8::internal::Smi::FromInt(), v8::internal::Register::is(), v8::internal::StandardFrameConstants::kCallerFPOffset, v8::internal::StandardFrameConstants::kContextOffset, and v8::internal::StandardFrameConstants::kMarkerOffset.

+ Here is the call graph for this function:

◆ EmitIsConstructCall() [5/6]

void v8::internal::LCodeGen::EmitIsConstructCall ( Register  temp1,
Register  temp2 
)
private

◆ EmitIsConstructCall() [6/6]

void v8::internal::LCodeGen::EmitIsConstructCall ( Register  temp1,
Register  temp2 
)
private

◆ EmitIsObject() [1/6]

Condition v8::internal::LCodeGen::EmitIsObject ( Register  input,
Label *  is_not_object,
Label *  is_object 
)
private

◆ EmitIsObject() [2/6]

Condition v8::internal::LCodeGen::EmitIsObject ( Register  input,
Register  temp1,
Label *  is_not_object,
Label *  is_object 
)
private

Definition at line 2500 of file lithium-codegen-arm.cc.

2503  {
2504  Register temp2 = scratch0();
2505  __ JumpIfSmi(input, is_not_object);
2506 
2507  __ LoadRoot(temp2, Heap::kNullValueRootIndex);
2508  __ cmp(input, temp2);
2509  __ b(eq, is_object);
2510 
2511  // Load map.
2512  __ ldr(temp1, FieldMemOperand(input, HeapObject::kMapOffset));
2513  // Undetectable objects behave like undefined.
2514  __ ldrb(temp2, FieldMemOperand(temp1, Map::kBitFieldOffset));
2515  __ tst(temp2, Operand(1 << Map::kIsUndetectable));
2516  __ b(ne, is_not_object);
2517 
2518  // Load instance type and check that it is in object type range.
2519  __ ldrb(temp2, FieldMemOperand(temp1, Map::kInstanceTypeOffset));
2520  __ cmp(temp2, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2521  __ b(lt, is_not_object);
2522  __ cmp(temp2, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
2523  return le;
2524 }
static const int kIsUndetectable
Definition: objects.h:6244
static const int kBitFieldOffset
Definition: objects.h:6228

References __, v8::internal::eq, v8::internal::FieldMemOperand(), v8::internal::FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, v8::internal::Map::kBitFieldOffset, v8::internal::Map::kInstanceTypeOffset, v8::internal::Map::kIsUndetectable, v8::internal::HeapObject::kMapOffset, v8::internal::LAST_NONCALLABLE_SPEC_OBJECT_TYPE, v8::internal::le, v8::internal::lt, v8::internal::ne, and scratch0().

+ Here is the call graph for this function:

◆ EmitIsObject() [3/6]

Condition v8::internal::LCodeGen::EmitIsObject ( Register  input,
Register  temp1,
Label *  is_not_object,
Label *  is_object 
)
private

◆ EmitIsObject() [4/6]

Condition v8::internal::LCodeGen::EmitIsObject ( Register  input,
Register  temp1,
Label *  is_not_object,
Label *  is_object 
)
private

◆ EmitIsObject() [5/6]

Condition v8::internal::LCodeGen::EmitIsObject ( Register  input,
Register  temp1,
Register  temp2,
Label *  is_not_object,
Label *  is_object 
)
private

Definition at line 2403 of file lithium-codegen-mips.cc.

2407  {
2408  __ JumpIfSmi(input, is_not_object);
2409 
2410  __ LoadRoot(temp2, Heap::kNullValueRootIndex);
2411  __ Branch(is_object, eq, input, Operand(temp2));
2412 
2413  // Load map.
2414  __ lw(temp1, FieldMemOperand(input, HeapObject::kMapOffset));
2415  // Undetectable objects behave like undefined.
2416  __ lbu(temp2, FieldMemOperand(temp1, Map::kBitFieldOffset));
2417  __ And(temp2, temp2, Operand(1 << Map::kIsUndetectable));
2418  __ Branch(is_not_object, ne, temp2, Operand(zero_reg));
2419 
2420  // Load instance type and check that it is in object type range.
2421  __ lbu(temp2, FieldMemOperand(temp1, Map::kInstanceTypeOffset));
2422  __ Branch(is_not_object,
2423  lt, temp2, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2424 
2425  return le;
2426 }

References __, v8::internal::eq, v8::internal::FieldMemOperand(), v8::internal::FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, v8::internal::Map::kBitFieldOffset, v8::internal::Map::kInstanceTypeOffset, v8::internal::Map::kIsUndetectable, v8::internal::HeapObject::kMapOffset, v8::internal::le, v8::internal::lt, and v8::internal::ne.

+ Here is the call graph for this function:

◆ EmitIsObject() [6/6]

Condition v8::internal::LCodeGen::EmitIsObject ( Register  input,
Register  temp1,
Register  temp2,
Label *  is_not_object,
Label *  is_object 
)
private

◆ EmitIsString() [1/7]

Condition v8::internal::LCodeGen::EmitIsString ( Register  input,
Register  temp1,
Label *  is_not_string,
SmiCheck  check_needed = INLINE_SMI_CHECK 
)
private

Definition at line 2539 of file lithium-codegen-arm.cc.

2542  {
2543  if (check_needed == INLINE_SMI_CHECK) {
2544  __ JumpIfSmi(input, is_not_string);
2545  }
2546  __ CompareObjectType(input, temp1, temp1, FIRST_NONSTRING_TYPE);
2547 
2548  return lt;
2549 }
@ FIRST_NONSTRING_TYPE
Definition: objects.h:758

References __, v8::internal::FIRST_NONSTRING_TYPE, v8::internal::INLINE_SMI_CHECK, and v8::internal::lt.

◆ EmitIsString() [2/7]

Condition v8::internal::LCodeGen::EmitIsString ( Register  input,
Register  temp1,
Label *  is_not_string,
SmiCheck  check_needed 
)
private

◆ EmitIsString() [3/7]

Condition v8::internal::LCodeGen::EmitIsString ( Register  input,
Register  temp1,
Label *  is_not_string,
SmiCheck  check_needed 
)
private

◆ EmitIsString() [4/7]

Condition v8::internal::LCodeGen::EmitIsString ( Register  input,
Register  temp1,
Label *  is_not_string,
SmiCheck  check_needed 
)
private

◆ EmitIsString() [5/7]

Condition v8::internal::LCodeGen::EmitIsString ( Register  input,
Register  temp1,
Label *  is_not_string,
SmiCheck  check_needed 
)
private

◆ EmitIsString() [6/7]

Condition v8::internal::LCodeGen::EmitIsString ( Register  input,
Register  temp1,
Label *  is_not_string,
SmiCheck  check_needed 
)
private

◆ EmitIsString() [7/7]

Condition v8::internal::LCodeGen::EmitIsString ( Register  input,
Register  temp1,
Label *  is_not_string,
SmiCheck  check_needed 
)
private

◆ EmitLoadDoubleRegister() [1/3]

DoubleRegister v8::internal::LCodeGen::EmitLoadDoubleRegister ( LOperand op,
FloatRegister  flt_scratch,
DoubleRegister  dbl_scratch 
)

Definition at line 464 of file lithium-codegen-mips.cc.

466  {
467  if (op->IsDoubleRegister()) {
468  return ToDoubleRegister(op->index());
469  } else if (op->IsConstantOperand()) {
470  LConstantOperand* const_op = LConstantOperand::cast(op);
471  HConstant* constant = chunk_->LookupConstant(const_op);
472  Handle<Object> literal = constant->handle(isolate());
473  Representation r = chunk_->LookupLiteralRepresentation(const_op);
474  if (r.IsInteger32()) {
475  DCHECK(literal->IsNumber());
476  __ li(at, Operand(static_cast<int32_t>(literal->Number())));
477  __ mtc1(at, flt_scratch);
478  __ cvt_d_w(dbl_scratch, flt_scratch);
479  return dbl_scratch;
480  } else if (r.IsDouble()) {
481  Abort(kUnsupportedDoubleImmediate);
482  } else if (r.IsTagged()) {
483  Abort(kUnsupportedTaggedImmediate);
484  }
485  } else if (op->IsStackSlot()) {
486  MemOperand mem_op = ToMemOperand(op);
487  __ ldc1(dbl_scratch, mem_op);
488  return dbl_scratch;
489  }
490  UNREACHABLE();
491  return dbl_scratch;
492 }
MemOperand ToMemOperand(LOperand *op) const

References __, DCHECK, v8::internal::LOperand::index(), v8::internal::Representation::IsDouble(), v8::internal::Representation::IsInteger32(), v8::internal::Representation::IsTagged(), ToDoubleRegister(), ToMemOperand(), and UNREACHABLE.

+ Here is the call graph for this function:

◆ EmitLoadDoubleRegister() [2/3]

DoubleRegister v8::internal::LCodeGen::EmitLoadDoubleRegister ( LOperand op,
FloatRegister  flt_scratch,
DoubleRegister  dbl_scratch 
)

◆ EmitLoadDoubleRegister() [3/3]

DwVfpRegister v8::internal::LCodeGen::EmitLoadDoubleRegister ( LOperand op,
SwVfpRegister  flt_scratch,
DwVfpRegister  dbl_scratch 
)

Definition at line 467 of file lithium-codegen-arm.cc.

469  {
470  if (op->IsDoubleRegister()) {
471  return ToDoubleRegister(op->index());
472  } else if (op->IsConstantOperand()) {
473  LConstantOperand* const_op = LConstantOperand::cast(op);
474  HConstant* constant = chunk_->LookupConstant(const_op);
475  Handle<Object> literal = constant->handle(isolate());
476  Representation r = chunk_->LookupLiteralRepresentation(const_op);
477  if (r.IsInteger32()) {
478  DCHECK(literal->IsNumber());
479  __ mov(ip, Operand(static_cast<int32_t>(literal->Number())));
480  __ vmov(flt_scratch, ip);
481  __ vcvt_f64_s32(dbl_scratch, flt_scratch);
482  return dbl_scratch;
483  } else if (r.IsDouble()) {
484  Abort(kUnsupportedDoubleImmediate);
485  } else if (r.IsTagged()) {
486  Abort(kUnsupportedTaggedImmediate);
487  }
488  } else if (op->IsStackSlot()) {
489  // TODO(regis): Why is vldr not taking a MemOperand?
490  // __ vldr(dbl_scratch, ToMemOperand(op));
491  MemOperand mem_op = ToMemOperand(op);
492  __ vldr(dbl_scratch, mem_op.rn(), mem_op.offset());
493  return dbl_scratch;
494  }
495  UNREACHABLE();
496  return dbl_scratch;
497 }

References __, DCHECK, v8::internal::LOperand::index(), v8::internal::ip, v8::internal::Representation::IsDouble(), v8::internal::Representation::IsInteger32(), v8::internal::Representation::IsTagged(), v8::internal::MemOperand::offset(), ToDoubleRegister(), ToMemOperand(), and UNREACHABLE.

+ Here is the call graph for this function:

◆ EmitLoadRegister() [1/3]

Register v8::internal::LCodeGen::EmitLoadRegister ( LOperand op,
Register  scratch 
)

Definition at line 434 of file lithium-codegen-arm.cc.

434  {
435  if (op->IsRegister()) {
436  return ToRegister(op->index());
437  } else if (op->IsConstantOperand()) {
438  LConstantOperand* const_op = LConstantOperand::cast(op);
439  HConstant* constant = chunk_->LookupConstant(const_op);
440  Handle<Object> literal = constant->handle(isolate());
441  Representation r = chunk_->LookupLiteralRepresentation(const_op);
442  if (r.IsInteger32()) {
443  DCHECK(literal->IsNumber());
444  __ mov(scratch, Operand(static_cast<int32_t>(literal->Number())));
445  } else if (r.IsDouble()) {
446  Abort(kEmitLoadRegisterUnsupportedDoubleImmediate);
447  } else {
448  DCHECK(r.IsSmiOrTagged());
449  __ Move(scratch, literal);
450  }
451  return scratch;
452  } else if (op->IsStackSlot()) {
453  __ ldr(scratch, ToMemOperand(op));
454  return scratch;
455  }
456  UNREACHABLE();
457  return scratch;
458 }

References __, DCHECK, v8::internal::LOperand::index(), v8::internal::Representation::IsDouble(), v8::internal::Representation::IsInteger32(), v8::internal::Representation::IsSmiOrTagged(), ToMemOperand(), ToRegister(), and UNREACHABLE.

+ Here is the call graph for this function:

◆ EmitLoadRegister() [2/3]

Register v8::internal::LCodeGen::EmitLoadRegister ( LOperand op,
Register  scratch 
)

◆ EmitLoadRegister() [3/3]

Register v8::internal::LCodeGen::EmitLoadRegister ( LOperand op,
Register  scratch 
)

◆ EmitNumberUntagD() [1/5]

void v8::internal::LCodeGen::EmitNumberUntagD ( LNumberUntagD *  instr,
Register  input,
DoubleRegister  result,
NumberUntagDMode  mode 
)
private

◆ EmitNumberUntagD() [2/5]

void v8::internal::LCodeGen::EmitNumberUntagD ( LNumberUntagD *  instr,
Register  input,
DoubleRegister  result,
NumberUntagDMode  mode 
)
private

◆ EmitNumberUntagD() [3/5]

void v8::internal::LCodeGen::EmitNumberUntagD ( LNumberUntagD *  instr,
Register  input,
DwVfpRegister  result,
NumberUntagDMode  mode 
)
private

Definition at line 4869 of file lithium-codegen-arm.cc.

4871  {
4872  bool can_convert_undefined_to_nan =
4873  instr->hydrogen()->can_convert_undefined_to_nan();
4874  bool deoptimize_on_minus_zero = instr->hydrogen()->deoptimize_on_minus_zero();
4875 
4876  Register scratch = scratch0();
4877  SwVfpRegister flt_scratch = double_scratch0().low();
4878  DCHECK(!result_reg.is(double_scratch0()));
4879  Label convert, load_smi, done;
4880  if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) {
4881  // Smi check.
4882  __ UntagAndJumpIfSmi(scratch, input_reg, &load_smi);
4883  // Heap number map check.
4884  __ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
4885  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
4886  __ cmp(scratch, Operand(ip));
4887  if (can_convert_undefined_to_nan) {
4888  __ b(ne, &convert);
4889  } else {
4890  DeoptimizeIf(ne, instr, "not a heap number");
4891  }
4892  // load heap number
4893  __ vldr(result_reg, input_reg, HeapNumber::kValueOffset - kHeapObjectTag);
4894  if (deoptimize_on_minus_zero) {
4895  __ VmovLow(scratch, result_reg);
4896  __ cmp(scratch, Operand::Zero());
4897  __ b(ne, &done);
4898  __ VmovHigh(scratch, result_reg);
4899  __ cmp(scratch, Operand(HeapNumber::kSignMask));
4900  DeoptimizeIf(eq, instr, "minus zero");
4901  }
4902  __ jmp(&done);
4903  if (can_convert_undefined_to_nan) {
4904  __ bind(&convert);
4905  // Convert undefined (and hole) to NaN.
4906  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
4907  __ cmp(input_reg, Operand(ip));
4908  DeoptimizeIf(ne, instr, "not a heap number/undefined");
4909  __ LoadRoot(scratch, Heap::kNanValueRootIndex);
4910  __ vldr(result_reg, scratch, HeapNumber::kValueOffset - kHeapObjectTag);
4911  __ jmp(&done);
4912  }
4913  } else {
4914  __ SmiUntag(scratch, input_reg);
4915  DCHECK(mode == NUMBER_CANDIDATE_IS_SMI);
4916  }
4917  // Smi to double register conversion
4918  __ bind(&load_smi);
4919  // scratch: untagged value of input_reg
4920  __ vmov(flt_scratch, scratch);
4921  __ vcvt_f64_s32(result_reg, flt_scratch);
4922  __ bind(&done);
4923 }
@ NUMBER_CANDIDATE_IS_SMI
Definition: lithium.h:756
@ NUMBER_CANDIDATE_IS_ANY_TAGGED
Definition: lithium.h:757
SwVfpRegister low() const

References __, DCHECK, DeoptimizeIf(), double_scratch0(), v8::internal::eq, v8::internal::FieldMemOperand(), v8::internal::ip, v8::internal::DwVfpRegister::is(), v8::internal::kHeapObjectTag, v8::internal::HeapObject::kMapOffset, v8::internal::HeapNumber::kSignMask, v8::internal::HeapNumber::kValueOffset, v8::internal::LowDwVfpRegister::low(), mode(), v8::internal::ne, v8::internal::NUMBER_CANDIDATE_IS_ANY_TAGGED, v8::internal::NUMBER_CANDIDATE_IS_SMI, and scratch0().

+ Here is the call graph for this function:

◆ EmitNumberUntagD() [4/5]

void v8::internal::LCodeGen::EmitNumberUntagD ( LNumberUntagD *  instr,
Register  input,
Register  temp,
XMMRegister  result,
NumberUntagDMode  mode 
)
private

◆ EmitNumberUntagD() [5/5]

void v8::internal::LCodeGen::EmitNumberUntagD ( LNumberUntagD *  instr,
Register  input,
XMMRegister  result,
NumberUntagDMode  mode 
)
private

◆ EmitNumberUntagDNoSSE2()

void v8::internal::LCodeGen::EmitNumberUntagDNoSSE2 ( LNumberUntagD *  instr,
Register  input,
Register  temp,
X87Register  res_reg,
NumberUntagDMode  mode 
)
private

◆ EmitPushTaggedOperand() [1/3]

void v8::internal::LCodeGen::EmitPushTaggedOperand ( LOperand operand)
private

◆ EmitPushTaggedOperand() [2/3]

void v8::internal::LCodeGen::EmitPushTaggedOperand ( LOperand operand)
private

◆ EmitPushTaggedOperand() [3/3]

void v8::internal::LCodeGen::EmitPushTaggedOperand ( LOperand operand)
private

◆ EmitReturn() [1/2]

void v8::internal::LCodeGen::EmitReturn ( LReturn *  instr,
bool  dynamic_frame_alignment 
)
private

◆ EmitReturn() [2/2]

void v8::internal::LCodeGen::EmitReturn ( LReturn *  instr,
bool  dynamic_frame_alignment 
)
private

◆ EmitSignedIntegerDivisionByConstant() [1/2]

void v8::internal::LCodeGen::EmitSignedIntegerDivisionByConstant ( Register  result,
Register  dividend,
int32_t  divisor,
Register  remainder,
Register  scratch,
LEnvironment environment 
)
private

◆ EmitSignedIntegerDivisionByConstant() [2/2]

void v8::internal::LCodeGen::EmitSignedIntegerDivisionByConstant ( Register  result,
Register  dividend,
int32_t  divisor,
Register  remainder,
Register  scratch,
LEnvironment environment 
)
private

◆ EmitSmiMathAbs()

void v8::internal::LCodeGen::EmitSmiMathAbs ( LMathAbs *  instr)
private

◆ EmitTestAndBranch()

template<class InstrType >
void v8::internal::LCodeGen::EmitTestAndBranch ( InstrType  instr,
Condition  condition,
const Register value,
uint64_t  mask 
)
private

Definition at line 1414 of file lithium-codegen-arm64.cc.

1417  {
1418  DCHECK((condition != al) && (condition != nv));
1419  TestAndBranch branch(this, condition, value, mask);
1420  EmitBranchGeneric(instr, branch);
1421 }

References v8::internal::al, DCHECK, EmitBranchGeneric(), and v8::internal::nv.

+ Here is the call graph for this function:

◆ EmitTypeofIs() [1/6]

Condition v8::internal::LCodeGen::EmitTypeofIs ( Label *  true_label,
Label *  false_label,
Register  input,
Handle< String type_name 
)
private

Definition at line 5525 of file lithium-codegen-arm.cc.

5528  {
5529  Condition final_branch_condition = kNoCondition;
5530  Register scratch = scratch0();
5531  Factory* factory = isolate()->factory();
5532  if (String::Equals(type_name, factory->number_string())) {
5533  __ JumpIfSmi(input, true_label);
5534  __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
5535  __ CompareRoot(scratch, Heap::kHeapNumberMapRootIndex);
5536  final_branch_condition = eq;
5537 
5538  } else if (String::Equals(type_name, factory->string_string())) {
5539  __ JumpIfSmi(input, false_label);
5540  __ CompareObjectType(input, scratch, no_reg, FIRST_NONSTRING_TYPE);
5541  __ b(ge, false_label);
5542  __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
5543  __ tst(scratch, Operand(1 << Map::kIsUndetectable));
5544  final_branch_condition = eq;
5545 
5546  } else if (String::Equals(type_name, factory->symbol_string())) {
5547  __ JumpIfSmi(input, false_label);
5548  __ CompareObjectType(input, scratch, no_reg, SYMBOL_TYPE);
5549  final_branch_condition = eq;
5550 
5551  } else if (String::Equals(type_name, factory->boolean_string())) {
5552  __ CompareRoot(input, Heap::kTrueValueRootIndex);
5553  __ b(eq, true_label);
5554  __ CompareRoot(input, Heap::kFalseValueRootIndex);
5555  final_branch_condition = eq;
5556 
5557  } else if (String::Equals(type_name, factory->undefined_string())) {
5558  __ CompareRoot(input, Heap::kUndefinedValueRootIndex);
5559  __ b(eq, true_label);
5560  __ JumpIfSmi(input, false_label);
5561  // Check for undetectable objects => true.
5562  __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
5563  __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
5564  __ tst(scratch, Operand(1 << Map::kIsUndetectable));
5565  final_branch_condition = ne;
5566 
5567  } else if (String::Equals(type_name, factory->function_string())) {
5569  Register type_reg = scratch;
5570  __ JumpIfSmi(input, false_label);
5571  __ CompareObjectType(input, scratch, type_reg, JS_FUNCTION_TYPE);
5572  __ b(eq, true_label);
5573  __ cmp(type_reg, Operand(JS_FUNCTION_PROXY_TYPE));
5574  final_branch_condition = eq;
5575 
5576  } else if (String::Equals(type_name, factory->object_string())) {
5577  Register map = scratch;
5578  __ JumpIfSmi(input, false_label);
5579  __ CompareRoot(input, Heap::kNullValueRootIndex);
5580  __ b(eq, true_label);
5581  __ CheckObjectTypeRange(input,
5582  map,
5585  false_label);
5586  // Check for undetectable objects => false.
5587  __ ldrb(scratch, FieldMemOperand(map, Map::kBitFieldOffset));
5588  __ tst(scratch, Operand(1 << Map::kIsUndetectable));
5589  final_branch_condition = eq;
5590 
5591  } else {
5592  __ b(false_label);
5593  }
5594 
5595  return final_branch_condition;
5596 }
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf map
@ JS_FUNCTION_PROXY_TYPE
Definition: objects.h:726

References __, v8::internal::eq, v8::internal::String::Equals(), v8::internal::FieldMemOperand(), v8::internal::FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, v8::internal::FIRST_NONSTRING_TYPE, v8::internal::ge, v8::internal::JS_FUNCTION_PROXY_TYPE, v8::internal::JS_FUNCTION_TYPE, v8::internal::Map::kBitFieldOffset, v8::internal::Map::kIsUndetectable, v8::internal::HeapObject::kMapOffset, v8::internal::kNoCondition, v8::internal::LAST_NONCALLABLE_SPEC_OBJECT_TYPE, map, v8::internal::ne, v8::internal::no_reg, v8::internal::NUM_OF_CALLABLE_SPEC_OBJECT_TYPES, scratch0(), v8::internal::STATIC_ASSERT(), and v8::internal::SYMBOL_TYPE.

+ Here is the call graph for this function:

◆ EmitTypeofIs() [2/6]

Condition v8::internal::LCodeGen::EmitTypeofIs ( Label *  true_label,
Label *  false_label,
Register  input,
Handle< String type_name,
Register cmp1,
Operand cmp2 
)
private

Definition at line 5523 of file lithium-codegen-mips.cc.

5528  {
5529  // This function utilizes the delay slot heavily. This is used to load
5530  // values that are always usable without depending on the type of the input
5531  // register.
5532  Condition final_branch_condition = kNoCondition;
5533  Register scratch = scratch0();
5534  Factory* factory = isolate()->factory();
5535  if (String::Equals(type_name, factory->number_string())) {
5536  __ JumpIfSmi(input, true_label);
5537  __ lw(input, FieldMemOperand(input, HeapObject::kMapOffset));
5538  __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
5539  *cmp1 = input;
5540  *cmp2 = Operand(at);
5541  final_branch_condition = eq;
5542 
5543  } else if (String::Equals(type_name, factory->string_string())) {
5544  __ JumpIfSmi(input, false_label);
5545  __ GetObjectType(input, input, scratch);
5546  __ Branch(USE_DELAY_SLOT, false_label,
5547  ge, scratch, Operand(FIRST_NONSTRING_TYPE));
5548  // input is an object so we can load the BitFieldOffset even if we take the
5549  // other branch.
5550  __ lbu(at, FieldMemOperand(input, Map::kBitFieldOffset));
5551  __ And(at, at, 1 << Map::kIsUndetectable);
5552  *cmp1 = at;
5553  *cmp2 = Operand(zero_reg);
5554  final_branch_condition = eq;
5555 
5556  } else if (String::Equals(type_name, factory->symbol_string())) {
5557  __ JumpIfSmi(input, false_label);
5558  __ GetObjectType(input, input, scratch);
5559  *cmp1 = scratch;
5560  *cmp2 = Operand(SYMBOL_TYPE);
5561  final_branch_condition = eq;
5562 
5563  } else if (String::Equals(type_name, factory->boolean_string())) {
5564  __ LoadRoot(at, Heap::kTrueValueRootIndex);
5565  __ Branch(USE_DELAY_SLOT, true_label, eq, at, Operand(input));
5566  __ LoadRoot(at, Heap::kFalseValueRootIndex);
5567  *cmp1 = at;
5568  *cmp2 = Operand(input);
5569  final_branch_condition = eq;
5570 
5571  } else if (String::Equals(type_name, factory->undefined_string())) {
5572  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
5573  __ Branch(USE_DELAY_SLOT, true_label, eq, at, Operand(input));
5574  // The first instruction of JumpIfSmi is an And - it is safe in the delay
5575  // slot.
5576  __ JumpIfSmi(input, false_label);
5577  // Check for undetectable objects => true.
5578  __ lw(input, FieldMemOperand(input, HeapObject::kMapOffset));
5579  __ lbu(at, FieldMemOperand(input, Map::kBitFieldOffset));
5580  __ And(at, at, 1 << Map::kIsUndetectable);
5581  *cmp1 = at;
5582  *cmp2 = Operand(zero_reg);
5583  final_branch_condition = ne;
5584 
5585  } else if (String::Equals(type_name, factory->function_string())) {
5587  __ JumpIfSmi(input, false_label);
5588  __ GetObjectType(input, scratch, input);
5589  __ Branch(true_label, eq, input, Operand(JS_FUNCTION_TYPE));
5590  *cmp1 = input;
5591  *cmp2 = Operand(JS_FUNCTION_PROXY_TYPE);
5592  final_branch_condition = eq;
5593 
5594  } else if (String::Equals(type_name, factory->object_string())) {
5595  __ JumpIfSmi(input, false_label);
5596  __ LoadRoot(at, Heap::kNullValueRootIndex);
5597  __ Branch(USE_DELAY_SLOT, true_label, eq, at, Operand(input));
5598  Register map = input;
5599  __ GetObjectType(input, map, scratch);
5600  __ Branch(false_label,
5601  lt, scratch, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
5602  __ Branch(USE_DELAY_SLOT, false_label,
5603  gt, scratch, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
5604  // map is still valid, so the BitField can be loaded in delay slot.
5605  // Check for undetectable objects => false.
5607  __ And(at, at, 1 << Map::kIsUndetectable);
5608  *cmp1 = at;
5609  *cmp2 = Operand(zero_reg);
5610  final_branch_condition = eq;
5611 
5612  } else {
5613  *cmp1 = at;
5614  *cmp2 = Operand(zero_reg); // Set to valid regs, to avoid caller assertion.
5615  __ Branch(false_label);
5616  }
5617 
5618  return final_branch_condition;
5619 }

References __, v8::internal::eq, v8::internal::String::Equals(), v8::internal::FieldMemOperand(), v8::internal::FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, v8::internal::FIRST_NONSTRING_TYPE, v8::internal::ge, v8::internal::gt, v8::internal::JS_FUNCTION_PROXY_TYPE, v8::internal::JS_FUNCTION_TYPE, v8::internal::Map::kBitFieldOffset, v8::internal::Map::kIsUndetectable, v8::internal::HeapObject::kMapOffset, v8::internal::kNoCondition, v8::internal::LAST_NONCALLABLE_SPEC_OBJECT_TYPE, v8::internal::lt, map, v8::internal::ne, v8::internal::NUM_OF_CALLABLE_SPEC_OBJECT_TYPES, scratch0(), v8::internal::STATIC_ASSERT(), v8::internal::SYMBOL_TYPE, and v8::internal::USE_DELAY_SLOT.

+ Here is the call graph for this function:

◆ EmitTypeofIs() [3/6]

Condition v8::internal::LCodeGen::EmitTypeofIs ( Label *  true_label,
Label *  false_label,
Register  input,
Handle< String type_name,
Register cmp1,
Operand cmp2 
)
private

◆ EmitTypeofIs() [4/6]

Condition v8::internal::LCodeGen::EmitTypeofIs ( LTypeofIsAndBranch *  instr,
Register  input 
)
private

◆ EmitTypeofIs() [5/6]

Condition v8::internal::LCodeGen::EmitTypeofIs ( LTypeofIsAndBranch *  instr,
Register  input 
)
private

◆ EmitTypeofIs() [6/6]

Condition v8::internal::LCodeGen::EmitTypeofIs ( LTypeofIsAndBranch *  instr,
Register  input 
)
private

◆ EmitVectorLoadICRegisters() [1/7]

template<class T >
void v8::internal::LCodeGen::EmitVectorLoadICRegisters ( T instr)
private

Definition at line 2995 of file lithium-codegen-arm.cc.

2995  {
2996  DCHECK(FLAG_vector_ics);
2997  Register vector = ToRegister(instr->temp_vector());
2999  __ Move(vector, instr->hydrogen()->feedback_vector());
3000  // No need to allocate this register.
3003  Operand(Smi::FromInt(instr->hydrogen()->slot())));
3004 }
static const Register VectorRegister()

References __, DCHECK, v8::internal::Smi::FromInt(), v8::internal::Register::is(), v8::internal::r0, v8::internal::VectorLoadICTrampolineDescriptor::SlotRegister(), ToRegister(), and v8::internal::VectorLoadICDescriptor::VectorRegister().

+ Here is the call graph for this function:

◆ EmitVectorLoadICRegisters() [2/7]

template<class T >
void v8::internal::LCodeGen::EmitVectorLoadICRegisters ( T instr)
private

◆ EmitVectorLoadICRegisters() [3/7]

template<class T >
void v8::internal::LCodeGen::EmitVectorLoadICRegisters ( T instr)
private

◆ EmitVectorLoadICRegisters() [4/7]

template<class T >
void v8::internal::LCodeGen::EmitVectorLoadICRegisters ( T instr)
private

◆ EmitVectorLoadICRegisters() [5/7]

template<class T >
void v8::internal::LCodeGen::EmitVectorLoadICRegisters ( T instr)
private

◆ EmitVectorLoadICRegisters() [6/7]

template<class T >
void v8::internal::LCodeGen::EmitVectorLoadICRegisters ( T instr)
private

◆ EmitVectorLoadICRegisters() [7/7]

template<class T >
void v8::internal::LCodeGen::EmitVectorLoadICRegisters ( T instr)
private

◆ EnsureRelocSpaceForDeoptimization() [1/2]

void v8::internal::LCodeGen::EnsureRelocSpaceForDeoptimization ( )

◆ EnsureRelocSpaceForDeoptimization() [2/2]

void v8::internal::LCodeGen::EnsureRelocSpaceForDeoptimization ( )

◆ EnsureSpaceForLazyDeopt() [1/7]

void v8::internal::LCodeGen::EnsureSpaceForLazyDeopt ( int  space_needed)
private

Definition at line 5623 of file lithium-codegen-arm.cc.

5623  {
5624  if (!info()->IsStub()) {
5625  // Ensure that we have enough space after the previous lazy-bailout
5626  // instruction for patching the code here.
5627  int current_pc = masm()->pc_offset();
5628  if (current_pc < last_lazy_deopt_pc_ + space_needed) {
5629  // Block literal pool emission for duration of padding.
5630  Assembler::BlockConstPoolScope block_const_pool(masm());
5631  int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
5632  DCHECK_EQ(0, padding_size % Assembler::kInstrSize);
5633  while (padding_size > 0) {
5634  __ nop();
5635  padding_size -= Assembler::kInstrSize;
5636  }
5637  }
5638  }
5639  last_lazy_deopt_pc_ = masm()->pc_offset();
5640 }
#define DCHECK_EQ(v1, v2)
Definition: logging.h:206

References __, DCHECK_EQ, and v8::internal::Assembler::kInstrSize.

Referenced by GenerateBodyInstructionPre().

+ Here is the caller graph for this function:

◆ EnsureSpaceForLazyDeopt() [2/7]

void v8::internal::LCodeGen::EnsureSpaceForLazyDeopt ( int  space_needed)
private

◆ EnsureSpaceForLazyDeopt() [3/7]

void v8::internal::LCodeGen::EnsureSpaceForLazyDeopt ( int  space_needed)
private

◆ EnsureSpaceForLazyDeopt() [4/7]

void v8::internal::LCodeGen::EnsureSpaceForLazyDeopt ( int  space_needed)
private

◆ EnsureSpaceForLazyDeopt() [5/7]

void v8::internal::LCodeGen::EnsureSpaceForLazyDeopt ( int  space_needed)
private

◆ EnsureSpaceForLazyDeopt() [6/7]

void v8::internal::LCodeGen::EnsureSpaceForLazyDeopt ( int  space_needed)
private

◆ EnsureSpaceForLazyDeopt() [7/7]

void v8::internal::LCodeGen::EnsureSpaceForLazyDeopt ( int  space_needed)
private

◆ FinishCode() [1/7]

void v8::internal::LCodeGen::FinishCode ( Handle< Code code)

Definition at line 60 of file lithium-codegen-arm.cc.

60  {
61  DCHECK(is_done());
62  code->set_stack_slots(GetStackSlotCount());
63  code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
64  if (code->is_optimized_code()) RegisterWeakObjectsInOptimizedCode(code);
66 }
void PopulateDeoptimizationData(Handle< Code > code)

References DCHECK, GetStackSlotCount(), PopulateDeoptimizationData(), and safepoints_.

Referenced by v8::internal::LChunk::Codegen().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ FinishCode() [2/7]

void v8::internal::LCodeGen::FinishCode ( Handle< Code code)

◆ FinishCode() [3/7]

void v8::internal::LCodeGen::FinishCode ( Handle< Code code)

◆ FinishCode() [4/7]

void v8::internal::LCodeGen::FinishCode ( Handle< Code code)

◆ FinishCode() [5/7]

void v8::internal::LCodeGen::FinishCode ( Handle< Code code)

◆ FinishCode() [6/7]

void v8::internal::LCodeGen::FinishCode ( Handle< Code code)

◆ FinishCode() [7/7]

void v8::internal::LCodeGen::FinishCode ( Handle< Code code)

◆ FlushX87StackIfNecessary()

void v8::internal::LCodeGen::FlushX87StackIfNecessary ( LInstruction instr)
inlineprivate

Definition at line 355 of file lithium-codegen-x87.h.

355  {
356  x87_stack_.FlushIfNecessary(instr, this);
357  }
void FlushIfNecessary(LInstruction *instr, LCodeGen *cgen)

References v8::internal::LCodeGen::X87Stack::FlushIfNecessary(), and x87_stack_.

+ Here is the call graph for this function:

◆ GenerateBodyInstructionPost() [1/3]

void v8::internal::LCodeGen::GenerateBodyInstructionPost ( LInstruction instr)
private

◆ GenerateBodyInstructionPost() [2/3]

void v8::internal::LCodeGen::GenerateBodyInstructionPost ( LInstruction instr)
private

◆ GenerateBodyInstructionPost() [3/3]

void v8::internal::LCodeGen::GenerateBodyInstructionPost ( LInstruction instr)
private

◆ GenerateBodyInstructionPre() [1/7]

void v8::internal::LCodeGen::GenerateBodyInstructionPre ( LInstruction instr)
private

Definition at line 255 of file lithium-codegen-arm.cc.

255  {
256  if (instr->IsCall()) {
258  }
259  if (!instr->IsLazyBailout() && !instr->IsGap()) {
260  safepoints_.BumpLastLazySafepointIndex();
261  }
262 }
void EnsureSpaceForLazyDeopt(int space_needed) OVERRIDE

References EnsureSpaceForLazyDeopt(), v8::internal::LInstruction::IsCall(), v8::internal::LInstruction::IsGap(), v8::internal::Deoptimizer::patch_size(), and safepoints_.

+ Here is the call graph for this function:

◆ GenerateBodyInstructionPre() [2/7]

void v8::internal::LCodeGen::GenerateBodyInstructionPre ( LInstruction instr)
private

◆ GenerateBodyInstructionPre() [3/7]

void v8::internal::LCodeGen::GenerateBodyInstructionPre ( LInstruction instr)
private

◆ GenerateBodyInstructionPre() [4/7]

void v8::internal::LCodeGen::GenerateBodyInstructionPre ( LInstruction instr)
private

◆ GenerateBodyInstructionPre() [5/7]

void v8::internal::LCodeGen::GenerateBodyInstructionPre ( LInstruction instr)
private

◆ GenerateBodyInstructionPre() [6/7]

void v8::internal::LCodeGen::GenerateBodyInstructionPre ( LInstruction instr)
private

◆ GenerateBodyInstructionPre() [7/7]

void v8::internal::LCodeGen::GenerateBodyInstructionPre ( LInstruction instr)
private

◆ GenerateCode() [1/7]

bool v8::internal::LCodeGen::GenerateCode ( )

Definition at line 45 of file lithium-codegen-arm.cc.

45  {
46  LPhase phase("Z_Code generation", chunk());
47  DCHECK(is_unused());
48  status_ = GENERATING;
49 
50  // Open a frame scope to indicate that there is a frame on the stack. The
51  // NONE indicates that the scope shouldn't actually generate code to set up
52  // the frame (that is done in GeneratePrologue).
53  FrameScope frame_scope(masm_, StackFrame::NONE);
54 
55  return GeneratePrologue() && GenerateBody() && GenerateDeferredCode() &&
57 }
@ NONE

References chunk(), DCHECK, GenerateDeferredCode(), GenerateJumpTable(), GeneratePrologue(), GenerateSafepointTable(), and NONE.

Referenced by v8::internal::LChunk::Codegen().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ GenerateCode() [2/7]

bool v8::internal::LCodeGen::GenerateCode ( )

◆ GenerateCode() [3/7]

bool v8::internal::LCodeGen::GenerateCode ( )

◆ GenerateCode() [4/7]

bool v8::internal::LCodeGen::GenerateCode ( )

◆ GenerateCode() [5/7]

bool v8::internal::LCodeGen::GenerateCode ( )

◆ GenerateCode() [6/7]

bool v8::internal::LCodeGen::GenerateCode ( )

◆ GenerateCode() [7/7]

bool v8::internal::LCodeGen::GenerateCode ( )

◆ GenerateDeferredCode() [1/7]

bool v8::internal::LCodeGen::GenerateDeferredCode ( )
private

Definition at line 265 of file lithium-codegen-arm.cc.

265  {
266  DCHECK(is_generating());
267  if (deferred_.length() > 0) {
268  for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
269  LDeferredCode* code = deferred_[i];
270 
271  HValue* value =
272  instructions_->at(code->instruction_index())->hydrogen_value();
274  chunk()->graph()->SourcePositionToScriptPosition(value->position()));
275 
276  Comment(";;; <@%d,#%d> "
277  "-------------------- Deferred %s --------------------",
278  code->instruction_index(),
279  code->instr()->hydrogen_value()->id(),
280  code->instr()->Mnemonic());
281  __ bind(code->entry());
282  if (NeedsDeferredFrame()) {
283  Comment(";;; Build frame");
285  DCHECK(info()->IsStub());
286  frame_is_built_ = true;
287  __ PushFixedFrame();
288  __ mov(scratch0(), Operand(Smi::FromInt(StackFrame::STUB)));
289  __ push(scratch0());
291  Comment(";;; Deferred code");
292  }
293  code->Generate();
294  if (NeedsDeferredFrame()) {
295  Comment(";;; Destroy frame");
297  __ pop(ip);
298  __ PopFixedFrame();
299  frame_is_built_ = false;
300  }
301  __ jmp(code->exit());
302  }
303  }
304 
305  // Force constant pool emission at the end of the deferred code to make
306  // sure that no constant pools are emitted after.
307  masm()->CheckConstPool(true, false);
308 
309  return !is_aborted();
310 }
void RecordAndWritePosition(int position) OVERRIDE
static const int kFixedFrameSizeFromFp
Definition: frames.h:157
const Register sp

References __, chunk(), DCHECK, deferred_, v8::internal::LDeferredCode::entry(), v8::internal::LDeferredCode::exit(), v8::internal::fp, frame_is_built_, v8::internal::Smi::FromInt(), v8::internal::LDeferredCode::Generate(), graph(), v8::internal::LInstruction::hydrogen_value(), v8::internal::HValue::id(), v8::internal::LDeferredCode::instr(), v8::internal::LDeferredCode::instruction_index(), v8::internal::ip, v8::internal::StandardFrameConstants::kFixedFrameSizeFromFp, v8::internal::LInstruction::Mnemonic(), NeedsDeferredFrame(), v8::internal::HValue::position(), RecordAndWritePosition(), scratch0(), v8::internal::sp, and v8::internal::STUB.

Referenced by GenerateCode().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ GenerateDeferredCode() [2/7]

bool v8::internal::LCodeGen::GenerateDeferredCode ( )
private

◆ GenerateDeferredCode() [3/7]

bool v8::internal::LCodeGen::GenerateDeferredCode ( )
private

◆ GenerateDeferredCode() [4/7]

bool v8::internal::LCodeGen::GenerateDeferredCode ( )
private

◆ GenerateDeferredCode() [5/7]

bool v8::internal::LCodeGen::GenerateDeferredCode ( )
private

◆ GenerateDeferredCode() [6/7]

bool v8::internal::LCodeGen::GenerateDeferredCode ( )
private

◆ GenerateDeferredCode() [7/7]

bool v8::internal::LCodeGen::GenerateDeferredCode ( )
private

◆ GenerateJumpTable() [1/7]

bool v8::internal::LCodeGen::GenerateJumpTable ( )
private

Definition at line 313 of file lithium-codegen-arm.cc.

313  {
314  // Check that the jump table is accessible from everywhere in the function
315  // code, i.e. that offsets to the table can be encoded in the 24bit signed
316  // immediate of a branch instruction.
317  // To simplify we consider the code size from the first instruction to the
318  // end of the jump table. We also don't consider the pc load delta.
319  // Each entry in the jump table generates one instruction and inlines one
320  // 32-bit data after it.
321  if (!is_int24((masm()->pc_offset() / Assembler::kInstrSize) +
322  jump_table_.length() * 7)) {
323  Abort(kGeneratedCodeIsTooLarge);
324  }
325 
326  if (jump_table_.length() > 0) {
327  Label needs_frame, call_deopt_entry;
328 
329  Comment(";;; -------------------- Jump table --------------------");
330  Address base = jump_table_[0].address;
331 
332  Register entry_offset = scratch0();
333 
334  int length = jump_table_.length();
335  for (int i = 0; i < length; i++) {
336  Deoptimizer::JumpTableEntry* table_entry = &jump_table_[i];
337  __ bind(&table_entry->label);
338 
339  DCHECK_EQ(jump_table_[0].bailout_type, table_entry->bailout_type);
340  Address entry = table_entry->address;
341  DeoptComment(table_entry->reason);
342 
343  // Second-level deopt table entries are contiguous and small, so instead
344  // of loading the full, absolute address of each one, load an immediate
345  // offset which will be added to the base address later.
346  __ mov(entry_offset, Operand(entry - base));
347 
348  if (table_entry->needs_frame) {
349  DCHECK(!info()->saves_caller_doubles());
350  if (needs_frame.is_bound()) {
351  __ b(&needs_frame);
352  } else {
353  __ bind(&needs_frame);
354  Comment(";;; call deopt with frame");
355  __ PushFixedFrame();
356  // This variant of deopt can only be used with stubs. Since we don't
357  // have a function pointer to install in the stack frame that we're
358  // building, install a special marker there instead.
359  DCHECK(info()->IsStub());
360  __ mov(ip, Operand(Smi::FromInt(StackFrame::STUB)));
361  __ push(ip);
362  __ add(fp, sp,
364  __ bind(&call_deopt_entry);
365  // Add the base address to the offset previously loaded in
366  // entry_offset.
367  __ add(entry_offset, entry_offset,
368  Operand(ExternalReference::ForDeoptEntry(base)));
369  __ blx(entry_offset);
370  }
371 
372  masm()->CheckConstPool(false, false);
373  } else {
374  // The last entry can fall through into `call_deopt_entry`, avoiding a
375  // branch.
376  bool need_branch = ((i + 1) != length) || call_deopt_entry.is_bound();
377 
378  if (need_branch) __ b(&call_deopt_entry);
379 
380  masm()->CheckConstPool(false, !need_branch);
381  }
382  }
383 
384  if (!call_deopt_entry.is_bound()) {
385  Comment(";;; call deopt");
386  __ bind(&call_deopt_entry);
387 
388  if (info()->saves_caller_doubles()) {
389  DCHECK(info()->IsStub());
391  }
392 
393  // Add the base address to the offset previously loaded in entry_offset.
394  __ add(entry_offset, entry_offset,
395  Operand(ExternalReference::ForDeoptEntry(base)));
396  __ blx(entry_offset);
397  }
398  }
399 
400  // Force constant pool emission at the end of the deopt jump table to make
401  // sure that no constant pools are emitted after.
402  masm()->CheckConstPool(true, false);
403 
404  // The deoptimization jump table is the last part of the instruction
405  // sequence. Mark the generated code as done unless we bailed out.
406  if (!is_aborted()) status_ = DONE;
407  return !is_aborted();
408 }

References __, v8::internal::Deoptimizer::JumpTableEntry::address, v8::internal::Deoptimizer::JumpTableEntry::bailout_type, DCHECK, DCHECK_EQ, v8::internal::DONE, v8::internal::fp, v8::internal::Smi::FromInt(), v8::internal::ip, jump_table_, v8::internal::StandardFrameConstants::kFixedFrameSizeFromFp, v8::internal::Assembler::kInstrSize, v8::internal::Deoptimizer::JumpTableEntry::label, v8::internal::Deoptimizer::JumpTableEntry::needs_frame, v8::internal::Deoptimizer::JumpTableEntry::reason, RestoreCallerDoubles(), scratch0(), v8::internal::sp, and v8::internal::STUB.

Referenced by GenerateCode().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ GenerateJumpTable() [2/7]

bool v8::internal::LCodeGen::GenerateJumpTable ( )
private

◆ GenerateJumpTable() [3/7]

bool v8::internal::LCodeGen::GenerateJumpTable ( )
private

◆ GenerateJumpTable() [4/7]

bool v8::internal::LCodeGen::GenerateJumpTable ( )
private

◆ GenerateJumpTable() [5/7]

bool v8::internal::LCodeGen::GenerateJumpTable ( )
private

◆ GenerateJumpTable() [6/7]

bool v8::internal::LCodeGen::GenerateJumpTable ( )
private

◆ GenerateJumpTable() [7/7]

bool v8::internal::LCodeGen::GenerateJumpTable ( )
private

◆ GenerateOsrPrologue() [1/7]

void v8::internal::LCodeGen::GenerateOsrPrologue ( )
private

Definition at line 240 of file lithium-codegen-arm.cc.

240  {
241  // Generate the OSR entry prologue at the first unknown OSR value, or if there
242  // are none, at the OSR entrypoint instruction.
243  if (osr_pc_offset_ >= 0) return;
244 
245  osr_pc_offset_ = masm()->pc_offset();
246 
247  // Adjust the frame size, subsuming the unoptimized frame into the
248  // optimized frame.
249  int slots = GetStackSlotCount() - graph()->osr()->UnoptimizedFrameSlots();
250  DCHECK(slots >= 0);
251  __ sub(sp, sp, Operand(slots * kPointerSize));
252 }

References __, DCHECK, GetStackSlotCount(), graph(), v8::internal::kPointerSize, osr_pc_offset_, and v8::internal::sp.

+ Here is the call graph for this function:

◆ GenerateOsrPrologue() [2/7]

void v8::internal::LCodeGen::GenerateOsrPrologue ( )
private

◆ GenerateOsrPrologue() [3/7]

void v8::internal::LCodeGen::GenerateOsrPrologue ( )
private

◆ GenerateOsrPrologue() [4/7]

void v8::internal::LCodeGen::GenerateOsrPrologue ( )
private

◆ GenerateOsrPrologue() [5/7]

void v8::internal::LCodeGen::GenerateOsrPrologue ( )
private

◆ GenerateOsrPrologue() [6/7]

void v8::internal::LCodeGen::GenerateOsrPrologue ( )
private

◆ GenerateOsrPrologue() [7/7]

void v8::internal::LCodeGen::GenerateOsrPrologue ( )
private

◆ GeneratePrologue() [1/7]

bool v8::internal::LCodeGen::GeneratePrologue ( )
private

Definition at line 101 of file lithium-codegen-arm.cc.

101  {
102  DCHECK(is_generating());
103 
104  if (info()->IsOptimizing()) {
106 
107 #ifdef DEBUG
108  if (strlen(FLAG_stop_at) > 0 &&
109  info_->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
110  __ stop("stop_at");
111  }
112 #endif
113 
114  // r1: Callee's JS function.
115  // cp: Callee's context.
116  // pp: Callee's constant pool pointer (if FLAG_enable_ool_constant_pool)
117  // fp: Caller's frame pointer.
118  // lr: Caller's pc.
119 
120  // Sloppy mode functions and builtins need to replace the receiver with the
121  // global proxy when called as functions (without an explicit receiver
122  // object).
123  if (info_->this_has_uses() &&
124  info_->strict_mode() == SLOPPY &&
125  !info_->is_native()) {
126  Label ok;
127  int receiver_offset = info_->scope()->num_parameters() * kPointerSize;
128  __ ldr(r2, MemOperand(sp, receiver_offset));
129  __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
130  __ b(ne, &ok);
131 
132  __ ldr(r2, GlobalObjectOperand());
134 
135  __ str(r2, MemOperand(sp, receiver_offset));
136 
137  __ bind(&ok);
138  }
139  }
140 
141  info()->set_prologue_offset(masm_->pc_offset());
142  if (NeedsEagerFrame()) {
143  if (info()->IsStub()) {
144  __ StubPrologue();
145  } else {
146  __ Prologue(info()->IsCodePreAgingActive());
147  }
148  frame_is_built_ = true;
149  info_->AddNoFrameRange(0, masm_->pc_offset());
150  }
151 
152  // Reserve space for the stack slots needed by the code.
153  int slots = GetStackSlotCount();
154  if (slots > 0) {
155  if (FLAG_debug_code) {
156  __ sub(sp, sp, Operand(slots * kPointerSize));
157  __ push(r0);
158  __ push(r1);
159  __ add(r0, sp, Operand(slots * kPointerSize));
160  __ mov(r1, Operand(kSlotsZapValue));
161  Label loop;
162  __ bind(&loop);
163  __ sub(r0, r0, Operand(kPointerSize));
164  __ str(r1, MemOperand(r0, 2 * kPointerSize));
165  __ cmp(r0, sp);
166  __ b(ne, &loop);
167  __ pop(r1);
168  __ pop(r0);
169  } else {
170  __ sub(sp, sp, Operand(slots * kPointerSize));
171  }
172  }
173 
174  if (info()->saves_caller_doubles()) {
176  }
177 
178  // Possibly allocate a local context.
179  int heap_slots = info()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
180  if (heap_slots > 0) {
181  Comment(";;; Allocate local context");
182  bool need_write_barrier = true;
183  // Argument to NewContext is the function, which is in r1.
184  if (heap_slots <= FastNewContextStub::kMaximumSlots) {
185  FastNewContextStub stub(isolate(), heap_slots);
186  __ CallStub(&stub);
187  // Result of FastNewContextStub is always in new space.
188  need_write_barrier = false;
189  } else {
190  __ push(r1);
191  __ CallRuntime(Runtime::kNewFunctionContext, 1);
192  }
193  RecordSafepoint(Safepoint::kNoLazyDeopt);
194  // Context is returned in both r0 and cp. It replaces the context
195  // passed to us. It's saved in the stack and kept live in cp.
196  __ mov(cp, r0);
198  // Copy any necessary parameters into the context.
199  int num_parameters = scope()->num_parameters();
200  for (int i = 0; i < num_parameters; i++) {
201  Variable* var = scope()->parameter(i);
202  if (var->IsContextSlot()) {
203  int parameter_offset = StandardFrameConstants::kCallerSPOffset +
204  (num_parameters - 1 - i) * kPointerSize;
205  // Load parameter from stack.
206  __ ldr(r0, MemOperand(fp, parameter_offset));
207  // Store it in the context.
208  MemOperand target = ContextOperand(cp, var->index());
209  __ str(r0, target);
210  // Update the write barrier. This clobbers r3 and r0.
211  if (need_write_barrier) {
212  __ RecordWriteContextSlot(
213  cp,
214  target.offset(),
215  r0,
216  r3,
218  kSaveFPRegs);
219  } else if (FLAG_debug_code) {
220  Label done;
221  __ JumpIfInNewSpace(cp, r0, &done);
222  __ Abort(kExpectedNewSpaceObject);
223  __ bind(&done);
224  }
225  }
226  }
227  Comment(";;; End allocate local context");
228  }
229 
230  // Trace the call.
231  if (FLAG_trace && info()->IsOptimizing()) {
232  // We have not executed any compiled code yet, so cp still holds the
233  // incoming context.
234  __ CallRuntime(Runtime::kTraceEnter, 0);
235  }
236  return !is_aborted();
237 }
static const int kGlobalProxyOffset
Definition: objects.h:7461
void RecordSafepoint(LPointerMap *pointers, Safepoint::Kind kind, int arguments, Safepoint::DeoptMode mode)
static void MaybeCallEntryHook(MacroAssembler *masm)
int num_parameters() const
Definition: scopes.h:321
Variable * parameter(int index) const
Definition: scopes.h:316
static const int kCallerSPOffset
Definition: frames.h:167
Vector< const char > CStrVector(const char *data)
Definition: vector.h:158
MemOperand ContextOperand(Register context, int index)
MemOperand GlobalObjectOperand()
const uint32_t kSlotsZapValue
Definition: globals.h:273

References __, CallRuntime(), v8::internal::ContextOperand(), v8::internal::cp, v8::internal::CStrVector(), DCHECK, v8::internal::FieldMemOperand(), v8::internal::fp, frame_is_built_, GetLinkRegisterState(), GetStackSlotCount(), v8::internal::GlobalObjectOperand(), v8::internal::Variable::index(), v8::internal::Variable::IsContextSlot(), v8::internal::StandardFrameConstants::kCallerSPOffset, v8::internal::StandardFrameConstants::kContextOffset, v8::internal::GlobalObject::kGlobalProxyOffset, v8::internal::kPointerSize, v8::internal::kSaveFPRegs, v8::internal::kSlotsZapValue, v8::internal::ProfileEntryHookStub::MaybeCallEntryHook(), v8::internal::Context::MIN_CONTEXT_SLOTS, v8::internal::ne, NeedsEagerFrame(), v8::internal::Scope::num_parameters(), v8::internal::MemOperand::offset(), v8::internal::Scope::parameter(), v8::internal::r0, v8::internal::r1, v8::internal::r2, v8::internal::r3, RecordSafepoint(), SaveCallerDoubles(), scope(), v8::internal::SLOPPY, and v8::internal::sp.

Referenced by GenerateCode().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ GeneratePrologue() [2/7]

bool v8::internal::LCodeGen::GeneratePrologue ( )
private

◆ GeneratePrologue() [3/7]

bool v8::internal::LCodeGen::GeneratePrologue ( )
private

◆ GeneratePrologue() [4/7]

bool v8::internal::LCodeGen::GeneratePrologue ( )
private

◆ GeneratePrologue() [5/7]

bool v8::internal::LCodeGen::GeneratePrologue ( )
private

◆ GeneratePrologue() [6/7]

bool v8::internal::LCodeGen::GeneratePrologue ( )
private

◆ GeneratePrologue() [7/7]

bool v8::internal::LCodeGen::GeneratePrologue ( )
private

◆ GenerateSafepointTable() [1/7]

bool v8::internal::LCodeGen::GenerateSafepointTable ( )
private

Definition at line 411 of file lithium-codegen-arm.cc.

411  {
412  DCHECK(is_done());
413  safepoints_.Emit(masm(), GetStackSlotCount());
414  return !is_aborted();
415 }

References DCHECK, GetStackSlotCount(), and safepoints_.

Referenced by GenerateCode().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ GenerateSafepointTable() [2/7]

bool v8::internal::LCodeGen::GenerateSafepointTable ( )
private

◆ GenerateSafepointTable() [3/7]

bool v8::internal::LCodeGen::GenerateSafepointTable ( )
private

◆ GenerateSafepointTable() [4/7]

bool v8::internal::LCodeGen::GenerateSafepointTable ( )
private

◆ GenerateSafepointTable() [5/7]

bool v8::internal::LCodeGen::GenerateSafepointTable ( )
private

◆ GenerateSafepointTable() [6/7]

bool v8::internal::LCodeGen::GenerateSafepointTable ( )
private

◆ GenerateSafepointTable() [7/7]

bool v8::internal::LCodeGen::GenerateSafepointTable ( )
private

◆ GetLinkRegisterState() [1/2]

LinkRegisterStatus v8::internal::LCodeGen::GetLinkRegisterState ( ) const
inline

Definition at line 62 of file lithium-codegen-arm.h.

References frame_is_built_, v8::internal::kLRHasBeenSaved, and v8::internal::kLRHasNotBeenSaved.

Referenced by DoStoreKeyedFixedArray(), and GeneratePrologue().

+ Here is the caller graph for this function:

◆ GetLinkRegisterState() [2/2]

LinkRegisterStatus v8::internal::LCodeGen::GetLinkRegisterState ( ) const
inline

◆ GetNextInstruction() [1/3]

LInstruction* v8::internal::LCodeGen::GetNextInstruction ( )
private

◆ GetNextInstruction() [2/3]

LInstruction* v8::internal::LCodeGen::GetNextInstruction ( )
private

◆ GetNextInstruction() [3/3]

LInstruction* v8::internal::LCodeGen::GetNextInstruction ( )
private

◆ GetRAState() [1/2]

RAStatus v8::internal::LCodeGen::GetRAState ( ) const
inline

◆ GetRAState() [2/2]

RAStatus v8::internal::LCodeGen::GetRAState ( ) const
inline

◆ GetStackSlotCount() [1/7]

int v8::internal::LCodeGen::GetStackSlotCount ( ) const
inlineprivate

Definition at line 163 of file lithium-codegen-arm.h.

163 { return chunk()->spill_slot_count(); }

References chunk().

Referenced by FinishCode(), GenerateOsrPrologue(), GeneratePrologue(), GenerateSafepointTable(), NeedsEagerFrame(), and ToMemOperand().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ GetStackSlotCount() [2/7]

int v8::internal::LCodeGen::GetStackSlotCount ( ) const
inlineprivate

Definition at line 261 of file lithium-codegen-arm64.h.

261 { return chunk()->spill_slot_count(); }

References chunk().

+ Here is the call graph for this function:

◆ GetStackSlotCount() [3/7]

int v8::internal::LCodeGen::GetStackSlotCount ( ) const
inlineprivate

Definition at line 142 of file lithium-codegen-ia32.h.

142 { return chunk()->spill_slot_count(); }

References chunk().

+ Here is the call graph for this function:

◆ GetStackSlotCount() [4/7]

int v8::internal::LCodeGen::GetStackSlotCount ( ) const
inlineprivate

Definition at line 163 of file lithium-codegen-mips.h.

163 { return chunk()->spill_slot_count(); }

References chunk().

+ Here is the call graph for this function:

◆ GetStackSlotCount() [5/7]

int v8::internal::LCodeGen::GetStackSlotCount ( ) const
inlineprivate

Definition at line 164 of file lithium-codegen-mips64.h.

164 { return chunk()->spill_slot_count(); }

References chunk().

+ Here is the call graph for this function:

◆ GetStackSlotCount() [6/7]

int v8::internal::LCodeGen::GetStackSlotCount ( ) const
inlineprivate

Definition at line 136 of file lithium-codegen-x64.h.

136 { return chunk()->spill_slot_count(); }

References chunk().

+ Here is the call graph for this function:

◆ GetStackSlotCount() [7/7]

int v8::internal::LCodeGen::GetStackSlotCount ( ) const
inlineprivate

Definition at line 174 of file lithium-codegen-x87.h.

174 { return chunk()->spill_slot_count(); }

References chunk().

+ Here is the call graph for this function:

◆ graph()

HGraph* v8::internal::LCodeGen::graph ( ) const
inlineprivate

Definition at line 125 of file lithium-codegen-x64.h.

125 { return chunk()->graph(); }

References chunk().

Referenced by GenerateDeferredCode(), and GenerateOsrPrologue().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ HighOperand() [1/2]

Operand v8::internal::LCodeGen::HighOperand ( LOperand op)

◆ HighOperand() [2/2]

Operand v8::internal::LCodeGen::HighOperand ( LOperand op)

◆ IsDehoistedKeyConstant()

bool v8::internal::LCodeGen::IsDehoistedKeyConstant ( LConstantOperand *  op) const

◆ IsInteger32() [1/5]

bool v8::internal::LCodeGen::IsInteger32 ( LConstantOperand *  op) const

Definition at line 507 of file lithium-codegen-arm.cc.

507  {
508  return chunk_->LookupLiteralRepresentation(op).IsSmiOrInteger32();
509 }

◆ IsInteger32() [2/5]

bool v8::internal::LCodeGen::IsInteger32 ( LConstantOperand *  op) const

◆ IsInteger32() [3/5]

bool v8::internal::LCodeGen::IsInteger32 ( LConstantOperand *  op) const

◆ IsInteger32() [4/5]

bool v8::internal::LCodeGen::IsInteger32 ( LConstantOperand *  op) const

◆ IsInteger32() [5/5]

bool v8::internal::LCodeGen::IsInteger32 ( LConstantOperand *  op) const

◆ IsInteger32Constant() [1/2]

bool v8::internal::LCodeGen::IsInteger32Constant ( LConstantOperand *  op) const

Definition at line 1323 of file lithium-codegen-arm64.cc.

1323  {
1324  return chunk_->LookupLiteralRepresentation(op).IsSmiOrInteger32();
1325 }

◆ IsInteger32Constant() [2/2]

bool v8::internal::LCodeGen::IsInteger32Constant ( LConstantOperand *  op) const

◆ IsNextEmittedBlock() [1/7]

bool v8::internal::LCodeGen::IsNextEmittedBlock ( int  block_id) const
inline

Definition at line 48 of file lithium-codegen-arm.h.

48  {
49  return LookupDestination(block_id) == GetNextEmittedBlock();
50  }

References LookupDestination().

Referenced by EmitGoto().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ IsNextEmittedBlock() [2/7]

bool v8::internal::LCodeGen::IsNextEmittedBlock ( int  block_id) const
inline

Definition at line 57 of file lithium-codegen-arm64.h.

57  {
58  return LookupDestination(block_id) == GetNextEmittedBlock();
59  }

References LookupDestination().

+ Here is the call graph for this function:

◆ IsNextEmittedBlock() [3/7]

bool v8::internal::LCodeGen::IsNextEmittedBlock ( int  block_id) const
inline

Definition at line 51 of file lithium-codegen-ia32.h.

51  {
52  return LookupDestination(block_id) == GetNextEmittedBlock();
53  }

References LookupDestination().

+ Here is the call graph for this function:

◆ IsNextEmittedBlock() [4/7]

bool v8::internal::LCodeGen::IsNextEmittedBlock ( int  block_id) const
inline

Definition at line 47 of file lithium-codegen-mips.h.

47  {
48  return LookupDestination(block_id) == GetNextEmittedBlock();
49  }

References LookupDestination().

+ Here is the call graph for this function:

◆ IsNextEmittedBlock() [5/7]

bool v8::internal::LCodeGen::IsNextEmittedBlock ( int  block_id) const
inline

Definition at line 47 of file lithium-codegen-mips64.h.

47  {
48  return LookupDestination(block_id) == GetNextEmittedBlock();
49  }

References LookupDestination().

+ Here is the call graph for this function:

◆ IsNextEmittedBlock() [6/7]

bool v8::internal::LCodeGen::IsNextEmittedBlock ( int  block_id) const
inline

Definition at line 48 of file lithium-codegen-x64.h.

48  {
49  return LookupDestination(block_id) == GetNextEmittedBlock();
50  }

References LookupDestination().

+ Here is the call graph for this function:

◆ IsNextEmittedBlock() [7/7]

bool v8::internal::LCodeGen::IsNextEmittedBlock ( int  block_id) const
inline

Definition at line 53 of file lithium-codegen-x87.h.

53  {
54  return LookupDestination(block_id) == GetNextEmittedBlock();
55  }

References LookupDestination().

+ Here is the call graph for this function:

◆ IsSmi() [1/6]

bool v8::internal::LCodeGen::IsSmi ( LConstantOperand *  op) const

Definition at line 512 of file lithium-codegen-arm.cc.

512  {
513  return chunk_->LookupLiteralRepresentation(op).IsSmi();
514 }

◆ IsSmi() [2/6]

bool v8::internal::LCodeGen::IsSmi ( LConstantOperand *  op) const

◆ IsSmi() [3/6]

bool v8::internal::LCodeGen::IsSmi ( LConstantOperand *  op) const

◆ IsSmi() [4/6]

bool v8::internal::LCodeGen::IsSmi ( LConstantOperand *  op) const

◆ IsSmi() [5/6]

bool v8::internal::LCodeGen::IsSmi ( LConstantOperand *  op) const

◆ IsSmi() [6/6]

bool v8::internal::LCodeGen::IsSmi ( LConstantOperand *  op) const

◆ IsSmiConstant()

bool v8::internal::LCodeGen::IsSmiConstant ( LConstantOperand *  op) const

◆ JSShiftAmountFromLConstant()

int v8::internal::LCodeGen::JSShiftAmountFromLConstant ( LOperand constant)
inline

Definition at line 98 of file lithium-codegen-arm64.h.

98  {
99  return ToInteger32(LConstantOperand::cast(constant)) & 0x1f;
100  }

References ToInteger32().

Referenced by ToShiftedRightOperand32().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ LoadContextFromDeferred() [1/7]

void v8::internal::LCodeGen::LoadContextFromDeferred ( LOperand context)
private

Definition at line 781 of file lithium-codegen-arm.cc.

781  {
782  if (context->IsRegister()) {
783  __ Move(cp, ToRegister(context));
784  } else if (context->IsStackSlot()) {
785  __ ldr(cp, ToMemOperand(context));
786  } else if (context->IsConstantOperand()) {
787  HConstant* constant =
788  chunk_->LookupConstant(LConstantOperand::cast(context));
789  __ Move(cp, Handle<Object>::cast(constant->handle(isolate())));
790  } else {
791  UNREACHABLE();
792  }
793 }
static Handle< T > cast(Handle< S > that)
Definition: handles.h:116

References __, v8::internal::cp, ToMemOperand(), ToRegister(), and UNREACHABLE.

Referenced by CallRuntimeFromDeferred(), DoDeferredInstanceOfKnownGlobal(), and DoDeferredStackCheck().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ LoadContextFromDeferred() [2/7]

void v8::internal::LCodeGen::LoadContextFromDeferred ( LOperand context)
private

◆ LoadContextFromDeferred() [3/7]

void v8::internal::LCodeGen::LoadContextFromDeferred ( LOperand context)
private

◆ LoadContextFromDeferred() [4/7]

void v8::internal::LCodeGen::LoadContextFromDeferred ( LOperand context)
private

◆ LoadContextFromDeferred() [5/7]

void v8::internal::LCodeGen::LoadContextFromDeferred ( LOperand context)
private

◆ LoadContextFromDeferred() [6/7]

void v8::internal::LCodeGen::LoadContextFromDeferred ( LOperand context)
private

◆ LoadContextFromDeferred() [7/7]

void v8::internal::LCodeGen::LoadContextFromDeferred ( LOperand context)
private

◆ LookupDestination() [1/7]

int v8::internal::LCodeGen::LookupDestination ( int  block_id) const
inline

Definition at line 44 of file lithium-codegen-arm.h.

44  {
45  return chunk()->LookupDestination(block_id);
46  }

References chunk().

Referenced by EmitGoto(), and IsNextEmittedBlock().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ LookupDestination() [2/7]

int v8::internal::LCodeGen::LookupDestination ( int  block_id) const
inline

Definition at line 53 of file lithium-codegen-arm64.h.

53  {
54  return chunk()->LookupDestination(block_id);
55  }

References chunk().

+ Here is the call graph for this function:

◆ LookupDestination() [3/7]

int v8::internal::LCodeGen::LookupDestination ( int  block_id) const
inline

Definition at line 47 of file lithium-codegen-ia32.h.

47  {
48  return chunk()->LookupDestination(block_id);
49  }

References chunk().

+ Here is the call graph for this function:

◆ LookupDestination() [4/7]

int v8::internal::LCodeGen::LookupDestination ( int  block_id) const
inline

Definition at line 43 of file lithium-codegen-mips.h.

43  {
44  return chunk()->LookupDestination(block_id);
45  }

References chunk().

+ Here is the call graph for this function:

◆ LookupDestination() [5/7]

int v8::internal::LCodeGen::LookupDestination ( int  block_id) const
inline

Definition at line 43 of file lithium-codegen-mips64.h.

43  {
44  return chunk()->LookupDestination(block_id);
45  }

References chunk().

+ Here is the call graph for this function:

◆ LookupDestination() [6/7]

int v8::internal::LCodeGen::LookupDestination ( int  block_id) const
inline

Definition at line 44 of file lithium-codegen-x64.h.

44  {
45  return chunk()->LookupDestination(block_id);
46  }

References chunk().

+ Here is the call graph for this function:

◆ LookupDestination() [7/7]

int v8::internal::LCodeGen::LookupDestination ( int  block_id) const
inline

Definition at line 49 of file lithium-codegen-x87.h.

49  {
50  return chunk()->LookupDestination(block_id);
51  }

References chunk().

+ Here is the call graph for this function:

◆ NeedsDeferredFrame() [1/7]

bool v8::internal::LCodeGen::NeedsDeferredFrame ( ) const
inline

Definition at line 58 of file lithium-codegen-arm.h.

58  {
59  return !NeedsEagerFrame() && info()->is_deferred_calling();
60  }

References NeedsEagerFrame().

Referenced by v8::internal::LDeferredCode::done(), and GenerateDeferredCode().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ NeedsDeferredFrame() [2/7]

bool v8::internal::LCodeGen::NeedsDeferredFrame ( ) const
inline

Definition at line 67 of file lithium-codegen-arm64.h.

67  {
68  return !NeedsEagerFrame() && info()->is_deferred_calling();
69  }

References NeedsEagerFrame().

+ Here is the call graph for this function:

◆ NeedsDeferredFrame() [3/7]

bool v8::internal::LCodeGen::NeedsDeferredFrame ( ) const
inline

Definition at line 61 of file lithium-codegen-ia32.h.

61  {
62  return !NeedsEagerFrame() && info()->is_deferred_calling();
63  }

References NeedsEagerFrame().

+ Here is the call graph for this function:

◆ NeedsDeferredFrame() [4/7]

bool v8::internal::LCodeGen::NeedsDeferredFrame ( ) const
inline

Definition at line 57 of file lithium-codegen-mips.h.

57  {
58  return !NeedsEagerFrame() && info()->is_deferred_calling();
59  }

References NeedsEagerFrame().

+ Here is the call graph for this function:

◆ NeedsDeferredFrame() [5/7]

bool v8::internal::LCodeGen::NeedsDeferredFrame ( ) const
inline

Definition at line 57 of file lithium-codegen-mips64.h.

57  {
58  return !NeedsEagerFrame() && info()->is_deferred_calling();
59  }

References NeedsEagerFrame().

+ Here is the call graph for this function:

◆ NeedsDeferredFrame() [6/7]

bool v8::internal::LCodeGen::NeedsDeferredFrame ( ) const
inline

Definition at line 58 of file lithium-codegen-x64.h.

58  {
59  return !NeedsEagerFrame() && info()->is_deferred_calling();
60  }

References NeedsEagerFrame().

+ Here is the call graph for this function:

◆ NeedsDeferredFrame() [7/7]

bool v8::internal::LCodeGen::NeedsDeferredFrame ( ) const
inline

Definition at line 63 of file lithium-codegen-x87.h.

63  {
64  return !NeedsEagerFrame() && info()->is_deferred_calling();
65  }

References NeedsEagerFrame().

+ Here is the call graph for this function:

◆ NeedsEagerFrame() [1/7]

bool v8::internal::LCodeGen::NeedsEagerFrame ( ) const
inline

Definition at line 52 of file lithium-codegen-arm.h.

52  {
53  return GetStackSlotCount() > 0 ||
54  info()->is_non_deferred_calling() ||
55  !info()->IsStub() ||
56  info()->requires_frame();
57  }

References GetStackSlotCount().

Referenced by v8::internal::LChunk::Codegen(), GeneratePrologue(), NeedsDeferredFrame(), RestoreCallerDoubles(), SaveCallerDoubles(), ToHighMemOperand(), and ToMemOperand().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ NeedsEagerFrame() [2/7]

bool v8::internal::LCodeGen::NeedsEagerFrame ( ) const
inline

Definition at line 61 of file lithium-codegen-arm64.h.

61  {
62  return GetStackSlotCount() > 0 ||
63  info()->is_non_deferred_calling() ||
64  !info()->IsStub() ||
65  info()->requires_frame();
66  }

References GetStackSlotCount().

+ Here is the call graph for this function:

◆ NeedsEagerFrame() [3/7]

bool v8::internal::LCodeGen::NeedsEagerFrame ( ) const
inline

Definition at line 55 of file lithium-codegen-ia32.h.

55  {
56  return GetStackSlotCount() > 0 ||
57  info()->is_non_deferred_calling() ||
58  !info()->IsStub() ||
59  info()->requires_frame();
60  }

References GetStackSlotCount().

+ Here is the call graph for this function:

◆ NeedsEagerFrame() [4/7]

bool v8::internal::LCodeGen::NeedsEagerFrame ( ) const
inline

Definition at line 51 of file lithium-codegen-mips.h.

51  {
52  return GetStackSlotCount() > 0 ||
53  info()->is_non_deferred_calling() ||
54  !info()->IsStub() ||
55  info()->requires_frame();
56  }

References GetStackSlotCount().

+ Here is the call graph for this function:

◆ NeedsEagerFrame() [5/7]

bool v8::internal::LCodeGen::NeedsEagerFrame ( ) const
inline

Definition at line 51 of file lithium-codegen-mips64.h.

51  {
52  return GetStackSlotCount() > 0 ||
53  info()->is_non_deferred_calling() ||
54  !info()->IsStub() ||
55  info()->requires_frame();
56  }

References GetStackSlotCount().

+ Here is the call graph for this function:

◆ NeedsEagerFrame() [6/7]

bool v8::internal::LCodeGen::NeedsEagerFrame ( ) const
inline

Definition at line 52 of file lithium-codegen-x64.h.

52  {
53  return GetStackSlotCount() > 0 ||
54  info()->is_non_deferred_calling() ||
55  !info()->IsStub() ||
56  info()->requires_frame();
57  }

References GetStackSlotCount().

+ Here is the call graph for this function:

◆ NeedsEagerFrame() [7/7]

bool v8::internal::LCodeGen::NeedsEagerFrame ( ) const
inline

Definition at line 57 of file lithium-codegen-x87.h.

57  {
58  return GetStackSlotCount() > 0 ||
59  info()->is_non_deferred_calling() ||
60  !info()->IsStub() ||
61  info()->requires_frame();
62  }

References GetStackSlotCount().

+ Here is the call graph for this function:

◆ PopulateDeoptimizationData() [1/7]

void v8::internal::LCodeGen::PopulateDeoptimizationData ( Handle< Code code)
private

Definition at line 930 of file lithium-codegen-arm.cc.

930  {
931  int length = deoptimizations_.length();
932  if (length == 0) return;
933  Handle<DeoptimizationInputData> data =
934  DeoptimizationInputData::New(isolate(), length, TENURED);
935 
936  Handle<ByteArray> translations =
937  translations_.CreateByteArray(isolate()->factory());
938  data->SetTranslationByteArray(*translations);
939  data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));
940  data->SetOptimizationId(Smi::FromInt(info_->optimization_id()));
941  if (info_->IsOptimizing()) {
942  // Reference to shared function info does not change between phases.
943  AllowDeferredHandleDereference allow_handle_dereference;
944  data->SetSharedFunctionInfo(*info_->shared_info());
945  } else {
946  data->SetSharedFunctionInfo(Smi::FromInt(0));
947  }
948 
949  Handle<FixedArray> literals =
950  factory()->NewFixedArray(deoptimization_literals_.length(), TENURED);
951  { AllowDeferredHandleDereference copy_handles;
952  for (int i = 0; i < deoptimization_literals_.length(); i++) {
954  }
955  data->SetLiteralArray(*literals);
956  }
957 
958  data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id().ToInt()));
959  data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));
960 
961  // Populate the deoptimization entries.
962  for (int i = 0; i < length; i++) {
964  data->SetAstId(i, env->ast_id());
965  data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
966  data->SetArgumentsStackHeight(i,
967  Smi::FromInt(env->arguments_stack_height()));
968  data->SetPc(i, Smi::FromInt(env->pc_offset()));
969  }
970  code->set_deoptimization_data(*data);
971 }
static Handle< DeoptimizationInputData > New(Isolate *isolate, int deopt_entry_count, PretenureFlag pretenure)
Definition: objects.cc:7918
enable harmony numeric literals(0o77, 0b11)") DEFINE_BOOL(harmony_object_literals
PerThreadAssertScopeDebugOnly< DEFERRED_HANDLE_DEREFERENCE_ASSERT, true > AllowDeferredHandleDereference
Definition: assert-scope.h:130

References deoptimization_literals_, deoptimizations_, v8::internal::Smi::FromInt(), inlined_function_count_, LEnvironment, literals(), v8::internal::DeoptimizationInputData::New(), osr_pc_offset_, v8::internal::TENURED, and translations_.

Referenced by FinishCode().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ PopulateDeoptimizationData() [2/7]

void v8::internal::LCodeGen::PopulateDeoptimizationData ( Handle< Code code)
private

◆ PopulateDeoptimizationData() [3/7]

void v8::internal::LCodeGen::PopulateDeoptimizationData ( Handle< Code code)
private

◆ PopulateDeoptimizationData() [4/7]

void v8::internal::LCodeGen::PopulateDeoptimizationData ( Handle< Code code)
private

◆ PopulateDeoptimizationData() [5/7]

void v8::internal::LCodeGen::PopulateDeoptimizationData ( Handle< Code code)
private

◆ PopulateDeoptimizationData() [6/7]

void v8::internal::LCodeGen::PopulateDeoptimizationData ( Handle< Code code)
private

◆ PopulateDeoptimizationData() [7/7]

void v8::internal::LCodeGen::PopulateDeoptimizationData ( Handle< Code code)
private

◆ PopulateDeoptimizationLiteralsWithInlinedFunctions() [1/7]

void v8::internal::LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions ( )
private

Definition at line 984 of file lithium-codegen-arm.cc.

984  {
985  DCHECK(deoptimization_literals_.length() == 0);
986 
987  const ZoneList<Handle<JSFunction> >* inlined_closures =
988  chunk()->inlined_closures();
989 
990  for (int i = 0, length = inlined_closures->length();
991  i < length;
992  i++) {
993  DefineDeoptimizationLiteral(inlined_closures->at(i));
994  }
995 
997 }

References v8::internal::List< T, AllocationPolicy >::at(), chunk(), DCHECK, DefineDeoptimizationLiteral(), deoptimization_literals_, and inlined_function_count_.

Referenced by LCodeGen().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ PopulateDeoptimizationLiteralsWithInlinedFunctions() [2/7]

void v8::internal::LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions ( )
private

◆ PopulateDeoptimizationLiteralsWithInlinedFunctions() [3/7]

void v8::internal::LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions ( )
private

◆ PopulateDeoptimizationLiteralsWithInlinedFunctions() [4/7]

void v8::internal::LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions ( )
private

◆ PopulateDeoptimizationLiteralsWithInlinedFunctions() [5/7]

void v8::internal::LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions ( )
private

◆ PopulateDeoptimizationLiteralsWithInlinedFunctions() [6/7]

void v8::internal::LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions ( )
private

◆ PopulateDeoptimizationLiteralsWithInlinedFunctions() [7/7]

void v8::internal::LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions ( )
private

◆ PrepareKeyedArrayOperand()

MemOperand v8::internal::LCodeGen::PrepareKeyedArrayOperand ( Register  base,
Register  elements,
Register  key,
bool  key_is_tagged,
ElementsKind  elements_kind,
Representation  representation,
int  base_offset 
)
private

Definition at line 3519 of file lithium-codegen-arm64.cc.

3525  {
3526  STATIC_ASSERT(static_cast<unsigned>(kSmiValueSize) == kWRegSizeInBits);
3527  STATIC_ASSERT(kSmiTag == 0);
3528  int element_size_shift = ElementsKindToShiftSize(elements_kind);
3529 
3530  // Even though the HLoad/StoreKeyed instructions force the input
3531  // representation for the key to be an integer, the input gets replaced during
3532  // bounds check elimination with the index argument to the bounds check, which
3533  // can be tagged, so that case must be handled here, too.
3534  if (key_is_tagged) {
3535  __ Add(base, elements, Operand::UntagSmiAndScale(key, element_size_shift));
3536  if (representation.IsInteger32()) {
3537  DCHECK(elements_kind == FAST_SMI_ELEMENTS);
3538  // Read or write only the smi payload in the case of fast smi arrays.
3539  return UntagSmiMemOperand(base, base_offset);
3540  } else {
3541  return MemOperand(base, base_offset);
3542  }
3543  } else {
3544  // Sign extend key because it could be a 32-bit negative value or contain
3545  // garbage in the top 32-bits. The address computation happens in 64-bit.
3546  DCHECK((element_size_shift >= 0) && (element_size_shift <= 4));
3547  if (representation.IsInteger32()) {
3548  DCHECK(elements_kind == FAST_SMI_ELEMENTS);
3549  // Read or write only the smi payload in the case of fast smi arrays.
3550  __ Add(base, elements, Operand(key, SXTW, element_size_shift));
3551  return UntagSmiMemOperand(base, base_offset);
3552  } else {
3553  __ Add(base, elements, base_offset);
3554  return MemOperand(base, key, SXTW, element_size_shift);
3555  }
3556  }
3557 }
static Operand UntagSmiAndScale(Register smi, int scale)
const unsigned kWRegSizeInBits
const int kSmiValueSize
Definition: v8.h:5806
const int kSmiTag
Definition: v8.h:5742
MemOperand UntagSmiMemOperand(Register object, int offset)

References __, DCHECK, v8::internal::ElementsKindToShiftSize(), v8::internal::FAST_SMI_ELEMENTS, v8::internal::Representation::IsInteger32(), v8::internal::kSmiTag, v8::internal::kSmiValueSize, v8::internal::kWRegSizeInBits, v8::internal::STATIC_ASSERT(), v8::internal::SXTW, v8::internal::Operand::UntagSmiAndScale(), and v8::internal::UntagSmiMemOperand().

+ Here is the call graph for this function:

◆ PrepareKeyedExternalArrayOperand()

MemOperand v8::internal::LCodeGen::PrepareKeyedExternalArrayOperand ( Register  key,
Register  base,
Register  scratch,
bool  key_is_smi,
bool  key_is_constant,
int  constant_key,
ElementsKind  elements_kind,
int  base_offset 
)
private

Definition at line 3399 of file lithium-codegen-arm64.cc.

3407  {
3408  int element_size_shift = ElementsKindToShiftSize(elements_kind);
3409 
3410  if (key_is_constant) {
3411  int key_offset = constant_key << element_size_shift;
3412  return MemOperand(base, key_offset + base_offset);
3413  }
3414 
3415  if (key_is_smi) {
3416  __ Add(scratch, base, Operand::UntagSmiAndScale(key, element_size_shift));
3417  return MemOperand(scratch, base_offset);
3418  }
3419 
3420  if (base_offset == 0) {
3421  return MemOperand(base, key, SXTW, element_size_shift);
3422  }
3423 
3424  DCHECK(!AreAliased(scratch, key));
3425  __ Add(scratch, base, base_offset);
3426  return MemOperand(scratch, key, SXTW, element_size_shift);
3427 }
bool AreAliased(const CPURegister &reg1, const CPURegister &reg2, const CPURegister &reg3=NoReg, const CPURegister &reg4=NoReg, const CPURegister &reg5=NoReg, const CPURegister &reg6=NoReg, const CPURegister &reg7=NoReg, const CPURegister &reg8=NoReg)

References __, v8::internal::AreAliased(), DCHECK, v8::internal::ElementsKindToShiftSize(), v8::internal::SXTW, and v8::internal::Operand::UntagSmiAndScale().

+ Here is the call graph for this function:

◆ PrepareKeyedOperand() [1/3]

MemOperand v8::internal::LCodeGen::PrepareKeyedOperand ( Register  key,
Register  base,
bool  key_is_constant,
int  constant_key,
int  element_size,
int  shift_size,
int  base_offset 
)

Definition at line 3391 of file lithium-codegen-arm.cc.

3397  {
3398  if (key_is_constant) {
3399  return MemOperand(base, (constant_key << element_size) + base_offset);
3400  }
3401 
3402  if (base_offset == 0) {
3403  if (shift_size >= 0) {
3404  return MemOperand(base, key, LSL, shift_size);
3405  } else {
3406  DCHECK_EQ(-1, shift_size);
3407  return MemOperand(base, key, LSR, 1);
3408  }
3409  }
3410 
3411  if (shift_size >= 0) {
3412  __ add(scratch0(), base, Operand(key, LSL, shift_size));
3413  return MemOperand(scratch0(), base_offset);
3414  } else {
3415  DCHECK_EQ(-1, shift_size);
3416  __ add(scratch0(), base, Operand(key, ASR, 1));
3417  return MemOperand(scratch0(), base_offset);
3418  }
3419 }

References __, v8::internal::ASR, DCHECK_EQ, v8::internal::LSL, v8::internal::LSR, and scratch0().

Referenced by DoLoadKeyedExternalArray(), and DoStoreKeyedExternalArray().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ PrepareKeyedOperand() [2/3]

MemOperand v8::internal::LCodeGen::PrepareKeyedOperand ( Register  key,
Register  base,
bool  key_is_constant,
int  constant_key,
int  element_size,
int  shift_size,
int  base_offset 
)

◆ PrepareKeyedOperand() [3/3]

MemOperand v8::internal::LCodeGen::PrepareKeyedOperand ( Register  key,
Register  base,
bool  key_is_constant,
int  constant_key,
int  element_size,
int  shift_size,
int  base_offset 
)

◆ RecordAndWritePosition() [1/7]

void v8::internal::LCodeGen::RecordAndWritePosition ( int  position)
private

Definition at line 1057 of file lithium-codegen-arm.cc.

1057  {
1058  if (position == RelocInfo::kNoPosition) return;
1059  masm()->positions_recorder()->RecordPosition(position);
1060  masm()->positions_recorder()->WriteRecordedPositions();
1061 }
static const int kNoPosition
Definition: assembler.h:317

References v8::internal::RelocInfo::kNoPosition.

Referenced by GenerateDeferredCode().

+ Here is the caller graph for this function:

◆ RecordAndWritePosition() [2/7]

void v8::internal::LCodeGen::RecordAndWritePosition ( int  position)
private

◆ RecordAndWritePosition() [3/7]

void v8::internal::LCodeGen::RecordAndWritePosition ( int  position)
private

◆ RecordAndWritePosition() [4/7]

void v8::internal::LCodeGen::RecordAndWritePosition ( int  position)
private

◆ RecordAndWritePosition() [5/7]

void v8::internal::LCodeGen::RecordAndWritePosition ( int  position)
private

◆ RecordAndWritePosition() [6/7]

void v8::internal::LCodeGen::RecordAndWritePosition ( int  position)
private

◆ RecordAndWritePosition() [7/7]

void v8::internal::LCodeGen::RecordAndWritePosition ( int  position)
private

◆ RecordSafepoint() [1/21]

void v8::internal::LCodeGen::RecordSafepoint ( LPointerMap *  pointers,
Safepoint::DeoptMode  mode 
)
private

Definition at line 1037 of file lithium-codegen-arm.cc.

1038  {
1039  RecordSafepoint(pointers, Safepoint::kSimple, 0, deopt_mode);
1040 }

References RecordSafepoint().

+ Here is the call graph for this function:

◆ RecordSafepoint() [2/21]

void v8::internal::LCodeGen::RecordSafepoint ( LPointerMap *  pointers,
Safepoint::DeoptMode  mode 
)
private

◆ RecordSafepoint() [3/21]

void v8::internal::LCodeGen::RecordSafepoint ( LPointerMap *  pointers,
Safepoint::DeoptMode  mode 
)
private

◆ RecordSafepoint() [4/21]

void v8::internal::LCodeGen::RecordSafepoint ( LPointerMap *  pointers,
Safepoint::DeoptMode  mode 
)
private

◆ RecordSafepoint() [5/21]

void v8::internal::LCodeGen::RecordSafepoint ( LPointerMap *  pointers,
Safepoint::DeoptMode  mode 
)
private

◆ RecordSafepoint() [6/21]

void v8::internal::LCodeGen::RecordSafepoint ( LPointerMap *  pointers,
Safepoint::DeoptMode  mode 
)
private

◆ RecordSafepoint() [7/21]

void v8::internal::LCodeGen::RecordSafepoint ( LPointerMap *  pointers,
Safepoint::DeoptMode  mode 
)
private

◆ RecordSafepoint() [8/21]

void v8::internal::LCodeGen::RecordSafepoint ( LPointerMap *  pointers,
Safepoint::Kind  kind,
int  arguments,
Safepoint::DeoptMode  mode 
)
private

Definition at line 1012 of file lithium-codegen-arm.cc.

1016  {
1017  DCHECK(expected_safepoint_kind_ == kind);
1018 
1019  const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands();
1020  Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
1021  kind, arguments, deopt_mode);
1022  for (int i = 0; i < operands->length(); i++) {
1023  LOperand* pointer = operands->at(i);
1024  if (pointer->IsStackSlot()) {
1025  safepoint.DefinePointerSlot(pointer->index(), zone());
1026  } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
1027  safepoint.DefinePointerRegister(ToRegister(pointer), zone());
1028  }
1029  }
1030  if (FLAG_enable_ool_constant_pool && (kind & Safepoint::kWithRegisters)) {
1031  // Register pp always contains a pointer to the constant pool.
1032  safepoint.DefinePointerRegister(pp, zone());
1033  }
1034 }
const Register pp

References v8::internal::List< T, AllocationPolicy >::at(), DCHECK, expected_safepoint_kind_, v8::internal::LOperand::index(), v8::internal::pp, safepoints_, and ToRegister().

Referenced by GeneratePrologue(), RecordSafepoint(), RecordSafepointWithLazyDeopt(), and RecordSafepointWithRegisters().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ RecordSafepoint() [9/21]

void v8::internal::LCodeGen::RecordSafepoint ( LPointerMap *  pointers,
Safepoint::Kind  kind,
int  arguments,
Safepoint::DeoptMode  mode 
)
private

◆ RecordSafepoint() [10/21]

void v8::internal::LCodeGen::RecordSafepoint ( LPointerMap *  pointers,
Safepoint::Kind  kind,
int  arguments,
Safepoint::DeoptMode  mode 
)
private

◆ RecordSafepoint() [11/21]

void v8::internal::LCodeGen::RecordSafepoint ( LPointerMap *  pointers,
Safepoint::Kind  kind,
int  arguments,
Safepoint::DeoptMode  mode 
)
private

◆ RecordSafepoint() [12/21]

void v8::internal::LCodeGen::RecordSafepoint ( LPointerMap *  pointers,
Safepoint::Kind  kind,
int  arguments,
Safepoint::DeoptMode  mode 
)
private

◆ RecordSafepoint() [13/21]

void v8::internal::LCodeGen::RecordSafepoint ( LPointerMap *  pointers,
Safepoint::Kind  kind,
int  arguments,
Safepoint::DeoptMode  mode 
)
private

◆ RecordSafepoint() [14/21]

void v8::internal::LCodeGen::RecordSafepoint ( LPointerMap *  pointers,
Safepoint::Kind  kind,
int  arguments,
Safepoint::DeoptMode  mode 
)
private

◆ RecordSafepoint() [15/21]

void v8::internal::LCodeGen::RecordSafepoint ( Safepoint::DeoptMode  mode)
private

Definition at line 1043 of file lithium-codegen-arm.cc.

1043  {
1044  LPointerMap empty_pointers(zone());
1045  RecordSafepoint(&empty_pointers, deopt_mode);
1046 }

References RecordSafepoint().

+ Here is the call graph for this function:

◆ RecordSafepoint() [16/21]

void v8::internal::LCodeGen::RecordSafepoint ( Safepoint::DeoptMode  mode)
private

◆ RecordSafepoint() [17/21]

void v8::internal::LCodeGen::RecordSafepoint ( Safepoint::DeoptMode  mode)
private

◆ RecordSafepoint() [18/21]

void v8::internal::LCodeGen::RecordSafepoint ( Safepoint::DeoptMode  mode)
private

◆ RecordSafepoint() [19/21]

void v8::internal::LCodeGen::RecordSafepoint ( Safepoint::DeoptMode  mode)
private

◆ RecordSafepoint() [20/21]

void v8::internal::LCodeGen::RecordSafepoint ( Safepoint::DeoptMode  mode)
private

◆ RecordSafepoint() [21/21]

void v8::internal::LCodeGen::RecordSafepoint ( Safepoint::DeoptMode  mode)
private

◆ RecordSafepointWithLazyDeopt() [1/7]

void v8::internal::LCodeGen::RecordSafepointWithLazyDeopt ( LInstruction instr,
SafepointMode  safepoint_mode 
)
private

Definition at line 1000 of file lithium-codegen-arm.cc.

1001  {
1002  if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
1003  RecordSafepoint(instr->pointer_map(), Safepoint::kLazyDeopt);
1004  } else {
1005  DCHECK(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
1006  RecordSafepointWithRegisters(
1007  instr->pointer_map(), 0, Safepoint::kLazyDeopt);
1008  }
1009 }

References DCHECK, v8::internal::LInstruction::pointer_map(), RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS, RECORD_SIMPLE_SAFEPOINT, RecordSafepoint(), and RecordSafepointWithRegisters().

Referenced by CallCodeGeneric(), CallKnownFunction(), CallRuntime(), and DoDeferredStackCheck().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ RecordSafepointWithLazyDeopt() [2/7]

void v8::internal::LCodeGen::RecordSafepointWithLazyDeopt ( LInstruction instr,
SafepointMode  safepoint_mode 
)
private

◆ RecordSafepointWithLazyDeopt() [3/7]

void v8::internal::LCodeGen::RecordSafepointWithLazyDeopt ( LInstruction instr,
SafepointMode  safepoint_mode 
)
private

◆ RecordSafepointWithLazyDeopt() [4/7]

void v8::internal::LCodeGen::RecordSafepointWithLazyDeopt ( LInstruction instr,
SafepointMode  safepoint_mode 
)
private

◆ RecordSafepointWithLazyDeopt() [5/7]

void v8::internal::LCodeGen::RecordSafepointWithLazyDeopt ( LInstruction instr,
SafepointMode  safepoint_mode 
)
private

◆ RecordSafepointWithLazyDeopt() [6/7]

void v8::internal::LCodeGen::RecordSafepointWithLazyDeopt ( LInstruction instr,
SafepointMode  safepoint_mode 
)
private

◆ RecordSafepointWithLazyDeopt() [7/7]

void v8::internal::LCodeGen::RecordSafepointWithLazyDeopt ( LInstruction instr,
SafepointMode  safepoint_mode,
int  argc 
)
private

◆ RecordSafepointWithRegisters() [1/7]

void v8::internal::LCodeGen::RecordSafepointWithRegisters ( LPointerMap *  pointers,
int  arguments,
Safepoint::DeoptMode  mode 
)
private

Definition at line 1049 of file lithium-codegen-arm.cc.

1051  {
1052  RecordSafepoint(
1053  pointers, Safepoint::kWithRegisters, arguments, deopt_mode);
1054 }

References RecordSafepoint().

Referenced by CallRuntimeFromDeferred(), DoDeferredInstanceMigration(), DoDeferredLoadMutableDouble(), DoDeferredNumberTagD(), DoDeferredNumberTagIU(), DoDeferredNumberTagU(), and RecordSafepointWithLazyDeopt().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ RecordSafepointWithRegisters() [2/7]

void v8::internal::LCodeGen::RecordSafepointWithRegisters ( LPointerMap *  pointers,
int  arguments,
Safepoint::DeoptMode  mode 
)
private

◆ RecordSafepointWithRegisters() [3/7]

void v8::internal::LCodeGen::RecordSafepointWithRegisters ( LPointerMap *  pointers,
int  arguments,
Safepoint::DeoptMode  mode 
)
private

◆ RecordSafepointWithRegisters() [4/7]

void v8::internal::LCodeGen::RecordSafepointWithRegisters ( LPointerMap *  pointers,
int  arguments,
Safepoint::DeoptMode  mode 
)
private

◆ RecordSafepointWithRegisters() [5/7]

void v8::internal::LCodeGen::RecordSafepointWithRegisters ( LPointerMap *  pointers,
int  arguments,
Safepoint::DeoptMode  mode 
)
private

◆ RecordSafepointWithRegisters() [6/7]

void v8::internal::LCodeGen::RecordSafepointWithRegisters ( LPointerMap *  pointers,
int  arguments,
Safepoint::DeoptMode  mode 
)
private

◆ RecordSafepointWithRegisters() [7/7]

void v8::internal::LCodeGen::RecordSafepointWithRegisters ( LPointerMap *  pointers,
int  arguments,
Safepoint::DeoptMode  mode 
)
private

◆ RegisterEnvironmentForDeoptimization() [1/7]

void v8::internal::LCodeGen::RegisterEnvironmentForDeoptimization ( LEnvironment environment,
Safepoint::DeoptMode  mode 
)
private

Definition at line 807 of file lithium-codegen-arm.cc.

808  {
809  environment->set_has_been_used();
810  if (!environment->HasBeenRegistered()) {
811  // Physical stack frame layout:
812  // -x ............. -4 0 ..................................... y
813  // [incoming arguments] [spill slots] [pushed outgoing arguments]
814 
815  // Layout of the environment:
816  // 0 ..................................................... size-1
817  // [parameters] [locals] [expression stack including arguments]
818 
819  // Layout of the translation:
820  // 0 ........................................................ size - 1 + 4
821  // [expression stack including arguments] [locals] [4 words] [parameters]
822  // |>------------ translation_size ------------<|
823 
824  int frame_count = 0;
825  int jsframe_count = 0;
826  for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
827  ++frame_count;
828  if (e->frame_type() == JS_FUNCTION) {
829  ++jsframe_count;
830  }
831  }
832  Translation translation(&translations_, frame_count, jsframe_count, zone());
833  WriteTranslation(environment, &translation);
834  int deoptimization_index = deoptimizations_.length();
835  int pc_offset = masm()->pc_offset();
836  environment->Register(deoptimization_index,
837  translation.index(),
838  (mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
839  deoptimizations_.Add(environment, zone());
840  }
841 }
void WriteTranslation(LEnvironment *environment, Translation *translation)
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
Definition: list-inl.h:17

References v8::internal::List< T, AllocationPolicy >::Add(), deoptimizations_, v8::internal::JS_FUNCTION, LEnvironment, mode(), NULL, translations_, and WriteTranslation().

Referenced by DeoptimizeBranch(), and DeoptimizeIf().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ RegisterEnvironmentForDeoptimization() [2/7]

void v8::internal::LCodeGen::RegisterEnvironmentForDeoptimization ( LEnvironment environment,
Safepoint::DeoptMode  mode 
)
private

◆ RegisterEnvironmentForDeoptimization() [3/7]

void v8::internal::LCodeGen::RegisterEnvironmentForDeoptimization ( LEnvironment environment,
Safepoint::DeoptMode  mode 
)
private

◆ RegisterEnvironmentForDeoptimization() [4/7]

void v8::internal::LCodeGen::RegisterEnvironmentForDeoptimization ( LEnvironment environment,
Safepoint::DeoptMode  mode 
)
private

◆ RegisterEnvironmentForDeoptimization() [5/7]

void v8::internal::LCodeGen::RegisterEnvironmentForDeoptimization ( LEnvironment environment,
Safepoint::DeoptMode  mode 
)
private

◆ RegisterEnvironmentForDeoptimization() [6/7]

void v8::internal::LCodeGen::RegisterEnvironmentForDeoptimization ( LEnvironment environment,
Safepoint::DeoptMode  mode 
)
private

◆ RegisterEnvironmentForDeoptimization() [7/7]

void v8::internal::LCodeGen::RegisterEnvironmentForDeoptimization ( LEnvironment environment,
Safepoint::DeoptMode  mode 
)
private

◆ RestoreCallerDoubles() [1/6]

void v8::internal::LCodeGen::RestoreCallerDoubles ( )
private

Definition at line 85 of file lithium-codegen-arm.cc.

85  {
86  DCHECK(info()->saves_caller_doubles());
87  DCHECK(NeedsEagerFrame());
88  Comment(";;; Restore clobbered callee double registers");
89  BitVector* doubles = chunk()->allocated_double_registers();
90  BitVector::Iterator save_iterator(doubles);
91  int count = 0;
92  while (!save_iterator.Done()) {
93  __ vldr(DwVfpRegister::FromAllocationIndex(save_iterator.Current()),
94  MemOperand(sp, count * kDoubleSize));
95  save_iterator.Advance();
96  count++;
97  }
98 }
static DwVfpRegister FromAllocationIndex(int index)

References __, chunk(), DCHECK, v8::internal::DwVfpRegister::FromAllocationIndex(), v8::internal::kDoubleSize, NeedsEagerFrame(), and v8::internal::sp.

Referenced by GenerateJumpTable().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ RestoreCallerDoubles() [2/6]

void v8::internal::LCodeGen::RestoreCallerDoubles ( )
private

◆ RestoreCallerDoubles() [3/6]

void v8::internal::LCodeGen::RestoreCallerDoubles ( )
private

◆ RestoreCallerDoubles() [4/6]

void v8::internal::LCodeGen::RestoreCallerDoubles ( )
private

◆ RestoreCallerDoubles() [5/6]

void v8::internal::LCodeGen::RestoreCallerDoubles ( )
private

◆ RestoreCallerDoubles() [6/6]

void v8::internal::LCodeGen::RestoreCallerDoubles ( )
private

◆ SaveCallerDoubles() [1/6]

void v8::internal::LCodeGen::SaveCallerDoubles ( )
private

Definition at line 69 of file lithium-codegen-arm.cc.

69  {
70  DCHECK(info()->saves_caller_doubles());
71  DCHECK(NeedsEagerFrame());
72  Comment(";;; Save clobbered callee double registers");
73  int count = 0;
74  BitVector* doubles = chunk()->allocated_double_registers();
75  BitVector::Iterator save_iterator(doubles);
76  while (!save_iterator.Done()) {
77  __ vstr(DwVfpRegister::FromAllocationIndex(save_iterator.Current()),
78  MemOperand(sp, count * kDoubleSize));
79  save_iterator.Advance();
80  count++;
81  }
82 }

References __, chunk(), DCHECK, v8::internal::DwVfpRegister::FromAllocationIndex(), v8::internal::kDoubleSize, NeedsEagerFrame(), and v8::internal::sp.

Referenced by GeneratePrologue().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ SaveCallerDoubles() [2/6]

void v8::internal::LCodeGen::SaveCallerDoubles ( )
private

◆ SaveCallerDoubles() [3/6]

void v8::internal::LCodeGen::SaveCallerDoubles ( )
private

◆ SaveCallerDoubles() [4/6]

void v8::internal::LCodeGen::SaveCallerDoubles ( )
private

◆ SaveCallerDoubles() [5/6]

void v8::internal::LCodeGen::SaveCallerDoubles ( )
private

◆ SaveCallerDoubles() [6/6]

void v8::internal::LCodeGen::SaveCallerDoubles ( )
private

◆ scope() [1/7]

Scope* v8::internal::LCodeGen::scope ( ) const
inlineprivate

◆ scope() [2/7]

Scope* v8::internal::LCodeGen::scope ( ) const
inline

Definition at line 51 of file lithium-codegen-arm64.h.

51 { return scope_; }

References scope_.

◆ scope() [3/7]

Scope* v8::internal::LCodeGen::scope ( ) const
inlineprivate

Definition at line 131 of file lithium-codegen-ia32.h.

131 { return scope_; }

References scope_.

◆ scope() [4/7]

Scope* v8::internal::LCodeGen::scope ( ) const
inlineprivate

Definition at line 148 of file lithium-codegen-mips.h.

148 { return scope_; }

References scope_.

◆ scope() [5/7]

Scope* v8::internal::LCodeGen::scope ( ) const
inlineprivate

Definition at line 149 of file lithium-codegen-mips64.h.

149 { return scope_; }

References scope_.

◆ scope() [6/7]

Scope* v8::internal::LCodeGen::scope ( ) const
inlineprivate

Definition at line 124 of file lithium-codegen-x64.h.

124 { return scope_; }

References scope_.

◆ scope() [7/7]

Scope* v8::internal::LCodeGen::scope ( ) const
inlineprivate

Definition at line 165 of file lithium-codegen-x87.h.

165 { return scope_; }

References scope_.

◆ scratch0() [1/3]

◆ scratch0() [2/3]

Register v8::internal::LCodeGen::scratch0 ( )
inlineprivate

Definition at line 150 of file lithium-codegen-mips.h.

150 { return kLithiumScratchReg; }
#define kLithiumScratchReg

References kLithiumScratchReg.

◆ scratch0() [3/3]

Register v8::internal::LCodeGen::scratch0 ( )
inlineprivate

Definition at line 151 of file lithium-codegen-mips64.h.

151 { return kLithiumScratchReg; }

References kLithiumScratchReg.

◆ scratch1() [1/2]

Register v8::internal::LCodeGen::scratch1 ( )
inlineprivate

Definition at line 151 of file lithium-codegen-mips.h.

151 { return kLithiumScratchReg2; }
#define kLithiumScratchReg2

References kLithiumScratchReg2.

Referenced by DoDeferredNumberTagU(), and DoDeferredTaggedToI().

+ Here is the caller graph for this function:

◆ scratch1() [2/2]

Register v8::internal::LCodeGen::scratch1 ( )
inlineprivate

Definition at line 152 of file lithium-codegen-mips64.h.

152 { return kLithiumScratchReg2; }

References kLithiumScratchReg2.

◆ strict_mode() [1/6]

StrictMode v8::internal::LCodeGen::strict_mode ( ) const
inlineprivate

Definition at line 147 of file lithium-codegen-arm.h.

147 { return info()->strict_mode(); }

◆ strict_mode() [2/6]

StrictMode v8::internal::LCodeGen::strict_mode ( ) const
inlineprivate

Definition at line 129 of file lithium-codegen-ia32.h.

129 { return info()->strict_mode(); }

◆ strict_mode() [3/6]

StrictMode v8::internal::LCodeGen::strict_mode ( ) const
inlineprivate

Definition at line 146 of file lithium-codegen-mips.h.

146 { return info()->strict_mode(); }

◆ strict_mode() [4/6]

StrictMode v8::internal::LCodeGen::strict_mode ( ) const
inlineprivate

Definition at line 147 of file lithium-codegen-mips64.h.

147 { return info()->strict_mode(); }

◆ strict_mode() [5/6]

StrictMode v8::internal::LCodeGen::strict_mode ( ) const
inlineprivate

Definition at line 121 of file lithium-codegen-x64.h.

121 { return info()->strict_mode(); }

◆ strict_mode() [6/6]

StrictMode v8::internal::LCodeGen::strict_mode ( ) const
inlineprivate

Definition at line 163 of file lithium-codegen-x87.h.

163 { return info()->strict_mode(); }

◆ ToDouble() [1/7]

double v8::internal::LCodeGen::ToDouble ( LConstantOperand *  op) const

Definition at line 538 of file lithium-codegen-arm.cc.

538  {
539  HConstant* constant = chunk_->LookupConstant(op);
540  DCHECK(constant->HasDoubleValue());
541  return constant->DoubleValue();
542 }

References DCHECK.

◆ ToDouble() [2/7]

double v8::internal::LCodeGen::ToDouble ( LConstantOperand *  op) const

◆ ToDouble() [3/7]

double v8::internal::LCodeGen::ToDouble ( LConstantOperand *  op) const

◆ ToDouble() [4/7]

double v8::internal::LCodeGen::ToDouble ( LConstantOperand *  op) const

◆ ToDouble() [5/7]

double v8::internal::LCodeGen::ToDouble ( LConstantOperand *  op) const

◆ ToDouble() [6/7]

double v8::internal::LCodeGen::ToDouble ( LConstantOperand *  op) const

◆ ToDouble() [7/7]

double v8::internal::LCodeGen::ToDouble ( LConstantOperand *  op) const

◆ ToDoubleRegister() [1/11]

DoubleRegister v8::internal::LCodeGen::ToDoubleRegister ( int  index) const
private

Definition at line 423 of file lithium-codegen-arm.cc.

423  {
424  return DwVfpRegister::FromAllocationIndex(index);
425 }

References v8::internal::DwVfpRegister::FromAllocationIndex().

+ Here is the call graph for this function:

◆ ToDoubleRegister() [2/11]

XMMRegister v8::internal::LCodeGen::ToDoubleRegister ( int  index) const
private

◆ ToDoubleRegister() [3/11]

DoubleRegister v8::internal::LCodeGen::ToDoubleRegister ( int  index) const
private

◆ ToDoubleRegister() [4/11]

DoubleRegister v8::internal::LCodeGen::ToDoubleRegister ( int  index) const
private

◆ ToDoubleRegister() [5/11]

XMMRegister v8::internal::LCodeGen::ToDoubleRegister ( int  index) const
private

◆ ToDoubleRegister() [6/11]

DoubleRegister v8::internal::LCodeGen::ToDoubleRegister ( LOperand op) const

Definition at line 461 of file lithium-codegen-arm.cc.

461  {
462  DCHECK(op->IsDoubleRegister());
463  return ToDoubleRegister(op->index());
464 }

References DCHECK, and v8::internal::LOperand::index().

Referenced by AddToTranslation(), DoDeferredTaggedToI(), DoLoadKeyedExternalArray(), DoLoadKeyedFixedDoubleArray(), DoStoreKeyedExternalArray(), DoStoreKeyedFixedDoubleArray(), and EmitLoadDoubleRegister().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ToDoubleRegister() [7/11]

DoubleRegister v8::internal::LCodeGen::ToDoubleRegister ( LOperand op) const

◆ ToDoubleRegister() [8/11]

XMMRegister v8::internal::LCodeGen::ToDoubleRegister ( LOperand op) const

◆ ToDoubleRegister() [9/11]

DoubleRegister v8::internal::LCodeGen::ToDoubleRegister ( LOperand op) const

◆ ToDoubleRegister() [10/11]

DoubleRegister v8::internal::LCodeGen::ToDoubleRegister ( LOperand op) const

◆ ToDoubleRegister() [11/11]

XMMRegister v8::internal::LCodeGen::ToDoubleRegister ( LOperand op) const

◆ ToExternalReference() [1/3]

ExternalReference v8::internal::LCodeGen::ToExternalReference ( LConstantOperand *  op) const
private

◆ ToExternalReference() [2/3]

ExternalReference v8::internal::LCodeGen::ToExternalReference ( LConstantOperand *  op) const

◆ ToExternalReference() [3/3]

ExternalReference v8::internal::LCodeGen::ToExternalReference ( LConstantOperand *  op) const
private

◆ ToHandle() [1/7]

Handle< Object > v8::internal::LCodeGen::ToHandle ( LConstantOperand *  op) const

Definition at line 500 of file lithium-codegen-arm.cc.

500  {
501  HConstant* constant = chunk_->LookupConstant(op);
502  DCHECK(chunk_->LookupLiteralRepresentation(op).IsSmiOrTagged());
503  return constant->handle(isolate());
504 }

References DCHECK.

◆ ToHandle() [2/7]

Handle<Object> v8::internal::LCodeGen::ToHandle ( LConstantOperand *  op) const

◆ ToHandle() [3/7]

Handle<Object> v8::internal::LCodeGen::ToHandle ( LConstantOperand *  op) const

◆ ToHandle() [4/7]

Handle<Object> v8::internal::LCodeGen::ToHandle ( LConstantOperand *  op) const

◆ ToHandle() [5/7]

Handle<Object> v8::internal::LCodeGen::ToHandle ( LConstantOperand *  op) const

◆ ToHandle() [6/7]

Handle<Object> v8::internal::LCodeGen::ToHandle ( LConstantOperand *  op) const

◆ ToHandle() [7/7]

Handle<Object> v8::internal::LCodeGen::ToHandle ( LConstantOperand *  op) const

◆ ToHighMemOperand() [1/3]

MemOperand v8::internal::LCodeGen::ToHighMemOperand ( LOperand op) const

Definition at line 593 of file lithium-codegen-arm.cc.

593  {
594  DCHECK(op->IsDoubleStackSlot());
595  if (NeedsEagerFrame()) {
596  return MemOperand(fp, StackSlotOffset(op->index()) + kPointerSize);
597  } else {
598  // Retrieve parameter without eager stack-frame relative to the
599  // stack-pointer.
600  return MemOperand(
601  sp, ArgumentsOffsetWithoutFrame(op->index()) + kPointerSize);
602  }
603 }
int StackSlotOffset(int index)
Definition: lithium.cc:254
static int ArgumentsOffsetWithoutFrame(int index)

References v8::internal::ArgumentsOffsetWithoutFrame(), DCHECK, v8::internal::fp, v8::internal::LOperand::index(), v8::internal::kPointerSize, NeedsEagerFrame(), v8::internal::sp, and v8::internal::StackSlotOffset().

+ Here is the call graph for this function:

◆ ToHighMemOperand() [2/3]

MemOperand v8::internal::LCodeGen::ToHighMemOperand ( LOperand op) const

◆ ToHighMemOperand() [3/3]

MemOperand v8::internal::LCodeGen::ToHighMemOperand ( LOperand op) const

◆ ToImmediate() [1/2]

Immediate v8::internal::LCodeGen::ToImmediate ( LOperand op,
const Representation r 
) const
inline

Definition at line 72 of file lithium-codegen-ia32.h.

72  {
73  return Immediate(ToRepresentation(LConstantOperand::cast(op), r));
74  }
int32_t ToRepresentation(LConstantOperand *op, const Representation &r) const

References ToRepresentation().

+ Here is the call graph for this function:

◆ ToImmediate() [2/2]

Immediate v8::internal::LCodeGen::ToImmediate ( LOperand op,
const Representation r 
) const
inline

Definition at line 74 of file lithium-codegen-x87.h.

74  {
75  return Immediate(ToRepresentation(LConstantOperand::cast(op), r));
76  }

References ToRepresentation().

+ Here is the call graph for this function:

◆ ToInteger32() [1/7]

int32_t v8::internal::LCodeGen::ToInteger32 ( LConstantOperand *  op) const

Definition at line 517 of file lithium-codegen-arm.cc.

517  {
518  return ToRepresentation(op, Representation::Integer32());
519 }
static Representation Integer32()

References v8::internal::Representation::Integer32(), and ToRepresentation().

Referenced by BuildSeqStringOperand(), DoDeferredAllocate(), DoDeferredStringCharCodeAt(), DoLoadKeyedExternalArray(), DoLoadKeyedFixedArray(), DoLoadKeyedFixedDoubleArray(), DoStoreKeyedExternalArray(), DoStoreKeyedFixedArray(), DoStoreKeyedFixedDoubleArray(), JSShiftAmountFromLConstant(), and ToRegister32().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ToInteger32() [2/7]

int32_t v8::internal::LCodeGen::ToInteger32 ( LConstantOperand *  op) const

◆ ToInteger32() [3/7]

int32_t v8::internal::LCodeGen::ToInteger32 ( LConstantOperand *  op) const
private

◆ ToInteger32() [4/7]

int32_t v8::internal::LCodeGen::ToInteger32 ( LConstantOperand *  op) const

◆ ToInteger32() [5/7]

int32_t v8::internal::LCodeGen::ToInteger32 ( LConstantOperand *  op) const

◆ ToInteger32() [6/7]

int32_t v8::internal::LCodeGen::ToInteger32 ( LConstantOperand *  op) const

◆ ToInteger32() [7/7]

int32_t v8::internal::LCodeGen::ToInteger32 ( LConstantOperand *  op) const
private

◆ TokenToCondition() [1/7]

Condition v8::internal::LCodeGen::TokenToCondition ( Token::Value  op,
bool  is_unsigned 
)
staticprivate

Definition at line 2363 of file lithium-codegen-arm.cc.

2363  {
2364  Condition cond = kNoCondition;
2365  switch (op) {
2366  case Token::EQ:
2367  case Token::EQ_STRICT:
2368  cond = eq;
2369  break;
2370  case Token::NE:
2371  case Token::NE_STRICT:
2372  cond = ne;
2373  break;
2374  case Token::LT:
2375  cond = is_unsigned ? lo : lt;
2376  break;
2377  case Token::GT:
2378  cond = is_unsigned ? hi : gt;
2379  break;
2380  case Token::LTE:
2381  cond = is_unsigned ? ls : le;
2382  break;
2383  case Token::GTE:
2384  cond = is_unsigned ? hs : ge;
2385  break;
2386  case Token::IN:
2387  case Token::INSTANCEOF:
2388  default:
2389  UNREACHABLE();
2390  }
2391  return cond;
2392 }
#define IN

References v8::internal::eq, v8::internal::EQ, v8::internal::ge, v8::internal::gt, v8::internal::hi, v8::internal::hs, IN, v8::internal::kNoCondition, v8::internal::le, v8::internal::lo, v8::internal::ls, v8::internal::lt, v8::internal::ne, and UNREACHABLE.

◆ TokenToCondition() [2/7]

static Condition v8::internal::LCodeGen::TokenToCondition ( Token::Value  op,
bool  is_unsigned 
)
staticprivate

◆ TokenToCondition() [3/7]

static Condition v8::internal::LCodeGen::TokenToCondition ( Token::Value  op,
bool  is_unsigned 
)
staticprivate

◆ TokenToCondition() [4/7]

static Condition v8::internal::LCodeGen::TokenToCondition ( Token::Value  op,
bool  is_unsigned 
)
staticprivate

◆ TokenToCondition() [5/7]

static Condition v8::internal::LCodeGen::TokenToCondition ( Token::Value  op,
bool  is_unsigned 
)
staticprivate

◆ TokenToCondition() [6/7]

static Condition v8::internal::LCodeGen::TokenToCondition ( Token::Value  op,
bool  is_unsigned 
)
staticprivate

◆ TokenToCondition() [7/7]

static Condition v8::internal::LCodeGen::TokenToCondition ( Token::Value  op,
bool  is_unsigned 
)
staticprivate

◆ ToMemOperand() [1/4]

MemOperand v8::internal::LCodeGen::ToMemOperand ( LOperand op) const

Definition at line 579 of file lithium-codegen-arm.cc.

579  {
580  DCHECK(!op->IsRegister());
581  DCHECK(!op->IsDoubleRegister());
582  DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
583  if (NeedsEagerFrame()) {
584  return MemOperand(fp, StackSlotOffset(op->index()));
585  } else {
586  // Retrieve parameter without eager stack-frame relative to the
587  // stack-pointer.
588  return MemOperand(sp, ArgumentsOffsetWithoutFrame(op->index()));
589  }
590 }

References v8::internal::ArgumentsOffsetWithoutFrame(), DCHECK, v8::internal::fp, v8::internal::LOperand::index(), NeedsEagerFrame(), v8::internal::sp, and v8::internal::StackSlotOffset().

Referenced by EmitLoadDoubleRegister(), EmitLoadRegister(), and LoadContextFromDeferred().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ToMemOperand() [2/4]

MemOperand v8::internal::LCodeGen::ToMemOperand ( LOperand op) const

◆ ToMemOperand() [3/4]

MemOperand v8::internal::LCodeGen::ToMemOperand ( LOperand op) const

◆ ToMemOperand() [4/4]

MemOperand v8::internal::LCodeGen::ToMemOperand ( LOperand op,
StackMode  stack_mode = kCanUseStackPointer 
) const

Definition at line 1257 of file lithium-codegen-arm64.cc.

1257  {
1258  DCHECK(op != NULL);
1259  DCHECK(!op->IsRegister());
1260  DCHECK(!op->IsDoubleRegister());
1261  DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
1262  if (NeedsEagerFrame()) {
1263  int fp_offset = StackSlotOffset(op->index());
1264  if (op->index() >= 0) {
1265  // Loads and stores have a bigger reach with positive offsets than with negative ones.
1266  // When the load or the store can't be done in one instruction via fp
1267  // (too big negative offset), we try to access via jssp (positive offset).
1268  // We can reference a stack slot from jssp only if jssp references the end
1269  // of the stack slots. It's not the case when:
1270  // - stack_mode != kCanUseStackPointer: this is the case when a deferred
1271  // code saved the registers.
1272  // - after_push_argument_: arguments have been pushed for a call.
1273  // - inlined_arguments_: inlined arguments have been pushed once. All the
1274  // remainder of the function cannot trust jssp any longer.
1275  // - saves_caller_doubles: some double registers have been pushed, jssp
1276  // references the end of the double registers and not the end of the
1277  // stack slots.
1278  // Also, if the offset from fp is small enough to make a load/store in
1279  // one instruction, we use a fp access.
1280  if ((stack_mode == kCanUseStackPointer) && !after_push_argument_ &&
1281  !inlined_arguments_ && !is_int9(fp_offset) &&
1282  !info()->saves_caller_doubles()) {
1283  int jssp_offset =
1284  (GetStackSlotCount() - op->index() - 1) * kPointerSize;
1285  return MemOperand(masm()->StackPointer(), jssp_offset);
1286  }
1287  }
1288  return MemOperand(fp, fp_offset);
1289  } else {
1290  // Retrieve parameter without eager stack-frame relative to the
1291  // stack-pointer.
1292  return MemOperand(masm()->StackPointer(),
1293  ArgumentsOffsetWithoutFrame(op->index()));
1294  }
1295 }

References after_push_argument_, v8::internal::ArgumentsOffsetWithoutFrame(), DCHECK, v8::internal::fp, GetStackSlotCount(), v8::internal::LOperand::index(), inlined_arguments_, kCanUseStackPointer, v8::internal::kPointerSize, NeedsEagerFrame(), NULL, and v8::internal::StackSlotOffset().

+ Here is the call graph for this function:

◆ ToOperand() [1/7]

Operand v8::internal::LCodeGen::ToOperand ( LOperand op)

Definition at line 545 of file lithium-codegen-arm.cc.

545  {
546  if (op->IsConstantOperand()) {
547  LConstantOperand* const_op = LConstantOperand::cast(op);
548  HConstant* constant = chunk()->LookupConstant(const_op);
549  Representation r = chunk_->LookupLiteralRepresentation(const_op);
550  if (r.IsSmi()) {
551  DCHECK(constant->HasSmiValue());
552  return Operand(Smi::FromInt(constant->Integer32Value()));
553  } else if (r.IsInteger32()) {
554  DCHECK(constant->HasInteger32Value());
555  return Operand(constant->Integer32Value());
556  } else if (r.IsDouble()) {
557  Abort(kToOperandUnsupportedDoubleImmediate);
558  }
559  DCHECK(r.IsTagged());
560  return Operand(constant->handle(isolate()));
561  } else if (op->IsRegister()) {
562  return Operand(ToRegister(op));
563  } else if (op->IsDoubleRegister()) {
564  Abort(kToOperandIsDoubleRegisterUnimplemented);
565  return Operand::Zero();
566  }
567  // Stack slots not implemented, use ToMemOperand instead.
568  UNREACHABLE();
569  return Operand::Zero();
570 }

References chunk(), DCHECK, v8::internal::Smi::FromInt(), v8::internal::Representation::IsDouble(), v8::internal::Representation::IsInteger32(), v8::internal::Representation::IsSmi(), v8::internal::Representation::IsTagged(), ToRegister(), and UNREACHABLE.

+ Here is the call graph for this function:

◆ ToOperand() [2/7]

Operand v8::internal::LCodeGen::ToOperand ( LOperand op)

◆ ToOperand() [3/7]

Operand v8::internal::LCodeGen::ToOperand ( LOperand op)

◆ ToOperand() [4/7]

Operand v8::internal::LCodeGen::ToOperand ( LOperand op)

◆ ToOperand() [5/7]

Operand v8::internal::LCodeGen::ToOperand ( LOperand op) const

◆ ToOperand() [6/7]

Operand v8::internal::LCodeGen::ToOperand ( LOperand op) const

◆ ToOperand() [7/7]

Operand v8::internal::LCodeGen::ToOperand ( LOperand op) const

◆ ToOperand32()

Operand v8::internal::LCodeGen::ToOperand32 ( LOperand op)

Definition at line 1230 of file lithium-codegen-arm64.cc.

1230  {
1231  DCHECK(op != NULL);
1232  if (op->IsRegister()) {
1233  return Operand(ToRegister32(op));
1234  } else if (op->IsConstantOperand()) {
1235  LConstantOperand* const_op = LConstantOperand::cast(op);
1236  HConstant* constant = chunk()->LookupConstant(const_op);
1237  Representation r = chunk_->LookupLiteralRepresentation(const_op);
1238  if (r.IsInteger32()) {
1239  return Operand(constant->Integer32Value());
1240  } else {
1241  // Other constants not implemented.
1242  Abort(kToOperand32UnsupportedImmediate);
1243  }
1244  }
1245  // Other cases are not implemented.
1246  UNREACHABLE();
1247  return Operand(0);
1248 }

References chunk(), DCHECK, v8::internal::Representation::IsInteger32(), NULL, ToRegister32(), and UNREACHABLE.

Referenced by ToShiftedRightOperand32().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ToRegister() [1/13]

Register v8::internal::LCodeGen::ToRegister ( int  index) const
private

Definition at line 418 of file lithium-codegen-arm.cc.

418  {
419  return Register::FromAllocationIndex(index);
420 }
static Register FromAllocationIndex(int index)

References v8::internal::Register::FromAllocationIndex().

+ Here is the call graph for this function:

◆ ToRegister() [2/13]

Register v8::internal::LCodeGen::ToRegister ( int  index) const
private

◆ ToRegister() [3/13]

Register v8::internal::LCodeGen::ToRegister ( int  index) const
private

◆ ToRegister() [4/13]

Register v8::internal::LCodeGen::ToRegister ( int  index) const
private

◆ ToRegister() [5/13]

Register v8::internal::LCodeGen::ToRegister ( int  index) const
private

◆ ToRegister() [6/13]

Register v8::internal::LCodeGen::ToRegister ( int  index) const
private

◆ ToRegister() [7/13]

Register v8::internal::LCodeGen::ToRegister ( LOperand op) const

Definition at line 428 of file lithium-codegen-arm.cc.

428  {
429  DCHECK(op->IsRegister());
430  return ToRegister(op->index());
431 }

References DCHECK, and v8::internal::LOperand::index().

Referenced by AddToTranslation(), BuildSeqStringOperand(), DoDeferredAllocate(), DoDeferredInstanceOfKnownGlobal(), DoDeferredMathAbsTagged(), DoDeferredMathAbsTaggedHeapNumber(), DoDeferredNumberTagD(), DoDeferredNumberTagIU(), DoDeferredNumberTagU(), DoDeferredStringCharCodeAt(), DoDeferredStringCharFromCode(), DoDeferredTaggedToI(), DoLoadKeyedExternalArray(), DoLoadKeyedFixedArray(), DoLoadKeyedFixedDoubleArray(), DoStoreKeyedExternalArray(), DoStoreKeyedFixedArray(), DoStoreKeyedFixedDoubleArray(), EmitIntegerMathAbs(), EmitLoadRegister(), EmitVectorLoadICRegisters(), LoadContextFromDeferred(), RecordSafepoint(), ToOperand(), and ToRegister32().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ToRegister() [8/13]

Register v8::internal::LCodeGen::ToRegister ( LOperand op) const

◆ ToRegister() [9/13]

Register v8::internal::LCodeGen::ToRegister ( LOperand op) const

◆ ToRegister() [10/13]

Register v8::internal::LCodeGen::ToRegister ( LOperand op) const

◆ ToRegister() [11/13]

Register v8::internal::LCodeGen::ToRegister ( LOperand op) const

◆ ToRegister() [12/13]

Register v8::internal::LCodeGen::ToRegister ( LOperand op) const

◆ ToRegister() [13/13]

Register v8::internal::LCodeGen::ToRegister ( LOperand op) const

◆ ToRegister32()

Register v8::internal::LCodeGen::ToRegister32 ( LOperand op) const

Definition at line 1177 of file lithium-codegen-arm64.cc.

1177  {
1178  DCHECK(op != NULL);
1179  if (op->IsConstantOperand()) {
1180  // If this is a constant operand, the result must be the zero register.
1181  DCHECK(ToInteger32(LConstantOperand::cast(op)) == 0);
1182  return wzr;
1183  } else {
1184  return ToRegister(op).W();
1185  }
1186 }

References DCHECK, NULL, ToInteger32(), ToRegister(), and v8::internal::CPURegister::W().

Referenced by BuildSeqStringOperand(), DoDeferredNumberTagU(), DoDeferredTaggedToI(), ToOperand32(), and ToShiftedRightOperand32().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ToRepresentation() [1/5]

int32_t v8::internal::LCodeGen::ToRepresentation ( LConstantOperand *  op,
const Representation r 
) const

Definition at line 522 of file lithium-codegen-arm.cc.

523  {
524  HConstant* constant = chunk_->LookupConstant(op);
525  int32_t value = constant->Integer32Value();
526  if (r.IsInteger32()) return value;
527  DCHECK(r.IsSmiOrTagged());
528  return reinterpret_cast<int32_t>(Smi::FromInt(value));
529 }

References DCHECK, v8::internal::Smi::FromInt(), v8::internal::Representation::IsInteger32(), and v8::internal::Representation::IsSmiOrTagged().

Referenced by ToImmediate(), and ToInteger32().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ToRepresentation() [2/5]

int32_t v8::internal::LCodeGen::ToRepresentation ( LConstantOperand *  op,
const Representation r 
) const
private

◆ ToRepresentation() [3/5]

int32_t v8::internal::LCodeGen::ToRepresentation ( LConstantOperand *  op,
const Representation r 
) const

◆ ToRepresentation() [4/5]

int32_t v8::internal::LCodeGen::ToRepresentation ( LConstantOperand *  op,
const Representation r 
) const

◆ ToRepresentation() [5/5]

int32_t v8::internal::LCodeGen::ToRepresentation ( LConstantOperand *  op,
const Representation r 
) const
private

◆ ToRepresentation_donotuse()

int32_t v8::internal::LCodeGen::ToRepresentation_donotuse ( LConstantOperand *  op,
const Representation r 
) const

Definition at line 464 of file lithium-codegen-mips64.cc.

465  {
466  HConstant* constant = chunk_->LookupConstant(op);
467  int32_t value = constant->Integer32Value();
468  if (r.IsInteger32()) return value;
469  DCHECK(r.IsSmiOrTagged());
470  return reinterpret_cast<int64_t>(Smi::FromInt(value));
471 }

References DCHECK, v8::internal::Smi::FromInt(), v8::internal::Representation::IsInteger32(), and v8::internal::Representation::IsSmiOrTagged().

+ Here is the call graph for this function:

◆ ToShiftedRightOperand32()

template<class LI >
Operand v8::internal::LCodeGen::ToShiftedRightOperand32 ( LOperand right,
LI *  shift_info 
)

Definition at line 1306 of file lithium-codegen-arm64.cc.

1306  {
1307  if (shift_info->shift() == NO_SHIFT) {
1308  return ToOperand32(right);
1309  } else {
1310  return Operand(
1311  ToRegister32(right),
1312  shift_info->shift(),
1313  JSShiftAmountFromLConstant(shift_info->shift_amount()));
1314  }
1315 }
Operand ToOperand32(LOperand *op)
int JSShiftAmountFromLConstant(LOperand *constant)

References JSShiftAmountFromLConstant(), v8::internal::NO_SHIFT, ToOperand32(), and ToRegister32().

+ Here is the call graph for this function:

◆ ToSmi() [1/5]

Smi * v8::internal::LCodeGen::ToSmi ( LConstantOperand *  op) const

Definition at line 532 of file lithium-codegen-arm.cc.

532  {
533  HConstant* constant = chunk_->LookupConstant(op);
534  return Smi::FromInt(constant->Integer32Value());
535 }

References v8::internal::Smi::FromInt().

+ Here is the call graph for this function:

◆ ToSmi() [2/5]

Smi* v8::internal::LCodeGen::ToSmi ( LConstantOperand *  op) const

◆ ToSmi() [3/5]

Smi* v8::internal::LCodeGen::ToSmi ( LConstantOperand *  op) const

◆ ToSmi() [4/5]

Smi* v8::internal::LCodeGen::ToSmi ( LConstantOperand *  op) const

◆ ToSmi() [5/5]

Smi* v8::internal::LCodeGen::ToSmi ( LConstantOperand *  op) const

◆ ToX87Register() [1/2]

X87Register v8::internal::LCodeGen::ToX87Register ( int  index) const
private

◆ ToX87Register() [2/2]

X87Register v8::internal::LCodeGen::ToX87Register ( LOperand op) const

◆ WriteTranslation() [1/7]

void v8::internal::LCodeGen::WriteTranslation ( LEnvironment environment,
Translation *  translation 
)

Definition at line 606 of file lithium-codegen-arm.cc.

607  {
608  if (environment == NULL) return;
609 
610  // The translation includes one command per value in the environment.
611  int translation_size = environment->translation_size();
612  // The output frame height does not include the parameters.
613  int height = translation_size - environment->parameter_count();
614 
615  WriteTranslation(environment->outer(), translation);
616  bool has_closure_id = !info()->closure().is_null() &&
617  !info()->closure().is_identical_to(environment->closure());
618  int closure_id = has_closure_id
619  ? DefineDeoptimizationLiteral(environment->closure())
620  : Translation::kSelfLiteralId;
621 
622  switch (environment->frame_type()) {
623  case JS_FUNCTION:
624  translation->BeginJSFrame(environment->ast_id(), closure_id, height);
625  break;
626  case JS_CONSTRUCT:
627  translation->BeginConstructStubFrame(closure_id, translation_size);
628  break;
629  case JS_GETTER:
630  DCHECK(translation_size == 1);
631  DCHECK(height == 0);
632  translation->BeginGetterStubFrame(closure_id);
633  break;
634  case JS_SETTER:
635  DCHECK(translation_size == 2);
636  DCHECK(height == 0);
637  translation->BeginSetterStubFrame(closure_id);
638  break;
639  case STUB:
640  translation->BeginCompiledStubFrame();
641  break;
642  case ARGUMENTS_ADAPTOR:
643  translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
644  break;
645  }
646 
647  int object_index = 0;
648  int dematerialized_index = 0;
649  for (int i = 0; i < translation_size; ++i) {
650  LOperand* value = environment->values()->at(i);
651  AddToTranslation(environment,
652  translation,
653  value,
654  environment->HasTaggedValueAt(i),
655  environment->HasUint32ValueAt(i),
656  &object_index,
657  &dematerialized_index);
658  }
659 }
@ ARGUMENTS_ADAPTOR
Definition: hydrogen.h:546

References AddToTranslation(), v8::internal::ARGUMENTS_ADAPTOR, DCHECK, DefineDeoptimizationLiteral(), v8::internal::JS_CONSTRUCT, v8::internal::JS_FUNCTION, v8::internal::JS_GETTER, v8::internal::JS_SETTER, NULL, and v8::internal::STUB.

Referenced by RegisterEnvironmentForDeoptimization().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ WriteTranslation() [2/7]

void v8::internal::LCodeGen::WriteTranslation ( LEnvironment environment,
Translation *  translation 
)
private

◆ WriteTranslation() [3/7]

void v8::internal::LCodeGen::WriteTranslation ( LEnvironment environment,
Translation *  translation 
)

◆ WriteTranslation() [4/7]

void v8::internal::LCodeGen::WriteTranslation ( LEnvironment environment,
Translation *  translation 
)

◆ WriteTranslation() [5/7]

void v8::internal::LCodeGen::WriteTranslation ( LEnvironment environment,
Translation *  translation 
)

◆ WriteTranslation() [6/7]

void v8::internal::LCodeGen::WriteTranslation ( LEnvironment environment,
Translation *  translation 
)

◆ WriteTranslation() [7/7]

void v8::internal::LCodeGen::WriteTranslation ( LEnvironment environment,
Translation *  translation 
)

◆ X87CommitWrite()

void v8::internal::LCodeGen::X87CommitWrite ( X87Register  reg)
inline

Definition at line 97 of file lithium-codegen-x87.h.

97 { x87_stack_.CommitWrite(reg); }
void CommitWrite(X87Register reg)

References v8::internal::LCodeGen::X87Stack::CommitWrite(), and x87_stack_.

+ Here is the call graph for this function:

◆ X87Fld()

void v8::internal::LCodeGen::X87Fld ( Operand  src,
X87OperandType  opts 
)
private

◆ X87Free()

void v8::internal::LCodeGen::X87Free ( X87Register  reg)
inline

Definition at line 102 of file lithium-codegen-x87.h.

102  {
103  x87_stack_.Free(reg);
104  }
void Free(X87Register reg)

References v8::internal::LCodeGen::X87Stack::Free(), and x87_stack_.

+ Here is the call graph for this function:

◆ X87Fxch()

void v8::internal::LCodeGen::X87Fxch ( X87Register  reg,
int  other_slot = 0 
)
inline

Definition at line 99 of file lithium-codegen-x87.h.

99  {
100  x87_stack_.Fxch(reg, other_slot);
101  }
void Fxch(X87Register reg, int other_slot=0)

References v8::internal::LCodeGen::X87Stack::Fxch(), and x87_stack_.

+ Here is the call graph for this function:

◆ X87LoadForUsage() [1/2]

void v8::internal::LCodeGen::X87LoadForUsage ( X87Register  reg)

◆ X87LoadForUsage() [2/2]

void v8::internal::LCodeGen::X87LoadForUsage ( X87Register  reg1,
X87Register  reg2 
)

◆ X87Mov() [1/3]

void v8::internal::LCodeGen::X87Mov ( Operand  src,
X87Register  reg,
X87OperandType  operand = kX87DoubleOperand 
)

◆ X87Mov() [2/3]

void v8::internal::LCodeGen::X87Mov ( X87Register  reg,
Operand  src,
X87OperandType  operand = kX87DoubleOperand 
)

◆ X87Mov() [3/3]

void v8::internal::LCodeGen::X87Mov ( X87Register  reg,
X87Register  src,
X87OperandType  operand = kX87DoubleOperand 
)

◆ X87PrepareBinaryOp()

void v8::internal::LCodeGen::X87PrepareBinaryOp ( X87Register  left,
X87Register  right,
X87Register  result 
)

◆ X87PrepareToWrite()

void v8::internal::LCodeGen::X87PrepareToWrite ( X87Register  reg)
inline

Definition at line 96 of file lithium-codegen-x87.h.

96 { x87_stack_.PrepareToWrite(reg); }
void PrepareToWrite(X87Register reg)

References v8::internal::LCodeGen::X87Stack::PrepareToWrite(), and x87_stack_.

+ Here is the call graph for this function:

◆ X87StackEmpty()

bool v8::internal::LCodeGen::X87StackEmpty ( )
inline

Definition at line 107 of file lithium-codegen-x87.h.

107  {
108  return x87_stack_.depth() == 0;
109  }

References v8::internal::LCodeGen::X87Stack::depth(), and x87_stack_.

+ Here is the call graph for this function:

Friends And Related Function Documentation

◆ LDeferredCode

LDeferredCode
friend

Definition at line 373 of file lithium-codegen-arm.h.

◆ LEnvironment

◆ LGapResolver

LGapResolver
friend

Definition at line 324 of file lithium-codegen-ia32.h.

◆ SafepointGenerator

SafepointGenerator
friend

Definition at line 375 of file lithium-codegen-arm.h.

Referenced by CallKnownFunction().

◆ X87Stack

friend class X87Stack
friend

Definition at line 474 of file lithium-codegen-x87.h.

Member Data Documentation

◆ after_push_argument_

bool v8::internal::LCodeGen::after_push_argument_
private

Definition at line 369 of file lithium-codegen-arm64.h.

Referenced by ToMemOperand(), and ~LCodeGen().

◆ deferred_

ZoneList< LDeferredCode * > v8::internal::LCodeGen::deferred_
private

Definition at line 340 of file lithium-codegen-arm.h.

Referenced by AddDeferredCode(), and GenerateDeferredCode().

◆ deoptimization_literals_

ZoneList< Handle< Object > > v8::internal::LCodeGen::deoptimization_literals_
private

◆ deoptimizations_

ZoneList< LEnvironment * > v8::internal::LCodeGen::deoptimizations_
private

◆ dynamic_frame_alignment_

bool v8::internal::LCodeGen::dynamic_frame_alignment_
private

Definition at line 341 of file lithium-codegen-ia32.h.

◆ expected_safepoint_kind_

Safepoint::Kind v8::internal::LCodeGen::expected_safepoint_kind_
private

Definition at line 351 of file lithium-codegen-arm.h.

Referenced by RecordSafepoint().

◆ frame_is_built_

bool v8::internal::LCodeGen::frame_is_built_
private

◆ inlined_arguments_

bool v8::internal::LCodeGen::inlined_arguments_
private

Definition at line 373 of file lithium-codegen-arm64.h.

Referenced by ToMemOperand(), and ~LCodeGen().

◆ inlined_function_count_

int v8::internal::LCodeGen::inlined_function_count_
private

◆ jump_table_ [1/2]

ZoneList< Deoptimizer::JumpTableEntry > v8::internal::LCodeGen::jump_table_
private

Definition at line 335 of file lithium-codegen-arm.h.

Referenced by DeoptimizeBranch(), DeoptimizeIf(), and GenerateJumpTable().

◆ jump_table_ [2/2]

ZoneList<Deoptimizer::JumpTableEntry*> v8::internal::LCodeGen::jump_table_
private

Definition at line 348 of file lithium-codegen-arm64.h.

◆ old_position_

int v8::internal::LCodeGen::old_position_
private

Definition at line 375 of file lithium-codegen-arm64.h.

◆ osr_pc_offset_

int v8::internal::LCodeGen::osr_pc_offset_
private

Definition at line 341 of file lithium-codegen-arm.h.

Referenced by GenerateOsrPrologue(), and PopulateDeoptimizationData().

◆ resolver_

LGapResolver v8::internal::LCodeGen::resolver_
private

Definition at line 349 of file lithium-codegen-arm.h.

Referenced by DoParallelMove().

◆ safepoints_

SafepointTableBuilder v8::internal::LCodeGen::safepoints_
private

◆ scope_

Scope *const v8::internal::LCodeGen::scope_
private

Definition at line 338 of file lithium-codegen-arm.h.

Referenced by scope().

◆ support_aligned_spilled_doubles_

bool v8::internal::LCodeGen::support_aligned_spilled_doubles_
private

Definition at line 342 of file lithium-codegen-ia32.h.

◆ translations_

TranslationBuffer v8::internal::LCodeGen::translations_
private

◆ x87_stack_

X87Stack v8::internal::LCodeGen::x87_stack_
private

◆ x87_stack_map_

X87StackMap v8::internal::LCodeGen::x87_stack_map_
private

Definition at line 440 of file lithium-codegen-x87.h.


The documentation for this class was generated from the following files: