#ifndef V8_ARM64_REGEXP_MACRO_ASSEMBLER_ARM64_H_
#define V8_ARM64_REGEXP_MACRO_ASSEMBLER_ARM64_H_

#ifndef V8_INTERPRETED_REGEXP

// Excerpted declarations (full signatures appear in the member list below):
virtual void Bind(Label* label);
virtual void CheckCharacters(Vector<const uc16> str, int cp_offset,
                             Label* on_failure, bool check_end_of_string);
virtual void CheckCharacterNotInRange(uc16 from, uc16 to,
                                      Label* on_not_in_range);
virtual void GoTo(Label* label);
virtual void LoadCurrentCharacter(int cp_offset, Label* on_end_of_input,
                                  bool check_bounds = true,
                                  int characters = 1);

// Truncated inline fragment (register-state check):
DCHECK(register_index >= 0);
if ((register_index % 2) == 0) {
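The DCHECK and modulo test in the truncated fragment above belong to the inline register-state query listed below as GetRegisterState. The following is a minimal sketch, not the V8 source: it assumes the first kNumCachedRegisters capture registers are packed two per 64-bit callee-saved register (even index in the low word, odd index in the high word) and that the rest are spilled to the stack frame; the value 8 is only illustrative.

enum RegisterState { STACKED, CACHED_LSW, CACHED_MSW };

static const int kNumCachedRegisters = 8;  // illustrative value, not taken from the header

RegisterState GetRegisterState(int register_index) {
  DCHECK(register_index >= 0);
  if (register_index >= kNumCachedRegisters) {
    return STACKED;       // lives in the stack frame; see register_location()
  }
  if ((register_index % 2) == 0) {
    return CACHED_LSW;    // low 32 bits of the caching x-register
  }
  return CACHED_MSW;      // high 32 bits of the caching x-register
}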
// Construction and RegExpMacroAssembler interface
RegExpMacroAssemblerARM64(Mode mode, int registers_to_save, Zone* zone)
virtual ~RegExpMacroAssemblerARM64()
virtual int stack_limit_slack()
virtual IrregexpImplementation Implementation()
virtual bool CanReadUnaligned()
virtual void AdvanceCurrentPosition(int by)
virtual void AdvanceRegister(int reg, int by)
virtual void Bind(Label* label)
virtual void GoTo(Label* label)
virtual void CheckAtStart(Label* on_at_start)
virtual void CheckNotAtStart(Label* on_not_at_start)
virtual void CheckCharacter(unsigned c, Label* on_equal)
virtual void CheckNotCharacter(unsigned c, Label* on_not_equal)
virtual void CheckCharacterAfterAnd(unsigned c, unsigned mask, Label* on_equal)
virtual void CheckNotCharacterAfterAnd(unsigned c, unsigned mask, Label* on_not_equal)
virtual void CheckNotCharacterAfterMinusAnd(uc16 c, uc16 minus, uc16 mask, Label* on_not_equal)
virtual void CheckCharacterGT(uc16 limit, Label* on_greater)
virtual void CheckCharacterLT(uc16 limit, Label* on_less)
virtual void CheckCharacterInRange(uc16 from, uc16 to, Label* on_in_range)
virtual void CheckCharacterNotInRange(uc16 from, uc16 to, Label* on_not_in_range)
virtual void CheckBitInTable(Handle<ByteArray> table, Label* on_bit_set)
virtual void CheckCharacters(Vector<const uc16> str, int cp_offset, Label* on_failure, bool check_end_of_string)
virtual bool CheckSpecialCharacterClass(uc16 type, Label* on_no_match)
virtual void CheckNotBackReference(int start_reg, Label* on_no_match)
virtual void CheckNotBackReferenceIgnoreCase(int start_reg, Label* on_no_match)
virtual void CheckGreedyLoop(Label* on_tos_equals_current_position)
virtual void CheckPosition(int cp_offset, Label* on_outside_input)
virtual void LoadCurrentCharacter(int cp_offset, Label* on_end_of_input, bool check_bounds = true, int characters = 1)
virtual void SetCurrentPositionFromEnd(int by)
virtual void SetRegister(int register_index, int to)
virtual void ClearRegisters(int reg_from, int reg_to)
virtual void PopCurrentPosition()
virtual void PushCurrentPosition()
virtual void PopRegister(int register_index)
virtual void PushRegister(int register_index, StackCheckFlag check_stack_limit)
virtual void PushBacktrack(Label* label)
virtual void IfRegisterGE(int reg, int comparand, Label* if_ge)
virtual void IfRegisterLT(int reg, int comparand, Label* if_lt)
virtual void IfRegisterEqPos(int reg, Label* if_eq)
virtual void ReadCurrentPositionFromRegister(int reg)
virtual void WriteCurrentPositionToRegister(int reg, int cp_offset)
virtual void ReadStackPointerFromRegister(int reg)
virtual void WriteStackPointerToRegister(int reg)
virtual Handle<HeapObject> GetCode(Handle<String> source)

// Stack guard handling
static int CheckStackGuardState(Address* return_address, Code* re_code, Address re_frame, int start_offset, const byte** input_start, const byte** input_end)
void CallCheckStackGuardState(Register scratch)

// Frame and stack layout constants
static const int kReturnAddress
static const int kSecondaryReturnAddress
static const int kCalleeSavedRegisters
static const int kDirectCall
static const int kIsolate
static const int kStackBase
static const int kOutputSize
static const int kSuccessCounter
static const int kFirstCaptureOnStack
static const int kFirstRegisterOnStack
static const int kNumCachedRegisters
static const int kNumRegistersToUnroll
static const size_t kRegExpCodeSize

// Register and position helpers
Isolate* isolate() const
RegisterState GetRegisterState(int register_index)
Register GetCachedRegister(int register_index)
Register GetRegister(int register_index, Register maybe_result)
void StoreRegister(int register_index, Register source)
MemOperand register_location(int register_index)
MemOperand capture_location(int register_index, Register scratch)
Register current_input_offset()
Register current_character()
Register backtrack_stackpointer()
Register non_position_value()
Register twice_non_position_value()
void LoadCurrentCharacterUnchecked(int cp_offset, int character_count)

// Backtrack stack and control-flow helpers
void Push(Register source)
void Pop(Register target)
void CallIf(Label* to, Condition condition)
void RestoreLinkRegister()
void BranchOrBacktrack(Condition condition, Label* to)
void CompareAndBranchOrBacktrack(Register reg, int immediate, Condition condition, Label* to)

// Deferred-code labels
Label check_preempt_label_
Label stack_overflow_label_
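To make the interface above concrete, here is a hedged usage sketch. The EmitMatchAB helper and the masm pointer are invented for illustration and are not part of this header; the sketch only shows how compiler-style code could drive methods from the member list to emit a naive matcher for the two-character literal "ab".

void EmitMatchAB(RegExpMacroAssemblerARM64* masm) {
  Label fail;
  // Load the character at the current position; bail out at end of input.
  masm->LoadCurrentCharacter(0, &fail);
  masm->CheckNotCharacter('a', &fail);   // branch to fail if it is not 'a'
  masm->LoadCurrentCharacter(1, &fail);  // look one character ahead
  masm->CheckNotCharacter('b', &fail);
  masm->AdvanceCurrentPosition(2);       // consume both characters
  // A real caller would record captures with WriteCurrentPositionToRegister
  // and then signal success; both steps are omitted in this sketch.
  masm->Bind(&fail);
}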
#define DCHECK(condition)  // V8 debug-mode assertion; compiled out in release builds
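For orientation, the sketch below is a simplified stand-in for a debug-only assertion macro in the spirit of DCHECK, not V8's actual definition: it assumes a plain message-and-abort on failure in debug builds and a no-op in release builds.

#include <cstdio>
#include <cstdlib>

#ifdef DEBUG
#define DCHECK(condition)                                    \
  do {                                                       \
    if (!(condition)) {                                      \
      fprintf(stderr, "DCHECK failed: %s\n", #condition);    \
      abort();                                               \
    }                                                        \
  } while (false)
#else
#define DCHECK(condition) ((void)0)
#endif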