// Debugger support for the V8 JavaScript engine.

#include "src/v8.h"

#if V8_TARGET_ARCH_ARM64

#include "src/codegen.h"
#include "src/debug.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)
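
// With this macro in scope, `__ Mov(x0, 1)` is shorthand for
// `masm->Mov(x0, 1)` (assuming the conventional ACCESS_MASM expansion), so
// the code-generation bodies below read much like assembly listings.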

void BreakLocationIterator::SetDebugBreakAtReturn() {
  // Patch the JS return sequence emitted by the full code generator with a
  // call to the debug break return entry point: a pc-relative literal load
  // of the entry address into ip0, a call through ip0, and a trap in case
  // the call ever returns, followed by the 64-bit address itself.
  STATIC_ASSERT(Assembler::kJSRetSequenceInstructions >= 5);
  PatchingAssembler patcher(reinterpret_cast<Instruction*>(rinfo()->pc()), 5);
  byte* entry =
      debug_info_->GetIsolate()->builtins()->Return_DebugBreak()->entry();

  // The first instruction of a patched return sequence must be a load
  // literal loading the address of the debug break return code.
  patcher.ldr_pcrel(ip0, (3 * kInstructionSize) >> kLoadLiteralScaleLog2);
  patcher.blr(ip0);
  patcher.hlt(kHltBadCode);  // The call above must not return.
  patcher.dc64(reinterpret_cast<int64_t>(entry));
}
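
// A worked example of the literal-load offset above, assuming the arm64
// values kInstructionSize == 4 and kLoadLiteralScaleLog2 == 2: the entry
// address sits three instruction slots past the ldr (past the blr and the
// hlt), and pc-relative literal loads encode their offset in 4-byte units,
// so the operand is (3 * 4) >> 2 = 3.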

bool Debug::IsDebugBreakAtReturn(RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsJSReturn(rinfo->rmode()));
  return rinfo->IsPatchedReturnSequence();
}

bool BreakLocationIterator::IsDebugBreakAtSlot() {
  DCHECK(IsDebugBreakSlot());
  // Check whether the debug break slot instructions have been patched.
  return rinfo()->IsPatchedDebugBreakSlotSequence();
}

void BreakLocationIterator::SetDebugBreakAtSlot() {
  // Patch the four DEBUG_BREAK_NOP instructions of a debug break slot with a
  // pc-relative literal load of the Slot_DebugBreak entry address into ip0,
  // a call through ip0, and the 64-bit address itself.
  STATIC_ASSERT(Assembler::kDebugBreakSlotInstructions >= 4);
  PatchingAssembler patcher(reinterpret_cast<Instruction*>(rinfo()->pc()), 4);
  byte* entry =
      debug_info_->GetIsolate()->builtins()->Slot_DebugBreak()->entry();

  // The first instruction of a patched debug break slot must be a load
  // literal loading the address of the debug break slot code.
  patcher.ldr_pcrel(ip0, (2 * kInstructionSize) >> kLoadLiteralScaleLog2);
  patcher.blr(ip0);
  patcher.dc64(reinterpret_cast<int64_t>(entry));
}
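
// Layout sketch of the patch above, following the file's own conventions:
//   mov x0, x0    @ nop DEBUG_BREAK_NOP      ldr ip0, [pc, #(2 * kInstructionSize)]
//   mov x0, x0    @ nop DEBUG_BREAK_NOP  =>  blr ip0
//   mov x0, x0    @ nop DEBUG_BREAK_NOP      <64-bit Slot_DebugBreak
//   mov x0, x0    @ nop DEBUG_BREAK_NOP       entry point address>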

static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
                                          RegList object_regs,
                                          RegList non_object_regs,
                                          Register scratch) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Registers in object_regs must hold tagged values the GC may relocate;
    // registers in non_object_regs are smi-tagged below so the GC ignores
    // them. The two sets must be disjoint and must not overlap the scratch
    // register or the assembler's own temporaries.
    DCHECK((object_regs & non_object_regs) == 0);
    DCHECK((scratch.Bit() & object_regs) == 0);
    DCHECK((scratch.Bit() & non_object_regs) == 0);
    DCHECK((masm->TmpList()->list() & (object_regs | non_object_regs)) == 0);

    CPURegList non_object_list =
        CPURegList(CPURegister::kRegister, kXRegSizeInBits, non_object_regs);
    while (!non_object_list.IsEmpty()) {
      // Push each 64-bit non-object register as two smis: the high half via
      // scratch, the low half in the register itself.
      Register reg = Register(non_object_list.PopLowestIndex());
      __ Lsr(scratch, reg, 32);
      __ SmiTagAndPush(scratch, reg);
    }

    if (object_regs != 0) {
      __ PushXRegList(object_regs);
    }

#ifdef DEBUG
    __ RecordComment(
        "// Calling from debug break to runtime - come in - over");
#endif
    __ Mov(x0, 0);  // No arguments.
    __ Mov(x1, ExternalReference::debug_break(masm->isolate()));

    CEntryStub stub(masm->isolate(), 1);
    __ CallStub(&stub);

    // Restore the register values from the expression stack.
    if (object_regs != 0) {
      __ PopXRegList(object_regs);
    }

    non_object_list =
        CPURegList(CPURegister::kRegister, kXRegSizeInBits, non_object_regs);
    while (!non_object_list.IsEmpty()) {
      // Reassemble each 64-bit value from the two smis pushed above.
      Register reg = Register(non_object_list.PopHighestIndex());
      __ Pop(scratch, reg);
      __ Bfxil(reg, scratch, 32, 32);
    }
  }  // Leave the internal frame.

  // Now that the break point has been handled, resume normal execution by
  // jumping to the code stored in the after-break-target address slot.
  ExternalReference after_break_target =
      ExternalReference::debug_after_break_target_address(masm->isolate());
  __ Mov(scratch, after_break_target);
  __ Ldr(scratch, MemOperand(scratch));
  __ Br(scratch);
}
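
// A standalone, illustrative-only sketch (not compiled) of the split-and-
// reassemble trick used above: with 32-bit smis each tagged value keeps its
// payload in the upper 32 bits of the word, and Bfxil(reg, scratch, 32, 32)
// copies scratch[63:32] into reg[31:0].
#if 0
#include <cstdint>
uint64_t SplitAndReassemble(uint64_t value) {
  uint64_t high_smi = (value >> 32) << 32;  // __ Lsr + smi tag (scratch)
  uint64_t low_smi = value << 32;           // smi tag (reg)
  uint64_t reg = high_smi;                  // after __ Pop: reg = high half
  reg = (reg & ~UINT64_C(0xffffffff)) | (low_smi >> 32);  // __ Bfxil
  return reg;                               // == value
}
#endif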

void DebugCodegen::GenerateCallICStubDebugBreak(MacroAssembler* masm) {
  // Register state for CallICStub: x1 holds the function, x3 the slot in
  // the feedback array.
  Generate_DebugBreakCallHelper(masm, x1.Bit() | x3.Bit(), 0, x10);
}
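
// Note on the pattern used by the stub-specific generators below: RegList
// values are plain bitmasks, so the set of live registers to protect across
// the debug break is composed with `|`, e.g. (illustrative only):
//   RegList live = x1.Bit() | x3.Bit();  // function + feedback slot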

void DebugCodegen::GenerateLoadICDebugBreak(MacroAssembler* masm) {
  // Calling convention for IC load (from ic-arm64.cc).
  Register receiver = LoadDescriptor::ReceiverRegister();
  Register name = LoadDescriptor::NameRegister();
  Generate_DebugBreakCallHelper(masm, receiver.Bit() | name.Bit(), 0, x10);
}

void DebugCodegen::GenerateStoreICDebugBreak(MacroAssembler* masm) {
  // Calling convention for IC store (from ic-arm64.cc).
  Register receiver = StoreDescriptor::ReceiverRegister();
  Register name = StoreDescriptor::NameRegister();
  Register value = StoreDescriptor::ValueRegister();
  Generate_DebugBreakCallHelper(
      masm, receiver.Bit() | name.Bit() | value.Bit(), 0, x10);
}

void DebugCodegen::GenerateKeyedStoreICDebugBreak(MacroAssembler* masm) {
  // Calling convention for keyed IC store (from ic-arm64.cc).
  Register receiver = StoreDescriptor::ReceiverRegister();
  Register name = StoreDescriptor::NameRegister();
  Register value = StoreDescriptor::ValueRegister();
  Generate_DebugBreakCallHelper(
      masm, receiver.Bit() | name.Bit() | value.Bit(), 0, x10);
}

void DebugCodegen::GenerateCompareNilICDebugBreak(MacroAssembler* masm) {
  // Register state for CompareNilIC: x0 holds the value to compare.
  Generate_DebugBreakCallHelper(masm, x0.Bit(), 0, x10);
}

void DebugCodegen::GenerateReturnDebugBreak(MacroAssembler* masm) {
  // At a JS return site, x0 holds the top-of-stack value, which is an
  // object.
  Generate_DebugBreakCallHelper(masm, x0.Bit(), 0, x10);
}

void DebugCodegen::GenerateCallFunctionStubDebugBreak(MacroAssembler* masm) {
  // Register state for CallFunctionStub: x1 holds the function.
  Generate_DebugBreakCallHelper(masm, x1.Bit(), 0, x10);
}

void DebugCodegen::GenerateCallConstructStubDebugBreak(MacroAssembler* masm) {
  // Register state for CallConstructStub: x1 holds the constructor function,
  // x0 the number of arguments (untagged, hence a non-object register).
  Generate_DebugBreakCallHelper(masm, x1.Bit(), x0.Bit(), x10);
}

void DebugCodegen::GenerateCallConstructStubRecordDebugBreak(
    MacroAssembler* masm) {
  // Register state: x1 holds the constructor function, x2 the feedback
  // array, x3 the feedback slot (smi), and x0 the untagged argument count.
  Generate_DebugBreakCallHelper(
      masm, x1.Bit() | x2.Bit() | x3.Bit(), x0.Bit(), x10);
}

void DebugCodegen::GenerateSlot(MacroAssembler* masm) {
  // Generate enough nops to make room for a call instruction; these are the
  // DEBUG_BREAK_NOPs that SetDebugBreakAtSlot later patches.
  InstructionAccurateScope scope(masm, Assembler::kDebugBreakSlotInstructions);

  __ RecordDebugBreakSlot();
  for (int i = 0; i < Assembler::kDebugBreakSlotInstructions; i++) {
    __ nop(Assembler::DEBUG_BREAK_NOP);
  }
}

void DebugCodegen::GenerateSlotDebugBreak(MacroAssembler* masm) {
  // Where a debug break slot is inserted, no registers hold object values.
  Generate_DebugBreakCallHelper(masm, 0, 0, x10);
}

void DebugCodegen::GenerateFrameDropperLiveEdit(MacroAssembler* masm) {
  ExternalReference restarter_frame_function_slot =
      ExternalReference::debug_restarter_frame_function_pointer_address(
          masm->isolate());
  UseScratchRegisterScope temps(masm);
  Register scratch = temps.AcquireX();

  // Clear the restarter-frame function pointer slot.
  __ Mov(scratch, restarter_frame_function_slot);
  __ Str(xzr, MemOperand(scratch));

  // We do not know our frame height, but set the stack pointer based on fp.
  __ Sub(masm->StackPointer(), fp, kPointerSize);
  __ AssertStackConsistency();

  __ Pop(x1, fp, lr);  // Function, Frame, Return address.

  // Load context and code from the function, then re-enter it: x1 is the
  // function, cp the context.
  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));
  __ Ldr(scratch, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(scratch, FieldMemOperand(scratch, SharedFunctionInfo::kCodeOffset));
  __ Add(scratch, scratch, Code::kHeaderSize - kHeapObjectTag);
  __ Br(scratch);
}


const bool LiveEdit::kFrameDropperSupported = true;

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM64