#ifndef V8_X87_ASSEMBLER_X87_INL_H_
#define V8_X87_ASSEMBLER_X87_INL_H_
void RelocInfo::apply(intptr_t delta, ICacheFlushMode icache_flush_mode) {
  // ...
}
Address RelocInfo::target_address() {
  // ...
}

Address RelocInfo::target_address_address() {
  // ...
}

Address RelocInfo::constant_pool_entry_address() {
  // ...
}

int RelocInfo::target_address_size() {
  // ...
}
void RelocInfo::set_target_address(Address target,
                                   WriteBarrierMode write_barrier_mode,
                                   ICacheFlushMode icache_flush_mode) {
  // ... update the embedded target, then notify incremental marking:
  host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
      host(), this, HeapObject::cast(target_code));
  // ...
}
Object* RelocInfo::target_object() {
  // ...
}

Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  // ...
}
void RelocInfo::set_target_object(Object* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  // ...
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL &&
      target->IsHeapObject()) {
    // ... record the write for incremental marking ...
  }
}
Address RelocInfo::target_reference() {
  // ...
}

Address RelocInfo::target_runtime_entry(Assembler* origin) {
  // ...
}
void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  DCHECK(IsRuntimeEntry(rmode_));
  if (target_address() != target) {
    set_target_address(target, write_barrier_mode, icache_flush_mode);
  }
}
Handle<Cell> RelocInfo::target_cell_handle() {
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}
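// Illustrative sketch, not from the original header: for EMBEDDED_OBJECT and
// CELL relocations on this port the relocated value is an absolute 32-bit
// pointer stored directly at pc_, so the accessors above just treat the bytes
// at pc_ as a pointer-sized slot. LoadEmbeddedPointer is an example-only name.
#include <cstdint>
#include <cstring>

static inline void* LoadEmbeddedPointer(const uint8_t* pc) {
  void* value;
  std::memcpy(&value, pc, sizeof(value));  // unaligned-safe read of the slot
  return value;
}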
Cell* RelocInfo::target_cell() {
  // ...
}

void RelocInfo::set_target_cell(Cell* cell,
                                WriteBarrierMode write_barrier_mode,
                                ICacheFlushMode icache_flush_mode) {
  // ...
}
Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  // ...
}

Code* RelocInfo::code_age_stub() {
  // ...
}

void RelocInfo::set_code_age_stub(Code* stub,
                                  ICacheFlushMode icache_flush_mode) {
  // ...
}

Address RelocInfo::call_address() {
  // ...
}
void RelocInfo::set_call_address(Address target) {
  // ... patch the call target, then notify incremental marking:
  host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
      host(), this, HeapObject::cast(target_code));
  // ...
}
Object* RelocInfo::call_object() {
  return *call_object_address();
}

void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}

Object** RelocInfo::call_object_address() {
  return reinterpret_cast<Object**>(pc_ + 1);
}
void RelocInfo::WipeOut() {
  // ...
}

bool RelocInfo::IsPatchedReturnSequence() {
  // ...
}

bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  // ...
}
void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
  } else if (((RelocInfo::IsJSReturn(mode) &&
               IsPatchedReturnSequence()) ||
              (RelocInfo::IsDebugBreakSlot(mode) &&
               IsPatchedDebugBreakSlotSequence())) &&
             isolate->debug()->has_break_points()) {
    visitor->VisitDebugTarget(this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}
template <typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
               IsPatchedReturnSequence()) ||
              (RelocInfo::IsDebugBreakSlot(mode) &&
               IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}
Immediate::Immediate(const ExternalReference& ext) {
  x_ = reinterpret_cast<int32_t>(ext.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
}

Immediate::Immediate(Label* internal_offset) {
  x_ = reinterpret_cast<int32_t>(internal_offset);
  rmode_ = RelocInfo::INTERNAL_REFERENCE;
}
Immediate::Immediate(Handle<Object> handle) {
  // Verify all Objects referred by code are NOT in new space.
  Object* obj = *handle;
  if (obj->IsHeapObject()) {
    DCHECK(!HeapObject::cast(obj)->GetHeap()->InNewSpace(obj));
    x_ = reinterpret_cast<intptr_t>(handle.location());
    rmode_ = RelocInfo::EMBEDDED_OBJECT;
  } else {
    // No relocation needed.
    x_ = reinterpret_cast<intptr_t>(obj);
    rmode_ = RelocInfo::NONE32;
  }
}
Immediate::Immediate(Smi* value) {
  x_ = reinterpret_cast<intptr_t>(value);
  rmode_ = RelocInfo::NONE32;
}

Immediate::Immediate(Address addr) {
  x_ = reinterpret_cast<int32_t>(addr);
  rmode_ = RelocInfo::NONE32;
}
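// Illustrative sketch, not from the original header: on 32-bit V8 a Smi is an
// integer tagged in the pointer itself (value shifted left by one, low bit 0),
// which is why Immediate(Smi*) above can embed the "pointer" as a plain 32-bit
// constant with no relocation entry. ToSmi/SmiValue are example-only names and
// assume an arithmetic right shift for negative values.
#include <cstdint>

static inline int32_t ToSmi(int32_t value) { return value << 1; }  // tag
static inline int32_t SmiValue(int32_t smi) { return smi >> 1; }   // untag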
void Assembler::emit(Handle<Object> handle) {
  Object* obj = *handle;
  if (obj->IsHeapObject()) {
    emit(reinterpret_cast<intptr_t>(handle.location()),
         RelocInfo::EMBEDDED_OBJECT);
  } else {
    // No relocation needed.
    emit(reinterpret_cast<intptr_t>(obj));
  }
}
void Assembler::emit(Handle<Code> code, RelocInfo::Mode rmode,
                     TypeFeedbackId id) {
  emit(reinterpret_cast<intptr_t>(code.location()), rmode, id);
}
void Assembler::emit(const Immediate& x) {
  if (x.rmode_ == RelocInfo::INTERNAL_REFERENCE) {
    Label* label = reinterpret_cast<Label*>(x.x_);
    emit_code_relative_offset(label);
    return;
  }
  // ...
}

void Assembler::emit_code_relative_offset(Label* label) {
  if (label->is_bound()) {
    // ... emit the label's bound position as a code-relative offset ...
  } else {
    emit_disp(label, Displacement::CODE_RELATIVE);
  }
}
Address Assembler::target_address_at(Address pc,
                                     ConstantPoolArray* constant_pool) {
  return pc + sizeof(int32_t) + *reinterpret_cast<int32_t*>(pc);
}

void Assembler::set_target_address_at(Address pc,
                                      ConstantPoolArray* constant_pool,
                                      Address target,
                                      ICacheFlushMode icache_flush_mode) {
  // ...
}
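// Illustrative sketch, not from the original header: on ia32/x87 a call or
// jump target is encoded as a signed rel32 stored at pc, relative to the end
// of that 4-byte field, so resolving and repatching it is plain pointer
// arithmetic. ResolveRel32/PatchRel32 are example-only names; a real patch
// also has to deal with the instruction cache, as the icache_flush_mode
// parameter above suggests.
#include <cstdint>
#include <cstring>

static inline uint8_t* ResolveRel32(uint8_t* pc) {
  int32_t rel;
  std::memcpy(&rel, pc, sizeof(rel));    // rel32 stored at pc
  return pc + sizeof(int32_t) + rel;     // target = end of field + rel
}

static inline void PatchRel32(uint8_t* pc, uint8_t* target) {
  int32_t rel = static_cast<int32_t>(target - (pc + sizeof(int32_t)));
  std::memcpy(pc, &rel, sizeof(rel));
}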
Displacement Assembler::disp_at(Label* L) {
  return Displacement(long_at(L->pos()));
}
void Assembler::emit_disp(Label* L, Displacement::Type type) {
  Displacement disp(L, type);
  L->link_to(pc_offset());
  emit(static_cast<int>(disp.data()));
}
void Assembler::emit_near_disp(Label* L) {
  byte disp = 0x00;
  if (L->is_near_linked()) {
    int offset = L->near_link_pos() - pc_offset();
    DCHECK(is_int8(offset));
    disp = static_cast<byte>(offset & 0xFF);
  }
  L->link_to(pc_offset(), Label::kNear);
  *pc_++ = disp;
}
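// Illustrative sketch, not from the original header, of the general technique
// behind emit_disp/emit_near_disp: while a Label is unbound, each jump site
// stores a link to the previous unresolved site, threading a chain through the
// code buffer that is walked and patched once the label is bound. MiniLabel,
// MiniBuffer, EmitJumpTo and Bind are example-only names; V8's real
// Displacement also encodes a type alongside the link.
#include <cstdint>
#include <cstring>
#include <vector>

struct MiniLabel {
  int pos = -1;       // last linked site, or final position once bound
  bool bound = false;
};

class MiniBuffer {
 public:
  // Emit a 32-bit slot that should eventually hold "target - end of slot".
  void EmitJumpTo(MiniLabel* l) {
    int32_t payload = l->bound ? l->pos - (PcOffset() + 4)  // resolved rel32
                               : l->pos;                    // link to previous site
    if (!l->bound) l->pos = PcOffset();  // this site now heads the chain
    Emit32(payload);
  }

  // Bind the label at |target| and patch every site on its chain.
  void Bind(MiniLabel* l, int target) {
    int site = l->pos;
    while (site != -1) {
      int32_t prev;
      std::memcpy(&prev, &bytes_[site], 4);   // next site in the chain
      int32_t rel = target - (site + 4);
      std::memcpy(&bytes_[site], &rel, 4);    // patch in the real rel32
      site = prev;
    }
    l->pos = target;
    l->bound = true;
  }

  int PcOffset() const { return static_cast<int>(bytes_.size()); }

 private:
  void Emit32(int32_t v) {
    bytes_.resize(bytes_.size() + 4);
    std::memcpy(&bytes_[bytes_.size() - 4], &v, 4);
  }
  std::vector<uint8_t> bytes_;
};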
void Operand::set_modrm(int mod, Register rm) {
  DCHECK((mod & -4) == 0);
  buf_[0] = mod << 6 | rm.code();
  len_ = 1;
}
void Operand::set_sib(ScaleFactor scale, Register index, Register base) {
  DCHECK(len_ == 1);
  DCHECK((scale & -4) == 0);
  buf_[1] = scale << 6 | index.code() << 3 | base.code();
  len_ = 2;
}
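// Illustrative sketch, not from the original header: set_modrm and set_sib
// above pack the standard x86 ModRM (mod<<6 | reg<<3 | r/m) and SIB
// (scale<<6 | index<<3 | base) bytes. A self-contained encoder for the operand
// bytes of [base + index*scale + disp8] could look like this; EncodeMemOperand
// and the raw register numbers are example-only (and index 4, i.e. esp, would
// really mean "no index").
#include <cstdint>
#include <vector>

static std::vector<uint8_t> EncodeMemOperand(int reg, int base, int index,
                                             int scale_log2, int8_t disp8) {
  std::vector<uint8_t> bytes;
  bytes.push_back(0x40 | (reg & 7) << 3 | 0x4);  // mod=01 (disp8), r/m=100 (SIB follows)
  bytes.push_back((scale_log2 & 3) << 6 | (index & 7) << 3 | (base & 7));  // SIB byte
  bytes.push_back(static_cast<uint8_t>(disp8));  // 8-bit displacement
  return bytes;
}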
void Operand::set_disp8(int8_t disp) {
  DCHECK(len_ == 1 || len_ == 2);
  *reinterpret_cast<int8_t*>(&buf_[len_++]) = disp;
}
void Operand::set_dispr(int32_t disp, RelocInfo::Mode rmode) {
  DCHECK(len_ == 1 || len_ == 2);
  // ...
}

Operand::Operand(int32_t disp, RelocInfo::Mode rmode) {
  // [disp/r]
  set_modrm(0, ebp);
  set_dispr(disp, rmode);
}

Operand::Operand(Immediate imm) {
  // [disp/r]
  set_modrm(0, ebp);
  set_dispr(imm.x_, imm.rmode_);
}

#endif  // V8_X87_ASSEMBLER_X87_INL_H_