V8 Project
assembler-mips-inl.h

// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// The original source code covered by the above license has been
// modified significantly by Google Inc.
// Copyright 2012 the V8 project authors. All rights reserved.


#ifndef V8_MIPS_ASSEMBLER_MIPS_INL_H_
#define V8_MIPS_ASSEMBLER_MIPS_INL_H_

#include "src/mips/assembler-mips.h"

#include "src/assembler.h"
#include "src/debug.h"


namespace v8 {
namespace internal {


bool CpuFeatures::SupportsCrankshaft() { return IsSupported(FPU); }

// -----------------------------------------------------------------------------
// Operand and MemOperand.

Operand::Operand(int32_t immediate, RelocInfo::Mode rmode) {
  rm_ = no_reg;
  imm32_ = immediate;
  rmode_ = rmode;
}


Operand::Operand(const ExternalReference& f) {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<int32_t>(f.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
}


Operand::Operand(Smi* value) {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<intptr_t>(value);
  rmode_ = RelocInfo::NONE32;
}


Operand::Operand(Register rm) {
  rm_ = rm;
}


bool Operand::is_reg() const {
  return rm_.is_valid();
}


int Register::NumAllocatableRegisters() {
  return kMaxNumAllocatableRegisters;
}


int DoubleRegister::NumRegisters() {
  return FPURegister::kMaxNumRegisters;
}


int DoubleRegister::NumAllocatableRegisters() {
  return FPURegister::kMaxNumAllocatableRegisters;
}


int DoubleRegister::NumAllocatableAliasedRegisters() {
  return NumAllocatableRegisters();
}


int FPURegister::ToAllocationIndex(FPURegister reg) {
  DCHECK(reg.code() % 2 == 0);
  DCHECK(reg.code() / 2 < kMaxNumAllocatableRegisters);
  DCHECK(reg.is_valid());
  DCHECK(!reg.is(kDoubleRegZero));
  DCHECK(!reg.is(kLithiumScratchDouble));
  return (reg.code() / 2);
}


// -----------------------------------------------------------------------------
// RelocInfo.

void RelocInfo::apply(intptr_t delta, ICacheFlushMode icache_flush_mode) {
  if (IsCodeTarget(rmode_)) {
    uint32_t scope1 = (uint32_t) target_address() & ~kImm28Mask;
    uint32_t scope2 = reinterpret_cast<uint32_t>(pc_) & ~kImm28Mask;

    if (scope1 != scope2) {
      Assembler::JumpLabelToJumpRegister(pc_);
    }
  }
  if (IsInternalReference(rmode_)) {
    // Absolute code pointer inside code object moves with the code object.
    byte* p = reinterpret_cast<byte*>(pc_);
    int count = Assembler::RelocateInternalReference(p, delta);
    CpuFeatures::FlushICache(p, count * sizeof(uint32_t));
  }
}


Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_, host_);
}


Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) ||
         rmode_ == EMBEDDED_OBJECT ||
         rmode_ == EXTERNAL_REFERENCE);
  // Read the address of the word containing the target_address in an
  // instruction stream.
  // The only architecture-independent user of this function is the serializer.
  // The serializer uses it to find out how many raw bytes of instruction to
  // output before the next target.
  // For an instruction like LUI/ORI where the target bits are mixed into the
  // instruction bits, the size of the target will be zero, indicating that the
  // serializer should not step forward in memory after a target is resolved
  // and written. In this case the target_address_address function should
  // return the end of the instructions to be patched, allowing the
  // deserializer to deserialize the instructions as raw bytes and put them in
  // place, ready to be patched with the target. After jump optimization,
  // that is the address of the instruction that follows J/JAL/JR/JALR
  // instruction.
  return reinterpret_cast<Address>(
      pc_ + Assembler::kInstructionsFor32BitConstant * Assembler::kInstrSize);
}


Address RelocInfo::constant_pool_entry_address() {
  UNREACHABLE();
  return NULL;
}


int RelocInfo::target_address_size() {
  return Assembler::kSpecialTargetSize;
}


void RelocInfo::set_target_address(Address target,
                                   WriteBarrierMode write_barrier_mode,
                                   ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  Assembler::set_target_address_at(pc_, host_, target, icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL && IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}



Address Assembler::target_address_from_return_address(Address pc) {
  return pc - kCallTargetAddressOffset;
}


Address Assembler::break_address_from_return_address(Address pc) {
  return pc - Assembler::kPatchDebugBreakSlotReturnOffset;
}



Object* RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_));
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<Object>(reinterpret_cast<Object**>(
      Assembler::target_address_at(pc_, host_)));
}


void RelocInfo::set_target_object(Object* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Assembler::set_target_address_at(pc_, host_,
                                   reinterpret_cast<Address>(target),
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}


Address RelocInfo::target_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, host_);
}


Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}


void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  DCHECK(IsRuntimeEntry(rmode_));
  if (target_address() != target)
    set_target_address(target, write_barrier_mode, icache_flush_mode);
}


Handle<Cell> RelocInfo::target_cell_handle() {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}


Cell* RelocInfo::target_cell() {
  DCHECK(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}


void RelocInfo::set_target_cell(Cell* cell,
                                WriteBarrierMode write_barrier_mode,
                                ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    // TODO(1550) We are passing NULL as a slot because cell can never be on
    // evacuation candidate.
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), NULL, cell);
  }
}


static const int kNoCodeAgeSequenceLength = 7 * Assembler::kInstrSize;


Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  UNREACHABLE();  // This should never be reached on MIPS.
  return Handle<Object>();
}


Code* RelocInfo::code_age_stub() {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  return Code::GetCodeFromTargetAddress(
      Assembler::target_address_at(pc_ + Assembler::kInstrSize, host_));
}


void RelocInfo::set_code_age_stub(Code* stub,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Assembler::set_target_address_at(pc_ + Assembler::kInstrSize,
                                   host_,
                                   stub->instruction_start());
}


Address RelocInfo::call_address() {
  DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  // The pc_ offset of 0 assumes mips patched return sequence per
  // debug-mips.cc BreakLocationIterator::SetDebugBreakAtReturn(), or
  // debug break slot per BreakLocationIterator::SetDebugBreakAtSlot().
  return Assembler::target_address_at(pc_, host_);
}


void RelocInfo::set_call_address(Address target) {
  DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  // The pc_ offset of 0 assumes mips patched return sequence per
  // debug-mips.cc BreakLocationIterator::SetDebugBreakAtReturn(), or
  // debug break slot per BreakLocationIterator::SetDebugBreakAtSlot().
  Assembler::set_target_address_at(pc_, host_, target);
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


Object* RelocInfo::call_object() {
  return *call_object_address();
}


Object** RelocInfo::call_object_address() {
  DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(pc_ + 2 * Assembler::kInstrSize);
}


void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}

void RelocInfo::WipeOut() {
  DCHECK(IsEmbeddedObject(rmode_) ||
         IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) ||
         IsExternalReference(rmode_));
  Assembler::set_target_address_at(pc_, host_, NULL);
}


bool RelocInfo::IsPatchedReturnSequence() {
  Instr instr0 = Assembler::instr_at(pc_);
  Instr instr1 = Assembler::instr_at(pc_ + 1 * Assembler::kInstrSize);
  Instr instr2 = Assembler::instr_at(pc_ + 2 * Assembler::kInstrSize);
  bool patched_return = ((instr0 & kOpcodeMask) == LUI &&
                         (instr1 & kOpcodeMask) == ORI &&
                         ((instr2 & kOpcodeMask) == JAL ||
                          ((instr2 & kOpcodeMask) == SPECIAL &&
                           (instr2 & kFunctionFieldMask) == JALR)));
  return patched_return;
}


bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  Instr current_instr = Assembler::instr_at(pc_);
  return !Assembler::IsNop(current_instr, Assembler::DEBUG_BREAK_NOP);
}


void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
  } else if (((RelocInfo::IsJSReturn(mode) &&
               IsPatchedReturnSequence()) ||
              (RelocInfo::IsDebugBreakSlot(mode) &&
               IsPatchedDebugBreakSlotSequence())) &&
             isolate->debug()->has_break_points()) {
    visitor->VisitDebugTarget(this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}


template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
               IsPatchedReturnSequence()) ||
              (RelocInfo::IsDebugBreakSlot(mode) &&
               IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}


// -----------------------------------------------------------------------------
// Assembler.


void Assembler::CheckBuffer() {
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
}


void Assembler::CheckTrampolinePoolQuick() {
  if (pc_offset() >= next_buffer_check_) {
    CheckTrampolinePool();
  }
}


void Assembler::emit(Instr x) {
  if (!is_buffer_growth_blocked()) {
    CheckBuffer();
  }
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
  CheckTrampolinePoolQuick();
}


} }  // namespace v8::internal

#endif  // V8_MIPS_ASSEMBLER_MIPS_INL_H_