#ifndef V8_COMPILER_INSTRUCTION_H_
#define V8_COMPILER_INSTRUCTION_H_
#define INSTRUCTION_OPERAND_LIST(V)                                  \
  V(Constant, CONSTANT, 128)                                         \
  V(Immediate, IMMEDIATE, 128)                                       \
  V(StackSlot, STACK_SLOT, 128)                                      \
  V(DoubleStackSlot, DOUBLE_STACK_SLOT, 128)                         \
  V(Register, REGISTER, Register::kNumRegisters)                     \
  V(DoubleRegister, DOUBLE_REGISTER, DoubleRegister::kMaxNumRegisters)

// Defines an Is<Name>() kind predicate for every operand kind listed above,
// plus the unallocated and ignored pseudo-kinds.
#define INSTRUCTION_OPERAND_PREDICATE(name, type, number) \
  bool Is##name() const { return kind() == type; }
  INSTRUCTION_OPERAND_LIST(INSTRUCTION_OPERAND_PREDICATE)
  INSTRUCTION_OPERAND_PREDICATE(Unallocated, UNALLOCATED, 0)
  INSTRUCTION_OPERAND_PREDICATE(Ignored, INVALID, 0)
#undef INSTRUCTION_OPERAND_PREDICATE
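// Expansion sketch (illustrative, not part of the original header): for the
// (Constant, CONSTANT, 128) entry of INSTRUCTION_OPERAND_LIST, the predicate
// macro expands to roughly
//
//   bool IsConstant() const { return kind() == CONSTANT; }
//
// so callers can test an operand's kind without casting.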
  static const UnallocatedOperand* cast(const InstructionOperand* op) {
    DCHECK(op->IsUnallocated());
    return static_cast<const UnallocatedOperand*>(op);
  }

  static UnallocatedOperand* cast(InstructionOperand* op) {
    DCHECK(op->IsUnallocated());
    return static_cast<UnallocatedOperand*>(op);
  }
class MoveOperands FINAL {
 public:
  MoveOperands(InstructionOperand* source, InstructionOperand* destination)
      : source_(source), destination_(destination) {}

  // True if this move is still live and its source reads |operand|.
  bool Blocks(InstructionOperand* operand) const {
    return !IsEliminated() && source()->Equals(operand);
  }

  // A move is redundant if it has been eliminated, if its source and
  // destination are the same, or if its destination is a constant.
  bool IsRedundant() const {
    return IsEliminated() || source_->Equals(destination_) || IsIgnored() ||
           (destination_ != NULL && destination_->IsConstant());
  }

  bool IsIgnored() const {
    return destination_ != NULL && destination_->IsIgnored();
  }

  // An eliminated move has had its source operand cleared.
  bool IsEliminated() const { return source_ == NULL; }
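// Usage sketch (|reg_a| and |reg_b| are hypothetical operands): a move whose
// source equals its destination is redundant; a live move blocks any move
// that would overwrite its source.
//
//   MoveOperands self_move(reg_a, reg_a);
//   // self_move.IsRedundant() is true: source_->Equals(destination_) holds.
//   MoveOperands copy(reg_a, reg_b);
//   // copy.Blocks(reg_a) is true until copy is eliminated.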
template <InstructionOperand::Kind kOperandKind, int kNumCachedOperands>
class SubKindOperand FINAL : public InstructionOperand {
 public:
  static SubKindOperand* Create(int index, Zone* zone) {
    if (index < kNumCachedOperands) return &cache[index];
    return new (zone) SubKindOperand(index);
  }

  static SubKindOperand* cast(InstructionOperand* op) {
    DCHECK(op->kind() == kOperandKind);
    return reinterpret_cast<SubKindOperand*>(op);
  }
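// Caching sketch: Create() serves indices below kNumCachedOperands from a
// static per-kind cache built by SetUpCaches(); larger indices fall back to
// zone allocation (|zone| is hypothetical):
//
//   typedef SubKindOperand<InstructionOperand::STACK_SLOT, 128> Op;
//   Op* cached = Op::Create(3, zone);     // pointer into the static cache
//   Op* heaped = Op::Create(1000, zone);  // freshly allocated in the zone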
#define INSTRUCTION_TYPEDEF_SUBKIND_OPERAND_CLASS(name, type, number) \
  typedef SubKindOperand<InstructionOperand::type, number> name##Operand;
INSTRUCTION_OPERAND_LIST(INSTRUCTION_TYPEDEF_SUBKIND_OPERAND_CLASS)
#undef INSTRUCTION_TYPEDEF_SUBKIND_OPERAND_CLASS
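// Expansion sketch: for the (Constant, CONSTANT, 128) entry this typedef
// macro produces
//
//   typedef SubKindOperand<InstructionOperand::CONSTANT, 128> ConstantOperand;
//
// and likewise ImmediateOperand, StackSlotOperand, DoubleStackSlotOperand,
// RegisterOperand and DoubleRegisterOperand.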
class ParallelMove FINAL : public ZoneObject {
 public:
  explicit ParallelMove(Zone* zone) : move_operands_(4, zone) {}

  void AddMove(InstructionOperand* from, InstructionOperand* to, Zone* zone) {
    move_operands_.Add(MoveOperands(from, to), zone);
  }

  const ZoneList<MoveOperands>* move_operands() const {
    return &move_operands_;
  }
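// Usage sketch (|zone|, |r0|, |r1| are hypothetical): moves that must happen
// "in parallel" at one gap are collected here and sequenced later by the gap
// resolver.
//
//   ParallelMove* pm = new (zone) ParallelMove(zone);
//   pm->AddMove(r0, r1, zone);  // r1 <- r0
//   const ZoneList<MoveOperands>* ops = pm->move_operands();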
class PointerMap FINAL : public ZoneObject {
 public:
  explicit PointerMap(Zone* zone)
      : pointer_operands_(8, zone),
        untagged_operands_(0, zone),
        instruction_position_(-1) {}

  // Strips any recorded untagged operands from the pointer set before handing
  // it out; afterwards the map describes only tagged pointers.
  const ZoneList<InstructionOperand*>* GetNormalizedOperands() {
    for (int i = 0; i < untagged_operands_.length(); ++i) {
      RemovePointer(untagged_operands_[i]);
    }
    untagged_operands_.Clear();
    return &pointer_operands_;
  }

  void set_instruction_position(int pos) {
    DCHECK(instruction_position_ == -1);
    instruction_position_ = pos;
  }
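// Sketch (|map|, |slot_op|, |raw_op|, |zone| are hypothetical): operands are
// recorded as tagged or untagged during allocation; normalization strips the
// untagged ones so the GC only scans real pointers.
//
//   map->RecordPointer(slot_op, zone);   // slot_op holds a tagged pointer
//   map->RecordUntagged(raw_op, zone);   // raw_op is known to be untagged
//   map->GetNormalizedOperands();        // drops raw_op from the pointer set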
  static Instruction* New(Zone* zone, InstructionCode opcode,
                          size_t output_count, InstructionOperand** outputs,
                          size_t input_count, InstructionOperand** inputs,
                          size_t temp_count, InstructionOperand** temps) {
    // Operands are allocated inline after the Instruction itself; the first
    // slot is already part of sizeof(Instruction), hence the "- 1".
    size_t size = RoundUp(sizeof(Instruction), kPointerSize) +
                  (output_count + input_count + temp_count - 1) *
                      sizeof(InstructionOperand*);
    return new (zone->New(size)) Instruction(
        opcode, output_count, outputs, input_count, inputs, temp_count, temps);
  }
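// Layout sketch: an Instruction is a variable-length zone object whose
// operands are stored inline. For 1 output, 2 inputs and 1 temp:
//
//   operands_[0]        output 0
//   operands_[1..2]     inputs 0 and 1
//   operands_[3]        temp 0
//
// which is the indexing used by OutputAt(), InputAt() and TempAt().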
  // Instructions live in zone memory: placement new is used for construction
  // and individual deletion is a bug.
  void* operator new(size_t, void* location) { return location; }
  void operator delete(void* pointer, void* location) { UNREACHABLE(); }
    for (size_t i = 0; i < output_count; ++i) {
      operands_[i] = outputs[i];
    }
    for (size_t i = 0; i < input_count; ++i) {
      operands_[output_count + i] = inputs[i];
    }
    for (size_t i = 0; i < temp_count; ++i) {
      operands_[output_count + input_count + i] = temps[i];
    }
  BasicBlock* block() const { return block_; }

  static BlockStartInstruction* New(Zone* zone, BasicBlock* block) {
    void* buffer = zone->New(sizeof(BlockStartInstruction));
    return new (buffer) BlockStartInstruction(block);
  }

  static BlockStartInstruction* cast(Instruction* instr) {
    DCHECK(instr->IsBlockStart());
    return static_cast<BlockStartInstruction*>(instr);
  }
  static SourcePositionInstruction* New(Zone* zone, SourcePosition position) {
    void* buffer = zone->New(sizeof(SourcePositionInstruction));
    return new (buffer) SourcePositionInstruction(position);
  }

  static SourcePositionInstruction* cast(Instruction* instr) {
    DCHECK(instr->IsSourcePosition());
    return static_cast<SourcePositionInstruction*>(instr);
  }

  static const SourcePositionInstruction* cast(const Instruction* instr) {
    DCHECK(instr->IsSourcePosition());
    return static_cast<const SourcePositionInstruction*>(instr);
  }

 private:
  explicit SourcePositionInstruction(SourcePosition source_position)
      : Instruction(kSourcePositionInstruction),
        source_position_(source_position) {
    DCHECK(!source_position_.IsInvalid());
    DCHECK(!source_position_.IsUnknown());
  }
class Constant FINAL {
 public:
  explicit Constant(int64_t v) : type_(kInt64), value_(v) {}

  explicit Constant(ExternalReference ref)
      : type_(kExternalReference), value_(bit_cast<intptr_t>(ref)) {}

  explicit Constant(Handle<HeapObject> obj)
      : type_(kHeapObject), value_(bit_cast<intptr_t>(obj)) {}

  int32_t ToInt32() const {
    DCHECK_EQ(kInt32, type());
    return static_cast<int32_t>(value_);
  }

  float ToFloat32() const {
    if (type() == kInt32) return ToInt32();
    return bit_cast<float>(static_cast<int32_t>(value_));
  }

  double ToFloat64() const {
    if (type() == kInt32) return ToInt32();
    return bit_cast<double>(value_);
  }

  ExternalReference ToExternalReference() const {
    return bit_cast<ExternalReference>(static_cast<intptr_t>(value_));
  }

  Handle<HeapObject> ToHeapObject() const {
    return bit_cast<Handle<HeapObject> >(static_cast<intptr_t>(value_));
  }
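// Sketch: every payload is stored in the single integer value_ field, and
// bit_cast reinterprets the stored bits rather than converting numerically.
// For example (hypothetical use):
//
//   Constant c(static_cast<int64_t>(42));
//   int32_t x = c.ToInt32();  // 42: the low 32 bits of value_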
  size_t GetTotalSize() const {
    size_t total_size = 0;
    for (const FrameStateDescriptor* iter = this; iter != NULL;
         iter = iter->outer_state_) {
      total_size += iter->size();
    }
    return total_size;
  }

  size_t GetFrameCount() const {
    size_t count = 0;
    for (const FrameStateDescriptor* iter = this; iter != NULL;
         iter = iter->outer_state_) {
      ++count;
    }
    return count;
  }

  size_t GetJSFrameCount() const {
    size_t count = 0;
    for (const FrameStateDescriptor* iter = this; iter != NULL;
         iter = iter->outer_state_) {
      if (iter->type_ == JS_FRAME) ++count;
    }
    return count;
  }
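// Sketch: descriptors chain through outer_state_, innermost first, mirroring
// the inlining stack; the walks above aggregate over that chain. For f
// inlined into g (descriptors hypothetical):
//
//   FrameStateDescriptor* inner_f = GetDescriptorForF();  // outer_state(): g
//   size_t frames = inner_f->GetFrameCount();             // 2: f plus g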
typedef std::map<int, Constant, std::less<int>,
                 zone_allocator<std::pair<int, Constant> > > ConstantMap;
  InstructionSequence(Linkage* linkage, Graph* graph, Schedule* schedule)
      : graph_(graph),
        linkage_(linkage),
        schedule_(schedule),
        constants_(ConstantMap::key_compare(),
                   ConstantMap::allocator_type(zone())),
        immediates_(zone()),
        instructions_(zone()),
        next_virtual_register_(graph->NodeCount()),
        pointer_maps_(zone()),
        doubles_(std::less<int>(), VirtualRegisterSet::allocator_type(zone())),
        references_(std::less<int>(),
                    VirtualRegisterSet::allocator_type(zone())),
        deoptimization_entries_(zone()) {}
  int BasicBlockCount() const {
    return static_cast<int>(schedule_->rpo_order()->size());
  }

  BasicBlock* BlockAt(int rpo_number) const {
    return (*schedule_->rpo_order())[rpo_number];
  }

  BasicBlock* GetContainingLoop(BasicBlock* block) {
    return block->loop_header_;
  }

  int GetLoopEnd(BasicBlock* block) const { return block->loop_end_; }
  bool IsGapAt(int index) const { return InstructionAt(index)->IsGapMoves(); }

  Instruction* InstructionAt(int index) const {
    DCHECK(index < static_cast<int>(instructions_.size()));
    return instructions_[index];
  }
  void AddConstant(int virtual_register, Constant constant) {
    DCHECK(constants_.find(virtual_register) == constants_.end());
    constants_.insert(std::make_pair(virtual_register, constant));
  }

  Constant GetConstant(int virtual_register) const {
    ConstantMap::const_iterator it = constants_.find(virtual_register);
    DCHECK(it != constants_.end());
    return it->second;
  }
  int AddImmediate(Constant constant) {
    int index = static_cast<int>(immediates_.size());
    immediates_.push_back(constant);
    return index;
  }

  Constant GetImmediate(int index) const {
    DCHECK(index < static_cast<int>(immediates_.size()));
    return immediates_[index];
  }
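// Usage sketch (|seq| and the numbers are hypothetical): constants are keyed
// by virtual register, immediates by their insertion index.
//
//   seq->AddConstant(7, Constant(static_cast<int64_t>(1)));
//   Constant c = seq->GetConstant(7);
//   int idx = seq->AddImmediate(Constant(static_cast<int64_t>(2)));
//   Constant imm = seq->GetImmediate(idx);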
#endif  // V8_COMPILER_INSTRUCTION_H_