#ifdef ENABLE_DISASSEMBLER
class V8NameConverter: public disasm::NameConverter {
 public:
  explicit V8NameConverter(Code* code) : code_(code) {}
  virtual const char* NameOfAddress(byte* pc) const;
  virtual const char* NameInCode(byte* addr) const;
  Code* code() const { return code_; }

 private:
  Code* code_;

  EmbeddedVector<char, 128> v8_buffer_;
};
const char* V8NameConverter::NameOfAddress(byte* pc) const {
  const char* name = code_->GetIsolate()->builtins()->Lookup(pc);

  if (name != NULL) {
    SNPrintF(v8_buffer_, "%s  (%p)", name, pc);
    return v8_buffer_.start();
  }

  if (code_ != NULL) {
    int offs = static_cast<int>(pc - code_->instruction_start());
    // Print as a code offset if the address falls inside the instructions.
    if (0 <= offs && offs < code_->instruction_size()) {
      SNPrintF(v8_buffer_, "%d  (%p)", offs, pc);
      return v8_buffer_.start();
    }
  }

  return disasm::NameConverter::NameOfAddress(pc);
}
const char* V8NameConverter::NameInCode(byte* addr) const {
  // The V8NameConverter is used for well known code, so we can "safely"
  // dereference pointers in generated code.
  return (code_ != NULL) ? reinterpret_cast<const char*>(addr) : "";
}
static void DumpBuffer(OStream* os, StringBuilder* out) {
  (*os) << out->Finalize() << endl;
  out->Reset();
}
static const int kOutBufferSize = 2048 + String::kMaxShortPrintLength;
static const int kRelocInfoPosition = 57;
static int DecodeIt(Isolate* isolate, OStream* os,
                    const V8NameConverter& converter, byte* begin, byte* end) {
  SealHandleScope shs(isolate);
  DisallowHeapAllocation no_alloc;
  ExternalReferenceEncoder ref_encoder(isolate);

  v8::internal::EmbeddedVector<char, 128> decode_buffer;
  v8::internal::EmbeddedVector<char, kOutBufferSize> out_buffer;
  StringBuilder out(out_buffer.start(), out_buffer.length());
  byte* pc = begin;
  disasm::Disassembler d(converter);
  RelocIterator* it = NULL;
  if (converter.code() != NULL) {
    it = new RelocIterator(converter.code());
  }
  int constants = -1;  // no constants being decoded at the start
  while (pc < end) {
    // First decode the instruction so that we know its length.
    byte* prev_pc = pc;
    if (constants > 0) {
      // Print a pending constant pool entry, one word at a time.
      SNPrintF(decode_buffer, "%08x constant",
               *reinterpret_cast<int32_t*>(pc));
      constants -= 1;
      pc += 4;
    } else {
      int num_const = d.ConstantPoolSizeAt(pc);
      if (num_const >= 0) {
        SNPrintF(decode_buffer, "%08x constant pool begin",
                 *reinterpret_cast<int32_t*>(pc));
        constants = num_const;
        pc += 4;
      } else if (it != NULL && !it->done() && it->rinfo()->pc() == pc &&
                 it->rinfo()->rmode() == RelocInfo::INTERNAL_REFERENCE) {
        // A raw pointer embedded in the code stream, e.g. a jump table entry.
        byte* ptr = *reinterpret_cast<byte**>(pc);
        SNPrintF(decode_buffer,
                 "%08" V8PRIxPTR " jump table entry %4" V8PRIdPTR,
                 reinterpret_cast<intptr_t>(ptr), ptr - begin);
        pc += 4;
      } else {
        decode_buffer[0] = '\0';
        pc += d.InstructionDecode(decode_buffer, pc);
      }
    }
    // Collect the RelocInfo for this instruction (prev_pc .. pc-1).
    List<const char*> comments(4);
    List<byte*> pcs(1);
    List<RelocInfo::Mode> rmodes(1);
    List<intptr_t> datas(1);
    if (it != NULL) {
      while (!it->done() && it->rinfo()->pc() < pc) {
        if (RelocInfo::IsComment(it->rinfo()->rmode())) {
          // For comments just collect the text.
          comments.Add(reinterpret_cast<const char*>(it->rinfo()->data()));
        } else {
          // For other reloc info collect all data.
          pcs.Add(it->rinfo()->pc());
          rmodes.Add(it->rinfo()->rmode());
          datas.Add(it->rinfo()->data());
        }
        it->next();
      }
    }
    // Comments collected for this instruction.
    for (int i = 0; i < comments.length(); i++) {
      out.AddFormatted(" %s", comments[i]);
      DumpBuffer(os, &out);
    }

    // Instruction address, offset from begin, and the decoded instruction.
    out.AddFormatted("%p %4d ", prev_pc, prev_pc - begin);
    out.AddFormatted("%s", decode_buffer.start());
    // Print all the reloc info for this instruction which are not comments.
    for (int i = 0; i < pcs.length(); i++) {
      // Put together the reloc info.
      RelocInfo relocinfo(pcs[i], rmodes[i], datas[i], converter.code());

      // Indent the printing of the reloc info.
      if (i == 0) {
        // The first reloc info is printed after the disassembled instruction.
        out.AddPadding(' ', kRelocInfoPosition - out.position());
      } else {
        // Additional reloc infos are printed on separate lines.
        DumpBuffer(os, &out);
        out.AddPadding(' ', kRelocInfoPosition);
      }
      RelocInfo::Mode rmode = relocinfo.rmode();
      if (RelocInfo::IsPosition(rmode)) {
        if (RelocInfo::IsStatementPosition(rmode)) {
          out.AddFormatted(" ;; debug: statement %d", relocinfo.data());
        } else {
          out.AddFormatted(" ;; debug: position %d", relocinfo.data());
        }
      } else if (rmode == RelocInfo::EMBEDDED_OBJECT) {
        HeapStringAllocator allocator;
        StringStream accumulator(&allocator);
        relocinfo.target_object()->ShortPrint(&accumulator);
        SmartArrayPointer<const char> obj_name = accumulator.ToCString();
        out.AddFormatted(" ;; object: %s", obj_name.get());
      } else if (rmode == RelocInfo::EXTERNAL_REFERENCE) {
        const char* reference_name =
            ref_encoder.NameOfAddress(relocinfo.target_reference());
        out.AddFormatted(" ;; external reference (%s)", reference_name);
      } else if (RelocInfo::IsCodeTarget(rmode)) {
        out.AddFormatted(" ;; code:");
        if (rmode == RelocInfo::CONSTRUCT_CALL) {
          out.AddFormatted(" constructor,");
        }
        Code* code = Code::GetCodeFromTargetAddress(relocinfo.target_address());
        Code::Kind kind = code->kind();
        if (code->is_inline_cache_stub()) {
          if (kind == Code::LOAD_IC &&
              LoadICState::GetContextualMode(code->extra_ic_state()) ==
                  CONTEXTUAL) {
            out.AddFormatted(" contextual,");
          }
          InlineCacheState ic_state = code->ic_state();
          out.AddFormatted(" %s, %s", Code::Kind2String(kind),
                           Code::ICState2String(ic_state));
          if (ic_state == MONOMORPHIC) {
            Code::StubType type = code->type();
            out.AddFormatted(", %s", Code::StubType2String(type));
          }
        } else if (kind == Code::STUB || kind == Code::HANDLER) {
          // Get the STUB key and extract major and minor key.
          uint32_t key = code->stub_key();
          uint32_t minor_key = CodeStub::MinorKeyFromKey(key);
          CodeStub::Major major_key = CodeStub::GetMajorKey(code);
          DCHECK(major_key == CodeStub::MajorKeyFromKey(key));
          out.AddFormatted(" %s, %s, ", Code::Kind2String(kind),
                           CodeStub::MajorName(major_key, false));
          switch (major_key) {
            case CodeStub::CallFunction: {
              int argc = CallFunctionStub::ExtractArgcFromMinorKey(minor_key);
              out.AddFormatted("argc = %d", argc);
              break;
            }
            default:
              out.AddFormatted("minor: %d", minor_key);
          }
        } else {
          out.AddFormatted(" %s", Code::Kind2String(kind));
        }
        if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
          out.AddFormatted(" (id = %d)", static_cast<int>(relocinfo.data()));
        }
      } else if (RelocInfo::IsRuntimeEntry(rmode) &&
                 isolate->deoptimizer_data() != NULL) {
        // A runtime entry relocinfo might be a deoptimization bailout.
        Address addr = relocinfo.target_address();
        int id = Deoptimizer::GetDeoptimizationId(isolate, addr,
                                                  Deoptimizer::EAGER);
        if (id == Deoptimizer::kNotDeoptimizationEntry) {
          id = Deoptimizer::GetDeoptimizationId(isolate, addr,
                                                Deoptimizer::LAZY);
          if (id == Deoptimizer::kNotDeoptimizationEntry) {
            id = Deoptimizer::GetDeoptimizationId(isolate, addr,
                                                  Deoptimizer::SOFT);
            if (id == Deoptimizer::kNotDeoptimizationEntry) {
              out.AddFormatted(" ;; %s", RelocInfo::RelocModeName(rmode));
            } else {
              out.AddFormatted(" ;; soft deoptimization bailout %d", id);
            }
          } else {
            out.AddFormatted(" ;; lazy deoptimization bailout %d", id);
          }
        } else {
          out.AddFormatted(" ;; deoptimization bailout %d", id);
        }
      } else {
        out.AddFormatted(" ;; %s", RelocInfo::RelocModeName(rmode));
      }
    }
    DumpBuffer(os, &out);
  }

  // Emit comments following the last instruction (if any).
  if (it != NULL) {
    for ( ; !it->done(); it->next()) {
      if (RelocInfo::IsComment(it->rinfo()->rmode())) {
        out.AddFormatted(" %s",
                         reinterpret_cast<const char*>(it->rinfo()->data()));
        DumpBuffer(os, &out);
      }
    }
  }

  delete it;
  return static_cast<int>(pc - begin);
}
int Disassembler::Decode(Isolate* isolate, OStream* os, byte* begin, byte* end,
                         Code* code) {
  V8NameConverter v8NameConverter(code);
  return DecodeIt(isolate, os, v8NameConverter, begin, end);
}
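// Usage sketch (not part of this file): one way the Disassembler::Decode
// entry point above can be driven. The OFStream wrapper and the way the Code
// object is obtained are assumptions for illustration; inside V8 the usual
// caller is Code::Disassemble().
//
//   OFStream os(stdout);
//   Code* code = ...;  // some generated Code object, e.g. a builtin
//   byte* begin = code->instruction_start();
//   byte* end = begin + code->instruction_size();
//   Disassembler::Decode(isolate, &os, begin, end, code);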