// Excerpts from V8's counters header (src/counters.h).

// StatsCounter mutations are no-ops when the embedder has not supplied
// storage for this counter (GetPtr() then returns NULL):
void Set(int value) {
  int* loc = GetPtr();
  if (loc) *loc = value;
}

void Decrement(int value) {
  int* loc = GetPtr();
  if (loc) (*loc) -= value;
}
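// Usage sketch (illustrative): counters are bumped through the Counters
// accessors generated further below, e.g. the parser accounting for the
// size of its input:
//
//   isolate->counters()->total_parse_size()->Increment(source_length);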
// Excerpt from HistogramTimer: debug-only access to the underlying
// timer, used by HistogramTimerScope below to detect nesting.
base::ElapsedTimer* timer() { return &timer_; }
// Excerpts from HistogramTimerScope, the RAII helper for HistogramTimer.
// In DEBUG builds a scope may nest on the same timer (allow_nesting);
// the inner scope then skips starting and stopping it.
explicit HistogramTimerScope(HistogramTimer* timer,
                             bool allow_nesting = false)
    : timer_(timer), skipped_timer_start_(false) {
  if (timer_->timer()->IsStarted() && allow_nesting) {
    skipped_timer_start_ = true;
  } else {
    timer_->Start();
  }
}
~HistogramTimerScope() {
  if (!skipped_timer_start_) {
    timer_->Stop();
  }
}

HistogramTimer* timer_;
bool skipped_timer_start_;
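// Usage sketch (illustrative; compile() is one of the accessors
// generated from HISTOGRAM_TIMER_LIST below):
//
//   {
//     HistogramTimerScope timer_scope(isolate->counters()->compile());
//     // ... compilation work attributed to V8.Compile ...
//   }  // timer stops and the sample is recorded here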
#define HISTOGRAM_RANGE_LIST(HR) \
  /* Generic range histograms: HR(name, caption, min, max, num_buckets). */ \
  HR(gc_idle_time_allotted_in_ms, V8.GCIdleTimeAllottedInMS, 0, 10000, 101) \
  HR(gc_idle_time_limit_overshot, V8.GCIdleTimeLimit.Overshot, 0, 10000, 101) \
  HR(gc_idle_time_limit_undershot, V8.GCIdleTimeLimit.Undershot, 0, 10000, 101)
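// Usage sketch (illustrative): a range histogram receives one sample per
// event via Histogram::AddSample, e.g. when a GC idle round finishes:
//
//   isolate->counters()->gc_idle_time_allotted_in_ms()->AddSample(
//       static_cast<int>(idle_time_in_ms));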
#define HISTOGRAM_TIMER_LIST(HT) \
  /* Garbage collection timers. */ \
  HT(gc_compactor, V8.GCCompactor) \
  HT(gc_scavenger, V8.GCScavenger) \
  HT(gc_context, V8.GCContext) \
  HT(gc_idle_notification, V8.GCIdleNotification) \
  HT(gc_incremental_marking, V8.GCIncrementalMarking) \
  HT(gc_low_memory_notification, V8.GCLowMemoryNotification) \
  /* Parsing timers. */ \
  HT(parse, V8.Parse) \
  HT(parse_lazy, V8.ParseLazy) \
  HT(pre_parse, V8.PreParse) \
  /* Compilation times. */ \
  HT(compile, V8.Compile) \
  HT(compile_eval, V8.CompileEval) \
  /* Serialization as part of compilation (code caching). */ \
  HT(compile_serialize, V8.CompileSerialize) \
  HT(compile_deserialize, V8.CompileDeserialize)
#define HISTOGRAM_PERCENTAGE_LIST(HP) \
  /* Heap fragmentation. */ \
  HP(external_fragmentation_total, \
     V8.MemoryExternalFragmentationTotal) \
  HP(external_fragmentation_old_pointer_space, \
     V8.MemoryExternalFragmentationOldPointerSpace) \
  HP(external_fragmentation_old_data_space, \
     V8.MemoryExternalFragmentationOldDataSpace) \
  HP(external_fragmentation_code_space, \
     V8.MemoryExternalFragmentationCodeSpace) \
  HP(external_fragmentation_map_space, \
     V8.MemoryExternalFragmentationMapSpace) \
  HP(external_fragmentation_cell_space, \
     V8.MemoryExternalFragmentationCellSpace) \
  HP(external_fragmentation_property_cell_space, \
     V8.MemoryExternalFragmentationPropertyCellSpace) \
  HP(external_fragmentation_lo_space, \
     V8.MemoryExternalFragmentationLoSpace) \
  /* Percentages of heap committed to each space. */ \
  HP(heap_fraction_new_space, \
     V8.MemoryHeapFractionNewSpace) \
  HP(heap_fraction_old_pointer_space, \
     V8.MemoryHeapFractionOldPointerSpace) \
  HP(heap_fraction_old_data_space, \
     V8.MemoryHeapFractionOldDataSpace) \
  HP(heap_fraction_code_space, \
     V8.MemoryHeapFractionCodeSpace) \
  HP(heap_fraction_map_space, \
     V8.MemoryHeapFractionMapSpace) \
  HP(heap_fraction_cell_space, \
     V8.MemoryHeapFractionCellSpace) \
  HP(heap_fraction_property_cell_space, \
     V8.MemoryHeapFractionPropertyCellSpace) \
  HP(heap_fraction_lo_space, \
     V8.MemoryHeapFractionLoSpace) \
  /* Percentage of crankshafted codegen. */ \
  HP(codegen_fraction_crankshaft, \
     V8.CodegenFractionCrankshaft)
#define HISTOGRAM_MEMORY_LIST(HM) \
  HM(heap_sample_total_committed, V8.MemoryHeapSampleTotalCommitted) \
  HM(heap_sample_total_used, V8.MemoryHeapSampleTotalUsed) \
  HM(heap_sample_map_space_committed, \
     V8.MemoryHeapSampleMapSpaceCommitted) \
  HM(heap_sample_cell_space_committed, \
     V8.MemoryHeapSampleCellSpaceCommitted) \
  HM(heap_sample_property_cell_space_committed, \
     V8.MemoryHeapSamplePropertyCellSpaceCommitted) \
  HM(heap_sample_code_space_committed, \
     V8.MemoryHeapSampleCodeSpaceCommitted) \
  HM(heap_sample_maximum_committed, \
     V8.MemoryHeapSampleMaximumCommitted)
// WARNING: STATS_COUNTER_LIST_* cannot be nested!
#define STATS_COUNTER_LIST_1(SC) \
  /* Global handle count. */ \
  SC(global_handles, V8.GlobalHandles) \
  /* OS memory allocated. */ \
  SC(memory_allocated, V8.OsMemoryAllocated) \
  SC(normalized_maps, V8.NormalizedMaps) \
  SC(props_to_dictionary, V8.ObjectPropertiesToDictionary) \
  SC(elements_to_dictionary, V8.ObjectElementsToDictionary) \
  SC(alive_after_last_gc, V8.AliveAfterLastGC) \
  SC(objs_since_last_young, V8.ObjsSinceLastYoung) \
  SC(objs_since_last_full, V8.ObjsSinceLastFull) \
  SC(string_table_capacity, V8.StringTableCapacity) \
  SC(number_of_symbols, V8.NumberOfSymbols) \
  SC(script_wrappers, V8.ScriptWrappers) \
  SC(call_initialize_stubs, V8.CallInitializeStubs) \
  SC(call_premonomorphic_stubs, V8.CallPreMonomorphicStubs) \
  SC(call_normal_stubs, V8.CallNormalStubs) \
  SC(call_megamorphic_stubs, V8.CallMegamorphicStubs) \
  SC(inlined_copied_elements, V8.InlinedCopiedElements) \
  SC(arguments_adaptors, V8.ArgumentsAdaptors) \
  SC(compilation_cache_hits, V8.CompilationCacheHits) \
  SC(compilation_cache_misses, V8.CompilationCacheMisses) \
  SC(string_ctor_calls, V8.StringConstructorCalls) \
  SC(string_ctor_conversions, V8.StringConstructorConversions) \
  SC(string_ctor_cached_number, V8.StringConstructorCachedNumber) \
  SC(string_ctor_string_value, V8.StringConstructorStringValue) \
  SC(string_ctor_gc_required, V8.StringConstructorGCRequired) \
  /* Amount of evaled source code. */ \
  SC(total_eval_size, V8.TotalEvalSize) \
  /* Amount of loaded source code. */ \
  SC(total_load_size, V8.TotalLoadSize) \
  /* Amount of parsed source code. */ \
  SC(total_parse_size, V8.TotalParseSize) \
  /* Amount of source code skipped over using preparsing. */ \
  SC(total_preparse_skipped, V8.TotalPreparseSkipped) \
  /* Number of symbol lookups skipped using preparsing. */ \
  SC(total_preparse_symbols_skipped, V8.TotalPreparseSymbolSkipped) \
  /* Amount of compiled source code. */ \
  SC(total_compile_size, V8.TotalCompileSize) \
  /* Amount of source code compiled with the full codegen. */ \
  SC(total_full_codegen_source_size, V8.TotalFullCodegenSourceSize) \
  /* Number of contexts created from scratch. */ \
  SC(contexts_created_from_scratch, V8.ContextsCreatedFromScratch) \
  /* Number of contexts created by partial snapshot. */ \
  SC(contexts_created_by_snapshot, V8.ContextsCreatedBySnapshot) \
  /* Number of code objects found from pc. */ \
  SC(pc_to_code, V8.PcToCode) \
  SC(pc_to_code_cached, V8.PcToCodeCached) \
  /* The store-buffer implementation of the write barrier. */ \
  SC(store_buffer_compactions, V8.StoreBufferCompactions) \
  SC(store_buffer_overflows, V8.StoreBufferOverflows)
#define STATS_COUNTER_LIST_2(SC) \
  /* Number of code stubs. */ \
  SC(code_stubs, V8.CodeStubs) \
  /* Amount of stub code. */ \
  SC(total_stubs_code_size, V8.TotalStubsCodeSize) \
  /* Amount of (JS) compiled code. */ \
  SC(total_compiled_code_size, V8.TotalCompiledCodeSize) \
  SC(gc_compactor_caused_by_request, V8.GCCompactorCausedByRequest) \
  SC(gc_compactor_caused_by_promoted_data, V8.GCCompactorCausedByPromotedData) \
  SC(gc_compactor_caused_by_oldspace_exhaustion, \
     V8.GCCompactorCausedByOldspaceExhaustion) \
  SC(gc_last_resort_from_js, V8.GCLastResortFromJS) \
  SC(gc_last_resort_from_handles, V8.GCLastResortFromHandles) \
  /* How is the generic keyed-load stub used? */ \
  SC(keyed_load_generic_smi, V8.KeyedLoadGenericSmi) \
  SC(keyed_load_generic_symbol, V8.KeyedLoadGenericSymbol) \
  SC(keyed_load_generic_lookup_cache, V8.KeyedLoadGenericLookupCache) \
  SC(keyed_load_generic_slow, V8.KeyedLoadGenericSlow) \
  SC(keyed_load_polymorphic_stubs, V8.KeyedLoadPolymorphicStubs) \
  SC(keyed_load_external_array_slow, V8.KeyedLoadExternalArraySlow) \
  /* How is the generic keyed-call stub used? */ \
  SC(keyed_call_generic_smi_fast, V8.KeyedCallGenericSmiFast) \
  SC(keyed_call_generic_smi_dict, V8.KeyedCallGenericSmiDict) \
  SC(keyed_call_generic_lookup_cache, V8.KeyedCallGenericLookupCache) \
  SC(keyed_call_generic_lookup_dict, V8.KeyedCallGenericLookupDict) \
  SC(keyed_call_generic_slow, V8.KeyedCallGenericSlow) \
  SC(keyed_call_generic_slow_load, V8.KeyedCallGenericSlowLoad) \
  SC(named_load_global_stub, V8.NamedLoadGlobalStub) \
  SC(named_store_global_inline, V8.NamedStoreGlobalInline) \
  SC(named_store_global_inline_miss, V8.NamedStoreGlobalInlineMiss) \
  SC(keyed_store_polymorphic_stubs, V8.KeyedStorePolymorphicStubs) \
  SC(keyed_store_external_array_slow, V8.KeyedStoreExternalArraySlow) \
  SC(store_normal_miss, V8.StoreNormalMiss) \
  SC(store_normal_hit, V8.StoreNormalHit) \
  SC(cow_arrays_created_stub, V8.COWArraysCreatedStub) \
  SC(cow_arrays_created_runtime, V8.COWArraysCreatedRuntime) \
  SC(cow_arrays_converted, V8.COWArraysConverted) \
  SC(call_miss, V8.CallMiss) \
  SC(keyed_call_miss, V8.KeyedCallMiss) \
  SC(load_miss, V8.LoadMiss) \
  SC(keyed_load_miss, V8.KeyedLoadMiss) \
  SC(call_const, V8.CallConst) \
  SC(call_const_fast_api, V8.CallConstFastApi) \
  SC(call_const_interceptor, V8.CallConstInterceptor) \
  SC(call_const_interceptor_fast_api, V8.CallConstInterceptorFastApi) \
  SC(call_global_inline, V8.CallGlobalInline) \
  SC(call_global_inline_miss, V8.CallGlobalInlineMiss) \
  SC(constructed_objects, V8.ConstructedObjects) \
  SC(constructed_objects_runtime, V8.ConstructedObjectsRuntime) \
  SC(negative_lookups, V8.NegativeLookups) \
  SC(negative_lookups_miss, V8.NegativeLookupsMiss) \
  SC(megamorphic_stub_cache_probes, V8.MegamorphicStubCacheProbes) \
  SC(megamorphic_stub_cache_misses, V8.MegamorphicStubCacheMisses) \
  SC(megamorphic_stub_cache_updates, V8.MegamorphicStubCacheUpdates) \
  SC(array_function_runtime, V8.ArrayFunctionRuntime) \
  SC(array_function_native, V8.ArrayFunctionNative) \
  SC(for_in, V8.ForIn) \
  SC(enum_cache_hits, V8.EnumCacheHits) \
  SC(enum_cache_misses, V8.EnumCacheMisses) \
  SC(zone_segment_bytes, V8.ZoneSegmentBytes) \
  SC(fast_new_closure_total, V8.FastNewClosureTotal) \
  SC(fast_new_closure_try_optimized, V8.FastNewClosureTryOptimized) \
  SC(fast_new_closure_install_optimized, V8.FastNewClosureInstallOptimized) \
  SC(string_add_runtime, V8.StringAddRuntime) \
  SC(string_add_native, V8.StringAddNative) \
  SC(string_add_runtime_ext_to_one_byte, V8.StringAddRuntimeExtToOneByte) \
  SC(sub_string_runtime, V8.SubStringRuntime) \
  SC(sub_string_native, V8.SubStringNative) \
  SC(string_add_make_two_char, V8.StringAddMakeTwoChar) \
  SC(string_compare_native, V8.StringCompareNative) \
  SC(string_compare_runtime, V8.StringCompareRuntime) \
  SC(regexp_entry_runtime, V8.RegExpEntryRuntime) \
  SC(regexp_entry_native, V8.RegExpEntryNative) \
  SC(number_to_string_native, V8.NumberToStringNative) \
  SC(number_to_string_runtime, V8.NumberToStringRuntime) \
  SC(math_acos, V8.MathAcos) \
  SC(math_asin, V8.MathAsin) \
  SC(math_atan, V8.MathAtan) \
  SC(math_atan2, V8.MathAtan2) \
  SC(math_exp, V8.MathExp) \
  SC(math_floor, V8.MathFloor) \
  SC(math_log, V8.MathLog) \
  SC(math_pow, V8.MathPow) \
  SC(math_round, V8.MathRound) \
  SC(math_sqrt, V8.MathSqrt) \
  SC(stack_interrupts, V8.StackInterrupts) \
  SC(runtime_profiler_ticks, V8.RuntimeProfilerTicks) \
  SC(bounds_checks_eliminated, V8.BoundsChecksEliminated) \
  SC(bounds_checks_hoisted, V8.BoundsChecksHoisted) \
  SC(soft_deopts_requested, V8.SoftDeoptsRequested) \
  SC(soft_deopts_inserted, V8.SoftDeoptsInserted) \
  SC(soft_deopts_executed, V8.SoftDeoptsExecuted) \
  /* Number of write barriers in generated code. */ \
  SC(write_barriers_dynamic, V8.WriteBarriersDynamic) \
  SC(write_barriers_static, V8.WriteBarriersStatic) \
  SC(new_space_bytes_available, V8.MemoryNewSpaceBytesAvailable) \
  SC(new_space_bytes_committed, V8.MemoryNewSpaceBytesCommitted) \
  SC(new_space_bytes_used, V8.MemoryNewSpaceBytesUsed) \
  SC(old_pointer_space_bytes_available, \
     V8.MemoryOldPointerSpaceBytesAvailable) \
  SC(old_pointer_space_bytes_committed, \
     V8.MemoryOldPointerSpaceBytesCommitted) \
  SC(old_pointer_space_bytes_used, V8.MemoryOldPointerSpaceBytesUsed) \
  SC(old_data_space_bytes_available, V8.MemoryOldDataSpaceBytesAvailable) \
  SC(old_data_space_bytes_committed, V8.MemoryOldDataSpaceBytesCommitted) \
  SC(old_data_space_bytes_used, V8.MemoryOldDataSpaceBytesUsed) \
  SC(code_space_bytes_available, V8.MemoryCodeSpaceBytesAvailable) \
  SC(code_space_bytes_committed, V8.MemoryCodeSpaceBytesCommitted) \
  SC(code_space_bytes_used, V8.MemoryCodeSpaceBytesUsed) \
  SC(map_space_bytes_available, V8.MemoryMapSpaceBytesAvailable) \
  SC(map_space_bytes_committed, V8.MemoryMapSpaceBytesCommitted) \
  SC(map_space_bytes_used, V8.MemoryMapSpaceBytesUsed) \
  SC(cell_space_bytes_available, V8.MemoryCellSpaceBytesAvailable) \
  SC(cell_space_bytes_committed, V8.MemoryCellSpaceBytesCommitted) \
  SC(cell_space_bytes_used, V8.MemoryCellSpaceBytesUsed) \
  SC(property_cell_space_bytes_available, \
     V8.MemoryPropertyCellSpaceBytesAvailable) \
  SC(property_cell_space_bytes_committed, \
     V8.MemoryPropertyCellSpaceBytesCommitted) \
  SC(property_cell_space_bytes_used, V8.MemoryPropertyCellSpaceBytesUsed) \
  SC(lo_space_bytes_available, V8.MemoryLoSpaceBytesAvailable) \
  SC(lo_space_bytes_committed, V8.MemoryLoSpaceBytesCommitted) \
  SC(lo_space_bytes_used, V8.MemoryLoSpaceBytesUsed)
// The Counters class holds all the V8 counters in use. Each list macro
// above is expanded three times with small adapter macros: once for the
// accessors (here), once for the counter ids, and once for the member
// fields.
class Counters {
 public:
#define HR(name, caption, min, max, num_buckets) \
  Histogram* name() { return &name##_; }
  HISTOGRAM_RANGE_LIST(HR)
#undef HR

#define HT(name, caption) \
  HistogramTimer* name() { return &name##_; }
  HISTOGRAM_TIMER_LIST(HT)
#undef HT

#define HP(name, caption) \
  Histogram* name() { return &name##_; }
  HISTOGRAM_PERCENTAGE_LIST(HP)
#undef HP

#define HM(name, caption) \
  Histogram* name() { return &name##_; }
  HISTOGRAM_MEMORY_LIST(HM)
#undef HM

#define SC(name, caption) \
  StatsCounter* name() { return &name##_; }
  STATS_COUNTER_LIST_1(SC)
  STATS_COUNTER_LIST_2(SC)
#undef SC
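// For illustration (not part of the header): applied to the first entry
// of STATS_COUNTER_LIST_1, the SC adapter above expands to
//
//   StatsCounter* global_handles() { return &global_handles_; }
//
// i.e. every list entry becomes an accessor returning a pointer to the
// member field of the same name declared further down.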
#define SC(name) \
  StatsCounter* count_of_##name() { return &count_of_##name##_; } \
  StatsCounter* size_of_##name() { return &size_of_##name##_; }
  INSTANCE_TYPE_LIST(SC)
#undef SC

#define SC(name) \
  StatsCounter* count_of_CODE_TYPE_##name() \
      { return &count_of_CODE_TYPE_##name##_; } \
  StatsCounter* size_of_CODE_TYPE_##name() \
      { return &size_of_CODE_TYPE_##name##_; }
  CODE_KIND_LIST(SC)
#undef SC

#define SC(name) \
  StatsCounter* count_of_FIXED_ARRAY_##name() \
      { return &count_of_FIXED_ARRAY_##name##_; } \
  StatsCounter* size_of_FIXED_ARRAY_##name() \
      { return &size_of_FIXED_ARRAY_##name##_; }
  FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(SC)
#undef SC

#define SC(name) \
  StatsCounter* count_of_CODE_AGE_##name() \
      { return &count_of_CODE_AGE_##name##_; } \
  StatsCounter* size_of_CODE_AGE_##name() \
      { return &size_of_CODE_AGE_##name##_; }
  CODE_AGE_LIST_COMPLETE(SC)
#undef SC
  // Integer ids for all counters, generated from the same lists.
  enum Id {
#define RATE_ID(name, caption) k_##name,
    HISTOGRAM_TIMER_LIST(RATE_ID)
#undef RATE_ID
#define PERCENTAGE_ID(name, caption) k_##name,
    HISTOGRAM_PERCENTAGE_LIST(PERCENTAGE_ID)
#undef PERCENTAGE_ID
#define MEMORY_ID(name, caption) k_##name,
    HISTOGRAM_MEMORY_LIST(MEMORY_ID)
#undef MEMORY_ID
#define COUNTER_ID(name, caption) k_##name,
    STATS_COUNTER_LIST_1(COUNTER_ID)
    STATS_COUNTER_LIST_2(COUNTER_ID)
#undef COUNTER_ID
#define COUNTER_ID(name) kCountOf##name, kSizeOf##name,
    INSTANCE_TYPE_LIST(COUNTER_ID)
#undef COUNTER_ID
#define COUNTER_ID(name) kCountOfCODE_TYPE_##name, \
    kSizeOfCODE_TYPE_##name,
    CODE_KIND_LIST(COUNTER_ID)
#undef COUNTER_ID
#define COUNTER_ID(name) kCountOfFIXED_ARRAY__##name, \
    kSizeOfFIXED_ARRAY__##name,
    FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(COUNTER_ID)
#undef COUNTER_ID
#define COUNTER_ID(name) kCountOfCODE_AGE__##name, \
    kSizeOfCODE_AGE__##name,
    CODE_AGE_LIST_COMPLETE(COUNTER_ID)
#undef COUNTER_ID
    stats_counter_count
  };
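// For illustration, the fully expanded enum has the shape
//
//   enum Id {
//     k_gc_compactor, k_gc_scavenger, ...,
//     kCountOfJS_OBJECT_TYPE, kSizeOfJS_OBJECT_TYPE, ...,
//     stats_counter_count
//   };
//
// so every counter gets a dense integer id, with stats_counter_count
// giving the total number of counters.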
 private:
#define HR(name, caption, min, max, num_buckets) Histogram name##_;
  HISTOGRAM_RANGE_LIST(HR)
#undef HR

#define HT(name, caption) \
  HistogramTimer name##_;
  HISTOGRAM_TIMER_LIST(HT)
#undef HT

#define HP(name, caption) \
  Histogram name##_;
  HISTOGRAM_PERCENTAGE_LIST(HP)
#undef HP

#define HM(name, caption) \
  Histogram name##_;
  HISTOGRAM_MEMORY_LIST(HM)
#undef HM

#define SC(name, caption) \
  StatsCounter name##_;
  STATS_COUNTER_LIST_1(SC)
  STATS_COUNTER_LIST_2(SC)
#undef SC

#define SC(name) \
  StatsCounter size_of_##name##_; \
  StatsCounter count_of_##name##_;
  INSTANCE_TYPE_LIST(SC)
#undef SC

#define SC(name) \
  StatsCounter size_of_CODE_TYPE_##name##_; \
  StatsCounter count_of_CODE_TYPE_##name##_;
  CODE_KIND_LIST(SC)
#undef SC

#define SC(name) \
  StatsCounter size_of_FIXED_ARRAY_##name##_; \
  StatsCounter count_of_FIXED_ARRAY_##name##_;
  FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(SC)
#undef SC

#define SC(name) \
  StatsCounter size_of_CODE_AGE_##name##_; \
  StatsCounter count_of_CODE_AGE_##name##_;
  CODE_AGE_LIST_COMPLETE(SC)
#undef SC
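// Sketch of the matching initialization (the real definition lives in
// counters.cc; the bucket constants shown are assumptions of this sketch):
//
//   Counters::Counters(Isolate* isolate) {
//   #define HT(name, caption) \
//     name##_ = HistogramTimer(#caption, 0, 10000, 50, isolate);
//     HISTOGRAM_TIMER_LIST(HT)
//   #undef HT
//     // ... analogous expansions for HR, HP, HM and the SC lists ...
//   }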
  friend class Isolate;

  explicit Counters(Isolate* isolate);

  DISALLOW_IMPLICIT_CONSTRUCTORS(Counters);
};

// Summaries of the supporting classes declared in this header:

// StatsTable bridges V8 to the embedder's counter and histogram
// machinery via three registered callbacks.
class StatsTable {
 public:
  void SetCounterFunction(CounterLookupCallback f);
  void SetCreateHistogramFunction(CreateHistogramCallback f);
  void SetAddHistogramSampleFunction(AddHistogramSampleCallback f);
  bool HasCounterFunction() const;
  // Look up the location of a counter by name; NULL if no callback.
  int* FindLocation(const char* name);
  void* CreateHistogram(const char* name, int min, int max, size_t buckets);
  void AddHistogramSample(void* histogram, int sample);

 private:
  CounterLookupCallback lookup_function_;
  CreateHistogramCallback create_histogram_function_;
  AddHistogramSampleCallback add_histogram_sample_function_;

  DISALLOW_COPY_AND_ASSIGN(StatsTable);
};

// StatsCounter: a named integer counter whose storage location in the
// stats table is looked up once and then cached.
class StatsCounter {
 public:
  StatsCounter(Isolate* isolate, const char* name);
  void Set(int value);
  void Increment(int value);
  void Decrement(int value);
  int* GetInternalPointer();

 private:
  int* FindLocationInStatsTable() const;
};

// Histogram: a named histogram created through the embedder callback.
class Histogram {
 public:
  Histogram(const char* name, int min, int max, int num_buckets,
            Isolate* isolate);
  void AddSample(int sample);
  Isolate* isolate() const;

 private:
  void* CreateHistogram() const;
};

// HistogramTimer: a Histogram that records the elapsed time between
// Start() and Stop() as a sample.
class HistogramTimer : public Histogram {
 public:
  HistogramTimer(const char* name, int min, int max, int num_buckets,
                 Isolate* isolate);

 private:
  base::ElapsedTimer timer_;
};
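// Usage sketch from the embedder's side (illustrative; the public
// v8::Isolate::SetCounterFunction mirrors StatsTable's setter, and the
// map plus callback below are hypothetical helpers):
//
//   #include <map>
//   #include <string>
//   #include "include/v8.h"
//
//   static std::map<std::string, int> g_counters;
//
//   static int* LookupCounter(const char* name) {
//     return &g_counters[name];  // std::map keeps value addresses stable
//   }
//
//   // At startup: every V8.* counter now writes into g_counters.
//   isolate->SetCounterFunction(LookupCounter);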
// Embedder callback types used by StatsTable (declared in include/v8.h):
typedef int* (*CounterLookupCallback)(const char* name);
typedef void* (*CreateHistogramCallback)(const char* name, int min, int max,
                                         size_t buckets);
typedef void (*AddHistogramSampleCallback)(void* histogram, int sample);
// The per-type lists consumed above -- INSTANCE_TYPE_LIST(V),
// CODE_KIND_LIST(V), FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(V) and
// CODE_AGE_LIST_COMPLETE(V) -- are defined in objects.h; each expands
// V(name) once per entry.