V8 Project
v8::internal::RuntimeProfiler Class Reference

#include <runtime-profiler.h>


Public Member Functions

 RuntimeProfiler (Isolate *isolate)
 
void OptimizeNow ()
 
void NotifyICChanged ()
 
void AttemptOnStackReplacement (JSFunction *function, int nesting_levels=1)
 

Private Member Functions

void Optimize (JSFunction *function, const char *reason)
 
bool CodeSizeOKForOSR (Code *shared_code)
 

Private Attributes

Isolate * isolate_
 
bool any_ic_changed_
 

Detailed Description

Definition at line 22 of file runtime-profiler.h.

Constructor & Destructor Documentation

◆ RuntimeProfiler()

v8::internal::RuntimeProfiler::RuntimeProfiler ( Isolate *  isolate )
explicit

Definition at line 54 of file runtime-profiler.cc.

54 RuntimeProfiler::RuntimeProfiler(Isolate* isolate)
55     : isolate_(isolate),
56       any_ic_changed_(false) {
57 }

Member Function Documentation

◆ AttemptOnStackReplacement()

void v8::internal::RuntimeProfiler::AttemptOnStackReplacement ( JSFunction *  function,
int  nesting_levels = 1 
)

Definition at line 121 of file runtime-profiler.cc.

122  {
123  SharedFunctionInfo* shared = function->shared();
124  // See AlwaysFullCompiler (in compiler.cc) comment on why we need
125  // Debug::has_break_points().
126  if (!FLAG_use_osr ||
127      isolate_->DebuggerHasBreakPoints() ||
128      function->IsBuiltin()) {
129  return;
130  }
131 
132  // If the code is not optimizable, don't try OSR.
133  if (!shared->code()->optimizable()) return;
134 
135  // We are not prepared to do OSR for a function that already has an
136  // allocated arguments object. The optimized code would bypass it for
137  // arguments accesses, which is unsound. Don't try OSR.
138  if (shared->uses_arguments()) return;
139 
140  // We're using on-stack replacement: patch the unoptimized code so that
141  // any back edge in any unoptimized frame will trigger on-stack
142  // replacement for that frame.
143  if (FLAG_trace_osr) {
144  PrintF("[OSR - patching back edges in ");
145  function->PrintName();
146  PrintF("]\n");
147  }
148 
149  for (int i = 0; i < loop_nesting_levels; i++) {
150  BackEdgeTable::Patch(isolate_, shared->code());
151  }
152 }

References v8::internal::Isolate::DebuggerHasBreakPoints(), isolate_, v8::internal::BackEdgeTable::Patch(), and v8::internal::PrintF().

Referenced by OptimizeNow().
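
For orientation, a minimal caller sketch (illustrative only, not code from this page): the FLAG_always_osr path in OptimizeNow() below invokes this method with Code::kMaxLoopNestingMarker so that back edges at every loop depth are patched. The helper name here is hypothetical.

// Hypothetical helper: request back-edge patching at all loop depths,
// exactly as the FLAG_always_osr branch of OptimizeNow() does.
void ForceOsrForHotFunction(RuntimeProfiler* profiler, JSFunction* function) {
  profiler->AttemptOnStackReplacement(function, Code::kMaxLoopNestingMarker);
}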


◆ CodeSizeOKForOSR()

bool v8::internal::RuntimeProfiler::CodeSizeOKForOSR ( Code *  shared_code )
private

◆ NotifyICChanged()

void v8::internal::RuntimeProfiler::NotifyICChanged ( )
inline

Definition at line 28 of file runtime-profiler.h.

28 { any_ic_changed_ = true; }

References any_ic_changed_.

Referenced by v8::internal::IC::OnTypeFeedbackChanged().
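
A hedged sketch of that call site: when an inline cache transitions state, the IC machinery raises this flag so that OptimizeNow() skips its "small function" early-optimization shortcut for the current tick. The runtime_profiler() accessor on Isolate is assumed here for illustration.

// Sketch (cf. v8::internal::IC::OnTypeFeedbackChanged): any IC state change
// sets any_ic_changed_, which OptimizeNow() consults and then clears.
void NotifyProfilerOfICChange(Isolate* isolate) {
  isolate->runtime_profiler()->NotifyICChanged();  // assumed accessor
}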


◆ Optimize()

void v8::internal::RuntimeProfiler::Optimize ( JSFunction *  function,
const char *  reason 
)
private

Definition at line 83 of file runtime-profiler.cc.

83  {
84  DCHECK(function->IsOptimizable());
85 
86  if (FLAG_trace_opt && function->PassesFilter(FLAG_hydrogen_filter)) {
87  PrintF("[marking ");
88  function->ShortPrint();
89  PrintF(" for recompilation, reason: %s", reason);
90  if (FLAG_type_info_threshold > 0) {
91  int typeinfo, generic, total, type_percentage, generic_percentage;
92  GetICCounts(function->shared()->code(), &typeinfo, &generic, &total,
93  &type_percentage, &generic_percentage);
94  PrintF(", ICs with typeinfo: %d/%d (%d%%)", typeinfo, total,
95  type_percentage);
96  PrintF(", generic ICs: %d/%d (%d%%)", generic, total, generic_percentage);
97  }
98  PrintF("]\n");
99  }
100 
101 
102  if (isolate_->concurrent_recompilation_enabled() &&
103      !isolate_->bootstrapper()->IsActive()) {
104    if (isolate_->concurrent_osr_enabled() &&
105        isolate_->optimizing_compiler_thread()->IsQueuedForOSR(function)) {
106      // Do not attempt regular recompilation if we already queued this for OSR.
107  // TODO(yangguo): This is necessary so that we don't install optimized
108  // code on a function that is already optimized, since OSR and regular
109  // recompilation race. This goes away as soon as OSR becomes one-shot.
110  return;
111  }
112  DCHECK(!function->IsInOptimizationQueue());
113  function->MarkForConcurrentOptimization();
114  } else {
115  // The next call to the function will trigger optimization.
116  function->MarkForOptimization();
117  }
118 }

References v8::internal::Isolate::bootstrapper(), v8::internal::Isolate::concurrent_osr_enabled(), v8::internal::Isolate::concurrent_recompilation_enabled(), DCHECK, v8::internal::GetICCounts(), isolate_, v8::internal::OptimizingCompilerThread::IsQueuedForOSR(), v8::internal::Isolate::optimizing_compiler_thread(), and v8::internal::PrintF().

Referenced by OptimizeNow().
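
The decision above distills to: prefer background (concurrent) recompilation when the isolate supports it and is not bootstrapping, otherwise mark the function so that its next call triggers a synchronous optimizing compile. A simplified sketch, omitting the OSR-queue guard shown in the source:

// Simplified sketch of Optimize()'s dispatch (not the real code; the
// concurrent-OSR double-queue check at lines 104-111 is omitted).
void MarkHot(Isolate* isolate, JSFunction* function) {
  if (isolate->concurrent_recompilation_enabled() &&
      !isolate->bootstrapper()->IsActive()) {
    function->MarkForConcurrentOptimization();  // compiled on a worker thread
  } else {
    function->MarkForOptimization();  // compiled at the next invocation
  }
}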


◆ OptimizeNow()

void v8::internal::RuntimeProfiler::OptimizeNow ( )

Definition at line 155 of file runtime-profiler.cc.

155  {
156  HandleScope scope(isolate_);
157 
158  if (isolate_->DebuggerHasBreakPoints()) return;
159 
160  DisallowHeapAllocation no_gc;
161 
162  // Run through the JavaScript frames and collect them. If we already
163  // have a sample of the function, we mark it for optimizations
164  // (eagerly or lazily).
165  int frame_count = 0;
166  int frame_count_limit = FLAG_frame_count;
167  for (JavaScriptFrameIterator it(isolate_);
168  frame_count++ < frame_count_limit && !it.done();
169  it.Advance()) {
170  JavaScriptFrame* frame = it.frame();
171  JSFunction* function = frame->function();
172 
173  SharedFunctionInfo* shared = function->shared();
174  Code* shared_code = shared->code();
175 
176  List<JSFunction*> functions(4);
177  frame->GetFunctions(&functions);
178  for (int i = functions.length(); --i >= 0; ) {
179  SharedFunctionInfo* shared_function_info = functions[i]->shared();
180  int ticks = shared_function_info->profiler_ticks();
181  if (ticks < Smi::kMaxValue) {
182  shared_function_info->set_profiler_ticks(ticks + 1);
183  }
184  }
185 
186  if (shared_code->kind() != Code::FUNCTION) continue;
187  if (function->IsInOptimizationQueue()) continue;
188 
189  if (FLAG_always_osr) {
190  AttemptOnStackReplacement(function, Code::kMaxLoopNestingMarker);
191  // Fall through and do a normal optimized compile as well.
192  } else if (!frame->is_optimized() &&
193  (function->IsMarkedForOptimization() ||
194  function->IsMarkedForConcurrentOptimization() ||
195  function->IsOptimized())) {
196  // Attempt OSR if we are still running unoptimized code even though
197  // the function has long been marked or even already been optimized.
198  int ticks = shared_code->profiler_ticks();
199  int allowance = kOSRCodeSizeAllowanceBase +
200      ticks * kOSRCodeSizeAllowancePerTick;
201  if (shared_code->CodeSize() > allowance) {
202  if (ticks < 255) shared_code->set_profiler_ticks(ticks + 1);
203  } else {
204  AttemptOnStackReplacement(function);
205  }
206  continue;
207  }
208 
209  // Only record top-level code on top of the execution stack and
210  // avoid optimizing excessively large scripts since top-level code
211  // will be executed only once.
212  const int kMaxToplevelSourceSize = 10 * 1024;
213  if (shared->is_toplevel() &&
214  (frame_count > 1 || shared->SourceSize() > kMaxToplevelSourceSize)) {
215  continue;
216  }
217 
218  // Do not record non-optimizable functions.
219  if (shared->optimization_disabled()) {
220  if (shared->deopt_count() >= FLAG_max_opt_count) {
221  // If optimization was disabled due to many deoptimizations,
222  // then check if the function is hot and try to reenable optimization.
223  int ticks = shared_code->profiler_ticks();
224  if (ticks >= kProfilerTicksBeforeReenablingOptimization) {
225  shared_code->set_profiler_ticks(0);
226  shared->TryReenableOptimization();
227  } else {
228  shared_code->set_profiler_ticks(ticks + 1);
229  }
230  }
231  continue;
232  }
233  if (!function->IsOptimizable()) continue;
234 
235  int ticks = shared_code->profiler_ticks();
236 
237  if (ticks >= kProfilerTicksBeforeOptimization) {
238  int typeinfo, generic, total, type_percentage, generic_percentage;
239  GetICCounts(shared_code, &typeinfo, &generic, &total, &type_percentage,
240  &generic_percentage);
241  if (type_percentage >= FLAG_type_info_threshold &&
242  generic_percentage <= FLAG_generic_ic_threshold) {
243  // If this particular function hasn't had any ICs patched for enough
244  // ticks, optimize it now.
245  Optimize(function, "hot and stable");
246  } else if (ticks >= kTicksWhenNotEnoughTypeInfo) {
247  Optimize(function, "not much type info but very hot");
248  } else {
249  shared_code->set_profiler_ticks(ticks + 1);
250  if (FLAG_trace_opt_verbose) {
251  PrintF("[not yet optimizing ");
252  function->PrintName();
253  PrintF(", not enough type info: %d/%d (%d%%)]\n", typeinfo, total,
254  type_percentage);
255  }
256  }
257  } else if (!any_ic_changed_ &&
258  shared_code->instruction_size() < kMaxSizeEarlyOpt) {
259  // If no IC was patched since the last tick and this function is very
260  // small, optimistically optimize it now.
261  int typeinfo, generic, total, type_percentage, generic_percentage;
262  GetICCounts(shared_code, &typeinfo, &generic, &total, &type_percentage,
263  &generic_percentage);
264  if (type_percentage >= FLAG_type_info_threshold &&
265  generic_percentage <= FLAG_generic_ic_threshold) {
266  Optimize(function, "small function");
267  } else {
268  shared_code->set_profiler_ticks(ticks + 1);
269  }
270  } else {
271  shared_code->set_profiler_ticks(ticks + 1);
272  }
273  }
274  any_ic_changed_ = false;
275 }

References any_ic_changed_, AttemptOnStackReplacement(), v8::internal::Code::CodeSize(), v8::internal::Isolate::DebuggerHasBreakPoints(), v8::internal::SharedFunctionInfo::deopt_count(), v8::internal::JavaScriptFrame::function(), v8::internal::JavaScriptFrame::GetFunctions(), v8::internal::GetICCounts(), v8::internal::Code::instruction_size(), isolate_, v8::internal::Code::kind(), v8::internal::Code::kMaxLoopNestingMarker, v8::internal::kMaxSizeEarlyOpt, v8::internal::Smi::kMaxValue, v8::internal::kOSRCodeSizeAllowanceBase, v8::internal::kOSRCodeSizeAllowancePerTick, v8::internal::kProfilerTicksBeforeOptimization, v8::internal::kProfilerTicksBeforeReenablingOptimization, v8::internal::kTicksWhenNotEnoughTypeInfo, Optimize(), v8::internal::PrintF(), v8::internal::Code::profiler_ticks(), v8::internal::SharedFunctionInfo::profiler_ticks(), v8::internal::Code::set_profiler_ticks(), v8::internal::SharedFunctionInfo::set_profiler_ticks(), v8::internal::SharedFunctionInfo::SourceSize(), and v8::internal::SharedFunctionInfo::TryReenableOptimization().
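
Note how the OSR size gate at lines 199-205 grows with profiler ticks: the longer a marked function stays unoptimized, the larger its code is allowed to be before OSR is attempted. A worked sketch of that arithmetic follows; the constant values below are hypothetical (the real kOSRCodeSizeAllowance* constants are defined in runtime-profiler.cc and not shown on this page):

// Sketch of the OSR size gate. Only the formula mirrors the source:
//   allowance = kOSRCodeSizeAllowanceBase + ticks * kOSRCodeSizeAllowancePerTick
static const int kOSRCodeSizeAllowanceBase = 4 * 1024;  // hypothetical value
static const int kOSRCodeSizeAllowancePerTick = 1024;   // hypothetical value

bool OsrSizeGateOpen(int ticks, int code_size) {
  int allowance = kOSRCodeSizeAllowanceBase +
                  ticks * kOSRCodeSizeAllowancePerTick;
  // If the code is too big for its age, OptimizeNow() bumps the tick
  // counter instead of attempting on-stack replacement.
  return code_size <= allowance;
}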


Member Data Documentation

◆ any_ic_changed_

bool v8::internal::RuntimeProfiler::any_ic_changed_
private

Definition at line 39 of file runtime-profiler.h.

Referenced by NotifyICChanged(), and OptimizeNow().

◆ isolate_

Isolate* v8::internal::RuntimeProfiler::isolate_
private

Definition at line 37 of file runtime-profiler.h.

Referenced by AttemptOnStackReplacement(), Optimize(), and OptimizeNow().


The documentation for this class was generated from the following files:

runtime-profiler.h
runtime-profiler.cc