compiler.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/v8.h"
6 
7 #include "src/compiler.h"
8 
9 #include "src/bootstrapper.h"
10 #include "src/codegen.h"
11 #include "src/compilation-cache.h"
12 #include "src/compiler/pipeline.h"
13 #include "src/cpu-profiler.h"
14 #include "src/debug.h"
15 #include "src/deoptimizer.h"
16 #include "src/full-codegen.h"
17 #include "src/gdb-jit.h"
18 #include "src/hydrogen.h"
19 #include "src/isolate-inl.h"
20 #include "src/lithium.h"
21 #include "src/liveedit.h"
22 #include "src/parser.h"
23 #include "src/rewriter.h"
24 #include "src/runtime-profiler.h"
26 #include "src/scopeinfo.h"
27 #include "src/scopes.h"
28 #include "src/typing.h"
29 #include "src/vm-state-inl.h"
30 
31 namespace v8 {
32 namespace internal {
33 
34 
35 ScriptData::ScriptData(const byte* data, int length)
36  : owns_data_(false), data_(data), length_(length) {
37  if (!IsAligned(reinterpret_cast<intptr_t>(data), kPointerAlignment)) {
38  byte* copy = NewArray<byte>(length);
39  DCHECK(IsAligned(reinterpret_cast<intptr_t>(copy), kPointerAlignment));
40  CopyBytes(copy, data, length);
41  data_ = copy;
43  }
44 }
45 
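The constructor above copies the caller's buffer whenever the incoming pointer is not pointer-aligned, so later readers can assume aligned access. A minimal stand-alone sketch of the same idea in portable C++ (IsPointerAligned and AlignedCopyIfNeeded are illustrative stand-ins, not the V8 helpers used above):

  #include <cstdint>
  #include <cstring>

  // True when |p| is aligned to |alignment| bytes (alignment must be a power of two).
  static bool IsPointerAligned(const void* p, std::size_t alignment) {
    return (reinterpret_cast<std::uintptr_t>(p) & (alignment - 1)) == 0;
  }

  // Mirrors the copy-on-misalignment idea: returns either the original buffer
  // or a freshly copied, suitably aligned one that the caller now owns.
  static const unsigned char* AlignedCopyIfNeeded(const unsigned char* data,
                                                  int length, bool* owns_copy) {
    const std::size_t kPointerSize = sizeof(void*);
    if (IsPointerAligned(data, kPointerSize)) {
      *owns_copy = false;
      return data;
    }
    unsigned char* copy = new unsigned char[length];
    std::memcpy(copy, data, length);
    *owns_copy = true;
    return copy;
  }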
46 
47 CompilationInfo::CompilationInfo(Handle<Script> script, Zone* zone)
48  : flags_(kThisHasUses),
49  script_(script),
50  source_stream_(NULL),
51  osr_ast_id_(BailoutId::None()),
52  parameter_count_(0),
53  optimization_id_(-1),
54  ast_value_factory_(NULL),
55  ast_value_factory_owned_(false) {
56  Initialize(script->GetIsolate(), BASE, zone);
57 }
58 
59 
61  : flags_(kThisHasUses),
62  script_(Handle<Script>::null()),
63  source_stream_(NULL),
64  osr_ast_id_(BailoutId::None()),
65  parameter_count_(0),
66  optimization_id_(-1),
67  ast_value_factory_(NULL),
68  ast_value_factory_owned_(false) {
70 }
71 
72 
73 CompilationInfo::CompilationInfo(Handle<SharedFunctionInfo> shared_info,
74  Zone* zone)
75  : flags_(kLazy | kThisHasUses),
76  shared_info_(shared_info),
77  script_(Handle<Script>(Script::cast(shared_info->script()))),
78  source_stream_(NULL),
79  osr_ast_id_(BailoutId::None()),
80  parameter_count_(0),
81  optimization_id_(-1),
82  ast_value_factory_(NULL),
83  ast_value_factory_owned_(false) {
84  Initialize(script_->GetIsolate(), BASE, zone);
85 }
86 
87 
88 CompilationInfo::CompilationInfo(Handle<JSFunction> closure, Zone* zone)
89  : flags_(kLazy | kThisHasUses),
90  closure_(closure),
91  shared_info_(Handle<SharedFunctionInfo>(closure->shared())),
92  script_(Handle<Script>(Script::cast(shared_info_->script()))),
93  source_stream_(NULL),
94  context_(closure->context()),
95  osr_ast_id_(BailoutId::None()),
96  parameter_count_(0),
97  optimization_id_(-1),
98  ast_value_factory_(NULL),
99  ast_value_factory_owned_(false) {
100  Initialize(script_->GetIsolate(), BASE, zone);
101 }
102 
103 
104 CompilationInfo::CompilationInfo(HydrogenCodeStub* stub, Isolate* isolate,
105  Zone* zone)
106  : flags_(kLazy | kThisHasUses),
107  source_stream_(NULL),
108  osr_ast_id_(BailoutId::None()),
109  parameter_count_(0),
110  optimization_id_(-1),
111  ast_value_factory_(NULL),
112  ast_value_factory_owned_(false) {
113  Initialize(isolate, STUB, zone);
114  code_stub_ = stub;
115 }
116 
117 
121  Zone* zone)
122  : flags_(kThisHasUses),
123  source_stream_(stream),
124  source_stream_encoding_(encoding),
125  osr_ast_id_(BailoutId::None()),
126  parameter_count_(0),
127  optimization_id_(-1),
128  ast_value_factory_(NULL),
129  ast_value_factory_owned_(false) {
130  Initialize(isolate, BASE, zone);
131 }
132 
133 
134 void CompilationInfo::Initialize(Isolate* isolate,
135  Mode mode,
136  Zone* zone) {
137  isolate_ = isolate;
138  function_ = NULL;
139  scope_ = NULL;
141  extension_ = NULL;
142  cached_data_ = NULL;
144  zone_ = zone;
146  code_stub_ = NULL;
148  opt_count_ = shared_info().is_null() ? 0 : shared_info()->opt_count();
149  no_frame_ranges_ = isolate->cpu_profiler()->is_profiling()
150  ? new List<OffsetRange>(2) : NULL;
151  for (int i = 0; i < DependentCode::kGroupCount; i++) {
152  dependencies_[i] = NULL;
153  }
154  if (mode == STUB) {
155  mode_ = STUB;
156  return;
157  }
158  mode_ = mode;
159  if (!script_.is_null() && script_->type()->value() == Script::TYPE_NATIVE) {
160  MarkAsNative();
161  }
162  if (isolate_->debug()->is_active()) MarkAsDebug();
163  if (FLAG_context_specialization) MarkAsContextSpecializing();
164  if (FLAG_turbo_inlining) MarkAsInliningEnabled();
165  if (FLAG_turbo_types) MarkAsTypingEnabled();
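 // Note: the FLAG_context_specialization / FLAG_turbo_* checks above only
 // record bits on this CompilationInfo; whether TurboFan actually runs is
 // decided later in OptimizedCompileJob::CreateGraph(), where FLAG_turbo_asm
 // and FLAG_turbo_filter are consulted.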
166 
167  if (!shared_info_.is_null()) {
168  DCHECK(strict_mode() == SLOPPY);
169  SetStrictMode(shared_info_->strict_mode());
170  }
171  bailout_reason_ = kUnknown;
172 
173  if (!shared_info().is_null() && shared_info()->is_compiled()) {
174  // We should initialize the CompilationInfo feedback vector from the
175  // passed in shared info, rather than creating a new one.
178  }
179 }
180 
181 
182 CompilationInfo::~CompilationInfo() {
184  shared_info()->DisableOptimization(bailout_reason());
185  }
186  delete deferred_handles_;
187  delete no_frame_ranges_;
189 #ifdef DEBUG
190  // Check that no dependent maps have been added or added dependent maps have
191  // been rolled back or committed.
192  for (int i = 0; i < DependentCode::kGroupCount; i++) {
194  }
195 #endif // DEBUG
196 }
197 
198 
199 void CompilationInfo::CommitDependencies(Handle<Code> code) {
200  for (int i = 0; i < DependentCode::kGroupCount; i++) {
201  ZoneList<Handle<HeapObject> >* group_objects = dependencies_[i];
202  if (group_objects == NULL) continue;
203  DCHECK(!object_wrapper_.is_null());
204  for (int j = 0; j < group_objects->length(); j++) {
206  static_cast<DependentCode::DependencyGroup>(i);
208  DependentCode::ForObject(group_objects->at(j), group);
209  dependent_code->UpdateToFinishedCode(group, this, *code);
210  }
211  dependencies_[i] = NULL; // Zone-allocated, no need to delete.
212  }
213 }
214 
215 
216 void CompilationInfo::RollbackDependencies() {
217  // Unregister from all dependent maps if not yet committed.
218  for (int i = 0; i < DependentCode::kGroupCount; i++) {
219  ZoneList<Handle<HeapObject> >* group_objects = dependencies_[i];
220  if (group_objects == NULL) continue;
221  for (int j = 0; j < group_objects->length(); j++) {
223  static_cast<DependentCode::DependencyGroup>(i);
225  DependentCode::ForObject(group_objects->at(j), group);
226  dependent_code->RemoveCompilationInfo(group, this);
227  }
228  dependencies_[i] = NULL; // Zone-allocated, no need to delete.
229  }
230 }
231 
232 
234  if (IsStub()) {
236  return parameter_count_;
237  } else {
238  return scope()->num_parameters();
239  }
240 }
241 
242 
244  if (IsStub()) {
245  return 0;
246  } else {
247  return scope()->num_heap_slots();
248  }
249 }
250 
251 
252 Code::Flags CompilationInfo::flags() const {
253  if (IsStub()) {
254  return Code::ComputeFlags(code_stub()->GetCodeKind(),
255  code_stub()->GetICState(),
256  code_stub()->GetExtraICState(),
257  code_stub()->GetStubType());
258  } else {
259  return Code::ComputeFlags(Code::OPTIMIZED_FUNCTION);
260  }
261 }
262 
263 
264 // Primitive functions are unlikely to be picked up by the stack-walking
265 // profiler, so they trigger their own optimization when they're called
266 // for the SharedFunctionInfo::kCallsUntilPrimitiveOptimization-th time.
268  return FLAG_crankshaft &&
269  !function()->flags()->Contains(kDontSelfOptimize) &&
270  !function()->dont_optimize() &&
271  function()->scope()->AllowsLazyCompilation() &&
272  (shared_info().is_null() || !shared_info()->optimization_disabled());
273 }
274 
275 
276 void CompilationInfo::PrepareForCompilation(Scope* scope) {
277  DCHECK(scope_ == NULL);
278  scope_ = scope;
279 
280  int length = function()->slot_count();
281  if (feedback_vector_.is_null()) {
282  // Allocate the feedback vector too.
283  feedback_vector_ = isolate()->factory()->NewTypeFeedbackVector(length);
284  }
285  DCHECK(feedback_vector_->length() == length);
286 }
287 
288 
289 class HOptimizedGraphBuilderWithPositions : public HOptimizedGraphBuilder {
290  public:
291  explicit HOptimizedGraphBuilderWithPositions(CompilationInfo* info)
292  : HOptimizedGraphBuilder(info) {
293  }
294 
295 #define DEF_VISIT(type) \
296  virtual void Visit##type(type* node) OVERRIDE { \
297  if (node->position() != RelocInfo::kNoPosition) { \
298  SetSourcePosition(node->position()); \
299  } \
300  HOptimizedGraphBuilder::Visit##type(node); \
301  }
303 #undef DEF_VISIT
304 
305 #define DEF_VISIT(type) \
306  virtual void Visit##type(type* node) OVERRIDE { \
307  if (node->position() != RelocInfo::kNoPosition) { \
308  SetSourcePosition(node->position()); \
309  } \
310  HOptimizedGraphBuilder::Visit##type(node); \
311  }
313 #undef DEF_VISIT
314 
315 #define DEF_VISIT(type) \
316  virtual void Visit##type(type* node) OVERRIDE { \
317  HOptimizedGraphBuilder::Visit##type(node); \
318  }
321 #undef DEF_VISIT
322 };
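The three DEF_VISIT blocks above generate one overriding Visit method per AST node type; the first two record the node's source position before delegating to the base graph builder, the third delegates unchanged. A self-contained sketch of that interposition pattern, using plain illustrative types rather than V8's AST classes:

  // Stand-ins for an AST node and the base graph builder.
  struct Node { int position; };
  class BaseBuilder {
   public:
    virtual ~BaseBuilder() {}
    virtual void VisitNode(Node* node) { /* translate the node */ }
  };

  // Subclass that updates the "current source position" before delegating,
  // mirroring what the generated Visit##type overrides do.
  class PositionTrackingBuilder : public BaseBuilder {
   public:
    PositionTrackingBuilder() : current_position_(-1) {}
    virtual void VisitNode(Node* node) {
      if (node->position != -1) current_position_ = node->position;
      BaseBuilder::VisitNode(node);
    }
   private:
    int current_position_;
  };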
323 
324 
325 OptimizedCompileJob::Status OptimizedCompileJob::CreateGraph() {
326  DCHECK(info()->IsOptimizing());
327  DCHECK(!info()->IsCompilingForDebugging());
328 
329  // We should never arrive here if optimization has been disabled on the
330  // shared function info.
331  DCHECK(!info()->shared_info()->optimization_disabled());
332 
333  // Do not use crankshaft if we need to be able to set break points.
334  if (isolate()->DebuggerHasBreakPoints()) {
335  return RetryOptimization(kDebuggerHasBreakPoints);
336  }
337 
338  // Limit the number of times we re-compile a function with
339  // the optimizing compiler.
340  const int kMaxOptCount =
341  FLAG_deopt_every_n_times == 0 ? FLAG_max_opt_count : 1000;
342  if (info()->opt_count() > kMaxOptCount) {
343  return AbortOptimization(kOptimizedTooManyTimes);
344  }
345 
346  // Due to an encoding limit on LUnallocated operands in the Lithium
347  // language, we cannot optimize functions with too many formal parameters
348  // or perform on-stack replacement for functions with too many
349  // stack-allocated local variables.
350  //
351  // The encoding is as a signed value, with parameters and receiver using
352  // the negative indices and locals the non-negative ones.
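 // Worked example with made-up numbers (the real bounds come from
 // LUnallocated::kMinFixedSlotIndex / kMaxFixedSlotIndex below): if the slot
 // index were an 8-bit signed field, parameters plus the receiver would have
 // to fit into indices -128..-1 and stack locals into 0..127, so a function
 // with 130 parameters, or an OSR entry whose parameters and locals together
 // exceed the positive range, bails out of optimization here.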
353  const int parameter_limit = -LUnallocated::kMinFixedSlotIndex;
354  Scope* scope = info()->scope();
355  if ((scope->num_parameters() + 1) > parameter_limit) {
356  return AbortOptimization(kTooManyParameters);
357  }
358 
359  const int locals_limit = LUnallocated::kMaxFixedSlotIndex;
360  if (info()->is_osr() &&
361  scope->num_parameters() + 1 + scope->num_stack_slots() > locals_limit) {
362  return AbortOptimization(kTooManyParametersLocals);
363  }
364 
365  if (scope->HasIllegalRedeclaration()) {
366  return AbortOptimization(kFunctionWithIllegalRedeclaration);
367  }
368 
369  // Check the whitelist for Crankshaft.
370  if (!info()->closure()->PassesFilter(FLAG_hydrogen_filter)) {
371  return AbortOptimization(kHydrogenFilter);
372  }
373 
374  // Crankshaft requires a version of fullcode with deoptimization support.
375  // Recompile the unoptimized version of the code if the current version
376  // doesn't have deoptimization support already.
377  // Otherwise, if we are gathering compilation time and space statistics
378  // for hydrogen, gather baseline statistics for a fullcode compilation.
379  bool should_recompile = !info()->shared_info()->has_deoptimization_support();
380  if (should_recompile || FLAG_hydrogen_stats) {
381  base::ElapsedTimer timer;
382  if (FLAG_hydrogen_stats) {
383  timer.Start();
384  }
385  if (!Compiler::EnsureDeoptimizationSupport(info())) {
386  return SetLastStatus(FAILED);
387  }
388  if (FLAG_hydrogen_stats) {
389  isolate()->GetHStatistics()->IncrementFullCodeGen(timer.Elapsed());
390  }
391  }
392 
393  DCHECK(info()->shared_info()->has_deoptimization_support());
394 
395  // Check the whitelist for TurboFan.
396  if ((FLAG_turbo_asm && info()->shared_info()->asm_function()) ||
397  info()->closure()->PassesFilter(FLAG_turbo_filter)) {
398  compiler::Pipeline pipeline(info());
399  pipeline.GenerateCode();
400  if (!info()->code().is_null()) {
401  if (FLAG_turbo_deoptimization) {
402  info()->context()->native_context()->AddOptimizedCode(*info()->code());
403  }
404  return SetLastStatus(SUCCEEDED);
405  }
406  }
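 // Reaching this point means TurboFan either was not selected for this
 // function or did not produce code; everything below is the Crankshaft
 // (Hydrogen/Lithium) path.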
407 
408  if (FLAG_trace_hydrogen) {
409  Handle<String> name = info()->function()->debug_name();
410  PrintF("-----------------------------------------------------------\n");
411  PrintF("Compiling method %s using hydrogen\n", name->ToCString().get());
412  isolate()->GetHTracer()->TraceCompilation(info());
413  }
414 
415  // Type-check the function.
416  AstTyper::Run(info());
417 
418  graph_builder_ = (FLAG_hydrogen_track_positions || FLAG_trace_ic)
419  ? new(info()->zone()) HOptimizedGraphBuilderWithPositions(info())
420  : new(info()->zone()) HOptimizedGraphBuilder(info());
421 
423  info()->set_this_has_uses(false);
424  graph_ = graph_builder_->CreateGraph();
425 
426  if (isolate()->has_pending_exception()) {
427  return SetLastStatus(FAILED);
428  }
429 
430  if (graph_ == NULL) return SetLastStatus(BAILED_OUT);
431 
432  if (info()->HasAbortedDueToDependencyChange()) {
433  // Dependency has changed during graph creation. Let's try again later.
434  return RetryOptimization(kBailedOutDueToDependencyChange);
435  }
436 
437  return SetLastStatus(SUCCEEDED);
438 }
439 
440 
441 OptimizedCompileJob::Status OptimizedCompileJob::OptimizeGraph() {
442  DisallowHeapAllocation no_allocation;
443  DisallowHandleAllocation no_handles;
444  DisallowHandleDereference no_deref;
445  DisallowCodeDependencyChange no_dependency_change;
446 
448  // TODO(turbofan): Currently everything is done in the first phase.
449  if (!info()->code().is_null()) {
450  return last_status();
451  }
452 
453  Timer t(this, &time_taken_to_optimize_);
454  DCHECK(graph_ != NULL);
455  BailoutReason bailout_reason = kNoReason;
456 
457  if (graph_->Optimize(&bailout_reason)) {
458  chunk_ = LChunk::NewChunk(graph_);
459  if (chunk_ != NULL) return SetLastStatus(SUCCEEDED);
460  } else if (bailout_reason != kNoReason) {
461  graph_builder_->Bailout(bailout_reason);
462  }
463 
464  return SetLastStatus(BAILED_OUT);
465 }
466 
467 
468 OptimizedCompileJob::Status OptimizedCompileJob::GenerateCode() {
470  // TODO(turbofan): Currently everything is done in the first phase.
471  if (!info()->code().is_null()) {
473  return last_status();
474  }
475 
476  DCHECK(!info()->HasAbortedDueToDependencyChange());
477  DisallowCodeDependencyChange no_dependency_change;
479  { // Scope for timer.
480  Timer timer(this, &time_taken_to_codegen_);
481  DCHECK(chunk_ != NULL);
482  DCHECK(graph_ != NULL);
483  // Deferred handles reference objects that were accessible during
484  // graph creation. To make sure that we don't encounter inconsistencies
485  // between graph creation and code generation, we disallow accessing
486  // objects through deferred handles during the latter, with exceptions.
487  DisallowDeferredHandleDereference no_deferred_handle_deref;
488  Handle<Code> optimized_code = chunk_->Codegen();
489  if (optimized_code.is_null()) {
490  if (info()->bailout_reason() == kNoReason) {
491  return AbortOptimization(kCodeGenerationFailed);
492  }
493  return SetLastStatus(BAILED_OUT);
494  }
495  info()->SetCode(optimized_code);
496  }
498  // Add to the weak list of optimized code objects.
499  info()->context()->native_context()->AddOptimizedCode(*info()->code());
500  return SetLastStatus(SUCCEEDED);
501 }
502 
503 
505  Handle<JSFunction> function = info()->closure();
506  if (!function->IsOptimized()) {
507  // Concurrent recompilation and OSR may race. Increment only once.
508  int opt_count = function->shared()->opt_count();
509  function->shared()->set_opt_count(opt_count + 1);
510  }
511  double ms_creategraph = time_taken_to_create_graph_.InMillisecondsF();
512  double ms_optimize = time_taken_to_optimize_.InMillisecondsF();
513  double ms_codegen = time_taken_to_codegen_.InMillisecondsF();
514  if (FLAG_trace_opt) {
515  PrintF("[optimizing ");
516  function->ShortPrint();
517  PrintF(" - took %0.3f, %0.3f, %0.3f ms]\n", ms_creategraph, ms_optimize,
518  ms_codegen);
519  }
520  if (FLAG_trace_opt_stats) {
521  static double compilation_time = 0.0;
522  static int compiled_functions = 0;
523  static int code_size = 0;
524 
525  compilation_time += (ms_creategraph + ms_optimize + ms_codegen);
526  compiled_functions++;
527  code_size += function->shared()->SourceSize();
528  PrintF("Compiled: %d functions with %d byte source size in %fms.\n",
529  compiled_functions,
530  code_size,
531  compilation_time);
532  }
533  if (FLAG_hydrogen_stats) {
534  isolate()->GetHStatistics()->IncrementSubtotals(time_taken_to_create_graph_,
537  }
538 }
539 
540 
541 // Sets the expected number of properties based on estimate from compiler.
542 static void SetExpectedNofPropertiesFromEstimate(Handle<SharedFunctionInfo> shared,
543  int estimate) {
544  // If no properties are added in the constructor, they are more likely
545  // to be added later.
546  if (estimate == 0) estimate = 2;
547 
548  // TODO(yangguo): check whether those heuristics are still up-to-date.
549  // We do not shrink objects that go into a snapshot (yet), so we adjust
550  // the estimate conservatively.
551  if (shared->GetIsolate()->serializer_enabled()) {
552  estimate += 2;
553  } else if (FLAG_clever_optimizations) {
554  // Inobject slack tracking will reclaim redundant inobject space later,
555  // so we can afford to adjust the estimate generously.
556  estimate += 8;
557  } else {
558  estimate += 3;
559  }
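 // Example with the constants above: a parser estimate of 3 becomes
 // 3 + 8 = 11 expected properties when FLAG_clever_optimizations is on
 // (slack tracking later reclaims the unused slots), and 3 + 2 = 5 when
 // building a snapshot.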
560 
561  shared->set_expected_nof_properties(estimate);
562 }
563 
564 
565 // Sets the function info on a function.
566 // The start_position points to the first '(' character after the function name
567 // in the full script source. When counting characters in the script source,
568 // the first character is number 0 (not 1).
569 static void SetFunctionInfo(Handle<SharedFunctionInfo> function_info,
570  FunctionLiteral* lit,
571  bool is_toplevel,
572  Handle<Script> script) {
573  function_info->set_length(lit->parameter_count());
574  function_info->set_formal_parameter_count(lit->parameter_count());
575  function_info->set_script(*script);
576  function_info->set_function_token_position(lit->function_token_position());
577  function_info->set_start_position(lit->start_position());
578  function_info->set_end_position(lit->end_position());
579  function_info->set_is_expression(lit->is_expression());
580  function_info->set_is_anonymous(lit->is_anonymous());
581  function_info->set_is_toplevel(is_toplevel);
582  function_info->set_inferred_name(*lit->inferred_name());
583  function_info->set_allows_lazy_compilation(lit->AllowsLazyCompilation());
584  function_info->set_allows_lazy_compilation_without_context(
585  lit->AllowsLazyCompilationWithoutContext());
586  function_info->set_strict_mode(lit->strict_mode());
587  function_info->set_uses_arguments(lit->scope()->arguments() != NULL);
588  function_info->set_has_duplicate_parameters(lit->has_duplicate_parameters());
589  function_info->set_ast_node_count(lit->ast_node_count());
590  function_info->set_is_function(lit->is_function());
591  function_info->set_bailout_reason(lit->dont_optimize_reason());
592  function_info->set_dont_cache(lit->flags()->Contains(kDontCache));
593  function_info->set_kind(lit->kind());
594  function_info->set_asm_function(lit->scope()->asm_function());
595 }
596 
597 
598 static void RecordFunctionCompilation(Logger::LogEventsAndTags tag,
599  CompilationInfo* info,
600  Handle<SharedFunctionInfo> shared) {
601  // SharedFunctionInfo is passed separately, because if CompilationInfo
602  // was created using a Script object, it will not have it.
603 
604  // Log the code generation. If source information is available include
605  // script name and line number. Check explicitly whether logging is
606  // enabled as finding the line number is not free.
607  if (info->isolate()->logger()->is_logging_code_events() ||
608  info->isolate()->cpu_profiler()->is_profiling()) {
609  Handle<Script> script = info->script();
610  Handle<Code> code = info->code();
611  if (code.is_identical_to(info->isolate()->builtins()->CompileLazy())) {
612  return;
613  }
614  int line_num = Script::GetLineNumber(script, shared->start_position()) + 1;
615  int column_num =
616  Script::GetColumnNumber(script, shared->start_position()) + 1;
617  String* script_name = script->name()->IsString()
618  ? String::cast(script->name())
619  : info->isolate()->heap()->empty_string();
620  Logger::LogEventsAndTags log_tag = Logger::ToNativeByScript(tag, *script);
621  PROFILE(info->isolate(),
622  CodeCreateEvent(log_tag, *code, *shared, info, script_name,
623  line_num, column_num));
624  }
625 
626  GDBJIT(AddCode(Handle<String>(shared->DebugName()),
627  Handle<Script>(info->script()), Handle<Code>(info->code()),
628  info));
629 }
630 
631 
632 static bool CompileUnoptimizedCode(CompilationInfo* info) {
633  DCHECK(AllowCompilation::IsAllowed(info->isolate()));
634  DCHECK(info->function() != NULL);
635  if (!Rewriter::Rewrite(info)) return false;
636  if (!Scope::Analyze(info)) return false;
637  DCHECK(info->scope() != NULL);
638 
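 // At this point the AST has been rewritten (Rewriter) and variable
 // references have been resolved by scope analysis, so the full code
 // generator below runs on a fully analyzed function.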
639  if (!FullCodeGenerator::MakeCode(info)) {
640  Isolate* isolate = info->isolate();
641  if (!isolate->has_pending_exception()) isolate->StackOverflow();
642  return false;
643  }
644  return true;
645 }
646 
647 
648 MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCodeCommon(
649  CompilationInfo* info) {
650  VMState<COMPILER> state(info->isolate());
651  PostponeInterruptsScope postpone(info->isolate());
652 
653  // Parse and update CompilationInfo with the results.
654  if (!Parser::Parse(info)) return MaybeHandle<Code>();
655  Handle<SharedFunctionInfo> shared = info->shared_info();
656  FunctionLiteral* lit = info->function();
657  shared->set_strict_mode(lit->strict_mode());
658  SetExpectedNofPropertiesFromEstimate(shared, lit->expected_property_count());
659  shared->set_bailout_reason(lit->dont_optimize_reason());
660  shared->set_ast_node_count(lit->ast_node_count());
661 
662  // Compile unoptimized code.
663  if (!CompileUnoptimizedCode(info)) return MaybeHandle<Code>();
664 
665  CHECK_EQ(Code::FUNCTION, info->code()->kind());
666  RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, info, shared);
667 
668  // Update the shared function info with the scope info. Allocating the
669  // ScopeInfo object may cause a GC.
670  Handle<ScopeInfo> scope_info = ScopeInfo::Create(info->scope(), info->zone());
671  shared->set_scope_info(*scope_info);
672 
673  // Update the code and feedback vector for the shared function info.
674  shared->ReplaceCode(*info->code());
675  if (shared->optimization_disabled()) info->code()->set_optimizable(false);
676  shared->set_feedback_vector(*info->feedback_vector());
677 
678  return info->code();
679 }
680 
681 
682 static MaybeHandle<Code> GetCodeFromOptimizedCodeMap(
683  Handle<JSFunction> function, BailoutId osr_ast_id) {
684  if (FLAG_cache_optimized_code) {
685  Handle<SharedFunctionInfo> shared(function->shared());
686  // Bound functions are not cached.
687  if (shared->bound()) return MaybeHandle<Code>();
689  int index = shared->SearchOptimizedCodeMap(
690  function->context()->native_context(), osr_ast_id);
691  if (index > 0) {
692  if (FLAG_trace_opt) {
693  PrintF("[found optimized code for ");
694  function->ShortPrint();
695  if (!osr_ast_id.IsNone()) {
696  PrintF(" at OSR AST id %d", osr_ast_id.ToInt());
697  }
698  PrintF("]\n");
699  }
700  FixedArray* literals = shared->GetLiteralsFromOptimizedCodeMap(index);
701  if (literals != NULL) function->set_literals(literals);
702  return Handle<Code>(shared->GetCodeFromOptimizedCodeMap(index));
703  }
704  }
705  return MaybeHandle<Code>();
706 }
707 
708 
709 static void InsertCodeIntoOptimizedCodeMap(CompilationInfo* info) {
710  Handle<Code> code = info->code();
711  if (code->kind() != Code::OPTIMIZED_FUNCTION) return; // Nothing to do.
712 
713  // Context specialization folds-in the context, so no sharing can occur.
714  if (code->is_turbofanned() && info->is_context_specializing()) return;
715 
716  // Cache optimized code.
717  if (FLAG_cache_optimized_code) {
718  Handle<JSFunction> function = info->closure();
719  Handle<SharedFunctionInfo> shared(function->shared());
720  // Do not cache bound functions.
721  if (shared->bound()) return;
722  Handle<FixedArray> literals(function->literals());
723  Handle<Context> native_context(function->context()->native_context());
724  SharedFunctionInfo::AddToOptimizedCodeMap(shared, native_context, code,
725  literals, info->osr_ast_id());
726  }
727 }
728 
729 
730 static bool CompileOptimizedPrologue(CompilationInfo* info) {
731  if (!Parser::Parse(info)) return false;
732  if (!Rewriter::Rewrite(info)) return false;
733  if (!Scope::Analyze(info)) return false;
734  DCHECK(info->scope() != NULL);
735  return true;
736 }
737 
738 
739 static bool GetOptimizedCodeNow(CompilationInfo* info) {
740  if (!CompileOptimizedPrologue(info)) return false;
741 
743 
744  OptimizedCompileJob job(info);
748  if (FLAG_trace_opt) {
749  PrintF("[aborted optimizing ");
750  info->closure()->ShortPrint();
751  PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason()));
752  }
753  return false;
754  }
755 
756  // Success!
757  DCHECK(!info->isolate()->has_pending_exception());
759  RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, info,
760  info->shared_info());
761  if (FLAG_trace_opt) {
762  PrintF("[completed optimizing ");
763  info->closure()->ShortPrint();
764  PrintF("]\n");
765  }
766  return true;
767 }
768 
769 
770 static bool GetOptimizedCodeLater(CompilationInfo* info) {
771  Isolate* isolate = info->isolate();
772  if (!isolate->optimizing_compiler_thread()->IsQueueAvailable()) {
773  if (FLAG_trace_concurrent_recompilation) {
774  PrintF(" ** Compilation queue full, will retry optimizing ");
775  info->closure()->ShortPrint();
776  PrintF(" later.\n");
777  }
778  return false;
779  }
780 
781  CompilationHandleScope handle_scope(info);
782  if (!CompileOptimizedPrologue(info)) return false;
783  info->SaveHandles(); // Copy handles to the compilation handle scope.
784 
786 
787  OptimizedCompileJob* job = new (info->zone()) OptimizedCompileJob(info);
788  OptimizedCompileJob::Status status = job->CreateGraph();
789  if (status != OptimizedCompileJob::SUCCEEDED) return false;
790  isolate->optimizing_compiler_thread()->QueueForOptimization(job);
791 
792  if (FLAG_trace_concurrent_recompilation) {
793  PrintF(" ** Queued ");
794  info->closure()->ShortPrint();
795  if (info->is_osr()) {
796  PrintF(" for concurrent OSR at %d.\n", info->osr_ast_id().ToInt());
797  } else {
798  PrintF(" for concurrent optimization.\n");
799  }
800  }
801  return true;
802 }
803 
804 
805 MaybeHandle<Code> Compiler::GetUnoptimizedCode(Handle<JSFunction> function) {
806  DCHECK(!function->GetIsolate()->has_pending_exception());
807  DCHECK(!function->is_compiled());
808  if (function->shared()->is_compiled()) {
809  return Handle<Code>(function->shared()->code());
810  }
811 
812  CompilationInfoWithZone info(function);
813  Handle<Code> result;
814  ASSIGN_RETURN_ON_EXCEPTION(info.isolate(), result,
815  GetUnoptimizedCodeCommon(&info),
816  Code);
817  return result;
818 }
819 
820 
821 MaybeHandle<Code> Compiler::GetLazyCode(Handle<JSFunction> function) {
822  DCHECK(!function->GetIsolate()->has_pending_exception());
823  DCHECK(!function->is_compiled());
824 
825  if (FLAG_turbo_asm && function->shared()->asm_function()) {
826  CompilationInfoWithZone info(function);
827 
828  VMState<COMPILER> state(info.isolate());
829  PostponeInterruptsScope postpone(info.isolate());
830 
832  Handle<Code>(function->shared()->code()));
833 
835  info.MarkAsTypingEnabled();
836  info.MarkAsInliningDisabled();
837 
838  if (GetOptimizedCodeNow(&info)) return info.code();
839  }
840 
841  if (function->shared()->is_compiled()) {
842  return Handle<Code>(function->shared()->code());
843  }
844 
845  CompilationInfoWithZone info(function);
846  Handle<Code> result;
847  ASSIGN_RETURN_ON_EXCEPTION(info.isolate(), result,
849 
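 // With --always-opt we eagerly try to produce optimized code on top of the
 // fresh unoptimized result, unless Crankshaft is unavailable, optimization
 // has been disabled for this function, or the debugger has break points.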
850  if (FLAG_always_opt &&
851  info.isolate()->use_crankshaft() &&
852  !info.shared_info()->optimization_disabled() &&
853  !info.isolate()->DebuggerHasBreakPoints()) {
854  Handle<Code> opt_code;
855  if (Compiler::GetOptimizedCode(
856  function, result,
857  Compiler::NOT_CONCURRENT).ToHandle(&opt_code)) {
858  result = opt_code;
859  }
860  }
861 
862  return result;
863 }
864 
865 
866 MaybeHandle<Code> Compiler::GetUnoptimizedCode(
867  Handle<SharedFunctionInfo> shared) {
868  DCHECK(!shared->GetIsolate()->has_pending_exception());
869  DCHECK(!shared->is_compiled());
870 
871  CompilationInfoWithZone info(shared);
872  return GetUnoptimizedCodeCommon(&info);
873 }
874 
875 
876 bool Compiler::EnsureCompiled(Handle<JSFunction> function,
877  ClearExceptionFlag flag) {
878  if (function->is_compiled()) return true;
879  MaybeHandle<Code> maybe_code = Compiler::GetLazyCode(function);
880  Handle<Code> code;
881  if (!maybe_code.ToHandle(&code)) {
882  if (flag == CLEAR_EXCEPTION) {
883  function->GetIsolate()->clear_pending_exception();
884  }
885  return false;
886  }
887  function->ReplaceCode(*code);
888  DCHECK(function->is_compiled());
889  return true;
890 }
891 
892 
893 // TODO(turbofan): In the future, unoptimized code with deopt support could
894 // be generated lazily once deopt is triggered.
895 bool Compiler::EnsureDeoptimizationSupport(CompilationInfo* info) {
896  if (!info->shared_info()->has_deoptimization_support()) {
897  CompilationInfoWithZone unoptimized(info->shared_info());
898  // Note that we use the same AST that we will use for generating the
899  // optimized code.
900  unoptimized.SetFunction(info->function());
901  unoptimized.PrepareForCompilation(info->scope());
902  unoptimized.SetContext(info->context());
903  unoptimized.EnableDeoptimizationSupport();
904  if (!FullCodeGenerator::MakeCode(&unoptimized)) return false;
905 
906  Handle<SharedFunctionInfo> shared = info->shared_info();
907  shared->EnableDeoptimizationSupport(*unoptimized.code());
908  shared->set_feedback_vector(*unoptimized.feedback_vector());
909 
910  // The scope info might not have been set if a lazily compiled
911  // function is inlined before being called for the first time.
912  if (shared->scope_info() == ScopeInfo::Empty(info->isolate())) {
913  Handle<ScopeInfo> target_scope_info =
914  ScopeInfo::Create(info->scope(), info->zone());
915  shared->set_scope_info(*target_scope_info);
916  }
917 
918  // The existing unoptimized code was replaced with the new one.
919  RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, &unoptimized, shared);
920  }
921  return true;
922 }
923 
924 
925 // Compile full code for debugging. This code will have debug break slots
926 // and deoptimization information. Deoptimization information is required
927 // in case that an optimized version of this function is still activated on
928 // the stack. It will also make sure that the full code is compiled with
929 // the same flags as the previous version, that is flags which can change
930 // the code generated. The current method of mapping from already compiled
931 // full code without debug break slots to full code with debug break slots
932 // depends on the generated code being otherwise exactly the same.
933 // If compilation fails, just keep the existing code.
934 MaybeHandle<Code> Compiler::GetDebugCode(Handle<JSFunction> function) {
935  CompilationInfoWithZone info(function);
936  Isolate* isolate = info.isolate();
937  VMState<COMPILER> state(isolate);
938 
939  info.MarkAsDebug();
940 
941  DCHECK(!isolate->has_pending_exception());
942  Handle<Code> old_code(function->shared()->code());
943  DCHECK(old_code->kind() == Code::FUNCTION);
944  DCHECK(!old_code->has_debug_break_slots());
945 
947  if (old_code->is_compiled_optimizable()) {
949  } else {
950  info.MarkNonOptimizable();
951  }
952  MaybeHandle<Code> maybe_new_code = GetUnoptimizedCodeCommon(&info);
953  Handle<Code> new_code;
954  if (!maybe_new_code.ToHandle(&new_code)) {
955  isolate->clear_pending_exception();
956  } else {
957  DCHECK_EQ(old_code->is_compiled_optimizable(),
958  new_code->is_compiled_optimizable());
959  }
960  return maybe_new_code;
961 }
962 
963 
964 void Compiler::CompileForLiveEdit(Handle<Script> script) {
965  // TODO(635): support extensions.
966  CompilationInfoWithZone info(script);
967  PostponeInterruptsScope postpone(info.isolate());
968  VMState<COMPILER> state(info.isolate());
969 
970  info.MarkAsGlobal();
971  if (!Parser::Parse(&info)) return;
972 
973  LiveEditFunctionTracker tracker(info.isolate(), info.function());
974  if (!CompileUnoptimizedCode(&info)) return;
975  if (!info.shared_info().is_null()) {
976  Handle<ScopeInfo> scope_info = ScopeInfo::Create(info.scope(),
977  info.zone());
978  info.shared_info()->set_scope_info(*scope_info);
979  }
980  tracker.RecordRootFunctionInfo(info.code());
981 }
982 
983 
984 static Handle<SharedFunctionInfo> CompileToplevel(CompilationInfo* info) {
985  Isolate* isolate = info->isolate();
986  PostponeInterruptsScope postpone(isolate);
987  DCHECK(!isolate->native_context().is_null());
988  Handle<Script> script = info->script();
989 
990  // TODO(svenpanne) Obscure place for this, perhaps move to OnBeforeCompile?
991  FixedArray* array = isolate->native_context()->embedder_data();
992  script->set_context_data(array->get(0));
993 
994  isolate->debug()->OnBeforeCompile(script);
995 
996  DCHECK(info->is_eval() || info->is_global());
997 
998  info->MarkAsToplevel();
999 
1001 
1002  { VMState<COMPILER> state(info->isolate());
1003  if (info->function() == NULL) {
1004  // Parse the script if needed (if it's already parsed, function() is
1005  // non-NULL).
1006  bool parse_allow_lazy =
1008  String::cast(script->source())->length() >
1009  FLAG_min_preparse_length) &&
1011 
1012  if (!parse_allow_lazy &&
1015  // We are going to parse eagerly, but we either 1) have cached data
1016  // produced by lazy parsing or 2) are asked to generate cached data.
1017  // Eager parsing cannot benefit from cached data, and producing cached
1018  // data while parsing eagerly is not implemented.
1020  }
1021  if (!Parser::Parse(info, parse_allow_lazy)) {
1023  }
1024  }
1025 
1026  FunctionLiteral* lit = info->function();
1027  LiveEditFunctionTracker live_edit_tracker(isolate, lit);
1028 
1029  // Measure how long it takes to do the compilation; only take the
1030  // rest of the function into account to avoid overlap with the
1031  // parsing statistics.
1032  HistogramTimer* rate = info->is_eval()
1033  ? info->isolate()->counters()->compile_eval()
1034  : info->isolate()->counters()->compile();
1035  HistogramTimerScope timer(rate);
1036 
1037  // Compile the code.
1038  if (!CompileUnoptimizedCode(info)) {
1040  }
1041 
1042  // Allocate function.
1043  DCHECK(!info->code().is_null());
1044  result = isolate->factory()->NewSharedFunctionInfo(
1045  lit->name(), lit->materialized_literal_count(), lit->kind(),
1046  info->code(), ScopeInfo::Create(info->scope(), info->zone()),
1047  info->feedback_vector());
1048 
1049  DCHECK_EQ(RelocInfo::kNoPosition, lit->function_token_position());
1050  SetFunctionInfo(result, lit, true, script);
1051 
1052  Handle<String> script_name = script->name()->IsString()
1053  ? Handle<String>(String::cast(script->name()))
1054  : isolate->factory()->empty_string();
1055  Logger::LogEventsAndTags log_tag = info->is_eval()
1056  ? Logger::EVAL_TAG
1057  : Logger::ToNativeByScript(Logger::SCRIPT_TAG, *script);
1058 
1059  PROFILE(isolate, CodeCreateEvent(
1060  log_tag, *info->code(), *result, info, *script_name));
1061  GDBJIT(AddCode(script_name, script, info->code(), info));
1062 
1063  // Hint to the runtime system used when allocating space for initial
1064  // property space by setting the expected number of properties for
1065  // the instances of the function.
1066  SetExpectedNofPropertiesFromEstimate(result,
1067  lit->expected_property_count());
1068 
1069  if (!script.is_null())
1070  script->set_compilation_state(Script::COMPILATION_STATE_COMPILED);
1071 
1072  live_edit_tracker.RecordFunctionInfo(result, lit, info->zone());
1073  }
1074 
1075  isolate->debug()->OnAfterCompile(script);
1076 
1077  return result;
1078 }
1079 
1080 
1081 MaybeHandle<JSFunction> Compiler::GetFunctionFromEval(
1082  Handle<String> source,
1083  Handle<Context> context,
1084  StrictMode strict_mode,
1085  ParseRestriction restriction,
1086  int scope_position) {
1087  Isolate* isolate = source->GetIsolate();
1088  int source_length = source->length();
1089  isolate->counters()->total_eval_size()->Increment(source_length);
1090  isolate->counters()->total_compile_size()->Increment(source_length);
1091 
1092  CompilationCache* compilation_cache = isolate->compilation_cache();
1093  MaybeHandle<SharedFunctionInfo> maybe_shared_info =
1094  compilation_cache->LookupEval(source, context, strict_mode,
1095  scope_position);
1096  Handle<SharedFunctionInfo> shared_info;
1097 
1098  if (!maybe_shared_info.ToHandle(&shared_info)) {
1099  Handle<Script> script = isolate->factory()->NewScript(source);
1100  CompilationInfoWithZone info(script);
1101  info.MarkAsEval();
1102  if (context->IsNativeContext()) info.MarkAsGlobal();
1103  info.SetStrictMode(strict_mode);
1104  info.SetParseRestriction(restriction);
1105  info.SetContext(context);
1106 
1107  Debug::RecordEvalCaller(script);
1108 
1109  shared_info = CompileToplevel(&info);
1110 
1111  if (shared_info.is_null()) {
1112  return MaybeHandle<JSFunction>();
1113  } else {
1114  // Explicitly disable optimization for eval code. We're not yet prepared
1115  // to handle eval-code in the optimizing compiler.
1116  shared_info->DisableOptimization(kEval);
1117 
1118  // If caller is strict mode, the result must be in strict mode as well.
1119  DCHECK(strict_mode == SLOPPY || shared_info->strict_mode() == STRICT);
1120  if (!shared_info->dont_cache()) {
1121  compilation_cache->PutEval(
1122  source, context, shared_info, scope_position);
1123  }
1124  }
1125  } else if (shared_info->ic_age() != isolate->heap()->global_ic_age()) {
1126  shared_info->ResetForNewContext(isolate->heap()->global_ic_age());
1127  }
1128 
1129  return isolate->factory()->NewFunctionFromSharedFunctionInfo(
1130  shared_info, context, NOT_TENURED);
1131 }
1132 
1133 
1134 Handle<SharedFunctionInfo> Compiler::CompileScript(
1135  Handle<String> source, Handle<Object> script_name, int line_offset,
1136  int column_offset, bool is_shared_cross_origin, Handle<Context> context,
1137  v8::Extension* extension, ScriptData** cached_data,
1138  ScriptCompiler::CompileOptions compile_options, NativesFlag natives) {
1139  if (compile_options == ScriptCompiler::kNoCompileOptions) {
1140  cached_data = NULL;
1141  } else if (compile_options == ScriptCompiler::kProduceParserCache ||
1142  compile_options == ScriptCompiler::kProduceCodeCache) {
1143  DCHECK(cached_data && !*cached_data);
1144  DCHECK(extension == NULL);
1145  } else {
1146  DCHECK(compile_options == ScriptCompiler::kConsumeParserCache ||
1147  compile_options == ScriptCompiler::kConsumeCodeCache);
1148  DCHECK(cached_data && *cached_data);
1149  DCHECK(extension == NULL);
1150  }
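 // In short: the produce* options expect an empty cached_data out-parameter
 // to fill in, the consume* options expect previously produced data, and
 // extensions never use cached data at all.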
1151  Isolate* isolate = source->GetIsolate();
1152  int source_length = source->length();
1153  isolate->counters()->total_load_size()->Increment(source_length);
1154  isolate->counters()->total_compile_size()->Increment(source_length);
1155 
1156  CompilationCache* compilation_cache = isolate->compilation_cache();
1157 
1158  // Do a lookup in the compilation cache but not for extensions.
1159  MaybeHandle<SharedFunctionInfo> maybe_result;
1160  Handle<SharedFunctionInfo> result;
1161  if (extension == NULL) {
1162  if (FLAG_serialize_toplevel &&
1163  compile_options == ScriptCompiler::kConsumeCodeCache &&
1164  !isolate->debug()->is_loaded()) {
1165  HistogramTimerScope timer(isolate->counters()->compile_deserialize());
1166  return CodeSerializer::Deserialize(isolate, *cached_data, source);
1167  } else {
1168  maybe_result = compilation_cache->LookupScript(
1169  source, script_name, line_offset, column_offset,
1170  is_shared_cross_origin, context);
1171  }
1172  }
1173 
1174  base::ElapsedTimer timer;
1175  if (FLAG_profile_deserialization && FLAG_serialize_toplevel &&
1176  compile_options == ScriptCompiler::kProduceCodeCache) {
1177  timer.Start();
1178  }
1179 
1180  if (!maybe_result.ToHandle(&result)) {
1181  // No cache entry found. Compile the script.
1182 
1183  // Create a script object describing the script to be compiled.
1184  Handle<Script> script = isolate->factory()->NewScript(source);
1185  if (natives == NATIVES_CODE) {
1186  script->set_type(Smi::FromInt(Script::TYPE_NATIVE));
1187  }
1188  if (!script_name.is_null()) {
1189  script->set_name(*script_name);
1190  script->set_line_offset(Smi::FromInt(line_offset));
1191  script->set_column_offset(Smi::FromInt(column_offset));
1192  }
1193  script->set_is_shared_cross_origin(is_shared_cross_origin);
1194 
1195  // Compile the function and add it to the cache.
1196  CompilationInfoWithZone info(script);
1197  info.MarkAsGlobal();
1198  info.SetCachedData(cached_data, compile_options);
1199  info.SetExtension(extension);
1200  info.SetContext(context);
1201  if (FLAG_serialize_toplevel &&
1202  compile_options == ScriptCompiler::kProduceCodeCache) {
1203  info.PrepareForSerializing();
1204  }
1205  if (FLAG_use_strict) info.SetStrictMode(STRICT);
1206 
1207  result = CompileToplevel(&info);
1208  if (extension == NULL && !result.is_null() && !result->dont_cache()) {
1209  compilation_cache->PutScript(source, context, result);
1210  if (FLAG_serialize_toplevel &&
1211  compile_options == ScriptCompiler::kProduceCodeCache) {
1212  HistogramTimerScope histogram_timer(
1213  isolate->counters()->compile_serialize());
1214  *cached_data = CodeSerializer::Serialize(isolate, result, source);
1215  if (FLAG_profile_deserialization) {
1216  PrintF("[Compiling and serializing %d bytes took %0.3f ms]\n",
1217  (*cached_data)->length(), timer.Elapsed().InMillisecondsF());
1218  }
1219  }
1220  }
1221 
1222  if (result.is_null()) isolate->ReportPendingMessages();
1223  } else if (result->ic_age() != isolate->heap()->global_ic_age()) {
1224  result->ResetForNewContext(isolate->heap()->global_ic_age());
1225  }
1226  return result;
1227 }
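From the embedder's side, the cached_data / compile_options handshake handled above is driven through the public v8::ScriptCompiler API in v8.h (kProduceCodeCache and kConsumeCodeCache there map onto the options checked at the top of Compiler::CompileScript). A rough sketch, assuming an entered context and deferring to the v8.h of this V8 version for the exact Source/Compile signatures:

  #include <stdint.h>
  #include <vector>
  #include <v8.h>

  // Compile |source_string| and copy out the code cache produced as a side
  // effect, so it can be handed back later with kConsumeCodeCache.
  std::vector<uint8_t> CompileAndTakeCodeCache(v8::Isolate* isolate,
                                               v8::Local<v8::String> source_string) {
    v8::ScriptCompiler::Source source(source_string);
    v8::ScriptCompiler::Compile(isolate, &source,
                                v8::ScriptCompiler::kProduceCodeCache);
    const v8::ScriptCompiler::CachedData* data = source.GetCachedData();
    if (data == NULL) return std::vector<uint8_t>();
    return std::vector<uint8_t>(data->data, data->data + data->length);
  }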
1228 
1229 
1230 Handle<SharedFunctionInfo> Compiler::CompileStreamedScript(
1231  CompilationInfo* info, int source_length) {
1232  Isolate* isolate = info->isolate();
1233  isolate->counters()->total_load_size()->Increment(source_length);
1234  isolate->counters()->total_compile_size()->Increment(source_length);
1235 
1236  if (FLAG_use_strict) info->SetStrictMode(STRICT);
1237  // TODO(marja): FLAG_serialize_toplevel is not honoured and won't be; when the
1238  // real code caching lands, streaming needs to be adapted to use it.
1239  return CompileToplevel(info);
1240 }
1241 
1242 
1243 Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(
1244  FunctionLiteral* literal, Handle<Script> script,
1245  CompilationInfo* outer_info) {
1246  // Precondition: code has been parsed and scopes have been analyzed.
1247  CompilationInfoWithZone info(script);
1248  info.SetFunction(literal);
1249  info.PrepareForCompilation(literal->scope());
1250  info.SetStrictMode(literal->scope()->strict_mode());
1251  if (outer_info->will_serialize()) info.PrepareForSerializing();
1252 
1253  Isolate* isolate = info.isolate();
1254  Factory* factory = isolate->factory();
1255  LiveEditFunctionTracker live_edit_tracker(isolate, literal);
1256  // Determine if the function can be lazily compiled. This is necessary to
1257  // allow some of our builtin JS files to be lazily compiled. These
1258  // builtins cannot be handled lazily by the parser, since we have to know
1259  // if a function uses the special natives syntax, which is something the
1260  // parser records.
1261  // If the debugger requests compilation for break points, we cannot be
1262  // aggressive about lazy compilation, because it might trigger compilation
1263  // of functions without an outer context when setting a breakpoint through
1264  // Debug::FindSharedFunctionInfoInScript.
1265  bool allow_lazy_without_ctx = literal->AllowsLazyCompilationWithoutContext();
1266  bool allow_lazy = literal->AllowsLazyCompilation() &&
1267  !DebuggerWantsEagerCompilation(&info, allow_lazy_without_ctx);
1268 
1269 
1270  if (outer_info->is_toplevel() && outer_info->will_serialize()) {
1271  // Make sure that if the toplevel code is going to be serialized,
1272  // the inner function is allowed to be compiled lazily.
1273  DCHECK(allow_lazy);
1274  }
1275 
1276  // Generate code
1277  Handle<ScopeInfo> scope_info;
1278  if (FLAG_lazy && allow_lazy && !literal->is_parenthesized()) {
1279  Handle<Code> code = isolate->builtins()->CompileLazy();
1280  info.SetCode(code);
1281  scope_info = Handle<ScopeInfo>(ScopeInfo::Empty(isolate));
1282  } else if (FullCodeGenerator::MakeCode(&info)) {
1283  DCHECK(!info.code().is_null());
1284  scope_info = ScopeInfo::Create(info.scope(), info.zone());
1285  } else {
1286  return Handle<SharedFunctionInfo>::null();
1287  }
1288 
1289  // Create a shared function info object.
1290  Handle<SharedFunctionInfo> result = factory->NewSharedFunctionInfo(
1291  literal->name(), literal->materialized_literal_count(), literal->kind(),
1292  info.code(), scope_info, info.feedback_vector());
1293  SetFunctionInfo(result, literal, false, script);
1294  RecordFunctionCompilation(Logger::FUNCTION_TAG, &info, result);
1295  result->set_allows_lazy_compilation(allow_lazy);
1296  result->set_allows_lazy_compilation_without_context(allow_lazy_without_ctx);
1297 
1298  // Set the expected number of properties for instances and return
1299  // the resulting function.
1300  SetExpectedNofPropertiesFromEstimate(result,
1301  literal->expected_property_count());
1302  live_edit_tracker.RecordFunctionInfo(result, literal, info.zone());
1303  return result;
1304 }
1305 
1306 
1307 MaybeHandle<Code> Compiler::GetOptimizedCode(Handle<JSFunction> function,
1308  Handle<Code> current_code,
1309  ConcurrencyMode mode,
1310  BailoutId osr_ast_id) {
1311  Handle<Code> cached_code;
1312  if (GetCodeFromOptimizedCodeMap(
1313  function, osr_ast_id).ToHandle(&cached_code)) {
1314  return cached_code;
1315  }
1316 
1318  Isolate* isolate = info->isolate();
1319  DCHECK(AllowCompilation::IsAllowed(isolate));
1320  VMState<COMPILER> state(isolate);
1321  DCHECK(!isolate->has_pending_exception());
1322  PostponeInterruptsScope postpone(isolate);
1323 
1324  Handle<SharedFunctionInfo> shared = info->shared_info();
1325  if (shared->code()->kind() != Code::FUNCTION ||
1326  ScopeInfo::Empty(isolate) == shared->scope_info()) {
1327  // The function was never compiled. Compile it unoptimized first.
1328  // TODO(titzer): reuse the AST and scope info from this compile.
1329  CompilationInfoWithZone nested(function);
1330  nested.EnableDeoptimizationSupport();
1331  if (!GetUnoptimizedCodeCommon(&nested).ToHandle(&current_code)) {
1332  return MaybeHandle<Code>();
1333  }
1334  shared->ReplaceCode(*current_code);
1335  }
1336  current_code->set_profiler_ticks(0);
1337 
1338  info->SetOptimizing(osr_ast_id, current_code);
1339 
1340  if (mode == CONCURRENT) {
1341  if (GetOptimizedCodeLater(info.get())) {
1342  info.Detach(); // The background recompile job owns this now.
1343  return isolate->builtins()->InOptimizationQueue();
1344  }
1345  } else {
1346  if (GetOptimizedCodeNow(info.get())) return info->code();
1347  }
1348 
1349  if (isolate->has_pending_exception()) isolate->clear_pending_exception();
1350  return MaybeHandle<Code>();
1351 }
1352 
1353 
1354 Handle<Code> Compiler::GetConcurrentlyOptimizedCode(OptimizedCompileJob* job) {
1355  // Take ownership of compilation info. Deleting compilation info
1356  // also tears down the zone and the recompile job.
1357  SmartPointer<CompilationInfo> info(job->info());
1358  Isolate* isolate = info->isolate();
1359 
1360  VMState<COMPILER> state(isolate);
1361  TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());
1362 
1363  Handle<SharedFunctionInfo> shared = info->shared_info();
1364  shared->code()->set_profiler_ticks(0);
1365 
1366  // 1) Optimization on the concurrent thread may have failed.
1367  // 2) The function may have already been optimized by OSR. Simply continue.
1368  // Except when OSR already disabled optimization for some reason.
1369  // 3) The code may have already been invalidated due to dependency change.
1370  // 4) Debugger may have been activated.
1371  // 5) Code generation may have failed.
1373  if (shared->optimization_disabled()) {
1374  job->RetryOptimization(kOptimizationDisabled);
1375  } else if (info->HasAbortedDueToDependencyChange()) {
1376  job->RetryOptimization(kBailedOutDueToDependencyChange);
1377  } else if (isolate->DebuggerHasBreakPoints()) {
1378  job->RetryOptimization(kDebuggerHasBreakPoints);
1379  } else if (job->GenerateCode() == OptimizedCompileJob::SUCCEEDED) {
1380  RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, info.get(), shared);
1381  if (info->shared_info()->SearchOptimizedCodeMap(
1382  info->context()->native_context(), info->osr_ast_id()) == -1) {
1384  }
1385  if (FLAG_trace_opt) {
1386  PrintF("[completed optimizing ");
1387  info->closure()->ShortPrint();
1388  PrintF("]\n");
1389  }
1390  return Handle<Code>(*info->code());
1391  }
1392  }
1393 
1395  if (FLAG_trace_opt) {
1396  PrintF("[aborted optimizing ");
1397  info->closure()->ShortPrint();
1398  PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason()));
1399  }
1400  return Handle<Code>::null();
1401 }
1402 
1403 
1404 bool Compiler::DebuggerWantsEagerCompilation(CompilationInfo* info,
1405  bool allow_lazy_without_ctx) {
1406  return LiveEditFunctionTracker::IsActive(info->isolate()) ||
1407  (info->isolate()->DebuggerHasBreakPoints() && !allow_lazy_without_ctx);
1408 }
1409 
1410 
1411 CompilationPhase::CompilationPhase(const char* name, CompilationInfo* info)
1412  : name_(name), info_(info), zone_(info->isolate()) {
1413  if (FLAG_hydrogen_stats) {
1414  info_zone_start_allocation_size_ = info->zone()->allocation_size();
1415  timer_.Start();
1416  }
1417 }
1418 
1419 
1420 CompilationPhase::~CompilationPhase() {
1421  if (FLAG_hydrogen_stats) {
1422  unsigned size = zone()->allocation_size();
1423  size += info_->zone()->allocation_size() - info_zone_start_allocation_size_;
1424  isolate()->GetHStatistics()->SaveTiming(name_, timer_.Elapsed(), size);
1425  }
1426 }
1427 
1428 
1429 bool CompilationPhase::ShouldProduceTraceOutput() const {
1430  // Trace if the appropriate trace flag is set and the phase name's first
1431  // character is in the FLAG_trace_phase command line parameter.
1432  AllowHandleDereference allow_deref;
1433  bool tracing_on = info()->IsStub()
1434  ? FLAG_trace_hydrogen_stubs
1435  : (FLAG_trace_hydrogen &&
1436  info()->closure()->PassesFilter(FLAG_trace_hydrogen_filter));
1437  return (tracing_on &&
1438  base::OS::StrChr(const_cast<char*>(FLAG_trace_phase), name_[0]) != NULL);
1439 }
1440 
1441 } } // namespace v8::internal
#define STATEMENT_NODE_LIST(V)
Definition: ast.h:57
#define EXPRESSION_NODE_LIST(V)
Definition: ast.h:77
#define MODULE_NODE_LIST(V)
Definition: ast.h:51
#define DECLARATION_NODE_LIST(V)
Definition: ast.h:44
Ignore.
Definition: v8.h:4008
For streaming incomplete script data to V8.
Definition: v8.h:1096
@ kProduceParserCache
Definition: v8.h:1161
@ kConsumeParserCache
Definition: v8.h:1162
@ kProduceCodeCache
Definition: v8.h:1163
@ kNoCompileOptions
Definition: v8.h:1160
@ kConsumeCodeCache
Definition: v8.h:1164
static char * StrChr(char *str, int c)
static void Run(CompilationInfo *info)
Definition: typing.cc:36
bool IsNone() const
Definition: utils.h:966
int ToInt() const
Definition: utils.h:958
static BailoutId None()
Definition: utils.h:960
static ScriptData * Serialize(Isolate *isolate, Handle< SharedFunctionInfo > info, Handle< String > source)
Definition: serialize.cc:1836
static Handle< SharedFunctionInfo > Deserialize(Isolate *isolate, ScriptData *data, Handle< String > source)
Definition: serialize.cc:2024
static Flags ComputeFlags(Kind kind, InlineCacheState ic_state=UNINITIALIZED, ExtraICState extra_ic_state=kNoExtraICState, StubType type=NORMAL, CacheHolderFlag holder=kCacheOnReceiver)
Definition: objects-inl.h:4954
static const int kPrologueOffsetNotSet
Definition: objects.h:4973
void PutEval(Handle< String > source, Handle< Context > context, Handle< SharedFunctionInfo > function_info, int scope_position)
MaybeHandle< SharedFunctionInfo > LookupScript(Handle< String > source, Handle< Object > name, int line_offset, int column_offset, bool is_shared_cross_origin, Handle< Context > context)
MaybeHandle< SharedFunctionInfo > LookupEval(Handle< String > source, Handle< Context > context, StrictMode strict_mode, int scope_position)
void PutScript(Handle< String > source, Handle< Context > context, Handle< SharedFunctionInfo > function_info)
v8::Extension * extension_
Definition: compiler.h:462
Handle< Foreign > object_wrapper_
Definition: compiler.h:510
bool is_context_specializing() const
Definition: compiler.h:200
void SetCode(Handle< Code > code)
Definition: compiler.h:242
List< OffsetRange > * no_frame_ranges_
Definition: compiler.h:501
BailoutId osr_ast_id() const
Definition: compiler.h:128
void SetCachedData(ScriptData **cached_data, ScriptCompiler::CompileOptions compile_options)
Definition: compiler.h:247
BailoutReason bailout_reason() const
Definition: compiler.h:339
bool GetFlag(Flag flag) const
Definition: compiler.h:437
ScriptCompiler::CompileOptions compile_options_
Definition: compiler.h:464
ZoneList< Handle< HeapObject > > * dependencies_[DependentCode::kGroupCount]
Definition: compiler.h:487
Handle< Script > script_
Definition: compiler.h:457
HydrogenCodeStub * code_stub() const
Definition: compiler.h:115
Handle< Code > code() const
Definition: compiler.h:110
void PrepareForCompilation(Scope *scope)
Definition: compiler.cc:276
ScriptCompiler::CompileOptions compile_options() const
Definition: compiler.h:118
Code::Flags flags() const
Definition: compiler.cc:252
BailoutReason bailout_reason_
Definition: compiler.h:497
void SetExtension(v8::Extension *extension)
Definition: compiler.h:243
Handle< Script > script() const
Definition: compiler.h:113
Handle< Context > context() const
Definition: compiler.h:127
HydrogenCodeStub * code_stub_
Definition: compiler.h:450
Handle< TypeFeedbackVector > feedback_vector_
Definition: compiler.h:471
Handle< SharedFunctionInfo > shared_info_
Definition: compiler.h:456
void SetFunction(FunctionLiteral *literal)
Definition: compiler.h:230
void SetOptimizing(BailoutId osr_ast_id, Handle< Code > unoptimized)
Definition: compiler.h:285
DeferredHandles * deferred_handles_
Definition: compiler.h:485
void SetStrictMode(StrictMode strict_mode)
Definition: compiler.h:156
void set_this_has_uses(bool has_no_uses)
Definition: compiler.h:150
void SetContext(Handle< Context > context)
Definition: compiler.h:257
void CommitDependencies(Handle< Code > code)
Definition: compiler.cc:199
FunctionLiteral * function() const
Definition: compiler.h:107
ScriptData ** cached_data_
Definition: compiler.h:463
Handle< JSFunction > closure() const
Definition: compiler.h:111
StrictMode strict_mode() const
Definition: compiler.h:104
void SetParseRestriction(ParseRestriction restriction)
Definition: compiler.h:221
Isolate * isolate() const
Definition: compiler.h:96
Handle< TypeFeedbackVector > feedback_vector() const
Definition: compiler.h:239
Handle< SharedFunctionInfo > shared_info() const
Definition: compiler.h:112
void Initialize(Isolate *isolate, Mode mode, Zone *zone)
Definition: compiler.cc:134
AstValueFactory * ast_value_factory_
Definition: compiler.h:514
FunctionLiteral * function_
Definition: compiler.h:443
CompilationInfo(Handle< JSFunction > closure, Zone *zone)
Definition: compiler.cc:88
static MUST_USE_RESULT MaybeHandle< JSFunction > GetFunctionFromEval(Handle< String > source, Handle< Context > context, StrictMode strict_mode, ParseRestriction restriction, int scope_position)
Definition: compiler.cc:1081
static Handle< SharedFunctionInfo > CompileStreamedScript(CompilationInfo *info, int source_length)
Definition: compiler.cc:1230
static bool EnsureDeoptimizationSupport(CompilationInfo *info)
Definition: compiler.cc:895
static MUST_USE_RESULT MaybeHandle< Code > GetDebugCode(Handle< JSFunction > function)
Definition: compiler.cc:934
static MUST_USE_RESULT MaybeHandle< Code > GetOptimizedCode(Handle< JSFunction > function, Handle< Code > current_code, ConcurrencyMode mode, BailoutId osr_ast_id=BailoutId::None())
Definition: compiler.cc:1307
static MUST_USE_RESULT MaybeHandle< Code > GetLazyCode(Handle< JSFunction > function)
Definition: compiler.cc:821
static bool DebuggerWantsEagerCompilation(CompilationInfo *info, bool allow_lazy_without_ctx=false)
Definition: compiler.cc:1404
static Handle< Code > GetConcurrentlyOptimizedCode(OptimizedCompileJob *job)
Definition: compiler.cc:1354
static bool EnsureCompiled(Handle< JSFunction > function, ClearExceptionFlag flag)
Definition: compiler.cc:876
static MUST_USE_RESULT MaybeHandle< Code > GetUnoptimizedCode(Handle< JSFunction > function)
Definition: compiler.cc:805
static void CompileForLiveEdit(Handle< Script > script)
Definition: compiler.cc:964
static Handle< SharedFunctionInfo > CompileScript(Handle< String > source, Handle< Object > script_name, int line_offset, int column_offset, bool is_shared_cross_origin, Handle< Context > context, v8::Extension *extension, ScriptData **cached_data, ScriptCompiler::CompileOptions compile_options, NativesFlag is_natives_code)
Definition: compiler.cc:1134
static Handle< SharedFunctionInfo > BuildFunctionInfo(FunctionLiteral *node, Handle< Script > script, CompilationInfo *outer)
Definition: compiler.cc:1243
static void RecordEvalCaller(Handle< Script > script)
Definition: debug.cc:2419
void OnAfterCompile(Handle< Script > script)
Definition: debug.cc:2621
bool is_loaded() const
Definition: debug.h:466
void OnBeforeCompile(Handle< Script > script)
Definition: debug.cc:2600
bool is_active() const
Definition: debug.h:465
static const int kGroupCount
Definition: objects.h:5534
static DependentCode * ForObject(Handle< HeapObject > object, DependencyGroup group)
Definition: objects.cc:11397
Object * get(int index)
Definition: objects-inl.h:2165
static bool MakeCode(CompilationInfo *info)
HOptimizedGraphBuilderWithPositions(CompilationInfo *info)
Definition: compiler.cc:291
void Bailout(BailoutReason reason)
Definition: hydrogen.cc:4212
bool is_null() const
Definition: handles.h:124
static Handle< T > null()
Definition: handles.h:123
int global_ic_age()
Definition: heap.h:1268
bool DebuggerHasBreakPoints()
Definition: isolate-inl.h:28
Handle< Context > native_context()
Definition: isolate.cc:1339
Builtins * builtins()
Definition: isolate.h:947
HTracer * GetHTracer()
Definition: isolate.cc:2148
Counters * counters()
Definition: isolate.h:857
void ReportPendingMessages()
Definition: isolate.cc:1194
OptimizingCompilerThread * optimizing_compiler_thread()
Definition: isolate.h:1059
HStatistics * GetHStatistics()
Definition: isolate.cc:2136
bool use_crankshaft() const
Definition: isolate.cc:2174
void clear_pending_exception()
Definition: isolate.h:572
CompilationCache * compilation_cache()
Definition: isolate.h:865
CpuProfiler * cpu_profiler() const
Definition: isolate.h:971
Object * StackOverflow()
Definition: isolate.cc:773
Logger * logger()
Definition: isolate.h:866
Factory * factory()
Definition: isolate.h:982
bool has_pending_exception()
Definition: isolate.h:581
Handle< Code > Codegen()
Definition: lithium.cc:461
static LChunk * NewChunk(HGraph *graph)
Definition: lithium.cc:434
static const int kMinFixedSlotIndex
Definition: lithium.h:178
static const int kMaxFixedSlotIndex
Definition: lithium.h:177
T & at(int i) const
Definition: list.h:69
void RecordRootFunctionInfo(Handle< Code > code)
Definition: liveedit.cc:2078
static bool IsActive(Isolate *isolate)
Definition: liveedit.cc:2083
void RecordFunctionInfo(Handle< SharedFunctionInfo > info, FunctionLiteral *lit, Zone *zone)
Definition: liveedit.cc:2068
bool is_logging_code_events()
Definition: log.h:315
MUST_USE_RESULT Status OptimizeGraph()
Definition: compiler.cc:441
MUST_USE_RESULT Status GenerateCode()
Definition: compiler.cc:468
Status AbortOptimization(BailoutReason reason)
Definition: compiler.h:609
HOptimizedGraphBuilder * graph_builder_
Definition: compiler.h:623
CompilationInfo * info() const
Definition: compiler.h:601
base::TimeDelta time_taken_to_create_graph_
Definition: compiler.h:626
Status RetryOptimization(BailoutReason reason)
Definition: compiler.h:604
MUST_USE_RESULT Status SetLastStatus(Status status)
Definition: compiler.h:632
base::TimeDelta time_taken_to_optimize_
Definition: compiler.h:627
MUST_USE_RESULT Status CreateGraph()
Definition: compiler.cc:325
base::TimeDelta time_taken_to_codegen_
Definition: compiler.h:628
void QueueForOptimization(OptimizedCompileJob *optimizing_compiler)
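OptimizedCompileJob splits optimizing compilation into three MUST_USE_RESULT phases whose Status results gate each other; GetConcurrentlyOptimizedCode and QueueForOptimization run the same phases split across threads. A minimal sketch of the synchronous sequence, assuming the Status enumerator SUCCEEDED and a CompilationInfo::code() accessor as in this snapshot; RunJobToCompletion is hypothetical:
  // Sketch: drive one job through graph creation, optimization and codegen.
  static MaybeHandle<Code> RunJobToCompletion(OptimizedCompileJob* job) {
    if (job->CreateGraph() != OptimizedCompileJob::SUCCEEDED ||
        job->OptimizeGraph() != OptimizedCompileJob::SUCCEEDED ||
        job->GenerateCode() != OptimizedCompileJob::SUCCEEDED) {
      return MaybeHandle<Code>();   // empty handle signals failure/bailout
    }
    return job->info()->code();     // code produced by GenerateCode()
  }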
static const int kNoPosition
Definition: assembler.h:317
static bool Rewrite(CompilationInfo *info)
Definition: rewriter.cc:230
static Handle< ScopeInfo > Create(Scope *scope, Zone *zone)
Definition: scopeinfo.cc:16
static ScopeInfo * Empty(Isolate *isolate)
Definition: scopeinfo.cc:132
int num_heap_slots() const
Definition: scopes.h:352
static bool Analyze(CompilationInfo *info)
Definition: scopes.cc:260
int num_parameters() const
Definition: scopes.h:321
bool HasIllegalRedeclaration() const
Definition: scopes.h:198
bool AllowsLazyCompilation() const
Definition: scopes.cc:705
int num_stack_slots() const
Definition: scopes.h:351
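Rewrite() and Analyze() above are the front-end passes run on a parsed CompilationInfo before any code is generated. A hedged sketch of that ordering; the owning classes Rewriter and Scope match the definitions listed (rewriter.cc:230, scopes.cc:260), and RunFrontEnd is hypothetical:
  // Sketch: AST rewriting must precede scope/variable analysis.
  static bool RunFrontEnd(CompilationInfo* info) {
    if (!Rewriter::Rewrite(info)) return false;   // rewrite the parsed AST
    if (!Scope::Analyze(info)) return false;      // resolve/allocate variables
    return true;
  }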
const byte * data_
Definition: compiler.h:55
const byte * data() const
Definition: compiler.h:40
int length() const
Definition: compiler.h:41
ScriptData(const byte *data, int length)
Definition: compiler.cc:35
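ScriptData is the container for the cached byte buffer handed through CompileScript's cached_data parameter. A small usage sketch exercising the accessors above; DumpCachedData is hypothetical:
  // Sketch: wrap a raw buffer and read it back through the accessors.
  static void DumpCachedData(const byte* bytes, int length) {
    ScriptData data(bytes, length);          // compiler.cc:35
    PrintF("cached data: %d bytes at %p\n", data.length(),
           static_cast<const void*>(data.data()));
  }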
static int GetColumnNumber(Handle< Script > script, int code_pos)
Definition: objects.cc:9635
static int GetLineNumber(Handle< Script > script, int code_pos)
Definition: objects.cc:9673
static void AddToOptimizedCodeMap(Handle< SharedFunctionInfo > shared, Handle< Context > native_context, Handle< Code > code, Handle< FixedArray > literals, BailoutId osr_ast_id)
Definition: objects.cc:9184
static Smi * FromInt(int value)
Definition: objects-inl.h:1321
unsigned allocation_size() const
Definition: zone.h:66
Handle< Code > GenerateCode()
Definition: pipeline.cc:166
#define DEF_VISIT(type)
Definition: compiler.cc:315
#define PROFILE(IsolateGetter, Call)
Definition: cpu-profiler.h:181
#define GDBJIT(action)
Definition: gdb-jit.h:51
#define ASSIGN_RETURN_ON_EXCEPTION(isolate, dst, call, T)
Definition: isolate.h:135
#define CHECK_EQ(expected, value)
Definition: logging.h:169
#define DCHECK(condition)
Definition: logging.h:205
#define DCHECK_EQ(v1, v2)
Definition: logging.h:206
#define MUST_USE_RESULT
Definition: macros.h:266
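The macros above follow V8's usual checking conventions: DCHECK and DCHECK_EQ are debug-build-only assertions, CHECK_EQ aborts on mismatch in all build modes, and MUST_USE_RESULT asks the compiler (where supported) to warn when a return value is ignored. A generic illustration; Twice is hypothetical:
  // Sketch: debug-only vs. always-on checks.
  static MUST_USE_RESULT int Twice(int n) {
    int result = n + n;
    DCHECK_EQ(2 * n, result);   // debug-only consistency check
    return result;
  }
  // CHECK_EQ(expected, value) is verified even in release builds:
  //   CHECK_EQ(8, Twice(4));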
static bool CompileOptimizedPrologue(CompilationInfo *info)
Definition: compiler.cc:730
is_toplevel
Definition: objects-inl.h:5431
ClearExceptionFlag
Definition: globals.h:760
@ CLEAR_EXCEPTION
Definition: globals.h:762
static MUST_USE_RESULT MaybeHandle< Code > GetCodeFromOptimizedCodeMap(Handle< JSFunction > function, BailoutId osr_ast_id)
Definition: compiler.cc:682
static void SetFunctionInfo(Handle< SharedFunctionInfo > function_info, FunctionLiteral *lit, bool is_toplevel, Handle< Script > script)
Definition: compiler.cc:569
PerThreadAssertScopeDebugOnly< HANDLE_DEREFERENCE_ASSERT, true > AllowHandleDereference
Definition: assert-scope.h:122
static bool GetOptimizedCodeNow(CompilationInfo *info)
Definition: compiler.cc:739
static void RecordFunctionCompilation(Logger::LogEventsAndTags tag, CompilationInfo *info, Handle< SharedFunctionInfo > shared)
Definition: compiler.cc:598
const char * GetBailoutReason(BailoutReason reason)
dependent_code
Definition: objects-inl.h:5353
void PrintF(const char *format,...)
Definition: utils.cc:80
@ kDontCache
Definition: ast.h:158
@ kDontSelfOptimize
Definition: ast.h:156
void SetExpectedNofPropertiesFromEstimate(Handle< SharedFunctionInfo > shared, int estimate)
Definition: compiler.cc:542
flag
Definition: objects-inl.h:5418
static Handle< SharedFunctionInfo > CompileToplevel(CompilationInfo *info)
Definition: compiler.cc:984
static bool GetOptimizedCodeLater(CompilationInfo *info)
Definition: compiler.cc:770
@ NATIVES_CODE
Definition: globals.h:401
bool IsAligned(T value, U alignment)
Definition: utils.h:123
const intptr_t kPointerAlignment
Definition: globals.h:230
void CopyBytes(uint8_t *target, uint8_t *source)
static void InsertCodeIntoOptimizedCodeMap(CompilationInfo *info)
Definition: compiler.cc:709
static bool CompileUnoptimizedCode(CompilationInfo *info)
Definition: compiler.cc:632
static MUST_USE_RESULT MaybeHandle< Code > GetUnoptimizedCodeCommon(CompilationInfo *info)
Definition: compiler.cc:648
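GetCodeFromOptimizedCodeMap, InsertCodeIntoOptimizedCodeMap and the GetOptimizedCodeNow/GetOptimizedCodeLater pair above implement a lookup-compile-insert discipline around the per-SharedFunctionInfo optimized code map (keyed by native context and OSR ast id, see AddToOptimizedCodeMap). These helpers are file-local to compiler.cc, so the following is only a sketch of the pattern, not the actual control flow; the CompilationInfoWithZone setup, CompilationInfo::code(), and MaybeHandle::ToHandle are assumptions about this snapshot:
  // Sketch: consult the optimized code map before compiling, record after.
  static MaybeHandle<Code> GetOrCompileOptimized(Handle<JSFunction> function,
                                                 BailoutId osr_ast_id) {
    Handle<Code> cached;
    if (GetCodeFromOptimizedCodeMap(function, osr_ast_id).ToHandle(&cached)) {
      return cached;                            // fast path: reuse cached code
    }
    CompilationInfoWithZone info(function);     // assumed constructor
    if (!GetOptimizedCodeNow(&info)) return MaybeHandle<Code>();
    InsertCodeIntoOptimizedCodeMap(&info);      // record for later closures
    return info.code();                         // assumed accessor
  }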
Debugger support for the V8 JavaScript engine.
Definition: accessors.cc:20
@ None
Definition: v8.h:2211