V8 Project
deoptimizer.cc
1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/v8.h"
6 
7 #include "src/accessors.h"
8 #include "src/codegen.h"
9 #include "src/deoptimizer.h"
10 #include "src/disasm.h"
11 #include "src/full-codegen.h"
12 #include "src/global-handles.h"
13 #include "src/macro-assembler.h"
14 #include "src/prettyprinter.h"
15 
16 
17 namespace v8 {
18 namespace internal {
19 
20 static MemoryChunk* AllocateCodeChunk(MemoryAllocator* allocator) {
21  return allocator->AllocateChunk(Deoptimizer::GetMaxDeoptTableSize(),
22  base::OS::CommitPageSize(),
23 #if defined(__native_client__)
24  // The Native Client port of V8 uses an interpreter,
25  // so code pages don't need PROT_EXEC.
26  NOT_EXECUTABLE,
27 #else
28  EXECUTABLE,
29 #endif
30  NULL);
31 }
32 
33 
34 DeoptimizerData::DeoptimizerData(MemoryAllocator* allocator)
35  : allocator_(allocator),
36  deoptimized_frame_info_(NULL),
37  current_(NULL) {
38  for (int i = 0; i < Deoptimizer::kBailoutTypesWithCodeEntry; ++i) {
39  deopt_entry_code_entries_[i] = -1;
40  deopt_entry_code_[i] = AllocateCodeChunk(allocator);
41  }
42 }
43 
44 
45 DeoptimizerData::~DeoptimizerData() {
46  for (int i = 0; i < Deoptimizer::kBailoutTypesWithCodeEntry; ++i) {
47  allocator_->Free(deopt_entry_code_[i]);
48  deopt_entry_code_[i] = NULL;
49  }
50 }
51 
52 
53 void DeoptimizerData::Iterate(ObjectVisitor* v) {
54  if (deoptimized_frame_info_ != NULL) {
55  deoptimized_frame_info_->Iterate(v);
56  }
57 }
58 
59 
60 Code* Deoptimizer::FindDeoptimizingCode(Address addr) {
61  if (function_->IsHeapObject()) {
62  // Search all deoptimizing code in the native context of the function.
63  Context* native_context = function_->context()->native_context();
64  Object* element = native_context->DeoptimizedCodeListHead();
65  while (!element->IsUndefined()) {
66  Code* code = Code::cast(element);
67  CHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
68  if (code->contains(addr)) return code;
69  element = code->next_code_link();
70  }
71  }
72  return NULL;
73 }
74 
75 
76 // We rely on this function not causing a GC. It is called from generated code
77 // without having a real stack frame in place.
78 Deoptimizer* Deoptimizer::New(JSFunction* function,
79  BailoutType type,
80  unsigned bailout_id,
81  Address from,
82  int fp_to_sp_delta,
83  Isolate* isolate) {
84  Deoptimizer* deoptimizer = new Deoptimizer(isolate,
85  function,
86  type,
87  bailout_id,
88  from,
89  fp_to_sp_delta,
90  NULL);
92  isolate->deoptimizer_data()->current_ = deoptimizer;
93  return deoptimizer;
94 }
95 
96 
97 // No larger than 2K on all platforms
98 static const int kDeoptTableMaxEpilogueCodeSize = 2 * KB;
99 
100 
101 size_t Deoptimizer::GetMaxDeoptTableSize() {
102  int entries_size =
103  Deoptimizer::kMaxNumberOfEntries * Deoptimizer::table_entry_size_;
104  int commit_page_size = static_cast<int>(base::OS::CommitPageSize());
105  int page_count = ((kDeoptTableMaxEpilogueCodeSize + entries_size - 1) /
106  commit_page_size) + 1;
107  return static_cast<size_t>(commit_page_size * page_count);
108 }
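The value computed above is the fixed-stride entry table (kMaxNumberOfEntries * table_entry_size_) plus at most kDeoptTableMaxEpilogueCodeSize of epilogue code, rounded up to whole commit pages. A standalone sketch of that arithmetic, with hypothetical stand-ins for the architecture-dependent constants:

#include <cassert>
#include <cstddef>

// Hypothetical stand-ins: the real kMaxNumberOfEntries and table_entry_size_
// are architecture-dependent members of Deoptimizer.
static const int kMaxEntriesSketch = 16384;
static const int kEntrySizeSketch = 10;
static const int kMaxEpilogueSketch = 2 * 1024;  // kDeoptTableMaxEpilogueCodeSize

static size_t MaxDeoptTableSizeSketch(int commit_page_size) {
  int entries_size = kMaxEntriesSketch * kEntrySizeSketch;
  // Round the table plus epilogue up to whole commit pages, as above.
  int page_count =
      ((kMaxEpilogueSketch + entries_size - 1) / commit_page_size) + 1;
  return static_cast<size_t>(commit_page_size * page_count);
}

int main() {
  // With a 4 KB commit page: 163840 + 2048 bytes need 41 pages (167936 bytes).
  assert(MaxDeoptTableSizeSketch(4096) == static_cast<size_t>(41 * 4096));
  return 0;
}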
109 
110 
111 Deoptimizer* Deoptimizer::Grab(Isolate* isolate) {
112  Deoptimizer* result = isolate->deoptimizer_data()->current_;
113  CHECK_NE(result, NULL);
114  result->DeleteFrameDescriptions();
115  isolate->deoptimizer_data()->current_ = NULL;
116  return result;
117 }
118 
119 
120 int Deoptimizer::ConvertJSFrameIndexToFrameIndex(int jsframe_index) {
121  if (jsframe_index == 0) return 0;
122 
123  int frame_index = 0;
124  while (jsframe_index >= 0) {
125  FrameDescription* frame = output_[frame_index];
126  if (frame->GetFrameType() == StackFrame::JAVA_SCRIPT) {
127  jsframe_index--;
128  }
129  frame_index++;
130  }
131 
132  return frame_index - 1;
133 }
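The walk above skips non-JavaScript output frames (for example arguments adaptors) until the requested JavaScript frame has been seen, then steps back one slot. A self-contained sketch of the same walk over a plain vector, with a worked example (ConvertJSFrameIndexSketch and FrameType are illustrative names, not V8 API):

#include <cassert>
#include <vector>

enum class FrameType { JAVA_SCRIPT, ARGUMENTS_ADAPTOR, CONSTRUCT };

// Mirrors ConvertJSFrameIndexToFrameIndex above, but over a std::vector
// instead of the deoptimizer's output_ array (illustration only).
static int ConvertJSFrameIndexSketch(const std::vector<FrameType>& output,
                                     int jsframe_index) {
  if (jsframe_index == 0) return 0;
  int frame_index = 0;
  while (jsframe_index >= 0) {
    if (output[frame_index] == FrameType::JAVA_SCRIPT) jsframe_index--;
    frame_index++;
  }
  return frame_index - 1;
}

int main() {
  // JS frame #1 sits behind an arguments adaptor, so it is output frame #2.
  std::vector<FrameType> output = {FrameType::JAVA_SCRIPT,
                                   FrameType::ARGUMENTS_ADAPTOR,
                                   FrameType::JAVA_SCRIPT};
  assert(ConvertJSFrameIndexSketch(output, 1) == 2);
  return 0;
}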
134 
135 
136 DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame(
137  JavaScriptFrame* frame,
138  int jsframe_index,
139  Isolate* isolate) {
140  CHECK(frame->is_optimized());
142 
143  // Get the function and code from the frame.
144  JSFunction* function = frame->function();
145  Code* code = frame->LookupCode();
146 
147  // Locate the deoptimization point in the code. As we are at a call the
148  // return address must be at a place in the code with deoptimization support.
149  SafepointEntry safepoint_entry = code->GetSafepointEntry(frame->pc());
150  int deoptimization_index = safepoint_entry.deoptimization_index();
151  CHECK_NE(deoptimization_index, Safepoint::kNoDeoptimizationIndex);
152 
153  // Always use the actual stack slots when calculating the fp to sp
154  // delta adding two for the function and context.
155  unsigned stack_slots = code->stack_slots();
156  unsigned fp_to_sp_delta = (stack_slots * kPointerSize) +
158 
159  Deoptimizer* deoptimizer = new Deoptimizer(isolate,
160  function,
162  deoptimization_index,
163  frame->pc(),
164  fp_to_sp_delta,
165  code);
166  Address tos = frame->fp() - fp_to_sp_delta;
167  deoptimizer->FillInputFrame(tos, frame);
168 
169  // Calculate the output frames.
170  Deoptimizer::ComputeOutputFrames(deoptimizer);
171 
172  // Create the GC safe output frame information and register it for GC
173  // handling.
174  CHECK_LT(jsframe_index, deoptimizer->jsframe_count());
175 
176  // Convert JS frame index into frame index.
177  int frame_index = deoptimizer->ConvertJSFrameIndexToFrameIndex(jsframe_index);
178 
179  bool has_arguments_adaptor =
180  frame_index > 0 &&
181  deoptimizer->output_[frame_index - 1]->GetFrameType() ==
182  StackFrame::ARGUMENTS_ADAPTOR;
183 
184  int construct_offset = has_arguments_adaptor ? 2 : 1;
185  bool has_construct_stub =
186  frame_index >= construct_offset &&
187  deoptimizer->output_[frame_index - construct_offset]->GetFrameType() ==
188  StackFrame::CONSTRUCT;
189 
190  DeoptimizedFrameInfo* info = new DeoptimizedFrameInfo(deoptimizer,
191  frame_index,
192  has_arguments_adaptor,
193  has_construct_stub);
195 
196  // Get the "simulated" top and size for the requested frame.
197  FrameDescription* parameters_frame =
198  deoptimizer->output_[
199  has_arguments_adaptor ? (frame_index - 1) : frame_index];
200 
201  uint32_t parameters_size = (info->parameters_count() + 1) * kPointerSize;
202  Address parameters_top = reinterpret_cast<Address>(
203  parameters_frame->GetTop() + (parameters_frame->GetFrameSize() -
204  parameters_size));
205 
206  uint32_t expressions_size = info->expression_count() * kPointerSize;
207  Address expressions_top = reinterpret_cast<Address>(
208  deoptimizer->output_[frame_index]->GetTop());
209 
210  // Done with the GC-unsafe frame descriptions. This re-enables allocation.
211  deoptimizer->DeleteFrameDescriptions();
212 
213  // Allocate a heap number for the doubles belonging to this frame.
214  deoptimizer->MaterializeHeapNumbersForDebuggerInspectableFrame(
215  parameters_top, parameters_size, expressions_top, expressions_size, info);
216 
217  // Finished using the deoptimizer instance.
218  delete deoptimizer;
219 
220  return info;
221 }
222 
223 
224 void Deoptimizer::DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
225  Isolate* isolate) {
227  delete info;
229 }
230 
231 
232 void Deoptimizer::GenerateDeoptimizationEntries(MacroAssembler* masm,
233  int count,
234  BailoutType type) {
235  TableEntryGenerator generator(masm, type, count);
236  generator.Generate();
237 }
238 
239 
240 void Deoptimizer::VisitAllOptimizedFunctionsForContext(
241  Context* context, OptimizedFunctionVisitor* visitor) {
242  DisallowHeapAllocation no_allocation;
243 
244  CHECK(context->IsNativeContext());
245 
246  visitor->EnterContext(context);
247 
248  // Visit the list of optimized functions, removing elements that
249  // no longer refer to optimized code.
250  JSFunction* prev = NULL;
251  Object* element = context->OptimizedFunctionsListHead();
252  while (!element->IsUndefined()) {
253  JSFunction* function = JSFunction::cast(element);
254  Object* next = function->next_function_link();
255  if (function->code()->kind() != Code::OPTIMIZED_FUNCTION ||
256  (visitor->VisitFunction(function),
257  function->code()->kind() != Code::OPTIMIZED_FUNCTION)) {
258  // The function no longer refers to optimized code, or the visitor
259  // changed the code to which it refers to no longer be optimized code.
260  // Remove the function from this list.
261  if (prev != NULL) {
262  prev->set_next_function_link(next);
263  } else {
264  context->SetOptimizedFunctionsListHead(next);
265  }
266  // The visitor should not alter the link directly.
267  CHECK_EQ(function->next_function_link(), next);
268  // Set the next function link to undefined to indicate it is no longer
269  // in the optimized functions list.
270  function->set_next_function_link(context->GetHeap()->undefined_value());
271  } else {
272  // The visitor should not alter the link directly.
273  CHECK_EQ(function->next_function_link(), next);
274  // preserve this element.
275  prev = function;
276  }
277  element = next;
278  }
279 
280  visitor->LeaveContext(context);
281 }
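The traversal above drives an OptimizedFunctionVisitor: EnterContext once, VisitFunction for every function whose code is still optimized, then LeaveContext. A minimal visitor sketch in the same style as SelectedCodeUnlinker below (assumes the surrounding V8 internal headers; OptimizedFunctionCounter and its counting body are illustrative only, not part of this file):

class OptimizedFunctionCounter : public OptimizedFunctionVisitor {
 public:
  OptimizedFunctionCounter() : count_(0) {}
  virtual void EnterContext(Context* context) { }  // Don't care.
  virtual void LeaveContext(Context* context) { }  // Don't care.
  virtual void VisitFunction(JSFunction* function) { count_++; }
  int count() const { return count_; }

 private:
  int count_;
};

// Possible usage (sketch):
//   OptimizedFunctionCounter counter;
//   Deoptimizer::VisitAllOptimizedFunctionsForContext(native_context, &counter);
//   // counter.count() now holds the number of functions on the context's
//   // optimized-functions list that still point at optimized code.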
282 
283 
284 void Deoptimizer::VisitAllOptimizedFunctions(
285  Isolate* isolate,
286  OptimizedFunctionVisitor* visitor) {
287  DisallowHeapAllocation no_allocation;
288 
289  // Run through the list of all native contexts.
290  Object* context = isolate->heap()->native_contexts_list();
291  while (!context->IsUndefined()) {
292  VisitAllOptimizedFunctionsForContext(Context::cast(context), visitor);
293  context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
294  }
295 }
296 
297 
298 // Unlink functions referring to code marked for deoptimization, then move
299 // marked code from the optimized code list to the deoptimized code list,
300 // and patch code for lazy deopt.
301 void Deoptimizer::DeoptimizeMarkedCodeForContext(Context* context) {
302  DisallowHeapAllocation no_allocation;
303 
304  // A "closure" that unlinks optimized code that is going to be
305  // deoptimized from the functions that refer to it.
306  class SelectedCodeUnlinker: public OptimizedFunctionVisitor {
307  public:
308  virtual void EnterContext(Context* context) { } // Don't care.
309  virtual void LeaveContext(Context* context) { } // Don't care.
310  virtual void VisitFunction(JSFunction* function) {
311  Code* code = function->code();
312  if (!code->marked_for_deoptimization()) return;
313 
314  // Unlink this function and evict from optimized code map.
315  SharedFunctionInfo* shared = function->shared();
316  function->set_code(shared->code());
317 
318  if (FLAG_trace_deopt) {
319  CodeTracer::Scope scope(code->GetHeap()->isolate()->GetCodeTracer());
320  PrintF(scope.file(), "[deoptimizer unlinked: ");
321  function->PrintName(scope.file());
322  PrintF(scope.file(),
323  " / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function));
324  }
325  }
326  };
327 
328  // Unlink all functions that refer to marked code.
329  SelectedCodeUnlinker unlinker;
330  VisitAllOptimizedFunctionsForContext(context, &unlinker);
331 
332  Isolate* isolate = context->GetHeap()->isolate();
333 #ifdef DEBUG
334  Code* topmost_optimized_code = NULL;
335  bool safe_to_deopt_topmost_optimized_code = false;
336  // Make sure all activations of optimized code can deopt at their current PC.
337  // The topmost optimized code has special handling because it cannot be
338  // deoptimized due to weak object dependency.
339  for (StackFrameIterator it(isolate, isolate->thread_local_top());
340  !it.done(); it.Advance()) {
341  StackFrame::Type type = it.frame()->type();
342  if (type == StackFrame::OPTIMIZED) {
343  Code* code = it.frame()->LookupCode();
344  if (FLAG_trace_deopt) {
345  JSFunction* function =
346  static_cast<OptimizedFrame*>(it.frame())->function();
347  CodeTracer::Scope scope(isolate->GetCodeTracer());
348  PrintF(scope.file(), "[deoptimizer found activation of function: ");
349  function->PrintName(scope.file());
350  PrintF(scope.file(),
351  " / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function));
352  }
353  SafepointEntry safepoint = code->GetSafepointEntry(it.frame()->pc());
354  int deopt_index = safepoint.deoptimization_index();
355  // Turbofan deopt is checked when we are patching addresses on stack.
356  bool turbofanned = code->is_turbofanned() && !FLAG_turbo_deoptimization;
357  bool safe_to_deopt =
358  deopt_index != Safepoint::kNoDeoptimizationIndex || turbofanned;
359  CHECK(topmost_optimized_code == NULL || safe_to_deopt || turbofanned);
360  if (topmost_optimized_code == NULL) {
361  topmost_optimized_code = code;
362  safe_to_deopt_topmost_optimized_code = safe_to_deopt;
363  }
364  }
365  }
366 #endif
367 
368  // Move marked code from the optimized code list to the deoptimized
369  // code list, collecting them into a ZoneList.
370  Zone zone(isolate);
371  ZoneList<Code*> codes(10, &zone);
372 
373  // Walk over all optimized code objects in this native context.
374  Code* prev = NULL;
375  Object* element = context->OptimizedCodeListHead();
376  while (!element->IsUndefined()) {
377  Code* code = Code::cast(element);
378  CHECK_EQ(code->kind(), Code::OPTIMIZED_FUNCTION);
379  Object* next = code->next_code_link();
380 
381  if (code->marked_for_deoptimization() &&
382  (!code->is_turbofanned() || FLAG_turbo_deoptimization)) {
383  // Put the code into the list for later patching.
384  codes.Add(code, &zone);
385 
386  if (prev != NULL) {
387  // Skip this code in the optimized code list.
388  prev->set_next_code_link(next);
389  } else {
390  // There was no previous node, the next node is the new head.
391  context->SetOptimizedCodeListHead(next);
392  }
393 
394  // Move the code to the _deoptimized_ code list.
395  code->set_next_code_link(context->DeoptimizedCodeListHead());
396  context->SetDeoptimizedCodeListHead(code);
397  } else {
398  // Not marked; preserve this element.
399  prev = code;
400  }
401  element = next;
402  }
403 
404  // TODO(titzer): we need a handle scope only because of the macro assembler,
405  // which is only used in EnsureCodeForDeoptimizationEntry.
406  HandleScope scope(isolate);
407 
408  // Now patch all the codes for deoptimization.
409  for (int i = 0; i < codes.length(); i++) {
410 #ifdef DEBUG
411  if (codes[i] == topmost_optimized_code) {
412  DCHECK(safe_to_deopt_topmost_optimized_code);
413  }
414 #endif
415  // It is finally time to die, code object.
416 
417  // Remove the code from optimized code map.
418  DeoptimizationInputData* deopt_data =
419  DeoptimizationInputData::cast(codes[i]->deoptimization_data());
420  SharedFunctionInfo* shared =
421  SharedFunctionInfo::cast(deopt_data->SharedFunctionInfo());
422  shared->EvictFromOptimizedCodeMap(codes[i], "deoptimized code");
423 
424  // Do platform-specific patching to force any activations to lazy deopt.
425  if (!codes[i]->is_turbofanned() || FLAG_turbo_deoptimization) {
427 
428  // We might be in the middle of incremental marking with compaction.
429  // Tell collector to treat this code object in a special way and
430  // ignore all slots that might have been recorded on it.
432  }
433  }
434 }
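Both list walks in this function (the function list via the unlinker above, and the optimized code list here) use the same singly-linked-list surgery: remember the previous live node, and splice around a removed node either through prev or through the list head. A standalone sketch of that pattern (Node and RemoveMarked are illustrative, not V8 types):

#include <cassert>

struct Node {
  bool marked;
  Node* next;
};

// Remove every marked node, mirroring the prev/next bookkeeping above.
static Node* RemoveMarked(Node* head) {
  Node* prev = nullptr;
  Node* element = head;
  while (element != nullptr) {
    Node* next = element->next;
    if (element->marked) {
      if (prev != nullptr) {
        prev->next = next;  // Skip this node in the list.
      } else {
        head = next;        // No previous node: the next node is the new head.
      }
    } else {
      prev = element;       // Not marked; preserve this element.
    }
    element = next;
  }
  return head;
}

int main() {
  Node c = {false, nullptr};
  Node b = {true, &c};
  Node a = {false, &b};
  Node* head = RemoveMarked(&a);
  assert(head == &a && a.next == &c && c.next == nullptr);
  return 0;
}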
435 
436 
437 void Deoptimizer::DeoptimizeAll(Isolate* isolate) {
438  if (FLAG_trace_deopt) {
439  CodeTracer::Scope scope(isolate->GetCodeTracer());
440  PrintF(scope.file(), "[deoptimize all code in all contexts]\n");
441  }
442  DisallowHeapAllocation no_allocation;
443  // For all contexts, mark all code, then deoptimize.
444  Object* context = isolate->heap()->native_contexts_list();
445  while (!context->IsUndefined()) {
446  Context* native_context = Context::cast(context);
447  MarkAllCodeForContext(native_context);
448  DeoptimizeMarkedCodeForContext(native_context);
449  context = native_context->get(Context::NEXT_CONTEXT_LINK);
450  }
451 }
452 
453 
454 void Deoptimizer::DeoptimizeMarkedCode(Isolate* isolate) {
455  if (FLAG_trace_deopt) {
456  CodeTracer::Scope scope(isolate->GetCodeTracer());
457  PrintF(scope.file(), "[deoptimize marked code in all contexts]\n");
458  }
459  DisallowHeapAllocation no_allocation;
460  // For all contexts, deoptimize code already marked.
461  Object* context = isolate->heap()->native_contexts_list();
462  while (!context->IsUndefined()) {
463  Context* native_context = Context::cast(context);
464  DeoptimizeMarkedCodeForContext(native_context);
465  context = native_context->get(Context::NEXT_CONTEXT_LINK);
466  }
467 }
468 
469 
470 void Deoptimizer::DeoptimizeGlobalObject(JSObject* object) {
471  if (FLAG_trace_deopt) {
472  CodeTracer::Scope scope(object->GetHeap()->isolate()->GetCodeTracer());
473  PrintF(scope.file(), "[deoptimize global object @ 0x%08" V8PRIxPTR "]\n",
474  reinterpret_cast<intptr_t>(object));
475  }
476  if (object->IsJSGlobalProxy()) {
477  PrototypeIterator iter(object->GetIsolate(), object);
478  // TODO(verwaest): This CHECK will be hit if the global proxy is detached.
479  CHECK(iter.GetCurrent()->IsJSGlobalObject());
480  Context* native_context =
481  GlobalObject::cast(iter.GetCurrent())->native_context();
482  MarkAllCodeForContext(native_context);
483  DeoptimizeMarkedCodeForContext(native_context);
484  } else if (object->IsGlobalObject()) {
485  Context* native_context = GlobalObject::cast(object)->native_context();
486  MarkAllCodeForContext(native_context);
487  DeoptimizeMarkedCodeForContext(native_context);
488  }
489 }
490 
491 
492 void Deoptimizer::MarkAllCodeForContext(Context* context) {
493  Object* element = context->OptimizedCodeListHead();
494  while (!element->IsUndefined()) {
495  Code* code = Code::cast(element);
496  CHECK_EQ(code->kind(), Code::OPTIMIZED_FUNCTION);
497  code->set_marked_for_deoptimization(true);
498  element = code->next_code_link();
499  }
500 }
501 
502 
503 void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
504  Code* code = function->code();
505  if (code->kind() == Code::OPTIMIZED_FUNCTION) {
506  // Mark the code for deoptimization and unlink any functions that also
507  // refer to that code. The code cannot be shared across native contexts,
508  // so we only need to search one.
509  code->set_marked_for_deoptimization(true);
510  DeoptimizeMarkedCodeForContext(function->context()->native_context());
511  }
512 }
513 
514 
515 void Deoptimizer::ComputeOutputFrames(Deoptimizer* deoptimizer) {
516  deoptimizer->DoComputeOutputFrames();
517 }
518 
519 
520 bool Deoptimizer::TraceEnabledFor(BailoutType deopt_type,
521  StackFrame::Type frame_type) {
522  switch (deopt_type) {
523  case EAGER:
524  case SOFT:
525  case LAZY:
526  case DEBUGGER:
527  return (frame_type == StackFrame::STUB)
528  ? FLAG_trace_stub_failures
529  : FLAG_trace_deopt;
530  }
531  FATAL("Unsupported deopt type");
532  return false;
533 }
534 
535 
536 const char* Deoptimizer::MessageFor(BailoutType type) {
537  switch (type) {
538  case EAGER: return "eager";
539  case SOFT: return "soft";
540  case LAZY: return "lazy";
541  case DEBUGGER: return "debugger";
542  }
543  FATAL("Unsupported deopt type");
544  return NULL;
545 }
546 
547 
548 Deoptimizer::Deoptimizer(Isolate* isolate,
549  JSFunction* function,
550  BailoutType type,
551  unsigned bailout_id,
552  Address from,
553  int fp_to_sp_delta,
554  Code* optimized_code)
555  : isolate_(isolate),
556  function_(function),
557  bailout_id_(bailout_id),
558  bailout_type_(type),
559  from_(from),
560  fp_to_sp_delta_(fp_to_sp_delta),
561  has_alignment_padding_(0),
562  input_(NULL),
563  output_count_(0),
564  jsframe_count_(0),
565  output_(NULL),
566  deferred_objects_tagged_values_(0),
567  deferred_objects_double_values_(0),
568  deferred_objects_(0),
569  deferred_heap_numbers_(0),
570  jsframe_functions_(0),
571  jsframe_has_adapted_arguments_(0),
572  materialized_values_(NULL),
573  materialized_objects_(NULL),
574  materialization_value_index_(0),
575  materialization_object_index_(0),
576  trace_scope_(NULL) {
577  // For COMPILED_STUBs called from builtins, the function pointer is a SMI
578  // indicating an internal frame.
579  if (function->IsSmi()) {
580  function = NULL;
581  }
582  DCHECK(from != NULL);
583  if (function != NULL && function->IsOptimized()) {
584  function->shared()->increment_deopt_count();
585  if (bailout_type_ == Deoptimizer::SOFT) {
586  isolate->counters()->soft_deopts_executed()->Increment();
587  // Soft deopts shouldn't count against the overall re-optimization count
588  // that can eventually lead to disabling optimization for a function.
589  int opt_count = function->shared()->opt_count();
590  if (opt_count > 0) opt_count--;
591  function->shared()->set_opt_count(opt_count);
592  }
593  }
594  compiled_code_ = FindOptimizedCode(function, optimized_code);
595 
596 #if DEBUG
598  if (type == EAGER || type == SOFT || type == LAZY) {
599  DCHECK(compiled_code_->kind() != Code::FUNCTION);
600  }
601 #endif
602 
603  StackFrame::Type frame_type = function == NULL
604  ? StackFrame::STUB
605  : StackFrame::JAVA_SCRIPT;
606  trace_scope_ = TraceEnabledFor(type, frame_type) ?
607  new CodeTracer::Scope(isolate->GetCodeTracer()) : NULL;
608 #ifdef DEBUG
609  CHECK(AllowHeapAllocation::IsAllowed());
610  disallow_heap_allocation_ = new DisallowHeapAllocation();
611 #endif // DEBUG
612  unsigned size = ComputeInputFrameSize();
613  input_ = new(size) FrameDescription(size, function);
614  input_->SetFrameType(frame_type);
615 }
616 
617 
618 Code* Deoptimizer::FindOptimizedCode(JSFunction* function,
619  Code* optimized_code) {
620  switch (bailout_type_) {
621  case Deoptimizer::SOFT:
622  case Deoptimizer::EAGER:
623  case Deoptimizer::LAZY: {
624  Code* compiled_code = FindDeoptimizingCode(from_);
625  return (compiled_code == NULL)
626  ? static_cast<Code*>(isolate_->FindCodeObject(from_))
627  : compiled_code;
628  }
629  case Deoptimizer::DEBUGGER:
630  DCHECK(optimized_code->contains(from_));
631  return optimized_code;
632  }
633  FATAL("Could not find code for optimized function");
634  return NULL;
635 }
636 
637 
638 void Deoptimizer::PrintFunctionName() {
639  if (function_->IsJSFunction()) {
640  function_->PrintName(trace_scope_->file());
641  } else {
642  PrintF(trace_scope_->file(),
644  }
645 }
646 
647 
648 Deoptimizer::~Deoptimizer() {
649  DCHECK(input_ == NULL && output_ == NULL);
650  DCHECK(disallow_heap_allocation_ == NULL);
651  delete trace_scope_;
652 }
653 
654 
655 void Deoptimizer::DeleteFrameDescriptions() {
656  delete input_;
657  for (int i = 0; i < output_count_; ++i) {
658  if (output_[i] != input_) delete output_[i];
659  }
660  delete[] output_;
661  input_ = NULL;
662  output_ = NULL;
663 #ifdef DEBUG
664  CHECK(!AllowHeapAllocation::IsAllowed());
665  CHECK(disallow_heap_allocation_ != NULL);
666  delete disallow_heap_allocation_;
667  disallow_heap_allocation_ = NULL;
668 #endif // DEBUG
669 }
670 
671 
672 Address Deoptimizer::GetDeoptimizationEntry(Isolate* isolate,
673  int id,
674  BailoutType type,
675  GetEntryMode mode) {
676  CHECK_GE(id, 0);
677  if (id >= kMaxNumberOfEntries) return NULL;
678  if (mode == ENSURE_ENTRY_CODE) {
679  EnsureCodeForDeoptimizationEntry(isolate, type, id);
680  } else {
682  }
684  DeoptimizerData* data = isolate->deoptimizer_data();
685  MemoryChunk* base = data->deopt_entry_code_[type];
686  return base->area_start() + (id * table_entry_size_);
687 }
688 
689 
690 int Deoptimizer::GetDeoptimizationId(Isolate* isolate,
691  Address addr,
692  BailoutType type) {
693  DeoptimizerData* data = isolate->deoptimizer_data();
694  MemoryChunk* base = data->deopt_entry_code_[type];
695  Address start = base->area_start();
696  if (base == NULL ||
697  addr < start ||
698  addr >= start + (kMaxNumberOfEntries * table_entry_size_)) {
699  return kNotDeoptimizationEntry;
700  }
701  DCHECK_EQ(0,
702  static_cast<int>(addr - start) % table_entry_size_);
703  return static_cast<int>(addr - start) / table_entry_size_;
704 }
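GetDeoptimizationEntry and GetDeoptimizationId above are inverses over a fixed-stride table: entry id maps to area_start() + id * table_entry_size_, and a table address maps back to (addr - start) / table_entry_size_. A standalone sketch of that mapping with a hypothetical entry size (kEntrySizeSketch stands in for the per-architecture table_entry_size_):

#include <cassert>
#include <cstdint>

static const int kEntrySizeSketch = 10;  // stand-in; table_entry_size_ is per-architecture

static uintptr_t EntryAddressSketch(uintptr_t table_start, int id) {
  return table_start + static_cast<uintptr_t>(id) * kEntrySizeSketch;
}

static int EntryIdSketch(uintptr_t table_start, uintptr_t addr) {
  // Mirrors the DCHECK above: a valid entry address lies exactly on a stride.
  assert((addr - table_start) % kEntrySizeSketch == 0);
  return static_cast<int>((addr - table_start) / kEntrySizeSketch);
}

int main() {
  const uintptr_t start = 0x10000;
  assert(EntryIdSketch(start, EntryAddressSketch(start, 7)) == 7);
  return 0;
}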
705 
706 
707 unsigned Deoptimizer::GetOutputInfo(DeoptimizationOutputData* data,
708  BailoutId id,
709  SharedFunctionInfo* shared) {
710  // TODO(kasperl): For now, we do a simple linear search for the PC
711  // offset associated with the given node id. This should probably be
712  // changed to a binary search.
713  int length = data->DeoptPoints();
714  for (int i = 0; i < length; i++) {
715  if (data->AstId(i) == id) {
716  return data->PcAndState(i)->value();
717  }
718  }
719  OFStream os(stderr);
720  os << "[couldn't find pc offset for node=" << id.ToInt() << "]\n"
721  << "[method: " << shared->DebugName()->ToCString().get() << "]\n"
722  << "[source:\n" << SourceCodeOf(shared) << "\n]" << endl;
723 
724  FATAL("unable to find pc offset during deoptimization");
725  return -1;
726 }
727 
728 
729 int Deoptimizer::GetDeoptimizedCodeCount(Isolate* isolate) {
730  int length = 0;
731  // Count all entries in the deoptimizing code list of every context.
732  Object* context = isolate->heap()->native_contexts_list();
733  while (!context->IsUndefined()) {
734  Context* native_context = Context::cast(context);
735  Object* element = native_context->DeoptimizedCodeListHead();
736  while (!element->IsUndefined()) {
737  Code* code = Code::cast(element);
738  DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
739  length++;
740  element = code->next_code_link();
741  }
742  context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
743  }
744  return length;
745 }
746 
747 
748 // We rely on this function not causing a GC. It is called from generated code
749 // without having a real stack frame in place.
750 void Deoptimizer::DoComputeOutputFrames() {
751  // Print some helpful diagnostic information.
752  if (FLAG_log_timer_events &&
753  compiled_code_->kind() == Code::OPTIMIZED_FUNCTION) {
754  LOG(isolate(), CodeDeoptEvent(compiled_code_));
755  }
756  base::ElapsedTimer timer;
757 
758  // Determine basic deoptimization information. The optimized frame is
759  // described by the input data.
760  DeoptimizationInputData* input_data =
761  DeoptimizationInputData::cast(compiled_code_->deoptimization_data());
762 
763  if (trace_scope_ != NULL) {
764  timer.Start();
765  PrintF(trace_scope_->file(),
766  "[deoptimizing (DEOPT %s): begin 0x%08" V8PRIxPTR " ",
768  reinterpret_cast<intptr_t>(function_));
770  PrintF(trace_scope_->file(),
771  " (opt #%d) @%d, FP to SP delta: %d]\n",
772  input_data->OptimizationId()->value(),
773  bailout_id_,
775  if (bailout_type_ == EAGER || bailout_type_ == SOFT ||
778  }
779  }
780 
781  BailoutId node_id = input_data->AstId(bailout_id_);
782  ByteArray* translations = input_data->TranslationByteArray();
783  unsigned translation_index =
784  input_data->TranslationIndex(bailout_id_)->value();
785 
786  // Do the input frame to output frame(s) translation.
787  TranslationIterator iterator(translations, translation_index);
788  Translation::Opcode opcode =
789  static_cast<Translation::Opcode>(iterator.Next());
790  DCHECK(Translation::BEGIN == opcode);
791  USE(opcode);
792  // Read the number of output frames and allocate an array for their
793  // descriptions.
794  int count = iterator.Next();
795  iterator.Next(); // Drop JS frames count.
796  DCHECK(output_ == NULL);
797  output_ = new FrameDescription*[count];
798  for (int i = 0; i < count; ++i) {
799  output_[i] = NULL;
800  }
801  output_count_ = count;
802 
804  stack_fp_ = reinterpret_cast<Address>(
805  input_->GetRegister(fp_reg.code()) +
807 
808  // Translate each output frame.
809  for (int i = 0; i < count; ++i) {
810  // Read the ast node id, function, and frame height for this output frame.
811  Translation::Opcode opcode =
812  static_cast<Translation::Opcode>(iterator.Next());
813  switch (opcode) {
814  case Translation::JS_FRAME:
815  DoComputeJSFrame(&iterator, i);
816  jsframe_count_++;
817  break;
818  case Translation::ARGUMENTS_ADAPTOR_FRAME:
819  DoComputeArgumentsAdaptorFrame(&iterator, i);
820  break;
821  case Translation::CONSTRUCT_STUB_FRAME:
822  DoComputeConstructStubFrame(&iterator, i);
823  break;
824  case Translation::GETTER_STUB_FRAME:
825  DoComputeAccessorStubFrame(&iterator, i, false);
826  break;
827  case Translation::SETTER_STUB_FRAME:
828  DoComputeAccessorStubFrame(&iterator, i, true);
829  break;
830  case Translation::COMPILED_STUB_FRAME:
831  DoComputeCompiledStubFrame(&iterator, i);
832  break;
833  case Translation::BEGIN:
834  case Translation::REGISTER:
835  case Translation::INT32_REGISTER:
836  case Translation::UINT32_REGISTER:
837  case Translation::DOUBLE_REGISTER:
838  case Translation::STACK_SLOT:
839  case Translation::INT32_STACK_SLOT:
840  case Translation::UINT32_STACK_SLOT:
841  case Translation::DOUBLE_STACK_SLOT:
842  case Translation::LITERAL:
843  case Translation::ARGUMENTS_OBJECT:
844  default:
845  FATAL("Unsupported translation");
846  break;
847  }
848  }
849 
850  // Print some helpful diagnostic information.
851  if (trace_scope_ != NULL) {
852  double ms = timer.Elapsed().InMillisecondsF();
853  int index = output_count_ - 1; // Index of the topmost frame.
854  JSFunction* function = output_[index]->GetFunction();
855  PrintF(trace_scope_->file(),
856  "[deoptimizing (%s): end 0x%08" V8PRIxPTR " ",
858  reinterpret_cast<intptr_t>(function));
860  PrintF(trace_scope_->file(),
861  " @%d => node=%d, pc=0x%08" V8PRIxPTR ", state=%s, alignment=%s,"
862  " took %0.3f ms]\n",
863  bailout_id_,
864  node_id.ToInt(),
865  output_[index]->GetPc(),
867  static_cast<FullCodeGenerator::State>(
868  output_[index]->GetState()->value())),
869  has_alignment_padding_ ? "with padding" : "no padding",
870  ms);
871  }
872 }
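A rough sketch of the translation stream as the loop above consumes it (reconstructed from the switch itself; one DoCompute*Frame call per frame opcode, bottommost output frame first, topmost last):

// BEGIN <frame count> <JS frame count>
//   <frame opcode>            JS_FRAME, ARGUMENTS_ADAPTOR_FRAME,
//                             CONSTRUCT_STUB_FRAME, GETTER/SETTER_STUB_FRAME
//                             or COMPILED_STUB_FRAME, plus its payload
//     <value opcodes>         REGISTER / STACK_SLOT / LITERAL / ... commands,
//                             consumed later by DoTranslateCommand
//   <frame opcode>
//     <value opcodes>
//   ...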
873 
874 
875 void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
876  int frame_index) {
877  BailoutId node_id = BailoutId(iterator->Next());
878  JSFunction* function;
879  if (frame_index != 0) {
880  function = JSFunction::cast(ComputeLiteral(iterator->Next()));
881  } else {
882  int closure_id = iterator->Next();
883  USE(closure_id);
884  CHECK_EQ(Translation::kSelfLiteralId, closure_id);
885  function = function_;
886  }
887  unsigned height = iterator->Next() - 1; // Do not count the context.
888  unsigned height_in_bytes = height * kPointerSize;
889  if (trace_scope_ != NULL) {
890  PrintF(trace_scope_->file(), " translating ");
891  function->PrintName(trace_scope_->file());
892  PrintF(trace_scope_->file(),
893  " => node=%d, height=%d\n", node_id.ToInt(), height_in_bytes);
894  }
895 
896  // The 'fixed' part of the frame consists of the incoming parameters and
897  // the part described by JavaScriptFrameConstants.
898  unsigned fixed_frame_size = ComputeFixedSize(function);
899  unsigned input_frame_size = input_->GetFrameSize();
900  unsigned output_frame_size = height_in_bytes + fixed_frame_size;
901 
902  // Allocate and store the output frame description.
903  FrameDescription* output_frame =
904  new(output_frame_size) FrameDescription(output_frame_size, function);
905  output_frame->SetFrameType(StackFrame::JAVA_SCRIPT);
906 
907  bool is_bottommost = (0 == frame_index);
908  bool is_topmost = (output_count_ - 1 == frame_index);
909  CHECK(frame_index >= 0 && frame_index < output_count_);
910  CHECK_EQ(output_[frame_index], NULL);
911  output_[frame_index] = output_frame;
912 
913  // The top address for the bottommost output frame can be computed from
914  // the input frame pointer and the output frame's height. For all
915  // subsequent output frames, it can be computed from the previous one's
916  // top address and the current frame's size.
918  intptr_t top_address;
919  if (is_bottommost) {
920  // Determine whether the input frame contains alignment padding.
923  ? 1
924  : 0;
925  // 2 = context and function in the frame.
926  // If the optimized frame had alignment padding, adjust the frame pointer
927  // to point to the new position of the old frame pointer after padding
928  // is removed. Subtract 2 * kPointerSize for the context and function slots.
929  top_address = input_->GetRegister(fp_reg.code()) -
931  height_in_bytes + has_alignment_padding_ * kPointerSize;
932  } else {
933  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
934  }
935  output_frame->SetTop(top_address);
936 
937  // Compute the incoming parameter translation.
938  int parameter_count = function->shared()->formal_parameter_count() + 1;
939  unsigned output_offset = output_frame_size;
940  unsigned input_offset = input_frame_size;
941  for (int i = 0; i < parameter_count; ++i) {
943  DoTranslateCommand(iterator, frame_index, output_offset);
944  }
945  input_offset -= (parameter_count * kPointerSize);
946 
947  // There are no translation commands for the caller's pc and fp, the
948  // context, and the function. Synthesize their values and set them up
949  // explicitly.
950  //
951  // The caller's pc for the bottommost output frame is the same as in the
952  // input frame. For all subsequent output frames, it can be read from the
953  // previous one. This frame's pc can be computed from the non-optimized
954  // function code and AST id of the bailout.
957  intptr_t value;
958  if (is_bottommost) {
959  value = input_->GetFrameSlot(input_offset);
960  } else {
961  value = output_[frame_index - 1]->GetPc();
962  }
963  output_frame->SetCallerPc(output_offset, value);
964  if (trace_scope_ != NULL) {
965  PrintF(trace_scope_->file(),
966  " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
967  V8PRIxPTR " ; caller's pc\n",
968  top_address + output_offset, output_offset, value);
969  }
970 
971  // The caller's frame pointer for the bottommost output frame is the same
972  // as in the input frame. For all subsequent output frames, it can be
973  // read from the previous one. Also compute and set this frame's frame
974  // pointer.
977  if (is_bottommost) {
978  value = input_->GetFrameSlot(input_offset);
979  } else {
980  value = output_[frame_index - 1]->GetFp();
981  }
982  output_frame->SetCallerFp(output_offset, value);
983  intptr_t fp_value = top_address + output_offset;
984  DCHECK(!is_bottommost || (input_->GetRegister(fp_reg.code()) +
985  has_alignment_padding_ * kPointerSize) == fp_value);
986  output_frame->SetFp(fp_value);
987  if (is_topmost) output_frame->SetRegister(fp_reg.code(), fp_value);
988  if (trace_scope_ != NULL) {
989  PrintF(trace_scope_->file(),
990  " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
991  V8PRIxPTR " ; caller's fp\n",
992  fp_value, output_offset, value);
993  }
994  DCHECK(!is_bottommost || !has_alignment_padding_ ||
995  (fp_value & kPointerSize) != 0);
996 
997  if (FLAG_enable_ool_constant_pool) {
998  // For the bottommost output frame the constant pool pointer can be gotten
999  // from the input frame. For subsequent output frames, it can be read from
1000  // the previous frame.
1003  if (is_bottommost) {
1004  value = input_->GetFrameSlot(input_offset);
1005  } else {
1006  value = output_[frame_index - 1]->GetConstantPool();
1007  }
1008  output_frame->SetCallerConstantPool(output_offset, value);
1009  if (trace_scope_) {
1010  PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1011  V8PRIxPTR "; caller's constant_pool\n",
1012  top_address + output_offset, output_offset, value);
1013  }
1014  }
1015 
1016  // For the bottommost output frame the context can be gotten from the input
1017  // frame. For all subsequent output frames it can be gotten from the function
1018  // so long as we don't inline functions that need local contexts.
1022  // Read the context from the translations.
1023  DoTranslateCommand(iterator, frame_index, output_offset);
1024  value = output_frame->GetFrameSlot(output_offset);
1025  // The context should not be a placeholder for a materialized object.
1026  CHECK(value !=
1027  reinterpret_cast<intptr_t>(isolate_->heap()->arguments_marker()));
1028  if (value ==
1029  reinterpret_cast<intptr_t>(isolate_->heap()->undefined_value())) {
1030  // If the context was optimized away, just use the context from
1031  // the activation. This should only apply to Crankshaft code.
1033  if (is_bottommost) {
1034  value = input_->GetFrameSlot(input_offset);
1035  } else {
1036  value = reinterpret_cast<intptr_t>(function->context());
1037  }
1038  output_frame->SetFrameSlot(output_offset, value);
1039  }
1040  output_frame->SetContext(value);
1041  if (is_topmost) output_frame->SetRegister(context_reg.code(), value);
1042  if (trace_scope_ != NULL) {
1043  PrintF(trace_scope_->file(),
1044  " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1045  V8PRIxPTR "; context\n",
1046  top_address + output_offset, output_offset, value);
1047  }
1048 
1049  // The function was mentioned explicitly in the BEGIN_FRAME.
1052  value = reinterpret_cast<intptr_t>(function);
1053  // The function for the bottommost output frame should also agree with the
1054  // input frame.
1055  DCHECK(!is_bottommost || input_->GetFrameSlot(input_offset) == value);
1056  output_frame->SetFrameSlot(output_offset, value);
1057  if (trace_scope_ != NULL) {
1058  PrintF(trace_scope_->file(),
1059  " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1060  V8PRIxPTR "; function\n",
1061  top_address + output_offset, output_offset, value);
1062  }
1063 
1064  // Translate the rest of the frame.
1065  for (unsigned i = 0; i < height; ++i) {
1067  DoTranslateCommand(iterator, frame_index, output_offset);
1068  }
1069  CHECK_EQ(0, output_offset);
1070 
1071  // Compute this frame's PC, state, and continuation.
1072  Code* non_optimized_code = function->shared()->code();
1073  FixedArray* raw_data = non_optimized_code->deoptimization_data();
1074  DeoptimizationOutputData* data = DeoptimizationOutputData::cast(raw_data);
1075  Address start = non_optimized_code->instruction_start();
1076  unsigned pc_and_state = GetOutputInfo(data, node_id, function->shared());
1077  unsigned pc_offset = FullCodeGenerator::PcField::decode(pc_and_state);
1078  intptr_t pc_value = reinterpret_cast<intptr_t>(start + pc_offset);
1079  output_frame->SetPc(pc_value);
1080 
1081  // Update constant pool.
1082  if (FLAG_enable_ool_constant_pool) {
1083  intptr_t constant_pool_value =
1084  reinterpret_cast<intptr_t>(non_optimized_code->constant_pool());
1085  output_frame->SetConstantPool(constant_pool_value);
1086  if (is_topmost) {
1087  Register constant_pool_reg =
1089  output_frame->SetRegister(constant_pool_reg.code(), constant_pool_value);
1090  }
1091  }
1092 
1093  FullCodeGenerator::State state =
1095  output_frame->SetState(Smi::FromInt(state));
1096 
1097  // Set the continuation for the topmost frame.
1098  if (is_topmost && bailout_type_ != DEBUGGER) {
1099  Builtins* builtins = isolate_->builtins();
1100  Code* continuation = builtins->builtin(Builtins::kNotifyDeoptimized);
1101  if (bailout_type_ == LAZY) {
1102  continuation = builtins->builtin(Builtins::kNotifyLazyDeoptimized);
1103  } else if (bailout_type_ == SOFT) {
1104  continuation = builtins->builtin(Builtins::kNotifySoftDeoptimized);
1105  } else {
1107  }
1108  output_frame->SetContinuation(
1109  reinterpret_cast<intptr_t>(continuation->entry()));
1110  }
1111 }
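The writes above lay the JavaScript frame out from the incoming parameters at high addresses down to top_address. A sketch of the resulting layout, reconstructed from the order of the writes:

//   (higher addresses)                 <- top_address + output_frame_size
//   receiver and parameters            parameter_count translated slots
//   caller's pc
//   caller's fp                        <- this frame's fp (fp_value)
//   caller's constant pool             only if FLAG_enable_ool_constant_pool
//   context
//   function
//   locals / expression stack          height translated slots
//   (lower addresses)                  <- top_address
// The pc is then pointed into the unoptimized code at the offset recorded for
// this AST id, and the topmost frame additionally gets a NotifyDeoptimized /
// NotifyLazyDeoptimized / NotifySoftDeoptimized continuation builtin.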
1112 
1113 
1114 void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
1115  int frame_index) {
1116  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
1117  unsigned height = iterator->Next();
1118  unsigned height_in_bytes = height * kPointerSize;
1119  if (trace_scope_ != NULL) {
1120  PrintF(trace_scope_->file(),
1121  " translating arguments adaptor => height=%d\n", height_in_bytes);
1122  }
1123 
1124  unsigned fixed_frame_size = ArgumentsAdaptorFrameConstants::kFrameSize;
1125  unsigned output_frame_size = height_in_bytes + fixed_frame_size;
1126 
1127  // Allocate and store the output frame description.
1128  FrameDescription* output_frame =
1129  new(output_frame_size) FrameDescription(output_frame_size, function);
1131 
1132  // Arguments adaptor can not be topmost or bottommost.
1133  CHECK(frame_index > 0 && frame_index < output_count_ - 1);
1134  CHECK(output_[frame_index] == NULL);
1135  output_[frame_index] = output_frame;
1136 
1137  // The top address of the frame is computed from the previous
1138  // frame's top and this frame's size.
1139  intptr_t top_address;
1140  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
1141  output_frame->SetTop(top_address);
1142 
1143  // Compute the incoming parameter translation.
1144  int parameter_count = height;
1145  unsigned output_offset = output_frame_size;
1146  for (int i = 0; i < parameter_count; ++i) {
1148  DoTranslateCommand(iterator, frame_index, output_offset);
1149  }
1150 
1151  // Read caller's PC from the previous frame.
1153  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
1154  output_frame->SetCallerPc(output_offset, callers_pc);
1155  if (trace_scope_ != NULL) {
1156  PrintF(trace_scope_->file(),
1157  " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1158  V8PRIxPTR " ; caller's pc\n",
1159  top_address + output_offset, output_offset, callers_pc);
1160  }
1161 
1162  // Read caller's FP from the previous frame, and set this frame's FP.
1164  intptr_t value = output_[frame_index - 1]->GetFp();
1165  output_frame->SetCallerFp(output_offset, value);
1166  intptr_t fp_value = top_address + output_offset;
1167  output_frame->SetFp(fp_value);
1168  if (trace_scope_ != NULL) {
1169  PrintF(trace_scope_->file(),
1170  " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1171  V8PRIxPTR " ; caller's fp\n",
1172  fp_value, output_offset, value);
1173  }
1174 
1175  if (FLAG_enable_ool_constant_pool) {
1176  // Read the caller's constant pool from the previous frame.
1178  value = output_[frame_index - 1]->GetConstantPool();
1179  output_frame->SetCallerConstantPool(output_offset, value);
1180  if (trace_scope_) {
1181  PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1182  V8PRIxPTR "; caller's constant_pool\n",
1183  top_address + output_offset, output_offset, value);
1184  }
1185  }
1186 
1187  // A marker value is used in place of the context.
1189  intptr_t context = reinterpret_cast<intptr_t>(
1191  output_frame->SetFrameSlot(output_offset, context);
1192  if (trace_scope_ != NULL) {
1193  PrintF(trace_scope_->file(),
1194  " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1195  V8PRIxPTR " ; context (adaptor sentinel)\n",
1196  top_address + output_offset, output_offset, context);
1197  }
1198 
1199  // The function was mentioned explicitly in the ARGUMENTS_ADAPTOR_FRAME.
1201  value = reinterpret_cast<intptr_t>(function);
1202  output_frame->SetFrameSlot(output_offset, value);
1203  if (trace_scope_ != NULL) {
1204  PrintF(trace_scope_->file(),
1205  " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1206  V8PRIxPTR " ; function\n",
1207  top_address + output_offset, output_offset, value);
1208  }
1209 
1210  // Number of incoming arguments.
1212  value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
1213  output_frame->SetFrameSlot(output_offset, value);
1214  if (trace_scope_ != NULL) {
1215  PrintF(trace_scope_->file(),
1216  " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1217  V8PRIxPTR " ; argc (%d)\n",
1218  top_address + output_offset, output_offset, value, height - 1);
1219  }
1220 
1221  DCHECK(0 == output_offset);
1222 
1223  Builtins* builtins = isolate_->builtins();
1224  Code* adaptor_trampoline =
1225  builtins->builtin(Builtins::kArgumentsAdaptorTrampoline);
1226  intptr_t pc_value = reinterpret_cast<intptr_t>(
1227  adaptor_trampoline->instruction_start() +
1228  isolate_->heap()->arguments_adaptor_deopt_pc_offset()->value());
1229  output_frame->SetPc(pc_value);
1230  if (FLAG_enable_ool_constant_pool) {
1231  intptr_t constant_pool_value =
1232  reinterpret_cast<intptr_t>(adaptor_trampoline->constant_pool());
1233  output_frame->SetConstantPool(constant_pool_value);
1234  }
1235 }
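The adaptor frame assembled above is fixed-shape; a sketch of its slots, top (high addresses) to bottom, as written by this function:

//   arguments                          height translated slots
//   caller's pc
//   caller's fp                        <- this frame's fp
//   caller's constant pool             only if FLAG_enable_ool_constant_pool
//   context slot                       arguments-adaptor sentinel
//   function
//   argc                               Smi(height - 1); the receiver is not counted
// The pc is set into the ArgumentsAdaptorTrampoline builtin at the recorded
// arguments_adaptor_deopt_pc_offset.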
1236 
1237 
1238 void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
1239  int frame_index) {
1240  Builtins* builtins = isolate_->builtins();
1241  Code* construct_stub = builtins->builtin(Builtins::kJSConstructStubGeneric);
1242  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
1243  unsigned height = iterator->Next();
1244  unsigned height_in_bytes = height * kPointerSize;
1245  if (trace_scope_ != NULL) {
1246  PrintF(trace_scope_->file(),
1247  " translating construct stub => height=%d\n", height_in_bytes);
1248  }
1249 
1250  unsigned fixed_frame_size = ConstructFrameConstants::kFrameSize;
1251  unsigned output_frame_size = height_in_bytes + fixed_frame_size;
1252 
1253  // Allocate and store the output frame description.
1254  FrameDescription* output_frame =
1255  new(output_frame_size) FrameDescription(output_frame_size, function);
1256  output_frame->SetFrameType(StackFrame::CONSTRUCT);
1257 
1258  // Construct stub can not be topmost or bottommost.
1259  DCHECK(frame_index > 0 && frame_index < output_count_ - 1);
1260  DCHECK(output_[frame_index] == NULL);
1261  output_[frame_index] = output_frame;
1262 
1263  // The top address of the frame is computed from the previous
1264  // frame's top and this frame's size.
1265  intptr_t top_address;
1266  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
1267  output_frame->SetTop(top_address);
1268 
1269  // Compute the incoming parameter translation.
1270  int parameter_count = height;
1271  unsigned output_offset = output_frame_size;
1272  for (int i = 0; i < parameter_count; ++i) {
1274  int deferred_object_index = deferred_objects_.length();
1275  DoTranslateCommand(iterator, frame_index, output_offset);
1276  // The allocated receiver of a construct stub frame is passed as the
1277  // receiver parameter through the translation. It might be encoding
1278  // a captured object, patch the slot address for a captured object.
1279  if (i == 0 && deferred_objects_.length() > deferred_object_index) {
1280  CHECK(!deferred_objects_[deferred_object_index].is_arguments());
1281  deferred_objects_[deferred_object_index].patch_slot_address(top_address);
1282  }
1283  }
1284 
1285  // Read caller's PC from the previous frame.
1287  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
1288  output_frame->SetCallerPc(output_offset, callers_pc);
1289  if (trace_scope_ != NULL) {
1290  PrintF(trace_scope_->file(),
1291  " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1292  V8PRIxPTR " ; caller's pc\n",
1293  top_address + output_offset, output_offset, callers_pc);
1294  }
1295 
1296  // Read caller's FP from the previous frame, and set this frame's FP.
1298  intptr_t value = output_[frame_index - 1]->GetFp();
1299  output_frame->SetCallerFp(output_offset, value);
1300  intptr_t fp_value = top_address + output_offset;
1301  output_frame->SetFp(fp_value);
1302  if (trace_scope_ != NULL) {
1303  PrintF(trace_scope_->file(),
1304  " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1305  V8PRIxPTR " ; caller's fp\n",
1306  fp_value, output_offset, value);
1307  }
1308 
1309  if (FLAG_enable_ool_constant_pool) {
1310  // Read the caller's constant pool from the previous frame.
1312  value = output_[frame_index - 1]->GetConstantPool();
1313  output_frame->SetCallerConstantPool(output_offset, value);
1314  if (trace_scope_) {
1315  PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1316  V8PRIxPTR " ; caller's constant pool\n",
1317  top_address + output_offset, output_offset, value);
1318  }
1319  }
1320 
1321  // The context can be gotten from the previous frame.
1323  value = output_[frame_index - 1]->GetContext();
1324  output_frame->SetFrameSlot(output_offset, value);
1325  if (trace_scope_ != NULL) {
1326  PrintF(trace_scope_->file(),
1327  " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1328  V8PRIxPTR " ; context\n",
1329  top_address + output_offset, output_offset, value);
1330  }
1331 
1332  // A marker value is used in place of the function.
1334  value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::CONSTRUCT));
1335  output_frame->SetFrameSlot(output_offset, value);
1336  if (trace_scope_ != NULL) {
1337  PrintF(trace_scope_->file(),
1338  " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1339  V8PRIxPTR " ; function (construct sentinel)\n",
1340  top_address + output_offset, output_offset, value);
1341  }
1342 
1343  // The output frame reflects a JSConstructStubGeneric frame.
1345  value = reinterpret_cast<intptr_t>(construct_stub);
1346  output_frame->SetFrameSlot(output_offset, value);
1347  if (trace_scope_ != NULL) {
1348  PrintF(trace_scope_->file(),
1349  " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1350  V8PRIxPTR " ; code object\n",
1351  top_address + output_offset, output_offset, value);
1352  }
1353 
1354  // Number of incoming arguments.
1356  value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
1357  output_frame->SetFrameSlot(output_offset, value);
1358  if (trace_scope_ != NULL) {
1359  PrintF(trace_scope_->file(),
1360  " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1361  V8PRIxPTR " ; argc (%d)\n",
1362  top_address + output_offset, output_offset, value, height - 1);
1363  }
1364 
1365  // Constructor function being invoked by the stub (only present on some
1366  // architectures, indicated by kConstructorOffset).
1369  value = reinterpret_cast<intptr_t>(function);
1370  output_frame->SetFrameSlot(output_offset, value);
1371  if (trace_scope_ != NULL) {
1372  PrintF(trace_scope_->file(),
1373  " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1374  V8PRIxPTR " ; constructor function\n",
1375  top_address + output_offset, output_offset, value);
1376  }
1377  }
1378 
1379  // The newly allocated object was passed as receiver in the artificial
1380  // constructor stub environment created by HEnvironment::CopyForInlining().
1382  value = output_frame->GetFrameSlot(output_frame_size - kPointerSize);
1383  output_frame->SetFrameSlot(output_offset, value);
1384  if (trace_scope_ != NULL) {
1385  PrintF(trace_scope_->file(),
1386  " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1387  V8PRIxPTR " ; allocated receiver\n",
1388  top_address + output_offset, output_offset, value);
1389  }
1390 
1391  CHECK_EQ(0, output_offset);
1392 
1393  intptr_t pc = reinterpret_cast<intptr_t>(
1394  construct_stub->instruction_start() +
1395  isolate_->heap()->construct_stub_deopt_pc_offset()->value());
1396  output_frame->SetPc(pc);
1397  if (FLAG_enable_ool_constant_pool) {
1398  intptr_t constant_pool_value =
1399  reinterpret_cast<intptr_t>(construct_stub->constant_pool());
1400  output_frame->SetConstantPool(constant_pool_value);
1401  }
1402 }
1403 
1404 
1405 void Deoptimizer::DoComputeAccessorStubFrame(TranslationIterator* iterator,
1406  int frame_index,
1407  bool is_setter_stub_frame) {
1408  JSFunction* accessor = JSFunction::cast(ComputeLiteral(iterator->Next()));
1409  // The receiver (and the implicit return value, if any) are expected in
1410  // registers by the LoadIC/StoreIC, so they don't belong to the output stack
1411  // frame. This means that we have to use a height of 0.
1412  unsigned height = 0;
1413  unsigned height_in_bytes = height * kPointerSize;
1414  const char* kind = is_setter_stub_frame ? "setter" : "getter";
1415  if (trace_scope_ != NULL) {
1416  PrintF(trace_scope_->file(),
1417  " translating %s stub => height=%u\n", kind, height_in_bytes);
1418  }
1419 
1420  // We need 1 stack entry for the return address and enough entries for the
1421  // StackFrame::INTERNAL (FP, context, frame type, code object and constant
1422  // pool (if FLAG_enable_ool_constant_pool)- see MacroAssembler::EnterFrame).
1423  // For a setter stub frame we need one additional entry for the implicit
1424  // return value, see StoreStubCompiler::CompileStoreViaSetter.
1425  unsigned fixed_frame_entries =
1427  (is_setter_stub_frame ? 1 : 0);
1428  unsigned fixed_frame_size = fixed_frame_entries * kPointerSize;
1429  unsigned output_frame_size = height_in_bytes + fixed_frame_size;
1430 
1431  // Allocate and store the output frame description.
1432  FrameDescription* output_frame =
1433  new(output_frame_size) FrameDescription(output_frame_size, accessor);
1434  output_frame->SetFrameType(StackFrame::INTERNAL);
1435 
1436  // A frame for an accessor stub can not be the topmost or bottommost one.
1437  CHECK(frame_index > 0 && frame_index < output_count_ - 1);
1438  CHECK_EQ(output_[frame_index], NULL);
1439  output_[frame_index] = output_frame;
1440 
1441  // The top address of the frame is computed from the previous frame's top and
1442  // this frame's size.
1443  intptr_t top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
1444  output_frame->SetTop(top_address);
1445 
1446  unsigned output_offset = output_frame_size;
1447 
1448  // Read caller's PC from the previous frame.
1450  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
1451  output_frame->SetCallerPc(output_offset, callers_pc);
1452  if (trace_scope_ != NULL) {
1453  PrintF(trace_scope_->file(),
1454  " 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
1455  " ; caller's pc\n",
1456  top_address + output_offset, output_offset, callers_pc);
1457  }
1458 
1459  // Read caller's FP from the previous frame, and set this frame's FP.
1461  intptr_t value = output_[frame_index - 1]->GetFp();
1462  output_frame->SetCallerFp(output_offset, value);
1463  intptr_t fp_value = top_address + output_offset;
1464  output_frame->SetFp(fp_value);
1465  if (trace_scope_ != NULL) {
1466  PrintF(trace_scope_->file(),
1467  " 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
1468  " ; caller's fp\n",
1469  fp_value, output_offset, value);
1470  }
1471 
1472  if (FLAG_enable_ool_constant_pool) {
1473  // Read the caller's constant pool from the previous frame.
1475  value = output_[frame_index - 1]->GetConstantPool();
1476  output_frame->SetCallerConstantPool(output_offset, value);
1477  if (trace_scope_) {
1478  PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1479  V8PRIxPTR " ; caller's constant pool\n",
1480  top_address + output_offset, output_offset, value);
1481  }
1482  }
1483 
1484  // The context can be gotten from the previous frame.
1486  value = output_[frame_index - 1]->GetContext();
1487  output_frame->SetFrameSlot(output_offset, value);
1488  if (trace_scope_ != NULL) {
1489  PrintF(trace_scope_->file(),
1490  " 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
1491  " ; context\n",
1492  top_address + output_offset, output_offset, value);
1493  }
1494 
1495  // A marker value is used in place of the function.
1497  value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::INTERNAL));
1498  output_frame->SetFrameSlot(output_offset, value);
1499  if (trace_scope_ != NULL) {
1500  PrintF(trace_scope_->file(),
1501  " 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
1502  " ; function (%s sentinel)\n",
1503  top_address + output_offset, output_offset, value, kind);
1504  }
1505 
1506  // Get Code object from accessor stub.
1508  Builtins::Name name = is_setter_stub_frame ?
1509  Builtins::kStoreIC_Setter_ForDeopt :
1510  Builtins::kLoadIC_Getter_ForDeopt;
1511  Code* accessor_stub = isolate_->builtins()->builtin(name);
1512  value = reinterpret_cast<intptr_t>(accessor_stub);
1513  output_frame->SetFrameSlot(output_offset, value);
1514  if (trace_scope_ != NULL) {
1515  PrintF(trace_scope_->file(),
1516  " 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
1517  " ; code object\n",
1518  top_address + output_offset, output_offset, value);
1519  }
1520 
1521  // Skip receiver.
1522  DoTranslateObjectAndSkip(iterator);
1523 
1524  if (is_setter_stub_frame) {
1525  // The implicit return value was part of the artificial setter stub
1526  // environment.
1528  DoTranslateCommand(iterator, frame_index, output_offset);
1529  }
1530 
1531  CHECK_EQ(output_offset, 0);
1532 
1533  Smi* offset = is_setter_stub_frame ?
1534  isolate_->heap()->setter_stub_deopt_pc_offset() :
1535  isolate_->heap()->getter_stub_deopt_pc_offset();
1536  intptr_t pc = reinterpret_cast<intptr_t>(
1537  accessor_stub->instruction_start() + offset->value());
1538  output_frame->SetPc(pc);
1539  if (FLAG_enable_ool_constant_pool) {
1540  intptr_t constant_pool_value =
1541  reinterpret_cast<intptr_t>(accessor_stub->constant_pool());
1542  output_frame->SetConstantPool(constant_pool_value);
1543  }
1544 }
1545 
1546 
1547 void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator,
1548  int frame_index) {
1549  //
1550  // FROM TO
1551  // | .... | | .... |
1552  // +-------------------------+ +-------------------------+
1553  // | JSFunction continuation | | JSFunction continuation |
1554  // +-------------------------+ +-------------------------+
1555  // | | saved frame (FP) | | saved frame (FP) |
1556  // | +=========================+<-fpreg +=========================+<-fpreg
1557  // | |constant pool (if ool_cp)| |constant pool (if ool_cp)|
1558  // | +-------------------------+ +-------------------------|
1559  // | | JSFunction context | | JSFunction context |
1560  // v +-------------------------+ +-------------------------|
1561  // | COMPILED_STUB marker | | STUB_FAILURE marker |
1562  // +-------------------------+ +-------------------------+
1563  // | | | caller args.arguments_ |
1564  // | ... | +-------------------------+
1565  // | | | caller args.length_ |
1566  // |-------------------------|<-spreg +-------------------------+
1567  // | caller args pointer |
1568  // +-------------------------+
1569  // | caller stack param 1 |
1570  // parameters in registers +-------------------------+
1571  // and spilled to stack | .... |
1572  // +-------------------------+
1573  // | caller stack param n |
1574  // +-------------------------+<-spreg
1575  // reg = number of parameters
1576  // reg = failure handler address
1577  // reg = saved frame
1578  // reg = JSFunction context
1579  //
1580 
1582  int major_key = CodeStub::GetMajorKey(compiled_code_);
1584 
1585  // The output frame must have room for all pushed register parameters
1586  // and the standard stack frame slots. Include space for an argument
1587  // object to the callee and optionally the space to pass the argument
1588  // object to the stub failure handler.
1589  int param_count = descriptor.GetEnvironmentParameterCount();
1590  CHECK_GE(param_count, 0);
1591 
1592  int height_in_bytes = kPointerSize * param_count + sizeof(Arguments) +
1593  kPointerSize;
1594  int fixed_frame_size = StandardFrameConstants::kFixedFrameSize;
1595  int input_frame_size = input_->GetFrameSize();
1596  int output_frame_size = height_in_bytes + fixed_frame_size;
1597  if (trace_scope_ != NULL) {
1598  PrintF(trace_scope_->file(),
1599  " translating %s => StubFailureTrampolineStub, height=%d\n",
1600  CodeStub::MajorName(static_cast<CodeStub::Major>(major_key), false),
1601  height_in_bytes);
1602  }
1603 
1604  // The stub failure trampoline is a single frame.
1605  FrameDescription* output_frame =
1606  new(output_frame_size) FrameDescription(output_frame_size, NULL);
1607  output_frame->SetFrameType(StackFrame::STUB_FAILURE_TRAMPOLINE);
1608  CHECK_EQ(frame_index, 0);
1609  output_[frame_index] = output_frame;
1610 
1611  // The top address for the output frame can be computed from the input
1612  // frame pointer and the output frame's height. Subtract space for the
1613  // context and function slots.
1615  intptr_t top_address = input_->GetRegister(fp_reg.code()) -
1617  output_frame->SetTop(top_address);
1618 
1619  // Read caller's PC (JSFunction continuation) from the input frame.
1620  unsigned input_frame_offset = input_frame_size - kPCOnStackSize;
1621  unsigned output_frame_offset = output_frame_size - kFPOnStackSize;
1622  intptr_t value = input_->GetFrameSlot(input_frame_offset);
1623  output_frame->SetCallerPc(output_frame_offset, value);
1624  if (trace_scope_ != NULL) {
1625  PrintF(trace_scope_->file(),
1626  " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1627  V8PRIxPTR " ; caller's pc\n",
1628  top_address + output_frame_offset, output_frame_offset, value);
1629  }
1630 
1631  // Read caller's FP from the input frame, and set this frame's FP.
1632  input_frame_offset -= kFPOnStackSize;
1633  value = input_->GetFrameSlot(input_frame_offset);
1634  output_frame_offset -= kFPOnStackSize;
1635  output_frame->SetCallerFp(output_frame_offset, value);
1636  intptr_t frame_ptr = input_->GetRegister(fp_reg.code());
1637  output_frame->SetRegister(fp_reg.code(), frame_ptr);
1638  output_frame->SetFp(frame_ptr);
1639  if (trace_scope_ != NULL) {
1640  PrintF(trace_scope_->file(),
1641  " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1642  V8PRIxPTR " ; caller's fp\n",
1643  top_address + output_frame_offset, output_frame_offset, value);
1644  }
1645 
1646  if (FLAG_enable_ool_constant_pool) {
1647  // Read the caller's constant pool from the input frame.
1648  input_frame_offset -= kPointerSize;
1649  value = input_->GetFrameSlot(input_frame_offset);
1650  output_frame_offset -= kPointerSize;
1651  output_frame->SetCallerConstantPool(output_frame_offset, value);
1652  if (trace_scope_) {
1653  PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1654  V8PRIxPTR " ; caller's constant_pool\n",
1655  top_address + output_frame_offset, output_frame_offset, value);
1656  }
1657  }
1658 
1659  // The context can be read from the input frame.
1661  input_frame_offset -= kPointerSize;
1662  value = input_->GetFrameSlot(input_frame_offset);
1663  output_frame->SetRegister(context_reg.code(), value);
1664  output_frame_offset -= kPointerSize;
1665  output_frame->SetFrameSlot(output_frame_offset, value);
1666  CHECK(reinterpret_cast<Object*>(value)->IsContext());
1667  if (trace_scope_ != NULL) {
1668  PrintF(trace_scope_->file(),
1669  " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1670  V8PRIxPTR " ; context\n",
1671  top_address + output_frame_offset, output_frame_offset, value);
1672  }
1673 
1674  // A marker value is used in place of the function.
1675  output_frame_offset -= kPointerSize;
1676  value = reinterpret_cast<intptr_t>(
1677  Smi::FromInt(StackFrame::STUB_FAILURE_TRAMPOLINE));
1678  output_frame->SetFrameSlot(output_frame_offset, value);
1679  if (trace_scope_ != NULL) {
1680  PrintF(trace_scope_->file(),
1681  " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1682  V8PRIxPTR " ; function (stub failure sentinel)\n",
1683  top_address + output_frame_offset, output_frame_offset, value);
1684  }
1685 
1686  intptr_t caller_arg_count = 0;
1687  bool arg_count_known = !descriptor.stack_parameter_count().is_valid();
1688 
1689  // Build the Arguments object for the caller's parameters and a pointer to it.
1690  output_frame_offset -= kPointerSize;
1691  int args_arguments_offset = output_frame_offset;
1692  intptr_t the_hole = reinterpret_cast<intptr_t>(
1693  isolate_->heap()->the_hole_value());
1694  if (arg_count_known) {
1695  value = frame_ptr + StandardFrameConstants::kCallerSPOffset +
1696  (caller_arg_count - 1) * kPointerSize;
1697  } else {
1698  value = the_hole;
1699  }
1700 
1701  output_frame->SetFrameSlot(args_arguments_offset, value);
1702  if (trace_scope_ != NULL) {
1703  PrintF(trace_scope_->file(),
1704  " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1705  V8PRIxPTR " ; args.arguments %s\n",
1706  top_address + args_arguments_offset, args_arguments_offset, value,
1707  arg_count_known ? "" : "(the hole)");
1708  }
1709 
1710  output_frame_offset -= kPointerSize;
1711  int length_frame_offset = output_frame_offset;
1712  value = arg_count_known ? caller_arg_count : the_hole;
1713  output_frame->SetFrameSlot(length_frame_offset, value);
1714  if (trace_scope_ != NULL) {
1715  PrintF(trace_scope_->file(),
1716  " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1717  V8PRIxPTR " ; args.length %s\n",
1718  top_address + length_frame_offset, length_frame_offset, value,
1719  arg_count_known ? "" : "(the hole)");
1720  }
1721 
1722  output_frame_offset -= kPointerSize;
1723  value = frame_ptr + StandardFrameConstants::kCallerSPOffset -
1724  (output_frame_size - output_frame_offset) + kPointerSize;
1725  output_frame->SetFrameSlot(output_frame_offset, value);
1726  if (trace_scope_ != NULL) {
1727  PrintF(trace_scope_->file(),
1728  " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1729  V8PRIxPTR " ; args*\n",
1730  top_address + output_frame_offset, output_frame_offset, value);
1731  }
1732 
1733  // Copy the register parameters to the failure frame.
1734  int arguments_length_offset = -1;
1735  for (int i = 0; i < param_count; ++i) {
1736  output_frame_offset -= kPointerSize;
1737  DoTranslateCommand(iterator, 0, output_frame_offset);
1738 
1739  if (!arg_count_known && descriptor.IsEnvironmentParameterCountRegister(i)) {
1740  arguments_length_offset = output_frame_offset;
1741  }
1742  }
1743 
1744  CHECK_EQ(output_frame_offset, 0);
1745 
1746  if (!arg_count_known) {
1747  CHECK_GE(arguments_length_offset, 0);
1748  // We know it's a smi because 1) the code stub guarantees the stack
1749  // parameter count is in smi range, and 2) the DoTranslateCommand in the
1750  // parameter loop above translated that to a tagged value.
1751  Smi* smi_caller_arg_count = reinterpret_cast<Smi*>(
1752  output_frame->GetFrameSlot(arguments_length_offset));
1753  caller_arg_count = smi_caller_arg_count->value();
1754  output_frame->SetFrameSlot(length_frame_offset, caller_arg_count);
1755  if (trace_scope_ != NULL) {
1756  PrintF(trace_scope_->file(),
1757  " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1758  V8PRIxPTR " ; args.length\n",
1759  top_address + length_frame_offset, length_frame_offset,
1760  caller_arg_count);
1761  }
1762  value = frame_ptr + StandardFrameConstants::kCallerSPOffset +
1763  (caller_arg_count - 1) * kPointerSize;
1764  output_frame->SetFrameSlot(args_arguments_offset, value);
1765  if (trace_scope_ != NULL) {
1766  PrintF(trace_scope_->file(),
1767  " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1768  V8PRIxPTR " ; args.arguments\n",
1769  top_address + args_arguments_offset, args_arguments_offset,
1770  value);
1771  }
1772  }
1773 
1774  // Copy the double registers from the input into the output frame.
1775  CopyDoubleRegisters(output_frame);
1776 
1777  // Fill registers containing handler and number of parameters.
1778  SetPlatformCompiledStubRegisters(output_frame, &descriptor);
1779 
1780  // Compute this frame's PC, state, and continuation.
1781  Code* trampoline = NULL;
1782  StubFunctionMode function_mode = descriptor.function_mode();
1784  function_mode).FindCodeInCache(&trampoline);
1785  DCHECK(trampoline != NULL);
1786  output_frame->SetPc(reinterpret_cast<intptr_t>(
1787  trampoline->instruction_start()));
1788  if (FLAG_enable_ool_constant_pool) {
1789  Register constant_pool_reg =
1791  intptr_t constant_pool_value =
1792  reinterpret_cast<intptr_t>(trampoline->constant_pool());
1793  output_frame->SetConstantPool(constant_pool_value);
1794  output_frame->SetRegister(constant_pool_reg.code(), constant_pool_value);
1795  }
1797  Code* notify_failure =
1798  isolate_->builtins()->builtin(Builtins::kNotifyStubFailureSaveDoubles);
1799  output_frame->SetContinuation(
1800  reinterpret_cast<intptr_t>(notify_failure->entry()));
1801 }
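
// [Editorial sketch, not part of deoptimizer.cc] The height computation above
// is plain word counting: one word per pushed register parameter, two words
// for the Arguments record shown in the diagram (args.length_ and
// args.arguments_), and one word for the "caller args pointer" slot. The
// 8-byte pointer size below is an assumption for illustration only.
static int StubFailureHeightInBytes(int param_count) {
  const int kPointerSizeSketch = 8;     // assumed pointer size
  const int kArgumentsRecordWords = 2;  // args.length_ and args.arguments_
  const int kArgsPointerWords = 1;      // the "caller args pointer" slot
  return kPointerSizeSketch *
         (param_count + kArgumentsRecordWords + kArgsPointerWords);
}
// e.g. StubFailureHeightInBytes(3) == 48: three register parameters plus the
// three Arguments-related words laid out in the diagram above.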
1802 
1803 
1805  int object_index = materialization_object_index_++;
1806  ObjectMaterializationDescriptor desc = deferred_objects_[object_index];
1807  const int length = desc.object_length();
1808 
1809  if (desc.duplicate_object() >= 0) {
1810  // Found a previously materialized object by de-duplication.
1811  object_index = desc.duplicate_object();
1813  } else if (desc.is_arguments() && ArgumentsObjectIsAdapted(object_index)) {
1814  // Use the arguments adaptor frame we just built to materialize the
1815  // arguments object. FunctionGetArguments can't throw an exception.
1816  Handle<JSFunction> function = ArgumentsObjectFunction(object_index);
1819  materialized_objects_->Add(arguments);
1820  // To keep consistent object counters, we still materialize the
1821  // nested values (but we throw them away).
1822  for (int i = 0; i < length; ++i) {
1824  }
1825  } else if (desc.is_arguments()) {
1826  // Construct an arguments object and copy the parameters to a newly
1827  // allocated arguments object backing store.
1828  Handle<JSFunction> function = ArgumentsObjectFunction(object_index);
1829  Handle<JSObject> arguments =
1830  isolate_->factory()->NewArgumentsObject(function, length);
1831  Handle<FixedArray> array = isolate_->factory()->NewFixedArray(length);
1832  DCHECK_EQ(array->length(), length);
1833  arguments->set_elements(*array);
1834  materialized_objects_->Add(arguments);
1835  for (int i = 0; i < length; ++i) {
1837  array->set(i, *value);
1838  }
1839  } else {
1840  // Dispatch on the instance type of the object to be materialized.
1841  // We also need to make sure that the representations of all fields
1842  // in the given object are general enough to hold a tagged value.
1845  switch (map->instance_type()) {
1847  case HEAP_NUMBER_TYPE: {
1848  // Reuse the HeapNumber value directly as it is already properly
1849  // tagged and skip materializing the HeapNumber explicitly. Turn mutable
1850  // heap numbers immutable.
1852  if (object_index < prev_materialized_count_) {
1854  previously_materialized_objects_->get(object_index), isolate_));
1855  } else {
1856  materialized_objects_->Add(object);
1857  }
1859  break;
1860  }
1861  case JS_OBJECT_TYPE: {
1862  Handle<JSObject> object =
1863  isolate_->factory()->NewJSObjectFromMap(map, NOT_TENURED, false);
1864  if (object_index < prev_materialized_count_) {
1866  previously_materialized_objects_->get(object_index), isolate_));
1867  } else {
1868  materialized_objects_->Add(object);
1869  }
1870  Handle<Object> properties = MaterializeNextValue();
1871  Handle<Object> elements = MaterializeNextValue();
1872  object->set_properties(FixedArray::cast(*properties));
1873  object->set_elements(FixedArrayBase::cast(*elements));
1874  for (int i = 0; i < length - 3; ++i) {
1876  FieldIndex index = FieldIndex::ForPropertyIndex(object->map(), i);
1877  object->FastPropertyAtPut(index, *value);
1878  }
1879  break;
1880  }
1881  case JS_ARRAY_TYPE: {
1882  Handle<JSArray> object =
1883  isolate_->factory()->NewJSArray(0, map->elements_kind());
1884  if (object_index < prev_materialized_count_) {
1886  previously_materialized_objects_->get(object_index), isolate_));
1887  } else {
1888  materialized_objects_->Add(object);
1889  }
1890  Handle<Object> properties = MaterializeNextValue();
1891  Handle<Object> elements = MaterializeNextValue();
1893  object->set_properties(FixedArray::cast(*properties));
1894  object->set_elements(FixedArrayBase::cast(*elements));
1895  object->set_length(*length);
1896  break;
1897  }
1898  default:
1899  PrintF(stderr,
1900  "[couldn't handle instance type %d]\n", map->instance_type());
1901  FATAL("Unsupported instance type");
1902  }
1903  }
1904 
1905  return materialized_objects_->at(object_index);
1906 }
1907 
1908 
1910  int value_index = materialization_value_index_++;
1911  Handle<Object> value = materialized_values_->at(value_index);
1912  if (value->IsMutableHeapNumber()) {
1913  HeapNumber::cast(*value)->set_map(isolate_->heap()->heap_number_map());
1914  }
1915  if (*value == isolate_->heap()->arguments_marker()) {
1916  value = MaterializeNextHeapObject();
1917  }
1918  return value;
1919 }
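
// [Editorial sketch, not part of deoptimizer.cc] The DUPLICATED_OBJECT path in
// MaterializeNextHeapObject above never builds a second copy: the first
// materialization is remembered by object index, and duplicates resolve to the
// instance recorded for that index. A stand-in using std::shared_ptr in place
// of Handle<Object>:
#include <memory>
#include <string>
#include <vector>

struct SketchObject { std::string payload; };

struct SketchMaterializer {
  // descriptors[i] < 0 means "materialize a fresh object for index i";
  // descriptors[i] >= 0 means "index i is a duplicate of that earlier index".
  std::vector<int> descriptors;
  std::vector<std::shared_ptr<SketchObject>> materialized;

  std::shared_ptr<SketchObject> MaterializeNext() {
    int index = static_cast<int>(materialized.size());
    int desc = descriptors[index];
    if (desc >= 0) {
      // De-duplication: reuse the object already built for 'desc'.
      materialized.push_back(materialized[desc]);
    } else {
      auto fresh = std::make_shared<SketchObject>();
      fresh->payload = "object #" + std::to_string(index);
      materialized.push_back(fresh);
    }
    return materialized.back();
  }
};
// Usage: with descriptors {-1, 0}, the second call returns the very same
// SketchObject instance that the first call created.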
1920 
1921 
1922 void Deoptimizer::MaterializeHeapObjects(JavaScriptFrameIterator* it) {
1924 
1925  MaterializedObjectStore* materialized_store =
1927  previously_materialized_objects_ = materialized_store->Get(stack_fp_);
1929  0 : previously_materialized_objects_->length();
1930 
1931  // Walk all JavaScript output frames with the given frame iterator.
1932  for (int frame_index = 0; frame_index < jsframe_count(); ++frame_index) {
1933  if (frame_index != 0) it->Advance();
1934  JavaScriptFrame* frame = it->frame();
1935  jsframe_functions_.Add(handle(frame->function(), isolate_));
1937  }
1938 
1939  // Handlify all tagged object values before triggering any allocation.
1941  for (int i = 0; i < deferred_objects_tagged_values_.length(); ++i) {
1943  }
1944 
1945  // Play it safe and clear all unhandlified values before we continue.
1947 
1948  // Materialize all heap numbers before looking at arguments because when the
1949  // output frames are used to materialize arguments objects later on, they need
1950  // to already contain valid heap numbers.
1951  for (int i = 0; i < deferred_heap_numbers_.length(); i++) {
1952  HeapNumberMaterializationDescriptor<Address> d = deferred_heap_numbers_[i];
1953  Handle<Object> num = isolate_->factory()->NewNumber(d.value());
1954  if (trace_scope_ != NULL) {
1955  PrintF(trace_scope_->file(),
1956  "Materialized a new heap number %p [%e] in slot %p\n",
1957  reinterpret_cast<void*>(*num),
1958  d.value(),
1959  d.destination());
1960  }
1961  Memory::Object_at(d.destination()) = *num;
1962  }
1963 
1964  // Materialize all heap numbers required for arguments/captured objects.
1965  for (int i = 0; i < deferred_objects_double_values_.length(); i++) {
1966  HeapNumberMaterializationDescriptor<int> d =
1968  Handle<Object> num = isolate_->factory()->NewNumber(d.value());
1969  if (trace_scope_ != NULL) {
1970  PrintF(trace_scope_->file(),
1971  "Materialized a new heap number %p [%e] for object at %d\n",
1972  reinterpret_cast<void*>(*num),
1973  d.value(),
1974  d.destination());
1975  }
1976  DCHECK(values.at(d.destination())->IsTheHole());
1977  values.Set(d.destination(), num);
1978  }
1979 
1980  // Play it safe and clear all object double values before we continue.
1982 
1983  // Materialize arguments/captured objects.
1984  if (!deferred_objects_.is_empty()) {
1985  List<Handle<Object> > materialized_objects(deferred_objects_.length());
1986  materialized_objects_ = &materialized_objects;
1987  materialized_values_ = &values;
1988 
1990  int object_index = materialization_object_index_;
1991  ObjectMaterializationDescriptor descriptor =
1992  deferred_objects_.at(object_index);
1993 
1994  // Find a previously materialized object by de-duplication or
1995  // materialize a new instance of the object if necessary. Store
1996  // the materialized object into the frame slot.
1998  if (descriptor.slot_address() != NULL) {
1999  Memory::Object_at(descriptor.slot_address()) = *object;
2000  }
2001  if (trace_scope_ != NULL) {
2002  if (descriptor.is_arguments()) {
2003  PrintF(trace_scope_->file(),
2004  "Materialized %sarguments object of length %d for %p: ",
2005  ArgumentsObjectIsAdapted(object_index) ? "(adapted) " : "",
2006  Handle<JSObject>::cast(object)->elements()->length(),
2007  reinterpret_cast<void*>(descriptor.slot_address()));
2008  } else {
2009  PrintF(trace_scope_->file(),
2010  "Materialized captured object of size %d for %p: ",
2011  Handle<HeapObject>::cast(object)->Size(),
2012  reinterpret_cast<void*>(descriptor.slot_address()));
2013  }
2014  object->ShortPrint(trace_scope_->file());
2015  PrintF(trace_scope_->file(), "\n");
2016  }
2017  }
2018 
2021  }
2022 
2023  if (prev_materialized_count_ > 0) {
2024  materialized_store->Remove(stack_fp_);
2025  }
2026 }
2027 
2028 
2030  Address parameters_top,
2031  uint32_t parameters_size,
2032  Address expressions_top,
2033  uint32_t expressions_size,
2034  DeoptimizedFrameInfo* info) {
2036  Address parameters_bottom = parameters_top + parameters_size;
2037  Address expressions_bottom = expressions_top + expressions_size;
2038  for (int i = 0; i < deferred_heap_numbers_.length(); i++) {
2039  HeapNumberMaterializationDescriptor<Address> d = deferred_heap_numbers_[i];
2040 
2041  // Check whether the heap number to materialize actually belongs to the frame
2042  // being extracted.
2043  Address slot = d.destination();
2044  if (parameters_top <= slot && slot < parameters_bottom) {
2045  Handle<Object> num = isolate_->factory()->NewNumber(d.value());
2046 
2047  int index = (info->parameters_count() - 1) -
2048  static_cast<int>(slot - parameters_top) / kPointerSize;
2049 
2050  if (trace_scope_ != NULL) {
2051  PrintF(trace_scope_->file(),
2052  "Materializing a new heap number %p [%e] in slot %p"
2053  "for parameter slot #%d\n",
2054  reinterpret_cast<void*>(*num),
2055  d.value(),
2056  d.destination(),
2057  index);
2058  }
2059 
2060  info->SetParameter(index, *num);
2061  } else if (expressions_top <= slot && slot < expressions_bottom) {
2062  Handle<Object> num = isolate_->factory()->NewNumber(d.value());
2063 
2064  int index = info->expression_count() - 1 -
2065  static_cast<int>(slot - expressions_top) / kPointerSize;
2066 
2067  if (trace_scope_ != NULL) {
2068  PrintF(trace_scope_->file(),
2069  "Materializing a new heap number %p [%e] in slot %p"
2070  "for expression slot #%d\n",
2071  reinterpret_cast<void*>(*num),
2072  d.value(),
2073  d.destination(),
2074  index);
2075  }
2076 
2077  info->SetExpression(index, *num);
2078  }
2079  }
2080 }
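
// [Editorial sketch, not part of deoptimizer.cc] The index computation above
// maps a slot address inside [parameters_top, parameters_bottom) back to a
// parameter number; per the formula, the lowest address holds the last
// parameter and indices count down as the address grows. The 8-byte slot size
// is an assumption for illustration.
#include <cassert>
#include <cstdint>

static int ParameterIndexForSlot(uintptr_t parameters_top, uintptr_t slot,
                                 int parameters_count) {
  const int kSlotSize = 8;  // assumed kPointerSize
  return (parameters_count - 1) -
         static_cast<int>(slot - parameters_top) / kSlotSize;
}

// Usage: with three parameters starting at 0x1000, the slot at 0x1000 is
// parameter 2 and the slot at 0x1010 is parameter 0.
static void ParameterIndexExample() {
  assert(ParameterIndexForSlot(0x1000, 0x1000, 3) == 2);
  assert(ParameterIndexForSlot(0x1000, 0x1010, 3) == 0);
}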
2081 
2082 
2083 static const char* TraceValueType(bool is_smi) {
2084  if (is_smi) {
2085  return "smi";
2086  }
2087 
2088  return "heap number";
2089 }
2090 
2091 
2092 void Deoptimizer::DoTranslateObjectAndSkip(TranslationIterator* iterator) {
2093  Translation::Opcode opcode =
2094  static_cast<Translation::Opcode>(iterator->Next());
2095 
2096  switch (opcode) {
2097  case Translation::BEGIN:
2098  case Translation::JS_FRAME:
2099  case Translation::ARGUMENTS_ADAPTOR_FRAME:
2100  case Translation::CONSTRUCT_STUB_FRAME:
2101  case Translation::GETTER_STUB_FRAME:
2102  case Translation::SETTER_STUB_FRAME:
2103  case Translation::COMPILED_STUB_FRAME: {
2104  FATAL("Unexpected frame start translation opcode");
2105  return;
2106  }
2107 
2108  case Translation::REGISTER:
2109  case Translation::INT32_REGISTER:
2110  case Translation::UINT32_REGISTER:
2111  case Translation::DOUBLE_REGISTER:
2112  case Translation::STACK_SLOT:
2113  case Translation::INT32_STACK_SLOT:
2114  case Translation::UINT32_STACK_SLOT:
2115  case Translation::DOUBLE_STACK_SLOT:
2116  case Translation::LITERAL: {
2117  // The value is not part of any materialized object, so we can ignore it.
2118  iterator->Skip(Translation::NumberOfOperandsFor(opcode));
2119  return;
2120  }
2121 
2122  case Translation::DUPLICATED_OBJECT: {
2123  int object_index = iterator->Next();
2124  if (trace_scope_ != NULL) {
2125  PrintF(trace_scope_->file(), " skipping object ");
2126  PrintF(trace_scope_->file(),
2127  " ; duplicate of object #%d\n", object_index);
2128  }
2129  AddObjectDuplication(0, object_index);
2130  return;
2131  }
2132 
2133  case Translation::ARGUMENTS_OBJECT:
2134  case Translation::CAPTURED_OBJECT: {
2135  int length = iterator->Next();
2136  bool is_args = opcode == Translation::ARGUMENTS_OBJECT;
2137  if (trace_scope_ != NULL) {
2138  PrintF(trace_scope_->file(), " skipping object ");
2139  PrintF(trace_scope_->file(),
2140  " ; object (length = %d, is_args = %d)\n", length, is_args);
2141  }
2142 
2143  AddObjectStart(0, length, is_args);
2144 
2145  // We save the object values on the side and materialize the actual
2146  // object after the deoptimized frame is built.
2147  int object_index = deferred_objects_.length() - 1;
2148  for (int i = 0; i < length; i++) {
2149  DoTranslateObject(iterator, object_index, i);
2150  }
2151  return;
2152  }
2153  }
2154 
2155  FATAL("Unexpected translation opcode");
2156 }
2157 
2158 
2159 void Deoptimizer::DoTranslateObject(TranslationIterator* iterator,
2160  int object_index,
2161  int field_index) {
2162  disasm::NameConverter converter;
2163  Address object_slot = deferred_objects_[object_index].slot_address();
2164 
2165  Translation::Opcode opcode =
2166  static_cast<Translation::Opcode>(iterator->Next());
2167 
2168  switch (opcode) {
2169  case Translation::BEGIN:
2170  case Translation::JS_FRAME:
2171  case Translation::ARGUMENTS_ADAPTOR_FRAME:
2172  case Translation::CONSTRUCT_STUB_FRAME:
2173  case Translation::GETTER_STUB_FRAME:
2174  case Translation::SETTER_STUB_FRAME:
2175  case Translation::COMPILED_STUB_FRAME:
2176  FATAL("Unexpected frame start translation opcode");
2177  return;
2178 
2179  case Translation::REGISTER: {
2180  int input_reg = iterator->Next();
2181  intptr_t input_value = input_->GetRegister(input_reg);
2182  if (trace_scope_ != NULL) {
2183  PrintF(trace_scope_->file(),
2184  " object @0x%08" V8PRIxPTR ": [field #%d] <- ",
2185  reinterpret_cast<intptr_t>(object_slot),
2186  field_index);
2187  PrintF(trace_scope_->file(),
2188  "0x%08" V8PRIxPTR " ; %s ", input_value,
2189  converter.NameOfCPURegister(input_reg));
2190  reinterpret_cast<Object*>(input_value)->ShortPrint(
2191  trace_scope_->file());
2192  PrintF(trace_scope_->file(),
2193  "\n");
2194  }
2195  AddObjectTaggedValue(input_value);
2196  return;
2197  }
2198 
2199  case Translation::INT32_REGISTER: {
2200  int input_reg = iterator->Next();
2201  intptr_t value = input_->GetRegister(input_reg);
2202  bool is_smi = Smi::IsValid(value);
2203  if (trace_scope_ != NULL) {
2204  PrintF(trace_scope_->file(),
2205  " object @0x%08" V8PRIxPTR ": [field #%d] <- ",
2206  reinterpret_cast<intptr_t>(object_slot),
2207  field_index);
2208  PrintF(trace_scope_->file(),
2209  "%" V8PRIdPTR " ; %s (%s)\n", value,
2210  converter.NameOfCPURegister(input_reg),
2211  TraceValueType(is_smi));
2212  }
2213  if (is_smi) {
2214  intptr_t tagged_value =
2215  reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
2216  AddObjectTaggedValue(tagged_value);
2217  } else {
2218  double double_value = static_cast<double>(static_cast<int32_t>(value));
2219  AddObjectDoubleValue(double_value);
2220  }
2221  return;
2222  }
2223 
2224  case Translation::UINT32_REGISTER: {
2225  int input_reg = iterator->Next();
2226  uintptr_t value = static_cast<uintptr_t>(input_->GetRegister(input_reg));
2227  bool is_smi = (value <= static_cast<uintptr_t>(Smi::kMaxValue));
2228  if (trace_scope_ != NULL) {
2229  PrintF(trace_scope_->file(),
2230  " object @0x%08" V8PRIxPTR ": [field #%d] <- ",
2231  reinterpret_cast<intptr_t>(object_slot),
2232  field_index);
2233  PrintF(trace_scope_->file(),
2234  "%" V8PRIdPTR " ; uint %s (%s)\n", value,
2235  converter.NameOfCPURegister(input_reg),
2236  TraceValueType(is_smi));
2237  }
2238  if (is_smi) {
2239  intptr_t tagged_value =
2240  reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
2241  AddObjectTaggedValue(tagged_value);
2242  } else {
2243  double double_value = static_cast<double>(static_cast<uint32_t>(value));
2244  AddObjectDoubleValue(double_value);
2245  }
2246  return;
2247  }
2248 
2249  case Translation::DOUBLE_REGISTER: {
2250  int input_reg = iterator->Next();
2251  double value = input_->GetDoubleRegister(input_reg);
2252  if (trace_scope_ != NULL) {
2253  PrintF(trace_scope_->file(),
2254  " object @0x%08" V8PRIxPTR ": [field #%d] <- ",
2255  reinterpret_cast<intptr_t>(object_slot),
2256  field_index);
2257  PrintF(trace_scope_->file(),
2258  "%e ; %s\n", value,
2260  }
2261  AddObjectDoubleValue(value);
2262  return;
2263  }
2264 
2265  case Translation::STACK_SLOT: {
2266  int input_slot_index = iterator->Next();
2267  unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
2268  intptr_t input_value = input_->GetFrameSlot(input_offset);
2269  if (trace_scope_ != NULL) {
2270  PrintF(trace_scope_->file(),
2271  " object @0x%08" V8PRIxPTR ": [field #%d] <- ",
2272  reinterpret_cast<intptr_t>(object_slot),
2273  field_index);
2274  PrintF(trace_scope_->file(),
2275  "0x%08" V8PRIxPTR " ; [sp + %d] ", input_value, input_offset);
2276  reinterpret_cast<Object*>(input_value)->ShortPrint(
2277  trace_scope_->file());
2278  PrintF(trace_scope_->file(),
2279  "\n");
2280  }
2281  AddObjectTaggedValue(input_value);
2282  return;
2283  }
2284 
2285  case Translation::INT32_STACK_SLOT: {
2286  int input_slot_index = iterator->Next();
2287  unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
2288  intptr_t value = input_->GetFrameSlot(input_offset);
2289  bool is_smi = Smi::IsValid(value);
2290  if (trace_scope_ != NULL) {
2291  PrintF(trace_scope_->file(),
2292  " object @0x%08" V8PRIxPTR ": [field #%d] <- ",
2293  reinterpret_cast<intptr_t>(object_slot),
2294  field_index);
2295  PrintF(trace_scope_->file(),
2296  "%" V8PRIdPTR " ; [sp + %d] (%s)\n",
2297  value, input_offset, TraceValueType(is_smi));
2298  }
2299  if (is_smi) {
2300  intptr_t tagged_value =
2301  reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
2302  AddObjectTaggedValue(tagged_value);
2303  } else {
2304  double double_value = static_cast<double>(static_cast<int32_t>(value));
2305  AddObjectDoubleValue(double_value);
2306  }
2307  return;
2308  }
2309 
2310  case Translation::UINT32_STACK_SLOT: {
2311  int input_slot_index = iterator->Next();
2312  unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
2313  uintptr_t value =
2314  static_cast<uintptr_t>(input_->GetFrameSlot(input_offset));
2315  bool is_smi = (value <= static_cast<uintptr_t>(Smi::kMaxValue));
2316  if (trace_scope_ != NULL) {
2317  PrintF(trace_scope_->file(),
2318  " object @0x%08" V8PRIxPTR ": [field #%d] <- ",
2319  reinterpret_cast<intptr_t>(object_slot),
2320  field_index);
2321  PrintF(trace_scope_->file(),
2322  "%" V8PRIdPTR " ; [sp + %d] (uint %s)\n",
2323  value, input_offset, TraceValueType(is_smi));
2324  }
2325  if (is_smi) {
2326  intptr_t tagged_value =
2327  reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
2328  AddObjectTaggedValue(tagged_value);
2329  } else {
2330  double double_value = static_cast<double>(static_cast<uint32_t>(value));
2331  AddObjectDoubleValue(double_value);
2332  }
2333  return;
2334  }
2335 
2336  case Translation::DOUBLE_STACK_SLOT: {
2337  int input_slot_index = iterator->Next();
2338  unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
2339  double value = input_->GetDoubleFrameSlot(input_offset);
2340  if (trace_scope_ != NULL) {
2341  PrintF(trace_scope_->file(),
2342  " object @0x%08" V8PRIxPTR ": [field #%d] <- ",
2343  reinterpret_cast<intptr_t>(object_slot),
2344  field_index);
2345  PrintF(trace_scope_->file(),
2346  "%e ; [sp + %d]\n", value, input_offset);
2347  }
2348  AddObjectDoubleValue(value);
2349  return;
2350  }
2351 
2352  case Translation::LITERAL: {
2353  Object* literal = ComputeLiteral(iterator->Next());
2354  if (trace_scope_ != NULL) {
2355  PrintF(trace_scope_->file(),
2356  " object @0x%08" V8PRIxPTR ": [field #%d] <- ",
2357  reinterpret_cast<intptr_t>(object_slot),
2358  field_index);
2359  literal->ShortPrint(trace_scope_->file());
2360  PrintF(trace_scope_->file(),
2361  " ; literal\n");
2362  }
2363  intptr_t value = reinterpret_cast<intptr_t>(literal);
2364  AddObjectTaggedValue(value);
2365  return;
2366  }
2367 
2368  case Translation::DUPLICATED_OBJECT: {
2369  int object_index = iterator->Next();
2370  if (trace_scope_ != NULL) {
2371  PrintF(trace_scope_->file(),
2372  " nested @0x%08" V8PRIxPTR ": [field #%d] <- ",
2373  reinterpret_cast<intptr_t>(object_slot),
2374  field_index);
2375  isolate_->heap()->arguments_marker()->ShortPrint(trace_scope_->file());
2376  PrintF(trace_scope_->file(),
2377  " ; duplicate of object #%d\n", object_index);
2378  }
2379  // Use the materialization marker value as a sentinel and fill in
2380  // the object after the deoptimized frame is built.
2381  intptr_t value = reinterpret_cast<intptr_t>(
2382  isolate_->heap()->arguments_marker());
2383  AddObjectDuplication(0, object_index);
2384  AddObjectTaggedValue(value);
2385  return;
2386  }
2387 
2388  case Translation::ARGUMENTS_OBJECT:
2389  case Translation::CAPTURED_OBJECT: {
2390  int length = iterator->Next();
2391  bool is_args = opcode == Translation::ARGUMENTS_OBJECT;
2392  if (trace_scope_ != NULL) {
2393  PrintF(trace_scope_->file(),
2394  " nested @0x%08" V8PRIxPTR ": [field #%d] <- ",
2395  reinterpret_cast<intptr_t>(object_slot),
2396  field_index);
2397  isolate_->heap()->arguments_marker()->ShortPrint(trace_scope_->file());
2398  PrintF(trace_scope_->file(),
2399  " ; object (length = %d, is_args = %d)\n", length, is_args);
2400  }
2401  // Use the materialization marker value as a sentinel and fill in
2402  // the object after the deoptimized frame is built.
2403  intptr_t value = reinterpret_cast<intptr_t>(
2404  isolate_->heap()->arguments_marker());
2405  AddObjectStart(0, length, is_args);
2406  AddObjectTaggedValue(value);
2407  // We save the object values on the side and materialize the actual
2408  // object after the deoptimized frame is built.
2409  int object_index = deferred_objects_.length() - 1;
2410  for (int i = 0; i < length; i++) {
2411  DoTranslateObject(iterator, object_index, i);
2412  }
2413  return;
2414  }
2415  }
2416 
2417  FATAL("Unexpected translation opcode");
2418 }
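
// [Editorial sketch, not part of deoptimizer.cc] The recurring is_smi checks
// above decide whether an untagged int32/uint32 can simply be re-tagged as a
// smi, or has to be set aside (AddObjectDoubleValue / AddDoubleValue) and
// boxed as a heap number later. Assuming the 32-bit smi layout, where a smi is
// a 31-bit payload with a zero tag bit at the bottom:
#include <cstdint>

const int32_t kSketchSmiMax = (1 << 30) - 1;  // assumed 31-bit smi range
const int32_t kSketchSmiMin = -(1 << 30);

static bool FitsInSketchSmi(int64_t value) {
  return value >= kSketchSmiMin && value <= kSketchSmiMax;
}

static int32_t TagAsSketchSmi(int32_t value) {
  // Shift the payload up one bit; the low bit stays 0 as the smi tag.
  return static_cast<int32_t>(static_cast<uint32_t>(value) << 1);
}
// Usage: 42 fits and tags to 84; 0x7fffffff does not fit under this layout and
// would take the deferred heap-number path instead.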
2419 
2420 
2421 void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
2422  int frame_index,
2423  unsigned output_offset) {
2424  disasm::NameConverter converter;
2425  // A GC-safe temporary placeholder that we can put in the output frame.
2426  const intptr_t kPlaceholder = reinterpret_cast<intptr_t>(Smi::FromInt(0));
2427 
2428  Translation::Opcode opcode =
2429  static_cast<Translation::Opcode>(iterator->Next());
2430 
2431  switch (opcode) {
2432  case Translation::BEGIN:
2433  case Translation::JS_FRAME:
2434  case Translation::ARGUMENTS_ADAPTOR_FRAME:
2435  case Translation::CONSTRUCT_STUB_FRAME:
2436  case Translation::GETTER_STUB_FRAME:
2437  case Translation::SETTER_STUB_FRAME:
2438  case Translation::COMPILED_STUB_FRAME:
2439  FATAL("Unexpected translation opcode");
2440  return;
2441 
2442  case Translation::REGISTER: {
2443  int input_reg = iterator->Next();
2444  intptr_t input_value = input_->GetRegister(input_reg);
2445  if (trace_scope_ != NULL) {
2446  PrintF(
2447  trace_scope_->file(),
2448  " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR " ; %s ",
2449  output_[frame_index]->GetTop() + output_offset,
2450  output_offset,
2451  input_value,
2452  converter.NameOfCPURegister(input_reg));
2453  reinterpret_cast<Object*>(input_value)->ShortPrint(
2454  trace_scope_->file());
2455  PrintF(trace_scope_->file(), "\n");
2456  }
2457  output_[frame_index]->SetFrameSlot(output_offset, input_value);
2458  return;
2459  }
2460 
2461  case Translation::INT32_REGISTER: {
2462  int input_reg = iterator->Next();
2463  intptr_t value = input_->GetRegister(input_reg);
2464  bool is_smi = Smi::IsValid(value);
2465  if (trace_scope_ != NULL) {
2466  PrintF(
2467  trace_scope_->file(),
2468  " 0x%08" V8PRIxPTR ": [top + %d] <- %" V8PRIdPTR " ; %s (%s)\n",
2469  output_[frame_index]->GetTop() + output_offset,
2470  output_offset,
2471  value,
2472  converter.NameOfCPURegister(input_reg),
2473  TraceValueType(is_smi));
2474  }
2475  if (is_smi) {
2476  intptr_t tagged_value =
2477  reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
2478  output_[frame_index]->SetFrameSlot(output_offset, tagged_value);
2479  } else {
2480  // We save the untagged value on the side and store a GC-safe
2481  // temporary placeholder in the frame.
2482  AddDoubleValue(output_[frame_index]->GetTop() + output_offset,
2483  static_cast<double>(static_cast<int32_t>(value)));
2484  output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
2485  }
2486  return;
2487  }
2488 
2489  case Translation::UINT32_REGISTER: {
2490  int input_reg = iterator->Next();
2491  uintptr_t value = static_cast<uintptr_t>(input_->GetRegister(input_reg));
2492  bool is_smi = value <= static_cast<uintptr_t>(Smi::kMaxValue);
2493  if (trace_scope_ != NULL) {
2494  PrintF(
2495  trace_scope_->file(),
2496  " 0x%08" V8PRIxPTR ": [top + %d] <- %" V8PRIuPTR
2497  " ; uint %s (%s)\n",
2498  output_[frame_index]->GetTop() + output_offset,
2499  output_offset,
2500  value,
2501  converter.NameOfCPURegister(input_reg),
2502  TraceValueType(is_smi));
2503  }
2504  if (is_smi) {
2505  intptr_t tagged_value =
2506  reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
2507  output_[frame_index]->SetFrameSlot(output_offset, tagged_value);
2508  } else {
2509  // We save the untagged value on the side and store a GC-safe
2510  // temporary placeholder in the frame.
2511  AddDoubleValue(output_[frame_index]->GetTop() + output_offset,
2512  static_cast<double>(static_cast<uint32_t>(value)));
2513  output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
2514  }
2515  return;
2516  }
2517 
2518  case Translation::DOUBLE_REGISTER: {
2519  int input_reg = iterator->Next();
2520  double value = input_->GetDoubleRegister(input_reg);
2521  if (trace_scope_ != NULL) {
2522  PrintF(trace_scope_->file(),
2523  " 0x%08" V8PRIxPTR ": [top + %d] <- %e ; %s\n",
2524  output_[frame_index]->GetTop() + output_offset,
2525  output_offset,
2526  value,
2528  }
2529  // We save the untagged value on the side and store a GC-safe
2530  // temporary placeholder in the frame.
2531  AddDoubleValue(output_[frame_index]->GetTop() + output_offset, value);
2532  output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
2533  return;
2534  }
2535 
2536  case Translation::STACK_SLOT: {
2537  int input_slot_index = iterator->Next();
2538  unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
2539  intptr_t input_value = input_->GetFrameSlot(input_offset);
2540  if (trace_scope_ != NULL) {
2541  PrintF(trace_scope_->file(),
2542  " 0x%08" V8PRIxPTR ": ",
2543  output_[frame_index]->GetTop() + output_offset);
2544  PrintF(trace_scope_->file(),
2545  "[top + %d] <- 0x%08" V8PRIxPTR " ; [sp + %d] ",
2546  output_offset,
2547  input_value,
2548  input_offset);
2549  reinterpret_cast<Object*>(input_value)->ShortPrint(
2550  trace_scope_->file());
2551  PrintF(trace_scope_->file(), "\n");
2552  }
2553  output_[frame_index]->SetFrameSlot(output_offset, input_value);
2554  return;
2555  }
2556 
2557  case Translation::INT32_STACK_SLOT: {
2558  int input_slot_index = iterator->Next();
2559  unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
2560  intptr_t value = input_->GetFrameSlot(input_offset);
2561  bool is_smi = Smi::IsValid(value);
2562  if (trace_scope_ != NULL) {
2563  PrintF(trace_scope_->file(),
2564  " 0x%08" V8PRIxPTR ": ",
2565  output_[frame_index]->GetTop() + output_offset);
2566  PrintF(trace_scope_->file(),
2567  "[top + %d] <- %" V8PRIdPTR " ; [sp + %d] (%s)\n",
2568  output_offset,
2569  value,
2570  input_offset,
2571  TraceValueType(is_smi));
2572  }
2573  if (is_smi) {
2574  intptr_t tagged_value =
2575  reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
2576  output_[frame_index]->SetFrameSlot(output_offset, tagged_value);
2577  } else {
2578  // We save the untagged value on the side and store a GC-safe
2579  // temporary placeholder in the frame.
2580  AddDoubleValue(output_[frame_index]->GetTop() + output_offset,
2581  static_cast<double>(static_cast<int32_t>(value)));
2582  output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
2583  }
2584  return;
2585  }
2586 
2587  case Translation::UINT32_STACK_SLOT: {
2588  int input_slot_index = iterator->Next();
2589  unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
2590  uintptr_t value =
2591  static_cast<uintptr_t>(input_->GetFrameSlot(input_offset));
2592  bool is_smi = value <= static_cast<uintptr_t>(Smi::kMaxValue);
2593  if (trace_scope_ != NULL) {
2594  PrintF(trace_scope_->file(),
2595  " 0x%08" V8PRIxPTR ": ",
2596  output_[frame_index]->GetTop() + output_offset);
2597  PrintF(trace_scope_->file(),
2598  "[top + %d] <- %" V8PRIuPTR " ; [sp + %d] (uint32 %s)\n",
2599  output_offset,
2600  value,
2601  input_offset,
2602  TraceValueType(is_smi));
2603  }
2604  if (is_smi) {
2605  intptr_t tagged_value =
2606  reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
2607  output_[frame_index]->SetFrameSlot(output_offset, tagged_value);
2608  } else {
2609  // We save the untagged value on the side and store a GC-safe
2610  // temporary placeholder in the frame.
2611  AddDoubleValue(output_[frame_index]->GetTop() + output_offset,
2612  static_cast<double>(static_cast<uint32_t>(value)));
2613  output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
2614  }
2615  return;
2616  }
2617 
2618  case Translation::DOUBLE_STACK_SLOT: {
2619  int input_slot_index = iterator->Next();
2620  unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
2621  double value = input_->GetDoubleFrameSlot(input_offset);
2622  if (trace_scope_ != NULL) {
2623  PrintF(trace_scope_->file(),
2624  " 0x%08" V8PRIxPTR ": [top + %d] <- %e ; [sp + %d]\n",
2625  output_[frame_index]->GetTop() + output_offset,
2626  output_offset,
2627  value,
2628  input_offset);
2629  }
2630  // We save the untagged value on the side and store a GC-safe
2631  // temporary placeholder in the frame.
2632  AddDoubleValue(output_[frame_index]->GetTop() + output_offset, value);
2633  output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
2634  return;
2635  }
2636 
2637  case Translation::LITERAL: {
2638  Object* literal = ComputeLiteral(iterator->Next());
2639  if (trace_scope_ != NULL) {
2640  PrintF(trace_scope_->file(),
2641  " 0x%08" V8PRIxPTR ": [top + %d] <- ",
2642  output_[frame_index]->GetTop() + output_offset,
2643  output_offset);
2644  literal->ShortPrint(trace_scope_->file());
2645  PrintF(trace_scope_->file(), " ; literal\n");
2646  }
2647  intptr_t value = reinterpret_cast<intptr_t>(literal);
2648  output_[frame_index]->SetFrameSlot(output_offset, value);
2649  return;
2650  }
2651 
2652  case Translation::DUPLICATED_OBJECT: {
2653  int object_index = iterator->Next();
2654  if (trace_scope_ != NULL) {
2655  PrintF(trace_scope_->file(),
2656  " 0x%08" V8PRIxPTR ": [top + %d] <- ",
2657  output_[frame_index]->GetTop() + output_offset,
2658  output_offset);
2659  isolate_->heap()->arguments_marker()->ShortPrint(trace_scope_->file());
2660  PrintF(trace_scope_->file(),
2661  " ; duplicate of object #%d\n", object_index);
2662  }
2663  // Use the materialization marker value as a sentinel and fill in
2664  // the object after the deoptimized frame is built.
2665  intptr_t value = reinterpret_cast<intptr_t>(
2666  isolate_->heap()->arguments_marker());
2667  AddObjectDuplication(output_[frame_index]->GetTop() + output_offset,
2668  object_index);
2669  output_[frame_index]->SetFrameSlot(output_offset, value);
2670  return;
2671  }
2672 
2673  case Translation::ARGUMENTS_OBJECT:
2674  case Translation::CAPTURED_OBJECT: {
2675  int length = iterator->Next();
2676  bool is_args = opcode == Translation::ARGUMENTS_OBJECT;
2677  if (trace_scope_ != NULL) {
2678  PrintF(trace_scope_->file(),
2679  " 0x%08" V8PRIxPTR ": [top + %d] <- ",
2680  output_[frame_index]->GetTop() + output_offset,
2681  output_offset);
2682  isolate_->heap()->arguments_marker()->ShortPrint(trace_scope_->file());
2683  PrintF(trace_scope_->file(),
2684  " ; object (length = %d, is_args = %d)\n", length, is_args);
2685  }
2686  // Use the materialization marker value as a sentinel and fill in
2687  // the object after the deoptimized frame is built.
2688  intptr_t value = reinterpret_cast<intptr_t>(
2689  isolate_->heap()->arguments_marker());
2690  AddObjectStart(output_[frame_index]->GetTop() + output_offset,
2691  length, is_args);
2692  output_[frame_index]->SetFrameSlot(output_offset, value);
2693  // We save the object values on the side and materialize the actual
2694  // object after the deoptimized frame is built.
2695  int object_index = deferred_objects_.length() - 1;
2696  for (int i = 0; i < length; i++) {
2697  DoTranslateObject(iterator, object_index, i);
2698  }
2699  return;
2700  }
2701  }
2702 }
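
// [Editorial sketch, not part of deoptimizer.cc] The pattern above for values
// that cannot be tagged yet: write a GC-safe placeholder (smi zero) into the
// frame slot now, remember the untagged double on the side, and patch the real
// heap number in once allocation is allowed again. A stand-in with raw memory
// and a caller-supplied allocator:
#include <cstdint>
#include <vector>

struct PendingDouble {
  intptr_t* slot;  // where the tagged heap-number pointer will eventually go
  double value;    // the untagged value saved on the side
};

struct SketchFrameWriter {
  std::vector<PendingDouble> deferred;

  void WriteDouble(intptr_t* slot, double value) {
    *slot = 0;  // GC-safe placeholder, playing the role of Smi::FromInt(0)
    deferred.push_back({slot, value});
  }

  // Called later, once it is safe to allocate heap numbers; 'allocate' maps a
  // double to the tagged pointer of a freshly allocated number object.
  template <typename AllocateHeapNumber>
  void Materialize(AllocateHeapNumber allocate) {
    for (const PendingDouble& d : deferred) *d.slot = allocate(d.value);
  }
};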
2703 
2704 
2706  unsigned fixed_size = ComputeFixedSize(function_);
2707  // The fp-to-sp delta already takes the context, constant pool pointer and the
2708  // function into account, so we have to avoid double counting them.
2709  unsigned result = fixed_size + fp_to_sp_delta_ -
2711  if (compiled_code_->kind() == Code::OPTIMIZED_FUNCTION) {
2712  unsigned stack_slots = compiled_code_->stack_slots();
2713  unsigned outgoing_size = ComputeOutgoingArgumentSize();
2714  CHECK(result == fixed_size + (stack_slots * kPointerSize) + outgoing_size);
2715  }
2716  return result;
2717 }
2718 
2719 
2720 unsigned Deoptimizer::ComputeFixedSize(JSFunction* function) const {
2721  // The fixed part of the frame consists of the return address, frame
2722  // pointer, function, context, and all the incoming arguments.
2723  return ComputeIncomingArgumentSize(function) +
2725 }
2726 
2727 
2729  // The incoming arguments are the values for the formal parameters and
2730  // the receiver. Every slot contains a pointer.
2731  if (function->IsSmi()) {
2732  CHECK_EQ(Smi::cast(function), Smi::FromInt(StackFrame::STUB));
2733  return 0;
2734  }
2735  unsigned arguments = function->shared()->formal_parameter_count() + 1;
2736  return arguments * kPointerSize;
2737 }
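
// [Editorial sketch, not part of deoptimizer.cc] The incoming-argument size is
// simply (formal parameter count + 1 receiver) pointer-sized slots, and stub
// frames contribute nothing. The 8-byte pointer size is assumed for
// illustration.
static unsigned SketchIncomingArgumentSize(int formal_parameter_count,
                                           bool is_stub_frame) {
  const unsigned kPointerSizeSketch = 8;  // assumed kPointerSize
  if (is_stub_frame) return 0;
  return (formal_parameter_count + 1) * kPointerSizeSketch;  // +1: receiver
}
// e.g. a function declared with two parameters occupies 3 * 8 = 24 bytes.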
2738 
2739 
2741  DeoptimizationInputData* data = DeoptimizationInputData::cast(
2742  compiled_code_->deoptimization_data());
2743  unsigned height = data->ArgumentsStackHeight(bailout_id_)->value();
2744  return height * kPointerSize;
2745 }
2746 
2747 
2749  DeoptimizationInputData* data = DeoptimizationInputData::cast(
2750  compiled_code_->deoptimization_data());
2751  FixedArray* literals = data->LiteralArray();
2752  return literals->get(index);
2753 }
2754 
2755 
2756 void Deoptimizer::AddObjectStart(intptr_t slot, int length, bool is_args) {
2757  ObjectMaterializationDescriptor object_desc(
2758  reinterpret_cast<Address>(slot), jsframe_count_, length, -1, is_args);
2759  deferred_objects_.Add(object_desc);
2760 }
2761 
2762 
2763 void Deoptimizer::AddObjectDuplication(intptr_t slot, int object_index) {
2764  ObjectMaterializationDescriptor object_desc(
2765  reinterpret_cast<Address>(slot), jsframe_count_, -1, object_index, false);
2766  deferred_objects_.Add(object_desc);
2767 }
2768 
2769 
2771  deferred_objects_tagged_values_.Add(reinterpret_cast<Object*>(value));
2772 }
2773 
2774 
2776  deferred_objects_tagged_values_.Add(isolate()->heap()->the_hole_value());
2777  HeapNumberMaterializationDescriptor<int> value_desc(
2778  deferred_objects_tagged_values_.length() - 1, value);
2780 }
2781 
2782 
2783 void Deoptimizer::AddDoubleValue(intptr_t slot_address, double value) {
2784  HeapNumberMaterializationDescriptor<Address> value_desc(
2785  reinterpret_cast<Address>(slot_address), value);
2786  deferred_heap_numbers_.Add(value_desc);
2787 }
2788 
2789 
2791  BailoutType type,
2792  int max_entry_id) {
2793  // We cannot run this while the serializer is enabled, because it would
2794  // cause us to emit relocation information for the external
2795  // references. That is fine, since the deoptimizer's code section
2796  // isn't meant to be serialized at all.
2797  CHECK(type == EAGER || type == SOFT || type == LAZY);
2799  int entry_count = data->deopt_entry_code_entries_[type];
2800  if (max_entry_id < entry_count) return;
2801  entry_count = Max(entry_count, Deoptimizer::kMinNumberOfEntries);
2802  while (max_entry_id >= entry_count) entry_count *= 2;
2803  CHECK(entry_count <= Deoptimizer::kMaxNumberOfEntries);
2804 
2805  MacroAssembler masm(isolate, NULL, 16 * KB);
2806  masm.set_emit_debug_code(false);
2807  GenerateDeoptimizationEntries(&masm, entry_count, type);
2808  CodeDesc desc;
2809  masm.GetCode(&desc);
2810  DCHECK(!RelocInfo::RequiresRelocation(desc));
2811 
2812  MemoryChunk* chunk = data->deopt_entry_code_[type];
2813  CHECK(static_cast<int>(Deoptimizer::GetMaxDeoptTableSize()) >=
2814  desc.instr_size);
2815  chunk->CommitArea(desc.instr_size);
2816  CopyBytes(chunk->area_start(), desc.buffer,
2817  static_cast<size_t>(desc.instr_size));
2819 
2820  data->deopt_entry_code_entries_[type] = entry_count;
2821 }
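
// [Editorial sketch, not part of deoptimizer.cc] The entry table above grows
// geometrically: start from a minimum count and double until the requested
// entry id fits, so the (fairly expensive) regeneration of the table happens
// only O(log n) times. kMinEntries below is an assumed stand-in for
// Deoptimizer::kMinNumberOfEntries.
static int SketchGrowEntryCount(int current_count, int max_entry_id) {
  const int kMinEntries = 16;  // assumed minimum table size
  int count = current_count > kMinEntries ? current_count : kMinEntries;
  while (max_entry_id >= count) count *= 2;
  return count;
}
// e.g. SketchGrowEntryCount(16, 40) == 64, so one regeneration covers the
// requested id 40 and everything up to id 63.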
2822 
2823 
2825  JSFunction* function)
2826  : frame_size_(frame_size),
2827  function_(function),
2828  top_(kZapUint32),
2829  pc_(kZapUint32),
2830  fp_(kZapUint32),
2831  context_(kZapUint32),
2832  constant_pool_(kZapUint32) {
2833  // Zap all the registers.
2834  for (int r = 0; r < Register::kNumRegisters; r++) {
2835  // TODO(jbramley): It isn't safe to use kZapUint32 here. If the register
2836  // isn't used before the next safepoint, the GC will try to scan it as a
2837  // tagged value. kZapUint32 looks like a valid tagged pointer, but it isn't.
2838  SetRegister(r, kZapUint32);
2839  }
2840 
2841  // Zap all the slots.
2842  for (unsigned o = 0; o < frame_size; o += kPointerSize) {
2844  }
2845 }
2846 
2847 
2851 }
2852 
2853 
2855  if (slot_index >= 0) {
2856  // Local or spill slots. Skip the fixed part of the frame
2857  // including all arguments.
2858  unsigned base = GetFrameSize() - ComputeFixedSize();
2859  return base - ((slot_index + 1) * kPointerSize);
2860  } else {
2861  // Incoming parameter.
2862  int arg_size = (ComputeParametersCount() + 1) * kPointerSize;
2863  unsigned base = GetFrameSize() - arg_size;
2864  return base - ((slot_index + 1) * kPointerSize);
2865  }
2866 }
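
// [Editorial sketch, not part of deoptimizer.cc] A worked example of the slot
// index mapping above, with made-up sizes: frame size 80, fixed frame part 32,
// two formal parameters plus the receiver (argument area 24), 8-byte slots.
#include <cassert>

static int SketchOffsetFromSlotIndex(int slot_index) {
  const int kFrameSize = 80, kFixedSize = 32, kArgSize = 24, kPtr = 8;
  int base =
      (slot_index >= 0) ? kFrameSize - kFixedSize : kFrameSize - kArgSize;
  return base - ((slot_index + 1) * kPtr);
}

static void OffsetExamples() {
  assert(SketchOffsetFromSlotIndex(0) == 40);   // first local/spill slot
  assert(SketchOffsetFromSlotIndex(1) == 32);   // deeper spill slots go down
  assert(SketchOffsetFromSlotIndex(-1) == 56);  // incoming parameters sit...
  assert(SketchOffsetFromSlotIndex(-3) == 72);  // ...at the top of the frame
}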
2867 
2868 
2870  switch (type_) {
2871  case StackFrame::JAVA_SCRIPT:
2872  return function_->shared()->formal_parameter_count();
2874  // Last slot contains the number of incoming arguments as a smi.
2875  // Can't use GetExpression(0) because it would cause infinite recursion.
2876  return reinterpret_cast<Smi*>(*GetFrameSlotPointer(0))->value();
2877  }
2878  case StackFrame::STUB:
2879  return -1; // Minus receiver.
2880  default:
2881  FATAL("Unexpected stack frame type");
2882  return 0;
2883  }
2884 }
2885 
2886 
2888  CHECK_GE(index, 0);
2889  CHECK_LT(index, ComputeParametersCount());
2890  // The slot indexes for incoming arguments are negative.
2891  unsigned offset = GetOffsetFromSlotIndex(index - ComputeParametersCount());
2892  return reinterpret_cast<Object*>(*GetFrameSlotPointer(offset));
2893 }
2894 
2895 
2897  CHECK_EQ(StackFrame::JAVA_SCRIPT, type_);
2898  unsigned size = GetFrameSize() - ComputeFixedSize();
2899  return size / kPointerSize;
2900 }
2901 
2902 
2904  DCHECK_EQ(StackFrame::JAVA_SCRIPT, type_);
2905  unsigned offset = GetOffsetFromSlotIndex(index);
2906  return reinterpret_cast<Object*>(*GetFrameSlotPointer(offset));
2907 }
2908 
2909 
2910 void TranslationBuffer::Add(int32_t value, Zone* zone) {
2911  // Encode the sign bit in the least significant bit.
2912  bool is_negative = (value < 0);
2913  uint32_t bits = ((is_negative ? -value : value) << 1) |
2914  static_cast<int32_t>(is_negative);
2915  // Encode the individual bytes using the least significant bit of
2916  // each byte to indicate whether or not more bytes follow.
2917  do {
2918  uint32_t next = bits >> 7;
2919  contents_.Add(((bits << 1) & 0xFF) | (next != 0), zone);
2920  bits = next;
2921  } while (bits != 0);
2922 }
2923 
2924 
2925 int32_t TranslationIterator::Next() {
2926  // Run through the bytes until we reach one with a least significant
2927  // bit of zero (marks the end).
2928  uint32_t bits = 0;
2929  for (int i = 0; true; i += 7) {
2930  DCHECK(HasNext());
2931  uint8_t next = buffer_->get(index_++);
2932  bits |= (next >> 1) << i;
2933  if ((next & 1) == 0) break;
2934  }
2935  // The bits encode the sign in the least significant bit.
2936  bool is_negative = (bits & 1) == 1;
2937  int32_t result = bits >> 1;
2938  return is_negative ? -result : result;
2939 }
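
// [Editorial sketch, not part of deoptimizer.cc] The two routines above define
// a variable-length signed integer encoding: the sign goes into the lowest bit
// of the value, which is then emitted 7 bits per byte, with each byte's lowest
// bit flagging whether more bytes follow. A self-contained round trip of the
// same scheme:
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

static void SketchEncode(int32_t value, std::vector<uint8_t>* out) {
  bool is_negative = value < 0;
  uint32_t bits = (static_cast<uint32_t>(is_negative ? -value : value) << 1) |
                  static_cast<uint32_t>(is_negative);
  do {
    uint32_t next = bits >> 7;
    out->push_back(static_cast<uint8_t>(((bits << 1) & 0xFF) | (next != 0)));
    bits = next;
  } while (bits != 0);
}

static int32_t SketchDecode(const std::vector<uint8_t>& in,
                            std::size_t* index) {
  uint32_t bits = 0;
  for (int shift = 0; true; shift += 7) {
    uint8_t next = in[(*index)++];
    bits |= static_cast<uint32_t>(next >> 1) << shift;
    if ((next & 1) == 0) break;
  }
  bool is_negative = (bits & 1) == 1;
  int32_t result = static_cast<int32_t>(bits >> 1);
  return is_negative ? -result : result;
}

// Usage: small magnitudes fit in one byte and signed values survive the trip.
static void RoundTripExample() {
  std::vector<uint8_t> bytes;
  SketchEncode(-1234, &bytes);
  SketchEncode(42, &bytes);
  std::size_t index = 0;
  assert(SketchDecode(bytes, &index) == -1234);
  assert(SketchDecode(bytes, &index) == 42);
}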
2940 
2941 
2942 Handle<ByteArray> TranslationBuffer::CreateByteArray(Factory* factory) {
2943  int length = contents_.length();
2944  Handle<ByteArray> result = factory->NewByteArray(length, TENURED);
2945  MemCopy(result->GetDataStartAddress(), contents_.ToVector().start(), length);
2946  return result;
2947 }
2948 
2949 
2950 void Translation::BeginConstructStubFrame(int literal_id, unsigned height) {
2951  buffer_->Add(CONSTRUCT_STUB_FRAME, zone());
2952  buffer_->Add(literal_id, zone());
2953  buffer_->Add(height, zone());
2954 }
2955 
2956 
2957 void Translation::BeginGetterStubFrame(int literal_id) {
2958  buffer_->Add(GETTER_STUB_FRAME, zone());
2959  buffer_->Add(literal_id, zone());
2960 }
2961 
2962 
2963 void Translation::BeginSetterStubFrame(int literal_id) {
2964  buffer_->Add(SETTER_STUB_FRAME, zone());
2965  buffer_->Add(literal_id, zone());
2966 }
2967 
2968 
2969 void Translation::BeginArgumentsAdaptorFrame(int literal_id, unsigned height) {
2970  buffer_->Add(ARGUMENTS_ADAPTOR_FRAME, zone());
2971  buffer_->Add(literal_id, zone());
2972  buffer_->Add(height, zone());
2973 }
2974 
2975 
2976 void Translation::BeginJSFrame(BailoutId node_id,
2977  int literal_id,
2978  unsigned height) {
2979  buffer_->Add(JS_FRAME, zone());
2980  buffer_->Add(node_id.ToInt(), zone());
2981  buffer_->Add(literal_id, zone());
2982  buffer_->Add(height, zone());
2983 }
2984 
2985 
2986 void Translation::BeginCompiledStubFrame() {
2987  buffer_->Add(COMPILED_STUB_FRAME, zone());
2988 }
2989 
2990 
2991 void Translation::BeginArgumentsObject(int args_length) {
2992  buffer_->Add(ARGUMENTS_OBJECT, zone());
2993  buffer_->Add(args_length, zone());
2994 }
2995 
2996 
2997 void Translation::BeginCapturedObject(int length) {
2998  buffer_->Add(CAPTURED_OBJECT, zone());
2999  buffer_->Add(length, zone());
3000 }
3001 
3002 
3003 void Translation::DuplicateObject(int object_index) {
3004  buffer_->Add(DUPLICATED_OBJECT, zone());
3005  buffer_->Add(object_index, zone());
3006 }
3007 
3008 
3009 void Translation::StoreRegister(Register reg) {
3010  buffer_->Add(REGISTER, zone());
3011  buffer_->Add(reg.code(), zone());
3012 }
3013 
3014 
3015 void Translation::StoreInt32Register(Register reg) {
3016  buffer_->Add(INT32_REGISTER, zone());
3017  buffer_->Add(reg.code(), zone());
3018 }
3019 
3020 
3021 void Translation::StoreUint32Register(Register reg) {
3022  buffer_->Add(UINT32_REGISTER, zone());
3023  buffer_->Add(reg.code(), zone());
3024 }
3025 
3026 
3027 void Translation::StoreDoubleRegister(DoubleRegister reg) {
3028  buffer_->Add(DOUBLE_REGISTER, zone());
3029  buffer_->Add(DoubleRegister::ToAllocationIndex(reg), zone());
3030 }
3031 
3032 
3033 void Translation::StoreStackSlot(int index) {
3034  buffer_->Add(STACK_SLOT, zone());
3035  buffer_->Add(index, zone());
3036 }
3037 
3038 
3039 void Translation::StoreInt32StackSlot(int index) {
3040  buffer_->Add(INT32_STACK_SLOT, zone());
3041  buffer_->Add(index, zone());
3042 }
3043 
3044 
3045 void Translation::StoreUint32StackSlot(int index) {
3046  buffer_->Add(UINT32_STACK_SLOT, zone());
3047  buffer_->Add(index, zone());
3048 }
3049 
3050 
3051 void Translation::StoreDoubleStackSlot(int index) {
3052  buffer_->Add(DOUBLE_STACK_SLOT, zone());
3053  buffer_->Add(index, zone());
3054 }
3055 
3056 
3057 void Translation::StoreLiteral(int literal_id) {
3058  buffer_->Add(LITERAL, zone());
3059  buffer_->Add(literal_id, zone());
3060 }
3061 
3062 
3063 void Translation::StoreArgumentsObject(bool args_known,
3064  int args_index,
3065  int args_length) {
3066  buffer_->Add(ARGUMENTS_OBJECT, zone());
3067  buffer_->Add(args_known, zone());
3068  buffer_->Add(args_index, zone());
3069  buffer_->Add(args_length, zone());
3070 }
3071 
3072 
3073 int Translation::NumberOfOperandsFor(Opcode opcode) {
3074  switch (opcode) {
3075  case GETTER_STUB_FRAME:
3076  case SETTER_STUB_FRAME:
3077  case DUPLICATED_OBJECT:
3078  case ARGUMENTS_OBJECT:
3079  case CAPTURED_OBJECT:
3080  case REGISTER:
3081  case INT32_REGISTER:
3082  case UINT32_REGISTER:
3083  case DOUBLE_REGISTER:
3084  case STACK_SLOT:
3085  case INT32_STACK_SLOT:
3086  case UINT32_STACK_SLOT:
3087  case DOUBLE_STACK_SLOT:
3088  case LITERAL:
3089  case COMPILED_STUB_FRAME:
3090  return 1;
3091  case BEGIN:
3092  case ARGUMENTS_ADAPTOR_FRAME:
3093  case CONSTRUCT_STUB_FRAME:
3094  return 2;
3095  case JS_FRAME:
3096  return 3;
3097  }
3098  FATAL("Unexpected translation type");
3099  return -1;
3100 }
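
// [Editorial sketch, not part of deoptimizer.cc] Every Begin*/Store* method
// above appends an opcode followed by its operands, so a reader that knows the
// operand count per opcode (NumberOfOperandsFor above) can skip entries it is
// not interested in. A toy stream with made-up opcodes and operand counts:
#include <cassert>
#include <cstddef>
#include <vector>

enum SketchOpcode { kSketchFrame, kSketchRegisterSlot, kSketchLiteralSlot };

static int SketchOperandCount(SketchOpcode op) {
  switch (op) {
    case kSketchFrame:        return 2;  // e.g. a literal id and a height
    case kSketchRegisterSlot: return 1;  // a register code
    case kSketchLiteralSlot:  return 1;  // a literal id
  }
  return 0;
}

static std::size_t SkipEntry(const std::vector<int>& stream, std::size_t pos) {
  SketchOpcode op = static_cast<SketchOpcode>(stream[pos]);
  return pos + 1 + SketchOperandCount(op);
}

static void SkipExample() {
  std::vector<int> stream = {kSketchFrame, 7, 3,
                             kSketchRegisterSlot, 2,
                             kSketchLiteralSlot, 0};
  std::size_t pos = SkipEntry(stream, 0);  // hop over the frame entry
  assert(pos == 3 && stream[pos] == kSketchRegisterSlot);
}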
3101 
3102 
3103 #if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
3104 
3105 const char* Translation::StringFor(Opcode opcode) {
3106 #define TRANSLATION_OPCODE_CASE(item) case item: return #item;
3107  switch (opcode) {
3108  TRANSLATION_OPCODE_LIST(TRANSLATION_OPCODE_CASE)
3109  }
3110 #undef TRANSLATION_OPCODE_CASE
3111  UNREACHABLE();
3112  return "";
3113 }
3114 
3115 #endif
3116 
3117 
3118 // We can't intermix stack decoding and allocations because
3119 // the deoptimization infrastructure is not GC safe.
3120 // Thus we build a temporary structure in malloced space.
3121 SlotRef SlotRefValueBuilder::ComputeSlotForNextArgument(
3122  Translation::Opcode opcode,
3123  TranslationIterator* iterator,
3124  DeoptimizationInputData* data,
3125  JavaScriptFrame* frame) {
3126  switch (opcode) {
3127  case Translation::BEGIN:
3128  case Translation::JS_FRAME:
3129  case Translation::ARGUMENTS_ADAPTOR_FRAME:
3130  case Translation::CONSTRUCT_STUB_FRAME:
3131  case Translation::GETTER_STUB_FRAME:
3132  case Translation::SETTER_STUB_FRAME:
3133  // Peeled off before getting here.
3134  break;
3135 
3136  case Translation::DUPLICATED_OBJECT: {
3137  return SlotRef::NewDuplicateObject(iterator->Next());
3138  }
3139 
3140  case Translation::ARGUMENTS_OBJECT:
3141  return SlotRef::NewArgumentsObject(iterator->Next());
3142 
3143  case Translation::CAPTURED_OBJECT: {
3144  return SlotRef::NewDeferredObject(iterator->Next());
3145  }
3146 
3147  case Translation::REGISTER:
3148  case Translation::INT32_REGISTER:
3149  case Translation::UINT32_REGISTER:
3150  case Translation::DOUBLE_REGISTER:
3151  // We are at a safepoint that corresponds to a call. All registers are
3152  // saved by the caller, so there are no live registers at this
3153  // point. Thus these translation commands should not be used.
3154  break;
3155 
3156  case Translation::STACK_SLOT: {
3157  int slot_index = iterator->Next();
3158  Address slot_addr = SlotAddress(frame, slot_index);
3159  return SlotRef(slot_addr, SlotRef::TAGGED);
3160  }
3161 
3162  case Translation::INT32_STACK_SLOT: {
3163  int slot_index = iterator->Next();
3164  Address slot_addr = SlotAddress(frame, slot_index);
3165  return SlotRef(slot_addr, SlotRef::INT32);
3166  }
3167 
3168  case Translation::UINT32_STACK_SLOT: {
3169  int slot_index = iterator->Next();
3170  Address slot_addr = SlotAddress(frame, slot_index);
3171  return SlotRef(slot_addr, SlotRef::UINT32);
3172  }
3173 
3174  case Translation::DOUBLE_STACK_SLOT: {
3175  int slot_index = iterator->Next();
3176  Address slot_addr = SlotAddress(frame, slot_index);
3177  return SlotRef(slot_addr, SlotRef::DOUBLE);
3178  }
3179 
3180  case Translation::LITERAL: {
3181  int literal_index = iterator->Next();
3182  return SlotRef(data->GetIsolate(),
3183  data->LiteralArray()->get(literal_index));
3184  }
3185 
3186  case Translation::COMPILED_STUB_FRAME:
3187  UNREACHABLE();
3188  break;
3189  }
3190 
3191  FATAL("We should never get here - unexpected deopt info.");
3192  return SlotRef();
3193 }
3194 
3195 
3196 SlotRefValueBuilder::SlotRefValueBuilder(JavaScriptFrame* frame,
3197  int inlined_jsframe_index,
3198  int formal_parameter_count)
3199  : current_slot_(0), args_length_(-1), first_slot_index_(-1) {
3200  DisallowHeapAllocation no_gc;
3201 
3202  int deopt_index = Safepoint::kNoDeoptimizationIndex;
3203  DeoptimizationInputData* data =
3204  static_cast<OptimizedFrame*>(frame)->GetDeoptimizationData(&deopt_index);
3205  TranslationIterator it(data->TranslationByteArray(),
3206  data->TranslationIndex(deopt_index)->value());
3207  Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
3208  CHECK_EQ(opcode, Translation::BEGIN);
3209  it.Next(); // Drop frame count.
3210 
3211  stack_frame_id_ = frame->fp();
3212 
3213  int jsframe_count = it.Next();
3214  CHECK_GT(jsframe_count, inlined_jsframe_index);
3215  int jsframes_to_skip = inlined_jsframe_index;
3216  int number_of_slots = -1; // Number of slots inside our frame (yet unknown)
3217  bool should_deopt = false;
3218  while (number_of_slots != 0) {
3219  opcode = static_cast<Translation::Opcode>(it.Next());
3220  bool processed = false;
3221  if (opcode == Translation::ARGUMENTS_ADAPTOR_FRAME) {
3222  if (jsframes_to_skip == 0) {
3223  CHECK_EQ(Translation::NumberOfOperandsFor(opcode), 2);
3224 
3225  it.Skip(1); // literal id
3226  int height = it.Next();
3227 
3228  // Skip the translation command for the receiver.
3229  it.Skip(Translation::NumberOfOperandsFor(
3230  static_cast<Translation::Opcode>(it.Next())));
3231 
3232  // We reached the arguments adaptor frame corresponding to the
3233  // inlined function in question. Number of arguments is height - 1.
3234  first_slot_index_ = slot_refs_.length();
3235  args_length_ = height - 1;
3236  number_of_slots = height - 1;
3237  processed = true;
3238  }
3239  } else if (opcode == Translation::JS_FRAME) {
3240  if (jsframes_to_skip == 0) {
3241  // Skip over operands to advance to the next opcode.
3242  it.Skip(Translation::NumberOfOperandsFor(opcode));
3243 
3244  // Skip the translation command for the receiver.
3245  it.Skip(Translation::NumberOfOperandsFor(
3246  static_cast<Translation::Opcode>(it.Next())));
3247 
3248  // We reached the frame corresponding to the inlined function
3249  // in question. Process the translation commands for the
 3250  // arguments. Number of arguments is equal to the
 3251  // formal parameter count.
3252  first_slot_index_ = slot_refs_.length();
3253  args_length_ = formal_parameter_count;
3254  number_of_slots = formal_parameter_count;
3255  processed = true;
3256  }
3257  jsframes_to_skip--;
3258  } else if (opcode != Translation::BEGIN &&
3259  opcode != Translation::CONSTRUCT_STUB_FRAME &&
3260  opcode != Translation::GETTER_STUB_FRAME &&
3261  opcode != Translation::SETTER_STUB_FRAME &&
3262  opcode != Translation::COMPILED_STUB_FRAME) {
3263  slot_refs_.Add(ComputeSlotForNextArgument(opcode, &it, data, frame));
3264 
3265  if (first_slot_index_ >= 0) {
3266  // We have found the beginning of our frame -> make sure we count
3267  // the nested slots of captured objects
3268  number_of_slots--;
3269  SlotRef& slot = slot_refs_.last();
3270  CHECK_NE(slot.Representation(), SlotRef::ARGUMENTS_OBJECT);
3271  number_of_slots += slot.GetChildrenCount();
3272  if (slot.Representation() == SlotRef::DEFERRED_OBJECT ||
3273  slot.Representation() == SlotRef::DUPLICATE_OBJECT) {
3274  should_deopt = true;
3275  }
3276  }
3277 
3278  processed = true;
3279  }
3280  if (!processed) {
3281  // Skip over operands to advance to the next opcode.
3282  it.Skip(Translation::NumberOfOperandsFor(opcode));
3283  }
3284  }
3285  if (should_deopt) {
3286  List<JSFunction*> functions(2);
3287  frame->GetFunctions(&functions);
3288  Deoptimizer::DeoptimizeFunction(functions[0]);
3289  }
3290 }
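The loop above keeps a running count of the translation commands that still belong to the selected frame: each slot consumes one expected entry, while a captured (deferred) object adds its nested slots back to the count. A minimal standalone sketch of that bookkeeping, using illustrative names that are not part of V8:

#include <vector>

// Stand-in for one translated slot; children_count is 0 for plain values and
// equals the field count for a deferred (captured) object.
struct SketchSlot { int children_count; };

// Returns how many translation commands make up one frame's arguments,
// including the nested slots of any captured objects.
int SlotsBelongingToFrame(const std::vector<SketchSlot>& slots, int args_length) {
  int remaining = args_length;  // slots still expected for this frame
  int consumed = 0;
  while (remaining != 0 && consumed < static_cast<int>(slots.size())) {
    const SketchSlot& slot = slots[consumed++];
    --remaining;                        // this command has been consumed
    remaining += slot.children_count;   // its nested slots still follow
  }
  return consumed;
}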
3291 
3292 
3293 Handle<Object> SlotRef::GetValue(Isolate* isolate) {
3294  switch (representation_) {
3295  case TAGGED:
3296  return Handle<Object>(Memory::Object_at(addr_), isolate);
3297 
3298  case INT32: {
3299 #if V8_TARGET_BIG_ENDIAN && V8_HOST_ARCH_64_BIT
3300  int value = Memory::int32_at(addr_ + kIntSize);
3301 #else
3302  int value = Memory::int32_at(addr_);
3303 #endif
3304  if (Smi::IsValid(value)) {
3305  return Handle<Object>(Smi::FromInt(value), isolate);
3306  } else {
3307  return isolate->factory()->NewNumberFromInt(value);
3308  }
3309  }
3310 
3311  case UINT32: {
3312 #if V8_TARGET_BIG_ENDIAN && V8_HOST_ARCH_64_BIT
3313  uint32_t value = Memory::uint32_at(addr_ + kIntSize);
3314 #else
3315  uint32_t value = Memory::uint32_at(addr_);
3316 #endif
3317  if (value <= static_cast<uint32_t>(Smi::kMaxValue)) {
3318  return Handle<Object>(Smi::FromInt(static_cast<int>(value)), isolate);
3319  } else {
3320  return isolate->factory()->NewNumber(static_cast<double>(value));
3321  }
3322  }
3323 
3324  case DOUBLE: {
3325  double value = read_double_value(addr_);
3326  return isolate->factory()->NewNumber(value);
3327  }
3328 
3329  case LITERAL:
3330  return literal_;
3331 
3332  default:
3333  FATAL("We should never get here - unexpected deopt info.");
3334  return Handle<Object>::null();
3335  }
3336 }
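In the INT32 and UINT32 cases above, the raw slot value is returned as a tagged small integer only when it fits the Smi range; otherwise a number object has to be allocated. A self-contained sketch of that decision follows; the 31-bit Smi limit is an assumption of the sketch (it matches 32-bit targets), and none of these names are V8 API:

#include <cstdint>

constexpr int32_t kSketchSmiMax = (1 << 30) - 1;  // assumed 31-bit Smi payload
constexpr int32_t kSketchSmiMin = -(1 << 30);

enum class BoxKind { kSmi, kHeapNumber };

// Decide how a reconstructed 32-bit slot value would be boxed.
inline BoxKind ClassifyInt32Slot(int32_t value) {
  return (value >= kSketchSmiMin && value <= kSketchSmiMax)
             ? BoxKind::kSmi          // fits in a tagged small integer
             : BoxKind::kHeapNumber;  // needs a freshly allocated number
}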
3337 
3338 
3339 void SlotRefValueBuilder::Prepare(Isolate* isolate) {
3340  MaterializedObjectStore* materialized_store =
3341  isolate->materialized_object_store();
3342  previously_materialized_objects_ = materialized_store->Get(stack_frame_id_);
3343  prev_materialized_count_ = previously_materialized_objects_.is_null()
3344  ? 0 : previously_materialized_objects_->length();
3345 
3346  // Skip any materialized objects of the inlined "parent" frames.
3347  // (Note that we still need to materialize them because they might be
3348  // referred to as duplicated objects.)
3349  while (current_slot_ < first_slot_index_) {
3350  GetNext(isolate, 0);
3351  }
3352  CHECK_EQ(current_slot_, first_slot_index_);
3353 }
3354 
3355 
3356 Handle<Object> SlotRefValueBuilder::GetPreviouslyMaterialized(
3357  Isolate* isolate, int length) {
3358  int object_index = materialized_objects_.length();
3359  Handle<Object> return_value = Handle<Object>(
3360  previously_materialized_objects_->get(object_index), isolate);
3361  materialized_objects_.Add(return_value);
3362 
 3363  // Now we need to skip all the nested objects (and possibly read them
 3364  // from the materialization store, too).
3365  for (int i = 0; i < length; i++) {
3366  SlotRef& slot = slot_refs_[current_slot_];
3367  current_slot_++;
3368 
3369  // We need to read all the nested objects - add them to the
3370  // number of objects we need to process.
3371  length += slot.GetChildrenCount();
3372 
3373  // Put the nested deferred/duplicate objects into our materialization
3374  // array.
3375  if (slot.Representation() == SlotRef::DEFERRED_OBJECT ||
3376  slot.Representation() == SlotRef::DUPLICATE_OBJECT) {
3377  int nested_object_index = materialized_objects_.length();
3378  Handle<Object> nested_object = Handle<Object>(
3379  previously_materialized_objects_->get(nested_object_index),
3380  isolate);
3381  materialized_objects_.Add(nested_object);
3382  }
3383  }
3384 
3385  return return_value;
3386 }
3387 
3388 
3389 Handle<Object> SlotRefValueBuilder::GetNext(Isolate* isolate, int lvl) {
3390  SlotRef& slot = slot_refs_[current_slot_];
3391  current_slot_++;
3392  switch (slot.Representation()) {
3393  case SlotRef::TAGGED:
3394  case SlotRef::INT32:
3395  case SlotRef::UINT32:
3396  case SlotRef::DOUBLE:
3397  case SlotRef::LITERAL: {
3398  return slot.GetValue(isolate);
3399  }
3400  case SlotRef::ARGUMENTS_OBJECT: {
3401  // We should never need to materialize an arguments object,
3402  // but we still need to put something into the array
3403  // so that the indexing is consistent.
3404  materialized_objects_.Add(isolate->factory()->undefined_value());
3405  int length = slot.GetChildrenCount();
3406  for (int i = 0; i < length; ++i) {
3407  // We don't need the argument, just ignore it
3408  GetNext(isolate, lvl + 1);
3409  }
3410  return isolate->factory()->undefined_value();
3411  }
3412  case SlotRef::DEFERRED_OBJECT: {
3413  int length = slot.GetChildrenCount();
3414  CHECK(slot_refs_[current_slot_].Representation() == SlotRef::LITERAL ||
3415  slot_refs_[current_slot_].Representation() == SlotRef::TAGGED);
3416 
3417  int object_index = materialized_objects_.length();
3418  if (object_index < prev_materialized_count_) {
3419  return GetPreviouslyMaterialized(isolate, length);
3420  }
3421 
3422  Handle<Object> map_object = slot_refs_[current_slot_].GetValue(isolate);
 3423  Handle<Map> map = Map::GeneralizeAllFieldRepresentations(
 3424  Handle<Map>::cast(map_object));
3425  current_slot_++;
3426  // TODO(jarin) this should be unified with the code in
3427  // Deoptimizer::MaterializeNextHeapObject()
3428  switch (map->instance_type()) {
 3429  case MUTABLE_HEAP_NUMBER_TYPE:
 3430  case HEAP_NUMBER_TYPE: {
3431  // Reuse the HeapNumber value directly as it is already properly
3432  // tagged and skip materializing the HeapNumber explicitly.
3433  Handle<Object> object = GetNext(isolate, lvl + 1);
3434  materialized_objects_.Add(object);
3435  // On 32-bit architectures, there is an extra slot there because
3436  // the escape analysis calculates the number of slots as
3437  // object-size/pointer-size. To account for this, we read out
3438  // any extra slots.
3439  for (int i = 0; i < length - 2; i++) {
3440  GetNext(isolate, lvl + 1);
3441  }
3442  return object;
3443  }
3444  case JS_OBJECT_TYPE: {
3445  Handle<JSObject> object =
3446  isolate->factory()->NewJSObjectFromMap(map, NOT_TENURED, false);
3447  materialized_objects_.Add(object);
3448  Handle<Object> properties = GetNext(isolate, lvl + 1);
3449  Handle<Object> elements = GetNext(isolate, lvl + 1);
3450  object->set_properties(FixedArray::cast(*properties));
3451  object->set_elements(FixedArrayBase::cast(*elements));
3452  for (int i = 0; i < length - 3; ++i) {
3453  Handle<Object> value = GetNext(isolate, lvl + 1);
3454  FieldIndex index = FieldIndex::ForPropertyIndex(object->map(), i);
3455  object->FastPropertyAtPut(index, *value);
3456  }
3457  return object;
3458  }
3459  case JS_ARRAY_TYPE: {
3460  Handle<JSArray> object =
3461  isolate->factory()->NewJSArray(0, map->elements_kind());
3462  materialized_objects_.Add(object);
3463  Handle<Object> properties = GetNext(isolate, lvl + 1);
3464  Handle<Object> elements = GetNext(isolate, lvl + 1);
3465  Handle<Object> length = GetNext(isolate, lvl + 1);
3466  object->set_properties(FixedArray::cast(*properties));
3467  object->set_elements(FixedArrayBase::cast(*elements));
3468  object->set_length(*length);
3469  return object;
3470  }
3471  default:
3472  PrintF(stderr,
3473  "[couldn't handle instance type %d]\n", map->instance_type());
3474  UNREACHABLE();
3475  break;
3476  }
3477  UNREACHABLE();
3478  break;
3479  }
3480 
3481  case SlotRef::DUPLICATE_OBJECT: {
3482  int object_index = slot.DuplicateObjectId();
3483  Handle<Object> object = materialized_objects_[object_index];
3484  materialized_objects_.Add(object);
3485  return object;
3486  }
3487  default:
3488  UNREACHABLE();
3489  break;
3490  }
3491 
3492  FATAL("We should never get here - unexpected deopt slot kind.");
3493  return Handle<Object>::null();
3494 }
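The DUPLICATE_OBJECT case above never rebuilds anything: it looks up the value materialized earlier at the recorded index and appends it again, purely so that later indices stay consistent. A tiny sketch of that pattern with generic types (not V8 handles, names are illustrative):

#include <memory>
#include <vector>

using ObjectRef = std::shared_ptr<int>;  // stand-in for Handle<Object>

// Reuse a previously materialized object and keep the index bookkeeping
// consistent by appending it to the materialized list again.
ObjectRef ReuseDuplicate(std::vector<ObjectRef>* materialized, int object_index) {
  ObjectRef object = (*materialized)[object_index];
  materialized->push_back(object);
  return object;
}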
3495 
3496 
3497 void SlotRefValueBuilder::Finish(Isolate* isolate) {
3498  // We should have processed all the slots
3499  CHECK_EQ(slot_refs_.length(), current_slot_);
3500 
3501  if (materialized_objects_.length() > prev_materialized_count_) {
3502  // We have materialized some new objects, so we have to store them
3503  // to prevent duplicate materialization
3504  Handle<FixedArray> array = isolate->factory()->NewFixedArray(
3505  materialized_objects_.length());
3506  for (int i = 0; i < materialized_objects_.length(); i++) {
3507  array->set(i, *(materialized_objects_.at(i)));
3508  }
3509  isolate->materialized_object_store()->Set(stack_frame_id_, array);
3510  }
3511 }
3512 
3513 
3514 Handle<FixedArray> MaterializedObjectStore::Get(Address fp) {
 3515  int index = StackIdToIndex(fp);
3516  if (index == -1) {
3517  return Handle<FixedArray>::null();
3518  }
 3519  Handle<FixedArray> array = GetStackEntries();
 3520  CHECK_GT(array->length(), index);
3521  return Handle<FixedArray>::cast(Handle<Object>(array->get(index),
3522  isolate()));
3523 }
3524 
3525 
3526 void MaterializedObjectStore::Set(Address fp,
 3527  Handle<FixedArray> materialized_objects) {
3528  int index = StackIdToIndex(fp);
3529  if (index == -1) {
3530  index = frame_fps_.length();
3531  frame_fps_.Add(fp);
3532  }
3533 
3534  Handle<FixedArray> array = EnsureStackEntries(index + 1);
3535  array->set(index, *materialized_objects);
3536 }
3537 
3538 
3539 void MaterializedObjectStore::Remove(Address fp) {
 3540  int index = StackIdToIndex(fp);
3541  CHECK_GE(index, 0);
3542 
3543  frame_fps_.Remove(index);
 3544  Handle<FixedArray> array = GetStackEntries();
 3545  CHECK_LT(index, array->length());
3546  for (int i = index; i < frame_fps_.length(); i++) {
3547  array->set(i, array->get(i + 1));
3548  }
3549  array->set(frame_fps_.length(), isolate()->heap()->undefined_value());
3550 }
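Remove above keeps two parallel structures in sync: the frame-pointer list drops the entry, and the backing array shifts every later element down by one before clearing the vacated tail slot. A standalone sketch of the same idea with plain containers (illustrative only, not the V8 implementation; it assumes the value array is at least one entry longer than the key list after the erase, mirroring the length check above):

#include <cstddef>
#include <cstdint>
#include <vector>

// Remove entry |index| from a pair of parallel containers: erase it from the
// key list and shift the value array down so both stay index-aligned.
void RemoveParallelEntry(std::vector<uintptr_t>* fps, std::vector<int>* values,
                         std::size_t index) {
  fps->erase(fps->begin() + index);
  for (std::size_t i = index; i + 1 < values->size(); ++i) {
    (*values)[i] = (*values)[i + 1];  // shift later entries down by one
  }
  (*values)[fps->size()] = 0;         // clear the now-unused tail slot
}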
3551 
3552 
3553 int MaterializedObjectStore::StackIdToIndex(Address fp) {
 3554  for (int i = 0; i < frame_fps_.length(); i++) {
3555  if (frame_fps_[i] == fp) {
3556  return i;
3557  }
3558  }
3559  return -1;
3560 }
3561 
3562 
3563 Handle<FixedArray> MaterializedObjectStore::GetStackEntries() {
 3564  return Handle<FixedArray>(isolate()->heap()->materialized_objects());
3565 }
3566 
 3567 
3568 Handle<FixedArray> MaterializedObjectStore::EnsureStackEntries(int length) {
 3569  Handle<FixedArray> array = GetStackEntries();
 3570  if (array->length() >= length) {
3571  return array;
3572  }
3573 
3574  int new_length = length > 10 ? length : 10;
3575  if (new_length < 2 * array->length()) {
3576  new_length = 2 * array->length();
3577  }
3578 
3579  Handle<FixedArray> new_array =
3580  isolate()->factory()->NewFixedArray(new_length, TENURED);
3581  for (int i = 0; i < array->length(); i++) {
3582  new_array->set(i, array->get(i));
3583  }
3584  for (int i = array->length(); i < length; i++) {
3585  new_array->set(i, isolate()->heap()->undefined_value());
3586  }
3587  isolate()->heap()->public_set_materialized_objects(*new_array);
3588  return new_array;
3589 }
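EnsureStackEntries grows the backing array to at least ten entries and never by less than a factor of two over the current length, which keeps repeated growth cheap on average. A small sketch of just that arithmetic (names are illustrative, not V8 API):

// Compute the new backing-store length: at least 10, at least the requested
// length, and at least double the current length.
inline int NewBackingStoreLength(int requested, int current) {
  int new_length = requested > 10 ? requested : 10;
  if (new_length < 2 * current) new_length = 2 * current;
  return new_length;
}
// For example: NewBackingStoreLength(11, 10) == 20 and
// NewBackingStoreLength(3, 0) == 10.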
3590 
3591 
3592 DeoptimizedFrameInfo::DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
 3593  int frame_index,
3594  bool has_arguments_adaptor,
3595  bool has_construct_stub) {
3596  FrameDescription* output_frame = deoptimizer->output_[frame_index];
3597  function_ = output_frame->GetFunction();
3598  context_ = reinterpret_cast<Object*>(output_frame->GetContext());
3599  has_construct_stub_ = has_construct_stub;
3600  expression_count_ = output_frame->GetExpressionCount();
 3601  expression_stack_ = new Object*[expression_count_];
 3602  // Get the source position using the unoptimized code.
3603  Address pc = reinterpret_cast<Address>(output_frame->GetPc());
3604  Code* code = Code::cast(deoptimizer->isolate()->FindCodeObject(pc));
 3605  source_position_ = code->SourcePosition(pc);
 3606 
3607  for (int i = 0; i < expression_count_; i++) {
3608  SetExpression(i, output_frame->GetExpression(i));
3609  }
3610 
3611  if (has_arguments_adaptor) {
3612  output_frame = deoptimizer->output_[frame_index - 1];
 3613  CHECK_EQ(output_frame->GetFrameType(), StackFrame::ARGUMENTS_ADAPTOR);
 3614  }
3615 
3616  parameters_count_ = output_frame->ComputeParametersCount();
 3617  parameters_ = new Object*[parameters_count_];
 3618  for (int i = 0; i < parameters_count_; i++) {
3619  SetParameter(i, output_frame->GetParameter(i));
3620  }
3621 }
3622 
3623 
3624 DeoptimizedFrameInfo::~DeoptimizedFrameInfo() {
 3625  delete[] expression_stack_;
3626  delete[] parameters_;
3627 }
3628 
3629 
3630 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) {
 3631  v->VisitPointer(bit_cast<Object**>(&function_));
3632  v->VisitPointer(&context_);
3633  v->VisitPointers(parameters_, parameters_ + parameters_count_);
 3634  v->VisitPointers(expression_stack_, expression_stack_ + expression_count_);
 3635 }
3636 
3637 } } // namespace v8::internal