code-stubs-hydrogen.cc
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/bailout-reason.h"
#include "src/code-stubs.h"
#include "src/field-index.h"
#include "src/hydrogen.h"
#include "src/lithium.h"

namespace v8 {
namespace internal {


static LChunk* OptimizeGraph(HGraph* graph) {
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  DisallowHandleDereference no_deref;

  DCHECK(graph != NULL);
  BailoutReason bailout_reason = kNoReason;
  if (!graph->Optimize(&bailout_reason)) {
    FATAL(GetBailoutReason(bailout_reason));
  }
  LChunk* chunk = LChunk::NewChunk(graph);
  if (chunk == NULL) {
    FATAL(GetBailoutReason(graph->info()->bailout_reason()));
  }
  return chunk;
}


class CodeStubGraphBuilderBase : public HGraphBuilder {
 public:
  CodeStubGraphBuilderBase(Isolate* isolate, HydrogenCodeStub* stub)
      : HGraphBuilder(&info_),
        arguments_length_(NULL),
        info_(stub, isolate),
        descriptor_(stub),
        context_(NULL) {
    int parameter_count = descriptor_.GetEnvironmentParameterCount();
    parameters_.Reset(new HParameter*[parameter_count]);
  }
  virtual bool BuildGraph();

 protected:
  virtual HValue* BuildCodeStub() = 0;
  HParameter* GetParameter(int parameter) {
    DCHECK(parameter < descriptor_.GetEnvironmentParameterCount());
    return parameters_[parameter];
  }
  HValue* GetArgumentsLength() {
    // This is initialized in BuildGraph()
    DCHECK(arguments_length_ != NULL);
    return arguments_length_;
  }
  CompilationInfo* info() { return &info_; }
  HydrogenCodeStub* stub() { return info_.code_stub(); }
  HContext* context() { return context_; }
  Isolate* isolate() { return info_.isolate(); }

  HLoadNamedField* BuildLoadNamedField(HValue* object,
                                       FieldIndex index);
  void BuildStoreNamedField(HValue* object, HValue* value, FieldIndex index,
                            Representation representation);

  enum ArgumentClass {
    NONE,
    SINGLE,
    MULTIPLE
  };

  HValue* UnmappedCase(HValue* elements, HValue* key);

  HValue* BuildArrayConstructor(ElementsKind kind,
                                AllocationSiteOverrideMode override_mode,
                                ArgumentClass argument_class);
  HValue* BuildInternalArrayConstructor(ElementsKind kind,
                                        ArgumentClass argument_class);

  // BuildCheckAndInstallOptimizedCode emits code to install the optimized
  // function found in the optimized code map at map_index in js_function, if
  // the function at map_index matches the given native_context. Builder is
  // left in the "Then()" state after the install.
  void BuildCheckAndInstallOptimizedCode(HValue* js_function,
                                         HValue* native_context,
                                         IfBuilder* builder,
                                         HValue* optimized_map,
                                         HValue* map_index);
  void BuildInstallCode(HValue* js_function, HValue* shared_info);

  HInstruction* LoadFromOptimizedCodeMap(HValue* optimized_map,
                                         HValue* iterator,
                                         int field_offset);
  void BuildInstallFromOptimizedCodeMap(HValue* js_function,
                                        HValue* shared_info,
                                        HValue* native_context);

 private:
  HValue* BuildArraySingleArgumentConstructor(JSArrayBuilder* builder);
  HValue* BuildArrayNArgumentsConstructor(JSArrayBuilder* builder,
                                          ElementsKind kind);

  SmartArrayPointer<HParameter*> parameters_;
  HValue* arguments_length_;
  CompilationInfoWithZone info_;
  CodeStubDescriptor descriptor_;
  HContext* context_;
};
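

// Each concrete stub below provides a CodeStubGraphBuilder<Stub>
// specialization of BuildCodeStub() (or of the initialized/uninitialized
// variants defined further down). BuildGraph(), next, binds the descriptor's
// environment parameters, delegates to BuildCodeStub() for the stub body, and
// returns its value with the appropriate stack adjustment.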


bool CodeStubGraphBuilderBase::BuildGraph() {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  if (FLAG_trace_hydrogen_stubs) {
    const char* name = CodeStub::MajorName(stub()->MajorKey(), false);
    PrintF("-----------------------------------------------------------\n");
    PrintF("Compiling stub %s using hydrogen\n", name);
    isolate()->GetHTracer()->TraceCompilation(&info_);
  }

  int param_count = descriptor_.GetEnvironmentParameterCount();
  HEnvironment* start_environment = graph()->start_environment();
  HBasicBlock* next_block = CreateBasicBlock(start_environment);
  Goto(next_block);
  next_block->SetJoinId(BailoutId::StubEntry());
  set_current_block(next_block);

  bool runtime_stack_params = descriptor_.stack_parameter_count().is_valid();
  HInstruction* stack_parameter_count = NULL;
  for (int i = 0; i < param_count; ++i) {
    Representation r = descriptor_.GetEnvironmentParameterRepresentation(i);
    HParameter* param = Add<HParameter>(i,
                                        HParameter::REGISTER_PARAMETER, r);
    start_environment->Bind(i, param);
    parameters_[i] = param;
    if (descriptor_.IsEnvironmentParameterCountRegister(i)) {
      param->set_type(HType::Smi());
      stack_parameter_count = param;
      arguments_length_ = stack_parameter_count;
    }
  }

  DCHECK(!runtime_stack_params || arguments_length_ != NULL);
  if (!runtime_stack_params) {
    stack_parameter_count = graph()->GetConstantMinus1();
    arguments_length_ = graph()->GetConstant0();
  }

  context_ = Add<HContext>();
  start_environment->BindContext(context_);

  Add<HSimulate>(BailoutId::StubEntry());

  NoObservableSideEffectsScope no_effects(this);

  HValue* return_value = BuildCodeStub();

  // We might have extra expressions to pop from the stack in addition to the
  // arguments above.
  HInstruction* stack_pop_count = stack_parameter_count;
  if (descriptor_.function_mode() == JS_FUNCTION_STUB_MODE) {
    if (!stack_parameter_count->IsConstant() &&
        descriptor_.hint_stack_parameter_count() < 0) {
      HInstruction* constant_one = graph()->GetConstant1();
      stack_pop_count = AddUncasted<HAdd>(stack_parameter_count, constant_one);
      stack_pop_count->ClearFlag(HValue::kCanOverflow);
      // TODO(mvstanton): verify that stack_parameter_count+1 really fits in a
      // smi.
    } else {
      int count = descriptor_.hint_stack_parameter_count();
      stack_pop_count = Add<HConstant>(count);
    }
  }

  if (current_block() != NULL) {
    HReturn* hreturn_instruction = New<HReturn>(return_value,
                                                stack_pop_count);
    FinishCurrentBlock(hreturn_instruction);
  }
  return true;
}


template <class Stub>
class CodeStubGraphBuilder: public CodeStubGraphBuilderBase {
 public:
  CodeStubGraphBuilder(Isolate* isolate, Stub* stub)
      : CodeStubGraphBuilderBase(isolate, stub) {}

 protected:
  virtual HValue* BuildCodeStub() {
    if (casted_stub()->IsUninitialized()) {
      return BuildCodeUninitializedStub();
    } else {
      return BuildCodeInitializedStub();
    }
  }

  virtual HValue* BuildCodeInitializedStub() {
    UNIMPLEMENTED();
    return NULL;
  }

  virtual HValue* BuildCodeUninitializedStub() {
    // Force a deopt that falls back to the runtime.
    HValue* undefined = graph()->GetConstantUndefined();
    IfBuilder builder(this);
    builder.IfNot<HCompareObjectEqAndBranch, HValue*>(undefined, undefined);
    builder.Then();
    builder.ElseDeopt("Forced deopt to runtime");
    return undefined;
  }

  Stub* casted_stub() { return static_cast<Stub*>(stub()); }
};
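

// As a minimal sketch of the pattern used throughout this file (SomeStub is
// hypothetical, not a real V8 stub):
//
//   template <>
//   HValue* CodeStubGraphBuilder<SomeStub>::BuildCodeStub() {
//     HValue* arg = GetParameter(0);  // bound from the stub's descriptor
//     return arg;                     // becomes the stub's return value
//   }
//   Handle<Code> SomeStub::GenerateCode() { return DoGenerateCode(this); }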


Handle<Code> HydrogenCodeStub::GenerateLightweightMissCode(
    ExternalReference miss) {
  Factory* factory = isolate()->factory();

  // Generate the new code.
  MacroAssembler masm(isolate(), NULL, 256);

  {
    // Update the static counter each time a new code stub is generated.
    isolate()->counters()->code_stubs()->Increment();

    // Generate the code for the stub.
    masm.set_generating_stub(true);
    NoCurrentFrameScope scope(&masm);
    GenerateLightweightMiss(&masm, miss);
  }

  // Create the code object.
  CodeDesc desc;
  masm.GetCode(&desc);

  // Copy the generated code into a heap object.
  Code::Flags flags = Code::ComputeFlags(
      GetCodeKind(),
      GetICState(),
      GetExtraICState(),
      GetStubType());
  Handle<Code> new_object = factory->NewCode(
      desc, flags, masm.CodeObject(), NeedsImmovableCode());
  return new_object;
}


template <class Stub>
static Handle<Code> DoGenerateCode(Stub* stub) {
  Isolate* isolate = stub->isolate();
  CodeStubDescriptor descriptor(stub);

  // If we are uninitialized we can use a light-weight stub to enter
  // the runtime that is significantly faster than using the standard
  // stub-failure deopt mechanism.
  if (stub->IsUninitialized() && descriptor.has_miss_handler()) {
    DCHECK(!descriptor.stack_parameter_count().is_valid());
    return stub->GenerateLightweightMissCode(descriptor.miss_handler());
  }
  base::ElapsedTimer timer;
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    timer.Start();
  }
  CodeStubGraphBuilder<Stub> builder(isolate, stub);
  LChunk* chunk = OptimizeGraph(builder.CreateGraph());
  // TODO(yangguo) remove this once the code serializer handles code stubs.
  if (FLAG_serialize_toplevel) chunk->info()->PrepareForSerializing();
  Handle<Code> code = chunk->Codegen();
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    OFStream os(stdout);
    os << "[Lazy compilation of " << stub << " took "
       << timer.Elapsed().InMillisecondsF() << " ms]" << std::endl;
  }
  return code;
}
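

// In outline, every Hydrogen stub is produced the same way: SomeStub's
// GenerateCode() calls DoGenerateCode<SomeStub>(), which builds the Hydrogen
// graph through the stub's CodeStubGraphBuilder, lowers it to Lithium via
// OptimizeGraph(), and emits machine code with LChunk::Codegen():
//
//   CodeStubGraphBuilder<Stub> builder(isolate, stub);
//   LChunk* chunk = OptimizeGraph(builder.CreateGraph());
//   Handle<Code> code = chunk->Codegen();
//
// Uninitialized stubs with a miss handler bypass all of this and get the
// lightweight miss code generated above instead.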


template <>
HValue* CodeStubGraphBuilder<ToNumberStub>::BuildCodeStub() {
  HValue* value = GetParameter(0);

  // Check if the parameter is already a SMI or heap number.
  IfBuilder if_number(this);
  if_number.If<HIsSmiAndBranch>(value);
  if_number.OrIf<HCompareMap>(value, isolate()->factory()->heap_number_map());
  if_number.Then();

  // Return the number.
  Push(value);

  if_number.Else();

  // Convert the parameter to number using the builtin.
  HValue* function = AddLoadJSBuiltin(Builtins::TO_NUMBER);
  Add<HPushArguments>(value);
  Push(Add<HInvokeFunction>(function, 1));

  if_number.End();

  return Pop();
}


Handle<Code> ToNumberStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<NumberToStringStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();
  HValue* number = GetParameter(NumberToStringStub::kNumber);
  return BuildNumberToString(number, Type::Number(zone()));
}


Handle<Code> NumberToStringStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HValue* undefined = graph()->GetConstantUndefined();
  AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();

  // This stub is very performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HInstruction* allocation_site = Add<HLoadKeyed>(GetParameter(0),
                                                  GetParameter(1),
                                                  static_cast<HValue*>(NULL),
                                                  FAST_ELEMENTS);
  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.Then();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate = Add<HLoadNamedField>(
      allocation_site, static_cast<HValue*>(NULL), access);
  HValue* elements = AddLoadElements(boilerplate);
  HValue* capacity = AddLoadFixedArrayLength(elements);
  IfBuilder zero_capacity(this);
  zero_capacity.If<HCompareNumericAndBranch>(capacity, graph()->GetConstant0(),
                                             Token::EQ);
  zero_capacity.Then();
  Push(BuildCloneShallowArrayEmpty(boilerplate,
                                   allocation_site,
                                   alloc_site_mode));
  zero_capacity.Else();
  IfBuilder if_fixed_cow(this);
  if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
  if_fixed_cow.Then();
  Push(BuildCloneShallowArrayCow(boilerplate,
                                 allocation_site,
                                 alloc_site_mode,
                                 FAST_ELEMENTS));
  if_fixed_cow.Else();
  IfBuilder if_fixed(this);
  if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
  if_fixed.Then();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_ELEMENTS));

  if_fixed.Else();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_DOUBLE_ELEMENTS));
  if_fixed.End();
  if_fixed_cow.End();
  zero_capacity.End();

  checker.ElseDeopt("Uninitialized boilerplate literals");
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowArrayStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() {
  HValue* undefined = graph()->GetConstantUndefined();

  HInstruction* allocation_site = Add<HLoadKeyed>(GetParameter(0),
                                                  GetParameter(1),
                                                  static_cast<HValue*>(NULL),
                                                  FAST_ELEMENTS);

  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.And();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate = Add<HLoadNamedField>(
      allocation_site, static_cast<HValue*>(NULL), access);

  int size = JSObject::kHeaderSize + casted_stub()->length() * kPointerSize;
  int object_size = size;
  if (FLAG_allocation_site_pretenuring) {
    size += AllocationMemento::kSize;
  }

  HValue* boilerplate_map = Add<HLoadNamedField>(
      boilerplate, static_cast<HValue*>(NULL),
      HObjectAccess::ForMap());
  HValue* boilerplate_size = Add<HLoadNamedField>(
      boilerplate_map, static_cast<HValue*>(NULL),
      HObjectAccess::ForMapInstanceSize());
  HValue* size_in_words = Add<HConstant>(object_size >> kPointerSizeLog2);
  checker.If<HCompareNumericAndBranch>(boilerplate_size,
                                       size_in_words, Token::EQ);
  checker.Then();

  HValue* size_in_bytes = Add<HConstant>(size);

  HInstruction* object = Add<HAllocate>(size_in_bytes, HType::JSObject(),
                                        NOT_TENURED, JS_OBJECT_TYPE);

  for (int i = 0; i < object_size; i += kPointerSize) {
    HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(i);
    Add<HStoreNamedField>(
        object, access, Add<HLoadNamedField>(
            boilerplate, static_cast<HValue*>(NULL), access));
  }

  DCHECK(FLAG_allocation_site_pretenuring || (size == object_size));
  if (FLAG_allocation_site_pretenuring) {
    BuildCreateAllocationMemento(
        object, Add<HConstant>(object_size), allocation_site);
  }

  environment()->Push(object);
  checker.ElseDeopt("Uninitialized boilerplate in fast clone");
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowObjectStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() {
  HValue* size = Add<HConstant>(AllocationSite::kSize);
  HInstruction* object = Add<HAllocate>(size, HType::JSObject(), TENURED,
                                        JS_OBJECT_TYPE);

  // Store the map
  Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
  AddStoreMapConstant(object, allocation_site_map);

  // Store the payload (smi elements kind)
  HValue* initial_elements_kind = Add<HConstant>(GetInitialFastElementsKind());
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kTransitionInfoOffset),
                        initial_elements_kind);

  // Unlike literals, constructed arrays don't have nested sites
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kNestedSiteOffset),
                        graph()->GetConstant0());

  // Pretenuring calculation field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureDataOffset),
                        graph()->GetConstant0());

  // Pretenuring memento creation count field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureCreateCountOffset),
                        graph()->GetConstant0());

  // Store an empty fixed array for the code dependency.
  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());
  Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(
          AllocationSite::kDependentCodeOffset),
      empty_fixed_array);

  // Link the object to the allocation site list
  HValue* site_list = Add<HConstant>(
      ExternalReference::allocation_sites_list_address(isolate()));
  HValue* site = Add<HLoadNamedField>(
      site_list, static_cast<HValue*>(NULL),
      HObjectAccess::ForAllocationSiteList());
  // TODO(mvstanton): This is a store to a weak pointer, which we may want to
  // mark as such in order to skip the write barrier, once we have a unified
  // system for weakness. For now we decided to keep it like this because having
  // an initial write barrier backed store makes this pointer strong until the
  // next GC, and allocation sites are designed to survive several GCs anyway.
  Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset),
      site);
  Add<HStoreNamedField>(site_list, HObjectAccess::ForAllocationSiteList(),
                        object);

  HInstruction* feedback_vector = GetParameter(0);
  HInstruction* slot = GetParameter(1);
  Add<HStoreKeyed>(feedback_vector, slot, object, FAST_ELEMENTS,
                   INITIALIZING_STORE);
  return feedback_vector;
}


Handle<Code> CreateAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<LoadFastElementStub>::BuildCodeStub() {
  HInstruction* load = BuildUncheckedMonomorphicElementAccess(
      GetParameter(LoadDescriptor::kReceiverIndex),
      GetParameter(LoadDescriptor::kNameIndex), NULL,
      casted_stub()->is_js_array(), casted_stub()->elements_kind(), LOAD,
      NEVER_RETURN_HOLE, STANDARD_STORE);
  return load;
}


Handle<Code> LoadFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


HLoadNamedField* CodeStubGraphBuilderBase::BuildLoadNamedField(
    HValue* object, FieldIndex index) {
  Representation representation = index.is_double()
      ? Representation::Double()
      : Representation::Tagged();
  int offset = index.offset();
  HObjectAccess access = index.is_inobject()
      ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
      : HObjectAccess::ForBackingStoreOffset(offset, representation);
  if (index.is_double()) {
    // Load the heap number.
    object = Add<HLoadNamedField>(
        object, static_cast<HValue*>(NULL),
        access.WithRepresentation(Representation::Tagged()));
    // Load the double value from it.
    access = HObjectAccess::ForHeapNumberValue();
  }
  return Add<HLoadNamedField>(object, static_cast<HValue*>(NULL), access);
}


template<>
HValue* CodeStubGraphBuilder<LoadFieldStub>::BuildCodeStub() {
  return BuildLoadNamedField(GetParameter(0), casted_stub()->index());
}


Handle<Code> LoadFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<LoadConstantStub>::BuildCodeStub() {
  HValue* map = AddLoadMap(GetParameter(0), NULL);
  HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset(
      Map::kDescriptorsOffset);
  HValue* descriptors =
      Add<HLoadNamedField>(map, static_cast<HValue*>(NULL), descriptors_access);
  HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset(
      DescriptorArray::GetValueOffset(casted_stub()->constant_index()));
  return Add<HLoadNamedField>(descriptors, static_cast<HValue*>(NULL),
                              value_access);
}


Handle<Code> LoadConstantStub::GenerateCode() { return DoGenerateCode(this); }


HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key) {
  HValue* result;
  HInstruction* backing_store = Add<HLoadKeyed>(
      elements, graph()->GetConstant1(), static_cast<HValue*>(NULL),
      FAST_ELEMENTS);
  Add<HCheckMaps>(backing_store, isolate()->factory()->fixed_array_map());
  HValue* backing_store_length =
      Add<HLoadNamedField>(backing_store, static_cast<HValue*>(NULL),
                           HObjectAccess::ForFixedArrayLength());
  IfBuilder in_unmapped_range(this);
  in_unmapped_range.If<HCompareNumericAndBranch>(key, backing_store_length,
                                                 Token::LT);
  in_unmapped_range.Then();
  {
    result = Add<HLoadKeyed>(backing_store, key, static_cast<HValue*>(NULL),
                             FAST_HOLEY_ELEMENTS);
  }
  in_unmapped_range.ElseDeopt("Outside of range");
  in_unmapped_range.End();
  return result;
}


template <>
HValue* CodeStubGraphBuilder<KeyedLoadSloppyArgumentsStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);

  // Mapped arguments are actual arguments. Unmapped arguments are values added
  // to the arguments object after it was created for the call. Mapped arguments
  // are stored in the context at indexes given by elements[key + 2]. Unmapped
  // arguments are stored as regular indexed properties in the arguments array,
  // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed
  // look at argument object construction.
  //
  // The sloppy arguments elements array has a special format:
  //
  // 0: context
  // 1: unmapped arguments array
  // 2: mapped_index0,
  // 3: mapped_index1,
  // ...
  //
  // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
  // If key + 2 >= elements.length then attempt to look in the unmapped
  // arguments array (given by elements[1]) and return the value at key, missing
  // to the runtime if the unmapped arguments array is not a fixed array or if
  // key >= unmapped_arguments_array.length.
  //
  // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
  // in the unmapped arguments array, as described above. Otherwise, t is a Smi
  // index into the context array given at elements[0]. Return the value at
  // context[t].
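  //
  // A rough, hypothetical illustration of the format above: for
  //   function f(a, b) { return arguments[2]; }  called as  f(1, 2, 3)
  // the elements array has length 2 + min(3, 2) = 4, something like
  //   [ <context>, <FixedArray [1, 2, 3]>, <slot of a or hole>, <slot of b or hole> ]
  // Keys 0 and 1 hit the mapped entries (reading through a context slot when
  // the parameter is context-allocated), while key 2 fails the
  // key < elements.length - 2 check and is read out of the unmapped
  // arguments array at elements[1].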

  key = AddUncasted<HForceRepresentation>(key, Representation::Smi());
  IfBuilder positive_smi(this);
  positive_smi.If<HCompareNumericAndBranch>(key, graph()->GetConstant0(),
                                            Token::LT);
  positive_smi.ThenDeopt("key is negative");
  positive_smi.End();

  HValue* constant_two = Add<HConstant>(2);
  HValue* elements = AddLoadElements(receiver, static_cast<HValue*>(NULL));
  HValue* elements_length =
      Add<HLoadNamedField>(elements, static_cast<HValue*>(NULL),
                           HObjectAccess::ForFixedArrayLength());
  HValue* adjusted_length = AddUncasted<HSub>(elements_length, constant_two);
  IfBuilder in_range(this);
  in_range.If<HCompareNumericAndBranch>(key, adjusted_length, Token::LT);
  in_range.Then();
  {
    HValue* index = AddUncasted<HAdd>(key, constant_two);
    HInstruction* mapped_index =
        Add<HLoadKeyed>(elements, index, static_cast<HValue*>(NULL),
                        FAST_HOLEY_ELEMENTS);

    IfBuilder is_valid(this);
    is_valid.IfNot<HCompareObjectEqAndBranch>(mapped_index,
                                              graph()->GetConstantHole());
    is_valid.Then();
    {
      // TODO(mvstanton): I'd like to assert from this point, that if the
      // mapped_index is not the hole that it is indeed, a smi. An unnecessary
      // smi check is being emitted.
      HValue* the_context =
          Add<HLoadKeyed>(elements, graph()->GetConstant0(),
                          static_cast<HValue*>(NULL), FAST_ELEMENTS);
      DCHECK(Context::kHeaderSize == FixedArray::kHeaderSize);
      HValue* result =
          Add<HLoadKeyed>(the_context, mapped_index, static_cast<HValue*>(NULL),
                          FAST_ELEMENTS);
      environment()->Push(result);
    }
    is_valid.Else();
    {
      HValue* result = UnmappedCase(elements, key);
      environment()->Push(result);
    }
    is_valid.End();
  }
  in_range.Else();
  {
    HValue* result = UnmappedCase(elements, key);
    environment()->Push(result);
  }
  in_range.End();

  return environment()->Pop();
}


Handle<Code> KeyedLoadSloppyArgumentsStub::GenerateCode() {
  return DoGenerateCode(this);
}


void CodeStubGraphBuilderBase::BuildStoreNamedField(
    HValue* object, HValue* value, FieldIndex index,
    Representation representation) {
  DCHECK(!index.is_double() || representation.IsDouble());
  int offset = index.offset();
  HObjectAccess access =
      index.is_inobject()
          ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
          : HObjectAccess::ForBackingStoreOffset(offset, representation);

  if (representation.IsDouble()) {
    // Load the heap number.
    object = Add<HLoadNamedField>(
        object, static_cast<HValue*>(NULL),
        access.WithRepresentation(Representation::Tagged()));
    // Store the double value into it.
    access = HObjectAccess::ForHeapNumberValue();
  } else if (representation.IsHeapObject()) {
    BuildCheckHeapObject(value);
  }

  Add<HStoreNamedField>(object, access, value, INITIALIZING_STORE);
}


template <>
HValue* CodeStubGraphBuilder<StoreFieldStub>::BuildCodeStub() {
  BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(),
                       casted_stub()->representation());
  return GetParameter(2);
}


Handle<Code> StoreFieldStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder<StringLengthStub>::BuildCodeStub() {
  HValue* string = BuildLoadNamedField(GetParameter(0),
      FieldIndex::ForInObjectOffset(JSValue::kValueOffset));
  return BuildLoadNamedField(string,
      FieldIndex::ForInObjectOffset(String::kLengthOffset));
}


Handle<Code> StringLengthStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StoreFastElementStub>::BuildCodeStub() {
  BuildUncheckedMonomorphicElementAccess(
      GetParameter(StoreDescriptor::kReceiverIndex),
      GetParameter(StoreDescriptor::kNameIndex),
      GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(),
      casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE,
      casted_stub()->store_mode());

  return GetParameter(2);
}


Handle<Code> StoreFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<TransitionElementsKindStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();

  BuildTransitionElementsKind(GetParameter(0),
                              GetParameter(1),
                              casted_stub()->from_kind(),
                              casted_stub()->to_kind(),
                              casted_stub()->is_js_array());

  return GetParameter(0);
}


Handle<Code> TransitionElementsKindStub::GenerateCode() {
  return DoGenerateCode(this);
}

HValue* CodeStubGraphBuilderBase::BuildArrayConstructor(
    ElementsKind kind,
    AllocationSiteOverrideMode override_mode,
    ArgumentClass argument_class) {
  HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor);
  HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite);
  JSArrayBuilder array_builder(this, kind, alloc_site, constructor,
                               override_mode);
  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive, the generated code must be
      // tuned so that it doesn't build an eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }

  return result;
}


HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor(
    ElementsKind kind, ArgumentClass argument_class) {
  HValue* constructor = GetParameter(
      InternalArrayConstructorStubBase::kConstructor);
  JSArrayBuilder array_builder(this, kind, constructor);

  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive, the generated code must be
      // tuned so that it doesn't build an eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }
  return result;
}


HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor(
    JSArrayBuilder* array_builder) {
  // Smi check and range check on the input arg.
  HValue* constant_one = graph()->GetConstant1();
  HValue* constant_zero = graph()->GetConstant0();

  HInstruction* elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      elements, constant_one, constant_zero);

  return BuildAllocateArrayFromLength(array_builder, argument);
}


HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor(
    JSArrayBuilder* array_builder, ElementsKind kind) {
  // Insert a bounds check because the number of arguments might exceed
  // the kInitialMaxFastElementArray limit. This cannot happen for code
  // that was parsed, but calling via Array.apply(thisArg, [...]) might
  // trigger it.
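  // (For instance, Array.apply(null, hugeArrayLike) with an array-like longer
  // than kInitialMaxFastElementArray ends up here with far more arguments
  // than any parsed call site could produce; the bounds check below then
  // deopts rather than allocate an oversized fast-elements array.)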
  HValue* length = GetArgumentsLength();
  HConstant* max_alloc_length =
      Add<HConstant>(JSObject::kInitialMaxFastElementArray);
  HValue* checked_length = Add<HBoundsCheck>(length, max_alloc_length);

  // We need to fill with the hole if it's a smi array in the multi-argument
  // case because we might have to bail out while copying arguments into
  // the array because they aren't compatible with a smi array.
  // If it's a double array, no problem, and if it's fast then no
  // problem either because doubles are boxed.
  //
  // TODO(mvstanton): consider an instruction to memset fill the array
  // with zero in this case instead.
  JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
      ? JSArrayBuilder::FILL_WITH_HOLE
      : JSArrayBuilder::DONT_FILL_WITH_HOLE;
  HValue* new_object = array_builder->AllocateArray(checked_length,
                                                    max_alloc_length,
                                                    checked_length,
                                                    fill_mode);
  HValue* elements = array_builder->GetElementsLocation();
  DCHECK(elements != NULL);

  // Now populate the elements correctly.
  LoopBuilder builder(this,
                      context(),
                      LoopBuilder::kPostIncrement);
  HValue* start = graph()->GetConstant0();
  HValue* key = builder.BeginBody(start, checked_length, Token::LT);
  HInstruction* argument_elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      argument_elements, checked_length, key);

  Add<HStoreKeyed>(elements, key, argument, kind);
  builder.EndBody();
  return new_object;
}


template <>
HValue* CodeStubGraphBuilder<ArrayNoArgumentConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, NONE);
}


Handle<Code> ArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, SINGLE);
}


Handle<Code> ArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ArrayNArgumentsConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, MULTIPLE);
}


Handle<Code> ArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArrayNoArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, NONE);
}


Handle<Code> InternalArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, SINGLE);
}


Handle<Code> InternalArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArrayNArgumentsConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, MULTIPLE);
}


Handle<Code> InternalArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<CompareNilICStub>::BuildCodeInitializedStub() {
  Isolate* isolate = graph()->isolate();
  CompareNilICStub* stub = casted_stub();
  HIfContinuation continuation;
  Handle<Map> sentinel_map(isolate->heap()->meta_map());
  Type* type = stub->GetType(zone(), sentinel_map);
  BuildCompareNil(GetParameter(0), type, &continuation);
  IfBuilder if_nil(this, &continuation);
  if_nil.Then();
  if (continuation.IsFalseReachable()) {
    if_nil.Else();
    if_nil.Return(graph()->GetConstant0());
  }
  if_nil.End();
  return continuation.IsTrueReachable()
      ? graph()->GetConstant1()
      : graph()->GetConstantUndefined();
}


Handle<Code> CompareNilICStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<BinaryOpICStub>::BuildCodeInitializedStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* left = GetParameter(BinaryOpICStub::kLeft);
  HValue* right = GetParameter(BinaryOpICStub::kRight);

  Type* left_type = state.GetLeftType(zone());
  Type* right_type = state.GetRightType(zone());
  Type* result_type = state.GetResultType(zone());

  DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
         (state.HasSideEffects() || !result_type->Is(Type::None())));

  HValue* result = NULL;
  HAllocationMode allocation_mode(NOT_TENURED);
  if (state.op() == Token::ADD &&
      (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
      !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
    // For the generic add stub a fast case for string addition is performance
    // critical.
    if (left_type->Maybe(Type::String())) {
      IfBuilder if_leftisstring(this);
      if_leftisstring.If<HIsStringAndBranch>(left);
      if_leftisstring.Then();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right,
            Type::String(zone()), right_type,
            result_type, state.fixed_right_arg(),
            allocation_mode));
      }
      if_leftisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right,
            left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode));
      }
      if_leftisstring.End();
      result = Pop();
    } else {
      IfBuilder if_rightisstring(this);
      if_rightisstring.If<HIsStringAndBranch>(right);
      if_rightisstring.Then();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right,
            left_type, Type::String(zone()),
            result_type, state.fixed_right_arg(),
            allocation_mode));
      }
      if_rightisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right,
            left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode));
      }
      if_rightisstring.End();
      result = Pop();
    }
  } else {
    result = BuildBinaryOperation(
        state.op(), left, right,
        left_type, right_type, result_type,
        state.fixed_right_arg(), allocation_mode);
  }

  // If we encounter a generic argument, the number conversion is
  // observable, thus we cannot afford to bail out after the fact.
  if (!state.HasSideEffects()) {
    result = EnforceNumberType(result, result_type);
  }

  // Reuse the double box of one of the operands if we are allowed to (i.e.
  // chained binops).
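  // For example, in a chained expression like a + b + c the heap number
  // boxing the intermediate a + b is a temporary, so the outer addition can
  // overwrite that box instead of allocating a fresh one.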
  if (state.CanReuseDoubleBox()) {
    HValue* operand = (state.mode() == OVERWRITE_LEFT) ? left : right;
    IfBuilder if_heap_number(this);
    if_heap_number.If<HHasInstanceTypeAndBranch>(operand, HEAP_NUMBER_TYPE);
    if_heap_number.Then();
    Add<HStoreNamedField>(operand, HObjectAccess::ForHeapNumberValue(), result);
    Push(operand);
    if_heap_number.Else();
    Push(result);
    if_heap_number.End();
    result = Pop();
  }

  return result;
}


Handle<Code> BinaryOpICStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<BinaryOpWithAllocationSiteStub>::BuildCodeStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* allocation_site = GetParameter(
      BinaryOpWithAllocationSiteStub::kAllocationSite);
  HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft);
  HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight);

  Type* left_type = state.GetLeftType(zone());
  Type* right_type = state.GetRightType(zone());
  Type* result_type = state.GetResultType(zone());
  HAllocationMode allocation_mode(allocation_site);

  return BuildBinaryOperation(state.op(), left, right,
                              left_type, right_type, result_type,
                              state.fixed_right_arg(), allocation_mode);
}


Handle<Code> BinaryOpWithAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StringAddStub>::BuildCodeInitializedStub() {
  StringAddStub* stub = casted_stub();
  StringAddFlags flags = stub->flags();
  PretenureFlag pretenure_flag = stub->pretenure_flag();

  HValue* left = GetParameter(StringAddStub::kLeft);
  HValue* right = GetParameter(StringAddStub::kRight);

  // Make sure that both arguments are strings if not known in advance.
  if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
    left = BuildCheckString(left);
  }
  if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
    right = BuildCheckString(right);
  }

  return BuildStringAdd(left, right, HAllocationMode(pretenure_flag));
}


Handle<Code> StringAddStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ToBooleanStub>::BuildCodeInitializedStub() {
  ToBooleanStub* stub = casted_stub();
  HValue* true_value = NULL;
  HValue* false_value = NULL;

  switch (stub->mode()) {
    case ToBooleanStub::RESULT_AS_SMI:
      true_value = graph()->GetConstant1();
      false_value = graph()->GetConstant0();
      break;
    case ToBooleanStub::RESULT_AS_ODDBALL:
      true_value = graph()->GetConstantTrue();
      false_value = graph()->GetConstantFalse();
      break;
    case ToBooleanStub::RESULT_AS_INVERSE_ODDBALL:
      true_value = graph()->GetConstantFalse();
      false_value = graph()->GetConstantTrue();
      break;
  }

  IfBuilder if_true(this);
  if_true.If<HBranch>(GetParameter(0), stub->types());
  if_true.Then();
  if_true.Return(true_value);
  if_true.Else();
  if_true.End();
  return false_value;
}


Handle<Code> ToBooleanStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StoreGlobalStub>::BuildCodeInitializedStub() {
  StoreGlobalStub* stub = casted_stub();
  Handle<Object> placeholder_value(Smi::FromInt(0), isolate());
  Handle<PropertyCell> placeholder_cell =
      isolate()->factory()->NewPropertyCell(placeholder_value);

  HParameter* value = GetParameter(StoreDescriptor::kValueIndex);

  if (stub->check_global()) {
    // Check that the map of the global has not changed: use a placeholder map
    // that will be replaced later with the global object's map.
    Handle<Map> placeholder_map = isolate()->factory()->meta_map();
    HValue* global = Add<HConstant>(
        StoreGlobalStub::global_placeholder(isolate()));
    Add<HCheckMaps>(global, placeholder_map);
  }

  HValue* cell = Add<HConstant>(placeholder_cell);
  HObjectAccess access(HObjectAccess::ForCellPayload(isolate()));
  HValue* cell_contents = Add<HLoadNamedField>(
      cell, static_cast<HValue*>(NULL), access);

  if (stub->is_constant()) {
    IfBuilder builder(this);
    builder.If<HCompareObjectEqAndBranch>(cell_contents, value);
    builder.Then();
    builder.ElseDeopt("Unexpected cell contents in constant global store");
    builder.End();
  } else {
    // Load the payload of the global parameter cell. A hole indicates that the
    // property has been deleted and that the store must be handled by the
    // runtime.
    IfBuilder builder(this);
    HValue* hole_value = graph()->GetConstantHole();
    builder.If<HCompareObjectEqAndBranch>(cell_contents, hole_value);
    builder.Then();
    builder.Deopt("Unexpected cell contents in global store");
    builder.Else();
    Add<HStoreNamedField>(cell, access, value);
    builder.End();
  }

  return value;
}


Handle<Code> StoreGlobalStub::GenerateCode() {
  return DoGenerateCode(this);
}


template<>
HValue* CodeStubGraphBuilder<ElementsTransitionAndStoreStub>::BuildCodeStub() {
  HValue* value = GetParameter(ElementsTransitionAndStoreStub::kValueIndex);
  HValue* map = GetParameter(ElementsTransitionAndStoreStub::kMapIndex);
  HValue* key = GetParameter(ElementsTransitionAndStoreStub::kKeyIndex);
  HValue* object = GetParameter(ElementsTransitionAndStoreStub::kObjectIndex);

  if (FLAG_trace_elements_transitions) {
    // Tracing elements transitions is the job of the runtime.
    Add<HDeoptimize>("Tracing elements transitions", Deoptimizer::EAGER);
  } else {
    info()->MarkAsSavesCallerDoubles();

    BuildTransitionElementsKind(object, map,
                                casted_stub()->from_kind(),
                                casted_stub()->to_kind(),
                                casted_stub()->is_jsarray());

    BuildUncheckedMonomorphicElementAccess(object, key, value,
                                           casted_stub()->is_jsarray(),
                                           casted_stub()->to_kind(),
                                           STORE, ALLOW_RETURN_HOLE,
                                           casted_stub()->store_mode());
  }

  return value;
}


Handle<Code> ElementsTransitionAndStoreStub::GenerateCode() {
  return DoGenerateCode(this);
}


void CodeStubGraphBuilderBase::BuildCheckAndInstallOptimizedCode(
    HValue* js_function,
    HValue* native_context,
    IfBuilder* builder,
    HValue* optimized_map,
    HValue* map_index) {
  HValue* osr_ast_id_none = Add<HConstant>(BailoutId::None().ToInt());
  HValue* context_slot = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kContextOffset);
  HValue* osr_ast_slot = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kOsrAstIdOffset);
  builder->If<HCompareObjectEqAndBranch>(native_context,
                                         context_slot);
  builder->AndIf<HCompareObjectEqAndBranch>(osr_ast_slot, osr_ast_id_none);
  builder->Then();
  HValue* code_object = LoadFromOptimizedCodeMap(optimized_map,
      map_index, SharedFunctionInfo::kCachedCodeOffset);
  // and the literals
  HValue* literals = LoadFromOptimizedCodeMap(optimized_map,
      map_index, SharedFunctionInfo::kLiteralsOffset);

  Counters* counters = isolate()->counters();
  AddIncrementCounter(counters->fast_new_closure_install_optimized());

  // TODO(fschneider): Idea: store proper code pointers in the optimized code
  // map and either unmangle them on marking or do nothing as the whole map is
  // discarded on major GC anyway.
  Add<HStoreCodeEntry>(js_function, code_object);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
                        literals);

  // Now link a function into a list of optimized functions.
  HValue* optimized_functions_list = Add<HLoadNamedField>(
      native_context, static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST));
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForNextFunctionLinkPointer(),
                        optimized_functions_list);

  // This store is the only one that should have a write barrier.
  Add<HStoreNamedField>(native_context,
      HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST),
      js_function);

  // The builder continues in the "then" after this function.
}


void CodeStubGraphBuilderBase::BuildInstallCode(HValue* js_function,
                                                HValue* shared_info) {
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForNextFunctionLinkPointer(),
                        graph()->GetConstantUndefined());
  HValue* code_object = Add<HLoadNamedField>(
      shared_info, static_cast<HValue*>(NULL), HObjectAccess::ForCodeOffset());
  Add<HStoreCodeEntry>(js_function, code_object);
}


HInstruction* CodeStubGraphBuilderBase::LoadFromOptimizedCodeMap(
    HValue* optimized_map,
    HValue* iterator,
    int field_offset) {
  // By making sure to express these loads in the form [<hvalue> + constant]
  // the keyed load can be hoisted.
  DCHECK(field_offset >= 0 && field_offset < SharedFunctionInfo::kEntryLength);
  HValue* field_slot = iterator;
  if (field_offset > 0) {
    HValue* field_offset_value = Add<HConstant>(field_offset);
    field_slot = AddUncasted<HAdd>(iterator, field_offset_value);
  }
  HInstruction* field_entry = Add<HLoadKeyed>(optimized_map, field_slot,
      static_cast<HValue*>(NULL), FAST_ELEMENTS);
  return field_entry;
}


void CodeStubGraphBuilderBase::BuildInstallFromOptimizedCodeMap(
    HValue* js_function,
    HValue* shared_info,
    HValue* native_context) {
  Counters* counters = isolate()->counters();
  IfBuilder is_optimized(this);
  HInstruction* optimized_map = Add<HLoadNamedField>(
      shared_info, static_cast<HValue*>(NULL),
      HObjectAccess::ForOptimizedCodeMap());
  HValue* null_constant = Add<HConstant>(0);
  is_optimized.If<HCompareObjectEqAndBranch>(optimized_map, null_constant);
  is_optimized.Then();
  {
    BuildInstallCode(js_function, shared_info);
  }
  is_optimized.Else();
  {
    AddIncrementCounter(counters->fast_new_closure_try_optimized());
    // optimized_map points to fixed array of 3-element entries
    // (native context, optimized code, literals).
    // Map must never be empty, so check the first elements.
    HValue* first_entry_index =
        Add<HConstant>(SharedFunctionInfo::kEntriesStart);
    IfBuilder already_in(this);
    BuildCheckAndInstallOptimizedCode(js_function, native_context, &already_in,
                                      optimized_map, first_entry_index);
    already_in.Else();
    {
      // Iterate through the rest of map backwards. Do not double check first
      // entry. After the loop, if no matching optimized code was found,
      // install unoptimized code.
      // for(i = map.length() - SharedFunctionInfo::kEntryLength;
      //     i > SharedFunctionInfo::kEntriesStart;
      //     i -= SharedFunctionInfo::kEntryLength) { .. }
      HValue* shared_function_entry_length =
          Add<HConstant>(SharedFunctionInfo::kEntryLength);
      LoopBuilder loop_builder(this,
                               context(),
                               LoopBuilder::kPostDecrement,
                               shared_function_entry_length);
      HValue* array_length = Add<HLoadNamedField>(
          optimized_map, static_cast<HValue*>(NULL),
          HObjectAccess::ForFixedArrayLength());
      HValue* start_pos = AddUncasted<HSub>(array_length,
                                            shared_function_entry_length);
      HValue* slot_iterator = loop_builder.BeginBody(start_pos,
                                                     first_entry_index,
                                                     Token::GT);
      {
        IfBuilder done_check(this);
        BuildCheckAndInstallOptimizedCode(js_function, native_context,
                                          &done_check,
                                          optimized_map,
                                          slot_iterator);
        // Fall out of the loop
        loop_builder.Break();
      }
      loop_builder.EndBody();

      // If slot_iterator equals first entry index, then we failed to find and
      // install optimized code
      IfBuilder no_optimized_code_check(this);
      no_optimized_code_check.If<HCompareNumericAndBranch>(
          slot_iterator, first_entry_index, Token::EQ);
      no_optimized_code_check.Then();
      {
        // Store the unoptimized code
        BuildInstallCode(js_function, shared_info);
      }
    }
  }
}


template<>
HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() {
  Counters* counters = isolate()->counters();
  Factory* factory = isolate()->factory();
  HInstruction* empty_fixed_array =
      Add<HConstant>(factory->empty_fixed_array());
  HValue* shared_info = GetParameter(0);

  AddIncrementCounter(counters->fast_new_closure_total());

  // Create a new closure from the given function info in new space
  HValue* size = Add<HConstant>(JSFunction::kSize);
  HInstruction* js_function = Add<HAllocate>(size, HType::JSObject(),
                                             NOT_TENURED, JS_FUNCTION_TYPE);

  int map_index = Context::FunctionMapIndex(casted_stub()->strict_mode(),
                                            casted_stub()->kind());

  // Compute the function map in the current native context and set that
  // as the map of the allocated object.
  HInstruction* native_context = BuildGetNativeContext();
  HInstruction* map_slot_value = Add<HLoadNamedField>(
      native_context, static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(map_index));
  Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value);

  // Initialize the rest of the function.
  Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForPrototypeOrInitialMap(),
                        graph()->GetConstantHole());
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForSharedFunctionInfoPointer(),
                        shared_info);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(),
                        context());

  // Initialize the code pointer in the function to be the one
  // found in the shared function info object.
  // But first check if there is an optimized version for our context.
  if (FLAG_cache_optimized_code) {
    BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context);
  } else {
    BuildInstallCode(js_function, shared_info);
  }

  return js_function;
}


Handle<Code> FastNewClosureStub::GenerateCode() {
  return DoGenerateCode(this);
}


template<>
HValue* CodeStubGraphBuilder<FastNewContextStub>::BuildCodeStub() {
  int length = casted_stub()->slots() + Context::MIN_CONTEXT_SLOTS;

  // Get the function.
  HParameter* function = GetParameter(FastNewContextStub::kFunction);

  // Allocate the context in new space.
  HAllocate* function_context = Add<HAllocate>(
      Add<HConstant>(length * kPointerSize + FixedArray::kHeaderSize),
      HType::HeapObject(), NOT_TENURED, FIXED_ARRAY_TYPE);

  // Set up the object header.
  AddStoreMapConstant(function_context,
                      isolate()->factory()->function_context_map());
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForFixedArrayLength(),
                        Add<HConstant>(length));

  // Set up the fixed slots.
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::CLOSURE_INDEX),
                        function);
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX),
                        context());
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::EXTENSION_INDEX),
                        graph()->GetConstant0());

  // Copy the global object from the previous context.
  HValue* global_object = Add<HLoadNamedField>(
      context(), static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(
                            Context::GLOBAL_OBJECT_INDEX),
                        global_object);

  // Initialize the rest of the slots to undefined.
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; ++i) {
    Add<HStoreNamedField>(function_context,
                          HObjectAccess::ForContextSlot(i),
                          graph()->GetConstantUndefined());
  }

  return function_context;
}


Handle<Code> FastNewContextStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<LoadDictionaryElementStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);

  Add<HCheckSmi>(key);

  HValue* elements = AddLoadElements(receiver);

  HValue* hash = BuildElementIndexHash(key);

  return BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash);
}


Handle<Code> LoadDictionaryElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


template<>
HValue* CodeStubGraphBuilder<RegExpConstructResultStub>::BuildCodeStub() {
  // Determine the parameters.
  HValue* length = GetParameter(RegExpConstructResultStub::kLength);
  HValue* index = GetParameter(RegExpConstructResultStub::kIndex);
  HValue* input = GetParameter(RegExpConstructResultStub::kInput);

  info()->MarkMustNotHaveEagerFrame();

  return BuildRegExpConstructResult(length, index, input);
}


Handle<Code> RegExpConstructResultStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
class CodeStubGraphBuilder<KeyedLoadGenericStub>
    : public CodeStubGraphBuilderBase {
 public:
  CodeStubGraphBuilder(Isolate* isolate, KeyedLoadGenericStub* stub)
      : CodeStubGraphBuilderBase(isolate, stub) {}

 protected:
  virtual HValue* BuildCodeStub();

  void BuildElementsKindLimitCheck(HGraphBuilder::IfBuilder* if_builder,
                                   HValue* bit_field2,
                                   ElementsKind kind);

  void BuildFastElementLoad(HGraphBuilder::IfBuilder* if_builder,
                            HValue* receiver,
                            HValue* key,
                            HValue* instance_type,
                            HValue* bit_field2,
                            ElementsKind kind);

  void BuildExternalElementLoad(HGraphBuilder::IfBuilder* if_builder,
                                HValue* receiver,
                                HValue* key,
                                HValue* instance_type,
                                HValue* bit_field2,
                                ElementsKind kind);

  KeyedLoadGenericStub* casted_stub() {
    return static_cast<KeyedLoadGenericStub*>(stub());
  }
};


void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildElementsKindLimitCheck(
    HGraphBuilder::IfBuilder* if_builder, HValue* bit_field2,
    ElementsKind kind) {
  ElementsKind next_kind = static_cast<ElementsKind>(kind + 1);
  HValue* kind_limit = Add<HConstant>(
      static_cast<int>(Map::ElementsKindBits::encode(next_kind)));

  if_builder->If<HCompareNumericAndBranch>(bit_field2, kind_limit, Token::LT);
  if_builder->Then();
}


void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildFastElementLoad(
    HGraphBuilder::IfBuilder* if_builder, HValue* receiver, HValue* key,
    HValue* instance_type, HValue* bit_field2, ElementsKind kind) {
  DCHECK(!IsExternalArrayElementsKind(kind));

  BuildElementsKindLimitCheck(if_builder, bit_field2, kind);

  IfBuilder js_array_check(this);
  js_array_check.If<HCompareNumericAndBranch>(
      instance_type, Add<HConstant>(JS_ARRAY_TYPE), Token::EQ);
  js_array_check.Then();
  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              true, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
  js_array_check.Else();
  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              false, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
  js_array_check.End();
}


void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildExternalElementLoad(
    HGraphBuilder::IfBuilder* if_builder, HValue* receiver, HValue* key,
    HValue* instance_type, HValue* bit_field2, ElementsKind kind) {
  DCHECK(IsExternalArrayElementsKind(kind));

  BuildElementsKindLimitCheck(if_builder, bit_field2, kind);

  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              false, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
}


HValue* CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);

  // Split into a smi/integer case and unique string case.
  HIfContinuation index_name_split_continuation(graph()->CreateBasicBlock(),
                                                graph()->CreateBasicBlock());

  BuildKeyedIndexCheck(key, &index_name_split_continuation);

  IfBuilder index_name_split(this, &index_name_split_continuation);
  index_name_split.Then();
  {
    // Key is an index (number)
    key = Pop();

    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
      (1 << Map::kHasIndexedInterceptor);
    BuildJSObjectCheck(receiver, bit_field_mask);

    HValue* map = Add<HLoadNamedField>(receiver, static_cast<HValue*>(NULL),
                                       HObjectAccess::ForMap());

    HValue* instance_type =
        Add<HLoadNamedField>(map, static_cast<HValue*>(NULL),
                             HObjectAccess::ForMapInstanceType());

    HValue* bit_field2 = Add<HLoadNamedField>(map,
                                              static_cast<HValue*>(NULL),
                                              HObjectAccess::ForMapBitField2());

    IfBuilder kind_if(this);
    BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                         FAST_HOLEY_ELEMENTS);

    kind_if.Else();
    {
      BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                           FAST_HOLEY_DOUBLE_ELEMENTS);
    }
    kind_if.Else();

    // The DICTIONARY_ELEMENTS check generates a "kind_if.Then"
    BuildElementsKindLimitCheck(&kind_if, bit_field2, DICTIONARY_ELEMENTS);
    {
      HValue* elements = AddLoadElements(receiver);

      HValue* hash = BuildElementIndexHash(key);

      Push(BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash));
    }
    kind_if.Else();

    // The SLOPPY_ARGUMENTS_ELEMENTS check generates a "kind_if.Then"
    BuildElementsKindLimitCheck(&kind_if, bit_field2,
                                SLOPPY_ARGUMENTS_ELEMENTS);
    // Non-strict elements are not handled.
    Add<HDeoptimize>("non-strict elements in KeyedLoadGenericStub",
                     Deoptimizer::EAGER);
    Push(graph()->GetConstant0());

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_INT8_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_UINT8_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_INT16_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_UINT16_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_INT32_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_UINT32_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_FLOAT32_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_FLOAT64_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_UINT8_CLAMPED_ELEMENTS);

    kind_if.ElseDeopt("ElementsKind unhandled in KeyedLoadGenericStub");

    kind_if.End();
  }
  index_name_split.Else();
  {
    // Key is a unique string.
    key = Pop();

    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
      (1 << Map::kHasNamedInterceptor);
    BuildJSObjectCheck(receiver, bit_field_mask);

    HIfContinuation continuation;
    BuildTestForDictionaryProperties(receiver, &continuation);
    IfBuilder if_dict_properties(this, &continuation);
    if_dict_properties.Then();
    {
      // Key is string, properties are dictionary mode
      BuildNonGlobalObjectCheck(receiver);

      HValue* properties = Add<HLoadNamedField>(
          receiver, static_cast<HValue*>(NULL),
          HObjectAccess::ForPropertiesPointer());

      HValue* hash =
          Add<HLoadNamedField>(key, static_cast<HValue*>(NULL),
                               HObjectAccess::ForNameHashField());

      hash = AddUncasted<HShr>(hash, Add<HConstant>(Name::kHashShift));

      HValue* value = BuildUncheckedDictionaryElementLoad(receiver,
                                                          properties,
                                                          key,
                                                          hash);
      Push(value);
    }
    if_dict_properties.Else();
    {
      // Key is string, properties are fast mode
      HValue* hash = BuildKeyedLookupCacheHash(receiver, key);

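      // The keyed lookup cache maps (receiver map, property name) pairs to
      // field indices. cache_keys below is a flat table holding those pairs
      // (two tagged slots per entry) and cache_field_offsets is a parallel
      // int32 table; each hash selects a bucket of
      // KeyedLookupCache::kEntriesPerBucket entries that are probed in turn.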
1823  ExternalReference cache_keys_ref =
1824  ExternalReference::keyed_lookup_cache_keys(isolate());
1825  HValue* cache_keys = Add<HConstant>(cache_keys_ref);
1826 
1827  HValue* map = Add<HLoadNamedField>(receiver, static_cast<HValue*>(NULL),
1828  HObjectAccess::ForMap());
1829  HValue* base_index = AddUncasted<HMul>(hash, Add<HConstant>(2));
1830  base_index->ClearFlag(HValue::kCanOverflow);
1831 
1832  HIfContinuation inline_or_runtime_continuation(
1834  {
1835  IfBuilder lookup_ifs[KeyedLookupCache::kEntriesPerBucket];
1836  for (int probe = 0; probe < KeyedLookupCache::kEntriesPerBucket;
1837  ++probe) {
1838  IfBuilder* lookup_if = &lookup_ifs[probe];
1839  lookup_if->Initialize(this);
1840  int probe_base = probe * KeyedLookupCache::kEntryLength;
1841  HValue* map_index = AddUncasted<HAdd>(
1842  base_index,
1843  Add<HConstant>(probe_base + KeyedLookupCache::kMapIndex));
1844  map_index->ClearFlag(HValue::kCanOverflow);
1845  HValue* key_index = AddUncasted<HAdd>(
1846  base_index,
1847  Add<HConstant>(probe_base + KeyedLookupCache::kKeyIndex));
1848  key_index->ClearFlag(HValue::kCanOverflow);
1849  HValue* map_to_check =
1850  Add<HLoadKeyed>(cache_keys, map_index, static_cast<HValue*>(NULL),
1852  lookup_if->If<HCompareObjectEqAndBranch>(map_to_check, map);
1853  lookup_if->And();
1854  HValue* key_to_check =
1855  Add<HLoadKeyed>(cache_keys, key_index, static_cast<HValue*>(NULL),
1857  lookup_if->If<HCompareObjectEqAndBranch>(key_to_check, key);
1858  lookup_if->Then();
1859  {
1860  ExternalReference cache_field_offsets_ref =
1861  ExternalReference::keyed_lookup_cache_field_offsets(isolate());
1862  HValue* cache_field_offsets =
1863  Add<HConstant>(cache_field_offsets_ref);
1864  HValue* index = AddUncasted<HAdd>(hash, Add<HConstant>(probe));
1865  index->ClearFlag(HValue::kCanOverflow);
1866  HValue* property_index = Add<HLoadKeyed>(
1867  cache_field_offsets, index, static_cast<HValue*>(NULL),
1868  EXTERNAL_INT32_ELEMENTS, NEVER_RETURN_HOLE, 0);
1869  Push(property_index);
1870  }
1871  lookup_if->Else();
1872  }
1873  for (int i = 0; i < KeyedLookupCache::kEntriesPerBucket; ++i) {
1874  lookup_ifs[i].JoinContinuation(&inline_or_runtime_continuation);
1875  }
1876  }
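// Every probe's Then clause pushed a cached field offset; joining all of the
// branches into one continuation leaves a single hit/miss decision below.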
1877 
1878  IfBuilder inline_or_runtime(this, &inline_or_runtime_continuation);
1879  inline_or_runtime.Then();
1880  {
1881  // Found a cached index, load property inline.
1882  Push(Add<HLoadFieldByIndex>(receiver, Pop()));
1883  }
1884  inline_or_runtime.Else();
1885  {
1886  // KeyedLookupCache miss; call runtime.
1887  Add<HPushArguments>(receiver, key);
1888  Push(Add<HCallRuntime>(
1889  isolate()->factory()->empty_string(),
1890  Runtime::FunctionForId(Runtime::kKeyedGetProperty), 2));
1891  }
1892  inline_or_runtime.End();
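// Either arm leaves exactly one value on the environment stack -- the inline
// field load or the runtime call result -- for the final Pop() to return.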
1893  }
1894  if_dict_properties.End();
1895  }
1896  index_name_split.End();
1897 
1898  return Pop();
1899 }
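// For orientation, a sketch (not part of the original file) of the lookup-
// cache fast path assembled above, in plain C++. The names `keys`,
// `field_offsets`, and `LoadFieldByIndex` are hypothetical stand-ins for the
// external reference arrays and the HLoadFieldByIndex instruction:
//
//   int base_index = hash * KeyedLookupCache::kEntryLength;
//   for (int probe = 0; probe < KeyedLookupCache::kEntriesPerBucket; ++probe) {
//     int entry = base_index + probe * KeyedLookupCache::kEntryLength;
//     if (keys[entry + KeyedLookupCache::kMapIndex] == map &&
//         keys[entry + KeyedLookupCache::kKeyIndex] == key) {
//       // Hit: the cache already knows the property's field index.
//       return LoadFieldByIndex(receiver, field_offsets[hash + probe]);
//     }
//   }
//   // Miss: fall back to Runtime::kKeyedGetProperty.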
1900 
1901 
1902 Handle<Code> KeyedLoadGenericStub::GenerateCode() {
1903  return DoGenerateCode(this);
1904 }
1905 
1906 
1907 template <>
1908 HValue* CodeStubGraphBuilder<VectorLoadStub>::BuildCodeStub() {
1909  HValue* receiver = GetParameter(VectorLoadICDescriptor::kReceiverIndex);
1910  Add<HDeoptimize>("Always deopt", Deoptimizer::EAGER);
1911  return receiver;
1912 }
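// This stub (and the identical VectorKeyedLoadStub below) appears to be a
// placeholder: it unconditionally deoptimizes, so every load it receives is
// handled by the full IC machinery instead.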
1913 
1914 
1915 Handle<Code> VectorLoadStub::GenerateCode() { return DoGenerateCode(this); }
1916 
1917 
1918 template <>
1919 HValue* CodeStubGraphBuilder<VectorKeyedLoadStub>::BuildCodeStub() {
1920  HValue* receiver = GetParameter(VectorLoadICDescriptor::kReceiverIndex);
1921  Add<HDeoptimize>("Always deopt", Deoptimizer::EAGER);
1922  return receiver;
1923 }
1924 
1925 
1926 Handle<Code> VectorKeyedLoadStub::GenerateCode() {
1927  return DoGenerateCode(this);
1928 }
1929 
1930 
1931 Handle<Code> MegamorphicLoadStub::GenerateCode() {
1932  return DoGenerateCode(this);
1933 }
1934 
1935 
1936 template <>
1937 HValue* CodeStubGraphBuilder<MegamorphicLoadStub>::BuildCodeStub() {
1938  // The return address is on the stack.
1939  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
1940  HValue* name = GetParameter(LoadDescriptor::kNameIndex);
1941 
1942  // Probe the stub cache.
1943  Code::Flags flags = Code::RemoveTypeAndHolderFromFlags(
1944  Code::ComputeHandlerFlags(Code::LOAD_IC));
1945  Add<HTailCallThroughMegamorphicCache>(receiver, name, flags);
1946 
1947  // We never continue.
1948  return graph()->GetConstant0();
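// The tail call above never returns; BuildCodeStub() must still yield an
// HValue, so the zero constant serves as unreachable filler.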
1949 }
1950 } } // namespace v8::internal