heap.h
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef V8_HEAP_HEAP_H_
6 #define V8_HEAP_HEAP_H_
7 
8 #include <cmath>
9 
10 #include "src/allocation.h"
11 #include "src/assert-scope.h"
12 #include "src/counters.h"
13 #include "src/globals.h"
15 #include "src/heap/gc-tracer.h"
17 #include "src/heap/mark-compact.h"
19 #include "src/heap/spaces.h"
20 #include "src/heap/store-buffer.h"
21 #include "src/list.h"
22 #include "src/splay-tree-inl.h"
23 
24 namespace v8 {
25 namespace internal {
26 
27 // Defines all the roots in Heap.
28 #define STRONG_ROOT_LIST(V) \
29  V(Map, byte_array_map, ByteArrayMap) \
30  V(Map, free_space_map, FreeSpaceMap) \
31  V(Map, one_pointer_filler_map, OnePointerFillerMap) \
32  V(Map, two_pointer_filler_map, TwoPointerFillerMap) \
33  /* Cluster the most popular ones in a few cache lines here at the top. */ \
34  V(Smi, store_buffer_top, StoreBufferTop) \
35  V(Oddball, undefined_value, UndefinedValue) \
36  V(Oddball, the_hole_value, TheHoleValue) \
37  V(Oddball, null_value, NullValue) \
38  V(Oddball, true_value, TrueValue) \
39  V(Oddball, false_value, FalseValue) \
40  V(Oddball, uninitialized_value, UninitializedValue) \
41  V(Oddball, exception, Exception) \
42  V(Map, cell_map, CellMap) \
43  V(Map, global_property_cell_map, GlobalPropertyCellMap) \
44  V(Map, shared_function_info_map, SharedFunctionInfoMap) \
45  V(Map, meta_map, MetaMap) \
46  V(Map, heap_number_map, HeapNumberMap) \
47  V(Map, mutable_heap_number_map, MutableHeapNumberMap) \
48  V(Map, native_context_map, NativeContextMap) \
49  V(Map, fixed_array_map, FixedArrayMap) \
50  V(Map, code_map, CodeMap) \
51  V(Map, scope_info_map, ScopeInfoMap) \
52  V(Map, fixed_cow_array_map, FixedCOWArrayMap) \
53  V(Map, fixed_double_array_map, FixedDoubleArrayMap) \
54  V(Map, constant_pool_array_map, ConstantPoolArrayMap) \
55  V(Oddball, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \
56  V(Map, hash_table_map, HashTableMap) \
57  V(Map, ordered_hash_table_map, OrderedHashTableMap) \
58  V(FixedArray, empty_fixed_array, EmptyFixedArray) \
59  V(ByteArray, empty_byte_array, EmptyByteArray) \
60  V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \
61  V(ConstantPoolArray, empty_constant_pool_array, EmptyConstantPoolArray) \
62  V(Oddball, arguments_marker, ArgumentsMarker) \
63  /* The roots above this line should be boring from a GC point of view. */ \
64  /* This means they are never in new space and never on a page that is */ \
65  /* being compacted. */ \
66  V(FixedArray, number_string_cache, NumberStringCache) \
67  V(Object, instanceof_cache_function, InstanceofCacheFunction) \
68  V(Object, instanceof_cache_map, InstanceofCacheMap) \
69  V(Object, instanceof_cache_answer, InstanceofCacheAnswer) \
70  V(FixedArray, single_character_string_cache, SingleCharacterStringCache) \
71  V(FixedArray, string_split_cache, StringSplitCache) \
72  V(FixedArray, regexp_multiple_cache, RegExpMultipleCache) \
73  V(Oddball, termination_exception, TerminationException) \
74  V(Smi, hash_seed, HashSeed) \
75  V(Map, symbol_map, SymbolMap) \
76  V(Map, string_map, StringMap) \
77  V(Map, one_byte_string_map, OneByteStringMap) \
78  V(Map, cons_string_map, ConsStringMap) \
79  V(Map, cons_one_byte_string_map, ConsOneByteStringMap) \
80  V(Map, sliced_string_map, SlicedStringMap) \
81  V(Map, sliced_one_byte_string_map, SlicedOneByteStringMap) \
82  V(Map, external_string_map, ExternalStringMap) \
83  V(Map, external_string_with_one_byte_data_map, \
84  ExternalStringWithOneByteDataMap) \
85  V(Map, external_one_byte_string_map, ExternalOneByteStringMap) \
86  V(Map, short_external_string_map, ShortExternalStringMap) \
87  V(Map, short_external_string_with_one_byte_data_map, \
88  ShortExternalStringWithOneByteDataMap) \
89  V(Map, internalized_string_map, InternalizedStringMap) \
90  V(Map, one_byte_internalized_string_map, OneByteInternalizedStringMap) \
91  V(Map, external_internalized_string_map, ExternalInternalizedStringMap) \
92  V(Map, external_internalized_string_with_one_byte_data_map, \
93  ExternalInternalizedStringWithOneByteDataMap) \
94  V(Map, external_one_byte_internalized_string_map, \
95  ExternalOneByteInternalizedStringMap) \
96  V(Map, short_external_internalized_string_map, \
97  ShortExternalInternalizedStringMap) \
98  V(Map, short_external_internalized_string_with_one_byte_data_map, \
99  ShortExternalInternalizedStringWithOneByteDataMap) \
100  V(Map, short_external_one_byte_internalized_string_map, \
101  ShortExternalOneByteInternalizedStringMap) \
102  V(Map, short_external_one_byte_string_map, ShortExternalOneByteStringMap) \
103  V(Map, undetectable_string_map, UndetectableStringMap) \
104  V(Map, undetectable_one_byte_string_map, UndetectableOneByteStringMap) \
105  V(Map, external_int8_array_map, ExternalInt8ArrayMap) \
106  V(Map, external_uint8_array_map, ExternalUint8ArrayMap) \
107  V(Map, external_int16_array_map, ExternalInt16ArrayMap) \
108  V(Map, external_uint16_array_map, ExternalUint16ArrayMap) \
109  V(Map, external_int32_array_map, ExternalInt32ArrayMap) \
110  V(Map, external_uint32_array_map, ExternalUint32ArrayMap) \
111  V(Map, external_float32_array_map, ExternalFloat32ArrayMap) \
112  V(Map, external_float64_array_map, ExternalFloat64ArrayMap) \
113  V(Map, external_uint8_clamped_array_map, ExternalUint8ClampedArrayMap) \
114  V(ExternalArray, empty_external_int8_array, EmptyExternalInt8Array) \
115  V(ExternalArray, empty_external_uint8_array, EmptyExternalUint8Array) \
116  V(ExternalArray, empty_external_int16_array, EmptyExternalInt16Array) \
117  V(ExternalArray, empty_external_uint16_array, EmptyExternalUint16Array) \
118  V(ExternalArray, empty_external_int32_array, EmptyExternalInt32Array) \
119  V(ExternalArray, empty_external_uint32_array, EmptyExternalUint32Array) \
120  V(ExternalArray, empty_external_float32_array, EmptyExternalFloat32Array) \
121  V(ExternalArray, empty_external_float64_array, EmptyExternalFloat64Array) \
122  V(ExternalArray, empty_external_uint8_clamped_array, \
123  EmptyExternalUint8ClampedArray) \
124  V(Map, fixed_uint8_array_map, FixedUint8ArrayMap) \
125  V(Map, fixed_int8_array_map, FixedInt8ArrayMap) \
126  V(Map, fixed_uint16_array_map, FixedUint16ArrayMap) \
127  V(Map, fixed_int16_array_map, FixedInt16ArrayMap) \
128  V(Map, fixed_uint32_array_map, FixedUint32ArrayMap) \
129  V(Map, fixed_int32_array_map, FixedInt32ArrayMap) \
130  V(Map, fixed_float32_array_map, FixedFloat32ArrayMap) \
131  V(Map, fixed_float64_array_map, FixedFloat64ArrayMap) \
132  V(Map, fixed_uint8_clamped_array_map, FixedUint8ClampedArrayMap) \
133  V(FixedTypedArrayBase, empty_fixed_uint8_array, EmptyFixedUint8Array) \
134  V(FixedTypedArrayBase, empty_fixed_int8_array, EmptyFixedInt8Array) \
135  V(FixedTypedArrayBase, empty_fixed_uint16_array, EmptyFixedUint16Array) \
136  V(FixedTypedArrayBase, empty_fixed_int16_array, EmptyFixedInt16Array) \
137  V(FixedTypedArrayBase, empty_fixed_uint32_array, EmptyFixedUint32Array) \
138  V(FixedTypedArrayBase, empty_fixed_int32_array, EmptyFixedInt32Array) \
139  V(FixedTypedArrayBase, empty_fixed_float32_array, EmptyFixedFloat32Array) \
140  V(FixedTypedArrayBase, empty_fixed_float64_array, EmptyFixedFloat64Array) \
141  V(FixedTypedArrayBase, empty_fixed_uint8_clamped_array, \
142  EmptyFixedUint8ClampedArray) \
143  V(Map, sloppy_arguments_elements_map, SloppyArgumentsElementsMap) \
144  V(Map, function_context_map, FunctionContextMap) \
145  V(Map, catch_context_map, CatchContextMap) \
146  V(Map, with_context_map, WithContextMap) \
147  V(Map, block_context_map, BlockContextMap) \
148  V(Map, module_context_map, ModuleContextMap) \
149  V(Map, global_context_map, GlobalContextMap) \
150  V(Map, undefined_map, UndefinedMap) \
151  V(Map, the_hole_map, TheHoleMap) \
152  V(Map, null_map, NullMap) \
153  V(Map, boolean_map, BooleanMap) \
154  V(Map, uninitialized_map, UninitializedMap) \
155  V(Map, arguments_marker_map, ArgumentsMarkerMap) \
156  V(Map, no_interceptor_result_sentinel_map, NoInterceptorResultSentinelMap) \
157  V(Map, exception_map, ExceptionMap) \
158  V(Map, termination_exception_map, TerminationExceptionMap) \
159  V(Map, message_object_map, JSMessageObjectMap) \
160  V(Map, foreign_map, ForeignMap) \
161  V(HeapNumber, nan_value, NanValue) \
162  V(HeapNumber, infinity_value, InfinityValue) \
163  V(HeapNumber, minus_zero_value, MinusZeroValue) \
164  V(Map, neander_map, NeanderMap) \
165  V(JSObject, message_listeners, MessageListeners) \
166  V(UnseededNumberDictionary, code_stubs, CodeStubs) \
167  V(UnseededNumberDictionary, non_monomorphic_cache, NonMonomorphicCache) \
168  V(PolymorphicCodeCache, polymorphic_code_cache, PolymorphicCodeCache) \
169  V(Code, js_entry_code, JsEntryCode) \
170  V(Code, js_construct_entry_code, JsConstructEntryCode) \
171  V(FixedArray, natives_source_cache, NativesSourceCache) \
172  V(Script, empty_script, EmptyScript) \
173  V(NameDictionary, intrinsic_function_names, IntrinsicFunctionNames) \
174  V(Cell, undefined_cell, UndefineCell) \
175  V(JSObject, observation_state, ObservationState) \
176  V(Map, external_map, ExternalMap) \
177  V(Object, symbol_registry, SymbolRegistry) \
178  V(Symbol, frozen_symbol, FrozenSymbol) \
179  V(Symbol, nonexistent_symbol, NonExistentSymbol) \
180  V(Symbol, elements_transition_symbol, ElementsTransitionSymbol) \
181  V(SeededNumberDictionary, empty_slow_element_dictionary, \
182  EmptySlowElementDictionary) \
183  V(Symbol, observed_symbol, ObservedSymbol) \
184  V(Symbol, uninitialized_symbol, UninitializedSymbol) \
185  V(Symbol, megamorphic_symbol, MegamorphicSymbol) \
186  V(Symbol, premonomorphic_symbol, PremonomorphicSymbol) \
187  V(Symbol, generic_symbol, GenericSymbol) \
188  V(Symbol, stack_trace_symbol, StackTraceSymbol) \
189  V(Symbol, detailed_stack_trace_symbol, DetailedStackTraceSymbol) \
190  V(Symbol, normal_ic_symbol, NormalICSymbol) \
191  V(Symbol, home_object_symbol, HomeObjectSymbol) \
192  V(FixedArray, materialized_objects, MaterializedObjects) \
193  V(FixedArray, allocation_sites_scratchpad, AllocationSitesScratchpad) \
194  V(FixedArray, microtask_queue, MicrotaskQueue)
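// For illustration: each V(type, name, CamelName) entry above is expanded by
// client macros. For example, the ROOT_ACCESSOR macro defined further down in
// this file turns
//   V(Oddball, undefined_value, UndefinedValue)
// into
//   Oddball* undefined_value() {
//     return Oddball::cast(roots_[kUndefinedValueRootIndex]);
//   }
// plus a raw_unchecked_undefined_value() variant that skips the checked cast.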
195 
196 // Entries in this list are limited to Smis and are not visited during GC.
197 #define SMI_ROOT_LIST(V) \
198  V(Smi, stack_limit, StackLimit) \
199  V(Smi, real_stack_limit, RealStackLimit) \
200  V(Smi, last_script_id, LastScriptId) \
201  V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset) \
202  V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset) \
203  V(Smi, getter_stub_deopt_pc_offset, GetterStubDeoptPCOffset) \
204  V(Smi, setter_stub_deopt_pc_offset, SetterStubDeoptPCOffset)
205 
206 #define ROOT_LIST(V) \
207  STRONG_ROOT_LIST(V) \
208  SMI_ROOT_LIST(V) \
209  V(StringTable, string_table, StringTable)
210 
211 // Heap roots that are known to be immortal immovable, for which we can safely
212 // skip write barriers.
213 #define IMMORTAL_IMMOVABLE_ROOT_LIST(V) \
214  V(byte_array_map) \
215  V(free_space_map) \
216  V(one_pointer_filler_map) \
217  V(two_pointer_filler_map) \
218  V(undefined_value) \
219  V(the_hole_value) \
220  V(null_value) \
221  V(true_value) \
222  V(false_value) \
223  V(uninitialized_value) \
224  V(cell_map) \
225  V(global_property_cell_map) \
226  V(shared_function_info_map) \
227  V(meta_map) \
228  V(heap_number_map) \
229  V(mutable_heap_number_map) \
230  V(native_context_map) \
231  V(fixed_array_map) \
232  V(code_map) \
233  V(scope_info_map) \
234  V(fixed_cow_array_map) \
235  V(fixed_double_array_map) \
236  V(constant_pool_array_map) \
237  V(no_interceptor_result_sentinel) \
238  V(hash_table_map) \
239  V(ordered_hash_table_map) \
240  V(empty_fixed_array) \
241  V(empty_byte_array) \
242  V(empty_descriptor_array) \
243  V(empty_constant_pool_array) \
244  V(arguments_marker) \
245  V(symbol_map) \
246  V(sloppy_arguments_elements_map) \
247  V(function_context_map) \
248  V(catch_context_map) \
249  V(with_context_map) \
250  V(block_context_map) \
251  V(module_context_map) \
252  V(global_context_map) \
253  V(undefined_map) \
254  V(the_hole_map) \
255  V(null_map) \
256  V(boolean_map) \
257  V(uninitialized_map) \
258  V(message_object_map) \
259  V(foreign_map) \
260  V(neander_map)
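// For illustration: like the other lists in this file, clients consume this
// list by passing a one-argument macro. A (hypothetical) debugging helper that
// prints the name of every immortal immovable root could look like:
//   #define PRINT_IMMORTAL_ROOT(name) PrintF(#name "\n");
//   IMMORTAL_IMMOVABLE_ROOT_LIST(PRINT_IMMORTAL_ROOT)
//   #undef PRINT_IMMORTAL_ROOT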
261 
262 #define INTERNALIZED_STRING_LIST(V) \
263  V(Object_string, "Object") \
264  V(proto_string, "__proto__") \
265  V(arguments_string, "arguments") \
266  V(Arguments_string, "Arguments") \
267  V(caller_string, "caller") \
268  V(boolean_string, "boolean") \
269  V(Boolean_string, "Boolean") \
270  V(callee_string, "callee") \
271  V(constructor_string, "constructor") \
272  V(dot_result_string, ".result") \
273  V(dot_for_string, ".for.") \
274  V(eval_string, "eval") \
275  V(empty_string, "") \
276  V(function_string, "function") \
277  V(Function_string, "Function") \
278  V(length_string, "length") \
279  V(name_string, "name") \
280  V(null_string, "null") \
281  V(number_string, "number") \
282  V(Number_string, "Number") \
283  V(nan_string, "NaN") \
284  V(source_string, "source") \
285  V(source_url_string, "source_url") \
286  V(source_mapping_url_string, "source_mapping_url") \
287  V(global_string, "global") \
288  V(ignore_case_string, "ignoreCase") \
289  V(multiline_string, "multiline") \
290  V(sticky_string, "sticky") \
291  V(harmony_regexps_string, "harmony_regexps") \
292  V(input_string, "input") \
293  V(index_string, "index") \
294  V(last_index_string, "lastIndex") \
295  V(object_string, "object") \
296  V(prototype_string, "prototype") \
297  V(string_string, "string") \
298  V(String_string, "String") \
299  V(symbol_string, "symbol") \
300  V(Symbol_string, "Symbol") \
301  V(Map_string, "Map") \
302  V(Set_string, "Set") \
303  V(WeakMap_string, "WeakMap") \
304  V(WeakSet_string, "WeakSet") \
305  V(for_string, "for") \
306  V(for_api_string, "for_api") \
307  V(for_intern_string, "for_intern") \
308  V(private_api_string, "private_api") \
309  V(private_intern_string, "private_intern") \
310  V(Date_string, "Date") \
311  V(char_at_string, "CharAt") \
312  V(undefined_string, "undefined") \
313  V(value_of_string, "valueOf") \
314  V(stack_string, "stack") \
315  V(toJSON_string, "toJSON") \
316  V(KeyedLoadMonomorphic_string, "KeyedLoadMonomorphic") \
317  V(KeyedStoreMonomorphic_string, "KeyedStoreMonomorphic") \
318  V(stack_overflow_string, "kStackOverflowBoilerplate") \
319  V(illegal_access_string, "illegal access") \
320  V(cell_value_string, "%cell_value") \
321  V(illegal_argument_string, "illegal argument") \
322  V(identity_hash_string, "v8::IdentityHash") \
323  V(closure_string, "(closure)") \
324  V(dot_string, ".") \
325  V(compare_ic_string, "==") \
326  V(strict_compare_ic_string, "===") \
327  V(infinity_string, "Infinity") \
328  V(minus_infinity_string, "-Infinity") \
329  V(query_colon_string, "(?:)") \
330  V(Generator_string, "Generator") \
331  V(throw_string, "throw") \
332  V(done_string, "done") \
333  V(value_string, "value") \
334  V(next_string, "next") \
335  V(byte_length_string, "byteLength") \
336  V(byte_offset_string, "byteOffset") \
337  V(intl_initialized_marker_string, "v8::intl_initialized_marker") \
338  V(intl_impl_object_string, "v8::intl_object")
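// For illustration: the STRING_ACCESSOR macro defined further down expands
// each V(name, str) pair into a typed getter on Heap, e.g.
//   V(Object_string, "Object")
// becomes
//   String* Object_string() {
//     return String::cast(roots_[kObject_stringRootIndex]);
//   }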
339 
340 // Forward declarations.
341 class HeapStats;
342 class Isolate;
343 class WeakObjectRetainer;
344 
345 
346 typedef String* (*ExternalStringTableUpdaterCallback)(Heap* heap,
347  Object** pointer);
348 
349 class StoreBufferRebuilder {
350  public:
351  explicit StoreBufferRebuilder(StoreBuffer* store_buffer)
352  : store_buffer_(store_buffer) {}
353 
354  void Callback(MemoryChunk* page, StoreBufferEvent event);
355 
356  private:
357   StoreBuffer* store_buffer_;
358 
359  // We record in this variable how full the store buffer was when we started
360  // iterating over the current page, finding pointers to new space. If the
361  // store buffer overflows again we can exempt the page from the store buffer
362  // by rewinding to this point instead of having to search the store buffer.
364  // The current page we are scanning in the store buffer iterator.
366 };
367 
368 
369 // A queue of objects promoted during scavenge. Each object is accompanied
370 // by its size to avoid dereferencing a map pointer for scanning.
371 class PromotionQueue {
372  public:
373  explicit PromotionQueue(Heap* heap)
374  : front_(NULL),
375  rear_(NULL),
376  limit_(NULL),
377  emergency_stack_(0),
378  heap_(heap) {}
379 
380  void Initialize();
381 
382  void Destroy() {
383  DCHECK(is_empty());
384  delete emergency_stack_;
385   emergency_stack_ = NULL;
386  }
387 
388   Page* GetHeadPage() {
389  return Page::FromAllocationTop(reinterpret_cast<Address>(rear_));
390  }
391 
392  void SetNewLimit(Address limit) {
393  limit_ = reinterpret_cast<intptr_t*>(limit);
394 
395  if (limit_ <= rear_) {
396  return;
397  }
398 
400  }
401 
402  bool IsBelowPromotionQueue(Address to_space_top) {
403  // If the given to-space top pointer and the head of the promotion queue
404  // are not on the same page, then the to-space objects are below the
405  // promotion queue.
406  if (GetHeadPage() != Page::FromAddress(to_space_top)) {
407  return true;
408  }
409   // If the to-space top pointer is less than or equal to the promotion
410   // queue head, then the to-space objects are below the promotion queue.
411  return reinterpret_cast<intptr_t*>(to_space_top) <= rear_;
412  }
413 
414  bool is_empty() {
415  return (front_ == rear_) &&
416  (emergency_stack_ == NULL || emergency_stack_->length() == 0);
417  }
418 
419  inline void insert(HeapObject* target, int size);
420 
421  void remove(HeapObject** target, int* size) {
422  DCHECK(!is_empty());
423  if (front_ == rear_) {
424  Entry e = emergency_stack_->RemoveLast();
425  *target = e.obj_;
426  *size = e.size_;
427  return;
428  }
429 
430  if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(front_))) {
431  NewSpacePage* front_page =
432  NewSpacePage::FromAddress(reinterpret_cast<Address>(front_));
433  DCHECK(!front_page->prev_page()->is_anchor());
434  front_ = reinterpret_cast<intptr_t*>(front_page->prev_page()->area_end());
435  }
436  *target = reinterpret_cast<HeapObject*>(*(--front_));
437  *size = static_cast<int>(*(--front_));
438  // Assert no underflow.
439  SemiSpace::AssertValidRange(reinterpret_cast<Address>(rear_),
440  reinterpret_cast<Address>(front_));
441  }
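  // Note: as remove() above shows, each queued entry occupies
  // kEntrySizeInWords (= 2) consecutive words; remove() pops the object
  // pointer first and then its size.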
442 
443  private:
444  // The front of the queue is higher in the memory page chain than the rear.
445  intptr_t* front_;
446  intptr_t* rear_;
447  intptr_t* limit_;
448 
449  static const int kEntrySizeInWords = 2;
450 
451  struct Entry {
452  Entry(HeapObject* obj, int size) : obj_(obj), size_(size) {}
453 
454   HeapObject* obj_;
455  int size_;
456  };
457   List<Entry>* emergency_stack_;
458 
459   Heap* heap_;
460 
461  void RelocateQueueHead();
462 
464 };
465 
466 
467 typedef void (*ScavengingCallback)(Map* map, HeapObject** slot,
468  HeapObject* object);
469 
470 
471 // The external strings table is a place where all external strings are
472 // registered. We need to keep track of such strings to properly
473 // finalize them.
474 class ExternalStringTable {
475  public:
476  // Registers an external string.
477  inline void AddString(String* string);
478 
479  inline void Iterate(ObjectVisitor* v);
480 
481  // Restores internal invariant and gets rid of collected strings.
482  // Must be called after each Iterate() that modified the strings.
483  void CleanUp();
484 
485  // Destroys all allocated memory.
486  void TearDown();
487 
488  private:
489  explicit ExternalStringTable(Heap* heap) : heap_(heap) {}
490 
491  friend class Heap;
492 
493  inline void Verify();
494 
495  inline void AddOldString(String* string);
496 
497  // Notifies the table that only a prefix of the new list is valid.
498  inline void ShrinkNewStrings(int position);
499 
500   // To speed up scavenge collections, new space strings are kept
501  // separate from old space strings.
504 
506 
508 };
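// For illustration, the lifecycle suggested by the comments above: AddString()
// registers a string when it is externalized, Iterate() lets the GC visit the
// registered strings, CleanUp() then drops entries for strings that were
// collected, and TearDown() releases everything on heap shutdown.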
509 
510 
514 };
515 
516 
517 class Heap {
518  public:
519  // Configure heap size in MB before setup. Return false if the heap has been
520  // set up already.
521  bool ConfigureHeap(int max_semi_space_size, int max_old_space_size,
522  int max_executable_size, size_t code_range_size);
523  bool ConfigureHeapDefault();
524 
525  // Prepares the heap, setting up memory areas that are needed in the isolate
526  // without actually creating any objects.
527  bool SetUp();
528 
529  // Bootstraps the object heap with the core set of objects required to run.
530  // Returns whether it succeeded.
531  bool CreateHeapObjects();
532 
533  // Destroys all memory allocated by the heap.
534  void TearDown();
535 
536  // Set the stack limit in the roots_ array. Some architectures generate
537  // code that looks here, because it is faster than loading from the static
538  // jslimit_/real_jslimit_ variable in the StackGuard.
539  void SetStackLimits();
540 
541  // Returns whether SetUp has been called.
542  bool HasBeenSetUp();
543 
544  // Returns the maximum amount of memory reserved for the heap. For
545  // the young generation, we reserve 4 times the amount needed for a
546  // semi space. The young generation consists of two semi spaces and
547  // we reserve twice the amount needed for those in order to ensure
548  // that new space can be aligned to its size.
549  intptr_t MaxReserved() {
551  }
557 
558  // Returns the capacity of the heap in bytes w/o growing. Heap grows when
559  // more spaces are needed until it reaches the limit.
560  intptr_t Capacity();
561 
562  // Returns the amount of memory currently committed for the heap.
563  intptr_t CommittedMemory();
564 
565  // Returns the amount of executable memory currently committed for the heap.
566  intptr_t CommittedMemoryExecutable();
567 
568   // Returns the amount of physical memory currently committed for the heap.
569  size_t CommittedPhysicalMemory();
570 
571  // Returns the maximum amount of memory ever committed for the heap.
573 
574  // Updates the maximum committed memory for the heap. Should be called
575  // whenever a space grows.
576  void UpdateMaximumCommitted();
577 
578  // Returns the available bytes in space w/o growing.
579  // Heap doesn't guarantee that it can allocate an object that requires
580  // all available bytes. Check MaxHeapObjectSize() instead.
581  intptr_t Available();
582 
583   // Returns the size of all objects residing in the heap.
584  intptr_t SizeOfObjects();
585 
586  // Return the starting address and a mask for the new space. And-masking an
587  // address with the mask will result in the start address of the new space
588  // for all addresses in either semispace.
592 
593  NewSpace* new_space() { return &new_space_; }
602  switch (idx) {
603  case OLD_POINTER_SPACE:
604  return old_pointer_space();
605  case OLD_DATA_SPACE:
606  return old_data_space();
607  case MAP_SPACE:
608  return map_space();
609  case CELL_SPACE:
610  return cell_space();
611  case PROPERTY_CELL_SPACE:
612  return property_cell_space();
613  case CODE_SPACE:
614  return code_space();
615  case NEW_SPACE:
616  case LO_SPACE:
617  UNREACHABLE();
618  }
619  return NULL;
620  }
621 
624  return reinterpret_cast<Address>(&always_allocate_scope_depth_);
625  }
626 
629  }
632  }
633 
636  }
639  }
640 
643  }
646  }
647 
648  // Returns a deep copy of the JavaScript object.
649  // Properties and elements are copied too.
650  // Optionally takes an AllocationSite to be appended in an AllocationMemento.
652  CopyJSObject(JSObject* source, AllocationSite* site = NULL);
653 
654  // Clear the Instanceof cache (used when a prototype changes).
655  inline void ClearInstanceofCache();
656 
657  // Iterates the whole code space to clear all ICs of the given kind.
658  void ClearAllICsByKind(Code::Kind kind);
659 
660  // For use during bootup.
662 
663  template <typename T>
664  static inline bool IsOneByte(T t, int chars);
665 
666   // Moves len elements within a given array from the src_index position to
667   // the dst_index position.
668  void MoveElements(FixedArray* array, int dst_index, int src_index, int len);
669 
670  // Sloppy mode arguments object size.
671  static const int kSloppyArgumentsObjectSize =
673  // Strict mode arguments has no callee so it is smaller.
674  static const int kStrictArgumentsObjectSize =
676   // Indices for direct access into argument objects.
677  static const int kArgumentsLengthIndex = 0;
678  // callee is only valid in sloppy mode.
679  static const int kArgumentsCalleeIndex = 1;
680 
681  // Finalizes an external string by deleting the associated external
682  // data and clearing the resource pointer.
683  inline void FinalizeExternalString(String* string);
684 
685  // Initialize a filler object to keep the ability to iterate over the heap
686  // when introducing gaps within pages.
687  void CreateFillerObjectAt(Address addr, int size);
688 
689  bool CanMoveObjectStart(HeapObject* object);
690 
691  // Indicates whether live bytes adjustment is triggered from within the GC
692  // code or from mutator code.
694 
695  // Maintain consistency of live bytes during incremental marking.
696  void AdjustLiveBytes(Address address, int by, InvocationMode mode);
697 
698  // Trim the given array from the left. Note that this relocates the object
699  // start and hence is only valid if there is only a single reference to it.
700  FixedArrayBase* LeftTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);
701 
702  // Trim the given array from the right.
703  template<Heap::InvocationMode mode>
704  void RightTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);
705 
706  // Converts the given boolean condition to JavaScript boolean value.
707  inline Object* ToBoolean(bool condition);
708 
709  // Performs garbage collection operation.
710  // Returns whether there is a chance that another major GC could
711  // collect more garbage.
712  inline bool CollectGarbage(
713  AllocationSpace space, const char* gc_reason = NULL,
714  const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
715 
716  static const int kNoGCFlags = 0;
717  static const int kReduceMemoryFootprintMask = 1;
718  static const int kAbortIncrementalMarkingMask = 2;
719 
720  // Making the heap iterable requires us to abort incremental marking.
722 
723  // Performs a full garbage collection. If (flags & kMakeHeapIterableMask) is
724  // non-zero, then the slower precise sweeper is used, which leaves the heap
725  // in a state where we can iterate over the heap visiting all objects.
726  void CollectAllGarbage(
727  int flags, const char* gc_reason = NULL,
728  const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
729 
730  // Last hope GC, should try to squeeze as much as possible.
731  void CollectAllAvailableGarbage(const char* gc_reason = NULL);
732 
733  // Check whether the heap is currently iterable.
734  bool IsHeapIterable();
735 
736  // Notify the heap that a context has been disposed.
737  int NotifyContextDisposed();
738 
741  if (FLAG_gc_verbose) {
742  PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
743  }
744  }
745 
748  if (FLAG_gc_verbose) {
749  PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
750  }
751  }
752 
754 
756  GCType gc_type_filter, bool pass_isolate = true);
758 
760  GCType gc_type_filter, bool pass_isolate = true);
762 
763 // Heap root getters. We have versions with and without type::cast() here.
764 // You can't use type::cast during GC because the assert fails.
765 // TODO(1490): Try removing the unchecked accessors, now that GC marking does
766 // not corrupt the map.
767 #define ROOT_ACCESSOR(type, name, camel_name) \
768  type* name() { return type::cast(roots_[k##camel_name##RootIndex]); } \
769  type* raw_unchecked_##name() { \
770  return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \
771  }
772   ROOT_LIST(ROOT_ACCESSOR)
773 #undef ROOT_ACCESSOR
774 
775 // Utility type maps
776 #define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
777  Map* name##_map() { return Map::cast(roots_[k##Name##MapRootIndex]); }
779 #undef STRUCT_MAP_ACCESSOR
780 
781 #define STRING_ACCESSOR(name, str) \
782  String* name() { return String::cast(roots_[k##name##RootIndex]); }
783   INTERNALIZED_STRING_LIST(STRING_ACCESSOR)
784 #undef STRING_ACCESSOR
785 
786  // The hidden_string is special because it is the empty string, but does
787  // not match the empty string.
789 
791  native_contexts_list_ = object;
792  }
794 
797 
799  allocation_sites_list_ = object;
800  }
802 
803  // Used in CreateAllocationSiteStub and the (de)serializer.
805 
807 
808  void set_encountered_weak_collections(Object* weak_collection) {
809  encountered_weak_collections_ = weak_collection;
810  }
813  }
814 
815  // Number of mark-sweeps.
816  unsigned int ms_count() { return ms_count_; }
817 
818  // Iterates over all roots in the heap.
820  // Iterates over all strong roots in the heap.
822  // Iterates over entries in the smi roots list. Only interesting to the
823  // serializer/deserializer, since GC does not care about smis.
825  // Iterates over all the other roots in the heap.
827 
828  // Iterate pointers to from semispace of new space found in memory interval
829  // from start to end.
831  ObjectSlotCallback callback);
832 
833  // Returns whether the object resides in new space.
834  inline bool InNewSpace(Object* object);
835  inline bool InNewSpace(Address address);
836  inline bool InNewSpacePage(Address address);
837  inline bool InFromSpace(Object* object);
838  inline bool InToSpace(Object* object);
839 
840  // Returns whether the object resides in old pointer space.
841  inline bool InOldPointerSpace(Address address);
842  inline bool InOldPointerSpace(Object* object);
843 
844  // Returns whether the object resides in old data space.
845  inline bool InOldDataSpace(Address address);
846  inline bool InOldDataSpace(Object* object);
847 
848   // Checks whether an address/object is in the heap (including the auxiliary
849   // area and unused area).
850  bool Contains(Address addr);
851  bool Contains(HeapObject* value);
852 
853   // Checks whether an address/object is in a space.
854  // Currently used by tests, serialization and heap verification only.
855  bool InSpace(Address addr, AllocationSpace space);
856  bool InSpace(HeapObject* value, AllocationSpace space);
857 
858  // Finds out which space an object should get promoted to based on its type.
859  inline OldSpace* TargetSpace(HeapObject* object);
860  static inline AllocationSpace TargetSpaceId(InstanceType type);
861 
862   // Checks whether the given object is allowed to be migrated from its
863  // current space into the given destination space. Used for debugging.
864  inline bool AllowedToBeMigrated(HeapObject* object, AllocationSpace dest);
865 
866  // Sets the stub_cache_ (only used when expanding the dictionary).
868  roots_[kCodeStubsRootIndex] = value;
869  }
870 
871  // Support for computing object sizes for old objects during GCs. Returns
872  // a function that is guaranteed to be safe for computing object sizes in
873  // the current GC phase.
876  }
877 
878  // Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
880  roots_[kNonMonomorphicCacheRootIndex] = value;
881  }
882 
884  roots_[kEmptyScriptRootIndex] = script;
885  }
886 
888  roots_[kStoreBufferTopRootIndex] = reinterpret_cast<Smi*>(top);
889  }
890 
892  roots_[kMaterializedObjectsRootIndex] = objects;
893  }
894 
895  // Generated code can embed this address to get access to the roots.
897 
899  return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]);
900  }
901 
902 #ifdef VERIFY_HEAP
903  // Verify the heap is in its normal state before or after a GC.
904  void Verify();
905 
906 
907  bool weak_embedded_objects_verification_enabled() {
908  return no_weak_object_verification_scope_depth_ == 0;
909  }
910 #endif
911 
912 #ifdef DEBUG
913  void Print();
914  void PrintHandles();
915 
916  void OldPointerSpaceCheckStoreBuffer();
917  void MapSpaceCheckStoreBuffer();
918  void LargeObjectSpaceCheckStoreBuffer();
919 
920  // Report heap statistics.
921  void ReportHeapStatistics(const char* title);
922  void ReportCodeStatistics(const char* title);
923 #endif
924 
925  // Zapping is needed for verify heap, and always done in debug builds.
926  static inline bool ShouldZapGarbage() {
927 #ifdef DEBUG
928  return true;
929 #else
930 #ifdef VERIFY_HEAP
931  return FLAG_verify_heap;
932 #else
933  return false;
934 #endif
935 #endif
936  }
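  // For illustration: the #ifdefs above mean DEBUG builds always zap garbage,
  // release builds with VERIFY_HEAP zap only when --verify-heap is passed, and
  // plain release builds never zap.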
937 
938  // Number of "runtime allocations" done so far.
940 
941  // Returns deterministic "time" value in ms. Works only with
942  // FLAG_verify_predictable.
943  double synthetic_time() { return allocations_count_ / 2.0; }
944 
945  // Print short heap statistics.
947 
948  // Write barrier support for address[offset] = o.
949  INLINE(void RecordWrite(Address address, int offset));
950 
951  // Write barrier support for address[start : start + len[ = o.
952  INLINE(void RecordWrites(Address address, int start, int len));
953 
955  inline HeapState gc_state() { return gc_state_; }
956 
957  inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; }
958 
959 #ifdef DEBUG
960  void set_allocation_timeout(int timeout) { allocation_timeout_ = timeout; }
961 
962  void TracePathToObjectFrom(Object* target, Object* root);
963  void TracePathToObject(Object* target);
964  void TracePathToGlobal();
965 #endif
966 
967  // Callback function passed to Heap::Iterate etc. Copies an object if
968  // necessary, the object might be promoted to an old space. The caller must
969  // ensure the precondition that the object is (a) a heap object and (b) in
970  // the heap's from space.
971  static inline void ScavengePointer(HeapObject** p);
972  static inline void ScavengeObject(HeapObject** p, HeapObject* object);
973 
975 
976  // If an object has an AllocationMemento trailing it, return it, otherwise
977  // return NULL;
979 
980  // An object may have an AllocationSite associated with it through a trailing
981  // AllocationMemento. Its feedback should be updated when objects are found
982  // in the heap.
983  static inline void UpdateAllocationSiteFeedback(HeapObject* object,
985 
986  // Support for partial snapshots. After calling this we have a linear
987  // space to write objects in each space.
988  void ReserveSpace(int* sizes, Address* addresses);
989 
990  //
991  // Support for the API.
992  //
993 
994  void CreateApiObjects();
995 
996  inline intptr_t PromotedTotalSize() {
998  if (total > kMaxInt) return static_cast<intptr_t>(kMaxInt);
999  if (total < 0) return 0;
1000  return static_cast<intptr_t>(total);
1001  }
1002 
1003  inline intptr_t OldGenerationSpaceAvailable() {
1005  }
1006 
1009  }
1010 
1012  8 * (Page::kPageSize > MB ? Page::kPageSize : MB);
1013 
1014  static const int kPointerMultiplier = i::kPointerSize / 4;
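  // For illustration: with 8-byte pointers (a 64-bit build) kPointerMultiplier
  // is 2, doubling the MB-denominated limits below; with 4-byte pointers it
  // stays 1.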
1015 
1016  // The new space size has to be a power of 2. Sizes are in MB.
1021 
1022  // The old space size has to be a multiple of Page::kPageSize.
1023  // Sizes are in MB.
1026  256 * kPointerMultiplier;
1029 
1030  // The executable size has to be a multiple of Page::kPageSize.
1031  // Sizes are in MB.
1034  192 * kPointerMultiplier;
1036  256 * kPointerMultiplier;
1038  256 * kPointerMultiplier;
1039 
1040  intptr_t OldGenerationAllocationLimit(intptr_t old_gen_size,
1041  int freed_global_handles);
1042 
1043  // Indicates whether inline bump-pointer allocation has been disabled.
1045 
1046  // Switch whether inline bump-pointer allocation should be used.
1047  void EnableInlineAllocation();
1048  void DisableInlineAllocation();
1049 
1050  // Implements the corresponding V8 API function.
1051  bool IdleNotification(int idle_time_in_ms);
1052 
1053  // Declare all the root indices. This defines the root list order.
1054   enum RootListIndex {
1055 #define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
1056     STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
1057 #undef ROOT_INDEX_DECLARATION
1058 
1059 #define STRING_INDEX_DECLARATION(name, str) k##name##RootIndex,
1060     INTERNALIZED_STRING_LIST(STRING_INDEX_DECLARATION)
1061 #undef STRING_INDEX_DECLARATION
1062 
1063 // Utility type maps
1064 #define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
1066 #undef DECLARE_STRUCT_MAP
1068 
1069 #define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
1070     SMI_ROOT_LIST(ROOT_INDEX_DECLARATION)
1071 #undef ROOT_INDEX_DECLARATION
1075  };
1076 
1077  STATIC_ASSERT(kUndefinedValueRootIndex ==
1083 
1084  // Generated code can embed direct references to non-writable roots if
1085  // they are in new space.
1086  static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index);
1087  // Generated code can treat direct references to this root as constant.
1088  bool RootCanBeTreatedAsConstant(RootListIndex root_index);
1089 
1092 
1095 
1100 
1101  void RecordStats(HeapStats* stats, bool take_snapshot = false);
1102 
1103  // Copy block of memory from src to dst. Size of block should be aligned
1104  // by pointer size.
1105  static inline void CopyBlock(Address dst, Address src, int byte_size);
1106 
1107  // Optimized version of memmove for blocks with pointer size aligned sizes and
1108  // pointer size aligned addresses.
1109  static inline void MoveBlock(Address dst, Address src, int byte_size);
1110 
1111  // Check new space expansion criteria and expand semispaces if it was hit.
1113 
1114  inline void IncrementPromotedObjectsSize(int object_size) {
1115  DCHECK(object_size > 0);
1116  promoted_objects_size_ += object_size;
1117  }
1118 
1119  inline void IncrementSemiSpaceCopiedObjectSize(int object_size) {
1120  DCHECK(object_size > 0);
1121  semi_space_copied_object_size_ += object_size;
1122  }
1123 
1125 
1127 
1129 
1130  inline void IncrementYoungSurvivorsCounter(int survived) {
1131  DCHECK(survived >= 0);
1132  survived_since_last_expansion_ += survived;
1133  }
1134 
1135  inline bool NextGCIsLikelyToBeFull() {
1136  if (FLAG_gc_global) return true;
1137 
1138  if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true;
1139 
1140  intptr_t adjusted_allocation_limit =
1142 
1143  if (PromotedTotalSize() >= adjusted_allocation_limit) return true;
1144 
1145  return false;
1146  }
1147 
1149  ExternalStringTableUpdaterCallback updater_func);
1150 
1152  ExternalStringTableUpdaterCallback updater_func);
1153 
1154  void ProcessWeakReferences(WeakObjectRetainer* retainer);
1155 
1157 
1158  // An object should be promoted if the object has survived a
1159  // scavenge operation.
1160  inline bool ShouldBePromoted(Address old_address, int object_size);
1161 
1163 
1164  void ClearNormalizedMapCaches();
1165 
1166  GCTracer* tracer() { return &tracer_; }
1167 
1168  // Returns the size of objects residing in non new spaces.
1169  intptr_t PromotedSpaceSizeOfObjects();
1170 
1174  }
1175 
1176  void IncrementCodeGeneratedBytes(bool is_crankshafted, int size) {
1177  if (is_crankshafted) {
1179  } else {
1181  }
1182  }
1183 
1184  // Update GC statistics that are tracked on the Heap.
1185  void UpdateCumulativeGCStatistics(double duration, double spent_in_mutator,
1186  double marking_time);
1187 
1188  // Returns maximum GC pause.
1189  double get_max_gc_pause() { return max_gc_pause_; }
1190 
1191  // Returns maximum size of objects alive after GC.
1193 
1194  // Returns minimal interval between two subsequent collections.
1196 
1198  return &mark_compact_collector_;
1199  }
1200 
1202 
1203  Marking* marking() { return &marking_; }
1204 
1206 
1208  return &external_string_table_;
1209  }
1210 
1211  // Returns the current sweep generation.
1213 
1214  inline Isolate* isolate();
1215 
1218 
1220 
1221  inline void DoScavengeObject(Map* map, HeapObject** slot, HeapObject* obj) {
1223  }
1224 
1225  void QueueMemoryChunkForFree(MemoryChunk* chunk);
1226  void FreeQueuedChunks();
1227 
1228  int gc_count() const { return gc_count_; }
1229 
1230  // Completely clear the Instanceof cache (to stop it keeping objects alive
1231  // around a GC).
1232  inline void CompletelyClearInstanceofCache();
1233 
1234  // The roots that have an index less than this are always in old space.
1235  static const int kOldSpaceRoots = 0x20;
1236 
1238  uint32_t seed = static_cast<uint32_t>(hash_seed()->value());
1239  DCHECK(FLAG_randomize_hashes || seed == 0);
1240  return seed;
1241  }
1242 
1243  void SetArgumentsAdaptorDeoptPCOffset(int pc_offset) {
1244  DCHECK(arguments_adaptor_deopt_pc_offset() == Smi::FromInt(0));
1245  set_arguments_adaptor_deopt_pc_offset(Smi::FromInt(pc_offset));
1246  }
1247 
1248  void SetConstructStubDeoptPCOffset(int pc_offset) {
1249  DCHECK(construct_stub_deopt_pc_offset() == Smi::FromInt(0));
1250  set_construct_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
1251  }
1252 
1253  void SetGetterStubDeoptPCOffset(int pc_offset) {
1254  DCHECK(getter_stub_deopt_pc_offset() == Smi::FromInt(0));
1255  set_getter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
1256  }
1257 
1258  void SetSetterStubDeoptPCOffset(int pc_offset) {
1259  DCHECK(setter_stub_deopt_pc_offset() == Smi::FromInt(0));
1260  set_setter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
1261  }
1262 
1263  // For post mortem debugging.
1264  void RememberUnmappedPage(Address page, bool compacted);
1265 
1266  // Global inline caching age: it is incremented on some GCs after context
1267  // disposal. We use it to flush inline caches.
1268  int global_ic_age() { return global_ic_age_; }
1269 
1272  }
1273 
1275 
1278  }
1279 
1281 
1283 
1286  }
1287 
1288  // ObjectStats are kept in two arrays, counts and sizes. Related stats are
1289  // stored in a contiguous linear buffer. Stats groups are stored one after
1290  // another.
1291  enum {
1298  };
1299 
1300  void RecordObjectStats(InstanceType type, size_t size) {
1301  DCHECK(type <= LAST_TYPE);
1302  object_counts_[type]++;
1303  object_sizes_[type] += size;
1304  }
1305 
1306  void RecordCodeSubTypeStats(int code_sub_type, int code_age, size_t size) {
1307  int code_sub_type_index = FIRST_CODE_KIND_SUB_TYPE + code_sub_type;
1308  int code_age_index =
1310  DCHECK(code_sub_type_index >= FIRST_CODE_KIND_SUB_TYPE &&
1311  code_sub_type_index < FIRST_CODE_AGE_SUB_TYPE);
1312  DCHECK(code_age_index >= FIRST_CODE_AGE_SUB_TYPE &&
1313  code_age_index < OBJECT_STATS_COUNT);
1314  object_counts_[code_sub_type_index]++;
1315  object_sizes_[code_sub_type_index] += size;
1316  object_counts_[code_age_index]++;
1317  object_sizes_[code_age_index] += size;
1318  }
1319 
1320  void RecordFixedArraySubTypeStats(int array_sub_type, size_t size) {
1321  DCHECK(array_sub_type <= LAST_FIXED_ARRAY_SUB_TYPE);
1322  object_counts_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type]++;
1323  object_sizes_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type] += size;
1324  }
1325 
1326  void CheckpointObjectStats();
1327 
1328  // We don't use a LockGuard here since we want to lock the heap
1329  // only when FLAG_concurrent_recompilation is true.
1331  public:
1332  explicit RelocationLock(Heap* heap) : heap_(heap) {
1333  heap_->relocation_mutex_.Lock();
1334  }
1335 
1336 
1337     ~RelocationLock() { heap_->relocation_mutex_.Unlock(); }
1338 
1339  private:
1340     Heap* heap_;
1341  };
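  // For illustration, a sketch of the intended RAII usage (hypothetical
  // caller):
  //   {
  //     Heap::RelocationLock relocation_lock(heap);
  //     // ... work that must not race with object relocation ...
  //   }  // relocation_mutex_ is released when relocation_lock leaves scope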
1342 
1344  Handle<DependentCode> dep);
1345 
1347 
1349  set_weak_object_to_code_table(undefined_value());
1350  }
1351 
1353 
1354  static void FatalProcessOutOfMemory(const char* location,
1355  bool take_snapshot = false);
1356 
1357  // This event is triggered after successful allocation of a new object made
1358   // by the runtime. Allocations of target space for object evacuation do not
1359  // trigger the event. In order to track ALL allocations one must turn off
1360  // FLAG_inline_new and FLAG_use_allocation_folding.
1361  inline void OnAllocationEvent(HeapObject* object, int size_in_bytes);
1362 
1363   // This event is triggered after an object is moved to a new place.
1364  inline void OnMoveEvent(HeapObject* target, HeapObject* source,
1365  int size_in_bytes);
1366 
1367  protected:
1368  // Methods made available to tests.
1369 
1370  // Allocates a JS Map in the heap.
1372  AllocateMap(InstanceType instance_type, int instance_size,
1373  ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND);
1374 
1375  // Allocates and initializes a new JavaScript object based on a
1376  // constructor.
1377  // If allocation_site is non-null, then a memento is emitted after the object
1378  // that points to the site.
1380  AllocateJSObject(JSFunction* constructor,
1381  PretenureFlag pretenure = NOT_TENURED,
1382  AllocationSite* allocation_site = NULL);
1383 
1384  // Allocates and initializes a new JavaScript object based on a map.
1385  // Passing an allocation site means that a memento will be created that
1386  // points to the site.
1389  bool alloc_props = true,
1390  AllocationSite* allocation_site = NULL);
1391 
1392   // Allocates a HeapNumber from value.
1394  AllocateHeapNumber(double value, MutableMode mode = IMMUTABLE,
1395  PretenureFlag pretenure = NOT_TENURED);
1396 
1397  // Allocate a byte array of the specified length
1399  AllocateByteArray(int length, PretenureFlag pretenure = NOT_TENURED);
1400 
1401  // Copy the code and scope info part of the code object, but insert
1402  // the provided data as the relocation information.
1404  CopyCode(Code* code, Vector<byte> reloc_info);
1405 
1407 
1408  // Allocates a fixed array initialized with undefined values
1410  AllocateFixedArray(int length, PretenureFlag pretenure = NOT_TENURED);
1411 
1412  private:
1413  Heap();
1414 
1415  // The amount of external memory registered through the API kept alive
1416  // by global handles
1418 
1419  // Caches the amount of external memory registered at the last global gc.
1421 
1422  // This can be calculated directly from a pointer to the heap; however, it is
1423  // more expedient to get at the isolate directly from within Heap methods.
1425 
1427 
1435 
1436  // For keeping track of how much data has survived
1437  // scavenge since last new space expansion.
1439 
1440   // For keeping track of when to flush RegExp code.
1442 
1444 
1445  // For keeping track of context disposals.
1447 
1449 
1451 
1453 
1465 
1466  // Returns the amount of external memory registered since last global gc.
1467  int64_t PromotedExternalMemorySize();
1468 
1469  // How many "runtime allocations" happened.
1471 
1472  // Running hash over allocations performed.
1474 
1475  // Countdown counter, dumps allocation hash when 0.
1477 
1478  // How many mark-sweep collections happened.
1479  unsigned int ms_count_;
1480 
1481  // How many gc happened.
1482  unsigned int gc_count_;
1483 
1484  // For post mortem debugging.
1485  static const int kRememberedUnmappedPages = 128;
1488 
1489  // Total length of the strings we failed to flatten since the last GC.
1491 
1492 #define ROOT_ACCESSOR(type, name, camel_name) \
1493  inline void set_##name(type* value) { \
1494  /* The deserializer makes use of the fact that these common roots are */ \
1495  /* never in new space and never on a page that is being compacted. */ \
1496  DCHECK(k##camel_name##RootIndex >= kOldSpaceRoots || !InNewSpace(value)); \
1497  roots_[k##camel_name##RootIndex] = value; \
1498  }
1499   ROOT_LIST(ROOT_ACCESSOR)
1500 #undef ROOT_ACCESSOR
1501 
1502 #ifdef DEBUG
1503  // If the --gc-interval flag is set to a positive value, this
1504  // variable holds the value indicating the number of allocations
1505   // remaining until the next failure and garbage collection.
1506  int allocation_timeout_;
1507 #endif // DEBUG
1508 
1509  // Limit that triggers a global GC on the next (normally caused) GC. This
1510  // is checked when we have already decided to do a GC to help determine
1511  // which collector to invoke, before expanding a paged space in the old
1512  // generation and on every allocation in large object space.
1514 
1515  // Indicates that an allocation has failed in the old generation since the
1516  // last GC.
1518 
1519  // Indicates that inline bump-pointer allocation has been globally disabled
1520  // for all spaces. This is used to disable allocations in generated code.
1522 
1523  // Weak list heads, threaded through the objects.
1524   // List heads are initialized lazily and contain the undefined_value at start.
1528 
1529  // WeakHashTable that maps objects embedded in optimized code to dependent
1530   // code list. It is initialized lazily and contains the undefined_value at
1531  // start.
1533 
1534  // List of encountered weak collections (JSWeakMap and JSWeakSet) during
1535  // marking. It is initialized during marking, destroyed after marking and
1536  // contains Smi(0) while marking is not active.
1538 
1540 
1543  int size;
1545  };
1546 
1548  const char* contents;
1550  };
1551 
1552  struct StructTable {
1554  int size;
1556  };
1557 
1560  static const StructTable struct_table[];
1561 
1562  // The special hidden string which is an empty string, but does not match
1563  // any string when looked up in properties.
1565 
1566  // GC callback function, called before and after mark-compact GC.
1567  // Allocations in the callback function are disallowed.
1570  GCType gc_type, bool pass_isolate)
1571  : callback(callback), gc_type(gc_type), pass_isolate_(pass_isolate) {}
1572  bool operator==(const GCPrologueCallbackPair& pair) const {
1573  return pair.callback == callback;
1574  }
1577  // TODO(dcarney): remove variable
1579  };
1581 
1584  GCType gc_type, bool pass_isolate)
1585  : callback(callback), gc_type(gc_type), pass_isolate_(pass_isolate) {}
1586  bool operator==(const GCEpilogueCallbackPair& pair) const {
1587  return pair.callback == callback;
1588  }
1591  // TODO(dcarney): remove variable
1593  };
1595 
1596  // Support for computing object sizes during GC.
1598  static int GcSafeSizeOfOldObject(HeapObject* object);
1599 
1600  // Update the GC state. Called from the mark-compact collector.
1601  void MarkMapPointersAsEncoded(bool encoded) {
1602  DCHECK(!encoded);
1604  }
1605 
1606  // Code that should be run before and after each GC. Includes some
1607  // reporting/verification activities when compiled with DEBUG set.
1610 
1611  // Pretenuring decisions are made based on feedback collected during new
1612  // space evacuation. Note that between feedback collection and calling this
1613   // method, objects in old space must not move.
1614  // Right now we only process pretenuring feedback in high promotion mode.
1616 
1617  // Checks whether a global GC is necessary
1619  const char** reason);
1620 
1621  // Make sure there is a filler value behind the top of the new space
1622   // so that the GC does not confuse some uninitialized/stale memory
1623  // with the allocation memento of the object at the top
1624  void EnsureFillerObjectAtTop();
1625 
1626  // Ensure that we have swept all spaces in such a way that we can iterate
1627  // over all objects. May cause a GC.
1628  void MakeHeapIterable();
1629 
1630  // Performs garbage collection operation.
1631  // Returns whether there is a chance that another major GC could
1632  // collect more garbage.
1633  bool CollectGarbage(
1634  GarbageCollector collector, const char* gc_reason,
1635  const char* collector_reason,
1636  const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
1637 
1638  // Performs garbage collection
1639  // Returns whether there is a chance another major GC could
1640  // collect more garbage.
1642  GarbageCollector collector,
1643  const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
1644 
1645  inline void UpdateOldSpaceLimits();
1646 
1647  // Selects the proper allocation space depending on the given object
1648  // size, pretenuring decision, and preferred old-space.
1649  static AllocationSpace SelectSpace(int object_size,
1650  AllocationSpace preferred_old_space,
1651  PretenureFlag pretenure) {
1652  DCHECK(preferred_old_space == OLD_POINTER_SPACE ||
1653  preferred_old_space == OLD_DATA_SPACE);
1654  if (object_size > Page::kMaxRegularHeapObjectSize) return LO_SPACE;
1655  return (pretenure == TENURED) ? preferred_old_space : NEW_SPACE;
1656  }
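  // For illustration: a TENURED request of a few KB goes to the preferred old
  // space, the same request NOT_TENURED goes to NEW_SPACE, and anything larger
  // than Page::kMaxRegularHeapObjectSize goes to LO_SPACE regardless of the
  // pretenuring decision.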
1657 
1658  // Allocate an uninitialized object. The memory is non-executable if the
1659  // hardware and OS allow. This is the single choke-point for allocations
1660  // performed by the runtime and should not be bypassed (to extend this to
1661  // inlined allocations, use the Heap::DisableInlineAllocation() support).
1663  int size_in_bytes, AllocationSpace space, AllocationSpace retry_space);
1664 
1665  // Allocates a heap object based on the map.
1668  AllocationSite* allocation_site = NULL);
1669 
1670  // Allocates a partial map for bootstrapping.
1672  AllocatePartialMap(InstanceType instance_type, int instance_size);
1673 
1674  // Initializes a JSObject based on its map.
1675  void InitializeJSObjectFromMap(JSObject* obj, FixedArray* properties,
1676  Map* map);
1678  AllocationSite* allocation_site);
1679 
1680  // Allocate a block of memory in the given space (filled with a filler).
1681  // Used as a fall-back for generated code when the space is full.
1683  AllocateFillerObject(int size, bool double_align, AllocationSpace space);
1684 
1685  // Allocate an uninitialized fixed array.
1687  AllocateRawFixedArray(int length, PretenureFlag pretenure);
1688 
1689  // Allocate an uninitialized fixed double array.
1691  AllocateRawFixedDoubleArray(int length, PretenureFlag pretenure);
1692 
1693  // Allocate an initialized fixed array with the given filler value.
1695  AllocateFixedArrayWithFiller(int length, PretenureFlag pretenure,
1696  Object* filler);
1697 
1698   // Allocates and partially initializes a String. There are two String
1699  // encodings: one-byte and two-byte. These functions allocate a string of
1700  // the given length and set its map and length fields. The characters of
1701  // the string are uninitialized.
1703  AllocateRawOneByteString(int length, PretenureFlag pretenure);
1705  AllocateRawTwoByteString(int length, PretenureFlag pretenure);
1706 
1707  bool CreateInitialMaps();
1708  void CreateInitialObjects();
1709 
1710  // Allocates an internalized string in old space based on the character
1711  // stream.
1713  Vector<const char> str, int chars, uint32_t hash_field);
1714 
1716  Vector<const uint8_t> str, uint32_t hash_field);
1717 
1719  Vector<const uc16> str, uint32_t hash_field);
1720 
1721  template <bool is_one_byte, typename T>
1723  AllocateInternalizedStringImpl(T t, int chars, uint32_t hash_field);
1724 
1725  template <typename T>
1727  T t, int chars, uint32_t hash_field);
1728 
1729  // Allocates an uninitialized fixed array. It must be filled by the caller.
1731 
1732  // Make a copy of src and return it. Returns
1733  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
1735 
1736  // Make a copy of src, set the map, and return the copy. Returns
1737  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
1740 
1741  // Make a copy of src and return it. Returns
1742  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
1744  FixedDoubleArray* src);
1745 
1746  // Make a copy of src and return it. Returns
1747  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
1749  ConstantPoolArray* src);
1750 
1751 
1752  // Computes a single character string where the character has code.
1753  // A cache is used for one-byte (Latin1) codes.
1756 
1757  // Allocate a symbol in old space.
1759 
1760  // Make a copy of src, set the map, and return the copy.
1763 
1765  const ConstantPoolArray::NumberOfEntries& small);
1766 
1768  const ConstantPoolArray::NumberOfEntries& small,
1769  const ConstantPoolArray::NumberOfEntries& extended);
1770 
1771  // Allocates an external array of the specified length and type.
1773  AllocateExternalArray(int length, ExternalArrayType array_type,
1774  void* external_pointer, PretenureFlag pretenure);
1775 
1776  // Allocates a fixed typed array of the specified length and type.
1778  AllocateFixedTypedArray(int length, ExternalArrayType array_type,
1779  PretenureFlag pretenure);
1780 
1781  // Make a copy of src and return it.
1783 
1784  // Make a copy of src, set the map, and return the copy.
1787 
1788  // Allocates a fixed double array with uninitialized values. Returns
1790  int length, PretenureFlag pretenure = NOT_TENURED);
1791 
1792   // These Create*EntryStub functions are here and forced not to be inlined
1793   // because of a gcc-4.4 bug that assigns wrong vtable entries.
1794  NO_INLINE(void CreateJSEntryStub());
1795  NO_INLINE(void CreateJSConstructEntryStub());
1796 
1797  void CreateFixedStubs();
1798 
1799  // Allocate empty fixed array.
1801 
1802  // Allocate empty external array of given type.
1805 
1806  // Allocate empty fixed typed array of given type.
1809 
1810  // Allocate empty constant pool array.
1812 
1813  // Allocate a tenured simple cell.
1815 
1816  // Allocate a tenured JS global property cell initialized with the hole.
1818 
1819  // Allocates a new utility object in the old generation.
1821 
1822  // Allocates a new foreign object.
1824  AllocateForeign(Address address, PretenureFlag pretenure = NOT_TENURED);
1825 
1827  AllocateCode(int object_size, bool immovable);
1828 
1830 
1832 
1833  // Performs a minor collection in the new generation.
1834  void Scavenge();
1835 
1836  // Commits from space if it is uncommitted.
1838 
1839  // Uncommit unused semi space.
1841 
1842  // Fills in bogus values in from space.
1843  void ZapFromSpace();
1844 
1846  Heap* heap, Object** pointer);
1847 
1848  Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);
1849  static void ScavengeStoreBufferCallback(Heap* heap, MemoryChunk* page,
1850  StoreBufferEvent event);
1851 
1852  // Performs a major collection in the whole heap.
1853  void MarkCompact();
1854 
1855  // Code to be run before and after mark-compact.
1856  void MarkCompactPrologue();
1857 
1858  void ProcessNativeContexts(WeakObjectRetainer* retainer);
1859  void ProcessArrayBuffers(WeakObjectRetainer* retainer);
1861 
1862  // Deopts all code that contains allocation instructions which are tenured or
1863  // not tenured. Moreover, it clears the pretenuring allocation site statistics.
1865 
1866  // Evaluates local pretenuring for the old space and calls
1867  // ResetAllTenuredAllocationSitesDependentCode if too many objects died in
1868  // the old space.
1869  void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc);
1870 
1871  // Called on heap tear-down.
1872  void TearDownArrayBuffers();
1873 
1874  // Record statistics before and after garbage collection.
1875  void ReportStatisticsBeforeGC();
1876  void ReportStatisticsAfterGC();
1877 
1878  // Slow part of scavenge object.
1879  static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
1880 
1881  // Total RegExp code ever generated
1883 
1885 
1886  // Creates and installs the full-sized number string cache.
1888  // Flush the number to string cache.
1889  void FlushNumberStringCache();
1890 
1891  // Sets used allocation sites entries to undefined.
1893 
1894  // Initializes the allocation sites scratchpad with undefined values.
1896 
1897  // Adds an allocation site to the scratchpad if there is space left.
1900 
1901  void UpdateSurvivalStatistics(int start_new_space_size);
1902 
1903  static const int kYoungSurvivalRateHighThreshold = 90;
1904  static const int kYoungSurvivalRateAllowedDeviation = 15;
1905 
1906  static const int kOldSurvivalRateLowThreshold = 10;
1907 
1916 
1917  // This is the pretenuring trigger for allocation sites that are in the
1918  // maybe-tenure state. Once we have switched to the maximum new space size,
1919  // we deoptimize the code that belongs to the allocation site and derive
1920  // the lifetime of the allocation site.
1922 
1923  // TODO(hpayer): Allocation site pretenuring may make this method obsolete.
1924  // Re-visit incremental marking heuristics.
1926 
1928 
1929  void AdvanceIdleIncrementalMarking(intptr_t step_size);
1930 
1932 
1933  void ClearObjectStats(bool clear_last_time_stats = false);
1934 
1935  void set_weak_object_to_code_table(Object* value) {
1936  DCHECK(!InNewSpace(value));
1937  weak_object_to_code_table_ = value;
1938  }
1939 
1940  Object** weak_object_to_code_table_address() {
1941  return &weak_object_to_code_table_;
1942  }
1943 
1944  inline void UpdateAllocationsHash(HeapObject* object);
1945  inline void UpdateAllocationsHash(uint32_t value);
1946  inline void PrintAlloctionsHash();
1947 
1948  static const int kInitialStringTableSize = 2048;
1949  static const int kInitialEvalCacheSize = 64;
1950  static const int kInitialNumberStringCacheSize = 256;
1951 
1952  // Object counts and used memory by InstanceType
1957 
1958  // Maximum GC pause.
1960 
1961  // Total time spent in GC.
1963 
1964  // Maximum size of objects alive after GC.
1966 
1967  // Minimal interval between two subsequent collections.
1969 
1970  // Cumulative GC time spent in marking
1972 
1973  // Cumulative GC time spent in sweeping
1975 
1977 
1979 
1981 
1983 
1986 
1987  // These two counters are monotonically increasing and never reset.
1990 
1991  // If the --deopt_every_n_garbage_collections flag is set to a positive value,
1992  // this variable holds the number of garbage collections since the last
1993  // deoptimization triggered by garbage collection.
1995 
1996 #ifdef VERIFY_HEAP
1997  int no_weak_object_verification_scope_depth_;
1998 #endif
1999 
2000  static const int kAllocationSiteScratchpadSize = 256;
2002 
2003  static const int kMaxMarkCompactsInIdleRound = 7;
2004  static const int kIdleScavengeThreshold = 5;
2005 
2006  // Shared state read by the scavenge collector and set by ScavengeObject.
2008 
2009  // Flag is set when the heap has been configured. The heap can be repeatedly
2010  // configured through the API until it is set up.
2012 
2014 
2016 
2018 
2019  base::Mutex relocation_mutex_;
2020 
2022 
2023  friend class AlwaysAllocateScope;
2024  friend class Deserializer;
2025  friend class Factory;
2026  friend class GCCallbacksScope;
2027  friend class GCTracer;
2028  friend class HeapIterator;
2029  friend class Isolate;
2030  friend class MarkCompactCollector;
2032  friend class MapCompact;
2033 #ifdef VERIFY_HEAP
2034  friend class NoWeakObjectVerificationScope;
2035 #endif
2036  friend class Page;
2037 
2039 };
2040 
2041 
2042 class HeapStats {
2043  public:
2044  static const int kStartMarker = 0xDECADE00;
2045  static const int kEndMarker = 0xDECADE01;
2046 
2047  int* start_marker; // 0
2048  int* new_space_size; // 1
2049  int* new_space_capacity; // 2
2050  intptr_t* old_pointer_space_size; // 3
2051  intptr_t* old_pointer_space_capacity; // 4
2052  intptr_t* old_data_space_size; // 5
2053  intptr_t* old_data_space_capacity; // 6
2054  intptr_t* code_space_size; // 7
2055  intptr_t* code_space_capacity; // 8
2056  intptr_t* map_space_size; // 9
2057  intptr_t* map_space_capacity; // 10
2058  intptr_t* cell_space_size; // 11
2059  intptr_t* cell_space_capacity; // 12
2060  intptr_t* lo_space_size; // 13
2061  int* global_handle_count; // 14
2062  int* weak_global_handle_count; // 15
2063  int* pending_global_handle_count; // 16
2064  int* near_death_global_handle_count; // 17
2065  int* free_global_handle_count; // 18
2066  intptr_t* memory_allocator_size; // 19
2067  intptr_t* memory_allocator_capacity; // 20
2068  int* objects_per_type; // 21
2069  int* size_per_type; // 22
2070  int* os_error; // 23
2071  int* end_marker; // 24
2072  intptr_t* property_cell_space_size; // 25
2073  intptr_t* property_cell_space_capacity; // 26
2074 };
2075 
2076 
2077 class AlwaysAllocateScope {
2078  public:
2079  explicit inline AlwaysAllocateScope(Isolate* isolate);
2080  inline ~AlwaysAllocateScope();
2081 
2082  private:
2083  // Implicitly disable artificial allocation failures.
2085  DisallowAllocationFailure daf_;
2086 };
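A minimal usage sketch (the call site below is hypothetical and not part of heap.h): while an AlwaysAllocateScope is alive, artificial allocation failures are suppressed, and the DisallowAllocationFailure member above is torn down again when the scope ends.

// Hypothetical call site: suppress artificial allocation failures
// for the duration of a block.
void TouchHeap(Isolate* isolate) {
  AlwaysAllocateScope always_allocate(isolate);
  // ... perform allocations that must not be failed artificially ...
}  // Scope destructor runs here, re-enabling artificial failures.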
2087 
2088 
2089 #ifdef VERIFY_HEAP
2090 class NoWeakObjectVerificationScope {
2091  public:
2092  inline NoWeakObjectVerificationScope();
2093  inline ~NoWeakObjectVerificationScope();
2094 };
2095 #endif
2096 
2097 
2098 class GCCallbacksScope {
2099  public:
2100  explicit inline GCCallbacksScope(Heap* heap);
2101  inline ~GCCallbacksScope();
2102 
2103  inline bool CheckReenter();
2104 
2105  private:
2106  Heap* heap_;
2107 };
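A sketch of the intended call pattern, assuming CheckReenter() reports whether this is the outermost (non-re-entrant) scope; the callback-dispatching body is left as a placeholder.

// Hypothetical caller: only the outermost scope dispatches callbacks,
// so re-entrant GCs do not fire them twice.
void MaybeCallPrologueCallbacks(Heap* heap, GCType gc_type) {
  GCCallbacksScope scope(heap);
  if (scope.CheckReenter()) {
    // ... invoke the registered GC prologue callbacks for gc_type here ...
  }
}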
2108 
2109 
2110 // Visitor class to verify interior pointers in spaces that do not contain
2111 // or care about intergenerational references. All heap object pointers have to
2112 // point into the heap to a location that has a map pointer at its first word.
2113 // Caveat: Heap::Contains is an approximation because it can return true for
2114 // objects in a heap space but above the allocation pointer.
2116  public:
2117  inline void VisitPointers(Object** start, Object** end);
2118 };
2119 
2120 
2121 // Verify that all objects are Smis.
2123  public:
2124  inline void VisitPointers(Object** start, Object** end);
2125 };
2126 
2127 
2128 // Space iterator for iterating over all spaces of the heap. Returns each space
2129 // in turn, and null when it is done.
2130 class AllSpaces BASE_EMBEDDED {
2131  public:
2132  explicit AllSpaces(Heap* heap) : heap_(heap), counter_(FIRST_SPACE) {}
2133  Space* next();
2134 
2135  private:
2136  Heap* heap_;
2137  int counter_;
2138 };
2139 
2140 
2141 // Space iterator for iterating over all old spaces of the heap: Old pointer
2142 // space, old data space and code space. Returns each space in turn, and null
2143 // when it is done.
2144 class OldSpaces BASE_EMBEDDED {
2145  public:
2146  explicit OldSpaces(Heap* heap) : heap_(heap), counter_(OLD_POINTER_SPACE) {}
2147  OldSpace* next();
2148 
2149  private:
2150  Heap* heap_;
2151  int counter_;
2152 };
2153 
2154 
2155 // Space iterator for iterating over all the paged spaces of the heap: Map
2156 // space, old pointer space, old data space, code space and cell space. Returns
2157 // each space in turn, and null when it is done.
2158 class PagedSpaces BASE_EMBEDDED {
2159  public:
2160  explicit PagedSpaces(Heap* heap) : heap_(heap), counter_(OLD_POINTER_SPACE) {}
2161  PagedSpace* next();
2162 
2163  private:
2164  Heap* heap_;
2165  int counter_;
2166 };
2167 
2168 
2169 // Space iterator for iterating over all spaces of the heap.
2170 // For each space an object iterator is provided. The deallocation of the
2171 // returned object iterators is handled by the space iterator.
2172 class SpaceIterator : public Malloced {
2173  public:
2174  explicit SpaceIterator(Heap* heap);
2175  SpaceIterator(Heap* heap, HeapObjectCallback size_func);
2176  virtual ~SpaceIterator();
2177 
2178  bool has_next();
2179  ObjectIterator* next();
2180 
2181  private:
2182  ObjectIterator* CreateIterator();
2183 
2184  Heap* heap_;
2185  int current_space_; // from enum AllocationSpace.
2186  ObjectIterator* iterator_; // object iterator for the current space.
2187  HeapObjectCallback size_func_;
2188 };
2189 
2190 
2191 // A HeapIterator provides iteration over the whole heap. It
2192 // aggregates the specific iterators for the different spaces as
2193 // each of these can iterate over only one space.
2194 //
2195 // HeapIterator ensures there is no allocation during its lifetime
2196 // (using an embedded DisallowHeapAllocation instance).
2197 //
2198 // HeapIterator can skip free list nodes (that is, de-allocated heap
2199 // objects that still remain in the heap). As the implementation of free-
2200 // node filtering uses GC marks, it can't be used during MS/MC GC
2201 // phases. Also, it is forbidden to interrupt iteration in this mode,
2202 // as this will leave heap objects marked (and thus, unusable).
2203 class HeapObjectsFilter;
2204 
2205 class HeapIterator BASE_EMBEDDED {
2206  public:
2207  enum HeapObjectsFiltering { kNoFiltering, kFilterUnreachable };
2208 
2209  explicit HeapIterator(Heap* heap);
2210  HeapIterator(Heap* heap, HeapObjectsFiltering filtering);
2211  ~HeapIterator();
2212 
2213  HeapObject* next();
2214  void reset();
2215 
2216  private:
2217  struct MakeHeapIterableHelper {
2218  explicit MakeHeapIterableHelper(Heap* heap) { heap->MakeHeapIterable(); }
2219  };
2220 
2221  // Perform the initialization.
2222  void Init();
2223  // Perform all necessary shutdown (destruction) work.
2224  void Shutdown();
2226 
2227  MakeHeapIterableHelper make_heap_iterable_helper_;
2228  DisallowHeapAllocation no_heap_allocation_;
2229  Heap* heap_;
2230  HeapObjectsFiltering filtering_;
2231  HeapObjectsFilter* filter_;
2232  // Space iterator for iterating all the spaces.
2233  SpaceIterator* space_iterator_;
2234  // Object iterator for the space currently being iterated.
2235  ObjectIterator* object_iterator_;
2236 };
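A sketch of a whole-heap walk using the interface above, assuming the usual protocol in which next() returns each HeapObject and NULL when iteration is complete; kFilterUnreachable additionally skips free-list nodes, with the restrictions noted in the class comment.

// Hypothetical helper: count objects reachable in the whole heap.
int CountReachableObjects(Heap* heap) {
  HeapIterator iterator(heap, HeapIterator::kFilterUnreachable);
  int count = 0;
  for (HeapObject* obj = iterator.next(); obj != NULL;
       obj = iterator.next()) {
    count++;
  }
  return count;
}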
2237 
2238 
2239 // Cache for mapping (map, property name) into field offset.
2240 // Cleared at startup and prior to mark sweep collection.
2241 class KeyedLookupCache {
2242  public:
2243  // Lookup field offset for (map, name). If absent, -1 is returned.
2245 
2246  // Update an element in the cache.
2247  void Update(Handle<Map> map, Handle<Name> name, int field_offset);
2248 
2249  // Clear the cache.
2250  void Clear();
2251 
2252  static const int kLength = 256;
2253  static const int kCapacityMask = kLength - 1;
2254  static const int kMapHashShift = 5;
2255  static const int kHashMask = -4; // Zero the last two bits.
2256  static const int kEntriesPerBucket = 4;
2257  static const int kEntryLength = 2;
2258  static const int kMapIndex = 0;
2259  static const int kKeyIndex = 1;
2260  static const int kNotFound = -1;
2261 
2262  // kEntriesPerBucket should be a power of 2.
2265 
2266  private:
2267  KeyedLookupCache() {
2268  for (int i = 0; i < kLength; ++i) {
2269  keys_[i].map = NULL;
2270  keys_[i].name = NULL;
2271  field_offsets_[i] = kNotFound;
2272  }
2273  }
2274 
2275  static inline int Hash(Handle<Map> map, Handle<Name> name);
2276 
2277  // Get the address of the keys and field_offsets arrays. Used in
2278  // generated code to perform cache lookups.
2279  Address keys_address() { return reinterpret_cast<Address>(&keys_); }
2280 
2281  Address field_offsets_address() {
2282  return reinterpret_cast<Address>(&field_offsets_);
2283  }
2284 
2285  struct Key {
2286  Map* map;
2287  Name* name;
2288  };
2289 
2290  Key keys_[kLength];
2291  int field_offsets_[kLength];
2292 
2293  friend class ExternalReference;
2294  friend class Isolate;
2296 };
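The constants above describe a direct-mapped cache of 256 entries grouped into buckets of four: kHashMask == -4 zeroes the low two bits of a hash, so a probe starts at a bucket boundary and scans kEntriesPerBucket slots. A free-standing sketch with stand-in types, assuming the hash has already been reduced modulo kLength (the real Lookup/Update live in heap.cc):

// Stand-in key type; this illustrates how the constants combine,
// not the actual heap.cc implementation.
struct KeySketch { void* map; void* name; };

int LookupSketch(const KeySketch* keys, const int* field_offsets,
                 void* map, void* name, int hash) {
  int bucket = hash & -4;                      // kHashMask: bucket start
  for (int i = bucket; i < bucket + 4; i++) {  // kEntriesPerBucket == 4
    if (keys[i].map == map && keys[i].name == name) return field_offsets[i];
  }
  return -1;                                   // kNotFound
}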
2297 
2298 
2299 // Cache for mapping (map, property name) into descriptor index.
2300 // The cache contains both positive and negative results.
2301 // A descriptor index equal to kNotFound means the property is absent.
2302 // Cleared at startup and prior to any gc.
2303 class DescriptorLookupCache {
2304  public:
2305  // Lookup descriptor index for (map, name).
2306  // If absent, kAbsent is returned.
2307  int Lookup(Map* source, Name* name) {
2308  if (!name->IsUniqueName()) return kAbsent;
2309  int index = Hash(source, name);
2310  Key& key = keys_[index];
2311  if ((key.source == source) && (key.name == name)) return results_[index];
2312  return kAbsent;
2313  }
2314 
2315  // Update an element in the cache.
2316  void Update(Map* source, Name* name, int result) {
2317  DCHECK(result != kAbsent);
2318  if (name->IsUniqueName()) {
2319  int index = Hash(source, name);
2320  Key& key = keys_[index];
2321  key.source = source;
2322  key.name = name;
2323  results_[index] = result;
2324  }
2325  }
2326 
2327  // Clear the cache.
2328  void Clear();
2329 
2330  static const int kAbsent = -2;
2331 
2332  private:
2333  DescriptorLookupCache() {
2334  for (int i = 0; i < kLength; ++i) {
2335  keys_[i].source = NULL;
2336  keys_[i].name = NULL;
2337  results_[i] = kAbsent;
2338  }
2339  }
2340 
2341  static int Hash(Object* source, Name* name) {
2342  // Uses only the lower 32 bits if pointers are larger.
2343  uint32_t source_hash =
2344  static_cast<uint32_t>(reinterpret_cast<uintptr_t>(source)) >>
2345  kPointerSizeLog2;
2346  uint32_t name_hash =
2347  static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)) >>
2348  kPointerSizeLog2;
2349  return (source_hash ^ name_hash) % kLength;
2350  }
2351 
2352  static const int kLength = 64;
2353  struct Key {
2354  Map* source;
2355  Name* name;
2356  };
2357 
2358  Key keys_[kLength];
2359  int results_[kLength];
2360 
2361  friend class Isolate;
2363 };
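Typical use, sketched with a hypothetical slow-path helper: consult the cache first, and on a miss fall back to the real descriptor search and record the answer (which must never equal kAbsent).

// FindDescriptorSlow is a stand-in for the real descriptor search and
// is assumed never to return DescriptorLookupCache::kAbsent.
int FindDescriptorSlow(Map* map, Name* name);  // hypothetical

int CachedDescriptorLookup(DescriptorLookupCache* cache, Map* map, Name* name) {
  int index = cache->Lookup(map, name);
  if (index == DescriptorLookupCache::kAbsent) {
    index = FindDescriptorSlow(map, name);  // stand-in slow path
    cache->Update(map, name, index);
  }
  return index;
}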
2364 
2365 
2366 class RegExpResultsCache {
2367  public:
2369 
2370  // Attempt to retrieve a cached result. On failure, 0 is returned as a Smi.
2371  // On success, the returned result is guaranteed to be a COW-array.
2372  static Object* Lookup(Heap* heap, String* key_string, Object* key_pattern,
2373  ResultsCacheType type);
2374  // Attempt to add value_array to the cache specified by type. On success,
2375  // value_array is turned into a COW-array.
2376  static void Enter(Isolate* isolate, Handle<String> key_string,
2377  Handle<Object> key_pattern, Handle<FixedArray> value_array,
2378  ResultsCacheType type);
2379  static void Clear(FixedArray* cache);
2380  static const int kRegExpResultsCacheSize = 0x100;
2381 
2382  private:
2383  static const int kArrayEntriesPerCacheEntry = 4;
2384  static const int kStringOffset = 0;
2385  static const int kPatternOffset = 1;
2386  static const int kArrayOffset = 2;
2387 };
2388 
2389 
2390 // Abstract base class for checking whether a weak object should be retained.
2391 class WeakObjectRetainer {
2392  public:
2393  virtual ~WeakObjectRetainer() {}
2394 
2395  // Return whether this object should be retained. If NULL is returned, the
2396  // object has no references. Otherwise the address of the retained object
2397  // should be returned, as in some GC situations the object has been moved.
2398  virtual Object* RetainAs(Object* object) = 0;
2399 };
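The RetainAs() contract above (NULL means the object is dropped; any other value is the possibly-moved address of the retained object) can be illustrated with a hypothetical retainer that keeps everything:

// Hypothetical retainer: keep every object at its current address.
class KeepAllRetainer : public WeakObjectRetainer {
 public:
  virtual Object* RetainAs(Object* object) {
    return object;  // Returning NULL instead would drop the object.
  }
};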
2400 
2401 
2402 // Intrusive object marking uses the least significant bit of a
2403 // heap object's map word to mark objects.
2404 // Normally all map words have the least significant bit set
2405 // because they contain a tagged map pointer.
2406 // If the bit is not set, the object is marked.
2407 // All objects should be unmarked before resuming
2408 // JavaScript execution.
2409 class IntrusiveMarking {
2410  public:
2411  static bool IsMarked(HeapObject* object) {
2412  return (object->map_word().ToRawValue() & kNotMarkedBit) == 0;
2413  }
2414 
2415  static void ClearMark(HeapObject* object) {
2416  uintptr_t map_word = object->map_word().ToRawValue();
2417  object->set_map_word(MapWord::FromRawValue(map_word | kNotMarkedBit));
2418  DCHECK(!IsMarked(object));
2419  }
2420 
2421  static void SetMark(HeapObject* object) {
2422  uintptr_t map_word = object->map_word().ToRawValue();
2423  object->set_map_word(MapWord::FromRawValue(map_word & ~kNotMarkedBit));
2424  DCHECK(IsMarked(object));
2425  }
2426 
2427  static Map* MapOfMarkedObject(HeapObject* object) {
2428  uintptr_t map_word = object->map_word().ToRawValue();
2429  return MapWord::FromRawValue(map_word | kNotMarkedBit).ToMap();
2430  }
2431 
2432  static int SizeOfMarkedObject(HeapObject* object) {
2433  return object->SizeFromMap(MapOfMarkedObject(object));
2434  }
2435 
2436  private:
2437  static const uintptr_t kNotMarkedBit = 0x1;
2438  STATIC_ASSERT((kHeapObjectTag & kNotMarkedBit) != 0);
2439 };
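Restated on raw words, the scheme relies on every legitimate map word carrying the tag bit, so clearing that bit is an unambiguous mark. A self-contained sketch, not part of heap.h (kNotMarkedBitSketch mirrors the constant above):

// Raw-word sketch of the marking trick described in the comment above.
#include <stdint.h>

static const uintptr_t kNotMarkedBitSketch = 0x1;

bool IsMarkedWord(uintptr_t map_word) {
  return (map_word & kNotMarkedBitSketch) == 0;  // bit cleared => marked
}
uintptr_t MarkWord(uintptr_t map_word)   { return map_word & ~kNotMarkedBitSketch; }
uintptr_t UnmarkWord(uintptr_t map_word) { return map_word | kNotMarkedBitSketch; }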
2440 
2441 
2442 #ifdef DEBUG
2443 // Helper class for tracing paths to a search target Object from all roots.
2444 // The TracePathFrom() method can be used to trace paths from a specific
2445 // object to the search target object.
2446 class PathTracer : public ObjectVisitor {
2447  public:
2448  enum WhatToFind {
2449  FIND_ALL, // Will find all matches.
2450  FIND_FIRST // Will stop the search after first match.
2451  };
2452 
2453  // Tags 0, 1, and 3 are used. Use 2 for marking visited HeapObjects.
2454  static const int kMarkTag = 2;
2455 
2456  // For the WhatToFind arg, if FIND_FIRST is specified, tracing will stop
2457  // after the first match. If FIND_ALL is specified, then tracing will be
2458  // done for all matches.
2459  PathTracer(Object* search_target, WhatToFind what_to_find,
2460  VisitMode visit_mode)
2461  : search_target_(search_target),
2462  found_target_(false),
2463  found_target_in_trace_(false),
2464  what_to_find_(what_to_find),
2465  visit_mode_(visit_mode),
2466  object_stack_(20),
2467  no_allocation() {}
2468 
2469  virtual void VisitPointers(Object** start, Object** end);
2470 
2471  void Reset();
2472  void TracePathFrom(Object** root);
2473 
2474  bool found() const { return found_target_; }
2475 
2476  static Object* const kAnyGlobalObject;
2477 
2478  protected:
2479  class MarkVisitor;
2480  class UnmarkVisitor;
2481 
2482  void MarkRecursively(Object** p, MarkVisitor* mark_visitor);
2483  void UnmarkRecursively(Object** p, UnmarkVisitor* unmark_visitor);
2484  virtual void ProcessResults();
2485 
2486  Object* search_target_;
2487  bool found_target_;
2488  bool found_target_in_trace_;
2489  WhatToFind what_to_find_;
2490  VisitMode visit_mode_;
2491  List<Object*> object_stack_;
2492 
2493  DisallowHeapAllocation no_allocation; // i.e. no gc allowed.
2494 
2495  private:
2496  DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);
2497 };
2498 #endif // DEBUG
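A debug-only usage sketch, assuming a root slot to start from; with FIND_FIRST the trace stops at the first path to the target. The VISIT_ALL visit mode is an assumption here, as is the helper itself.

#ifdef DEBUG
// Hypothetical debug helper: trace a path from one root slot to |target|.
void TracePathToTarget(Object* target, Object** root_slot) {
  PathTracer tracer(target, PathTracer::FIND_FIRST, VISIT_ALL);  // mode assumed
  tracer.TracePathFrom(root_slot);
  if (!tracer.found()) {
    // No path from this root; other roots would have to be tried.
  }
}
#endif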
2499 }
2500 } // namespace v8::internal
2501 
2502 #endif // V8_HEAP_HEAP_H_
Interface for iterating through all external resources in the heap.
Definition: v8.h:4943
void(* GCEpilogueCallback)(Isolate *isolate, GCType type, GCCallbackFlags flags)
Definition: v8.h:4651
void(* GCPrologueCallback)(Isolate *isolate, GCType type, GCCallbackFlags flags)
Definition: v8.h:4648
A JavaScript object (ECMA-262, 4.3.3)
Definition: v8.h:2283
AlwaysAllocateScope(Isolate *isolate)
Definition: heap-inl.h:717
DisallowAllocationFailure daf_
Definition: heap.h:2085
HeapObjectsFiltering filtering_
Definition: heap.h:2230
AllSpaces(Heap *heap)
Definition: heap.h:2132
PagedSpaces(Heap *heap)
Definition: heap.h:2160
MakeHeapIterableHelper make_heap_iterable_helper_
Definition: heap.h:2227
HeapIterator(Heap *heap, HeapObjectsFiltering filtering)
OldSpaces(Heap *heap)
Definition: heap.h:2146
SpaceIterator * space_iterator_
Definition: heap.h:2233
ObjectIterator * object_iterator_
Definition: heap.h:2235
DisallowHeapAllocation no_heap_allocation_
Definition: heap.h:2228
HeapObjectsFilter * filter_
Definition: heap.h:2231
static const T kMax
Definition: utils.h:209
static int Hash(Object *source, Name *name)
Definition: heap.h:2341
DISALLOW_COPY_AND_ASSIGN(DescriptorLookupCache)
int Lookup(Map *source, Name *name)
Definition: heap.h:2307
void Update(Map *source, Name *name, int result)
Definition: heap.h:2316
void Iterate(ObjectVisitor *v)
Definition: heap-inl.h:654
void AddOldString(String *string)
Definition: heap-inl.h:684
void AddString(String *string)
Definition: heap-inl.h:644
DISALLOW_COPY_AND_ASSIGN(ExternalStringTable)
ExternalStringTable(Heap *heap)
Definition: heap.h:489
List< Object * > old_space_strings_
Definition: heap.h:503
void ShrinkNewStrings(int position)
Definition: heap-inl.h:691
List< Object * > new_space_strings_
Definition: heap.h:502
MapWord map_word() const
Definition: objects-inl.h:1440
intptr_t * code_space_size
Definition: heap.h:2054
intptr_t * map_space_size
Definition: heap.h:2056
intptr_t * map_space_capacity
Definition: heap.h:2057
int * near_death_global_handle_count
Definition: heap.h:2064
static const int kStartMarker
Definition: heap.h:2044
intptr_t * lo_space_size
Definition: heap.h:2060
intptr_t * code_space_capacity
Definition: heap.h:2055
intptr_t * memory_allocator_capacity
Definition: heap.h:2067
intptr_t * old_data_space_size
Definition: heap.h:2052
intptr_t * cell_space_size
Definition: heap.h:2058
intptr_t * old_pointer_space_size
Definition: heap.h:2050
static const int kEndMarker
Definition: heap.h:2045
int * global_handle_count
Definition: heap.h:2061
int * pending_global_handle_count
Definition: heap.h:2063
intptr_t * memory_allocator_size
Definition: heap.h:2066
intptr_t * old_data_space_capacity
Definition: heap.h:2053
int * new_space_capacity
Definition: heap.h:2049
int * weak_global_handle_count
Definition: heap.h:2062
intptr_t * property_cell_space_capacity
Definition: heap.h:2073
int * free_global_handle_count
Definition: heap.h:2065
intptr_t * old_pointer_space_capacity
Definition: heap.h:2051
intptr_t * property_cell_space_size
Definition: heap.h:2072
intptr_t * cell_space_capacity
Definition: heap.h:2059
int64_t amount_of_external_allocated_memory_
Definition: heap.h:1417
size_t CommittedPhysicalMemory()
Definition: heap.cc:180
MUST_USE_RESULT AllocationResult CopyFixedArray(FixedArray *src)
Definition: heap-inl.h:148
List< GCPrologueCallbackPair > gc_prologue_callbacks_
Definition: heap.h:1580
MUST_USE_RESULT AllocationResult CopyJSObject(JSObject *source, AllocationSite *site=NULL)
Definition: heap.cc:3688
bool flush_monomorphic_ics_
Definition: heap.h:1450
intptr_t MaxOldGenerationSize()
Definition: heap.h:555
MUST_USE_RESULT AllocationResult AllocateRawOneByteString(int length, PretenureFlag pretenure)
Definition: heap.cc:3871
bool UncommitFromSpace()
Definition: heap.h:1840
void AdvanceIdleIncrementalMarking(intptr_t step_size)
Definition: heap.cc:4267
ExternalStringTable external_string_table_
Definition: heap.h:2013
bool Contains(Address addr)
Definition: heap.cc:4447
intptr_t max_old_generation_size_
Definition: heap.h:1432
Address remembered_unmapped_pages_[kRememberedUnmappedPages]
Definition: heap.h:1487
static const int kIdleScavengeThreshold
Definition: heap.h:2004
void EnsureFillerObjectAtTop()
Definition: heap.cc:770
Address NewSpaceStart()
Definition: heap.h:589
double max_gc_pause_
Definition: heap.h:1959
static const int kAllocationSiteScratchpadSize
Definition: heap.h:2000
void IterateRoots(ObjectVisitor *v, VisitMode mode)
Definition: heap.cc:4722
int nodes_copied_in_new_space_
Definition: heap.h:1914
void IncrementSemiSpaceCopiedObjectSize(int object_size)
Definition: heap.h:1119
static int GcSafeSizeOfOldObject(HeapObject *object)
Definition: heap.cc:228
void set_array_buffers_list(Object *object)
Definition: heap.h:795
HeapState gc_state()
Definition: heap.h:955
static const int kMaxSemiSpaceSizeHighMemoryDevice
Definition: heap.h:1019
Address * store_buffer_top_address()
Definition: heap.h:898
OldSpace * old_pointer_space()
Definition: heap.h:594
StoreBuffer store_buffer_
Definition: heap.h:1978
MUST_USE_RESULT AllocationResult AllocateEmptyFixedTypedArray(ExternalArrayType array_type)
Definition: heap.cc:3967
void RecordCodeSubTypeStats(int code_sub_type, int code_age, size_t size)
Definition: heap.h:1306
FixedTypedArrayBase * EmptyFixedTypedArrayForMap(Map *map)
Definition: heap.cc:3184
void VisitExternalResources(v8::ExternalResourceVisitor *visitor)
Definition: heap.cc:1707
OldSpace * TargetSpace(HeapObject *object)
Definition: heap-inl.h:392
@ kStringTableRootIndex
Definition: heap.h:1067
@ kStrongRootListLength
Definition: heap.h:1073
static void ScavengeObject(HeapObject **p, HeapObject *object)
Definition: heap-inl.h:554
void AddWeakObjectToCodeDependency(Handle< Object > obj, Handle< DependentCode > dep)
Definition: heap.cc:5341
uintptr_t NewSpaceMask()
Definition: heap.h:590
void DeoptMarkedAllocationSites()
Definition: heap.cc:569
void SetGetterStubDeoptPCOffset(int pc_offset)
Definition: heap.h:1253
void CreateFixedStubs()
Definition: heap.cc:2713
void SetStackLimits()
Definition: heap.cc:5173
void CreateApiObjects()
Definition: heap.cc:2680
RootListIndex RootIndexForEmptyFixedTypedArray(ElementsKind kind)
Definition: heap.cc:3162
void ZapFromSpace()
Definition: heap.cc:4527
friend class HeapIterator
Definition: heap.h:2028
static const int kReduceMemoryFootprintMask
Definition: heap.h:717
static const int kStrictArgumentsObjectSize
Definition: heap.h:674
MarkCompactCollector mark_compact_collector_
Definition: heap.h:1976
MUST_USE_RESULT AllocationResult CopyConstantPoolArrayWithMap(ConstantPoolArray *src, Map *map)
Definition: heap.cc:4014
static const ConstantStringTable constant_string_table[]
Definition: heap.h:1559
int sweep_generation_
Definition: heap.h:1441
bool OldGenerationAllocationLimitReached()
Definition: heap-inl.h:364
void UpdateAllocationsHash(HeapObject *object)
Definition: heap-inl.h:277
static const int kSloppyArgumentsObjectSize
Definition: heap.h:671
MUST_USE_RESULT AllocationResult InternalizeStringWithKey(HashTableKey *key)
int global_ic_age()
Definition: heap.h:1268
int initial_semispace_size_
Definition: heap.h:1431
static const int kInitialStringTableSize
Definition: heap.h:1948
void ReserveSpace(int *sizes, Address *addresses)
Definition: heap.cc:920
StoreBufferRebuilder store_buffer_rebuilder_
Definition: heap.h:1539
void Scavenge()
Definition: heap.cc:1420
unsigned int maximum_size_scavenges_
Definition: heap.h:1921
Address * OldPointerSpaceAllocationLimitAddress()
Definition: heap.h:637
MUST_USE_RESULT AllocationResult AllocateEmptyConstantPoolArray()
Definition: heap.cc:4178
PropertyCellSpace * property_cell_space()
Definition: heap.h:599
intptr_t CommittedMemoryExecutable()
Definition: heap.cc:194
void MoveElements(FixedArray *array, int dst_index, int src_index, int len)
Definition: heap.cc:868
Object * allocation_sites_list_
Definition: heap.h:1527
INLINE(void RecordWrite(Address address, int offset))
RootListIndex RootIndexForExternalArrayType(ExternalArrayType array_type)
Definition: heap.cc:3106
uint32_t dump_allocations_hash_countdown_
Definition: heap.h:1476
MUST_USE_RESULT AllocationResult AllocateForeign(Address address, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:3190
Isolate * isolate_
Definition: heap.h:1424
uint32_t raw_allocations_hash_
Definition: heap.h:1473
void MarkCompactPrologue()
Definition: heap.cc:1209
MUST_USE_RESULT AllocationResult AllocateCell(Object *value)
Definition: heap.cc:2646
int64_t amount_of_external_allocated_memory()
Definition: heap.h:1276
OldSpace * code_space()
Definition: heap.h:596
MUST_USE_RESULT AllocationResult AllocatePartialMap(InstanceType instance_type, int instance_size)
Definition: heap.cc:2271
ExternalStringTable * external_string_table()
Definition: heap.h:1207
void ClearObjectStats(bool clear_last_time_stats=false)
Definition: heap.cc:6087
unsigned int ms_count()
Definition: heap.h:816
friend class Factory
Definition: heap.h:2025
PagedSpace * paged_space(int idx)
Definition: heap.h:601
void FreeQueuedChunks()
Definition: heap.cc:6025
GCIdleTimeHandler gc_idle_time_handler_
Definition: heap.h:1984
MUST_USE_RESULT AllocationResult CopyFixedArrayWithMap(FixedArray *src, Map *map)
Definition: heap.cc:3973
void ProcessPretenuringFeedback()
Definition: heap.cc:493
static const int kRememberedUnmappedPages
Definition: heap.h:1485
Address DoScavenge(ObjectVisitor *scavenge_visitor, Address new_space_front)
Definition: heap.cc:1743
void InitializeJSObjectFromMap(JSObject *obj, FixedArray *properties, Map *map)
Definition: heap.cc:3600
Address * NewSpaceAllocationLimitAddress()
Definition: heap.h:630
Address * NewSpaceAllocationTopAddress()
Definition: heap.h:627
void RemoveGCPrologueCallback(v8::Isolate::GCPrologueCallback callback)
Definition: heap.cc:5307
intptr_t MaximumCommittedMemory()
Definition: heap.h:572
OldSpace * code_space_
Definition: heap.h:1457
uint32_t allocations_count()
Definition: heap.h:939
intptr_t max_executable_size_
Definition: heap.h:1433
MUST_USE_RESULT AllocationResult AllocateEmptyExternalArray(ExternalArrayType array_type)
Definition: heap.cc:3933
void GarbageCollectionEpilogue()
Definition: heap.cc:587
List< GCEpilogueCallbackPair > gc_epilogue_callbacks_
Definition: heap.h:1594
MUST_USE_RESULT AllocationResult AllocateCode(int object_size, bool immovable)
Definition: heap.cc:3431
int scan_on_scavenge_pages_
Definition: heap.h:1452
MUST_USE_RESULT AllocationResult AllocateUninitializedFixedArray(int length)
Definition: heap.cc:4078
Object * native_contexts_list() const
Definition: heap.h:793
double total_gc_time_ms_
Definition: heap.h:1962
void CheckNewSpaceExpansionCriteria()
Definition: heap.cc:1291
LargeObjectSpace * lo_space()
Definition: heap.h:600
void SelectScavengingVisitorsTable()
Definition: heap.cc:2224
void QueueMemoryChunkForFree(MemoryChunk *chunk)
Definition: heap.cc:6019
int gcs_since_last_deopt_
Definition: heap.h:1994
void ClearInstanceofCache()
Definition: heap-inl.h:701
Object * allocation_sites_list()
Definition: heap.h:801
MUST_USE_RESULT AllocationResult AllocateUninitializedFixedDoubleArray(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4093
void CheckpointObjectStats()
Definition: heap.cc:6100
int global_ic_age_
Definition: heap.h:1448
void MarkCompact()
Definition: heap.cc:1181
static const int kMaxOldSpaceSizeHighMemoryDevice
Definition: heap.h:1027
void ResetAllAllocationSitesDependentCode(PretenureFlag flag)
Definition: heap.cc:1667
bool InOldDataSpace(Address address)
Definition: heap-inl.h:354
Object * native_contexts_list_
Definition: heap.h:1525
void public_set_non_monomorphic_cache(UnseededNumberDictionary *value)
Definition: heap.h:879
void public_set_store_buffer_top(Address *top)
Definition: heap.h:887
static const int kInitialNumberStringCacheSize
Definition: heap.h:1950
bool old_gen_exhausted_
Definition: heap.h:1517
void DoScavengeObject(Map *map, HeapObject **slot, HeapObject *obj)
Definition: heap.h:1221
static String * UpdateNewSpaceReferenceInExternalStringTableEntry(Heap *heap, Object **pointer)
Definition: heap.cc:1561
void IncrementCodeGeneratedBytes(bool is_crankshafted, int size)
Definition: heap.h:1176
Address NewSpaceTop()
Definition: heap.h:591
int survived_since_last_expansion_
Definition: heap.h:1438
size_t object_counts_[OBJECT_STATS_COUNT]
Definition: heap.h:1953
bool IsHeapIterable()
Definition: heap.cc:4248
void OnAllocationEvent(HeapObject *object, int size_in_bytes)
Definition: heap-inl.h:224
DISALLOW_COPY_AND_ASSIGN(Heap)
void InitializeWeakObjectToCodeTable()
Definition: heap.h:1348
void AddGCPrologueCallback(v8::Isolate::GCPrologueCallback callback, GCType gc_type_filter, bool pass_isolate=true)
Definition: heap.cc:5298
void CreateInitialObjects()
Definition: heap.cc:2743
PromotionQueue * promotion_queue()
Definition: heap.h:753
bool WorthActivatingIncrementalMarking()
Definition: heap.cc:4290
void ProcessWeakReferences(WeakObjectRetainer *retainer)
Definition: heap.cc:1626
static const int kMaxExecutableSizeHighMemoryDevice
Definition: heap.h:1035
@ RECORD_SCRATCHPAD_SLOT
Definition: heap.h:974
@ IGNORE_SCRATCHPAD_SLOT
Definition: heap.h:974
static const int kYoungSurvivalRateAllowedDeviation
Definition: heap.h:1904
MUST_USE_RESULT AllocationResult AllocateInternalizedStringFromUtf8(Vector< const char > str, int chars, uint32_t hash_field)
Definition: heap-inl.h:66
STATIC_ASSERT(kTrueValueRootIndex==Internals::kTrueValueRootIndex)
MUST_USE_RESULT AllocationResult AllocateInternalizedStringImpl(T t, int chars, uint32_t hash_field)
void RememberUnmappedPage(Address page, bool compacted)
Definition: heap.cc:6072
void ReportStatisticsAfterGC()
Definition: heap.cc:394
void ProcessNativeContexts(WeakObjectRetainer *retainer)
Definition: heap.cc:1635
friend class MapCompact
Definition: heap.h:2032
bool inline_allocation_disabled()
Definition: heap.h:1044
static const int kMaxOldSpaceSizeMediumMemoryDevice
Definition: heap.h:1025
intptr_t old_generation_allocation_limit_
Definition: heap.h:1513
unsigned int ms_count_
Definition: heap.h:1479
void FinalizeExternalString(String *string)
Definition: heap-inl.h:307
double get_max_gc_pause()
Definition: heap.h:1189
intptr_t MaxExecutableSize()
Definition: heap.h:556
bool InNewSpace(Object *object)
Definition: heap-inl.h:322
int unflattened_strings_length_
Definition: heap.h:1490
void IterateSmiRoots(ObjectVisitor *v)
Definition: heap.cc:4739
bool PerformGarbageCollection(GarbageCollector collector, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags)
Definition: heap.cc:1042
int contexts_disposed_
Definition: heap.h:1446
static const int kArgumentsCalleeIndex
Definition: heap.h:679
MUST_USE_RESULT AllocationResult AllocateByteArray(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:3203
Address * OldPointerSpaceAllocationTopAddress()
Definition: heap.h:634
void GarbageCollectionPrologue()
Definition: heap.cc:410
void TearDownArrayBuffers()
Definition: heap.cc:1649
uint32_t allocations_count_
Definition: heap.h:1470
ExternalArray * EmptyExternalArrayForMap(Map *map)
Definition: heap.cc:3178
static const int kMakeHeapIterableMask
Definition: heap.h:721
CellSpace * cell_space()
Definition: heap.h:598
size_t crankshaft_codegen_bytes_generated_
Definition: heap.h:1989
void public_set_materialized_objects(FixedArray *objects)
Definition: heap.h:891
Marking * marking()
Definition: heap.h:1203
void CreateFillerObjectAt(Address addr, int size)
Definition: heap.cc:3221
void public_set_code_stubs(UnseededNumberDictionary *value)
Definition: heap.h:867
HeapObjectCallback GcSafeSizeOfOldObjectFunction()
Definition: heap.h:874
void InitializeAllocationMemento(AllocationMemento *memento, AllocationSite *allocation_site)
Definition: heap.cc:3563
intptr_t MaxReserved()
Definition: heap.h:549
void MakeHeapIterable()
Definition: heap.cc:4255
intptr_t OldGenerationSpaceAvailable()
Definition: heap.h:1003
double total_regexp_code_generated_
Definition: heap.h:1882
static bool IsOneByte(T t, int chars)
bool AllowedToBeMigrated(HeapObject *object, AllocationSpace dest)
Definition: heap-inl.h:427
size_t object_sizes_last_time_[OBJECT_STATS_COUNT]
Definition: heap.h:1956
Object * ToBoolean(bool condition)
Definition: heap-inl.h:706
void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags)
Definition: heap.cc:1146
Object * encountered_weak_collections() const
Definition: heap.h:811
MUST_USE_RESULT AllocationResult AllocateFixedArray(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4073
void SetSetterStubDeoptPCOffset(int pc_offset)
Definition: heap.h:1258
NO_INLINE(void CreateJSEntryStub())
static const int kAbortIncrementalMarkingMask
Definition: heap.h:718
GCTracer tracer_
Definition: heap.h:1884
MUST_USE_RESULT AllocationResult AllocateConstantPoolArray(const ConstantPoolArray::NumberOfEntries &small)
Definition: heap.cc:4128
Object ** roots_array_start()
Definition: heap.h:896
MUST_USE_RESULT AllocationResult AllocateRawFixedDoubleArray(int length, PretenureFlag pretenure)
Definition: heap.cc:4107
Object * weak_object_to_code_table()
Definition: heap.h:806
static const intptr_t kMinimumOldGenerationAllocationLimit
Definition: heap.h:1011
MUST_USE_RESULT AllocationResult AllocateRaw(int size_in_bytes, AllocationSpace space, AllocationSpace retry_space)
Definition: heap-inl.h:166
int always_allocate_scope_depth_
Definition: heap.h:1443
void UpdateCumulativeGCStatistics(double duration, double spent_in_mutator, double marking_time)
Definition: heap.cc:5881
bool IsHighSurvivalRate()
Definition: heap.h:1925
intptr_t Available()
Definition: heap.cc:211
StoreBuffer * store_buffer()
Definition: heap.h:1201
static const StringTypeTable string_type_table[]
Definition: heap.h:1558
Isolate * isolate()
Definition: heap-inl.h:589
Object * roots_[kRootListLength]
Definition: heap.h:1426
void increment_scan_on_scavenge_pages()
Definition: heap.h:739
void UpdateOldSpaceLimits()
int allocation_sites_scratchpad_length_
Definition: heap.h:2001
int FullSizeNumberStringCacheLength()
Definition: heap.cc:3038
void set_allocation_sites_list(Object *object)
Definition: heap.h:798
void SetConstructStubDeoptPCOffset(int pc_offset)
Definition: heap.h:1248
static const int kMaxMarkCompactsInIdleRound
Definition: heap.h:2003
static const int kOldSpaceRoots
Definition: heap.h:1235
double get_min_in_mutator()
Definition: heap.h:1195
static void MoveBlock(Address dst, Address src, int byte_size)
Definition: heap-inl.h:475
void AddGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback, GCType gc_type_filter, bool pass_isolate=true)
Definition: heap.cc:5319
Map * MapForExternalArrayType(ExternalArrayType array_type)
Definition: heap.cc:3101
MUST_USE_RESULT AllocationResult CopyConstantPoolArray(ConstantPoolArray *src)
Definition: heap-inl.h:160
int64_t amount_of_external_allocated_memory_at_last_global_gc_
Definition: heap.h:1420
static const StructTable struct_table[]
Definition: heap.h:1560
bool ConfigureHeap(int max_semi_space_size, int max_old_space_size, int max_executable_size, size_t code_range_size)
Definition: heap.cc:4827
bool RootCanBeTreatedAsConstant(RootListIndex root_index)
Definition: heap.cc:2940
void ClearAllICsByKind(Code::Kind kind)
Definition: heap.cc:470
static const int kInitialEvalCacheSize
Definition: heap.h:1949
void AgeInlineCaches()
Definition: heap.h:1270
MUST_USE_RESULT AllocationResult CopyFixedDoubleArray(FixedDoubleArray *src)
Definition: heap-inl.h:154
MUST_USE_RESULT AllocationResult CopyAndTenureFixedCOWArray(FixedArray *src)
Definition: heap.cc:3939
static void UpdateAllocationSiteFeedback(HeapObject *object, ScratchpadSlotMode mode)
Definition: heap-inl.h:536
MUST_USE_RESULT AllocationResult InternalizeString(String *str)
String * hidden_string_
Definition: heap.h:1564
void ClearJSFunctionResultCaches()
Definition: heap.cc:980
MUST_USE_RESULT AllocationResult Allocate(Map *map, AllocationSpace space, AllocationSite *allocation_site=NULL)
Definition: heap.cc:3574
MUST_USE_RESULT AllocationResult AllocateMap(InstanceType instance_type, int instance_size, ElementsKind elements_kind=TERMINAL_FAST_ELEMENTS_KIND)
Definition: heap.cc:2295
void IterateWeakRoots(ObjectVisitor *v, VisitMode mode)
Definition: heap.cc:4728
MUST_USE_RESULT AllocationResult AllocateJSObjectFromMap(Map *map, PretenureFlag pretenure=NOT_TENURED, bool alloc_props=true, AllocationSite *allocation_site=NULL)
Definition: heap.cc:3631
int nodes_died_in_new_space_
Definition: heap.h:1913
bool InNewSpacePage(Address address)
MUST_USE_RESULT AllocationResult AllocateExtendedConstantPoolArray(const ConstantPoolArray::NumberOfEntries &small, const ConstantPoolArray::NumberOfEntries &extended)
Definition: heap.cc:4152
HeapState gc_state_
Definition: heap.h:1462
int nodes_promoted_
Definition: heap.h:1915
static const int kMaxExecutableSizeHugeMemoryDevice
Definition: heap.h:1037
int gc_post_processing_depth_
Definition: heap.h:1463
uint32_t HashSeed()
Definition: heap.h:1237
STATIC_ASSERT(kempty_stringRootIndex==Internals::kEmptyStringRootIndex)
PropertyCellSpace * property_cell_space_
Definition: heap.h:1460
Object * encountered_weak_collections_
Definition: heap.h:1537
void UpdateNewSpaceReferencesInExternalStringTable(ExternalStringTableUpdaterCallback updater_func)
Definition: heap.cc:1576
void DisableInlineAllocation()
Definition: heap.cc:5034
static const int kNoGCFlags
Definition: heap.h:716
int max_semi_space_size_
Definition: heap.h:1430
static void ScavengeStoreBufferCallback(Heap *heap, MemoryChunk *page, StoreBufferEvent event)
Definition: heap.cc:1309
intptr_t maximum_committed_
Definition: heap.h:1434
int gc_count() const
Definition: heap.h:1228
Address * OldDataSpaceAllocationTopAddress()
Definition: heap.h:641
OldSpace * old_pointer_space_
Definition: heap.h:1455
void AddAllocationSiteToScratchpad(AllocationSite *site, ScratchpadSlotMode mode)
Definition: heap.cc:3077
static const int kYoungSurvivalRateHighThreshold
Definition: heap.h:1903
NewSpace new_space_
Definition: heap.h:1454
bool InFromSpace(Object *object)
Definition: heap-inl.h:334
void IncrementPromotedObjectsSize(int object_size)
Definition: heap.h:1114
int remembered_unmapped_pages_index_
Definition: heap.h:1486
MemoryChunk * chunks_queued_for_free_
Definition: heap.h:2017
Object * weak_object_to_code_table_
Definition: heap.h:1532
int InitialSemiSpaceSize()
Definition: heap.h:554
bool CollectGarbage(AllocationSpace space, const char *gc_reason=NULL, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags)
Definition: heap-inl.h:581
void IncreaseTotalRegexpCodeGenerated(int size)
Definition: heap.h:1172
size_t object_counts_last_time_[OBJECT_STATS_COUNT]
Definition: heap.h:1954
bool inline_allocation_disabled_
Definition: heap.h:1521
void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags)
Definition: heap.cc:1163
void ProcessAllocationSites(WeakObjectRetainer *retainer)
Definition: heap.cc:1660
void FlushAllocationSitesScratchpad()
Definition: heap.cc:3060
NO_INLINE(void CreateJSConstructEntryStub())
bool NextGCIsLikelyToBeFull()
Definition: heap.h:1135
base::Mutex relocation_mutex_
Definition: heap.h:2019
intptr_t PromotedTotalSize()
Definition: heap.h:996
bool IsInGCPostProcessing()
Definition: heap.h:957
int ReservedSemiSpaceSize()
Definition: heap.h:553
CellSpace * cell_space_
Definition: heap.h:1459
int gc_callbacks_depth_
Definition: heap.h:2021
void set_weak_object_to_code_table(Object *value)
Definition: heap.h:1935
void EnsureWeakObjectToCodeTable()
Definition: heap.cc:5367
OldSpace * old_data_space()
Definition: heap.h:595
static const int kMaxExecutableSizeMediumMemoryDevice
Definition: heap.h:1033
size_t object_sizes_[OBJECT_STATS_COUNT]
Definition: heap.h:1955
STATIC_ASSERT(kFalseValueRootIndex==Internals::kFalseValueRootIndex)
bool HasBeenSetUp()
Definition: heap.cc:221
MUST_USE_RESULT AllocationResult AllocateHeapNumber(double value, MutableMode mode=IMMUTABLE, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:2624
void RecordStats(HeapStats *stats, bool take_snapshot=false)
Definition: heap.cc:4929
PromotionQueue promotion_queue_
Definition: heap.h:2007
bool CanMoveObjectStart(HeapObject *object)
Definition: heap.cc:3235
static AllocationSpace TargetSpaceId(InstanceType type)
Definition: heap-inl.h:399
void IterateStrongRoots(ObjectVisitor *v, VisitMode mode)
Definition: heap.cc:4747
MUST_USE_RESULT AllocationResult AllocateRawTwoByteString(int length, PretenureFlag pretenure)
Definition: heap.cc:3895
Object * array_buffers_list_
Definition: heap.h:1526
void IterateAndMarkPointersToFromSpace(Address start, Address end, ObjectSlotCallback callback)
Definition: heap.cc:4540
bool IdleNotification(int idle_time_in_ms)
Definition: heap.cc:4296
STATIC_ASSERT(kUndefinedValueRootIndex==Internals::kUndefinedValueRootIndex)
void FlushNumberStringCache()
Definition: heap.cc:3051
int reserved_semispace_size_
Definition: heap.h:1429
void AdjustLiveBytes(Address address, int by, InvocationMode mode)
Definition: heap.cc:3254
void EnableInlineAllocation()
Definition: heap.cc:5025
IncrementalMarking * incremental_marking()
Definition: heap.h:1205
void TearDown()
Definition: heap.cc:5188
static void FatalProcessOutOfMemory(const char *location, bool take_snapshot=false)
Definition: heap.cc:5376
Address new_space_top_after_last_gc_
Definition: heap.h:1464
void UpdateMaximumCommitted()
Definition: heap.cc:201
void IncrementNodesCopiedInNewSpace()
Definition: heap.h:1126
RootListIndex RootIndexForFixedTypedArray(ExternalArrayType array_type)
Definition: heap.cc:3128
double synthetic_time()
Definition: heap.h:943
double marking_time_
Definition: heap.h:1971
void RepairFreeListsAfterBoot()
Definition: heap.cc:484
void PrintShortHeapStatistics()
Definition: heap.cc:313
MapSpace * map_space_
Definition: heap.h:1458
GarbageCollector SelectGarbageCollector(AllocationSpace space, const char **reason)
Definition: heap.cc:236
GCTracer * tracer()
Definition: heap.h:1166
@ FIRST_CODE_AGE_SUB_TYPE
Definition: heap.h:1295
@ FIRST_FIXED_ARRAY_SUB_TYPE
Definition: heap.h:1293
@ FIRST_CODE_KIND_SUB_TYPE
Definition: heap.h:1292
VisitorDispatchTable< ScavengingCallback > scavenging_visitors_table_
Definition: heap.h:2015
MUST_USE_RESULT AllocationResult AllocateRawFixedArray(int length, PretenureFlag pretenure)
Definition: heap.cc:4039
static AllocationSpace SelectSpace(int object_size, AllocationSpace preferred_old_space, PretenureFlag pretenure)
Definition: heap.h:1649
Address * OldDataSpaceAllocationLimitAddress()
Definition: heap.h:644
MUST_USE_RESULT AllocationResult AllocateFixedArrayWithFiller(int length, PretenureFlag pretenure, Object *filler)
Definition: heap.cc:4051
static bool ShouldZapGarbage()
Definition: heap.h:926
bool ShouldBePromoted(Address old_address, int object_size)
Definition: heap-inl.h:370
static void CopyBlock(Address dst, Address src, int byte_size)
Definition: heap-inl.h:469
intptr_t CommittedMemory()
Definition: heap.cc:170
void OnMoveEvent(HeapObject *target, HeapObject *source, int size_in_bytes)
Definition: heap-inl.h:245
MUST_USE_RESULT AllocationResult AllocateStruct(InstanceType type)
Definition: heap.cc:4223
NewSpace * new_space()
Definition: heap.h:593
int sweep_generation()
Definition: heap.h:1212
unsigned int gc_count_at_last_idle_gc_
Definition: heap.h:1985
void RightTrimFixedArray(FixedArrayBase *obj, int elements_to_trim)
Definition: heap.cc:3322
MUST_USE_RESULT AllocationResult AllocateSymbol()
Definition: heap.cc:4193
bool CreateHeapObjects()
Definition: heap.cc:5156
AllocationMemento * FindAllocationMemento(HeapObject *object)
Definition: heap-inl.h:497
bool DeoptMaybeTenuredAllocationSites()
Definition: heap.h:1284
FixedArrayBase * LeftTrimFixedArray(FixedArrayBase *obj, int elements_to_trim)
Definition: heap.cc:3266
void CollectAllGarbage(int flags, const char *gc_reason=NULL, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags)
Definition: heap.cc:724
static const int kArgumentsLengthIndex
Definition: heap.h:677
static const int kMaxOldSpaceSizeHugeMemoryDevice
Definition: heap.h:1028
unsigned int gc_count_
Definition: heap.h:1482
void IncrementYoungSurvivorsCounter(int survived)
Definition: heap.h:1130
Object * array_buffers_list() const
Definition: heap.h:796
MUST_USE_RESULT AllocationResult AllocateFixedTypedArray(int length, ExternalArrayType array_type, PretenureFlag pretenure)
Definition: heap.cc:3400
MUST_USE_RESULT AllocationResult AllocateJSObject(JSFunction *constructor, PretenureFlag pretenure=NOT_TENURED, AllocationSite *allocation_site=NULL)
Definition: heap.cc:3671
MUST_USE_RESULT AllocationResult CopyCode(Code *code, Vector< byte > reloc_info)
Definition: heap.cc:3501
double total_regexp_code_generated()
Definition: heap.h:1171
MUST_USE_RESULT AllocationResult AllocateFillerObject(int size, bool double_align, AllocationSpace space)
Definition: heap.cc:2329
static void ScavengeObjectSlow(HeapObject **p, HeapObject *object)
Definition: heap.cc:2262
static const int kPointerMultiplier
Definition: heap.h:1014
void EnsureFromSpaceIsCommitted()
Definition: heap.cc:971
MUST_USE_RESULT AllocationResult AllocatePropertyCell()
Definition: heap.cc:2661
RootListIndex RootIndexForEmptyExternalArray(ElementsKind kind)
Definition: heap.cc:3145
void ClearNormalizedMapCaches()
Definition: heap.cc:1003
static const int kOldSurvivalRateLowThreshold
Definition: heap.h:1906
static const int kMaxOldSpaceSizeLowMemoryDevice
Definition: heap.h:1024
static const int kMaxSemiSpaceSizeHugeMemoryDevice
Definition: heap.h:1020
MUST_USE_RESULT AllocationResult AllocateExternalArray(int length, ExternalArrayType array_type, void *external_pointer, PretenureFlag pretenure)
Definition: heap.cc:3362
Object ** weak_object_to_code_table_address()
Definition: heap.h:1940
Marking marking_
Definition: heap.h:1980
intptr_t OldGenerationAllocationLimit(intptr_t old_gen_size, int freed_global_handles)
Definition: heap.cc:4984
void UpdateReferencesInExternalStringTable(ExternalStringTableUpdaterCallback updater_func)
Definition: heap.cc:1613
intptr_t get_max_alive_after_gc()
Definition: heap.h:1192
bool always_allocate()
Definition: heap.h:622
intptr_t Capacity()
Definition: heap.cc:160
static void ScavengePointer(HeapObject **p)
Definition: heap-inl.h:494
void CompletelyClearInstanceofCache()
Definition: heap-inl.h:711
bool CreateInitialMaps()
Definition: heap.cc:2372
IncrementalMarking incremental_marking_
Definition: heap.h:1982
void RemoveGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback)
Definition: heap.cc:5328
intptr_t max_alive_after_gc_
Definition: heap.h:1965
double semi_space_copied_rate_
Definition: heap.h:1912
static const int kMaxSemiSpaceSizeMediumMemoryDevice
Definition: heap.h:1018
bool ConfigureHeapDefault()
Definition: heap.cc:4926
void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc)
Definition: heap.cc:1684
void CollectAllAvailableGarbage(const char *gc_reason=NULL)
Definition: heap.cc:735
void IncrementNodesPromoted()
Definition: heap.h:1128
void RecordFixedArraySubTypeStats(int array_sub_type, size_t size)
Definition: heap.h:1320
double sweeping_time_
Definition: heap.h:1974
void IncrementNodesDiedInNewSpace()
Definition: heap.h:1124
void decrement_scan_on_scavenge_pages()
Definition: heap.h:746
void set_native_contexts_list(Object *object)
Definition: heap.h:790
static const int kMaxExecutableSizeLowMemoryDevice
Definition: heap.h:1032
intptr_t promoted_objects_size_
Definition: heap.h:1909
MUST_USE_RESULT AllocationResult AllocateInternalizedStringImpl(T t, int chars, uint32_t hash_field)
double promotion_rate_
Definition: heap.h:1910
MUST_USE_RESULT AllocationResult CopyFixedDoubleArrayWithMap(FixedDoubleArray *src, Map *map)
Definition: heap.cc:3998
size_t full_codegen_bytes_generated_
Definition: heap.h:1988
INLINE(void RecordWrites(Address address, int start, int len))
Address always_allocate_scope_depth_address()
Definition: heap.h:623
int64_t PromotedExternalMemorySize()
Definition: heap.cc:4975
bool configured_
Definition: heap.h:2011
OldSpace * old_data_space_
Definition: heap.h:1456
void InitializeAllocationSitesScratchpad()
Definition: heap.cc:3068
bool InToSpace(Object *object)
Definition: heap-inl.h:339
intptr_t SizeOfObjects()
Definition: heap.cc:460
String * hidden_string()
Definition: heap.h:788
double min_in_mutator_
Definition: heap.h:1968
void PrintAlloctionsHash()
Definition: heap-inl.h:301
void ReportStatisticsBeforeGC()
Definition: heap.cc:291
void set_encountered_weak_collections(Object *weak_collection)
Definition: heap.h:808
int MaxSemiSpaceSize()
Definition: heap.h:552
bool InOldPointerSpace(Address address)
Definition: heap-inl.h:344
static const int kMaxSemiSpaceSizeLowMemoryDevice
Definition: heap.h:1017
size_t code_range_size_
Definition: heap.h:1428
MUST_USE_RESULT AllocationResult AllocateEmptyFixedArray()
Definition: heap.cc:3918
void public_set_empty_script(Script *script)
Definition: heap.h:883
LargeObjectSpace * lo_space_
Definition: heap.h:1461
DependentCode * LookupWeakObjectToCodeDependency(Handle< Object > obj)
Definition: heap.cc:5360
Map * MapForFixedTypedArray(ExternalArrayType array_type)
Definition: heap.cc:3123
intptr_t PromotedSpaceSizeOfObjects()
Definition: heap.cc:4967
int high_survival_rate_period_length_
Definition: heap.h:1908
HeapObjectCallback gc_safe_size_of_old_object_
Definition: heap.h:1597
void SetArgumentsAdaptorDeoptPCOffset(int pc_offset)
Definition: heap.h:1243
int NotifyContextDisposed()
Definition: heap.cc:857
MarkCompactCollector * mark_compact_collector()
Definition: heap.h:1197
MapSpace * map_space()
Definition: heap.h:597
void UpdateSurvivalStatistics(int start_new_space_size)
Definition: heap.cc:1023
void ProcessArrayBuffers(WeakObjectRetainer *retainer)
Definition: heap.cc:1642
intptr_t semi_space_copied_object_size_
Definition: heap.h:1911
bool flush_monomorphic_ics()
Definition: heap.h:1274
void RecordObjectStats(InstanceType type, size_t size)
Definition: heap.h:1300
intptr_t OldGenerationCapacityAvailable()
Definition: heap.h:1007
MUST_USE_RESULT AllocationResult LookupSingleCharacterStringFromCode(uint16_t code)
bool InSpace(Address addr, AllocationSpace space)
Definition: heap.cc:4464
void MarkMapPointersAsEncoded(bool encoded)
Definition: heap.h:1601
STATIC_ASSERT(kNullValueRootIndex==Internals::kNullValueRootIndex)
bool MaximumSizeScavenge()
Definition: heap.h:1282
MUST_USE_RESULT AllocationResult AllocateOneByteInternalizedString(Vector< const uint8_t > str, uint32_t hash_field)
Definition: heap-inl.h:86
MUST_USE_RESULT AllocationResult AllocateTwoByteInternalizedString(Vector< const uc16 > str, uint32_t hash_field)
Definition: heap-inl.h:118
static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index)
Definition: heap.cc:2912
Object ** allocation_sites_list_address()
Definition: heap.h:804
static const int kEmptyStringRootIndex
Definition: v8.h:5847
static const int kUndefinedValueRootIndex
Definition: v8.h:5843
static const int kFalseValueRootIndex
Definition: v8.h:5846
static const int kTrueValueRootIndex
Definition: v8.h:5845
static const int kNullValueRootIndex
Definition: v8.h:5844
static void ClearMark(HeapObject *object)
Definition: heap.h:2415
static int SizeOfMarkedObject(HeapObject *object)
Definition: heap.h:2432
STATIC_ASSERT((kHeapObjectTag &kNotMarkedBit) !=0)
static bool IsMarked(HeapObject *object)
Definition: heap.h:2411
static void SetMark(HeapObject *object)
Definition: heap.h:2421
static const uintptr_t kNotMarkedBit
Definition: heap.h:2437
static Map * MapOfMarkedObject(HeapObject *object)
Definition: heap.h:2427
static const int kHeaderSize
Definition: objects.h:2195
static const int kLength
Definition: heap.h:2252
static const int kHashMask
Definition: heap.h:2255
static const int kKeyIndex
Definition: heap.h:2259
DISALLOW_COPY_AND_ASSIGN(KeyedLookupCache)
Address field_offsets_address()
Definition: heap.h:2281
static const int kCapacityMask
Definition: heap.h:2253
int Lookup(Handle< Map > map, Handle< Name > name)
Definition: heap.cc:5906
static const int kMapHashShift
Definition: heap.h:2254
STATIC_ASSERT((kEntriesPerBucket &(kEntriesPerBucket - 1))==0)
friend class ExternalReference
Definition: heap.h:2293
static const int kEntriesPerBucket
Definition: heap.h:2256
static const int kNotFound
Definition: heap.h:2260
static int Hash(Handle< Map > map, Handle< Name > name)
Definition: heap.cc:5897
void Update(Handle< Map > map, Handle< Name > name, int field_offset)
Definition: heap.cc:5919
static const int kEntryLength
Definition: heap.h:2257
int field_offsets_[kLength]
Definition: heap.h:2291
static const int kMapIndex
Definition: heap.h:2258
STATIC_ASSERT(kEntriesPerBucket==-kHashMask)
static MemoryChunk * FromAddress(Address a)
Definition: spaces.h:276
NewSpacePage * prev_page() const
Definition: spaces.h:1999
static bool IsAtStart(Address addr)
Definition: spaces.h:2009
static NewSpacePage * FromAddress(Address address_in_page)
Definition: spaces.h:2021
bool IsAtMaximumCapacity()
Definition: spaces.h:2409
intptr_t Capacity()
Definition: spaces.h:2371
Address * allocation_top_address()
Definition: spaces.h:2460
bool UncommitFromSpace()
Definition: spaces.h:2539
Address * allocation_limit_address()
Definition: spaces.h:2463
uintptr_t mask()
Definition: spaces.h:2446
static const int kPageSize
Definition: spaces.h:748
static const int kMaxRegularHeapObjectSize
Definition: spaces.h:754
Address * allocation_limit_address()
Definition: spaces.h:1758
Address * allocation_top_address()
Definition: spaces.h:1755
bool IsBelowPromotionQueue(Address to_space_top)
Definition: heap.h:402
void remove(HeapObject **target, int *size)
Definition: heap.h:421
static const int kEntrySizeInWords
Definition: heap.h:449
List< Entry > * emergency_stack_
Definition: heap.h:457
PromotionQueue(Heap *heap)
Definition: heap.h:373
void insert(HeapObject *target, int size)
Definition: heap-inl.h:24
DISALLOW_COPY_AND_ASSIGN(PromotionQueue)
void SetNewLimit(Address limit)
Definition: heap.h:392
static const int kStringOffset
Definition: heap.h:2384
static const int kPatternOffset
Definition: heap.h:2385
static const int kArrayEntriesPerCacheEntry
Definition: heap.h:2383
static Object * Lookup(Heap *heap, String *key_string, Object *key_pattern, ResultsCacheType type)
Definition: heap.cc:2946
static void Enter(Isolate *isolate, Handle< String > key_string, Handle< Object > key_pattern, Handle< FixedArray > value_array, ResultsCacheType type)
Definition: heap.cc:2977
static const int kRegExpResultsCacheSize
Definition: heap.h:2380
static const int kArrayOffset
Definition: heap.h:2386
static void Clear(FixedArray *cache)
Definition: heap.cc:3031
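
RegExpResultsCache keys cached split/match result arrays by a (subject string, pattern) pair inside one fixed-size array, with Lookup, Enter, and Clear as its whole interface. The sketch below mirrors that shape with ordinary C++ containers; the entry count and hash are assumptions, not the real kRegExpResultsCacheSize or V8's hash.

  #include <array>
  #include <functional>
  #include <iostream>
  #include <string>
  #include <vector>

  static const int kCacheEntries = 64;                 // assumed size for the sketch

  struct CacheEntry {
    std::string key_string;
    std::string key_pattern;
    std::vector<std::string> results;                  // stands in for the cached FixedArray
    bool used = false;
  };
  static std::array<CacheEntry, kCacheEntries> cache_;

  static size_t Index(const std::string& s, const std::string& p) {
    return (std::hash<std::string>{}(s) ^ std::hash<std::string>{}(p)) % kCacheEntries;
  }

  const std::vector<std::string>* Lookup(const std::string& s, const std::string& p) {
    CacheEntry& e = cache_[Index(s, p)];
    if (e.used && e.key_string == s && e.key_pattern == p) return &e.results;
    return nullptr;                                    // plays the role of the Smi 0 miss value
  }

  void Enter(const std::string& s, const std::string& p, std::vector<std::string> results) {
    CacheEntry& e = cache_[Index(s, p)];
    e.key_string = s;
    e.key_pattern = p;
    e.results = std::move(results);
    e.used = true;
  }

  void Clear() { for (CacheEntry& e : cache_) e.used = false; }

  int main() {
    Enter("a,b,c", ",", {"a", "b", "c"});
    const std::vector<std::string>* hit = Lookup("a,b,c", ",");
    std::cout << (hit ? hit->size() : 0) << "\n";          // 3
    Clear();
    std::cout << (Lookup("a,b,c", ",") != nullptr) << "\n"; // 0
    return 0;
  }
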
static void AssertValidRange(Address from, Address to)
Definition: spaces.h:2165
static Smi * FromInt(int value)
Definition: objects-inl.h:1321
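
Smi::FromInt produces a tagged small integer rather than a heap allocation. A hedged sketch of the 32-bit-style scheme follows, assuming a one-bit tag where small integers carry tag 0 and heap pointers carry kHeapObjectTag = 1; the helper names are illustrative.

  #include <cassert>
  #include <cstdint>

  static const intptr_t kSmiTag = 0;
  static const int kSmiTagSize = 1;
  static const intptr_t kSmiTagMask = 1;

  intptr_t SmiFromInt(int value) {
    // Shift through uintptr_t to avoid shifting a negative signed value.
    return static_cast<intptr_t>(static_cast<uintptr_t>(value) << kSmiTagSize) | kSmiTag;
  }

  int SmiToInt(intptr_t tagged) {
    return static_cast<int>(tagged >> kSmiTagSize);  // assumes arithmetic right shift
  }

  bool IsSmi(intptr_t tagged) { return (tagged & kSmiTagMask) == kSmiTag; }

  int main() {
    assert(IsSmi(SmiFromInt(42)) && SmiToInt(SmiFromInt(42)) == 42);
    assert(SmiToInt(SmiFromInt(-7)) == -7);
    return 0;
  }
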
ObjectIterator * next()
Definition: heap.cc:5485
ObjectIterator * CreateIterator()
Definition: heap.cc:5502
ObjectIterator * iterator_
Definition: heap.h:2186
HeapObjectCallback size_func_
Definition: heap.h:2187
SpaceIterator(Heap *heap)
Definition: heap.cc:5459
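
SpaceIterator walks the heap one space at a time and hands out an ObjectIterator per space via CreateIterator; a driver loop chains those inner iterators to visit every object. A toy version of that two-level pattern is sketched below; all types and names here are illustrative.

  #include <cstddef>
  #include <iostream>
  #include <vector>

  using Space = std::vector<int>;                 // a "space" full of "objects"

  class ObjectIterator {
   public:
    explicit ObjectIterator(const Space* space) : space_(space), pos_(0) {}
    const int* Next() {                           // nullptr when the space is exhausted
      if (pos_ >= space_->size()) return nullptr;
      return &(*space_)[pos_++];
    }
   private:
    const Space* space_;
    size_t pos_;
  };

  class SpaceIterator {
   public:
    explicit SpaceIterator(const std::vector<Space>* heap) : heap_(heap), index_(0) {}
    bool has_next() const { return index_ < heap_->size(); }
    ObjectIterator CreateIterator() { return ObjectIterator(&(*heap_)[index_++]); }
   private:
    const std::vector<Space>* heap_;
    size_t index_;
  };

  int main() {
    std::vector<Space> heap = {{1, 2}, {}, {3}};
    SpaceIterator spaces(&heap);
    while (spaces.has_next()) {
      ObjectIterator objects = spaces.CreateIterator();
      while (const int* obj = objects.Next()) std::cout << *obj << " ";  // 1 2 3
    }
    std::cout << "\n";
    return 0;
  }
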
void Callback(MemoryChunk *page, StoreBufferEvent event)
Definition: heap.cc:1315
StoreBufferRebuilder(StoreBuffer *store_buffer)
Definition: heap.h:351
void VisitPointers(Object **start, Object **end)
Definition: heap-inl.h:761
void VisitPointers(Object **start, Object **end)
Definition: heap-inl.h:772
virtual Object * RetainAs(Object *object)=0
#define STRING_INDEX_DECLARATION(name, str)
Definition: heap.h:1059
#define ROOT_INDEX_DECLARATION(type, name, camel_name)
Definition: heap.h:1069
#define STRUCT_MAP_ACCESSOR(NAME, Name, name)
Definition: heap.h:776
#define ROOT_LIST(V)
Definition: heap.h:206
#define STRONG_ROOT_LIST(V)
Definition: heap.h:28
#define INTERNALIZED_STRING_LIST(V)
Definition: heap.h:262
#define DECLARE_STRUCT_MAP(NAME, Name, name)
Definition: heap.h:1064
#define STRING_ACCESSOR(name, str)
Definition: heap.h:781
#define SMI_ROOT_LIST(V)
Definition: heap.h:197
#define ROOT_ACCESSOR(type, name, camel_name)
Definition: heap.h:1492
#define UNREACHABLE()
Definition: logging.h:30
#define DCHECK(condition)
Definition: logging.h:205
#define DISALLOW_IMPLICIT_CONSTRUCTORS(TypeName)
Definition: macros.h:255
#define MUST_USE_RESULT
Definition: macros.h:266
unsigned short uint16_t
Definition: unicode.cc:23
const int kPointerSize
Definition: globals.h:129
PerThreadAssertScopeDebugOnly< HEAP_ALLOCATION_ASSERT, false > DisallowHeapAllocation
Definition: assert-scope.h:110
@ LAST_FIXED_ARRAY_SUB_TYPE
Definition: objects.h:815
void(* ObjectSlotCallback)(HeapObject **from, HeapObject *to)
Definition: store-buffer.h:20
const int kMaxInt
Definition: globals.h:109
const int kPointerSizeLog2
Definition: globals.h:147
@ TERMINAL_FAST_ELEMENTS_KIND
Definition: elements-kind.h:63
String *(* ExternalStringTableUpdaterCallback)(Heap *heap, Object **pointer)
Definition: heap.h:346
byte * Address
Definition: globals.h:101
void PrintF(const char *format,...)
Definition: utils.cc:80
@ OLD_DATA_SPACE
Definition: globals.h:361
@ PROPERTY_CELL_SPACE
Definition: globals.h:365
@ OLD_POINTER_SPACE
Definition: globals.h:360
const int kHeapObjectTag
Definition: v8.h:5737
int(* HeapObjectCallback)(HeapObject *obj)
Definition: globals.h:429
void(* ScavengingCallback)(Map *map, HeapObject **slot, HeapObject *object)
Definition: heap.h:467
const int MB
Definition: globals.h:107
ArrayStorageAllocationMode
Definition: heap.h:511
@ INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE
Definition: heap.h:513
@ DONT_INITIALIZE_ARRAY_ELEMENTS
Definition: heap.h:512
Debugger support for the V8 JavaScript engine.
Definition: accessors.cc:20
GCCallbackFlags
Definition: v8.h:4209
@ kNoGCCallbackFlags
Definition: v8.h:4210
ExternalArrayType
Definition: v8.h:2217
GCType
Applications can register callback functions which will be called before and after a garbage collection.
Definition: v8.h:4203
#define STRUCT_LIST(V)
Definition: objects.h:515
bool operator==(const GCEpilogueCallbackPair &pair) const
Definition: heap.h:1586
v8::Isolate::GCPrologueCallback callback
Definition: heap.h:1589
GCEpilogueCallbackPair(v8::Isolate::GCPrologueCallback callback, GCType gc_type, bool pass_isolate)
Definition: heap.h:1583
bool operator==(const GCPrologueCallbackPair &pair) const
Definition: heap.h:1572
v8::Isolate::GCPrologueCallback callback
Definition: heap.h:1575
GCPrologueCallbackPair(v8::Isolate::GCPrologueCallback callback, GCType gc_type, bool pass_isolate)
Definition: heap.h:1569
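
The GCPrologueCallbackPair and GCEpilogueCallbackPair entries show how the heap stores registered GC callbacks: each pair couples the callback with a GCType filter (operator== compares only the callback, which is what removal keys on), and around each collection only the pairs whose filter matches the current GC type are invoked. Below is a self-contained model of that registration-and-dispatch logic; the enum values and function names are illustrative, not the embedder API.

  #include <iostream>
  #include <vector>

  enum GCType { kGCTypeScavenge = 1, kGCTypeMarkSweepCompact = 2, kGCTypeAll = 3 };

  typedef void (*GCPrologueCallback)(GCType type);

  struct GCPrologueCallbackPair {
    GCPrologueCallbackPair(GCPrologueCallback callback, GCType gc_type)
        : callback(callback), gc_type(gc_type) {}
    bool operator==(const GCPrologueCallbackPair& other) const {
      return other.callback == callback;        // removal matches on the callback only
    }
    GCPrologueCallback callback;
    GCType gc_type;
  };

  static std::vector<GCPrologueCallbackPair> gc_prologue_callbacks_;

  void AddGCPrologueCallback(GCPrologueCallback cb, GCType filter) {
    gc_prologue_callbacks_.push_back(GCPrologueCallbackPair(cb, filter));
  }

  void CallGCPrologueCallbacks(GCType current) {
    for (const GCPrologueCallbackPair& pair : gc_prologue_callbacks_) {
      if (pair.gc_type & current) pair.callback(current);   // only matching GC types fire
    }
  }

  void TraceScavenge(GCType) { std::cout << "scavenge about to start\n"; }

  int main() {
    AddGCPrologueCallback(TraceScavenge, kGCTypeScavenge);
    CallGCPrologueCallbacks(kGCTypeScavenge);         // fires
    CallGCPrologueCallbacks(kGCTypeMarkSweepCompact); // filtered out
    return 0;
  }
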
Entry(HeapObject *obj, int size)
Definition: heap.h:452
#define T(name, string, precedence)
Definition: token.cc:25