static uint32_t EncodeExternal(TypeCode type, uint16_t id) {
  return static_cast<uint32_t>(type) << 16 | id;
}
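// The key packs the TypeCode into the upper 16 bits and the per-type id into
// the lower 16. A minimal sketch of the inverse mapping (hypothetical helper
// names; the real decoder indexes its per-type tables with these two parts):
static TypeCode DecodeTypeCode(uint32_t key) {
  return static_cast<TypeCode>(key >> 16);
}
static uint16_t DecodeId(uint32_t key) {
  return static_cast<uint16_t>(key & 0xFFFF);
}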
// All counters refer to dummy_counter if deserializing happens without
// setting up the counters.
static int dummy_counter = 0;
ExternalReferenceTable* ExternalReferenceTable::instance(Isolate* isolate) {
  ExternalReferenceTable* external_reference_table =
      isolate->external_reference_table();
  if (external_reference_table == NULL) {
    external_reference_table = new ExternalReferenceTable(isolate);
    isolate->set_external_reference_table(external_reference_table);
  }
  return external_reference_table;
}
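// The table is created lazily, once per isolate, and cached on the isolate so
// the serializer and the external-reference encoder/decoder below share one
// instance. A sketch of typical use (address(i)/name(i)/size() are assumed
// accessors over the refs_ list that Add() populates below):
//
//   ExternalReferenceTable* table = ExternalReferenceTable::instance(isolate);
//   for (int i = 0; i < table->size(); i++) {
//     PrintF("%p  %s\n", static_cast<void*>(table->address(i)),
//            table->name(i));
//   }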
void ExternalReferenceTable::PopulateTable(Isolate* isolate) {
  for (int type_code = 0; type_code < kTypeCodeCount; type_code++) {
    max_id_[type_code] = 0;
  }
  // Miscellaneous
  Add(ExternalReference::roots_array_start(isolate).address(),
      "Heap::roots_array_start()");
  Add(ExternalReference::address_of_stack_limit(isolate).address(),
      "StackGuard::address_of_jslimit()");
  Add(ExternalReference::address_of_real_stack_limit(isolate).address(),
      "StackGuard::address_of_real_jslimit()");
  Add(ExternalReference::new_space_start(isolate).address(),
      "Heap::NewSpaceStart()");
  Add(ExternalReference::new_space_mask(isolate).address(),
      "Heap::NewSpaceMask()");
  Add(ExternalReference::new_space_allocation_limit_address(isolate).address(),
      "Heap::NewSpaceAllocationLimitAddress()");
  Add(ExternalReference::new_space_allocation_top_address(isolate).address(),
      "Heap::NewSpaceAllocationTopAddress()");
  Add(ExternalReference::debug_break(isolate).address(), "Debug::Break()");
  Add(ExternalReference::debug_step_in_fp_address(isolate).address(),
      "Debug::step_in_fp_addr()");
  Add(ExternalReference::mod_two_doubles_operation(isolate).address(),
      "mod_two_doubles");
  // Keyed lookup cache.
  Add(ExternalReference::keyed_lookup_cache_keys(isolate).address(),
      "KeyedLookupCache::keys()");
  Add(ExternalReference::keyed_lookup_cache_field_offsets(isolate).address(),
      "KeyedLookupCache::field_offsets()");
  Add(ExternalReference::handle_scope_next_address(isolate).address(),
      "HandleScope::next");
  Add(ExternalReference::handle_scope_limit_address(isolate).address(),
      "HandleScope::limit");
  Add(ExternalReference::handle_scope_level_address(isolate).address(),
      "HandleScope::level");
  Add(ExternalReference::new_deoptimizer_function(isolate).address(),
      "Deoptimizer::New()");
  Add(ExternalReference::compute_output_frames_function(isolate).address(),
      "Deoptimizer::ComputeOutputFrames()");
  Add(ExternalReference::address_of_min_int().address(),
      "LDoubleConstant::min_int");
  Add(ExternalReference::address_of_one_half().address(),
      "LDoubleConstant::one_half");
  Add(ExternalReference::isolate_address(isolate).address(), "isolate");
  Add(ExternalReference::address_of_negative_infinity().address(),
      "LDoubleConstant::negative_infinity");
  Add(ExternalReference::power_double_double_function(isolate).address(),
      "power_double_double_function");
  Add(ExternalReference::power_double_int_function(isolate).address(),
      "power_double_int_function");
  Add(ExternalReference::math_log_double_function(isolate).address(),
      "std::log");
  Add(ExternalReference::store_buffer_top(isolate).address(),
      "store_buffer_top");
  Add(ExternalReference::address_of_canonical_non_hole_nan().address(),
      "canonical_nan");
  Add(ExternalReference::address_of_the_hole_nan().address(), "the_hole_nan");
  Add(ExternalReference::get_date_field_function(isolate).address(),
      "JSDate::GetField");
  Add(ExternalReference::date_cache_stamp(isolate).address(),
      "date_cache_stamp");
  Add(ExternalReference::address_of_pending_message_obj(isolate).address(),
      "address_of_pending_message_obj");
  Add(ExternalReference::address_of_has_pending_message(isolate).address(),
      "address_of_has_pending_message");
  Add(ExternalReference::address_of_pending_message_script(isolate).address(),
      "pending_message_script");
  Add(ExternalReference::get_make_code_young_function(isolate).address(),
      "Code::MakeCodeYoung");
  Add(ExternalReference::cpu_features().address(), "cpu_features");
  Add(ExternalReference(Runtime::kAllocateInNewSpace, isolate).address(),
      "Runtime::AllocateInNewSpace");
  Add(ExternalReference(Runtime::kAllocateInTargetSpace, isolate).address(),
      "Runtime::AllocateInTargetSpace");
  Add(ExternalReference::old_pointer_space_allocation_top_address(isolate)
          .address(),
      "Heap::OldPointerSpaceAllocationTopAddress");
  Add(ExternalReference::old_pointer_space_allocation_limit_address(isolate)
          .address(),
      "Heap::OldPointerSpaceAllocationLimitAddress");
  Add(ExternalReference::old_data_space_allocation_top_address(isolate)
          .address(),
      "Heap::OldDataSpaceAllocationTopAddress");
  Add(ExternalReference::old_data_space_allocation_limit_address(isolate)
          .address(),
      "Heap::OldDataSpaceAllocationLimitAddress");
  Add(ExternalReference::allocation_sites_list_address(isolate).address(),
      "Heap::allocation_sites_list_address()");
  Add(ExternalReference::address_of_uint32_bias().address(), "uint32_bias");
  Add(ExternalReference::get_mark_code_as_executed_function(isolate).address(),
      "Code::MarkCodeAsExecuted");
  Add(ExternalReference::is_profiling_address(isolate).address(),
      "CpuProfiler::is_profiling");
  Add(ExternalReference::scheduled_exception_address(isolate).address(),
      "Isolate::scheduled_exception");
  Add(ExternalReference::invoke_function_callback(isolate).address(),
      "InvokeFunctionCallback");
  Add(ExternalReference::invoke_accessor_getter_callback(isolate).address(),
      "InvokeAccessorGetterCallback");
  Add(ExternalReference::flush_icache_function(isolate).address(),
      "CpuFeatures::FlushICache");
  Add(ExternalReference::log_enter_external_function(isolate).address(),
      "Logger::EnterExternal");
  Add(ExternalReference::log_leave_external_function(isolate).address(),
      "Logger::LeaveExternal");
  Add(ExternalReference::address_of_minus_one_half().address(),
      "double_constants.minus_one_half");
  Add(ExternalReference::stress_deopt_count(isolate).address(),
      "Isolate::stress_deopt_count_address()");
  Add(ExternalReference::incremental_marking_record_write_function(isolate)
          .address(),
      "IncrementalMarking::RecordWriteFromCode");

  // Debug addresses
  Add(ExternalReference::debug_after_break_target_address(isolate).address(),
      "Debug::after_break_target_address()");
  Add(ExternalReference::debug_restarter_frame_function_pointer_address(
          isolate).address(),
      "Debug::restarter_frame_function_pointer_address()");
  Add(ExternalReference::debug_is_active_address(isolate).address(),
      "Debug::is_active_address()");

#ifndef V8_INTERPRETED_REGEXP
  Add(ExternalReference::re_case_insensitive_compare_uc16(isolate).address(),
      "NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16()");
  Add(ExternalReference::re_check_stack_guard_state(isolate).address(),
      "RegExpMacroAssembler*::CheckStackGuardState()");
  Add(ExternalReference::re_grow_stack(isolate).address(),
      "NativeRegExpMacroAssembler::GrowStack()");
  Add(ExternalReference::re_word_character_map().address(),
      "NativeRegExpMacroAssembler::word_character_map");
  Add(ExternalReference::address_of_regexp_stack_limit(isolate).address(),
      "RegExpStack::limit_address()");
  Add(ExternalReference::address_of_regexp_stack_memory_address(isolate)
          .address(),
      "RegExpStack::memory_address()");
  Add(ExternalReference::address_of_regexp_stack_memory_size(isolate).address(),
      "RegExpStack::memory_size()");
  Add(ExternalReference::address_of_static_offsets_vector(isolate).address(),
      "OffsetsVector::static_offsets_vector");
#endif  // V8_INTERPRETED_REGEXP
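// Every entry above funnels through the same two-argument Add(), which is
// assumed (from the four-argument overload declared on this class) to tag the
// reference as UNCLASSIFIED, assign it the next free id, append an
// ExternalReferenceEntry to refs_, and bump max_id_ for that type. A minimal
// sketch of that forwarding, not the authoritative implementation:
//
//   void Add(Address address, const char* name) {
//     Add(address, UNCLASSIFIED, ++max_id_[UNCLASSIFIED], name);
//   }
//
// The (type, id) pair is what EncodeExternal() above packs into the 32-bit
// key written into the snapshot in place of the raw address.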
  struct RefTableEntry {
    TypeCode type;
    uint16_t id;
    const char* name;
  };

  static const RefTableEntry ref_table[] = {
  // Builtins
#define DEF_ENTRY_C(name, ignored) \
  { C_BUILTIN, Builtins::c_##name, "Builtins::" #name },
  BUILTIN_LIST_C(DEF_ENTRY_C)
#undef DEF_ENTRY_C

#define DEF_ENTRY_C(name, ignored) \
  { BUILTIN, Builtins::k##name, "Builtins::" #name },
#define DEF_ENTRY_A(name, kind, state, extra) DEF_ENTRY_C(name, ignored)
  BUILTIN_LIST_C(DEF_ENTRY_C)
  BUILTIN_LIST_A(DEF_ENTRY_A)
  BUILTIN_LIST_DEBUG_A(DEF_ENTRY_A)
#undef DEF_ENTRY_C
#undef DEF_ENTRY_A

  // Runtime functions
#define RUNTIME_ENTRY(name, nargs, ressize) \
  { RUNTIME_FUNCTION, Runtime::k##name, "Runtime::" #name },
  RUNTIME_FUNCTION_LIST(RUNTIME_ENTRY)
#undef RUNTIME_ENTRY

#define INLINE_OPTIMIZED_ENTRY(name, nargs, ressize)   \
  { RUNTIME_FUNCTION, Runtime::kInlineOptimized##name, \
    "Runtime::" #name "_InlineOptimized" },
  INLINE_OPTIMIZED_FUNCTION_LIST(INLINE_OPTIMIZED_ENTRY)
#undef INLINE_OPTIMIZED_ENTRY

  // IC utilities
#define IC_ENTRY(name) { IC_UTILITY, IC::k##name, "IC::" #name },
  IC_UTIL_LIST(IC_ENTRY)
#undef IC_ENTRY
  };
  // Stat counters
  struct StatsRefTableEntry {
    StatsCounter* (Counters::*counter)();
    uint16_t id;
    const char* name;
  };

  const StatsRefTableEntry stats_ref_table[] = {
#define COUNTER_ENTRY(name, caption) \
  { &Counters::name, Counters::k_##name, "Counters::" #name },
  STATS_COUNTER_LIST_1(COUNTER_ENTRY)
  STATS_COUNTER_LIST_2(COUNTER_ENTRY)
#undef COUNTER_ENTRY
  };

  Counters* counters = isolate->counters();
  for (size_t i = 0; i < arraysize(stats_ref_table); ++i) {
    Add(reinterpret_cast<Address>(GetInternalPointer(
            (counters->*(stats_ref_table[i].counter))())),
        STATS_COUNTER,
        stats_ref_table[i].id,
        stats_ref_table[i].name);
  }
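// This is where the dummy_counter defined at the top of the file earns its
// keep: GetInternalPointer() hands back the counter's real storage only when
// the counter is enabled, otherwise it falls back to &dummy_counter, so
// generated code always has a valid int* to write through. A sketch under
// that assumption:
//
//   static int* GetInternalPointer(StatsCounter* counter) {
//     static int dummy_counter = 0;
//     return counter->Enabled() ? counter->GetInternalPointer()
//                               : &dummy_counter;
//   }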
  // Top addresses
  const char* AddressNames[] = {
#define BUILD_NAME_LITERAL(CamelName, hacker_name) \
  "Isolate::" #hacker_name "_address",
  FOR_EACH_ISOLATE_ADDRESS_NAME(BUILD_NAME_LITERAL)
  NULL
#undef BUILD_NAME_LITERAL
  };
  // Accessors
#define ACCESSOR_INFO_DECLARATION(name)        \
  Add(FUNCTION_ADDR(&Accessors::name##Getter), \
      ACCESSOR,                                \
      Accessors::k##name##Getter,              \
      "Accessors::" #name "Getter");           \
  Add(FUNCTION_ADDR(&Accessors::name##Setter), \
      ACCESSOR,                                \
      Accessors::k##name##Setter,              \
      "Accessors::" #name "Setter");
  ACCESSOR_INFO_LIST(ACCESSOR_INFO_DECLARATION)
#undef ACCESSOR_INFO_DECLARATION
  Add(ExternalReference::delete_handle_scope_extensions(isolate).address(),
      "HandleScope::DeleteExtensions");
  Add(ExternalReference::incremental_marking_record_write_function(isolate)
          .address(),
      "IncrementalMarking::RecordWrite");
  Add(ExternalReference::store_buffer_overflow_function(isolate).address(),
      "StoreBuffer::StoreBufferOverflow");
}
ExternalReferenceEncoder::ExternalReferenceEncoder(Isolate* isolate)
    : encodings_(HashMap::PointersMatch),
      isolate_(isolate) {
  ExternalReferenceTable* external_references =
      ExternalReferenceTable::instance(isolate_);
  for (int i = 0; i < external_references->size(); ++i) {
    Put(external_references->address(i), i);
  }
}
int ExternalReferenceEncoder::IndexOf(Address key) const {
  if (key == NULL) return -1;
  HashMap::Entry* entry =
      const_cast<HashMap&>(encodings_).Lookup(key, Hash(key), false);
  return entry == NULL
      ? -1
      : static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
}

void ExternalReferenceEncoder::Put(Address key, int index) {
  HashMap::Entry* entry = encodings_.Lookup(key, Hash(key), true);
  entry->value = reinterpret_cast<void*>(index);
}
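// Encode() is the public face of this pair: it maps a raw external address to
// the 32-bit key recorded in the snapshot. A minimal sketch, assuming the
// table's code(i) accessor returns the key EncodeExternal() built for entry i:
//
//   uint32_t ExternalReferenceEncoder::Encode(Address key) const {
//     int index = IndexOf(key);
//     DCHECK(key == NULL || index >= 0);
//     return index >= 0
//         ? ExternalReferenceTable::instance(isolate_)->code(index)
//         : 0;
//   }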
ExternalReferenceDecoder::ExternalReferenceDecoder(Isolate* isolate)
    : encodings_(NewArray<Address*>(kTypeCodeCount)),
      isolate_(isolate) {
  ExternalReferenceTable* external_references =
      ExternalReferenceTable::instance(isolate_);
  for (int type = kFirstTypeCode; type < kTypeCodeCount; ++type) {
    int max = external_references->max_id(type) + 1;
    encodings_[type] = NewArray<Address>(max + 1);
  }
  for (int i = 0; i < external_references->size(); ++i) {
    Put(external_references->code(i), external_references->address(i));
  }
}
  void Insert(Address code_address, const char* name, int name_size) {
    HashMap::Entry* entry = FindOrCreateEntry(code_address);
    if (entry->value == NULL) {
      entry->value = CopyName(name, name_size);
    }
  }

  const char* Lookup(Address code_address) {
    HashMap::Entry* entry = FindEntry(code_address);
    return (entry != NULL) ? static_cast<const char*>(entry->value) : NULL;
  }

  void Remove(Address code_address) {
    HashMap::Entry* entry = FindEntry(code_address);
    if (entry != NULL) {
      DeleteArray(static_cast<char*>(entry->value));
      RemoveEntry(entry);
    }
  }

  void Move(Address from, Address to) {
    if (from == to) return;
    HashMap::Entry* from_entry = FindEntry(from);
    DCHECK(from_entry != NULL);
    void* value = from_entry->value;
    RemoveEntry(from_entry);
    HashMap::Entry* to_entry = FindOrCreateEntry(to);
    DCHECK(to_entry->value == NULL);
    to_entry->value = value;
  }
  static char* CopyName(const char* name, int name_size) {
    char* result = NewArray<char>(name_size + 1);
    for (int i = 0; i < name_size; ++i) {
      char c = name[i];
      if (c == '\0') c = ' ';
      result[i] = c;
    }
    result[name_size] = '\0';
    return result;
  }
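// Note the '\0' -> ' ' substitution: these names are later emitted into a
// line-oriented log, where an embedded NUL inside a copied name would
// truncate the record. Replacing it with a space keeps the stored name the
// declared name_size long. (This reading is inferred from the code above,
// not from an authoritative comment.)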
Deserializer::Deserializer(SnapshotByteSource* source)
    : isolate_(NULL),
      attached_objects_(NULL),
      source_(source),
      external_reference_decoder_(NULL),
      deserialized_large_objects_(0) {
  for (int i = 0; i < kNumberOfSpaces; i++) {
    reservations_[i] = kUninitializedReservation;
  }
}
void Deserializer::FlushICacheForNewCodeObjects() {
  PageIterator it(isolate_->heap()->code_space());
  while (it.has_next()) {
    Page* p = it.next();
    CpuFeatures::FlushICache(p->area_start(), p->area_end() - p->area_start());
  }
}

// In Deserializer::Deserialize(): refresh the data pointers of the external
// strings that hold the natives sources.
  for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
    Object* source = isolate_->heap()->natives_source_cache()->get(i);
    if (!source->IsUndefined()) {
      ExternalOneByteString::cast(source)->update_data_cache();
    }
  }

// In Deserializer::RelinkAllocationSite(): a freshly deserialized site is
// linked in front of the allocation-sites list; with an empty list it points
// at undefined.
  site->set_weak_next(isolate_->heap()->undefined_value());
// StringTableInsertionKey's constructor asserts its input up front:
  DCHECK(string->IsInternalizedString());

// ...and its HashForObject() delegates to the string itself:
  return String::cast(key)->Hash();
// Deserializer::ProcessNewObjectFromSerializedCode(): internalized strings
// must be canonicalized against the live string table.
  if (obj->IsString()) {
    String* string = String::cast(obj);
    if (string->IsInternalizedString()) {
      StringTableInsertionKey key(string);
      String* canonical = *StringTable::LookupKey(isolate_, &key);
      string->SetForwardedInternalizedString(canonical);
      return canonical;
    }
  }

// Deserializer::ProcessBackRefInSerializedCode(): back-references follow the
// forwarding pointer installed above.
  if (obj->IsInternalizedString()) {
    return String::cast(obj)->GetForwardedInternalizedString();
  }
// Deserializer::ReadObject() logs snapshot positions when requested, then
// reads the object body:
  if (FLAG_log_snapshot_positions) {
    LOG(isolate_, SnapshotPositionEvent(address, source_->position()));
  }
  ReadChunk(current, limit, space_number, address);
void Deserializer::ReadChunk(Object** current, Object** limit,
                             int space_number,
                             Address current_object_address) {
  Isolate* const isolate = isolate_;
  bool write_barrier_needed = (current_object_address != NULL &&
                               space_number != NEW_SPACE &&
                               space_number != CELL_SPACE &&
                               space_number != PROPERTY_CELL_SPACE &&
                               space_number != CODE_SPACE &&
                               space_number != OLD_DATA_SPACE);
  while (current < limit) {
    int data = source_->Get();
    switch (data) {
#define CASE_STATEMENT(where, how, within, space_number) \
  case where + how + within + space_number:              \
    STATIC_ASSERT((where & ~kPointedToMask) == 0);       \
    STATIC_ASSERT((how & ~kHowToCodeMask) == 0);         \
    STATIC_ASSERT((within & ~kWhereToPointMask) == 0);   \
    STATIC_ASSERT((space_number & ~kSpaceMask) == 0);
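// Each dispatch byte is a sum of four disjoint bit fields, so a single
// "case where + how + within + space_number:" label matches exactly one
// combination, and the STATIC_ASSERTs guarantee the summands never collide.
// The inverse direction is plain masking: for example the body below
// recovers the space portion of the byte with (data & kSpaceMask).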
#define CASE_BODY(where, how, within, space_number_if_any)                     \
  {                                                                            \
    bool emit_write_barrier = false;                                           \
    bool current_was_incremented = false;                                      \
    int space_number = space_number_if_any == kAnyOldSpace                     \
                           ? (data & kSpaceMask)                               \
                           : space_number_if_any;                              \
    if (where == kNewObject && how == kPlain && within == kStartOfObject) {    \
      ReadObject(space_number, current);                                       \
      emit_write_barrier = (space_number == NEW_SPACE);                        \
    } else {                                                                   \
      Object* new_object = NULL; /* May not be a real Object pointer. */       \
      if (where == kNewObject) {                                               \
        ReadObject(space_number, &new_object);                                 \
      } else if (where == kRootArray) {                                        \
        int root_id = source_->GetInt();                                       \
        new_object = isolate->heap()->roots_array_start()[root_id];            \
        emit_write_barrier = isolate->heap()->InNewSpace(new_object);          \
      } else if (where == kPartialSnapshotCache) {                             \
        int cache_index = source_->GetInt();                                   \
        new_object = isolate->serialize_partial_snapshot_cache()[cache_index]; \
        emit_write_barrier = isolate->heap()->InNewSpace(new_object);          \
      } else if (where == kExternalReference) {                                \
        int skip = source_->GetInt();                                          \
        current = reinterpret_cast<Object**>(                                  \
            reinterpret_cast<Address>(current) + skip);                        \
        int reference_id = source_->GetInt();                                  \
        Address address = external_reference_decoder_->Decode(reference_id);   \
        new_object = reinterpret_cast<Object*>(address);                       \
      } else if (where == kBackref) {                                          \
        emit_write_barrier = (space_number == NEW_SPACE);                      \
        new_object = GetAddressFromEnd(data & kSpaceMask);                     \
        if (deserializing_user_code()) {                                       \
          new_object = ProcessBackRefInSerializedCode(new_object);             \
        }                                                                      \
      } else if (where == kBuiltin) {                                          \
        DCHECK(deserializing_user_code());                                     \
        int builtin_id = source_->GetInt();                                    \
        DCHECK_LE(0, builtin_id);                                              \
        DCHECK_LT(builtin_id, Builtins::builtin_count);                        \
        Builtins::Name name = static_cast<Builtins::Name>(builtin_id);         \
        new_object = isolate->builtins()->builtin(name);                       \
        emit_write_barrier = false;                                            \
      } else if (where == kAttachedReference) {                                \
        DCHECK(deserializing_user_code());                                     \
        int index = source_->GetInt();                                         \
        new_object = *attached_objects_->at(index);                            \
        emit_write_barrier = isolate->heap()->InNewSpace(new_object);          \
      } else {                                                                 \
        DCHECK(where == kBackrefWithSkip);                                     \
        int skip = source_->GetInt();                                          \
        current = reinterpret_cast<Object**>(                                  \
            reinterpret_cast<Address>(current) + skip);                        \
        emit_write_barrier = (space_number == NEW_SPACE);                      \
        new_object = GetAddressFromEnd(data & kSpaceMask);                     \
        if (deserializing_user_code()) {                                       \
          new_object = ProcessBackRefInSerializedCode(new_object);             \
        }                                                                      \
      }                                                                        \
      if (within == kInnerPointer) {                                           \
        if (space_number != CODE_SPACE || new_object->IsCode()) {              \
          Code* new_code_object = reinterpret_cast<Code*>(new_object);         \
          new_object =                                                         \
              reinterpret_cast<Object*>(new_code_object->instruction_start()); \
        } else {                                                               \
          DCHECK(space_number == CODE_SPACE);                                  \
          Cell* cell = Cell::cast(new_object);                                 \
          new_object = reinterpret_cast<Object*>(cell->ValueAddress());        \
        }                                                                      \
      }                                                                        \
      if (how == kFromCode) {                                                  \
        Address location_of_branch_data = reinterpret_cast<Address>(current);  \
        Assembler::deserialization_set_special_target_at(                      \
            location_of_branch_data,                                           \
            Code::cast(HeapObject::FromAddress(current_object_address)),       \
            reinterpret_cast<Address>(new_object));                            \
        location_of_branch_data += Assembler::kSpecialTargetSize;              \
        current = reinterpret_cast<Object**>(location_of_branch_data);         \
        current_was_incremented = true;                                        \
      } else {                                                                 \
        *current = new_object;                                                 \
      }                                                                        \
    }                                                                          \
    if (emit_write_barrier && write_barrier_needed) {                          \
      Address current_address = reinterpret_cast<Address>(current);            \
      isolate->heap()->RecordWrite(                                            \
          current_object_address,                                              \
          static_cast<int>(current_address - current_object_address));         \
    }                                                                          \
    if (!current_was_incremented) {                                            \
      current++;                                                               \
    }                                                                          \
    break;                                                                     \
  }
#define ALL_SPACES(where, how, within)                    \
  CASE_STATEMENT(where, how, within, NEW_SPACE)           \
  CASE_BODY(where, how, within, NEW_SPACE)                \
  CASE_STATEMENT(where, how, within, OLD_DATA_SPACE)      \
  CASE_STATEMENT(where, how, within, OLD_POINTER_SPACE)   \
  CASE_STATEMENT(where, how, within, CODE_SPACE)          \
  CASE_STATEMENT(where, how, within, MAP_SPACE)           \
  CASE_STATEMENT(where, how, within, CELL_SPACE)          \
  CASE_STATEMENT(where, how, within, PROPERTY_CELL_SPACE) \
  CASE_STATEMENT(where, how, within, LO_SPACE)            \
  CASE_BODY(where, how, within, kAnyOldSpace)
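// Note the asymmetry: NEW_SPACE gets its own CASE_BODY, while the remaining
// spaces share one. The bodiless CASE_STATEMENTs fall through to the final
// CASE_BODY invoked with kAnyOldSpace, which recovers the actual space at
// runtime from the dispatch byte via (data & kSpaceMask). This trades a few
// extra switch labels for one copy of the (large) body per group instead of
// eight.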
#define FOUR_CASES(byte_code) \
  case byte_code:             \
  case byte_code + 1:         \
  case byte_code + 2:         \
  case byte_code + 3:

#define SIXTEEN_CASES(byte_code) \
  FOUR_CASES(byte_code)          \
  FOUR_CASES(byte_code + 4)      \
  FOUR_CASES(byte_code + 8)      \
  FOUR_CASES(byte_code + 12)
// COMMON_RAW_LENGTHS(f) expands f(n) once for every fixed raw-data length
// that gets its own dispatch byte; the expansion list is elided in this
// listing.
#define COMMON_RAW_LENGTHS(f) \
#define RAW_CASE(index)                                                  \
  case kRawData + index: {                                               \
    byte* raw_data_out = reinterpret_cast<byte*>(current);               \
    source_->CopyRaw(raw_data_out, index * kPointerSize);                \
    current =                                                            \
        reinterpret_cast<Object**>(raw_data_out + index * kPointerSize); \
    break;                                                               \
  }
      COMMON_RAW_LENGTHS(RAW_CASE)
#undef RAW_CASE
      // Deserialize raw data of variable length.
      case kRawData: {
        int size = source_->GetInt();
        byte* raw_data_out = reinterpret_cast<byte*>(current);
        source_->CopyRaw(raw_data_out, size);
        break;
      }

      // Root array constants, without a skip distance...
      {
        int root_id = RootArrayConstantFromByteCode(data);
        Object* object = isolate->heap()->roots_array_start()[root_id];
        DCHECK(!isolate->heap()->InNewSpace(object));
        *current++ = object;
        break;
      }

      // ...and with an explicit skip distance.
      {
        int root_id = RootArrayConstantFromByteCode(data);
        int skip = source_->GetInt();
        current = reinterpret_cast<Object**>(
            reinterpret_cast<intptr_t>(current) + skip);
        Object* object = isolate->heap()->roots_array_start()[root_id];
        DCHECK(!isolate->heap()->InNewSpace(object));
        *current++ = object;
        break;
      }
      case kRepeat: {
        int repeats = source_->GetInt();
        Object* object = current[-1];
        DCHECK(!isolate->heap()->InNewSpace(object));
        for (int i = 0; i < repeats; i++) current[i] = object;
        current += repeats;
        break;
      }

      // kConstantRepeat byte codes encode the repeat count in the code itself
      // (see RepeatsForCode / CodeForRepeats):
      {
        int repeats = RepeatsForCode(data);
        Object* object = current[-1];
        DCHECK(!isolate->heap()->InNewSpace(object));
        for (int i = 0; i < repeats; i++) current[i] = object;
        current += repeats;
        break;
      }
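// This is a simple run-length encoding for pointer slots: fixed arrays are
// full of runs of the same value (undefined, the hole), so the serializer
// emits the value once and then a repeat marker. Decoding costs one GetInt
// (or nothing, for constant-repeat codes) plus a fill loop; a FixedArray
// holding 100 copies of undefined costs one pointer record plus a one-byte
// marker and a count, rather than 100 pointer records.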
#if defined(V8_TARGET_ARCH_MIPS) || V8_OOL_CONSTANT_POOL || \
    defined(V8_TARGET_ARCH_MIPS64)
      // ... architecture-specific case, body elided in this listing ...
#endif

#if defined(V8_TARGET_ARCH_MIPS) || V8_OOL_CONSTANT_POOL || \
    defined(V8_TARGET_ARCH_MIPS64)
      // ... architecture-specific case, body elided in this listing ...
#endif

#if V8_OOL_CONSTANT_POOL
      // ... out-of-line constant pool case, body elided ...
#endif
#undef CASE_STATEMENT
#undef CASE_BODY
#undef ALL_SPACES
      case kSkip: {
        int size = source_->GetInt();
        current = reinterpret_cast<Object**>(
            reinterpret_cast<intptr_t>(current) + size);
        break;
      }
      case kNativesStringResource: {
        int index = source_->Get();
        Vector<const char> source_vector = Natives::GetRawScriptSource(index);
        NativesExternalStringResource* resource =
            new NativesExternalStringResource(isolate->bootstrapper(),
                                              source_vector.start(),
                                              source_vector.length());
        *current++ = reinterpret_cast<Object*>(resource);
        break;
      }
Serializer::Serializer(Isolate* isolate, SnapshotByteSink* sink)
    : isolate_(isolate),
      sink_(sink),
      external_reference_encoder_(new ExternalReferenceEncoder(isolate)),
      root_index_wave_front_(0),
      code_address_map_(NULL),
      seen_large_objects_index_(0) {
// PartialSerializer::Serialize() kicks off serialization from a single root:
  this->VisitPointer(object);

// Serializer::ShouldBeSkipped() filters roots that are mutated at runtime and
// must not be baked into the snapshot:
  Object** roots = isolate()->heap()->roots_array_start();
  return current == &roots[Heap::kStoreBufferTopRootIndex]
      || current == &roots[Heap::kStackLimitRootIndex]
      || current == &roots[Heap::kRealStackLimitRootIndex];
void Serializer::VisitPointers(Object** start, Object** end) {
  Isolate* isolate = this->isolate();
  for (Object** current = start; current < end; current++) {
    if (start == isolate->heap()->roots_array_start()) {
      root_index_wave_front_ =
          Max(root_index_wave_front_, static_cast<intptr_t>(current - start));
    }
    if (ShouldBeSkipped(current)) {
      sink_->Put(kSkip, "Skip");
      sink_->PutInt(kPointerSize, "SkipDistance");
    } else if ((*current)->IsSmi()) {
      sink_->Put(kRawData + 1, "Smi");
      for (int i = 0; i < kPointerSize; i++) {
        sink_->Put(reinterpret_cast<byte*>(current)[i], "Byte");
      }
    } else {
      SerializeObject(*current, kPlain, kStartOfObject, 0);
    }
  }
}
void SerializerDeserializer::Iterate(Isolate* isolate, ObjectVisitor* visitor) {
  if (isolate->serializer_enabled()) return;
  for (int i = 0; ; i++) {
    if (isolate->serialize_partial_snapshot_cache_length() <= i) {
      // Extend the array ready to get a value from the visitor when
      // deserializing.
      isolate->PushToPartialSnapshotCache(Smi::FromInt(0));
    }
    Object** cache = isolate->serialize_partial_snapshot_cache();
    visitor->VisitPointers(&cache[i], &cache[i + 1]);
    // Sentinel is the undefined object, which is a root so it will not be
    // serialized by default when reaching this point.
    if (cache[i] == isolate->heap()->undefined_value()) {
      break;
    }
  }
}
int PartialSerializer::PartialSnapshotCacheIndex(HeapObject* heap_object) {
  Isolate* isolate = this->isolate();
  for (int i = 0;
       i < isolate->serialize_partial_snapshot_cache_length();
       i++) {
    Object* entry = isolate->serialize_partial_snapshot_cache()[i];
    if (entry == heap_object) return i;
  }
  // Not found: add it to the cache and visit it from the startup serializer
  // so it becomes part of the startup snapshot.
  int length = isolate->serialize_partial_snapshot_cache_length();
  isolate->PushToPartialSnapshotCache(heap_object);
  startup_serializer_->VisitPointer(reinterpret_cast<Object**>(&heap_object));
  DCHECK(length == isolate->serialize_partial_snapshot_cache_length() - 1);
  return length;
}
// Serializer::RootIndex() scans the root list for the object:
    if (!root->IsSmi() && root == heap_object) {

// Serializer::SerializeReferenceToPreviousObject() emits a back-reference,
// optionally preceded by a skip distance:
      sink_->Put(kBackrefWithSkip + how_to_code + where_to_point + space,
                 "BackRefSerWithSkip");
// StartupSerializer::SerializeObject():
  CHECK(o->IsHeapObject());
  HeapObject* heap_object = HeapObject::cast(o);
  DCHECK(!heap_object->IsJSFunction());
  if ((root_index = RootIndex(heap_object, how_to_code)) != kInvalidRootIndex) {
    PutRoot(root_index, heap_object, how_to_code, where_to_point, skip);
    return;
  }

// StartupSerializer::SerializeWeakReferences() ends the partial snapshot
// cache with the undefined sentinel:
  VisitPointer(&undefined);
// Serializer::PutRoot(): small root indices get a compact constant encoding
// when pointed to plainly and not in new space.
  if (how_to_code == kPlain &&
      where_to_point == kStartOfObject &&
      root_index < kRootArrayNumberOfConstantEncodings &&
      !isolate()->heap()->InNewSpace(object)) {
// PartialSerializer::SerializeObject():
  CHECK(o->IsHeapObject());
  HeapObject* heap_object = HeapObject::cast(o);

  if (heap_object->IsMap()) {
    // The code-caches link to context-specific code objects, which the
    // startup and context serializers cannot currently handle.
    DCHECK(Map::cast(heap_object)->code_cache() ==
           heap_object->GetHeap()->empty_fixed_array());
  }

  int root_index;
  if ((root_index = RootIndex(heap_object, how_to_code)) != kInvalidRootIndex) {
    PutRoot(root_index, heap_object, how_to_code, where_to_point, skip);
    return;
  }
  if (skip != 0) {
    sink_->Put(kSkip, "SkipFromSerializeObject");
    sink_->PutInt(skip, "SkipDistanceFromSerializeObject");
  }

  int cache_index = PartialSnapshotCacheIndex(heap_object);
  sink_->Put(kPartialSnapshotCache + how_to_code + where_to_point,
             "PartialSnapshotCache");
  sink_->PutInt(cache_index, "partial_snapshot_cache_index");
  DCHECK(!heap_object->IsInternalizedString());
  if (skip != 0) {
    sink_->Put(kSkip, "SkipFromSerializeObject");
    sink_->PutInt(skip, "SkipDistanceFromSerializeObject");
  }

// Serializer::ObjectSerializer::Serialize() tags each new-object record:
  sink_->Put(kNewObject + reference_representation_ + space,
             "ObjectSerialization");
  const char* code_name =
      serializer_->code_address_map_->Lookup(object_->address());
void Serializer::ObjectSerializer::VisitPointers(Object** start,
                                                 Object** end) {
  Object** current = start;
  while (current < end) {
    while (current < end && (*current)->IsSmi()) current++;
    if (current < end) OutputRawData(reinterpret_cast<Address>(current));

    while (current < end && !(*current)->IsSmi()) {
      HeapObject* current_contents = HeapObject::cast(*current);
      int root_index = serializer_->RootIndex(current_contents, kPlain);
      // Repeats are not subject to the write barrier so there are only
      // root-repeats.
      if (current != start &&
          root_index != kInvalidRootIndex &&
          root_index < kRootArrayNumberOfConstantEncodings &&
          current_contents == current[-1]) {
        DCHECK(!serializer_->isolate()->heap()->InNewSpace(current_contents));
        int repeat_count = 1;
        while (&current[repeat_count] < end - 1 &&
               current[repeat_count] == current_contents) {
          repeat_count++;
        }
        current += repeat_count;
        bytes_processed_so_far_ += repeat_count * kPointerSize;
        if (repeat_count > kMaxRepeats) {
          sink_->Put(kRepeat, "SerializeRepeats");
          sink_->PutInt(repeat_count, "SerializeRepeats");
        } else {
          sink_->Put(CodeForRepeats(repeat_count), "SerializeRepeats");
        }
      } else {
        serializer_->SerializeObject(
            current_contents, kPlain, kStartOfObject, 0);
        bytes_processed_so_far_ += kPointerSize;
        current++;
      }
    }
  }
}
void Serializer::ObjectSerializer::VisitEmbeddedPointer(RelocInfo* rinfo) {
  int skip = OutputRawData(rinfo->target_address_address(),
                           kCanReturnSkipInsteadOfSkipping);
  HowToCode how_to_code = rinfo->IsCodedSpecially() ? kFromCode : kPlain;
  Object* object = rinfo->target_object();
  serializer_->SerializeObject(object, how_to_code, kStartOfObject, skip);
  bytes_processed_so_far_ += rinfo->target_address_size();
}
void Serializer::ObjectSerializer::VisitExternalReference(Address* p) {
  int skip = OutputRawData(reinterpret_cast<Address>(p),
                           kCanReturnSkipInsteadOfSkipping);
  sink_->Put(kExternalReference + kPlain + kStartOfObject, "ExternalRef");
  sink_->PutInt(skip, "SkipB4ExternalRef");
  Address target = *p;
  sink_->PutInt(serializer_->EncodeExternalReference(target), "reference id");
  bytes_processed_so_far_ += kPointerSize;
}
void Serializer::ObjectSerializer::VisitExternalReference(RelocInfo* rinfo) {
  int skip = OutputRawData(rinfo->target_address_address(),
                           kCanReturnSkipInsteadOfSkipping);
  HowToCode how_to_code = rinfo->IsCodedSpecially() ? kFromCode : kPlain;
  sink_->Put(kExternalReference + how_to_code + kStartOfObject, "ExternalRef");
  sink_->PutInt(skip, "SkipB4ExternalRef");
  Address target = rinfo->target_reference();
  sink_->PutInt(serializer_->EncodeExternalReference(target), "reference id");
  bytes_processed_so_far_ += rinfo->target_address_size();
}
void Serializer::ObjectSerializer::VisitRuntimeEntry(RelocInfo* rinfo) {
  int skip = OutputRawData(rinfo->target_address_address(),
                           kCanReturnSkipInsteadOfSkipping);
  HowToCode how_to_code = rinfo->IsCodedSpecially() ? kFromCode : kPlain;
  sink_->Put(kExternalReference + how_to_code + kStartOfObject, "ExternalRef");
  sink_->PutInt(skip, "SkipB4ExternalRef");
  Address target = rinfo->target_address();
  sink_->PutInt(serializer_->EncodeExternalReference(target), "reference id");
  bytes_processed_so_far_ += rinfo->target_address_size();
}
void Serializer::ObjectSerializer::VisitCodeTarget(RelocInfo* rinfo) {
  int skip = OutputRawData(rinfo->target_address_address(),
                           kCanReturnSkipInsteadOfSkipping);
  Code* object = Code::GetCodeFromTargetAddress(rinfo->target_address());
  serializer_->SerializeObject(object, kFromCode, kInnerPointer, skip);
  bytes_processed_so_far_ += rinfo->target_address_size();
}

void Serializer::ObjectSerializer::VisitCodeEntry(Address entry_address) {
  int skip = OutputRawData(entry_address, kCanReturnSkipInsteadOfSkipping);
  // ...
}
void Serializer::ObjectSerializer::VisitCell(RelocInfo* rinfo) {
  int skip = OutputRawData(rinfo->pc(), kCanReturnSkipInsteadOfSkipping);
  Cell* object = Cell::cast(rinfo->target_cell());
  serializer_->SerializeObject(object, kPlain, kInnerPointer, skip);
}
void Serializer::ObjectSerializer::VisitExternalOneByteString(
    v8::String::ExternalOneByteStringResource** resource_pointer) {
  Address references_start = reinterpret_cast<Address>(resource_pointer);
  OutputRawData(references_start);
  for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
    Object* source =
        serializer_->isolate()->heap()->natives_source_cache()->get(i);
    if (!source->IsUndefined()) {
      ExternalOneByteString* string = ExternalOneByteString::cast(source);
      typedef v8::String::ExternalOneByteStringResource Resource;
      const Resource* resource = string->resource();
      if (resource == *resource_pointer) {
        sink_->Put(kNativesStringResource, "NativesStringResource");
        sink_->PutSection(i, "NativesStringResourceEnd");
        bytes_processed_so_far_ += sizeof(resource);
        return;
      }
    }
  }
  // One of the strings in the natives cache should match the resource; other
  // kinds of external strings cannot be serialized.
  UNREACHABLE();
}
// WipeOutRelocations(): constant-pool entries are left intact.
    if (!(FLAG_enable_ool_constant_pool && it.rinfo()->IsInConstantPool())) {
      it.rinfo()->WipeOut();
    }
int Serializer::ObjectSerializer::OutputRawData(
    Address up_to, Serializer::ObjectSerializer::ReturnSkip return_skip) {
  Address object_start = object_->address();
  int base = bytes_processed_so_far_;
  int up_to_offset = static_cast<int>(up_to - object_start);
  int to_skip = up_to_offset - bytes_processed_so_far_;
  int bytes_to_output = to_skip;
  bytes_processed_so_far_ += to_skip;
  DCHECK(to_skip >= 0);
  bool outputting_code = false;
  if (to_skip != 0 && code_object_ && !code_has_been_output_) {
    // Output the code all at once and fix it up later.
    bytes_to_output = object_->Size() + to_skip - bytes_processed_so_far_;
    outputting_code = true;
    code_has_been_output_ = true;
  }
  if (bytes_to_output != 0 &&
      (!code_object_ || outputting_code)) {
#define RAW_CASE(index)                                                \
    if (!outputting_code && bytes_to_output == index * kPointerSize && \
        index * kPointerSize == to_skip) {                             \
      sink_->PutSection(kRawData + index, "RawDataFixed");             \
      to_skip = 0;  /* This ensures we never skip anything. */         \
    } else  /* NOLINT */
    COMMON_RAW_LENGTHS(RAW_CASE)
#undef RAW_CASE
    {  /* NOLINT */
      sink_->Put(kRawData, "RawData");
      sink_->PutInt(bytes_to_output, "length");
    }

    // To make snapshots reproducible, wipe out the pointers in a code object
    // before emitting its bytes.
    if (code_object_) {
      Code* code = CloneCodeObject(object_);
      WipeOutRelocations(code);
      // The header fields are wiped *after* the relocations, since some of
      // them are needed to process the relocations.
      code->WipeOutHeader();
      object_start = code->address();
    }

    const char* description = code_object_ ? "Code" : "Byte";
    for (int i = 0; i < bytes_to_output; i++) {
      sink_->Put(object_start[base + i], description);
    }
    if (code_object_) delete[] object_start;
  }
  if (to_skip != 0 && return_skip == kIgnoringReturn) {
    sink_->Put(kSkip, "Skip");
    sink_->PutInt(to_skip, "SkipDistance");
    to_skip = 0;
  }
  return to_skip;
}
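// The return-value protocol here is worth spelling out: with
// kCanReturnSkipInsteadOfSkipping the caller gets the skip distance back and
// folds it into its own record (as the kSkip/kExternalReference byte codes
// above do), saving a separate kSkip record; with kIgnoringReturn the skip is
// emitted inline and 0 is returned. Either way bytes_processed_so_far_ ends
// up at up_to, so raw spans and tagged records stay contiguous in the stream.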
// Serializer::Allocate() bumps the per-space fullness counter and returns the
// previous value as the object's offset within that snapshot space:
  return allocation_address;
void Serializer::Pad() {
  // The non-branching GetInt will read up to 3 bytes too far, so pad the
  // snapshot to make sure we don't read over the end.
  for (unsigned i = 0; i < sizeof(int32_t) - 1; i++) {
    sink_->PutSection(kNop, "Padding");
  }
}
ScriptData* CodeSerializer::Serialize(Isolate* isolate,
                                      Handle<SharedFunctionInfo> info,
                                      Handle<String> source) {
  base::ElapsedTimer timer;
  if (FLAG_profile_deserialization) timer.Start();
  // ... set up the sink and a CodeSerializer cs over it ...
  Object** location = Handle<Object>::cast(info).location();
  cs.VisitPointer(location);
  cs.Pad();
  // ... wrap the sink's payload into a ScriptData* script_data ...
  if (FLAG_profile_deserialization) {
    double ms = timer.Elapsed().InMillisecondsF();
    int length = script_data->length();
    PrintF("[Serializing to %d bytes took %0.3f ms]\n", length, ms);
  }
  return script_data;
}
void CodeSerializer::SerializeObject(Object* o, HowToCode how_to_code,
                                     WhereToPoint where_to_point, int skip) {
  CHECK(o->IsHeapObject());
  HeapObject* heap_object = HeapObject::cast(o);

  int root_index;
  if ((root_index = RootIndex(heap_object, how_to_code)) != kInvalidRootIndex) {
    PutRoot(root_index, heap_object, how_to_code, where_to_point, skip);
    return;
  }

  if (skip != 0) {
    sink_->Put(kSkip, "SkipFromSerializeObject");
    sink_->PutInt(skip, "SkipDistanceFromSerializeObject");
  }

  if (heap_object->IsCode()) {
    Code* code_object = Code::cast(heap_object);
    switch (code_object->kind()) {
      case Code::OPTIMIZED_FUNCTION:  // No optimized code compiled yet.
        CHECK(false);
      case Code::BUILTIN:
        SerializeBuiltin(code_object, how_to_code, where_to_point);
        return;
      case Code::STUB:
        SerializeCodeStub(code_object, how_to_code, where_to_point);
        return;
#define IC_KIND_CASE(KIND) case Code::KIND:
        IC_KIND_LIST(IC_KIND_CASE)
#undef IC_KIND_CASE
        SerializeHeapObject(code_object, how_to_code, where_to_point);
        return;
      case Code::FUNCTION:
        // Only serialize the code of the toplevel function; replace included
        // function literals with the lazy-compile builtin.
        if (code_object != main_code_) {
          Code* lazy = *isolate()->builtins()->CompileLazy();
          SerializeBuiltin(lazy, how_to_code, where_to_point);
        } else {
          SerializeHeapObject(code_object, how_to_code, where_to_point);
        }
        return;
    }
  }

  if (heap_object == source_) {
    SerializeSourceObject(how_to_code, where_to_point);
    return;
  }

  // Context-specific objects cannot be handled here.
  CHECK(!heap_object->IsMap());
  CHECK(!heap_object->IsJSGlobalProxy() && !heap_object->IsGlobalObject());
  CHECK(!heap_object->IsHashTable());  // Would require rehashing.

  SerializeHeapObject(heap_object, how_to_code, where_to_point);
}
void CodeSerializer::SerializeHeapObject(HeapObject* heap_object,
                                         HowToCode how_to_code,
                                         WhereToPoint where_to_point) {
  if (heap_object->IsScript()) {
    // The wrapper cache is not serializable.
    Script::cast(heap_object)->ClearWrapperCache();
  }

  if (FLAG_trace_code_serializer) {
    PrintF("Encoding heap object: ");
    heap_object->ShortPrint();
    PrintF("\n");
  }
// CodeSerializer::SerializeBuiltin():
  if (FLAG_trace_code_serializer) {
    PrintF("Encoding builtin: %s\n",
           isolate()->builtins()->name(builtin_index));
  }
// CodeSerializer::SerializeCodeStub():
  DCHECK(CodeStub::MajorKeyFromKey(stub_key) != CodeStub::NoCache);

  int index = AddCodeStubKey(stub_key) + kCodeStubsBaseIndex;

  if (FLAG_trace_code_serializer) {
    PrintF("Encoding code stub %s as %d\n",
           CodeStub::MajorName(CodeStub::MajorKeyFromKey(stub_key), false),
           index);
  }
int CodeSerializer::AddCodeStubKey(uint32_t stub_key) {
  int index = 0;
  while (index < stub_keys_.length()) {
    if (stub_keys_[index] == stub_key) return index;
    index++;
  }
  stub_keys_.Add(stub_key);
  return index;
}
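// Each distinct stub key therefore gets a stable small index, and the linear
// scan preserves insertion order, which matters: Deserialize() below rebuilds
// the attached-objects vector by regenerating CodeStub::GetCode() for the
// keys in exactly this order, so index i on the writer side and index
// i + kCodeStubsBaseIndex on the reader side name the same stub.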
void CodeSerializer::SerializeSourceObject(HowToCode how_to_code,
                                           WhereToPoint where_to_point) {
  if (FLAG_trace_code_serializer) PrintF("Encoding source object\n");

  DCHECK(how_to_code == kPlain && where_to_point == kStartOfObject);
  sink_->Put(kAttachedReference + how_to_code + where_to_point, "Source");
  sink_->PutInt(kSourceObjectIndex, "kSourceObjectIndex");
}
Handle<SharedFunctionInfo> CodeSerializer::Deserialize(Isolate* isolate,
                                                       ScriptData* data,
                                                       Handle<String> source) {
  base::ElapsedTimer timer;
  if (FLAG_profile_deserialization) timer.Start();

  Object* root;
  {
    HandleScope scope(isolate);

    SerializedCodeData scd(data, *source);
    SnapshotByteSource payload(scd.Payload(), scd.PayloadLength());
    Deserializer deserializer(&payload);
    for (int i = NEW_SPACE; i < kNumberOfSpaces; i++) {
      deserializer.set_reservation(i, scd.GetReservation(i));
    }

    // Prepare and register the list of attached objects (the source string
    // and the code stubs).
    Vector<const uint32_t> code_stub_keys = scd.CodeStubKeys();
    Vector<Handle<Object> > attached_objects = Vector<Handle<Object> >::New(
        code_stub_keys.length() + kCodeStubsBaseIndex);
    attached_objects[kSourceObjectIndex] = source;
    for (int i = 0; i < code_stub_keys.length(); i++) {
      attached_objects[i + kCodeStubsBaseIndex] =
          CodeStub::GetCode(isolate, code_stub_keys[i]).ToHandleChecked();
    }
    deserializer.SetAttachedObjects(&attached_objects);

    // Deserialize.
    deserializer.DeserializePartial(isolate, &root);
    deserializer.FlushICacheForNewCodeObjects();
  }

  if (FLAG_profile_deserialization) {
    double ms = timer.Elapsed().InMillisecondsF();
    int length = data->length();
    PrintF("[Deserializing from %d bytes took %0.3f ms]\n", length, ms);
  }
  return Handle<SharedFunctionInfo>(SharedFunctionInfo::cast(root), isolate);
}
SerializedCodeData::SerializedCodeData(List<byte>* payload, CodeSerializer* cs)
    : owns_script_data_(true) {
  List<uint32_t>* stub_keys = cs->stub_keys();

  // Calculate sizes.
  int num_stub_keys = stub_keys->length();
  int stub_keys_size = stub_keys->length() * kInt32Size;
  int data_length = kHeaderSize + stub_keys_size + payload->length();

  // Allocate backing store and create result data.
  byte* data = NewArray<byte>(data_length);
  script_data_ = new ScriptData(data, data_length);
  script_data_->AcquireDataOwnership();

  // Set header values, then copy the stub keys and the payload.
  SetHeaderValue(kCheckSumOffset, CheckSum(cs->source()));
  SetHeaderValue(kNumCodeStubKeysOffset, num_stub_keys);
  SetHeaderValue(kPayloadLengthOffset, payload->length());
  for (int i = 0; i < kNumberOfSpaces; i++) {
    SetHeaderValue(kReservationsOffset + i, cs->CurrentAllocationAddress(i));
  }
  CopyBytes(data + kHeaderSize, reinterpret_cast<byte*>(stub_keys->begin()),
            stub_keys_size);
  CopyBytes(data + kHeaderSize + stub_keys_size, payload->begin(),
            static_cast<size_t>(payload->length()));
}
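// Putting the pieces together, the on-disk layout implied by the offsets used
// above is (a reading of this constructor, not an authoritative spec):
//
//   kCheckSumOffset        -- CheckSum(source)
//   kNumCodeStubKeysOffset -- number of code stub keys
//   kPayloadLengthOffset   -- payload size in bytes
//   kReservationsOffset..  -- one reservation slot per space
//   [kHeaderSize...)       -- stub keys (num * kInt32Size bytes), then payload
//
// The exact header slot order is whatever serialize.h assigns to the k*Offset
// constants; only the three regions (header, stub keys, payload) are fixed by
// the arithmetic above.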