12 #ifndef V8_OBJECTS_INL_H_
13 #define V8_OBJECTS_INL_H_
40 PropertyDetails::PropertyDetails(Smi* smi) {
41 value_ = smi->value();
45 Smi* PropertyDetails::AsSmi()
const {
48 int value = value_ << 1;
53 PropertyDetails PropertyDetails::AsDeleted()
const {
54 Smi* smi =
Smi::FromInt(value_ | DeletedField::encode(1));
55 return PropertyDetails(smi);
59 #define TYPE_CHECKER(type, instancetype) \
60 bool Object::Is##type() const { \
61 return Object::IsHeapObject() && \
62 HeapObject::cast(this)->map()->instance_type() == instancetype; \
66 #define CAST_ACCESSOR(type) \
67 type* type::cast(Object* object) { \
68 SLOW_DCHECK(object->Is##type()); \
69 return reinterpret_cast<type*>(object); \
71 const type* type::cast(const Object* object) { \
72 SLOW_DCHECK(object->Is##type()); \
73 return reinterpret_cast<const type*>(object); \
// Defines a pair of inline accessors, holder::name() / holder::set_name(),
// for a raw (untagged) C++ int stored at `offset` bytes into the object.
// No write barrier is emitted because the stored value is not a heap pointer.
#define INT_ACCESSORS(holder, name, offset) \
  int holder::name() const { return READ_INT_FIELD(this, offset); } \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
82 #define ACCESSORS(holder, name, type, offset) \
83 type* holder::name() const { return type::cast(READ_FIELD(this, offset)); } \
84 void holder::set_##name(type* value, WriteBarrierMode mode) { \
85 WRITE_FIELD(this, offset, value); \
86 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
91 #define ACCESSORS_TO_SMI(holder, name, offset) \
92 Smi* holder::name() const { return Smi::cast(READ_FIELD(this, offset)); } \
93 void holder::set_##name(Smi* value, WriteBarrierMode mode) { \
94 WRITE_FIELD(this, offset, value); \
99 #define SMI_ACCESSORS(holder, name, offset) \
100 int holder::name() const { \
101 Object* value = READ_FIELD(this, offset); \
102 return Smi::cast(value)->value(); \
104 void holder::set_##name(int value) { \
105 WRITE_FIELD(this, offset, Smi::FromInt(value)); \
108 #define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset) \
109 int holder::synchronized_##name() const { \
110 Object* value = ACQUIRE_READ_FIELD(this, offset); \
111 return Smi::cast(value)->value(); \
113 void holder::synchronized_set_##name(int value) { \
114 RELEASE_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
117 #define NOBARRIER_SMI_ACCESSORS(holder, name, offset) \
118 int holder::nobarrier_##name() const { \
119 Object* value = NOBARRIER_READ_FIELD(this, offset); \
120 return Smi::cast(value)->value(); \
122 void holder::nobarrier_set_##name(int value) { \
123 NOBARRIER_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
126 #define BOOL_GETTER(holder, field, name, offset) \
127 bool holder::name() const { \
128 return BooleanBit::get(field(), offset); \
132 #define BOOL_ACCESSORS(holder, field, name, offset) \
133 bool holder::name() const { \
134 return BooleanBit::get(field(), offset); \
136 void holder::set_##name(bool value) { \
137 set_##field(BooleanBit::set(field(), offset, value)); \
141 bool Object::IsFixedArrayBase()
const {
142 return IsFixedArray() || IsFixedDoubleArray() || IsConstantPoolArray() ||
143 IsFixedTypedArrayBase() || IsExternalArray();
149 return Object::IsHeapObject() &&
150 HeapObject::cast(
this)->map() ==
151 HeapObject::cast(
this)->GetHeap()->external_map();
155 bool Object::IsAccessorInfo()
const {
156 return IsExecutableAccessorInfo() || IsDeclaredAccessorInfo();
160 bool Object::IsSmi()
const {
165 bool Object::IsHeapObject()
const {
176 return Object::IsHeapObject()
182 return IsString() || IsSymbol();
186 bool Object::IsUniqueName()
const {
187 return IsInternalizedString() || IsSymbol();
191 bool Object::IsSpecObject()
const {
192 return Object::IsHeapObject()
197 bool Object::IsSpecFunction()
const {
198 if (!Object::IsHeapObject())
return false;
199 InstanceType type = HeapObject::cast(
this)->map()->instance_type();
204 bool Object::IsTemplateInfo()
const {
205 return IsObjectTemplateInfo() || IsFunctionTemplateInfo();
209 bool Object::IsInternalizedString()
const {
210 if (!this->IsHeapObject())
return false;
211 uint32_t type = HeapObject::cast(
this)->map()->instance_type();
218 bool Object::IsConsString()
const {
219 if (!IsString())
return false;
220 return StringShape(String::cast(
this)).IsCons();
224 bool Object::IsSlicedString()
const {
225 if (!IsString())
return false;
226 return StringShape(String::cast(
this)).IsSliced();
230 bool Object::IsSeqString()
const {
231 if (!IsString())
return false;
232 return StringShape(String::cast(
this)).IsSequential();
236 bool Object::IsSeqOneByteString()
const {
237 if (!IsString())
return false;
238 return StringShape(String::cast(
this)).IsSequential() &&
239 String::cast(
this)->IsOneByteRepresentation();
243 bool Object::IsSeqTwoByteString()
const {
244 if (!IsString())
return false;
245 return StringShape(String::cast(
this)).IsSequential() &&
246 String::cast(
this)->IsTwoByteRepresentation();
250 bool Object::IsExternalString()
const {
251 if (!IsString())
return false;
252 return StringShape(String::cast(
this)).IsExternal();
256 bool Object::IsExternalOneByteString()
const {
257 if (!IsString())
return false;
258 return StringShape(String::cast(
this)).IsExternal() &&
259 String::cast(
this)->IsOneByteRepresentation();
263 bool Object::IsExternalTwoByteString()
const {
264 if (!IsString())
return false;
265 return StringShape(String::cast(
this)).IsExternal() &&
266 String::cast(
this)->IsTwoByteRepresentation();
272 return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray() ||
273 IsFixedTypedArrayBase();
280 if (representation.
IsSmi() && object->IsUninitialized()) {
283 if (!representation.
IsDouble())
return object;
285 if (object->IsUninitialized()) {
287 }
else if (object->IsMutableHeapNumber()) {
288 value = HeapNumber::cast(*object)->value();
290 value =
object->Number();
299 DCHECK(!object->IsUninitialized());
301 DCHECK(object->FitsRepresentation(representation));
304 return isolate->
factory()->NewHeapNumber(HeapNumber::cast(*object)->value());
308 StringShape::StringShape(
const String* str)
309 : type_(str->
map()->instance_type()) {
315 StringShape::StringShape(Map*
map)
316 : type_(
map->instance_type()) {
329 bool StringShape::IsInternalized() {
388 bool StringShape::IsCons() {
393 bool StringShape::IsSliced() {
398 bool StringShape::IsIndirect() {
403 bool StringShape::IsExternal() {
408 bool StringShape::IsSequential() {
419 uint32_t StringShape::encoding_tag() {
424 uint32_t StringShape::full_representation_tag() {
436 bool StringShape::IsSequentialOneByte() {
441 bool StringShape::IsSequentialTwoByte() {
446 bool StringShape::IsExternalOneByte() {
457 bool StringShape::IsExternalTwoByte() {
470 return static_cast<const byte*
>(
start_)[index];
472 return static_cast<const uc16*
>(
start_)[index];
498 template <
typename Char>
516 return String::cast(other)->Hash();
531 return String::cast(
string)->IsOneByteEqualTo(
string_);
557 return String::cast(other)->Hash();
577 return String::cast(
string)->IsTwoByteEqualTo(
string_);
591 return String::cast(
string)->IsUtf8EqualTo(
string_);
603 return String::cast(other)->Hash();
608 return isolate->factory()->NewInternalizedStringFromUtf8(
620 return IsSmi() || IsHeapNumber();
628 bool Object::IsFiller()
const {
629 if (!Object::IsHeapObject())
return false;
630 InstanceType instance_type = HeapObject::cast(
this)->map()->instance_type();
635 bool Object::IsExternalArray()
const {
636 if (!Object::IsHeapObject())
639 HeapObject::cast(
this)->map()->instance_type();
// For one typed-array element type, expands to TYPE_CHECKER-generated
// Is##type predicates for both the external variant
// (ExternalType##Array / EXTERNAL_##TYPE##_ARRAY_TYPE) and the on-heap
// fixed variant (FixedType##Array / FIXED_##TYPE##_ARRAY_TYPE).
#define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size) \
  TYPE_CHECKER(External##Type##Array, EXTERNAL_##TYPE##_ARRAY_TYPE) \
  TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)
650 #undef TYPED_ARRAY_TYPE_CHECKER
653 bool Object::IsFixedTypedArrayBase()
const {
654 if (!Object::IsHeapObject())
return false;
657 HeapObject::cast(
this)->map()->instance_type();
663 bool Object::IsJSReceiver()
const {
665 return IsHeapObject() &&
670 bool Object::IsJSObject()
const {
672 return IsHeapObject() &&
677 bool Object::IsJSProxy()
const {
678 if (!Object::IsHeapObject())
return false;
679 return HeapObject::cast(
this)->map()->IsJSProxyMap();
697 bool Object::IsJSWeakCollection()
const {
698 return IsJSWeakMap() || IsJSWeakSet();
702 bool Object::IsDescriptorArray()
const {
703 return IsFixedArray();
707 bool Object::IsTransitionArray()
const {
708 return IsFixedArray();
712 bool Object::IsTypeFeedbackVector()
const {
return IsFixedArray(); }
715 bool Object::IsDeoptimizationInputData()
const {
717 if (!IsFixedArray())
return false;
723 int length = FixedArray::cast(
this)->length();
724 if (length == 0)
return true;
731 bool Object::IsDeoptimizationOutputData()
const {
732 if (!IsFixedArray())
return false;
736 if (FixedArray::cast(
this)->length() % 2 != 0)
return false;
741 bool Object::IsDependentCode()
const {
742 if (!IsFixedArray())
return false;
749 bool Object::IsContext()
const {
750 if (!Object::IsHeapObject())
return false;
751 Map*
map = HeapObject::cast(
this)->map();
752 Heap* heap =
map->GetHeap();
753 return (
map == heap->function_context_map() ||
754 map == heap->catch_context_map() ||
755 map == heap->with_context_map() ||
756 map == heap->native_context_map() ||
757 map == heap->block_context_map() ||
758 map == heap->module_context_map() ||
759 map == heap->global_context_map());
763 bool Object::IsNativeContext()
const {
764 return Object::IsHeapObject() &&
765 HeapObject::cast(
this)->map() ==
766 HeapObject::cast(
this)->GetHeap()->native_context_map();
770 bool Object::IsScopeInfo()
const {
771 return Object::IsHeapObject() &&
772 HeapObject::cast(
this)->map() ==
773 HeapObject::cast(
this)->GetHeap()->scope_info_map();
781 return obj->IsJSFunction();
797 bool Object::IsStringWrapper()
const {
798 return IsJSValue() && JSValue::cast(
this)->value()->IsString();
806 return IsOddball() &&
817 bool Object::IsJSArrayBufferView()
const {
818 return IsJSDataView() || IsJSTypedArray();
826 return obj->IsJSArray();
830 bool Object::IsHashTable()
const {
831 return Object::IsHeapObject() &&
832 HeapObject::cast(
this)->
map() ==
833 HeapObject::cast(
this)->
GetHeap()->hash_table_map();
837 bool Object::IsWeakHashTable()
const {
838 return IsHashTable();
842 bool Object::IsDictionary()
const {
843 return IsHashTable() &&
844 this != HeapObject::cast(
this)->GetHeap()->string_table();
848 bool Object::IsNameDictionary()
const {
849 return IsDictionary();
853 bool Object::IsSeededNumberDictionary()
const {
854 return IsDictionary();
858 bool Object::IsUnseededNumberDictionary()
const {
859 return IsDictionary();
863 bool Object::IsStringTable()
const {
864 return IsHashTable();
868 bool Object::IsJSFunctionResultCache()
const {
869 if (!IsFixedArray())
return false;
870 const FixedArray*
self = FixedArray::cast(
this);
871 int length =
self->length();
878 if (FLAG_verify_heap) {
882 reinterpret_cast<JSFunctionResultCache*
>(
const_cast<Object*
>(
this))->
883 JSFunctionResultCacheVerify();
890 bool Object::IsNormalizedMapCache()
const {
901 if (!obj->IsFixedArray())
return false;
906 if (FLAG_verify_heap) {
908 NormalizedMapCacheVerify();
915 bool Object::IsCompilationCacheTable()
const {
916 return IsHashTable();
920 bool Object::IsCodeCacheHashTable()
const {
921 return IsHashTable();
925 bool Object::IsPolymorphicCodeCacheHashTable()
const {
926 return IsHashTable();
930 bool Object::IsMapCache()
const {
931 return IsHashTable();
935 bool Object::IsObjectHashTable()
const {
936 return IsHashTable();
940 bool Object::IsOrderedHashTable()
const {
941 return IsHeapObject() &&
942 HeapObject::cast(
this)->map() ==
943 HeapObject::cast(
this)->GetHeap()->ordered_hash_table_map();
947 bool Object::IsOrderedHashSet()
const {
948 return IsOrderedHashTable();
952 bool Object::IsOrderedHashMap()
const {
953 return IsOrderedHashTable();
957 bool Object::IsPrimitive()
const {
958 return IsOddball() || IsNumber() || IsString();
962 bool Object::IsJSGlobalProxy()
const {
963 bool result = IsHeapObject() &&
964 (HeapObject::cast(
this)->map()->instance_type() ==
967 HeapObject::cast(
this)->
map()->is_access_check_needed());
972 bool Object::IsGlobalObject()
const {
973 if (!IsHeapObject())
return false;
975 InstanceType type = HeapObject::cast(
this)->map()->instance_type();
985 bool Object::IsUndetectableObject()
const {
986 return IsHeapObject()
987 && HeapObject::cast(
this)->map()->is_undetectable();
991 bool Object::IsAccessCheckNeeded()
const {
992 if (!IsHeapObject())
return false;
993 if (IsJSGlobalProxy()) {
994 const JSGlobalProxy* proxy = JSGlobalProxy::cast(
this);
995 GlobalObject* global = proxy->GetIsolate()->context()->global_object();
996 return proxy->IsDetachedFrom(global);
998 return HeapObject::cast(
this)->map()->is_access_check_needed();
1002 bool Object::IsStruct()
const {
1003 if (!IsHeapObject())
return false;
1004 switch (HeapObject::cast(
this)->
map()->instance_type()) {
// Instantiated via the struct list to emit one `case` per struct instance
// type inside the enclosing switch; any struct type returns true.
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
1007 #undef MAKE_STRUCT_CASE
1008 default:
return false;
1013 #define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
1014 bool Object::Is##Name() const { \
1015 return Object::IsHeapObject() \
1016 && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
1019 #undef MAKE_STRUCT_PREDICATE
1028 return IsOddball() && Oddball::cast(
this)->kind() ==
Oddball::kNull;
1032 bool Object::IsTheHole()
const {
1037 bool Object::IsException()
const {
1042 bool Object::IsUninitialized()
const {
1048 return IsOddball() && Oddball::cast(
this)->kind() ==
Oddball::kTrue;
1057 bool Object::IsArgumentsMarker()
const {
1065 ?
static_cast<double>(
reinterpret_cast<Smi*
>(
this)->value())
1070 bool Object::IsNaN()
const {
1071 return this->IsHeapNumber() && std::isnan(HeapNumber::cast(
this)->value());
1076 return this->IsHeapNumber() &&
1083 if (object->IsHeapNumber()) {
1085 int int_value =
FastD2I(value);
1102 return this->IsJSObject() && (JSObject::cast(
this)->class_name() ==
name);
1119 DCHECK(AllowHeapAllocation::IsAllowed());
1128 if (
name->AsArrayIndex(&index))
return GetElement(isolate,
object, index);
1140 DCHECK(!str->AsArrayIndex(&index));
1150 proxy, receiver, proxy->GetIsolate()->factory()->Uint32ToString(index));
1159 Isolate* isolate = proxy->GetIsolate();
1167 Isolate* isolate = proxy->GetIsolate();
// Raw address of the field at `offset` within tagged heap-object pointer
// `p`. Heap-object pointers carry kHeapObjectTag, so the tag is subtracted
// to recover the true byte address.
#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

// Const-qualified variant of FIELD_ADDR for read-only access paths.
#define FIELD_ADDR_CONST(p, offset) \
  (reinterpret_cast<const byte*>(p) + offset - kHeapObjectTag)

// Plain (non-atomic) read of a tagged Object* field.
#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object* const*>(FIELD_ADDR_CONST(p, offset)))

// Atomic read with acquire ordering; pairs with RELEASE_WRITE_FIELD.
#define ACQUIRE_READ_FIELD(p, offset) \
  reinterpret_cast<Object*>(base::Acquire_Load( \
      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))

// Atomic read with no memory-ordering guarantees.
#define NOBARRIER_READ_FIELD(p, offset) \
  reinterpret_cast<Object*>(base::NoBarrier_Load( \
      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))

// Plain (non-atomic) write of a tagged Object* field. Callers are
// responsible for emitting a write barrier when `value` may be a heap object.
#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
// Atomic write with release ordering; pairs with ACQUIRE_READ_FIELD.
// NOTE: no trailing semicolon in the expansion -- call sites supply their
// own `;` (see e.g. SYNCHRONIZED_SMI_ACCESSORS), so the previous trailing
// semicolon produced an empty statement and made the macro unusable in an
// unbraced if/else body.
#define RELEASE_WRITE_FIELD(p, offset, value) \
  base::Release_Store( \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value))

// Atomic write with no memory-ordering guarantees; same no-trailing-semicolon
// convention as above.
#define NOBARRIER_WRITE_FIELD(p, offset, value) \
  base::NoBarrier_Store( \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value))
1203 #define WRITE_BARRIER(heap, object, offset, value) \
1204 heap->incremental_marking()->RecordWrite( \
1205 object, HeapObject::RawField(object, offset), value); \
1206 if (heap->InNewSpace(value)) { \
1207 heap->RecordWrite(object->address(), offset); \
1210 #define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
1211 if (mode == UPDATE_WRITE_BARRIER) { \
1212 heap->incremental_marking()->RecordWrite( \
1213 object, HeapObject::RawField(object, offset), value); \
1214 if (heap->InNewSpace(value)) { \
1215 heap->RecordWrite(object->address(), offset); \
1219 #ifndef V8_TARGET_ARCH_MIPS
// Non-MIPS targets: read the double with a single direct 64-bit load.
// (The MIPS branch below instead goes through read_double_field.)
#define READ_DOUBLE_FIELD(p, offset) \
  (*reinterpret_cast<const double*>(FIELD_ADDR_CONST(p, offset)))
1225 static inline double read_double_field(
const void* p,
int offset) {
1230 c.u[0] = (*
reinterpret_cast<const uint32_t*
>(
1232 c.u[1] = (*
reinterpret_cast<const uint32_t*
>(
// MIPS: delegate to read_double_field(), which assembles the double from two
// 32-bit loads -- presumably to avoid unaligned 64-bit accesses; confirm
// against the MIPS port notes.
#define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
1239 #ifndef V8_TARGET_ARCH_MIPS
// Non-MIPS targets: write the double with a single direct 64-bit store.
#define WRITE_DOUBLE_FIELD(p, offset, value) \
  (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
1245 static inline void write_double_field(
void* p,
int offset,
// MIPS: delegate to write_double_field() (two 32-bit stores), mirroring the
// READ_DOUBLE_FIELD arrangement above.
#define WRITE_DOUBLE_FIELD(p, offset, value) \
  write_double_field(p, offset, value)
// Typed raw-field accessors. Each reads or writes an untagged scalar of the
// named C++ type at `offset` within the object; none emit a write barrier
// because the stored values are not heap pointers.

#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<const int*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_INTPTR_FIELD(p, offset) \
  (*reinterpret_cast<const intptr_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INTPTR_FIELD(p, offset, value) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<const uint32_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT32_FIELD(p, offset) \
  (*reinterpret_cast<const int32_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT32_FIELD(p, offset, value) \
  (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT64_FIELD(p, offset) \
  (*reinterpret_cast<const int64_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT64_FIELD(p, offset, value) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)

// "Short" here means an unsigned 16-bit value.
#define READ_SHORT_FIELD(p, offset) \
  (*reinterpret_cast<const uint16_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_SHORT_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<const byte*>(FIELD_ADDR_CONST(p, offset)))

// Atomic byte read with no ordering guarantees. NOTE(review): this uses
// FIELD_ADDR (non-const) rather than FIELD_ADDR_CONST, unlike the other
// readers -- looks like an inconsistency; confirm before changing.
#define NOBARRIER_READ_BYTE_FIELD(p, offset) \
  static_cast<byte>(base::NoBarrier_Load( \
      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset))))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
// Atomic byte write with no memory-ordering guarantees.
// NOTE: no trailing semicolon in the expansion -- the caller supplies it --
// so the previous trailing semicolon no longer produces an empty statement
// or breaks use in an unbraced if/else body.
#define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value) \
  base::NoBarrier_Store( \
      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
      static_cast<base::Atomic8>(value))
1341 MapWord MapWord::FromMap(
const Map*
map) {
1346 Map* MapWord::ToMap() {
1347 return reinterpret_cast<Map*
>(value_);
1351 bool MapWord::IsForwardingAddress() {
1356 MapWord MapWord::FromForwardingAddress(HeapObject*
object) {
1358 return MapWord(
reinterpret_cast<uintptr_t>(raw));
1362 HeapObject* MapWord::ToForwardingAddress() {
1363 DCHECK(IsForwardingAddress());
1369 void HeapObject::VerifyObjectField(
int offset) {
1373 void HeapObject::VerifySmiField(
int offset) {
1396 map_word().ToRawValue() & ~static_cast<uintptr_t>(PathTracer::kMarkTag);
1397 return MapWord::FromRawValue(raw_value).ToMap();
1406 if (value !=
NULL) {
1421 if (value !=
NULL) {
1511 v->VisitNextCodeLink(
reinterpret_cast<Object**
>(
FIELD_ADDR(
this, offset)));
1548 Object* candidate = *current++;
1549 if (!candidate->IsSmi() && candidate != the_hole)
return false;
1562 #ifdef ENABLE_SLOW_DCHECKS
1577 set_dependent_code(DependentCode::cast(
GetHeap()->empty_fixed_array()),
1593 if (FLAG_pretenuring_call_new ||
1604 if (FLAG_pretenuring_call_new ||
1615 if (FLAG_allocation_site_pretenuring) {
1640 int value = pretenure_data()->value();
1662 DCHECK(FLAG_allocation_site_pretenuring);
1671 bool maximum_size_scavenge) {
1678 if (maximum_size_scavenge) {
1695 bool maximum_size_scavenge) {
1701 minimum_mementos_created || FLAG_trace_pretenuring_statistics ?
1702 static_cast<double>(found_count) / create_count : 0.0;
1705 if (minimum_mementos_created) {
1707 current_decision, ratio, maximum_size_scavenge);
1710 if (FLAG_trace_pretenuring_statistics) {
1712 "AllocationSite(%p): (created, found, ratio) (%d, %d, %f) %s => %s\n",
1713 static_cast<void*
>(
this), create_count, found_count, ratio,
1727 ElementsKind elements_kind =
object->map()->elements_kind();
1742 ElementsKind current_kind =
object->map()->elements_kind();
1749 Heap* heap =
object->GetHeap();
1750 Object* the_hole = heap->the_hole_value();
1752 Object* current = *objects++;
1753 if (current == the_hole) {
1756 }
else if (!current->IsSmi()) {
1765 }
else if (is_holey) {
1774 if (target_kind != current_kind) {
1784 Heap* heap =
object->GetHeap();
1785 if (elements->map() != heap->fixed_double_array_map()) {
1786 DCHECK(elements->map() == heap->fixed_array_map() ||
1787 elements->map() == heap->fixed_cow_array_map());
1804 if (double_array->is_the_hole(
i)) {
1818 DCHECK((object->map()->has_fast_smi_or_object_elements() ||
1819 (*value == object->GetHeap()->empty_fixed_array())) ==
1820 (value->map() == object->GetHeap()->fixed_array_map() ||
1821 value->map() == object->GetHeap()->fixed_cow_array_map()));
1822 DCHECK((*value == object->GetHeap()->empty_fixed_array()) ||
1823 (object->map()->has_fast_double_elements() ==
1824 value->IsFixedDoubleArray()));
1825 object->set_elements(*value);
1835 void JSObject::initialize_properties() {
1873 int transition = transitions->
Search(*key);
1875 PropertyDetails target_details = transitions->
GetTargetDetails(transition);
1887 return Smi::cast(
READ_FIELD(
this, kKindOffset))->value();
1896 Object* Cell::value()
const {
1903 DCHECK(!val->IsPropertyCell() && !val->IsCell());
1909 Object* PropertyCell::type_raw()
const {
2026 if (index.is_inobject()) {
2029 return properties()->get(index.outobject_array_index());
2035 if (index.is_inobject()) {
2036 int offset = index.offset();
2040 properties()->set(index.outobject_array_index(), value);
2069 Object* pre_allocated_value,
2071 DCHECK(!filler_value->IsHeapObject() ||
2072 !
GetHeap()->InNewSpace(filler_value));
2073 DCHECK(!pre_allocated_value->IsHeapObject() ||
2074 !
GetHeap()->InNewSpace(pre_allocated_value));
2077 if (filler_value != pre_allocated_value) {
2080 for (
int i = 0;
i < pre_allocated;
i++) {
2085 while (offset <
size) {
2093 DCHECK(properties()->IsDictionary() ==
map()->is_dictionary_map());
2094 return !properties()->IsDictionary();
2104 return external > limit;
2118 int value = Smi::cast(
this)->value();
2119 if (value < 0)
return false;
2123 if (IsHeapNumber()) {
2124 double value = HeapNumber::cast(
this)->value();
2126 if (value ==
static_cast<double>(uint_value)) {
2127 *index = uint_value;
2136 if (!this->IsJSValue())
return false;
2138 JSValue* js_value = JSValue::cast(
this);
2139 if (!js_value->value()->IsString())
return false;
2141 String* str = String::cast(js_value->value());
2149 #if ENABLE_EXTRA_CHECKS
2159 FATAL(
"API call returned invalid object");
2172 return handle(array->get(index), array->GetIsolate());
2177 return get(index) ==
GetHeap()->the_hole_value();
2236 if (array->is_the_hole(index)) {
2237 return array->GetIsolate()->factory()->the_hole_value();
2239 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
2273 for (
int i = from;
i <
to;
i++) {
2279 void ConstantPoolArray::NumberOfEntries::increment(
Type type) {
2280 DCHECK(type < NUMBER_OF_TYPES);
2281 element_counts_[type]++;
2285 int ConstantPoolArray::NumberOfEntries::equals(
2286 const ConstantPoolArray::NumberOfEntries& other)
const {
2287 for (
int i = 0;
i < NUMBER_OF_TYPES;
i++) {
2288 if (element_counts_[
i] != other.element_counts_[
i])
return false;
2294 bool ConstantPoolArray::NumberOfEntries::is_empty()
const {
2295 return total_count() == 0;
2299 int ConstantPoolArray::NumberOfEntries::count_of(
Type type)
const {
2300 DCHECK(type < NUMBER_OF_TYPES);
2301 return element_counts_[type];
2305 int ConstantPoolArray::NumberOfEntries::base_of(
Type type)
const {
2307 DCHECK(type < NUMBER_OF_TYPES);
2308 for (
int i = 0;
i < type;
i++) {
2309 base += element_counts_[
i];
2315 int ConstantPoolArray::NumberOfEntries::total_count()
const {
2317 for (
int i = 0;
i < NUMBER_OF_TYPES;
i++) {
2318 count += element_counts_[
i];
2324 int ConstantPoolArray::NumberOfEntries::are_in_range(
int min,
int max)
const {
2326 if (element_counts_[
i] <
min || element_counts_[
i] > max) {
2334 int ConstantPoolArray::Iterator::next_index() {
2336 int ret = next_index_++;
2342 bool ConstantPoolArray::Iterator::is_finished() {
2343 return next_index_ > array_->last_index(type_, final_section_);
2347 void ConstantPoolArray::Iterator::update_section() {
2348 if (next_index_ > array_->last_index(type_, current_section_) &&
2349 current_section_ != final_section_) {
2350 DCHECK(final_section_ == EXTENDED_SECTION);
2351 current_section_ = EXTENDED_SECTION;
2352 next_index_ = array_->first_index(type_, EXTENDED_SECTION);
2614 const NumberOfEntries& extended) {
2626 extended.count_of(
INT64));
2632 extended.count_of(
INT32));
2689 heap->RecordWrite(array->
address(), offset);
2710 GetHeap()->undefined_value());
2734 for (
int i = from;
i <
to;
i++) {
2747 this ==
GetHeap()->empty_descriptor_array());
2761 template<SearchMode search_mode,
typename T>
2768 while (low != high) {
2769 int mid = (low + high) / 2;
2770 Name* mid_name = array->GetSortedKey(mid);
2773 if (mid_hash >= hash) {
2780 for (; low <= limit; ++low) {
2781 int sort_index = array->GetSortedKeyIndex(low);
2782 Name* entry = array->GetKey(sort_index);
2783 if (entry->
Hash() != hash)
break;
2785 if (search_mode ==
ALL_ENTRIES || sort_index < valid_entries) {
2788 return T::kNotFound;
2792 return T::kNotFound;
2798 template<SearchMode search_mode,
typename T>
2802 for (
int number = 0; number < len; number++) {
2803 int sorted_index = array->GetSortedKeyIndex(number);
2804 Name* entry = array->GetKey(sorted_index);
2806 if (current_hash > hash)
break;
2807 if (current_hash == hash && entry->
Equals(
name))
return sorted_index;
2810 DCHECK(len >= valid_entries);
2811 for (
int number = 0; number < valid_entries; number++) {
2812 Name* entry = array->GetKey(number);
2814 if (current_hash == hash && entry->
Equals(
name))
return number;
2817 return T::kNotFound;
2821 template<SearchMode search_mode,
typename T>
2824 SLOW_DCHECK(array->IsSortedNoDuplicates(valid_entries));
2829 int nof = array->number_of_entries();
2830 if (nof == 0)
return T::kNotFound;
2833 const int kMaxElementsForLinearSearch = 8;
2835 nof <= kMaxElementsForLinearSearch) ||
2837 valid_entries <= (kMaxElementsForLinearSearch * 3))) {
2838 return LinearSearch<search_mode>(array,
name, nof, valid_entries);
2842 return BinarySearch<search_mode>(array,
name, 0, nof - 1, valid_entries);
2847 return internal::Search<VALID_ENTRIES>(
this,
name, valid_descriptors);
2851 int DescriptorArray::SearchWithCache(Name*
name, Map*
map) {
2853 if (number_of_own_descriptors == 0)
return kNotFound;
2859 number =
Search(
name, number_of_own_descriptors);
2860 cache->Update(
map,
name, number);
2868 return instance_descriptors()->GetDetails(
LastAdded());
2874 LookupResult* result) {
2876 int number = descriptors->SearchWithCache(
name,
this);
2878 result->DescriptorResult(holder, descriptors->
GetDetails(number), number);
2884 LookupResult* result) {
2887 result->TransitionResult(holder, this->
GetTransition(transition_index));
2895 return GetHeap()->empty_fixed_array();
2935 return GetDetails(descriptor_number).pointer();
2945 PropertyDetails details =
GetDetails(descriptor_index);
2953 PropertyDetails details =
GetDetails(descriptor_index);
2955 details.CopyWithRepresentation(representation).AsSmi());
2984 return PropertyDetails(Smi::cast(details));
2995 return GetDetails(descriptor_number).field_index();
3006 return GetValue(descriptor_number);
3012 return GetValue(descriptor_number);
3044 desc->GetDetails().AsSmi());
3062 Set(descriptor_number, desc);
3064 uint32_t hash = desc->GetKey()->Hash();
3068 for (insertion = descriptor_number; insertion > 0; --insertion) {
3070 if (key->
Hash() <= hash)
break;
3086 : marking_(array->GetHeap()->incremental_marking()) {
3089 Marking::Color(array) == Marking::WHITE_OBJECT);
3094 marking_->LeaveNoMarkingScope();
3098 template<
typename Derived,
typename Shape,
typename Key>
3100 const int kMinCapacity = 32;
3102 if (capacity < kMinCapacity) {
3103 capacity = kMinCapacity;
3109 template<
typename Derived,
typename Shape,
typename Key>
3116 template<
typename Derived,
typename Shape,
typename Key>
3123 Object* element = KeyAt(entry);
3126 if (element == isolate->
heap()->raw_unchecked_undefined_value())
break;
3127 if (element != isolate->
heap()->raw_unchecked_the_hole_value() &&
3128 Shape::IsMatch(key, element))
return entry;
3129 entry = NextProbe(entry, count++, capacity);
3136 Object* max_index_object =
get(kMaxNumberKeyIndex);
3137 if (!max_index_object->IsSmi())
return false;
3139 (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
3143 DCHECK(!requires_slow_elements());
3144 Object* max_index_object =
get(kMaxNumberKeyIndex);
3145 if (!max_index_object->IsSmi())
return 0;
3146 uint32_t value =
static_cast<uint32_t>(Smi::cast(max_index_object)->value());
3147 return value >> kRequiresSlowElementsTagSize;
3246 template <
class Traits>
3249 HeapObject::cast(
object)->map()->instance_type() ==
3250 Traits::kInstanceType);
3255 template <
class Traits>
3259 HeapObject::cast(
object)->map()->instance_type() ==
3260 Traits::kInstanceType);
// Instantiated via the struct list to emit a CAST_ACCESSOR (checked
// cast-to-Name) for every struct type.
#define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
3267 #undef MAKE_STRUCT_CAST
3270 template <
typename Derived,
typename Shape,
typename Key>
3271 HashTable<Derived, Shape, Key>*
3272 HashTable<Derived, Shape, Key>::cast(
Object* obj) {
3274 return reinterpret_cast<HashTable*
>(obj);
3278 template <
typename Derived,
typename Shape,
typename Key>
3279 const HashTable<Derived, Shape, Key>*
3280 HashTable<Derived, Shape, Key>::cast(
const Object* obj) {
3282 return reinterpret_cast<const HashTable*
>(obj);
3303 #if V8_HOST_ARCH_64_BIT
3310 if (other ==
this)
return true;
3311 if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
3312 this->IsSymbol() || other->IsSymbol()) {
3315 return String::cast(
this)->SlowEquals(String::cast(other));
3320 if (one.is_identical_to(two))
return true;
3321 if ((one->IsInternalizedString() && two->IsInternalizedString()) ||
3322 one->IsSymbol() || two->IsSymbol()) {
3337 if (other ==
this)
return true;
3338 if (this->IsInternalizedString() && other->IsInternalizedString()) {
3341 return SlowEquals(other);
3346 if (one.is_identical_to(two))
return true;
3347 if (one->IsInternalizedString() && two->IsInternalizedString()) {
3350 return SlowEquals(one, two);
3355 if (!string->IsConsString())
return string;
3357 if (cons->IsFlat())
return handle(cons->first());
3358 return SlowFlatten(cons, pretenure);
3364 switch (StringShape(
this).full_representation_tag()) {
3366 return SeqOneByteString::cast(
this)->SeqOneByteStringGet(index);
3368 return SeqTwoByteString::cast(
this)->SeqTwoByteStringGet(index);
3371 return ConsString::cast(
this)->ConsStringGet(index);
3373 return ExternalOneByteString::cast(
this)->ExternalOneByteStringGet(index);
3375 return ExternalTwoByteString::cast(
this)->ExternalTwoByteStringGet(index);
3378 return SlicedString::cast(
this)->SlicedStringGet(index);
3390 DCHECK(StringShape(
this).IsSequential());
3392 return this->IsOneByteRepresentation()
3393 ? SeqOneByteString::cast(
this)->SeqOneByteStringSet(index, value)
3394 : SeqTwoByteString::cast(
this)->SeqTwoByteStringSet(index, value);
3399 if (!StringShape(
this).IsCons())
return true;
3400 return ConsString::cast(
this)->second()->length() == 0;
3408 DCHECK(StringShape(
this).IsIndirect());
3411 return String::cast(
READ_FIELD(
this, kUnderlyingOffset));
3415 template<
class Visitor>
3419 int slice_offset = offset;
3420 const int length =
string->length();
3423 int32_t type =
string->map()->instance_type();
3426 visitor->VisitOneByteString(
3427 SeqOneByteString::cast(
string)->GetChars() + slice_offset,
3432 visitor->VisitTwoByteString(
3433 SeqTwoByteString::cast(
string)->GetChars() + slice_offset,
3438 visitor->VisitOneByteString(
3439 ExternalOneByteString::cast(
string)->GetChars() + slice_offset,
3444 visitor->VisitTwoByteString(
3445 ExternalTwoByteString::cast(
string)->GetChars() + slice_offset,
3451 SlicedString* slicedString = SlicedString::cast(
string);
3452 slice_offset += slicedString->
offset();
3453 string = slicedString->
parent();
3459 return ConsString::cast(
string);
3476 DCHECK(index >= 0 && index <
length() && value <= kMaxOneByteCharCode);
3478 static_cast<byte>(value));
3488 return reinterpret_cast<uint8_t*
>(GetCharsAddress());
3525 return String::cast(
READ_FIELD(
this, kParentOffset));
3530 DCHECK(parent->IsSeqString() || parent->IsExternalString());
3556 return String::cast(
READ_FIELD(
this, kSecondOffset));
3583 if (is_short())
return;
3584 const char** data_field =
3585 reinterpret_cast<const char**
>(
FIELD_ADDR(
this, kResourceDataOffset));
3586 *data_field = resource()->data();
3593 *
reinterpret_cast<const Resource**
>(
3594 FIELD_ADDR(
this, kResourceOffset)) = resource;
3595 if (resource !=
NULL) update_data_cache();
3600 return reinterpret_cast<const uint8_t*
>(resource()->data());
3606 return GetChars()[index];
3616 if (is_short())
return;
3619 *data_field = resource()->data();
3625 *
reinterpret_cast<const Resource**
>(
3626 FIELD_ADDR(
this, kResourceOffset)) = resource;
3627 if (resource !=
NULL) update_data_cache();
3632 return resource()->data();
3638 return GetChars()[index];
3644 return GetChars() + start;
3649 return depth & kDepthMask;
3654 frames_[depth_++ & kDepthMask] = string;
3660 frames_[(depth_-1) & kDepthMask] =
string;
3665 if (depth_ > maximum_depth_) maximum_depth_ = depth_;
3671 DCHECK(depth_ <= maximum_depth_);
3679 if (buffer8_ == end_) HasMore();
3681 return is_one_byte_ ? *buffer8_++ : *buffer16_++;
3688 : is_one_byte_(
false),
3690 Reset(
string, offset);
3699 if (cons_string !=
NULL) {
3711 if (
string ==
NULL)
return false;
3719 const uint8_t* chars,
int length) {
3722 end_ = chars + length;
3727 const uint16_t* chars,
int length) {
3730 end_ =
reinterpret_cast<const uint8_t*
>(chars + length);
3741 int cache_size =
size();
3800 return reinterpret_cast<uint8_t*
>(external_pointer());
3815 array->GetIsolate());
3826 void* ExternalArray::external_pointer()
const {
3828 return reinterpret_cast<void*
>(ptr);
3833 intptr_t ptr =
reinterpret_cast<intptr_t
>(value);
3840 int8_t* ptr =
static_cast<int8_t*
>(external_pointer());
3848 array->GetIsolate());
3854 int8_t* ptr =
static_cast<int8_t*
>(external_pointer());
3861 uint8_t* ptr =
static_cast<uint8_t*
>(external_pointer());
3869 array->GetIsolate());
3875 uint8_t* ptr =
static_cast<uint8_t*
>(external_pointer());
3890 array->GetIsolate());
3911 array->GetIsolate());
3931 return array->GetIsolate()->factory()->
3932 NewNumberFromInt(array->get_scalar(index));
3952 return array->GetIsolate()->factory()->
3953 NewNumberFromUint(array->get_scalar(index));
3966 float* ptr =
static_cast<float*
>(external_pointer());
3973 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
3979 float* ptr =
static_cast<float*
>(external_pointer());
3986 double* ptr =
static_cast<double*
>(external_pointer());
3993 return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
3999 double* ptr =
static_cast<double*
>(external_pointer());
4012 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
4013 case FIXED_##TYPE##_ARRAY_TYPE: \
4014 element_size = size; \
4018 #undef TYPED_ARRAY_CASE
4023 return length() * element_size;
4042 uint8_t Uint8ArrayTraits::defaultValue() {
return 0; }
4045 uint8_t Uint8ClampedArrayTraits::defaultValue() {
return 0; }
4048 int8_t Int8ArrayTraits::defaultValue() {
return 0; }
4051 uint16_t Uint16ArrayTraits::defaultValue() {
return 0; }
4054 int16_t Int16ArrayTraits::defaultValue() {
return 0; }
4057 uint32_t Uint32ArrayTraits::defaultValue() {
return 0; }
4060 int32_t Int32ArrayTraits::defaultValue() {
return 0; }
4063 float Float32ArrayTraits::defaultValue() {
4071 template <
class Traits>
4073 DCHECK((index >= 0) && (index < this->length()));
4083 DCHECK((index >= 0) && (index < this->length()));
4088 template <
class Traits>
4090 DCHECK((index >= 0) && (index < this->length()));
4099 int index, Float64ArrayTraits::ElementType value) {
4100 DCHECK((index >= 0) && (index < this->length()));
4105 template <
class Traits>
4113 if (value < 0)
return 0;
4114 if (value > 0xFF)
return 0xFF;
4115 return static_cast<uint8_t
>(value);
4119 template <
class Traits>
4128 if (value < 0)
return 0;
4129 if (value > 0xFF)
return 0xFF;
4130 return static_cast<uint8_t
>(lrint(value));
4136 return static_cast<float>(value);
4146 template <
class Traits>
4150 return Traits::ToHandle(array->GetIsolate(), array->get_scalar(index));
4154 template <
class Traits>
4160 if (index <
static_cast<uint32_t>(array->length())) {
4161 if (value->IsSmi()) {
4163 cast_value = from_int(int_value);
4164 }
else if (value->IsHeapNumber()) {
4166 cast_value = from_double(double_value);
4170 DCHECK(value->IsUndefined());
4172 array->set(index, cast_value);
4174 return Traits::ToHandle(array->GetIsolate(), cast_value);
4189 Handle<Object> Int8ArrayTraits::ToHandle(Isolate* isolate, int8_t scalar) {
4194 Handle<Object> Uint16ArrayTraits::ToHandle(Isolate* isolate,
uint16_t scalar) {
4199 Handle<Object> Int16ArrayTraits::ToHandle(Isolate* isolate,
int16_t scalar) {
4204 Handle<Object> Uint32ArrayTraits::ToHandle(Isolate* isolate,
uint32_t scalar) {
4205 return isolate->factory()->NewNumberFromUint(scalar);
4209 Handle<Object> Int32ArrayTraits::ToHandle(Isolate* isolate,
int32_t scalar) {
4210 return isolate->factory()->NewNumberFromInt(scalar);
4214 Handle<Object> Float32ArrayTraits::ToHandle(Isolate* isolate,
float scalar) {
4215 return isolate->factory()->NewNumber(scalar);
4219 Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate,
double scalar) {
4220 return isolate->factory()->NewNumber(scalar);
4230 DCHECK(0 <=
id &&
id < 256);
4273 return reinterpret_cast<ByteArray*
>(
this)->ByteArraySize();
4276 return reinterpret_cast<FreeSpace*
>(
this)->nobarrier_size();
4293 this)->TypedArraySize(instance_type);
4296 return reinterpret_cast<Code*
>(
this)->CodeSize();
4303 DCHECK(0 <= value && value < 256);
4310 DCHECK(0 <= value && value < 256);
4316 DCHECK(0 <= value && value < 256);
4319 static_cast<byte>(value));
4388 if (access_check_needed) {
4521 return code_cache() !=
GetIsolate()->
heap()->empty_fixed_array();
4527 for (
int i = 0;
i <= descriptor;
i++) {
4528 PropertyDetails details = instance_descriptors()->GetDetails(
i);
4529 if (details.representation().IsNone())
return true;
4530 if (details.representation().IsSmi())
return true;
4531 if (details.representation().IsDouble())
return true;
4532 if (details.representation().IsHeapObject())
return true;
4533 if (details.type() ==
CONSTANT)
return true;
4550 return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
4555 if (
length() == 0)
return 0;
4556 return Smi::cast(
get(group))->value();
4608 if (starts.
at(g) < starts.
at(g + 1)) {
4609 copy(starts.
at(g), starts.
at(g + 1));
4628 kind() == KEYED_LOAD_IC ||
kind() == CALL_IC ||
kind() == STORE_IC ||
4629 kind() == KEYED_STORE_IC ||
kind() == BINARY_OP_IC ||
4630 kind() == COMPARE_IC ||
kind() == COMPARE_NIL_IC ||
4631 kind() == TO_BOOLEAN_IC;
4783 if (
kind() == FUNCTION) {
4924 #define CASE(name) case name: return true;
4927 default:
return false;
4948 DCHECK(value->IsConstantPoolArray());
4963 return static_cast<Flags>(bits);
5008 return static_cast<Flags>(bits);
5014 return static_cast<Flags>(bits);
5024 Code* result =
reinterpret_cast<Code*
>(code);
5036 if (!FLAG_collect_maps)
return false;
5037 if (object->IsMap()) {
5038 return Map::cast(
object)->CanTransition() &&
5039 FLAG_weak_embedded_maps_in_optimized_code;
5041 if (object->IsJSObject() ||
5042 (object->IsCell() && Cell::cast(
object)->value()->IsJSObject())) {
5043 return FLAG_weak_embedded_objects_in_optimized_code;
5068 return object->IsMap() && Map::cast(
object)->CanTransition() &&
5069 FLAG_collect_maps &&
5070 FLAG_weak_embedded_maps_in_ic;
5074 Object* Map::prototype()
const {
5090 if (!
map->HasTransitionArray()) {
5092 transitions->set_back_pointer_storage(
map->GetBackPointer());
5093 }
else if (!
map->transitions()->IsFullTransitionArray()) {
5098 map->set_transitions(*transitions);
5104 set_instance_descriptors(descriptors);
5129 descriptors->
Append(desc);
5136 if (object->IsDescriptorArray()) {
5139 DCHECK(object->IsMap() || object->IsUndefined());
5152 return object->IsTransitionArray();
5157 int index = transitions()->Search(
GetHeap()->elements_transition_symbol());
5158 return transitions()->GetTarget(index);
5171 return transitions()->GetTarget(transition_index);
5184 return GetHeap()->empty_fixed_array();
5186 return transitions()->GetPrototypeTransitions();
5217 void Map::set_transitions(TransitionArray* transition_array,
5225 for (
int i = 0;
i < transitions()->number_of_transitions();
i++) {
5226 Map* target = transitions()->GetTarget(
i);
5227 if (target->instance_descriptors() == instance_descriptors()) {
5228 Name* key = transitions()->GetKey(
i);
5229 int new_target_index = transition_array->Search(key);
5231 DCHECK(transition_array->GetTarget(new_target_index) == target);
5235 DCHECK(transitions() != transition_array);
5246 DCHECK(undefined->IsUndefined());
5256 if (object->IsTransitionArray()) {
5285 kExpectedReceiverTypeOffset)
5288 kSerializedDataOffset)
5323 kPrototypeTemplateOffset)
5326 kNamedPropertyHandlerOffset)
5328 kIndexedPropertyHandlerOffset)
5330 kInstanceTemplateOffset)
5334 kInstanceCallHandlerOffset)
5336 kAccessCheckInfoOffset)
5341 kInternalFieldCountOffset)
5352 kPretenureCreateCountOffset)
5354 kDependentCodeOffset)
5369 kEvalFrominstructionsOffsetOffset)
5377 COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
5405 kOptimizedCodeMapOffset)
5408 kFeedbackVectorOffset)
5410 kInstanceClassNameOffset)
5419 kHiddenPrototypeBit)
5422 kNeedsAccessCheckBit)
5424 kReadOnlyPrototypeBit)
5426 kRemovePrototypeBit)
5436 allows_lazy_compilation,
5437 kAllowLazyCompilation)
5441 kAllowLazyCompilationWithoutContext)
5449 kHasDuplicateParameters)
5453 #if V8_HOST_ARCH_32_BIT
5456 kFormalParameterCountOffset)
5458 kExpectedNofPropertiesOffset)
5461 kStartPositionAndTypeOffset)
5464 kFunctionTokenPositionOffset)
5466 kCompilerHintsOffset)
5468 kOptCountAndBailoutReasonOffset)
5475 #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
5476 STATIC_ASSERT(holder::offset % kPointerSize == 0); \
5477 int holder::name() const { \
5478 int value = READ_INT_FIELD(this, offset); \
5479 DCHECK(kHeapObjectTag == 1); \
5480 DCHECK((value & kHeapObjectTag) == 0); \
5481 return value >> 1; \
5483 void holder::set_##name(int value) { \
5484 DCHECK(kHeapObjectTag == 1); \
5485 DCHECK((value & 0xC0000000) == 0xC0000000 || \
5486 (value & 0xC0000000) == 0x0); \
5487 WRITE_INT_FIELD(this, \
5489 (value << 1) & ~kHeapObjectTag); \
5492 #define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
5493 STATIC_ASSERT(holder::offset % kPointerSize == kIntSize); \
5494 INT_ACCESSORS(holder, name, offset)
5499 formal_parameter_count,
5500 kFormalParameterCountOffset)
5504 kExpectedNofPropertiesOffset)
5510 kStartPositionAndTypeOffset)
5514 kFunctionTokenPositionOffset)
5517 kCompilerHintsOffset)
5521 kOptCountAndBailoutReasonOffset)
5526 kAstNodeCountOffset)
5529 kProfilerTicksOffset)
5536 optimization_disabled,
5537 kOptimizationDisabled)
5540 void SharedFunctionInfo::set_optimization_disabled(
bool disable) {
5546 if ((code()->
kind() == Code::FUNCTION) && disable) {
5547 code()->set_optimizable(
false);
5585 kNameShouldPrintAsAnonymous)
5601 bool Script::HasValidSource() {
5602 Object* src = this->source();
5603 if (!src->IsString())
return true;
5604 String* src_str = String::cast(src);
5605 if (!StringShape(src_str).IsExternal())
return true;
5607 return ExternalOneByteString::cast(src)->resource() !=
NULL;
5609 return ExternalTwoByteString::cast(src)->resource() !=
NULL;
5632 Code* SharedFunctionInfo::code()
const {
5638 DCHECK(value->kind() != Code::OPTIMIZED_FUNCTION);
5647 if (code()->gc_metadata() !=
NULL) {
5652 DCHECK(code()->gc_metadata() ==
NULL && value->gc_metadata() ==
NULL);
5658 ScopeInfo* SharedFunctionInfo::scope_info()
const {
5663 void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
5669 reinterpret_cast<Object*
>(value),
5680 return function_data()->IsFunctionTemplateInfo();
5686 return FunctionTemplateInfo::cast(function_data());
5691 return function_data()->IsSmi();
5758 Code* code = this->code();
5768 if (tries >= 16 && (((tries - 1) & tries) == 0)) {
5769 set_optimization_disabled(
false);
5772 code()->set_optimizable(
true);
5783 Object* script = shared()->script();
5784 bool native = script->IsScript() &&
5792 Object* script = shared()->script();
5793 return script->IsScript() &&
5799 return shared()->formal_parameter_count() !=
5805 return code()->
kind() == Code::OPTIMIZED_FUNCTION;
5816 Builtins::kCompileOptimized);
5822 Builtins::kCompileOptimizedConcurrent);
5828 Builtins::kInOptimizationQueue);
5864 bool is_optimized =
code->
kind() == Code::OPTIMIZED_FUNCTION;
5866 if (was_optimized && is_optimized) {
5867 shared()->EvictFromOptimizedCodeMap(this->
code(),
5868 "Replacing with another optimized code");
5875 if (!was_optimized && is_optimized) {
5878 if (was_optimized && !is_optimized) {
5896 DCHECK(value->IsUndefined() || value->IsContext());
5902 kPrototypeOrInitialMapOffset)
5906 return Map::cast(prototype_or_initial_map());
5911 return prototype_or_initial_map()->IsMap();
5930 return prototype_or_initial_map();
5938 if (
map()->has_non_instance_prototype())
return map()->constructor();
5954 DCHECK(!shared()->bound());
5955 return literals_or_bindings();
5960 DCHECK(!shared()->bound());
5961 set_literals_or_bindings(
literals);
5966 DCHECK(shared()->bound());
5967 return literals_or_bindings();
5972 DCHECK(shared()->bound());
5976 bindings->
map() ==
GetHeap()->fixed_cow_array_map());
5977 set_literals_or_bindings(bindings);
5982 DCHECK(!shared()->bound());
6022 DCHECK(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
6023 for (
int offset = kHeaderSize; offset < object_size; offset +=
kPointerSize) {
6032 #define ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(name, type, offset) \
6033 template<class Derived, class TableType> \
6034 type* OrderedHashTableIterator<Derived, TableType>::name() const { \
6035 return type::cast(READ_FIELD(this, offset)); \
6037 template<class Derived, class TableType> \
6038 void OrderedHashTableIterator<Derived, TableType>::set_##name( \
6039 type* value, WriteBarrierMode mode) { \
6040 WRITE_FIELD(this, offset, value); \
6041 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
6048 #undef ORDERED_HASH_TABLE_ITERATOR_ACCESSORS
6073 DCHECK_LT(kGeneratorExecuting, kGeneratorClosed);
6075 return continuation() > 0;
6094 SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
6095 return reinterpret_cast<HeapNumber*
>(object);
6099 const HeapNumber* HeapNumber::cast(
const Object*
object) {
6100 SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
6101 return reinterpret_cast<const HeapNumber*
>(object);
6116 ACCESSORS(JSMessageObject, type, String, kTypeOffset)
6117 ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
6120 SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
6121 SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)
6124 INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
6127 ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
6128 ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
6129 ACCESSORS(Code, raw_type_feedback_info,
Object, kTypeFeedbackInfoOffset)
6139 if (!
READ_FIELD(
this, kTypeFeedbackInfoOffset)->IsSmi()) {
6147 return raw_type_feedback_info();
6153 set_raw_type_feedback_info(value,
mode);
6161 Smi* smi_key = Smi::cast(raw_type_feedback_info());
6219 void* JSArrayBuffer::backing_store()
const {
6221 return reinterpret_cast<void*
>(ptr);
6226 intptr_t ptr =
reinterpret_cast<intptr_t
>(value);
6269 Object* data = this->data();
6271 Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
6290 DCHECK(this->data()->IsFixedArray());
6291 Object* data = this->data();
6298 DCHECK(this->data()->IsFixedArray());
6299 Object* data = this->data();
6307 return FixedArray::cast(data())->get(index);
6314 FixedArray::cast(data())->set(index, value);
6326 if (ElementsAreSafeToExamine()) {
6332 (fixed_array->IsFixedDoubleArray() ||
6333 fixed_array ==
GetHeap()->empty_fixed_array())) ||
6335 fixed_array->IsFixedArray() &&
6336 fixed_array->IsDictionary()) ||
6339 (elements()->IsFixedArray() && elements()->length() >= 2));
6394 return array->IsExternalArray();
6398 #define EXTERNAL_ELEMENTS_CHECK(Type, type, TYPE, ctype, size) \
6399 bool JSObject::HasExternal##Type##Elements() { \
6400 HeapObject* array = elements(); \
6401 DCHECK(array != NULL); \
6402 if (!array->IsHeapObject()) \
6404 return array->map()->instance_type() == EXTERNAL_##TYPE##_ARRAY_TYPE; \
6409 #undef EXTERNAL_ELEMENTS_CHECK
6415 return array->IsFixedTypedArrayBase();
6419 #define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype, size) \
6420 bool JSObject::HasFixed##Type##Elements() { \
6421 HeapObject* array = elements(); \
6422 DCHECK(array != NULL); \
6423 if (!array->IsHeapObject()) \
6425 return array->map()->instance_type() == FIXED_##TYPE##_ARRAY_TYPE; \
6430 #undef FIXED_TYPED_ELEMENTS_CHECK
6445 return NameDictionary::cast(properties());
6451 return SeededNumberDictionary::cast(elements());
6470 return String::cast(
this)->ComputeAndSetHash();
6474 return this->IsSymbol() && Symbol::cast(
this)->is_own();
6480 raw_running_hash_(seed),
6482 is_array_index_(0 < length_ && length_ <=
String::kMaxArrayIndexSize),
6483 is_first_char_(
true) {
6495 running_hash += (running_hash << 10);
6496 running_hash ^= (running_hash >> 6);
6497 return running_hash;
6502 running_hash += (running_hash << 3);
6503 running_hash ^= (running_hash >> 11);
6504 running_hash += (running_hash << 15);
6508 return running_hash;
6521 if (c < '0' || c >
'9') {
6528 if (c ==
'0' &&
length_ > 1) {
6542 template<
typename Char>
6544 DCHECK(
sizeof(Char) == 1 ||
sizeof(Char) == 2);
6547 for (;
i < length;
i++) {
6555 for (;
i < length;
i++) {
6562 template <
typename s
char>
6582 while (
NULL != (
string = op.
Next(&offset))) {
6602 return IsString() && String::cast(
this)->AsArrayIndex(index);
6616 DCHECK(IsInternalizedString());
6618 if (canonical ==
this)
return;
6620 DCHECK(canonical->IsInternalizedString());
6631 DCHECK(IsInternalizedString());
6634 DCHECK(canonical->IsInternalizedString());
6642 return map()->constructor();
6648 if (object->IsJSProxy()) {
6660 if (object->IsJSProxy()) {
6673 if (object->IsJSObject() && key->AsArrayIndex(&index)) {
6683 if (object->IsJSProxy()) {
6693 return JSGlobalProxy::cast(global_proxy())->IsDetachedFrom(
this);
6705 return object->IsJSProxy()
6713 ? JSProxy::cast(
this)->GetIdentityHash()
6714 : JSObject::cast(
this)->GetIdentityHash();
6719 if (object->IsJSProxy()) {
6732 if (object->IsJSProxy()) {
6745 if (object->IsJSProxy()) {
6786 if (!receiver->IsJSObject())
return false;
6787 return FunctionTemplateInfo::cast(expected_receiver_type())
6788 ->IsTemplateFor(JSObject::cast(receiver)->
map());
6797 template<
typename Derived,
typename Shape,
typename Key>
6801 SetEntry(entry, key, value, PropertyDetails(
Smi::FromInt(0)));
6805 template<
typename Derived,
typename Shape,
typename Key>
6809 PropertyDetails details) {
6811 details.IsDeleted() ||
6812 details.dictionary_index() > 0);
6813 int index = DerivedHashTable::EntryToIndex(entry);
6823 DCHECK(other->IsNumber());
6835 DCHECK(other->IsNumber());
6848 DCHECK(other->IsNumber());
6854 return isolate->
factory()->NewNumberFromUint(key);
6861 if (key->Hash() != Name::cast(other)->Hash())
return false;
6862 return key->Equals(Name::cast(other));
6872 return Name::cast(other)->Hash();
6878 DCHECK(key->IsUniqueName());
6890 return key->SameValue(other);
6895 return Smi::cast(key->GetHash())->value();
6901 return Smi::cast(other->
GetHash())->value();
6917 template <
int entrysize>
6919 return key->SameValue(other);
6923 template <
int entrysize>
6925 intptr_t hash =
reinterpret_cast<intptr_t
>(*key);
6926 return (
uint32_t)(hash & 0xFFFFFFFF);
6930 template <
int entrysize>
6933 intptr_t hash =
reinterpret_cast<intptr_t
>(other);
6934 return (
uint32_t)(hash & 0xFFFFFFFF);
6938 template <
int entrysize>
6956 DCHECK(array->HasFastSmiOrObjectElements());
6958 const int kArraySizeThatFitsComfortablyInNewSpace = 128;
6959 if (elts->length() < required_size) {
6962 Expand(array, required_size + (required_size >> 3));
6964 }
else if (!array->GetHeap()->new_space()->Contains(*elts) &&
6965 required_size < kArraySizeThatFitsComfortablyInNewSpace) {
6968 Expand(array, required_size);
6980 bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
6991 DCHECK((storage->map() == array->GetHeap()->fixed_double_array_map() &&
6993 ((storage->map() != array->GetHeap()->fixed_double_array_map()) &&
6997 array->set_elements(*storage);
7023 if (delta == 0)
return;
7031 if (new_count >= 0) {
7045 if (delta == 0)
return;
7047 if (new_count >= 0) {
7048 new_count &= ~
Smi::kMinValue;
7100 Relocatable::Relocatable(
Isolate* isolate) {
7102 prev_ = isolate->relocatable_top();
7103 isolate->set_relocatable_top(
this);
7107 Relocatable::~Relocatable() {
7108 DCHECK_EQ(isolate_->relocatable_top(),
this);
7109 isolate_->set_relocatable_top(prev_);
7119 v->VisitExternalReference(
7124 template<
typename StaticVisitor>
7126 StaticVisitor::VisitExternalReference(
7133 v->VisitExternalOneByteString(
7138 template <
typename StaticVisitor>
7141 StaticVisitor::VisitExternalOneByteString(
7148 v->VisitExternalTwoByteString(
7153 template<
typename StaticVisitor>
7156 StaticVisitor::VisitExternalTwoByteString(
7161 template<
int start_offset,
int end_offset,
int size>
7170 template<
int start_offset>
7179 template<
class Derived,
class TableType>
7181 TableType* table(TableType::cast(this->table()));
7182 int index = Smi::cast(this->index())->value();
7183 Object* key = table->KeyAt(index);
7184 DCHECK(!key->IsTheHole());
7190 array->
set(0, CurrentKey());
7195 array->
set(0, CurrentKey());
7196 array->
set(1, CurrentValue());
7202 int index = Smi::cast(this->index())->value();
7204 DCHECK(!value->IsTheHole());
7210 #undef CAST_ACCESSOR
7211 #undef INT_ACCESSORS
7213 #undef ACCESSORS_TO_SMI
7214 #undef SMI_ACCESSORS
7215 #undef SYNCHRONIZED_SMI_ACCESSORS
7216 #undef NOBARRIER_SMI_ACCESSORS
7218 #undef BOOL_ACCESSORS
7220 #undef FIELD_ADDR_CONST
7222 #undef NOBARRIER_READ_FIELD
7224 #undef NOBARRIER_WRITE_FIELD
7225 #undef WRITE_BARRIER
7226 #undef CONDITIONAL_WRITE_BARRIER
7227 #undef READ_DOUBLE_FIELD
7228 #undef WRITE_DOUBLE_FIELD
7229 #undef READ_INT_FIELD
7230 #undef WRITE_INT_FIELD
7231 #undef READ_INTPTR_FIELD
7232 #undef WRITE_INTPTR_FIELD
7233 #undef READ_UINT32_FIELD
7234 #undef WRITE_UINT32_FIELD
7235 #undef READ_SHORT_FIELD
7236 #undef WRITE_SHORT_FIELD
7237 #undef READ_BYTE_FIELD
7238 #undef WRITE_BYTE_FIELD
7239 #undef NOBARRIER_READ_BYTE_FIELD
7240 #undef NOBARRIER_WRITE_BYTE_FIELD
#define DCHECK_TAG_ALIGNED(address)
#define SLOW_DCHECK(condition)
An object reference managed by the v8 garbage collector.
Isolate represents an isolated instance of the V8 engine.
A superclass for symbols and strings.
A JavaScript object (ECMA-262, 4.3.3)
An ExternalOneByteStringResource is a wrapper around an one-byte string buffer that resides outside V...
An ExternalStringResource is a wrapper around a two-byte string buffer that resides outside V8's heap...
bool IsName() const
Returns true if this value is a symbol or a string.
bool IsTrue() const
Returns true if this value is true.
bool IsString() const
Returns true if this value is an instance of the String type.
bool IsNumber() const
Returns true if this value is a number.
bool IsNull() const
Returns true if this value is the null value.
bool IsBoolean() const
Returns true if this value is boolean.
bool IsFalse() const
Returns true if this value is false.
bool IsExternal() const
Returns true if this value is external.
bool IsUndefined() const
Returns true if this value is the undefined value.
static double nan_value()
PropertyAttributes property_attributes()
void set_all_can_write(bool value)
bool HasExpectedReceiverType()
static const int kAllCanWriteBit
static const int kAllCanReadBit
void set_property_attributes(PropertyAttributes attributes)
bool IsCompatibleReceiver(Object *receiver)
void set_all_can_read(bool value)
const char * PretenureDecisionName(PretenureDecision decision)
int memento_found_count()
void set_pretenure_decision(PretenureDecision decision)
static const double kPretenureRatio
static bool CanTrack(InstanceType type)
static const int kPretenureMinimumCreated
static AllocationSiteMode GetMode(ElementsKind boilerplate_elements_kind)
void set_deopt_dependent_code(bool deopt)
PretenureDecision pretenure_decision()
int memento_create_count()
bool DigestPretenuringFeedback(bool maximum_size_scavenge)
void set_memento_create_count(int count)
void IncrementMementoCreateCount()
bool MakePretenureDecision(PretenureDecision current_decision, double ratio, bool maximum_size_scavenge)
bool IncrementMementoFoundCount()
DependentCode::DependencyGroup ToDependencyGroup(Reason reason)
void set_memento_found_count(int count)
void SetElementsKind(ElementsKind kind)
static U update(U previous, T value)
static bool get(Smi *smi, int bit_position)
static Smi * set(Smi *smi, int bit_position, bool v)
Code * builtin(Name name)
static ByteArray * FromDataStartAddress(Address address)
void set(int index, byte value)
Address GetDataStartAddress()
static const int kValueOffset
static Handle< Object > AsHandle(Isolate *isolate, HashTableKey *key)
void EvictCandidate(SharedFunctionInfo *shared_info)
void Add(Handle< Map > map_to_find, Handle< Object > obj_to_replace)
Handle< Map > find_[kMaxCount]
static const int kMaxCount
Handle< Object > replace_[kMaxCount]
void set_allow_osr_at_loop_nesting_level(int level)
void set_compiled_optimizable(bool value)
ConstantPoolArray * constant_pool()
void set_has_deoptimization_support(bool value)
static const int kOptimizableOffset
void set_raw_kind_specific_flags1(int value)
unsigned safepoint_table_offset()
bool is_compiled_optimizable()
static Flags ComputeHandlerFlags(Kind handler_kind, StubType type=NORMAL, CacheHolderFlag holder=kCacheOnReceiver)
void set_has_debug_break_slots(bool value)
static const int kFlagsOffset
void set_constant_pool(Object *constant_pool)
void set_back_edge_table_offset(unsigned offset)
void set_profiler_ticks(int ticks)
static Code * GetCodeFromTargetAddress(Address address)
static const int kKindSpecificFlags2Offset
ExtraICState extra_ic_state()
void set_has_function_cache(bool flag)
bool has_function_cache()
int allow_osr_at_loop_nesting_level()
static StubType ExtractTypeFromFlags(Flags flags)
static bool IsWeakObjectInOptimizedCode(Object *object)
static Flags ComputeFlags(Kind kind, InlineCacheState ic_state=UNINITIALIZED, ExtraICState extra_ic_state=kNoExtraICState, StubType type=NORMAL, CacheHolderFlag holder=kCacheOnReceiver)
static const int kKindSpecificFlags1Offset
static const int kConstantPoolOffset
bool is_inline_cache_stub()
bool marked_for_deoptimization()
static const int kTypeFeedbackInfoOffset
void set_stub_key(uint32_t key)
byte * relocation_start()
static Flags RemoveTypeAndHolderFromFlags(Flags flags)
STATIC_ASSERT(NUMBER_OF_KINDS<=16)
int instruction_size() const
bool is_keyed_store_stub()
void set_flags(Flags flags)
bool is_keyed_load_stub()
ByteArray * unchecked_relocation_info()
void set_builtin_index(int id)
static InlineCacheState ExtractICStateFromFlags(Flags flags)
static Kind ExtractKindFromFlags(Flags flags)
void set_safepoint_table_offset(unsigned offset)
void set_is_crankshafted(bool value)
void set_marked_for_deoptimization(bool flag)
byte * instruction_start()
static Object * GetObjectFromEntryAddress(Address location_of_address)
void mark_as_invalidated_weak_stub()
bool is_invalidated_weak_stub()
InlineCacheState ic_state()
static const int kRelocationInfoOffset
static const int kSafepointTableOffsetBitCount
static const int kHeaderSize
static CacheHolderFlag ExtractCacheHolderFromFlags(Flags flags)
void set_stack_slots(unsigned slots)
unsigned back_edge_table_offset()
static const int kFullCodeFlags
Object * type_feedback_info()
void set_optimizable(bool value)
void set_raw_kind_specific_flags2(int value)
bool has_debug_break_slots()
static Flags ComputeMonomorphicFlags(Kind kind, ExtraICState extra_ic_state=kNoExtraICState, CacheHolderFlag holder=kCacheOnReceiver, StubType type=NORMAL)
bool back_edges_patched_for_osr()
static Flags RemoveTypeFromFlags(Flags flags)
static bool IsWeakObjectInIC(Object *object)
void set_is_turbofanned(bool value)
static ExtraICState ExtractExtraICStateFromFlags(Flags flags)
void set_type_feedback_info(Object *value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
static const int kStackSlotsBitCount
static const int kMaxLoopNestingMarker
static const int kProfilerTicksOffset
bool has_deoptimization_support()
static Handle< Object > AsHandle(Isolate *isolate, HashTableKey *key)
void Reset(ConsString *cons_string, int offset=0)
void PushRight(ConsString *string)
static int OffsetForDepth(int depth)
String * Next(int *offset_out)
void PushLeft(ConsString *string)
void AdjustMaximumDepth()
void set_first(String *first, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Object * unchecked_second()
static const int kFirstOffset
Object * unchecked_first()
void set_second(String *second, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
static Type next_type(Type type)
LayoutSection final_section()
int64_t get_int64_entry(int index)
Address get_code_ptr_entry(int index)
int32_t get_int32_entry(int index)
static const int kFirstEntryOffset
WeakObjectState get_weak_object_state()
bool offset_is_type(int offset, Type type)
void set_at_offset(int offset, int32_t value)
static const int kExtendedInt64CountOffset
Object * get_heap_ptr_entry(int index)
void InitExtended(const NumberOfEntries &small, const NumberOfEntries &extended)
static const int kExtendedInt32CountOffset
static const int kHeaderSize
int get_extended_section_header_offset()
int last_index(Type type, LayoutSection layout_section)
int OffsetOfElementAt(int index)
void set(int index, Address value)
static const int kExtendedCodePtrCountOffset
void Init(const NumberOfEntries &small)
static int SizeFor(const NumberOfEntries &small)
void set_weak_object_state(WeakObjectState state)
static const int kSmallLayout1Offset
static const int kSmallLayout2Offset
bool is_extended_layout()
int first_index(Type type, LayoutSection layout_section)
int number_of_entries(Type type, LayoutSection layout_section)
double get_int64_entry_as_double(int index)
static int SizeForExtended(const NumberOfEntries &small, const NumberOfEntries &extended)
int first_extended_section_index()
static const int kExtendedHeapPtrCountOffset
JSObject * global_proxy()
static Context * cast(Object *context)
Context * native_context()
void AddOptimizedFunction(JSFunction *function)
void RemoveOptimizedFunction(JSFunction *function)
GlobalObject * global_object()
Object * object_at(int i)
int number_of_entries(DependencyGroup group)
void ExtendGroup(DependencyGroup group)
@ kAllocationSiteTenuringChangedGroup
@ kAllocationSiteTransitionChangedGroup
static const int kCodesStartIndex
static const int kGroupCount
void set_number_of_entries(DependencyGroup group, int value)
void set_object_at(int i, Object *object)
CompilationInfo * compilation_info_at(int i)
void copy(int from, int to)
WhitenessWitness(DescriptorArray *array)
IncrementalMarking * marking_
Object ** GetDescriptorStartSlot(int descriptor_number)
void SetValue(int descriptor_number, Object *value)
int GetSortedKeyIndex(int descriptor_number)
void SetRepresentation(int descriptor_number, Representation representation)
static const int kDescriptorLengthOffset
Name * GetKey(int descriptor_number)
static const int kNotFound
void SetSortedKey(int pointer, int descriptor_number)
static int ToValueIndex(int descriptor_number)
Object ** GetValueSlot(int descriptor_number)
PropertyDetails GetDetails(int descriptor_number)
AccessorDescriptor * GetCallbacks(int descriptor_number)
void Get(int descriptor_number, Descriptor *desc)
void SetNumberOfDescriptors(int number_of_descriptors)
Object * GetValue(int descriptor_number)
static int GetValueOffset(int descriptor_number)
void SwapSortedKeys(int first, int second)
int GetFieldIndex(int descriptor_number)
void Set(int descriptor_number, Descriptor *desc)
static int ToDetailsIndex(int descriptor_number)
Object ** GetDescriptorEndSlot(int descriptor_number)
PropertyType GetType(int descriptor_number)
Name * GetSortedKey(int descriptor_number)
Object * GetConstant(int descriptor_number)
void Append(Descriptor *desc)
Object ** GetKeySlot(int descriptor_number)
Object * GetCallbacksObject(int descriptor_number)
static const int kFirstIndex
static int ToKeyIndex(int descriptor_number)
static const int kFirstOffset
int number_of_descriptors()
HeapType * GetFieldType(int descriptor_number)
int Lookup(Map *source, Name *name)
static void GenerateNewEnumerationIndices(Handle< NameDictionary > dictionary)
void SetEntry(int entry, Handle< Object > key, Handle< Object > value)
virtual void Validate(Handle< JSObject > obj)=0
static ElementsAccessor * ForKind(ElementsKind elements_kind)
static const int kExternalPointerOffset
static Handle< Object > get(Handle< ExternalFloat32Array > array, int index)
float get_scalar(int index)
void set(int index, float value)
static Handle< Object > get(Handle< ExternalFloat64Array > array, int index)
double get_scalar(int index)
void set(int index, double value)
int16_t get_scalar(int index)
void set(int index, int16_t value)
static Handle< Object > get(Handle< ExternalInt16Array > array, int index)
int32_t get_scalar(int index)
static Handle< Object > get(Handle< ExternalInt32Array > array, int index)
void set(int index, int32_t value)
void set(int index, int8_t value)
int8_t get_scalar(int index)
static Handle< Object > get(Handle< ExternalInt8Array > array, int index)
void ExternalOneByteStringIterateBody()
void set_resource(const Resource *buffer)
const uint8_t * GetChars()
const Resource * resource()
uint16_t ExternalOneByteStringGet(int index)
uint16_t ExternalTwoByteStringGet(int index)
const uint16_t * ExternalTwoByteStringGetData(unsigned start)
void set_resource(const Resource *buffer)
const uint16_t * GetChars()
const Resource * resource()
void ExternalTwoByteStringIterateBody()
uint16_t get_scalar(int index)
static Handle< Object > get(Handle< ExternalUint16Array > array, int index)
void set(int index, uint16_t value)
static Handle< Object > get(Handle< ExternalUint32Array > array, int index)
uint32_t get_scalar(int index)
void set(int index, uint32_t value)
void set(int index, uint8_t value)
static Handle< Object > get(Handle< ExternalUint8Array > array, int index)
uint8_t get_scalar(int index)
uint8_t * external_uint8_clamped_pointer()
static Handle< Object > get(Handle< ExternalUint8ClampedArray > array, int index)
void set(int index, uint8_t value)
uint8_t get_scalar(int index)
static const int kLengthOffset
static const int kHeaderSize
static int SizeOf(Map *map, HeapObject *object)
Object ** RawFieldOfElementAt(int index)
bool ContainsOnlySmisOrHoles()
static int OffsetOfElementAt(int index)
static void NoWriteBarrierSet(FixedArray *array, int index, Object *value)
static void NoIncrementalWriteBarrierSet(FixedArray *array, int index, Object *value)
Object ** GetFirstElementAddress()
bool is_the_hole(int index)
static int SizeFor(int length)
void FillWithHoles(int from, int to)
void set(int index, Object *value)
STATIC_ASSERT(kHeaderSize==Internals::kFixedArrayHeaderSize)
void set_undefined(int index)
void set_the_hole(int index)
static void IterateBody(HeapObject *obj, ObjectVisitor *v)
static double canonical_not_the_hole_nan_as_double()
static double hole_nan_as_double()
bool is_the_hole(int index)
static bool is_the_hole_nan(double value)
void set(int index, double value)
void set_the_hole(int index)
int64_t get_representation(int index)
static int SizeFor(int length)
double get_scalar(int index)
static Handle< Object > get(Handle< FixedDoubleArray > array, int index)
void FillWithHoles(int from, int to)
static const int kDataOffset
int TypedArraySize(InstanceType type)
static Handle< Object > SetValue(Handle< FixedTypedArray< Traits > > array, uint32_t index, Handle< Object > value)
void set(int index, ElementType value)
static Handle< Object > get(Handle< FixedTypedArray > array, int index)
static ElementType from_int(int value)
ElementType get_scalar(int index)
static ElementType from_double(double value)
Traits::ElementType ElementType
static void IterateBody(HeapObject *obj, int object_size, ObjectVisitor *v)
Address foreign_address()
void set_foreign_address(Address value)
void ForeignIterateBody()
static const int kForeignAddressOffset
static Handle< T > cast(Handle< S > that)
static Handle< T > null()
virtual MUST_USE_RESULT Handle< Object > AsHandle(Isolate *isolate)=0
static int ComputeCapacity(int at_least_space_for)
static MUST_USE_RESULT Handle< ObjectHashTable > Shrink(Handle< ObjectHashTable > table, Handle< Object > key)
static const uint32_t kSignMask
static const int kValueOffset
static const uint32_t kExponentMask
static const int kExponentBias
static const int kExponentShift
void set_value(double value)
void synchronized_set_map_no_write_barrier(Map *value)
void set_map_no_write_barrier(Map *value)
static const int kMapOffset
STATIC_ASSERT(kMapOffset==Internals::kHeapObjectMapOffset)
MapWord synchronized_map_word() const
static Object ** RawField(HeapObject *obj, int offset)
void synchronized_set_map_word(MapWord map_word)
void IterateNextCodeLink(ObjectVisitor *v, int offset)
void IteratePointer(ObjectVisitor *v, int offset)
Isolate * GetIsolate() const
static HeapObject * FromAddress(Address address)
static const int kHeaderSize
void set_map_word(MapWord map_word)
void synchronized_set_map(Map *value)
WriteBarrierMode GetWriteBarrierMode(const DisallowHeapAllocation &promise)
bool MayContainRawValues()
void IteratePointers(ObjectVisitor *v, int start, int end)
int SizeFromMap(Map *map)
FixedTypedArrayBase * EmptyFixedTypedArrayForMap(Map *map)
bool InNewSpace(Object *object)
ExternalArray * EmptyExternalArrayForMap(Map *map)
IncrementalMarking * incremental_marking()
MarkCompactCollector * mark_compact_collector()
void EnterNoMarkingScope()
static const int kStringEncodingMask
static const int kFullStringRepresentationMask
static const int kExternalTwoByteRepresentationTag
static internal::Object * IntToSmi(int value)
static int SmiValue(const internal::Object *value)
static const int kExternalOneByteRepresentationTag
static bool IsValidSmi(intptr_t value)
static bool HasHeapObjectTag(const internal::Object *value)
DescriptorLookupCache * descriptor_lookup_cache()
void VisitTwoByteString(const uint16_t *chars, int length)
void VisitOneByteString(const uint8_t *chars, int length)
static uint32_t Hash(String *string, uint32_t seed)
static const int kIsExternalBit
static const int kShouldBeFreed
void set_should_be_freed(bool value)
void set_is_external(bool value)
static const int kBackingStoreOffset
static void EnsureSize(Handle< JSArray > array, int minimum_size_of_backing_fixed_array)
void set_length(Smi *length)
static void SetContent(Handle< JSArray > array, Handle< FixedArrayBase > storage)
static void Expand(Handle< JSArray > array, int minimum_size_of_backing_fixed_array)
bool AllowsSetElementsLength()
static const int kJSBuiltinsCount
void set_javascript_builtin(Builtins::JavaScript id, Object *value)
static int OffsetOfFunctionWithId(Builtins::JavaScript id)
void set_javascript_builtin_code(Builtins::JavaScript id, Code *value)
Code * javascript_builtin_code(Builtins::JavaScript id)
static int OffsetOfCodeWithId(Builtins::JavaScript id)
Object * javascript_builtin(Builtins::JavaScript id)
static const int kEntriesIndex
static const int kEntrySize
static const int kCacheSizeIndex
void set_finger_index(int finger_index)
static const int kFingerIndex
void set_code(Code *code)
bool IsFromExtensionScript()
void set_context(Object *context)
void set_code_no_write_barrier(Code *code)
Object * instance_prototype()
void ReplaceCode(Code *code)
JSObject * global_proxy()
bool should_have_prototype()
bool IsInobjectSlackTrackingInProgress()
void set_literals(FixedArray *literals)
bool IsFromNativeScript()
bool IsMarkedForOptimization()
bool NeedsArgumentsAdaption()
FixedArray * function_bindings()
static const int kNoSlackTracking
void set_function_bindings(FixedArray *bindings)
bool IsMarkedForConcurrentOptimization()
static const int kContextOffset
bool has_instance_prototype()
bool IsInOptimizationQueue()
static const int kCodeEntryOffset
static const int kGeneratorClosed
static const int kGeneratorExecuting
bool IsDetachedFrom(GlobalObject *global) const
void PopulateValueArray(FixedArray *array)
static int SizeOf(Map *map, HeapObject *object)
static void SetMapAndElements(Handle< JSObject > object, Handle< Map > map, Handle< FixedArrayBase > elements)
Object * InObjectPropertyAtPut(int index, Object *value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
bool HasFastSmiOrObjectElements()
bool HasIndexedInterceptor()
Object * InObjectPropertyAt(int index)
bool HasFastObjectElements()
void FastPropertyAtPut(FieldIndex index, Object *value)
bool HasFixedTypedArrayElements()
static void EnsureCanContainHeapObjectElements(Handle< JSObject > obj)
void InitializeBody(Map *map, Object *pre_allocated_value, Object *filler_value)
SeededNumberDictionary * element_dictionary()
int GetInObjectPropertyOffset(int index)
static MUST_USE_RESULT Maybe< PropertyAttributes > GetElementAttributeWithReceiver(Handle< JSObject > object, Handle< JSReceiver > receiver, uint32_t index, bool check_prototype)
static const int kHeaderSize
static void TransitionElementsKind(Handle< JSObject > object, ElementsKind to_kind)
ElementsAccessor * GetElementsAccessor()
void SetInternalField(int index, Object *value)
static void MigrateToMap(Handle< JSObject > object, Handle< Map > new_map)
bool HasNamedInterceptor()
void initialize_elements()
int GetInternalFieldCount()
Object * GetInternalField(int index)
bool HasSloppyArgumentsElements()
static const int kPropertiesOffset
bool HasFastSmiElements()
bool HasFastHoleyElements()
static void ValidateElements(Handle< JSObject > object)
static void EnsureCanContainElements(Handle< JSObject > object, Object **elements, uint32_t count, EnsureElementsMode mode)
int GetInternalFieldOffset(int index)
ElementsKind GetElementsKind()
bool HasDictionaryElements()
static const int kElementsOffset
Object * RawFastPropertyAt(FieldIndex index)
static Handle< Smi > GetOrCreateIdentityHash(Handle< JSObject > object)
bool HasFastDoubleElements()
bool HasExternalArrayElements()
NameDictionary * property_dictionary()
static MUST_USE_RESULT MaybeHandle< Object > GetPropertyWithHandler(Handle< JSProxy > proxy, Handle< Object > receiver, Handle< Name > name)
static MUST_USE_RESULT Maybe< bool > HasElementWithHandler(Handle< JSProxy > proxy, uint32_t index)
static MUST_USE_RESULT MaybeHandle< Object > GetElementWithHandler(Handle< JSProxy > proxy, Handle< Object > receiver, uint32_t index)
static MUST_USE_RESULT Maybe< bool > HasPropertyWithHandler(Handle< JSProxy > proxy, Handle< Name > name)
static Handle< Smi > GetOrCreateIdentityHash(Handle< JSProxy > proxy)
static MUST_USE_RESULT Maybe< PropertyAttributes > GetElementAttributeWithHandler(Handle< JSProxy > proxy, Handle< JSReceiver > receiver, uint32_t index)
static MUST_USE_RESULT MaybeHandle< Object > SetElementWithHandler(Handle< JSProxy > proxy, Handle< JSReceiver > receiver, uint32_t index, Handle< Object > value, StrictMode strict_mode)
static MUST_USE_RESULT MaybeHandle< Object > SetPropertyWithHandler(Handle< JSProxy > proxy, Handle< Object > receiver, Handle< Name > name, Handle< Object > value, StrictMode strict_mode)
static MUST_USE_RESULT Maybe< bool > HasOwnElement(Handle< JSReceiver > object, uint32_t index)
static MUST_USE_RESULT Maybe< PropertyAttributes > GetElementAttribute(Handle< JSReceiver > object, uint32_t index)
Object * GetConstructor()
static MUST_USE_RESULT Maybe< PropertyAttributes > GetPropertyAttributes(Handle< JSReceiver > object, Handle< Name > name)
static MUST_USE_RESULT Maybe< bool > HasElement(Handle< JSReceiver > object, uint32_t index)
static MUST_USE_RESULT Maybe< PropertyAttributes > GetOwnPropertyAttributes(Handle< JSReceiver > object, Handle< Name > name)
static Handle< Smi > GetOrCreateIdentityHash(Handle< JSReceiver > object)
static MUST_USE_RESULT Maybe< bool > HasOwnProperty(Handle< JSReceiver >, Handle< Name > name)
static MUST_USE_RESULT Maybe< PropertyAttributes > GetOwnElementAttribute(Handle< JSReceiver > object, uint32_t index)
Object * GetIdentityHash()
static MUST_USE_RESULT Maybe< bool > HasProperty(Handle< JSReceiver > object, Handle< Name > name)
static const int kIrregexpCaptureCountIndex
void SetDataAt(int index, Object *value)
Object * DataAt(int index)
static const int kFlagsIndex
static const int kSourceIndex
static const int kDataIndex
void PopulateValueArray(FixedArray *array)
static Handle< Object > AsHandle(Isolate *isolate, HashTableKey *key)
bool HasPrototypeTransitions()
static const int kIsExtensible
bool has_external_array_elements()
ElementsKind elements_kind()
int NumberOfOwnDescriptors()
void set_non_instance_prototype(bool value)
void set_migration_target(bool value)
int unused_property_fields()
FixedArray * GetPrototypeTransitions()
int pre_allocated_property_fields()
void set_is_access_check_needed(bool access_check_needed)
Object * GetBackPointer()
bool is_access_check_needed()
Map * elements_transition_map()
bool HasElementsTransition()
void set_bit_field3(uint32_t bits)
static const int kBitFieldOffset
void set_unused_property_fields(int value)
void set_bit_field(byte value)
Map * GetTransition(int transition_index)
void set_function_with_prototype(bool value)
bool has_fast_smi_or_object_elements()
void set_construction_count(int value)
static const int kIsAccessCheckNeeded
void SetNumberOfProtoTransitions(int value)
bool has_named_interceptor()
void NotifyLeafMapLayoutChange()
static Handle< String > ExpectedTransitionKey(Handle< Map > map)
void LookupTransition(JSObject *holder, Name *name, LookupResult *result)
void SetBackPointer(Object *value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
int inobject_properties()
static const int kBitField3Offset
static Handle< Map > FindTransitionToField(Handle< Map > map, Handle< Name > key)
bool done_inobject_slack_tracking()
static const int kHasNonInstancePrototype
bool CanHaveMoreTransitions()
void set_instance_type(InstanceType value)
PropertyDetails GetLastDescriptorDetails()
void set_is_extensible(bool value)
bool has_instance_call_handler()
static const int kInstanceTypeOffset
void SetNumberOfOwnDescriptors(int number)
bool is_migration_target()
bool has_fixed_typed_array_elements()
static const int kVisitorIdOffset
void InitializeDescriptors(DescriptorArray *descriptors)
static Handle< Map > ExpectedTransitionTarget(Handle< Map > map)
static const int kBitField2Offset
void set_is_prototype_map(bool value)
void ZapPrototypeTransitions()
void LookupDescriptor(JSObject *holder, Name *name, LookupResult *result)
static const int kInstanceSizeOffset
void set_has_instance_call_handler()
void set_pre_allocated_property_fields(int value)
FixedArrayBase * GetInitialElements()
InstanceType instance_type()
static const int kPreAllocatedPropertyFieldsOffset
void set_done_inobject_slack_tracking(bool value)
bool has_non_instance_prototype()
void set_instance_size(int value)
static const int kTransitionsOrBackPointerOffset
int GetInObjectPropertyOffset(int index)
bool has_fast_double_elements()
static void SetPrototypeTransitions(Handle< Map > map, Handle< FixedArray > prototype_transitions)
void set_bit_field2(byte value)
static const int kInObjectPropertiesOffset
static const int kCodeCacheOffset
void set_visitor_id(int visitor_id)
void set_inobject_properties(int value)
int NumberOfProtoTransitions()
void init_back_pointer(Object *undefined)
bool function_with_prototype()
void set_owns_descriptors(bool owns_descriptors)
void ClearCodeCache(Heap *heap)
bool HasTransitionArray() const
void AppendDescriptor(Descriptor *desc)
bool has_indexed_interceptor()
static const int kPrototypeOffset
int SearchTransition(Name *name)
bool TooManyFastProperties(StoreFromKeyed store_mode)
void set_dictionary_map(bool value)
static const int kUnusedPropertyFieldsOffset
CodeFlusher * code_flusher()
static MemoryChunk * FromAddress(Address a)
static Address & Address_at(Address addr)
static uint32_t Hash(Handle< Name > key)
static Handle< Object > AsHandle(Isolate *isolate, Handle< Name > key)
static bool IsMatch(Handle< Name > key, Object *other)
static uint32_t HashForObject(Handle< Name > key, Object *object)
static void DoGenerateNewEnumerationIndices(Handle< NameDictionary > dictionary)
static const int kHashShift
static const int kHashNotComputedMask
static const int kHashFieldOffset
void set_hash_field(uint32_t value)
static const uint32_t kHashBitMask
static const int kIsNotArrayIndexMask
static bool IsHashFieldComputed(uint32_t field)
bool AsArrayIndex(uint32_t *index)
static const int kEntries
static int GetIndex(Handle< Map > map)
static bool IsNormalizedMapCache(const Object *obj)
static bool IsMatch(uint32_t key, Object *other)
static Handle< Object > AsHandle(Isolate *isolate, uint32_t key)
static uint32_t Hash(Handle< Object > key)
static uint32_t HashForObject(Handle< Object > key, Object *object)
static Handle< Object > AsHandle(Isolate *isolate, Handle< Object > key)
static bool IsMatch(Handle< Object > key, Object *other)
static MUST_USE_RESULT Handle< ObjectHashTable > Shrink(Handle< ObjectHashTable > table, Handle< Object > key)
static MUST_USE_RESULT MaybeHandle< Object > GetPropertyOrElement(Handle< Object > object, Handle< Name > key)
static MaybeHandle< JSReceiver > ToObject(Isolate *isolate, Handle< Object > object)
static MUST_USE_RESULT MaybeHandle< Object > GetElement(Isolate *isolate, Handle< Object > object, uint32_t index)
friend class LookupIterator
static MUST_USE_RESULT MaybeHandle< Object > GetProperty(LookupIterator *it)
@ CERTAINLY_NOT_STORE_FROM_KEYED
static Handle< Object > NewStorageFor(Isolate *isolate, Handle< Object > object, Representation representation)
static MUST_USE_RESULT MaybeHandle< Smi > ToSmi(Isolate *isolate, Handle< Object > object)
void VerifyApiCallResultType()
bool IsStringObjectWithCharacterAt(uint32_t index)
bool ToArrayIndex(uint32_t *index)
static Handle< Object > WrapForRead(Isolate *isolate, Handle< Object > object, Representation representation)
static MUST_USE_RESULT MaybeHandle< Object > GetElementWithReceiver(Isolate *isolate, Handle< Object > object, Handle< Object > receiver, uint32_t index)
bool HasSpecificClassOf(String *name)
static const byte kNotBooleanMask
static const byte kUndefined
static const int kKindOffset
static const byte kArgumentMarker
static const byte kException
static const byte kTheHole
static const byte kUninitialized
virtual bool IsMatch(Object *string) OVERRIDE
virtual Handle< Object > AsHandle(Isolate *isolate) OVERRIDE
OneByteStringKey(Vector< const uint8_t > str, uint32_t seed)
Object * ValueAt(int entry)
static const int kMaxRegularHeapObjectSize
static const int kTypeOffset
A class to uniformly access the prototype of any Object and walk its prototype chain.
Object * GetCurrent() const
static const int kCompilationTypeBit
@ COMPILATION_STATE_INITIAL
@ COMPILATION_STATE_COMPILED
CompilationState compilation_state()
void set_compilation_type(CompilationType type)
void set_compilation_state(CompilationState state)
static const int kCompilationStateBit
static uint32_t SeededHashForObject(uint32_t key, uint32_t seed, Object *object)
static uint32_t SeededHash(uint32_t key, uint32_t seed)
bool requires_slow_elements()
void set_requires_slow_elements()
uint32_t max_number_key()
static int SizeFor(int length)
Address GetCharsAddress()
int SeqOneByteStringSize(InstanceType instance_type)
uint16_t SeqOneByteStringGet(int index)
void SeqOneByteStringSet(int index, uint16_t value)
Handle< SeqOneByteString > string_
virtual Handle< Object > AsHandle(Isolate *isolate) OVERRIDE
virtual uint32_t Hash() OVERRIDE
SeqOneByteSubStringKey(Handle< SeqOneByteString > string, int from, int length)
virtual bool IsMatch(Object *string) OVERRIDE
virtual uint32_t HashForObject(Object *other) OVERRIDE
void SeqTwoByteStringSet(int index, uint16_t value)
static int SizeFor(int length)
Address GetCharsAddress()
uint16_t SeqTwoByteStringGet(int index)
int SeqTwoByteStringSize(InstanceType instance_type)
virtual uint32_t HashForObject(Object *other) OVERRIDE
virtual uint32_t Hash() OVERRIDE
SequentialStringKey(Vector< const Char > string, uint32_t seed)
Vector< const Char > string_
FunctionTemplateInfo * get_api_func_data()
int opt_count_and_bailout_reason() const
int start_position() const
static const int kStartPositionShift
void set_start_position_and_type(int value)
void set_formal_parameter_count(int value)
void set_start_position(int start_position)
void increment_deopt_count()
void TryReenableOptimization()
static const int kScopeInfoOffset
int start_position_and_type() const
void set_opt_count(int opt_count)
void set_kind(FunctionKind kind)
static const int kDontAdaptArgumentsSentinel
void set_opt_reenable_tries(int value)
void set_counters(int value)
static const int kStartPositionMask
BuiltinFunctionId builtin_function_id()
void ReplaceCode(Code *code)
void set_opt_count_and_bailout_reason(int value)
BailoutReason DisableOptimizationReason()
bool has_deoptimization_support()
void set_strict_mode(StrictMode strict_mode)
void DontAdaptArguments()
static const int kCodeOffset
int compiler_hints() const
void set_compiler_hints(int value)
void set_deopt_count(int value)
bool HasBuiltinFunctionId()
static const int kParentOffset
void set_parent(String *parent, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
static const int kMaxValue
static const int kMinValue
static Smi * FromInt(int value)
static Smi * FromIntptr(intptr_t value)
static bool IsValid(intptr_t value)
void Reset(String *string, int offset=0)
const uint16_t * buffer16_
ConsStringIteratorOp * op_
StringCharacterStream(String *string, ConsStringIteratorOp *op, int offset=0)
void VisitOneByteString(const uint8_t *chars, int length)
void VisitTwoByteString(const uint16_t *chars, int length)
void AddCharacter(uint16_t c)
void AddCharacters(const Char *chars, int len)
static uint32_t ComputeUtf8Hash(Vector< const char > chars, uint32_t seed, int *utf16_length_out)
bool UpdateIndex(uint16_t c)
uint32_t raw_running_hash_
static const int kZeroHash
static uint32_t HashSequentialString(const schar *chars, int length, uint32_t seed)
StringHasher(int length, uint32_t seed)
static Handle< Object > AsHandle(Isolate *isolate, HashTableKey *key)
bool IsTwoByteRepresentationUnderneath()
bool IsTwoByteRepresentation() const
void SetForwardedInternalizedString(String *string)
bool HasOnlyOneByteChars()
static const int kMaxHashCalcLength
bool SlowAsArrayIndex(uint32_t *index)
String * GetForwardedInternalizedString()
bool IsOneByteRepresentationUnderneath()
bool SlowEquals(String *other)
STATIC_ASSERT(kMaxArrayIndexSize<(1<< kArrayIndexLengthBits))
static ConsString * VisitFlat(Visitor *visitor, String *string, int offset=0)
bool IsOneByteRepresentation() const
static Handle< String > Flatten(Handle< String > string, PretenureFlag pretenure=NOT_TENURED)
void Set(int index, uint16_t value)
bool AsArrayIndex(uint32_t *index)
bool Equals(String *other)
void InitializeBody(int object_size)
static const int kSimpleTransitionIndex
static TransitionArray * cast(Object *obj)
void set_back_pointer_storage(Object *back_pointer, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
static Handle< TransitionArray > ExtendToFullTransitionArray(Handle< Map > containing_map)
static const int kTransitionSize
static const int kNotFound
Object * back_pointer_storage()
Name * GetKey(int transition_number)
static Handle< TransitionArray > Allocate(Isolate *isolate, int number_of_transitions)
PropertyDetails GetTargetDetails(int transition_number)
Map * GetTarget(int transition_number)
bool IsSimpleTransition()
virtual Handle< Object > AsHandle(Isolate *isolate) OVERRIDE
TwoByteStringKey(Vector< const uc16 > str, uint32_t seed)
virtual bool IsMatch(Object *string) OVERRIDE
static const int kStorage2Offset
static const int kStorage3Offset
bool matches_inlined_type_change_checksum(int checksum)
void change_own_type_change_checksum()
static const int kStorage1Offset
void change_ic_generic_count(int delta)
void set_ic_total_count(int count)
void initialize_storage()
static const int kTypeChangeChecksumBits
void change_ic_with_type_info_count(int delta)
void set_inlined_type_change_checksum(int checksum)
int own_type_change_checksum()
int ic_with_type_info_count()
static TypeImpl * cast(typename Config::Base *object)
static uint32_t Hash(uint32_t key)
static uint32_t HashForObject(uint32_t key, Object *object)
virtual uint32_t Hash() OVERRIDE
Utf8StringKey(Vector< const char > string, uint32_t seed)
virtual uint32_t HashForObject(Object *other) OVERRIDE
virtual bool IsMatch(Object *string) OVERRIDE
virtual Handle< Object > AsHandle(Isolate *isolate) OVERRIDE
Vector< const char > string_
static uint32_t Hash(Handle< Object > key)
static Handle< Object > AsHandle(Isolate *isolate, Handle< Object > key)
static uint32_t HashForObject(Handle< Object > key, Object *object)
static bool IsMatch(Handle< Object > key, Object *other)
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf map
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be expose gc extension under the specified name show built in functions in stack traces use random jit cookie to mask large constants minimum length for automatic enable preparsing CPU profiler sampling interval in microseconds trace out of bounds accesses to external arrays default size of stack region v8 is allowed to maximum length of function source code printed in a stack trace min size of a semi the new space consists of two semi spaces print one trace line following each garbage collection do not print 
trace line after scavenger collection print cumulative GC statistics in only print modified registers Trace simulator debug messages Implied by trace sim abort randomize hashes to avoid predictable hash Fixed seed to use to hash property Print the time it takes to deserialize the snapshot A filename with extra code to be included in the A file to write the raw snapshot bytes to(mksnapshot only)") DEFINE_STRING(raw_context_file
enable harmony numeric enable harmony object literal extensions Optimize object size
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be expose gc extension under the specified name show built in functions in stack traces use random jit cookie to mask large constants minimum length for automatic enable preparsing CPU profiler sampling interval in microseconds trace out of bounds accesses to external arrays default size of stack region v8 is allowed to maximum length of function source code printed in a stack trace min size of a semi the new space consists of two semi spaces print one trace line following each garbage collection do not print 
trace line after scavenger collection print cumulative GC statistics in name
enable harmony numeric enable harmony object literal extensions true
enable harmony numeric literals(0o77, 0b11)") DEFINE_BOOL(harmony_object_literals
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long mode(MIPS only)") DEFINE_BOOL(enable_always_align_csp
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be NULL
#define HAS_SMI_TAG(value)
#define OBJECT_POINTER_ALIGN(value)
#define DCHECK_NE(v1, v2)
#define DCHECK(condition)
#define DCHECK_LT(v1, v2)
#define DCHECK_EQ(v1, v2)
uint32_t RoundUpToPowerOfTwo32(uint32_t value)
IN DWORD64 OUT PDWORD64 OUT PIMAGEHLP_SYMBOL64 Symbol
bool Equals(Node *a, Node *b)
@ DONT_ALLOW_DOUBLE_ELEMENTS
@ ALLOW_CONVERTED_DOUBLE_ELEMENTS
@ ALLOW_COPIED_DOUBLE_ELEMENTS
bool IsFastHoleyElementsKind(ElementsKind kind)
const uint32_t kStringEncodingMask
bool IsValidFunctionKind(FunctionKind kind)
kSerializedDataOffset kPrototypeTemplateOffset kIndexedPropertyHandlerOffset kInstanceCallHandlerOffset internal_field_count
kFeedbackVectorOffset kHiddenPrototypeBit kReadOnlyPrototypeBit kDoNotCacheBit is_toplevel
@ DONT_TRACK_ALLOCATION_SITE
kFeedbackVectorOffset kHiddenPrototypeBit kReadOnlyPrototypeBit kDoNotCacheBit kIsTopLevelBit kAllowLazyCompilationWithoutContext has_duplicate_parameters
kExpectedNofPropertiesOffset kFunctionTokenPositionOffset kOptCountAndBailoutReasonOffset profiler_ticks
kSerializedDataOffset kPrototypeTemplateOffset indexed_property_handler
TypeImpl< ZoneTypeConfig > Type
static int min(int a, int b)
static LifetimePosition Min(LifetimePosition a, LifetimePosition b)
bool IsFastSmiOrObjectElementsKind(ElementsKind kind)
const uint32_t kTwoByteStringTag
bool IsMoreGeneralElementsKindTransition(ElementsKind from_kind, ElementsKind to_kind)
const uint32_t kShortExternalStringTag
kExpectedNofPropertiesOffset function_token_position
name_should_print_as_anonymous
void MemsetPointer(T **dest, U *value, int counter)
kSerializedDataOffset Object
const uint32_t kNotInternalizedTag
kSerializedDataOffset kPrototypeTemplateOffset kIndexedPropertyHandlerOffset instance_call_handler
const int kPointerSizeLog2
const uint32_t kStringTag
kExpectedNofPropertiesOffset kFunctionTokenPositionOffset kOptCountAndBailoutReasonOffset PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset) PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo
@ FIRST_FIXED_TYPED_ARRAY_TYPE
@ FIXED_DOUBLE_ARRAY_TYPE
@ JS_GENERATOR_OBJECT_TYPE
@ JS_CONTEXT_EXTENSION_OBJECT_TYPE
@ MUTABLE_HEAP_NUMBER_TYPE
@ FIRST_EXTERNAL_ARRAY_TYPE
@ SHARED_FUNCTION_INFO_TYPE
@ INTERNALIZED_STRING_TYPE
@ LAST_FIXED_TYPED_ARRAY_TYPE
@ JS_BUILTINS_OBJECT_TYPE
@ CONSTANT_POOL_ARRAY_TYPE
@ ONE_BYTE_INTERNALIZED_STRING_TYPE
@ LAST_EXTERNAL_ARRAY_TYPE
@ FAST_HOLEY_DOUBLE_ELEMENTS
@ SLOPPY_ARGUMENTS_ELEMENTS
@ FAST_HOLEY_SMI_ELEMENTS
bool IsFastDoubleElementsKind(ElementsKind kind)
int LinearSearch(T *array, Name *name, int len, int valid_entries)
Handle< T > handle(T *t, Isolate *isolate)
const uint32_t kOneByteStringTag
kFeedbackVectorOffset kHiddenPrototypeBit kReadOnlyPrototypeBit kDoNotCacheBit start_position_and_type
const int kVariableSizeSentinel
kExpectedNofPropertiesOffset kFunctionTokenPositionOffset kOptCountAndBailoutReasonOffset kProfilerTicksOffset BOOL_GETTER(SharedFunctionInfo, compiler_hints, optimization_disabled, kOptimizationDisabled) void SharedFunctionInfo
const intptr_t kObjectAlignment
kFeedbackVectorOffset kHiddenPrototypeBit read_only_prototype
const bool FLAG_enable_slow_asserts
const uint32_t kShortExternalStringMask
ElementsKind GetHoleyElementsKind(ElementsKind packed_kind)
kFeedbackVectorOffset kHiddenPrototypeBit kReadOnlyPrototypeBit kDoNotCacheBit kIsTopLevelBit allows_lazy_compilation_without_context
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, formal_parameter_count, kFormalParameterCountOffset) PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo
static LifetimePosition Max(LifetimePosition a, LifetimePosition b)
const uint32_t kStringRepresentationMask
kFeedbackVectorOffset kHiddenPrototypeBit kReadOnlyPrototypeBit kDoNotCacheBit kIsTopLevelBit compiler_hints
bool IsFastElementsKind(ElementsKind kind)
kSerializedDataOffset kPrototypeTemplateOffset kIndexedPropertyHandlerOffset kInstanceCallHandlerOffset kInternalFieldCountOffset dependent_code
void PrintF(const char *format,...)
int32_t DoubleToInt32(double x)
kFeedbackVectorOffset hidden_prototype
const uint32_t kOneByteDataHintMask
kExpectedNofPropertiesOffset kFunctionTokenPositionOffset opt_count_and_bailout_reason
uint32_t ComputeIntegerHash(uint32_t key, uint32_t seed)
const uint32_t kIsIndirectStringTag
kFeedbackVectorOffset flag
kFeedbackVectorOffset kHiddenPrototypeBit kReadOnlyPrototypeBit do_not_cache
int BinarySearch(T *array, Name *name, int low, int high, int valid_entries)
const uint32_t kInternalizedTag
static void RoundUp(Vector< char > buffer, int *length, int *decimal_point)
kSerializedDataOffset kPrototypeTemplateOffset kIndexedPropertyHandlerOffset kInstanceCallHandlerOffset kInternalFieldCountOffset ACCESSORS_TO_SMI(AllocationSite, pretenure_create_count, kPretenureCreateCountOffset) ACCESSORS(AllocationSite
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
const uint64_t kHoleNanInt64
const uint32_t kIsNotInternalizedMask
const uint32_t kOneByteDataHintTag
kFeedbackVectorOffset kHiddenPrototypeBit BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check, kNeedsAccessCheckBit) BOOL_ACCESSORS(FunctionTemplateInfo
kSerializedDataOffset prototype_template
static bool IsMinusZero(double value)
bool IsFastSmiElementsKind(ElementsKind kind)
const uint32_t kIsNotStringMask
int Search(T *array, Name *name, int valid_entries)
bool IsAligned(T value, U alignment)
ACCESSORS(AccessorInfo, expected_receiver_type, Object, kExpectedReceiverTypeOffset) ACCESSORS(DeclaredAccessorDescriptor
kSerializedDataOffset kPrototypeTemplateOffset kIndexedPropertyHandlerOffset kInstanceCallHandlerOffset kInternalFieldCountOffset DependentCode
ElementsKind GetInitialFastElementsKind()
static void EnsureHasTransitionArray(Handle< Map > map)
const uint32_t kIsIndirectStringMask
const uint32_t kHoleNanUpper32
bool IsFastObjectElementsKind(ElementsKind kind)
Debugger support for the V8 JavaScript engine.
#define READ_BYTE_FIELD(p, offset)
#define WRITE_SHORT_FIELD(p, offset, value)
#define READ_UINT32_FIELD(p, offset)
#define NOBARRIER_READ_FIELD(p, offset)
#define EXTERNAL_ELEMENTS_CHECK(Type, type, TYPE, ctype, size)
#define WRITE_INTPTR_FIELD(p, offset, value)
#define FIELD_ADDR(p, offset)
#define READ_INTPTR_FIELD(p, offset)
#define WRITE_DOUBLE_FIELD(p, offset, value)
#define WRITE_INT_FIELD(p, offset, value)
#define MAKE_STRUCT_CAST(NAME, Name, name)
#define TYPE_CHECKER(type, instancetype)
#define WRITE_UINT32_FIELD(p, offset, value)
#define ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(name, type, offset)
#define READ_INT32_FIELD(p, offset)
#define INT_ACCESSORS(holder, name, offset)
#define WRITE_FIELD(p, offset, value)
#define WRITE_INT64_FIELD(p, offset, value)
#define SMI_ACCESSORS(holder, name, offset)
#define WRITE_BARRIER(heap, object, offset, value)
#define WRITE_BYTE_FIELD(p, offset, value)
#define NOBARRIER_SMI_ACCESSORS(holder, name, offset)
#define READ_INT64_FIELD(p, offset)
#define CAST_ACCESSOR(type)
#define NOBARRIER_WRITE_FIELD(p, offset, value)
#define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype, size)
#define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size)
#define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset)
#define READ_DOUBLE_FIELD(p, offset)
#define FIELD_ADDR_CONST(p, offset)
#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode)
#define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value)
#define READ_FIELD(p, offset)
#define WRITE_INT32_FIELD(p, offset, value)
#define MAKE_STRUCT_PREDICATE(NAME, Name, name)
#define MAKE_STRUCT_CASE(NAME, Name, name)
#define READ_SHORT_FIELD(p, offset)
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)
#define READ_INT_FIELD(p, offset)
#define ACQUIRE_READ_FIELD(p, offset)
#define NOBARRIER_READ_BYTE_FIELD(p, offset)
#define RELEASE_WRITE_FIELD(p, offset, value)
A simple Maybe type, representing an object which may or may not have a value.
#define T(name, string, precedence)