#if V8_CC_GNU && V8_GNUC_PREREQ(2, 96, 0) && !V8_GNUC_PREREQ(4, 1, 0)
// The INFINITY macro cannot be used with '-pedantic' on gcc 2.x.x and 3.x.x.
# define V8_INFINITY std::numeric_limits<double>::infinity()
#elif V8_LIBC_MSVCRT
# define V8_INFINITY HUGE_VAL
#else
# define V8_INFINITY INFINITY
#endif
#if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_ARM || \
    V8_TARGET_ARCH_ARM64
#define V8_TURBOFAN_BACKEND 1
#else
#define V8_TURBOFAN_BACKEND 0
#endif
#if V8_TURBOFAN_BACKEND && !(V8_OS_WIN && V8_TARGET_ARCH_X64)
#define V8_TURBOFAN_TARGET 1
#else
#define V8_TURBOFAN_TARGET 0
#endif
// Run on a simulator whenever the target architecture differs from the host.
#if !defined(USE_SIMULATOR)
#if (V8_TARGET_ARCH_ARM64 && !V8_HOST_ARCH_ARM64)
#define USE_SIMULATOR 1
#endif
#if (V8_TARGET_ARCH_ARM && !V8_HOST_ARCH_ARM)
#define USE_SIMULATOR 1
#endif
#if (V8_TARGET_ARCH_MIPS && !V8_HOST_ARCH_MIPS)
#define USE_SIMULATOR 1
#endif
#if (V8_TARGET_ARCH_MIPS64 && !V8_HOST_ARCH_MIPS64)
#define USE_SIMULATOR 1
#endif
#endif
#define V8_OOL_CONSTANT_POOL 0
#ifdef V8_TARGET_ARCH_ARM
#define V8_DEFAULT_STACK_SIZE_KB 864
#else
#define V8_DEFAULT_STACK_SIZE_KB 984
#endif
// Support for an alternative bool type; the indirection avoids name clashes.
typedef unsigned int __my_bool__;
#define bool __my_bool__
#if V8_TARGET_ARCH_X64 && V8_TARGET_ARCH_32_BIT

#if V8_HOST_ARCH_64_BIT

#if V8_TARGET_ARCH_X64 && V8_TARGET_ARCH_32_BIT
// Rounds n up to the nearest multiple of sz (sz must be a power of two).
#define ROUND_UP(n, sz) (((n) + ((sz) - 1)) & ~((sz) - 1))
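// Worked example (illustration only, not part of the original header):
// because sz is a power of two, ~((sz) - 1) is a mask that clears the
// low-order bits after the addition bumps n into the next bucket.
//   ROUND_UP(13, 8) == (13 + 7) & ~7 == 20 & ~7 == 16
//   ROUND_UP(16, 8) == (16 + 7) & ~7 == 23 & ~7 == 16   // aligned values are unchanged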
// FUNCTION_ADDR(f) gets the address of a C function f.
#define FUNCTION_ADDR(f) \
  (reinterpret_cast<v8::internal::Address>(reinterpret_cast<intptr_t>(f)))
// FUNCTION_CAST<F>(addr) casts an address back into a function of type F.
template <typename F>
F FUNCTION_CAST(Address addr) {
  return reinterpret_cast<F>(reinterpret_cast<intptr_t>(addr));
}
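// Usage sketch (illustration only; 'Add' and 'AddFn' are hypothetical names):
// FUNCTION_ADDR and FUNCTION_CAST are inverses, so a C function pointer can be
// stored as an Address and later recovered with its original signature.
//   int Add(int a, int b) { return a + b; }
//   typedef int (*AddFn)(int, int);
//   v8::internal::Address addr = FUNCTION_ADDR(Add);
//   AddFn fn = FUNCTION_CAST<AddFn>(addr);
//   fn(2, 3);  // == 5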
class FreeStoreAllocationPolicy;
template <typename T, class P = FreeStoreAllocationPolicy> class List;
#ifdef V8_HOST_ARCH_64_BIT

#define PROCESSOR_CACHE_LINE_SIZE 64

class ExternalReference;
template <typename T> class Handle;
template <typename Config, class Allocator = FreeStoreAllocationPolicy>
class SplayTree;
#define HAS_SMI_TAG(value) \
  ((reinterpret_cast<intptr_t>(value) & kSmiTagMask) == kSmiTag)

#define HAS_FAILURE_TAG(value) \
  ((reinterpret_cast<intptr_t>(value) & kFailureTagMask) == kFailureTag)
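// Illustration (assumes the conventional tag values kSmiTag == 0 and
// kSmiTagMask == 1): a small integer is stored shifted left so its low bit is
// zero, while heap object pointers carry a nonzero low-bit tag, which is what
// HAS_SMI_TAG tests.
//   intptr_t tagged_smi = 42 << 1;             // low bit 0 -> HAS_SMI_TAG is true
//   intptr_t heap_ptr_like = tagged_smi | 1;   // low bit 1 -> HAS_SMI_TAG is false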
// OBJECT_POINTER_ALIGN returns the value aligned as a HeapObject pointer.
#define OBJECT_POINTER_ALIGN(value) \
  (((value) + kObjectAlignmentMask) & ~kObjectAlignmentMask)

// POINTER_SIZE_ALIGN returns the value aligned as a pointer.
#define POINTER_SIZE_ALIGN(value) \
  (((value) + kPointerAlignmentMask) & ~kPointerAlignmentMask)

// CODE_POINTER_ALIGN returns the value aligned as generated code.
#define CODE_POINTER_ALIGN(value) \
  (((value) + kCodeAlignmentMask) & ~kCodeAlignmentMask)
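// Worked example (illustration only, assuming a 64-bit build where
// kPointerAlignment == 8 and kPointerAlignmentMask == 7): the *_ALIGN macros
// follow the same round-up pattern as ROUND_UP above.
//   POINTER_SIZE_ALIGN(13) == (13 + 7) & ~7 == 16
//   POINTER_SIZE_ALIGN(16) == 16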
#ifdef DEBUG
#define TRACK_MEMORY(name) \
  void* operator new(size_t size) { \
    void* result = ::operator new(size); \
    Logger::NewEventStatic(name, result, size); \
    return result; \
  } \
  void operator delete(void* object) { \
    Logger::DeleteEventStatic(name, object); \
    ::operator delete(object); \
  }
#else
#define TRACK_MEMORY(name)
#endif
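// Usage sketch (illustration only; 'Sample' is a hypothetical class): placing
// TRACK_MEMORY in a class body routes its new/delete through the Logger in
// debug builds and expands to nothing in release builds.
//   class Sample {
//    public:
//     TRACK_MEMORY("Sample")
//     int payload;
//   };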
#define DCHECK(condition)
bool(* WeakSlotCallback)(Object **pointer)
bool(* ConstraintCallback)(Address new_addr, Address old_addr)
const uint32_t kDebugZapValue
bool(* WeakSlotCallbackWithHeap)(Heap *heap, Object **pointer)
@ MOVW_MOVT_IMMEDIATE_LOADS
bool IsValidFunctionKind(FunctionKind kind)
bool IsArrowFunction(FunctionKind kind)
const int kBinary32MantissaBits
const int kBinary32ExponentShift
const uintptr_t kUintptrAllBitsSet
const Address kHandleZapValue
const int kBinary32MinExponent
@ VISIT_ALL_IN_SWEEP_NEWSPACE
const intptr_t kPointerAlignmentMask
const intptr_t kCodeAlignment
bool IsLexicalVariableMode(VariableMode mode)
const intptr_t kSmiSignMask
bool IsDeclaredVariableMode(VariableMode mode)
const int kBinary32ExponentBias
const Address kGlobalHandleZapValue
const intptr_t kPageHeaderTagMask
@ USE_DEFAULT_MINIMUM_CAPACITY
@ USE_CUSTOM_MINIMUM_CAPACITY
const int kDoubleSizeLog2
bool IsGeneratorFunction(FunctionKind kind)
const size_t kMaximalCodeRangeSize
bool IsConciseMethod(FunctionKind kind)
void(* InlineCacheCallback)(Code *code, Address ic)
const int kPointerSizeLog2
@ kStoreBufferScanningPageEvent
@ kStoreBufferStartScanningPagesEvent
@ kCacheOnPrototypeReceiverIsPrimitive
@ kCacheOnPrototypeReceiverIsDictionary
const int kBitsPerPointer
const intptr_t kObjectAlignmentMask
@ TREAT_MINUS_ZERO_AS_ZERO
const intptr_t kObjectAlignment
bool IsImmutableVariableMode(VariableMode mode)
const uint64_t kLastNonNaNInt64
const Address kFromSpaceZapValue
const uint32_t kFreeListZapValue
const bool kRequiresCodeRange
const int kBinary32MaxExponent
const int kBitsPerByteLog2
const intptr_t kCodeAlignmentMask
const uint32_t kQuietNaNHighBitsMask
void(* StoreBufferCallback)(Heap *heap, MemoryChunk *page, StoreBufferEvent event)
const uint32_t kBinary32SignMask
const uint64_t kQuietNaNMask
const int kPageHeaderTagSize
int(* HeapObjectCallback)(HeapObject *obj)
const int kObjectAlignmentBits
const int kCodeAlignmentBits
const uint32_t kBinary32ExponentMask
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
const uint64_t kHoleNanInt64
const intptr_t kIntptrSignBit
@ NO_CALL_CONSTRUCTOR_FLAGS
@ RECORD_CONSTRUCTOR_TARGET
const uint32_t kNaNOrInfinityLowerBoundUpper32
bool IsDynamicVariableMode(VariableMode mode)
const uint32_t kMaxUInt32
const uint32_t kHoleNanLower32
const uint32_t kSlotsZapValue
const intptr_t kDoubleAlignment
const intptr_t kPointerAlignment
@ kConciseGeneratorMethod
const uint32_t kBinary32MantissaMask
F FUNCTION_CAST(Address addr)
const intptr_t kDoubleAlignmentMask
const uint32_t kHoleNanUpper32
DoubleRepresentation(double x)
bool operator==(const DoubleRepresentation &other) const
struct v8::internal::IeeeDoubleBigEndianArchType::<anonymous> bits
struct v8::internal::IeeeDoubleLittleEndianArchType::<anonymous> bits
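// Illustration (assumes DoubleRepresentation is the usual union of a double
// with its 64-bit pattern, as the constructor and operator== above suggest):
// comparing representations distinguishes values whose bit patterns differ
// even though they compare equal as plain doubles, e.g. +0.0 and -0.0.
//   DoubleRepresentation a(0.0), b(-0.0);
//   a == b        // false: the sign bits differ
//   0.0 == -0.0   // true for plain doubles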