class CompilationInfo;
for (int i = 0; i < 3; i++)
DCHECK(index >= 0 && index < 3);
template <class Visitor>
VariableProxy* proxy =
VariableDeclaration* function() const {
int statement_position);
void Print(int n = 0);
VariableProxy* proxy,
if (inner_scope != NULL) {
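The loop and DCHECK fragments above belong to DynamicScopePart, whose constructor and GetMap() appear in the member listing below. A minimal sketch of how they plausibly fit together, assuming the surrounding V8 declarations (Zone, ZoneObject, VariableMap, VariableMode) and that the three dynamic modes DYNAMIC, DYNAMIC_GLOBAL and DYNAMIC_LOCAL are consecutive enum values; this is an illustration, not the verbatim source:

// Sketch: one VariableMap per dynamic VariableMode.
class DynamicScopePart : public ZoneObject {
 public:
  explicit DynamicScopePart(Zone* zone) {
    for (int i = 0; i < 3; i++)
      maps_[i] = new (zone) VariableMap(zone);
  }

  VariableMap* GetMap(VariableMode mode) {
    int index = mode - DYNAMIC;  // assumes DYNAMIC is the first dynamic mode
    DCHECK(index >= 0 && index < 3);
    return maps_[index];
  }

 private:
  VariableMap* maps_[3];
};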
DynamicScopePart(Zone *zone)
VariableMap * GetMap(VariableMode mode)
static Interface * NewValue()
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
static const int kNoPosition
int ContextLocalCount() const
Variable * module_var() const
void SetDefaults(ScopeType type, Scope *outer_scope, Handle< ScopeInfo > scope_info)
void SetStrictMode(StrictMode strict_mode)
void AllocateNonParameterLocal(Variable *var)
int num_heap_slots() const
void AllocateHeapSlot(Variable *var)
bool asm_function() const
bool outer_scope_calls_sloppy_eval_
void ForceContextAllocation()
ZoneList< Scope * > * inner_scopes()
bool contains_with() const
void set_end_position(int statement_pos)
bool is_global_scope() const
bool AllowsLazyCompilationWithoutContext() const
Variable * LookupFunctionVar(const AstRawString *name, AstNodeFactory< AstNullVisitor > *factory)
ZoneList< Variable * > temps_
MUST_USE_RESULT bool AllocateVariables(CompilationInfo *info, AstNodeFactory< AstNullVisitor > *factory)
bool force_eager_compilation_
static bool Analyze(CompilationInfo *info)
Variable * Lookup(const AstRawString *name)
void DeclareFunctionVar(VariableDeclaration *declaration)
const AstRawString * scope_name_
ZoneList< VariableProxy * > unresolved_
void AllocateParameterLocals()
Handle< ScopeInfo > scope_info_
Scope * outer_scope() const
int num_parameters() const
bool HasTrivialOuterContext() const
Variable * DeclareDynamicGlobal(const AstRawString *name)
Variable * DeclareLocal(const AstRawString *name, VariableMode mode, InitializationFlag init_flag, MaybeAssignedFlag maybe_assigned_flag=kNotAssigned, Interface *interface=Interface::NewValue())
void AllocateNonParameterLocals()
Variable * LookupRecursive(VariableProxy *proxy, BindingKind *binding_kind, AstNodeFactory< AstNullVisitor > *factory)
bool HasLazyCompilableOuterContext() const
int ContextChainLength(Scope *scope)
ZoneList< Scope * > inner_scopes_
bool HasTrivialContext() const
void AllocateVariablesRecursively()
Variable * NewInternal(const AstRawString *name)
void AddInnerScope(Scope *inner_scope)
void ForceEagerCompilation()
void PropagateScopeInfo(bool outer_scope_calls_sloppy_eval)
ZoneList< Variable * > params_
bool HasIllegalRedeclaration() const
VariableProxy * NewUnresolved(AstNodeFactory< Visitor > *factory, const AstRawString *name, Interface *interface=Interface::NewValue(), int position=RelocInfo::kNoPosition)
bool is_eval_scope() const
void SetIllegalRedeclaration(Expression *expression)
void CollectStackAndContextLocals(ZoneList< Variable * > *stack_locals, ZoneList< Variable * > *context_locals)
bool is_function_scope() const
ZoneList< Declaration * > decls_
ScopeType scope_type() const
bool AllowsLazyCompilation() const
void AllocateStackSlot(Variable *var)
MUST_USE_RESULT bool ResolveVariable(CompilationInfo *info, VariableProxy *proxy, AstNodeFactory< AstNullVisitor > *factory)
bool MustAllocateInContext(Variable *var)
ZoneList< Declaration * > * declarations()
bool outer_scope_calls_sloppy_eval() const
void VisitIllegalRedeclaration(AstVisitor *visitor)
bool HasArgumentsParameter()
void AllocateModulesRecursively(Scope *host_scope)
void RemoveUnresolved(VariableProxy *var)
bool IsDeclared(const AstRawString *name)
Variable * NonLocal(const AstRawString *name, VariableMode mode)
ZoneList< Variable * > internals_
Scope * DeclarationScope()
Variable * arguments() const
Expression * illegal_redecl_
Variable * NewTemporary(const AstRawString *name)
DynamicScopePart * dynamics_
bool inner_scope_calls_eval_
bool is_with_scope() const
void set_start_position(int statement_pos)
bool scope_contains_with_
AstValueFactory * ast_value_factory_
Scope(Scope *outer_scope, ScopeType scope_type, AstValueFactory *value_factory, Zone *zone)
bool is_block_scope() const
Scope * FinalizeBlockScope()
int num_stack_slots() const
StrictMode strict_mode() const
friend class ParserFactory
Declaration * CheckConflictingVarDeclarations()
bool has_forced_context_allocation() const
bool MustAllocate(Variable *var)
Variable * DeclareParameter(const AstRawString *name, VariableMode mode)
VariableDeclaration * function_
Interface * interface() const
void RecordWithStatement()
Variable * parameter(int index) const
bool force_context_allocation_
Handle< ScopeInfo > GetScopeInfo()
void AddDeclaration(Declaration *declaration)
void SetScopeName(const AstRawString *scope_name)
static Scope * DeserializeScopeChain(Context *context, Scope *global_scope, Zone *zone)
bool is_catch_scope() const
int start_position() const
bool is_module_scope() const
void GetNestedScopeChain(List< Handle< ScopeInfo > > *chain, int statement_position)
MUST_USE_RESULT bool ResolveVariablesRecursively(CompilationInfo *info, AstNodeFactory< AstNullVisitor > *factory)
int StackLocalCount() const
bool is_declaration_scope() const
Variable * LookupLocal(const AstRawString *name)
bool is_strict_eval_scope() const
Variable * Declare(Scope *scope, const AstRawString *name, VariableMode mode, bool is_valid_lhs, Variable::Kind kind, InitializationFlag initialization_flag, MaybeAssignedFlag maybe_assigned_flag=kNotAssigned, Interface *interface=Interface::NewValue())
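The listing declares both LookupLocal(), which searches a single scope, and Lookup(), which searches the whole chain. A hedged sketch of how Lookup() can be expressed in terms of LookupLocal() and outer_scope(), assuming the declarations above (not presented as the exact V8 implementation):

Variable* Scope::Lookup(const AstRawString* name) {
  // Walk from the innermost scope outward; NULL means the name is not
  // statically bound anywhere in the chain.
  for (Scope* scope = this; scope != NULL; scope = scope->outer_scope()) {
    Variable* var = scope->LookupLocal(name);
    if (var != NULL) return var;
  }
  return NULL;
}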
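Similarly, ContextChainLength(Scope* scope) together with num_heap_slots() suggests counting how many scopes between this scope and a given outer scope actually allocate a heap context. A sketch under that assumption:

int Scope::ContextChainLength(Scope* scope) {
  // Count scopes below `scope` in the chain that allocate a heap context.
  // Assumes `scope` is an ancestor of (or equal to) this scope.
  int n = 0;
  for (Scope* s = this; s != scope; s = s->outer_scope()) {
    DCHECK(s != NULL);
    if (s->num_heap_slots() > 0) n++;
  }
  return n;
}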
#define DCHECK(condition)
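DCHECK(condition) is a debug-only assertion: it evaluates the condition in debug builds and compiles away in release builds. A minimal, self-contained sketch of that shape; V8's real macro reports failures through its own fatal-error machinery, so the fprintf/abort pair here is only an illustrative stand-in:

#include <cstdio>
#include <cstdlib>

#ifdef DEBUG
#define DCHECK(condition)                                          \
  do {                                                             \
    if (!(condition)) {                                            \
      std::fprintf(stderr, "DCHECK failed: %s\n", #condition);     \
      std::abort();                                                 \
    }                                                              \
  } while (false)
#else
#define DCHECK(condition) ((void) 0)
#endif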