#ifndef V8_SPLAY_TREE_INL_H_
#define V8_SPLAY_TREE_INL_H_

#include "src/splay-tree.h"

namespace v8 {
namespace internal {

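// The Config template parameter supplies the pieces used throughout this
// file: the Key/Value types, Config::kNoKey, Config::NoValue(), and a
// three-way Config::Compare(). A minimal sketch of such a Config follows;
// it is illustrative only (IntKeyConfig is not a V8 name):
//
//   struct IntKeyConfig {
//     typedef int Key;
//     typedef int Value;
//     static const int kNoKey = 0;
//     static int NoValue() { return 0; }
//     static int Compare(int a, int b) {
//       return a < b ? -1 : (a > b ? 1 : 0);
//     }
//   };
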
template<typename Config, class Allocator>
SplayTree<Config, Allocator>::~SplayTree() {
  NodeDeleter deleter;
  ForEachNode(&deleter);
}

template<typename Config, class Allocator>
bool SplayTree<Config, Allocator>::Insert(const Key& key, Locator* locator) {
  if (is_empty()) {
    // If the tree is empty, insert the new node.
    root_ = new(allocator_) Node(key, Config::NoValue());
  } else {
    // Splay on the key to move the last node on the search path
    // for the key to the root of the tree.
    Splay(key);
    // Ignore repeated insertions with the same key.
    int cmp = Config::Compare(key, root_->key_);
    if (cmp == 0) {
      locator->bind(root_);
      return false;
    }
    // Insert the new node.
    Node* node = new(allocator_) Node(key, Config::NoValue());
    InsertInternal(cmp, node);
  }
  locator->bind(root_);
  return true;
}

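// Typical call pattern for Insert(): the Locator gives access to the node
// after the call. A sketch only; tree, key and value are placeholders, and
// it assumes the Locator declared in splay-tree.h exposes set_value():
//
//   SplayTree<SomeConfig, SomeAllocator>::Locator locator;
//   if (tree.Insert(key, &locator)) {
//     locator.set_value(value);  // Freshly inserted node; attach the value.
//   } else {
//     // An entry with this key already existed; locator is bound to it.
//   }
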
template<typename Config, class Allocator>
void SplayTree<Config, Allocator>::InsertInternal(int cmp, Node* node) {
  if (cmp > 0) {
    node->left_ = root_;
    node->right_ = root_->right_;
    root_->right_ = NULL;
  } else {
    node->right_ = root_;
    node->left_ = root_->left_;
    root_->left_ = NULL;
  }
  root_ = node;
}

template<typename Config, class Allocator>
bool SplayTree<Config, Allocator>::FindInternal(const Key& key) {
  if (is_empty())
    return false;
  Splay(key);
  return Config::Compare(key, root_->key_) == 0;
}

template<typename Config, class Allocator>
bool SplayTree<Config, Allocator>::Contains(const Key& key) {
  return FindInternal(key);
}

template<typename Config, class Allocator>
bool SplayTree<Config, Allocator>::Find(const Key& key, Locator* locator) {
  if (FindInternal(key)) {
    locator->bind(root_);
    return true;
  } else {
    return false;
  }
}

template<typename Config, class Allocator>
bool SplayTree<Config, Allocator>::FindGreatestLessThan(const Key& key,
                                                        Locator* locator) {
  if (is_empty())
    return false;
  // Splay on the key to move the node with the given key, or the last
  // node on the search path, to the top of the tree.
  Splay(key);
  // Now the result is either the root node or the greatest node in
  // the left subtree.
  int cmp = Config::Compare(root_->key_, key);
  if (cmp <= 0) {
    locator->bind(root_);
    return true;
  } else {
    Node* temp = root_;
    root_ = root_->left_;
    bool result = FindGreatest(locator);
    root_ = temp;
    return result;
  }
}

template<typename Config, class Allocator>
bool SplayTree<Config, Allocator>::FindLeastGreaterThan(const Key& key,
                                                        Locator* locator) {
  if (is_empty())
    return false;
  // Splay on the key to move the node with the given key, or the last
  // node on the search path, to the top of the tree.
  Splay(key);
  // Now the result is either the root node or the least node in
  // the right subtree.
  int cmp = Config::Compare(root_->key_, key);
  if (cmp >= 0) {
    locator->bind(root_);
    return true;
  } else {
    Node* temp = root_;
    root_ = root_->right_;
    bool result = FindLeast(locator);
    root_ = temp;
    return result;
  }
}

template<typename Config, class Allocator>
bool SplayTree<Config, Allocator>::FindGreatest(Locator* locator) {
  if (is_empty())
    return false;
  Node* current = root_;
  while (current->right_ != NULL)
    current = current->right_;
  locator->bind(current);
  return true;
}

template<typename Config, class Allocator>
bool SplayTree<Config, Allocator>::FindLeast(Locator* locator) {
  if (is_empty())
    return false;
  Node* current = root_;
  while (current->left_ != NULL)
    current = current->left_;
  locator->bind(current);
  return true;
}

template<typename Config, class Allocator>
bool SplayTree<Config, Allocator>::Move(const Key& old_key,
                                        const Key& new_key) {
  if (!FindInternal(old_key))
    return false;
  Node* node_to_move = root_;
  RemoveRootNode(old_key);
  Splay(new_key);
  int cmp = Config::Compare(new_key, root_->key_);
  if (cmp == 0) {
    // A node with the target key already exists; drop the old node.
    delete node_to_move;
    return false;
  }
  node_to_move->key_ = new_key;
  InsertInternal(cmp, node_to_move);
  return true;
}

template<typename Config, class Allocator>
bool SplayTree<Config, Allocator>::Remove(const Key& key) {
  if (!FindInternal(key))
    return false;
  Node* node_to_remove = root_;
  RemoveRootNode(key);
  delete node_to_remove;
  return true;
}

template<typename Config, class Allocator>
void SplayTree<Config, Allocator>::RemoveRootNode(const Key& key) {
  if (root_->left_ == NULL) {
    // No left child, so the new tree is just the right child.
    root_ = root_->right_;
  } else {
    // Left child exists: make it the new root, splay to give the new root
    // an empty right child, then re-attach the original right child.
    Node* right = root_->right_;
    root_ = root_->left_;
    Splay(key);
    root_->right_ = right;
  }
}

template<typename Config, class Allocator>
void SplayTree<Config, Allocator>::Splay(const Key& key) {
  if (is_empty())
    return;
  // Create a dummy node.  The right child of the dummy node holds the L
  // tree of the top-down splay algorithm and its left child holds the R
  // tree; with the dummy, left and right are always real nodes and no
  // special cases are needed.
  Node dummy_node(Config::kNoKey, Config::NoValue());
  Node* dummy = &dummy_node;
  Node* left = dummy;
  Node* right = dummy;
  Node* current = root_;
  while (true) {
    int cmp = Config::Compare(key, current->key_);
    if (cmp < 0) {
      if (current->left_ == NULL)
        break;
      if (Config::Compare(key, current->left_->key_) < 0) {
        // Rotate right.
        Node* temp = current->left_;
        current->left_ = temp->right_;
        temp->right_ = current;
        current = temp;
        if (current->left_ == NULL)
          break;
      }
      // Link right.
      right->left_ = current;
      right = current;
      current = current->left_;
    } else if (cmp > 0) {
      if (current->right_ == NULL)
        break;
      if (Config::Compare(key, current->right_->key_) > 0) {
        // Rotate left.
        Node* temp = current->right_;
        current->right_ = temp->left_;
        temp->left_ = current;
        current = temp;
        if (current->right_ == NULL)
          break;
      }
      // Link left.
      left->right_ = current;
      left = current;
      current = current->right_;
    } else {
      break;
    }
  }
  // Assemble.
  left->right_ = current->left_;
  right->left_ = current->right_;
  current->left_ = dummy->right_;
  current->right_ = dummy->left_;
  root_ = current;
}

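// Both traversal helpers below take duck-typed callbacks. A sketch of the
// expected shapes (PairCallback/NodeCallback are illustrative names, not
// V8 names), assuming the NodeToPairAdaptor declared in splay-tree.h
// forwards node->key() and node->value():
//
//   struct PairCallback {   // Passed to ForEach().
//     void Call(const Key& key, const Value& value) { /* ... */ }
//   };
//   struct NodeCallback {   // Passed to ForEachNode().
//     void Call(Node* node) { /* ... */ }
//   };
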
template <typename Config, class Allocator> template <class Callback>
void SplayTree<Config, Allocator>::ForEach(Callback* callback) {
  NodeToPairAdaptor<Callback> callback_adaptor(callback);
  ForEachNode(&callback_adaptor);
}

template <typename Config, class Allocator> template <class Callback>
void SplayTree<Config, Allocator>::ForEachNode(Callback* callback) {
  if (root_ == NULL) return;
  // Pre-allocate some space for tiny trees.
  List<Node*, Allocator> nodes_to_visit(10, allocator_);
  nodes_to_visit.Add(root_, allocator_);
  int pos = 0;
  while (pos < nodes_to_visit.length()) {
    Node* node = nodes_to_visit[pos++];
    if (node->left() != NULL) nodes_to_visit.Add(node->left(), allocator_);
    if (node->right() != NULL) nodes_to_visit.Add(node->right(), allocator_);
    callback->Call(node);
  }
}

} }  // namespace v8::internal

#endif  // V8_SPLAY_TREE_INL_H_