stub-cache-x64.cc
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_X64

#include "src/codegen.h"
#include "src/ic/stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


static void ProbeTable(Isolate* isolate, MacroAssembler* masm,
                       Code::Flags flags, bool leave_frame,
                       StubCache::Table table, Register receiver, Register name,
                       // The offset is scaled by 4, based on
                       // kCacheIndexShift, which is two bits.
                       Register offset) {
  // We need to scale up the pointer by 2 when the offset is scaled by less
  // than the pointer size.
  DCHECK(kPointerSize == kInt64Size
             ? kPointerSizeLog2 == StubCache::kCacheIndexShift + 1
             : kPointerSizeLog2 == StubCache::kCacheIndexShift);
  ScaleFactor scale_factor = kPointerSize == kInt64Size ? times_2 : times_1;

  DCHECK_EQ(3 * kPointerSize, sizeof(StubCache::Entry));
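  // StubCache::Entry groups three pointers per entry, roughly
  // { Name* key; Code* value; Map* map; } (see stub-cache.h), so an entry
  // occupies 3 * kPointerSize = 24 bytes on x64.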
  // The offset register holds the entry offset times four (due to masking
  // and shifting optimizations).
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  Label miss;

  // Multiply by 3 because there are 3 fields per entry (name, code, map).
  __ leap(offset, Operand(offset, offset, times_2, 0));

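  // Worked example of the offset arithmetic on x64 (kPointerSize == 8,
  // kCacheIndexShift == 2): for a table index i the incoming offset is
  // i << 2 == i * 4, the lea above turns that into i * 12, and the times_2
  // scale factor used in the probes below doubles it to i * 24, i.e.
  // i * sizeof(StubCache::Entry).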
  __ LoadAddress(kScratchRegister, key_offset);

  // Check that the key in the entry matches the name. The scaled offset
  // computed above addresses the key field of the probed entry.
  __ cmpl(name, Operand(kScratchRegister, offset, scale_factor, 0));
  __ j(not_equal, &miss);

  // Get the map entry from the cache.
  // Use key_offset + kPointerSize * 2, rather than loading map_offset.
  __ movp(kScratchRegister,
          Operand(kScratchRegister, offset, scale_factor, kPointerSize * 2));
  // Check that the entry's map matches the receiver's map.
  __ cmpp(kScratchRegister, FieldOperand(receiver, HeapObject::kMapOffset));
  __ j(not_equal, &miss);

  // Get the code entry from the cache.
  __ LoadAddress(kScratchRegister, value_offset);
  __ movp(kScratchRegister, Operand(kScratchRegister, offset, scale_factor, 0));

  // Check that the flags match what we're looking for.
  __ movl(offset, FieldOperand(kScratchRegister, Code::kFlagsOffset));
  __ andp(offset, Immediate(~Code::kFlagsNotUsedInLookup));
  __ cmpl(offset, Immediate(flags));
  __ j(not_equal, &miss);

#ifdef DEBUG
  if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
    __ jmp(&miss);
  } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
    __ jmp(&miss);
  }
#endif

  if (leave_frame) __ leave();

  // Jump to the first instruction in the code stub. The code object starts
  // with a header, so skip past it and remove the heap object tag.
  __ addp(kScratchRegister, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(kScratchRegister);

  __ bind(&miss);
}
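
// Note: on a cache hit, ProbeTable transfers control into the cached code
// object and does not return; on a miss it falls through past the bound miss
// label, so the caller can try the next table or give up.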


void StubCache::GenerateProbe(MacroAssembler* masm, Code::Flags flags,
                              bool leave_frame, Register receiver,
                              Register name, Register scratch, Register extra,
                              Register extra2, Register extra3) {
  Isolate* isolate = masm->isolate();
  Label miss;
  USE(extra);   // The register extra is not used on the X64 platform.
  USE(extra2);  // The register extra2 is not used on the X64 platform.
  USE(extra3);  // The register extra3 is not used on the X64 platform.
  // Make sure that code is valid. The multiplying code relies on the
  // entry size being 3 * kPointerSize.
  DCHECK(sizeof(Entry) == 3 * kPointerSize);

  // Make sure the flags do not name a specific type.
  DCHECK(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  DCHECK(!scratch.is(receiver));
  DCHECK(!scratch.is(name));

  // Check that the scratch register is valid and that extra2 and extra3 are
  // unused.
  DCHECK(!scratch.is(no_reg));
  DCHECK(extra2.is(no_reg));
  DCHECK(extra3.is(no_reg));

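  // Probe sequence: hash the name's hash field and the receiver's map
  // together with the code flags, probe the primary table, and on a miss
  // rehash and probe the secondary table; if both probes miss, fall through
  // so the caller can handle the miss in the runtime.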
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Get the map of the receiver and compute the hash.
  __ movl(scratch, FieldOperand(name, Name::kHashFieldOffset));
  // Use only the low 32 bits of the map pointer.
  __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ xorp(scratch, Immediate(flags));
  // We mask out the last two bits because they are not part of the hash and
  // they are always 01 for maps. Also in the two 'and' instructions below.
  __ andp(scratch, Immediate((kPrimaryTableSize - 1) << kCacheIndexShift));
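  // In effect: scratch = ((name->hash_field + map) ^ flags) &
  //                      ((kPrimaryTableSize - 1) << kCacheIndexShift),
  // i.e. the primary entry index already scaled by four.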

  // Probe the primary table.
  ProbeTable(isolate, masm, flags, leave_frame, kPrimary, receiver, name,
             scratch);

  // Primary miss: Compute hash for secondary probe.
  __ movl(scratch, FieldOperand(name, Name::kHashFieldOffset));
  __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ xorp(scratch, Immediate(flags));
  __ andp(scratch, Immediate((kPrimaryTableSize - 1) << kCacheIndexShift));
  __ subl(scratch, name);
  __ addl(scratch, Immediate(flags));
  __ andp(scratch, Immediate((kSecondaryTableSize - 1) << kCacheIndexShift));
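  // In effect: scratch = ((primary_offset - name) + flags) &
  //                      ((kSecondaryTableSize - 1) << kCacheIndexShift),
  // where primary_offset is the scaled primary probe value recomputed above
  // and name is the raw name pointer in its register.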

  // Probe the secondary table.
  ProbeTable(isolate, masm, flags, leave_frame, kSecondary, receiver, name,
             scratch);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
}
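
// Illustrative only (not part of this file): a megamorphic IC stub would
// typically drive this probe with a call along the lines of
//   isolate->stub_cache()->GenerateProbe(masm, flags, false, receiver_reg,
//                                        name_reg, scratch_reg, no_reg);
// where receiver_reg, name_reg and scratch_reg stand in for whatever
// registers the calling convention of that IC provides.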


#undef __
}
}  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64