V8 Project
handler-compiler-x64.cc
Go to the documentation of this file.
1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
#include "src/v8.h"

#if V8_TARGET_ARCH_X64

#include "src/ic/call-optimization.h"
#include "src/ic/handler-compiler.h"
#include "src/ic/ic.h"
12 
13 namespace v8 {
14 namespace internal {
15 
16 #define __ ACCESS_MASM(masm)
17 
19  MacroAssembler* masm, Label* miss_label, Register receiver,
20  Handle<Name> name, Register scratch0, Register scratch1) {
21  DCHECK(name->IsUniqueName());
22  DCHECK(!receiver.is(scratch0));
23  Counters* counters = masm->isolate()->counters();
24  __ IncrementCounter(counters->negative_lookups(), 1);
25  __ IncrementCounter(counters->negative_lookups_miss(), 1);
26 
27  __ movp(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));
28 
29  const int kInterceptorOrAccessCheckNeededMask =
31 
32  // Bail out if the receiver has a named interceptor or requires access checks.
33  __ testb(FieldOperand(scratch0, Map::kBitFieldOffset),
34  Immediate(kInterceptorOrAccessCheckNeededMask));
35  __ j(not_zero, miss_label);
36 
37  // Check that receiver is a JSObject.
38  __ CmpInstanceType(scratch0, FIRST_SPEC_OBJECT_TYPE);
39  __ j(below, miss_label);
40 
41  // Load properties array.
42  Register properties = scratch0;
43  __ movp(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));
44 
45  // Check that the properties array is a dictionary.
46  __ CompareRoot(FieldOperand(properties, HeapObject::kMapOffset),
47  Heap::kHashTableMapRootIndex);
48  __ j(not_equal, miss_label);
49 
50  Label done;
52  properties, name, scratch1);
53  __ bind(&done);
54  __ DecrementCounter(counters->negative_lookups_miss(), 1);
55 }
56 
57 
59  MacroAssembler* masm, int index, Register prototype, Label* miss) {
60  Isolate* isolate = masm->isolate();
61  // Get the global function with the given index.
62  Handle<JSFunction> function(
63  JSFunction::cast(isolate->native_context()->get(index)));
64 
65  // Check we're still in the same context.
66  Register scratch = prototype;
68  __ movp(scratch, Operand(rsi, offset));
69  __ movp(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
70  __ Cmp(Operand(scratch, Context::SlotOffset(index)), function);
71  __ j(not_equal, miss);
72 
73  // Load its initial map. The global functions all have initial maps.
74  __ Move(prototype, Handle<Map>(function->initial_map()));
75  // Load the prototype from the initial map.
76  __ movp(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
77 }
78 
79 
81  MacroAssembler* masm, Register receiver, Register result, Register scratch,
82  Label* miss_label) {
83  __ TryGetFunctionPrototype(receiver, result, miss_label);
84  if (!result.is(rax)) __ movp(rax, result);
85  __ ret(0);
86 }
87 
88 
89 static void PushInterceptorArguments(MacroAssembler* masm, Register receiver,
90  Register holder, Register name,
91  Handle<JSObject> holder_obj) {
97  __ Push(name);
98  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
99  DCHECK(!masm->isolate()->heap()->InNewSpace(*interceptor));
100  __ Move(kScratchRegister, interceptor);
102  __ Push(receiver);
103  __ Push(holder);
104 }
105 
106 
107 static void CompileCallLoadPropertyWithInterceptor(
108  MacroAssembler* masm, Register receiver, Register holder, Register name,
109  Handle<JSObject> holder_obj, IC::UtilityId id) {
110  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
111  __ CallExternalReference(ExternalReference(IC_Utility(id), masm->isolate()),
113 }
114 
115 
116 // Generate call to api function.
118  MacroAssembler* masm, const CallOptimization& optimization,
119  Handle<Map> receiver_map, Register receiver, Register scratch_in,
120  bool is_store, int argc, Register* values) {
121  DCHECK(optimization.is_simple_api_call());
122 
123  __ PopReturnAddressTo(scratch_in);
124  // receiver
125  __ Push(receiver);
126  // Write the arguments to stack frame.
127  for (int i = 0; i < argc; i++) {
128  Register arg = values[argc - 1 - i];
129  DCHECK(!receiver.is(arg));
130  DCHECK(!scratch_in.is(arg));
131  __ Push(arg);
132  }
133  __ PushReturnAddressFrom(scratch_in);
134  // Stack now matches JSFunction abi.
135 
136  // Abi for CallApiFunctionStub.
137  Register callee = rax;
138  Register call_data = rbx;
139  Register holder = rcx;
140  Register api_function_address = rdx;
141  Register scratch = rdi; // scratch_in is no longer valid.
142 
143  // Put holder in place.
144  CallOptimization::HolderLookup holder_lookup;
145  Handle<JSObject> api_holder =
146  optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup);
147  switch (holder_lookup) {
148  case CallOptimization::kHolderIsReceiver:
149  __ Move(holder, receiver);
150  break;
151  case CallOptimization::kHolderFound:
152  __ Move(holder, api_holder);
153  break;
154  case CallOptimization::kHolderNotFound:
155  UNREACHABLE();
156  break;
157  }
158 
159  Isolate* isolate = masm->isolate();
160  Handle<JSFunction> function = optimization.constant_function();
161  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
162  Handle<Object> call_data_obj(api_call_info->data(), isolate);
163 
164  // Put callee in place.
165  __ Move(callee, function);
166 
167  bool call_data_undefined = false;
168  // Put call_data in place.
169  if (isolate->heap()->InNewSpace(*call_data_obj)) {
170  __ Move(scratch, api_call_info);
171  __ movp(call_data, FieldOperand(scratch, CallHandlerInfo::kDataOffset));
172  } else if (call_data_obj->IsUndefined()) {
173  call_data_undefined = true;
174  __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);
175  } else {
176  __ Move(call_data, call_data_obj);
177  }
178 
179  // Put api_function_address in place.
180  Address function_address = v8::ToCData<Address>(api_call_info->callback());
181  __ Move(api_function_address, function_address,
183 
184  // Jump to stub.
185  CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc);
186  __ TailCallStub(&stub);
187 }
188 
189 
191  MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name,
192  Register scratch, Label* miss) {
193  Handle<PropertyCell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
194  DCHECK(cell->value()->IsTheHole());
195  __ Move(scratch, cell);
196  __ Cmp(FieldOperand(scratch, Cell::kValueOffset),
197  masm->isolate()->factory()->the_hole_value());
198  __ j(not_equal, miss);
199 }
200 
201 
203  MacroAssembler* masm, Handle<HeapType> type, Register receiver,
204  Handle<JSFunction> setter) {
205  // ----------- S t a t e -------------
206  // -- rsp[0] : return address
207  // -----------------------------------
208  {
209  FrameScope scope(masm, StackFrame::INTERNAL);
210 
211  // Save value register, so we can restore it later.
212  __ Push(value());
213 
214  if (!setter.is_null()) {
215  // Call the JavaScript setter with receiver and value on the stack.
216  if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
217  // Swap in the global receiver.
218  __ movp(receiver,
220  }
221  __ Push(receiver);
222  __ Push(value());
223  ParameterCount actual(1);
224  ParameterCount expected(setter);
225  __ InvokeFunction(setter, expected, actual, CALL_FUNCTION,
226  NullCallWrapper());
227  } else {
228  // If we generate a global code snippet for deoptimization only, remember
229  // the place to continue after deoptimization.
230  masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
231  }
232 
233  // We have to return the passed value, not the return value of the setter.
234  __ Pop(rax);
235 
236  // Restore context register.
238  }
239  __ ret(0);
240 }
241 
242 
244  MacroAssembler* masm, Handle<HeapType> type, Register receiver,
245  Handle<JSFunction> getter) {
246  // ----------- S t a t e -------------
247  // -- rax : receiver
248  // -- rcx : name
249  // -- rsp[0] : return address
250  // -----------------------------------
251  {
252  FrameScope scope(masm, StackFrame::INTERNAL);
253 
254  if (!getter.is_null()) {
255  // Call the JavaScript getter with the receiver on the stack.
256  if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
257  // Swap in the global receiver.
258  __ movp(receiver,
260  }
261  __ Push(receiver);
262  ParameterCount actual(0);
263  ParameterCount expected(getter);
264  __ InvokeFunction(getter, expected, actual, CALL_FUNCTION,
265  NullCallWrapper());
266  } else {
267  // If we generate a global code snippet for deoptimization only, remember
268  // the place to continue after deoptimization.
269  masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
270  }
271 
272  // Restore context register.
274  }
275  __ ret(0);
276 }
277 
278 
279 static void StoreIC_PushArgs(MacroAssembler* masm) {
280  Register receiver = StoreDescriptor::ReceiverRegister();
281  Register name = StoreDescriptor::NameRegister();
282  Register value = StoreDescriptor::ValueRegister();
283 
284  DCHECK(!rbx.is(receiver) && !rbx.is(name) && !rbx.is(value));
285 
286  __ PopReturnAddressTo(rbx);
287  __ Push(receiver);
288  __ Push(name);
289  __ Push(value);
290  __ PushReturnAddressFrom(rbx);
291 }
292 
293 
294 void NamedStoreHandlerCompiler::GenerateSlow(MacroAssembler* masm) {
295  // Return address is on the stack.
296  StoreIC_PushArgs(masm);
297 
298  // Do tail-call to runtime routine.
299  ExternalReference ref(IC_Utility(IC::kStoreIC_Slow), masm->isolate());
300  __ TailCallExternalReference(ref, 3, 1);
301 }
302 
303 
304 void ElementHandlerCompiler::GenerateStoreSlow(MacroAssembler* masm) {
305  // Return address is on the stack.
306  StoreIC_PushArgs(masm);
307 
308  // Do tail-call to runtime routine.
309  ExternalReference ref(IC_Utility(IC::kKeyedStoreIC_Slow), masm->isolate());
310  __ TailCallExternalReference(ref, 3, 1);
311 }
312 
313 
314 #undef __
315 #define __ ACCESS_MASM((masm()))
316 
317 
319  Handle<Name> name) {
320  if (!label->is_unused()) {
321  __ bind(label);
322  __ Move(this->name(), name);
323  }
324 }
325 
326 
327 // Receiver_reg is preserved on jumps to miss_label, but may be destroyed if
328 // store is successful.
330  Handle<Map> transition, Handle<Name> name, Register receiver_reg,
331  Register storage_reg, Register value_reg, Register scratch1,
332  Register scratch2, Register unused, Label* miss_label, Label* slow) {
333  int descriptor = transition->LastAdded();
334  DescriptorArray* descriptors = transition->instance_descriptors();
335  PropertyDetails details = descriptors->GetDetails(descriptor);
336  Representation representation = details.representation();
337  DCHECK(!representation.IsNone());
338 
339  if (details.type() == CONSTANT) {
340  Handle<Object> constant(descriptors->GetValue(descriptor), isolate());
341  __ Cmp(value_reg, constant);
342  __ j(not_equal, miss_label);
343  } else if (representation.IsSmi()) {
344  __ JumpIfNotSmi(value_reg, miss_label);
345  } else if (representation.IsHeapObject()) {
346  __ JumpIfSmi(value_reg, miss_label);
347  HeapType* field_type = descriptors->GetFieldType(descriptor);
348  HeapType::Iterator<Map> it = field_type->Classes();
349  if (!it.Done()) {
350  Label do_store;
351  while (true) {
352  __ CompareMap(value_reg, it.Current());
353  it.Advance();
354  if (it.Done()) {
355  __ j(not_equal, miss_label);
356  break;
357  }
358  __ j(equal, &do_store, Label::kNear);
359  }
360  __ bind(&do_store);
361  }
362  } else if (representation.IsDouble()) {
363  Label do_store, heap_number;
364  __ AllocateHeapNumber(storage_reg, scratch1, slow, MUTABLE);
365 
366  __ JumpIfNotSmi(value_reg, &heap_number);
367  __ SmiToInteger32(scratch1, value_reg);
368  __ Cvtlsi2sd(xmm0, scratch1);
369  __ jmp(&do_store);
370 
371  __ bind(&heap_number);
372  __ CheckMap(value_reg, isolate()->factory()->heap_number_map(), miss_label,
374  __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));
375 
376  __ bind(&do_store);
377  __ movsd(FieldOperand(storage_reg, HeapNumber::kValueOffset), xmm0);
378  }
379 
380  // Stub never generated for objects that require access checks.
381  DCHECK(!transition->is_access_check_needed());
382 
383  // Perform map transition for the receiver if necessary.
384  if (details.type() == FIELD &&
385  Map::cast(transition->GetBackPointer())->unused_property_fields() == 0) {
386  // The properties must be extended before we can store the value.
387  // We jump to a runtime call that extends the properties array.
388  __ PopReturnAddressTo(scratch1);
389  __ Push(receiver_reg);
390  __ Push(transition);
391  __ Push(value_reg);
392  __ PushReturnAddressFrom(scratch1);
393  __ TailCallExternalReference(
394  ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
395  isolate()),
396  3, 1);
397  return;
398  }
399 
400  // Update the map of the object.
401  __ Move(scratch1, transition);
402  __ movp(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1);
403 
404  // Update the write barrier for the map field.
405  __ RecordWriteField(receiver_reg, HeapObject::kMapOffset, scratch1, scratch2,
407 
408  if (details.type() == CONSTANT) {
409  DCHECK(value_reg.is(rax));
410  __ ret(0);
411  return;
412  }
413 
414  int index = transition->instance_descriptors()->GetFieldIndex(
415  transition->LastAdded());
416 
417  // Adjust for the number of properties stored in the object. Even in the
418  // face of a transition we can use the old map here because the size of the
419  // object and the number of in-object properties is not going to change.
420  index -= transition->inobject_properties();
421 
422  // TODO(verwaest): Share this code as a code stub.
423  SmiCheck smi_check =
424  representation.IsTagged() ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
425  if (index < 0) {
426  // Set the property straight into the object.
427  int offset = transition->instance_size() + (index * kPointerSize);
428  if (representation.IsDouble()) {
429  __ movp(FieldOperand(receiver_reg, offset), storage_reg);
430  } else {
431  __ movp(FieldOperand(receiver_reg, offset), value_reg);
432  }
433 
434  if (!representation.IsSmi()) {
435  // Update the write barrier for the array address.
436  if (!representation.IsDouble()) {
437  __ movp(storage_reg, value_reg);
438  }
439  __ RecordWriteField(receiver_reg, offset, storage_reg, scratch1,
441  }
442  } else {
443  // Write to the properties array.
444  int offset = index * kPointerSize + FixedArray::kHeaderSize;
445  // Get the properties array (optimistically).
446  __ movp(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
447  if (representation.IsDouble()) {
448  __ movp(FieldOperand(scratch1, offset), storage_reg);
449  } else {
450  __ movp(FieldOperand(scratch1, offset), value_reg);
451  }
452 
453  if (!representation.IsSmi()) {
454  // Update the write barrier for the array address.
455  if (!representation.IsDouble()) {
456  __ movp(storage_reg, value_reg);
457  }
458  __ RecordWriteField(scratch1, offset, storage_reg, receiver_reg,
460  }
461  }
462 
463  // Return the value (register rax).
464  DCHECK(value_reg.is(rax));
465  __ ret(0);
466 }
467 
468 
469 void NamedStoreHandlerCompiler::GenerateStoreField(LookupIterator* lookup,
470  Register value_reg,
471  Label* miss_label) {
472  DCHECK(lookup->representation().IsHeapObject());
473  __ JumpIfSmi(value_reg, miss_label);
474  HeapType::Iterator<Map> it = lookup->GetFieldType()->Classes();
475  Label do_store;
476  while (true) {
477  __ CompareMap(value_reg, it.Current());
478  it.Advance();
479  if (it.Done()) {
480  __ j(not_equal, miss_label);
481  break;
482  }
483  __ j(equal, &do_store, Label::kNear);
484  }
485  __ bind(&do_store);
486 
487  StoreFieldStub stub(isolate(), lookup->GetFieldIndex(),
488  lookup->representation());
489  GenerateTailCall(masm(), stub.GetCode());
490 }
491 
492 
494  Register object_reg, Register holder_reg, Register scratch1,
495  Register scratch2, Handle<Name> name, Label* miss,
496  PrototypeCheckType check) {
497  Handle<Map> receiver_map(IC::TypeToMap(*type(), isolate()));
498 
499  // Make sure there's no overlap between holder and object registers.
500  DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
501  DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg) &&
502  !scratch2.is(scratch1));
503 
504  // Keep track of the current object in register reg. On the first
505  // iteration, reg is an alias for object_reg, on later iterations,
506  // it is an alias for holder_reg.
507  Register reg = object_reg;
508  int depth = 0;
509 
510  Handle<JSObject> current = Handle<JSObject>::null();
511  if (type()->IsConstant()) {
512  current = Handle<JSObject>::cast(type()->AsConstant()->Value());
513  }
514  Handle<JSObject> prototype = Handle<JSObject>::null();
515  Handle<Map> current_map = receiver_map;
516  Handle<Map> holder_map(holder()->map());
517  // Traverse the prototype chain and check the maps in the prototype chain for
518  // fast and global objects or do negative lookup for normal objects.
519  while (!current_map.is_identical_to(holder_map)) {
520  ++depth;
521 
522  // Only global objects and objects that do not require access
523  // checks are allowed in stubs.
524  DCHECK(current_map->IsJSGlobalProxyMap() ||
525  !current_map->is_access_check_needed());
526 
527  prototype = handle(JSObject::cast(current_map->prototype()));
528  if (current_map->is_dictionary_map() &&
529  !current_map->IsJSGlobalObjectMap()) {
530  DCHECK(!current_map->IsJSGlobalProxyMap()); // Proxy maps are fast.
531  if (!name->IsUniqueName()) {
532  DCHECK(name->IsString());
533  name = factory()->InternalizeString(Handle<String>::cast(name));
534  }
535  DCHECK(current.is_null() ||
536  current->property_dictionary()->FindEntry(name) ==
538 
539  GenerateDictionaryNegativeLookup(masm(), miss, reg, name, scratch1,
540  scratch2);
541 
542  __ movp(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
543  reg = holder_reg; // From now on the object will be in holder_reg.
544  __ movp(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
545  } else {
546  bool in_new_space = heap()->InNewSpace(*prototype);
547  // Two possible reasons for loading the prototype from the map:
548  // (1) Can't store references to new space in code.
549  // (2) Handler is shared for all receivers with the same prototype
550  // map (but not necessarily the same prototype instance).
551  bool load_prototype_from_map = in_new_space || depth == 1;
552  if (load_prototype_from_map) {
553  // Save the map in scratch1 for later.
554  __ movp(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
555  }
556  if (depth != 1 || check == CHECK_ALL_MAPS) {
557  __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
558  }
559 
560  // Check access rights to the global object. This has to happen after
561  // the map check so that we know that the object is actually a global
562  // object.
563  // This allows us to install generated handlers for accesses to the
564  // global proxy (as opposed to using slow ICs). See corresponding code
565  // in LookupForRead().
566  if (current_map->IsJSGlobalProxyMap()) {
567  __ CheckAccessGlobalProxy(reg, scratch2, miss);
568  } else if (current_map->IsJSGlobalObjectMap()) {
570  name, scratch2, miss);
571  }
572  reg = holder_reg; // From now on the object will be in holder_reg.
573 
574  if (load_prototype_from_map) {
575  __ movp(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
576  } else {
577  __ Move(reg, prototype);
578  }
579  }
580 
581  // Go to the next object in the prototype chain.
582  current = prototype;
583  current_map = handle(current->map());
584  }
585 
586  // Log the check depth.
587  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
588 
589  if (depth != 0 || check == CHECK_ALL_MAPS) {
590  // Check the holder map.
591  __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
592  }
593 
594  // Perform security check for access to the global object.
595  DCHECK(current_map->IsJSGlobalProxyMap() ||
596  !current_map->is_access_check_needed());
597  if (current_map->IsJSGlobalProxyMap()) {
598  __ CheckAccessGlobalProxy(reg, scratch1, miss);
599  }
600 
601  // Return the register containing the holder.
602  return reg;
603 }
604 
605 
606 void NamedLoadHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
607  if (!miss->is_unused()) {
608  Label success;
609  __ jmp(&success);
610  __ bind(miss);
611  TailCallBuiltin(masm(), MissBuiltin(kind()));
612  __ bind(&success);
613  }
614 }
615 
616 
617 void NamedStoreHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
618  if (!miss->is_unused()) {
619  Label success;
620  __ jmp(&success);
621  GenerateRestoreName(miss, name);
622  TailCallBuiltin(masm(), MissBuiltin(kind()));
623  __ bind(&success);
624  }
625 }
626 
627 
629  Register reg, Handle<ExecutableAccessorInfo> callback) {
630  // Insert additional parameters into the stack frame above return address.
631  DCHECK(!scratch4().is(reg));
632  __ PopReturnAddressTo(scratch4());
633 
641  __ Push(receiver()); // receiver
642  if (heap()->InNewSpace(callback->data())) {
643  DCHECK(!scratch2().is(reg));
644  __ Move(scratch2(), callback);
645  __ Push(FieldOperand(scratch2(),
647  } else {
648  __ Push(Handle<Object>(callback->data(), isolate()));
649  }
650  DCHECK(!kScratchRegister.is(reg));
651  __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
652  __ Push(kScratchRegister); // return value
653  __ Push(kScratchRegister); // return value default
654  __ PushAddress(ExternalReference::isolate_address(isolate()));
655  __ Push(reg); // holder
656  __ Push(name()); // name
657  // Save a pointer to where we pushed the arguments pointer. This will be
658  // passed as the const PropertyAccessorInfo& to the C++ callback.
659 
660  __ PushReturnAddressFrom(scratch4());
661 
662  // Abi for CallApiGetter
663  Register api_function_address = ApiGetterDescriptor::function_address();
664  Address getter_address = v8::ToCData<Address>(callback->getter());
665  __ Move(api_function_address, getter_address, RelocInfo::EXTERNAL_REFERENCE);
666 
667  CallApiGetterStub stub(isolate());
668  __ TailCallStub(&stub);
669 }
670 
671 
672 void NamedLoadHandlerCompiler::GenerateLoadConstant(Handle<Object> value) {
673  // Return the constant value.
674  __ Move(rax, value);
675  __ ret(0);
676 }
677 
678 
680  LookupIterator* it, Register holder_reg) {
681  DCHECK(holder()->HasNamedInterceptor());
682  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());
683 
684  // Compile the interceptor call, followed by inline code to load the
685  // property from further up the prototype chain if the call fails.
686  // Check that the maps haven't changed.
687  DCHECK(holder_reg.is(receiver()) || holder_reg.is(scratch1()));
688 
689  // Preserve the receiver register explicitly whenever it is different from the
690  // holder and it is needed should the interceptor return without any result.
691  // The ACCESSOR case needs the receiver to be passed into C++ code, the FIELD
692  // case might cause a miss during the prototype check.
693  bool must_perform_prototype_check =
694  !holder().is_identical_to(it->GetHolder<JSObject>());
695  bool must_preserve_receiver_reg =
696  !receiver().is(holder_reg) &&
697  (it->state() == LookupIterator::ACCESSOR || must_perform_prototype_check);
698 
699  // Save necessary data before invoking an interceptor.
700  // Requires a frame to make GC aware of pushed pointers.
701  {
702  FrameScope frame_scope(masm(), StackFrame::INTERNAL);
703 
704  if (must_preserve_receiver_reg) {
705  __ Push(receiver());
706  }
707  __ Push(holder_reg);
708  __ Push(this->name());
709 
710  // Invoke an interceptor. Note: map checks from receiver to
711  // interceptor's holder has been compiled before (see a caller
712  // of this method.)
713  CompileCallLoadPropertyWithInterceptor(
714  masm(), receiver(), holder_reg, this->name(), holder(),
715  IC::kLoadPropertyWithInterceptorOnly);
716 
717  // Check if interceptor provided a value for property. If it's
718  // the case, return immediately.
719  Label interceptor_failed;
720  __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
721  __ j(equal, &interceptor_failed);
722  frame_scope.GenerateLeaveFrame();
723  __ ret(0);
724 
725  __ bind(&interceptor_failed);
726  __ Pop(this->name());
727  __ Pop(holder_reg);
728  if (must_preserve_receiver_reg) {
729  __ Pop(receiver());
730  }
731 
732  // Leave the internal frame.
733  }
734 
735  GenerateLoadPostInterceptor(it, holder_reg);
736 }
737 
738 
739 void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg) {
740  // Call the runtime system to load the interceptor.
741  DCHECK(holder()->HasNamedInterceptor());
742  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());
743  __ PopReturnAddressTo(scratch2());
744  PushInterceptorArguments(masm(), receiver(), holder_reg, this->name(),
745  holder());
746  __ PushReturnAddressFrom(scratch2());
747 
748  ExternalReference ref = ExternalReference(
749  IC_Utility(IC::kLoadPropertyWithInterceptor), isolate());
750  __ TailCallExternalReference(
752 }
753 
754 
756  Handle<JSObject> object, Handle<Name> name,
757  Handle<ExecutableAccessorInfo> callback) {
758  Register holder_reg = Frontend(receiver(), name);
759 
760  __ PopReturnAddressTo(scratch1());
761  __ Push(receiver());
762  __ Push(holder_reg);
763  __ Push(callback); // callback info
764  __ Push(name);
765  __ Push(value());
766  __ PushReturnAddressFrom(scratch1());
767 
768  // Do tail-call to the runtime system.
769  ExternalReference store_callback_property =
770  ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
771  __ TailCallExternalReference(store_callback_property, 5, 1);
772 
773  // Return the generated code.
774  return GetCode(kind(), Code::FAST, name);
775 }
776 
777 
779  Handle<Name> name) {
780  __ PopReturnAddressTo(scratch1());
781  __ Push(receiver());
782  __ Push(this->name());
783  __ Push(value());
784  __ PushReturnAddressFrom(scratch1());
785 
786  // Do tail-call to the runtime system.
787  ExternalReference store_ic_property = ExternalReference(
788  IC_Utility(IC::kStorePropertyWithInterceptor), isolate());
789  __ TailCallExternalReference(store_ic_property, 3, 1);
790 
791  // Return the generated code.
792  return GetCode(kind(), Code::FAST, name);
793 }
794 
795 
798 }
799 
800 
802  Handle<PropertyCell> cell, Handle<Name> name, bool is_configurable) {
803  Label miss;
804  FrontendHeader(receiver(), name, &miss);
805 
806  // Get the value from the cell.
807  Register result = StoreDescriptor::ValueRegister();
808  __ Move(result, cell);
809  __ movp(result, FieldOperand(result, PropertyCell::kValueOffset));
810 
811  // Check for deleted property if property can actually be deleted.
812  if (is_configurable) {
813  __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
814  __ j(equal, &miss);
815  } else if (FLAG_debug_code) {
816  __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
817  __ Check(not_equal, kDontDeleteCellsCannotContainTheHole);
818  }
819 
820  Counters* counters = isolate()->counters();
821  __ IncrementCounter(counters->named_load_global_stub(), 1);
822  __ ret(0);
823 
824  FrontendFooter(name, &miss);
825 
826  // Return the generated code.
827  return GetCode(kind(), Code::NORMAL, name);
828 }
829 
830 
831 #undef __
832 }
833 } // namespace v8::internal
834 
835 #endif // V8_TARGET_ARCH_X64
static const Register function_address()
static const int kDataOffset
Definition: objects.h:10420
static const int kValueOffset
Definition: objects.h:9446
static int SlotOffset(int index)
Definition: contexts.h:552
static void GenerateStoreSlow(MacroAssembler *masm)
static const int kHeaderSize
Definition: objects.h:2393
static const int kNativeContextOffset
Definition: objects.h:7459
static const int kGlobalProxyOffset
Definition: objects.h:7461
static Handle< T > cast(Handle< S > that)
Definition: handles.h:116
static Handle< T > null()
Definition: handles.h:123
static const int kNotFound
Definition: objects.h:3283
static const int kValueOffset
Definition: objects.h:1506
static const int kMapOffset
Definition: objects.h:1427
static Handle< Map > TypeToMap(HeapType *type, Isolate *isolate)
Definition: ic.cc:719
static Handle< PropertyCell > EnsurePropertyCell(Handle< JSGlobalObject > global, Handle< Name > name)
Definition: objects.cc:14576
static const int kPropertiesOffset
Definition: objects.h:2193
static const int kBitFieldOffset
Definition: objects.h:6228
static const int kIsAccessCheckNeeded
Definition: objects.h:6246
static const int kHasNamedInterceptor
Definition: objects.h:6242
static const int kPrototypeOffset
Definition: objects.h:6190
static void GenerateNegativeLookup(MacroAssembler *masm, Label *miss, Label *done, Register receiver, Register properties, Handle< Name > name, Register scratch0)
Handle< Code > CompileLoadGlobal(Handle< PropertyCell > cell, Handle< Name > name, bool is_configurable)
virtual void FrontendFooter(Handle< Name > name, Label *miss)
static void GenerateDirectLoadGlobalFunctionPrototype(MacroAssembler *masm, int index, Register prototype, Label *miss)
void GenerateLoadInterceptorWithFollowup(LookupIterator *it, Register holder_reg)
virtual Register FrontendHeader(Register object_reg, Handle< Name > name, Label *miss)
void GenerateLoadConstant(Handle< Object > value)
void GenerateLoadInterceptor(Register holder_reg)
static void GenerateLoadFunctionPrototype(MacroAssembler *masm, Register receiver, Register scratch1, Register scratch2, Label *miss_label)
void GenerateLoadPostInterceptor(LookupIterator *it, Register reg)
static void GenerateLoadViaGetter(MacroAssembler *masm, Handle< HeapType > type, Register receiver, Handle< JSFunction > getter)
void GenerateLoadCallback(Register reg, Handle< ExecutableAccessorInfo > callback)
Handle< Code > CompileStoreInterceptor(Handle< Name > name)
virtual void FrontendFooter(Handle< Name > name, Label *miss)
void GenerateStoreField(LookupIterator *lookup, Register value_reg, Label *miss_label)
Handle< Code > CompileStoreCallback(Handle< JSObject > object, Handle< Name > name, Handle< ExecutableAccessorInfo > callback)
static void GenerateSlow(MacroAssembler *masm)
void GenerateRestoreName(Label *label, Handle< Name > name)
void GenerateStoreTransition(Handle< Map > transition, Handle< Name > name, Register receiver_reg, Register name_reg, Register value_reg, Register scratch1, Register scratch2, Register scratch3, Label *miss_label, Label *slow)
static void GenerateStoreViaSetter(MacroAssembler *masm, Handle< HeapType > type, Register receiver, Handle< JSFunction > setter)
static const int kReturnValueDefaultValueIndex
Definition: arguments.h:160
Handle< JSObject > holder() const
Register CheckPrototypes(Register object_reg, Register holder_reg, Register scratch1, Register scratch2, Handle< Name > name, Label *miss, PrototypeCheckType check=CHECK_ALL_MAPS)
Handle< Code > GetCode(Code::Kind kind, Code::StubType type, Handle< Name > name)
Handle< HeapType > type() const
static void GenerateDictionaryNegativeLookup(MacroAssembler *masm, Label *miss_label, Register receiver, Handle< Name > name, Register r0, Register r1)
static void GenerateFastApiCall(MacroAssembler *masm, const CallOptimization &optimization, Handle< Map > receiver_map, Register receiver, Register scratch, bool is_store, int argc, Register *values)
Register Frontend(Register object_reg, Handle< Name > name)
static void GenerateCheckPropertyCell(MacroAssembler *masm, Handle< JSGlobalObject > global, Handle< Name > name, Register scratch, Label *miss)
static const int kContextOffset
Definition: frames.h:162
static const Register ReceiverRegister()
static const Register NameRegister()
static const Register ValueRegister()
#define __
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf map
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be expose gc extension under the specified name show built in functions in stack traces use random jit cookie to mask large constants minimum length for automatic enable preparsing CPU profiler sampling interval in microseconds trace out of bounds accesses to external arrays default size of stack region v8 is allowed to maximum length of function source code printed in a stack trace min size of a semi the new space consists of two semi spaces print one trace line following each garbage collection do not print 
trace line after scavenger collection print cumulative GC statistics in name
#define LOG(isolate, Call)
Definition: log.h:69
#define UNREACHABLE()
Definition: logging.h:30
#define DCHECK(condition)
Definition: logging.h:205
@ CALL_FUNCTION
static int Push(SpecialRPOStackFrame *stack, int depth, BasicBlock *child, int unvisited)
Definition: scheduler.cc:773
const int kPointerSize
Definition: globals.h:129
@ DONT_DO_SMI_CHECK
Definition: globals.h:640
const Register kScratchRegister
Operand FieldOperand(Register object, int offset)
const Register rsi
const Register rbp
TypeImpl< HeapTypeConfig > HeapType
Definition: list.h:191
@ FIRST_SPEC_OBJECT_TYPE
Definition: objects.h:781
Handle< T > handle(T *t, Isolate *isolate)
Definition: handles.h:146
const Register rdi
const XMMRegister xmm0
const Register rbx
byte * Address
Definition: globals.h:101
const Register rdx
const Register rax
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
const Register rcx
Debugger support for the V8 JavaScript engine.
Definition: accessors.cc:20
bool is(Register reg) const