handler-compiler-ia32.cc
1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/v8.h"
6 
7 #if V8_TARGET_ARCH_IA32
8 
9 #include "src/ic/call-optimization.h"
10 #include "src/ic/handler-compiler.h"
11 #include "src/ic/ic.h"
12 
13 namespace v8 {
14 namespace internal {
15 
16 #define __ ACCESS_MASM(masm)
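// `__` expands to ACCESS_MASM(masm), so each `__ op(...)` line below calls the
// corresponding MacroAssembler method (masm->op(...)) to emit ia32 code for
// the handler stubs.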
17 
18 
19 void NamedLoadHandlerCompiler::GenerateLoadViaGetter(
20  MacroAssembler* masm, Handle<HeapType> type, Register receiver,
21  Handle<JSFunction> getter) {
22  {
23  FrameScope scope(masm, StackFrame::INTERNAL);
24 
25  if (!getter.is_null()) {
26  // Call the JavaScript getter with the receiver on the stack.
27  if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
28  // Swap in the global receiver.
29  __ mov(receiver,
30  FieldOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
31  }
32  __ push(receiver);
33  ParameterCount actual(0);
34  ParameterCount expected(getter);
35  __ InvokeFunction(getter, expected, actual, CALL_FUNCTION,
36  NullCallWrapper());
37  } else {
38  // If we generate a global code snippet for deoptimization only, remember
39  // the place to continue after deoptimization.
40  masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
41  }
42 
43  // Restore context register.
44  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
45  }
46  __ ret(0);
47 }
48 
49 
50 void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
51  MacroAssembler* masm, Label* miss_label, Register receiver,
52  Handle<Name> name, Register scratch0, Register scratch1) {
53  DCHECK(name->IsUniqueName());
54  DCHECK(!receiver.is(scratch0));
55  Counters* counters = masm->isolate()->counters();
56  __ IncrementCounter(counters->negative_lookups(), 1);
57  __ IncrementCounter(counters->negative_lookups_miss(), 1);
58 
59  __ mov(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));
60 
61  const int kInterceptorOrAccessCheckNeededMask =
62  (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
63 
64  // Bail out if the receiver has a named interceptor or requires access checks.
65  __ test_b(FieldOperand(scratch0, Map::kBitFieldOffset),
66  kInterceptorOrAccessCheckNeededMask);
67  __ j(not_zero, miss_label);
68 
69  // Check that receiver is a JSObject.
70  __ CmpInstanceType(scratch0, FIRST_SPEC_OBJECT_TYPE);
71  __ j(below, miss_label);
72 
73  // Load properties array.
74  Register properties = scratch0;
75  __ mov(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));
76 
77  // Check that the properties array is a dictionary.
78  __ cmp(FieldOperand(properties, HeapObject::kMapOffset),
79  Immediate(masm->isolate()->factory()->hash_table_map()));
80  __ j(not_equal, miss_label);
81 
82  Label done;
83  NameDictionaryLookupStub::GenerateNegativeLookup(masm, miss_label, &done,
84  properties, name, scratch1);
85  __ bind(&done);
86  __ DecrementCounter(counters->negative_lookups_miss(), 1);
87 }
88 
89 
90 void NamedLoadHandlerCompiler::GenerateDirectLoadGlobalFunctionPrototype(
91  MacroAssembler* masm, int index, Register prototype, Label* miss) {
92  // Get the global function with the given index.
93  Handle<JSFunction> function(
94  JSFunction::cast(masm->isolate()->native_context()->get(index)));
95  // Check we're still in the same context.
96  Register scratch = prototype;
97  const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX);
98  __ mov(scratch, Operand(esi, offset));
99  __ mov(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
100  __ cmp(Operand(scratch, Context::SlotOffset(index)), function);
101  __ j(not_equal, miss);
102 
103  // Load its initial map. The global functions all have initial maps.
104  __ Move(prototype, Immediate(Handle<Map>(function->initial_map())));
105  // Load the prototype from the initial map.
106  __ mov(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
107 }
108 
109 
110 void NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(
111  MacroAssembler* masm, Register receiver, Register scratch1,
112  Register scratch2, Label* miss_label) {
113  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
114  __ mov(eax, scratch1);
115  __ ret(0);
116 }
117 
118 
119 // Generate call to api function.
120 // This function uses push() to generate smaller, faster code than
121 // the version above. It is an optimization that should be removed
122 // when api call ICs are generated in hydrogen.
123 void PropertyHandlerCompiler::GenerateFastApiCall(
124  MacroAssembler* masm, const CallOptimization& optimization,
125  Handle<Map> receiver_map, Register receiver, Register scratch_in,
126  bool is_store, int argc, Register* values) {
127  // Copy return address.
128  __ pop(scratch_in);
129  // receiver
130  __ push(receiver);
131  // Write the arguments to stack frame.
132  for (int i = 0; i < argc; i++) {
133  Register arg = values[argc - 1 - i];
134  DCHECK(!receiver.is(arg));
135  DCHECK(!scratch_in.is(arg));
136  __ push(arg);
137  }
138  __ push(scratch_in);
139  // Stack now matches JSFunction abi.
140  DCHECK(optimization.is_simple_api_call());
141 
142  // Abi for CallApiFunctionStub.
143  Register callee = eax;
144  Register call_data = ebx;
145  Register holder = ecx;
146  Register api_function_address = edx;
147  Register scratch = edi; // scratch_in is no longer valid.
148 
149  // Put holder in place.
150  CallOptimization::HolderLookup holder_lookup;
151  Handle<JSObject> api_holder =
152  optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup);
153  switch (holder_lookup) {
154  case CallOptimization::kHolderIsReceiver:
155  __ Move(holder, receiver);
156  break;
157  case CallOptimization::kHolderFound:
158  __ LoadHeapObject(holder, api_holder);
159  break;
160  case CallOptimization::kHolderNotFound:
161  UNREACHABLE();
162  break;
163  }
164 
165  Isolate* isolate = masm->isolate();
166  Handle<JSFunction> function = optimization.constant_function();
167  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
168  Handle<Object> call_data_obj(api_call_info->data(), isolate);
169 
170  // Put callee in place.
171  __ LoadHeapObject(callee, function);
172 
173  bool call_data_undefined = false;
174  // Put call_data in place.
175  if (isolate->heap()->InNewSpace(*call_data_obj)) {
176  __ mov(scratch, api_call_info);
177  __ mov(call_data, FieldOperand(scratch, CallHandlerInfo::kDataOffset));
178  } else if (call_data_obj->IsUndefined()) {
179  call_data_undefined = true;
180  __ mov(call_data, Immediate(isolate->factory()->undefined_value()));
181  } else {
182  __ mov(call_data, call_data_obj);
183  }
184 
185  // Put api_function_address in place.
186  Address function_address = v8::ToCData<Address>(api_call_info->callback());
187  __ mov(api_function_address, Immediate(function_address));
188 
189  // Jump to stub.
190  CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc);
191  __ TailCallStub(&stub);
192 }
193 
194 
195 // Generate code to check that a global property cell is empty. Create
196 // the property cell at compilation time if no cell exists for the
197 // property.
198 void PropertyHandlerCompiler::GenerateCheckPropertyCell(
199  MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name,
200  Register scratch, Label* miss) {
201  Handle<PropertyCell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
202  DCHECK(cell->value()->IsTheHole());
203  Handle<Oddball> the_hole = masm->isolate()->factory()->the_hole_value();
204  if (masm->serializer_enabled()) {
205  __ mov(scratch, Immediate(cell));
206  __ cmp(FieldOperand(scratch, PropertyCell::kValueOffset),
207  Immediate(the_hole));
208  } else {
209  __ cmp(Operand::ForCell(cell), Immediate(the_hole));
210  }
211  __ j(not_equal, miss);
212 }
213 
214 
215 void NamedStoreHandlerCompiler::GenerateStoreViaSetter(
216  MacroAssembler* masm, Handle<HeapType> type, Register receiver,
217  Handle<JSFunction> setter) {
218  // ----------- S t a t e -------------
219  // -- esp[0] : return address
220  // -----------------------------------
221  {
222  FrameScope scope(masm, StackFrame::INTERNAL);
223 
224  // Save value register, so we can restore it later.
225  __ push(value());
226 
227  if (!setter.is_null()) {
228  // Call the JavaScript setter with receiver and value on the stack.
229  if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
230  // Swap in the global receiver.
231  __ mov(receiver,
232  FieldOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
233  }
234  __ push(receiver);
235  __ push(value());
236  ParameterCount actual(1);
237  ParameterCount expected(setter);
238  __ InvokeFunction(setter, expected, actual, CALL_FUNCTION,
239  NullCallWrapper());
240  } else {
241  // If we generate a global code snippet for deoptimization only, remember
242  // the place to continue after deoptimization.
243  masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
244  }
245 
246  // We have to return the passed value, not the return value of the setter.
247  __ pop(eax);
248 
249  // Restore context register.
250  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
251  }
252  __ ret(0);
253 }
254 
255 
256 static void PushInterceptorArguments(MacroAssembler* masm, Register receiver,
257  Register holder, Register name,
258  Handle<JSObject> holder_obj) {
259  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0);
260  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsInfoIndex == 1);
261  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 2);
262  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 3);
263  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 4);
264  __ push(name);
265  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
266  DCHECK(!masm->isolate()->heap()->InNewSpace(*interceptor));
267  Register scratch = name;
268  __ mov(scratch, Immediate(interceptor));
269  __ push(scratch);
270  __ push(receiver);
271  __ push(holder);
272 }
273 
274 
275 static void CompileCallLoadPropertyWithInterceptor(
276  MacroAssembler* masm, Register receiver, Register holder, Register name,
277  Handle<JSObject> holder_obj, IC::UtilityId id) {
278  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
279  __ CallExternalReference(ExternalReference(IC_Utility(id), masm->isolate()),
280  NamedLoadHandlerCompiler::kInterceptorArgsLength);
281 }
282 
283 
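// Pushes receiver, name, and value (with the return address re-pushed on top)
// in the layout expected by the store IC slow-path runtime calls below.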
284 static void StoreIC_PushArgs(MacroAssembler* masm) {
285  Register receiver = StoreDescriptor::ReceiverRegister();
286  Register name = StoreDescriptor::NameRegister();
287  Register value = StoreDescriptor::ValueRegister();
288 
289  DCHECK(!ebx.is(receiver) && !ebx.is(name) && !ebx.is(value));
290 
291  __ pop(ebx);
292  __ push(receiver);
293  __ push(name);
294  __ push(value);
295  __ push(ebx);
296 }
297 
298 
299 void NamedStoreHandlerCompiler::GenerateSlow(MacroAssembler* masm) {
300  // Return address is on the stack.
301  StoreIC_PushArgs(masm);
302 
303  // Do tail-call to runtime routine.
304  ExternalReference ref(IC_Utility(IC::kStoreIC_Slow), masm->isolate());
305  __ TailCallExternalReference(ref, 3, 1);
306 }
307 
308 
309 void ElementHandlerCompiler::GenerateStoreSlow(MacroAssembler* masm) {
310  // Return address is on the stack.
311  StoreIC_PushArgs(masm);
312 
313  // Do tail-call to runtime routine.
314  ExternalReference ref(IC_Utility(IC::kKeyedStoreIC_Slow), masm->isolate());
315  __ TailCallExternalReference(ref, 3, 1);
316 }
317 
318 
319 #undef __
320 #define __ ACCESS_MASM(masm())
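// From here on `__` goes through the handler compiler's own masm() accessor,
// since the remaining generators are member functions rather than statics
// taking an explicit MacroAssembler parameter.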
321 
322 
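// Bind |label| (if it was used) and reload the property name into the name
// register before control falls through to the miss handler.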
323 void NamedStoreHandlerCompiler::GenerateRestoreName(Label* label,
324  Handle<Name> name) {
325  if (!label->is_unused()) {
326  __ bind(label);
327  __ mov(this->name(), Immediate(name));
328  }
329 }
330 
331 
332 // Receiver_reg is preserved on jumps to miss_label, but may be destroyed if
333 // store is successful.
334 void NamedStoreHandlerCompiler::GenerateStoreTransition(
335  Handle<Map> transition, Handle<Name> name, Register receiver_reg,
336  Register storage_reg, Register value_reg, Register scratch1,
337  Register scratch2, Register unused, Label* miss_label, Label* slow) {
338  int descriptor = transition->LastAdded();
339  DescriptorArray* descriptors = transition->instance_descriptors();
340  PropertyDetails details = descriptors->GetDetails(descriptor);
341  Representation representation = details.representation();
342  DCHECK(!representation.IsNone());
343 
344  if (details.type() == CONSTANT) {
345  Handle<Object> constant(descriptors->GetValue(descriptor), isolate());
346  __ CmpObject(value_reg, constant);
347  __ j(not_equal, miss_label);
348  } else if (representation.IsSmi()) {
349  __ JumpIfNotSmi(value_reg, miss_label);
350  } else if (representation.IsHeapObject()) {
351  __ JumpIfSmi(value_reg, miss_label);
352  HeapType* field_type = descriptors->GetFieldType(descriptor);
353  HeapType::Iterator<Map> it = field_type->Classes();
354  if (!it.Done()) {
355  Label do_store;
356  while (true) {
357  __ CompareMap(value_reg, it.Current());
358  it.Advance();
359  if (it.Done()) {
360  __ j(not_equal, miss_label);
361  break;
362  }
363  __ j(equal, &do_store, Label::kNear);
364  }
365  __ bind(&do_store);
366  }
367  } else if (representation.IsDouble()) {
368  Label do_store, heap_number;
369  __ AllocateHeapNumber(storage_reg, scratch1, scratch2, slow, MUTABLE);
370 
371  __ JumpIfNotSmi(value_reg, &heap_number);
372  __ SmiUntag(value_reg);
373  __ Cvtsi2sd(xmm0, value_reg);
374  __ SmiTag(value_reg);
375  __ jmp(&do_store);
376 
377  __ bind(&heap_number);
378  __ CheckMap(value_reg, isolate()->factory()->heap_number_map(), miss_label,
379  DONT_DO_SMI_CHECK);
380  __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));
381 
382  __ bind(&do_store);
383  __ movsd(FieldOperand(storage_reg, HeapNumber::kValueOffset), xmm0);
384  }
385 
386  // Stub never generated for objects that require access checks.
387  DCHECK(!transition->is_access_check_needed());
388 
389  // Perform map transition for the receiver if necessary.
390  if (details.type() == FIELD &&
391  Map::cast(transition->GetBackPointer())->unused_property_fields() == 0) {
392  // The properties must be extended before we can store the value.
393  // We jump to a runtime call that extends the properties array.
394  __ pop(scratch1); // Return address.
395  __ push(receiver_reg);
396  __ push(Immediate(transition));
397  __ push(value_reg);
398  __ push(scratch1);
399  __ TailCallExternalReference(
400  ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
401  isolate()),
402  3, 1);
403  return;
404  }
405 
406  // Update the map of the object.
407  __ mov(scratch1, Immediate(transition));
408  __ mov(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1);
409 
410  // Update the write barrier for the map field.
411  __ RecordWriteField(receiver_reg, HeapObject::kMapOffset, scratch1, scratch2,
412  kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
413 
414  if (details.type() == CONSTANT) {
415  DCHECK(value_reg.is(eax));
416  __ ret(0);
417  return;
418  }
419 
420  int index = transition->instance_descriptors()->GetFieldIndex(
421  transition->LastAdded());
422 
423  // Adjust for the number of properties stored in the object. Even in the
424  // face of a transition we can use the old map here because the size of the
425  // object and the number of in-object properties is not going to change.
426  index -= transition->inobject_properties();
427 
428  SmiCheck smi_check =
429  representation.IsTagged() ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
430  // TODO(verwaest): Share this code as a code stub.
431  if (index < 0) {
432  // Set the property straight into the object.
433  int offset = transition->instance_size() + (index * kPointerSize);
434  if (representation.IsDouble()) {
435  __ mov(FieldOperand(receiver_reg, offset), storage_reg);
436  } else {
437  __ mov(FieldOperand(receiver_reg, offset), value_reg);
438  }
439 
440  if (!representation.IsSmi()) {
441  // Update the write barrier for the array address.
442  if (!representation.IsDouble()) {
443  __ mov(storage_reg, value_reg);
444  }
445  __ RecordWriteField(receiver_reg, offset, storage_reg, scratch1,
446  kDontSaveFPRegs, EMIT_REMEMBERED_SET, smi_check);
447  }
448  } else {
449  // Write to the properties array.
450  int offset = index * kPointerSize + FixedArray::kHeaderSize;
451  // Get the properties array (optimistically).
452  __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
453  if (representation.IsDouble()) {
454  __ mov(FieldOperand(scratch1, offset), storage_reg);
455  } else {
456  __ mov(FieldOperand(scratch1, offset), value_reg);
457  }
458 
459  if (!representation.IsSmi()) {
460  // Update the write barrier for the array address.
461  if (!representation.IsDouble()) {
462  __ mov(storage_reg, value_reg);
463  }
464  __ RecordWriteField(scratch1, offset, storage_reg, receiver_reg,
465  kDontSaveFPRegs, EMIT_REMEMBERED_SET, smi_check);
466  }
467  }
468 
469  // Return the value (register eax).
470  DCHECK(value_reg.is(eax));
471  __ ret(0);
472 }
473 
474 
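// Verifies that the incoming value is a heap object whose map is one of the
// classes recorded for the field, then tail-calls StoreFieldStub to do the
// actual store.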
475 void NamedStoreHandlerCompiler::GenerateStoreField(LookupIterator* lookup,
476  Register value_reg,
477  Label* miss_label) {
478  DCHECK(lookup->representation().IsHeapObject());
479  __ JumpIfSmi(value_reg, miss_label);
480  HeapType::Iterator<Map> it = lookup->GetFieldType()->Classes();
481  Label do_store;
482  while (true) {
483  __ CompareMap(value_reg, it.Current());
484  it.Advance();
485  if (it.Done()) {
486  __ j(not_equal, miss_label);
487  break;
488  }
489  __ j(equal, &do_store, Label::kNear);
490  }
491  __ bind(&do_store);
492 
493  StoreFieldStub stub(isolate(), lookup->GetFieldIndex(),
494  lookup->representation());
495  GenerateTailCall(masm(), stub.GetCode());
496 }
497 
498 
499 Register PropertyHandlerCompiler::CheckPrototypes(
500  Register object_reg, Register holder_reg, Register scratch1,
501  Register scratch2, Handle<Name> name, Label* miss,
502  PrototypeCheckType check) {
503  Handle<Map> receiver_map(IC::TypeToMap(*type(), isolate()));
504 
505  // Make sure there's no overlap between holder and object registers.
506  DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
507  DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg) &&
508  !scratch2.is(scratch1));
509 
510  // Keep track of the current object in register reg.
511  Register reg = object_reg;
512  int depth = 0;
513 
514  Handle<JSObject> current = Handle<JSObject>::null();
515  if (type()->IsConstant())
516  current = Handle<JSObject>::cast(type()->AsConstant()->Value());
517  Handle<JSObject> prototype = Handle<JSObject>::null();
518  Handle<Map> current_map = receiver_map;
519  Handle<Map> holder_map(holder()->map());
520  // Traverse the prototype chain and check the maps in the prototype chain for
521  // fast and global objects or do negative lookup for normal objects.
522  while (!current_map.is_identical_to(holder_map)) {
523  ++depth;
524 
525  // Only global objects and objects that do not require access
526  // checks are allowed in stubs.
527  DCHECK(current_map->IsJSGlobalProxyMap() ||
528  !current_map->is_access_check_needed());
529 
530  prototype = handle(JSObject::cast(current_map->prototype()));
531  if (current_map->is_dictionary_map() &&
532  !current_map->IsJSGlobalObjectMap()) {
533  DCHECK(!current_map->IsJSGlobalProxyMap()); // Proxy maps are fast.
534  if (!name->IsUniqueName()) {
535  DCHECK(name->IsString());
536  name = factory()->InternalizeString(Handle<String>::cast(name));
537  }
538  DCHECK(current.is_null() ||
539  current->property_dictionary()->FindEntry(name) ==
540  NameDictionary::kNotFound);
541 
542  GenerateDictionaryNegativeLookup(masm(), miss, reg, name, scratch1,
543  scratch2);
544 
545  __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
546  reg = holder_reg; // From now on the object will be in holder_reg.
547  __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
548  } else {
549  bool in_new_space = heap()->InNewSpace(*prototype);
550  // Two possible reasons for loading the prototype from the map:
551  // (1) Can't store references to new space in code.
552  // (2) Handler is shared for all receivers with the same prototype
553  // map (but not necessarily the same prototype instance).
554  bool load_prototype_from_map = in_new_space || depth == 1;
555  if (depth != 1 || check == CHECK_ALL_MAPS) {
556  __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
557  }
558 
559  // Check access rights to the global object. This has to happen after
560  // the map check so that we know that the object is actually a global
561  // object.
562  // This allows us to install generated handlers for accesses to the
563  // global proxy (as opposed to using slow ICs). See corresponding code
564  // in LookupForRead().
565  if (current_map->IsJSGlobalProxyMap()) {
566  __ CheckAccessGlobalProxy(reg, scratch1, scratch2, miss);
567  } else if (current_map->IsJSGlobalObjectMap()) {
568  GenerateCheckPropertyCell(masm(), Handle<JSGlobalObject>::cast(current),
569  name, scratch2, miss);
570  }
571 
572  if (load_prototype_from_map) {
573  // Save the map in scratch1 for later.
574  __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
575  }
576 
577  reg = holder_reg; // From now on the object will be in holder_reg.
578 
579  if (load_prototype_from_map) {
580  __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
581  } else {
582  __ mov(reg, prototype);
583  }
584  }
585 
586  // Go to the next object in the prototype chain.
587  current = prototype;
588  current_map = handle(current->map());
589  }
590 
591  // Log the check depth.
592  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
593 
594  if (depth != 0 || check == CHECK_ALL_MAPS) {
595  // Check the holder map.
596  __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
597  }
598 
599  // Perform security check for access to the global object.
600  DCHECK(current_map->IsJSGlobalProxyMap() ||
601  !current_map->is_access_check_needed());
602  if (current_map->IsJSGlobalProxyMap()) {
603  __ CheckAccessGlobalProxy(reg, scratch1, scratch2, miss);
604  }
605 
606  // Return the register containing the holder.
607  return reg;
608 }
609 
610 
611 void NamedLoadHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
612  if (!miss->is_unused()) {
613  Label success;
614  __ jmp(&success);
615  __ bind(miss);
616  TailCallBuiltin(masm(), MissBuiltin(kind()));
617  __ bind(&success);
618  }
619 }
620 
621 
622 void NamedStoreHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
623  if (!miss->is_unused()) {
624  Label success;
625  __ jmp(&success);
626  GenerateRestoreName(miss, name);
627  TailCallBuiltin(masm(), MissBuiltin(kind()));
628  __ bind(&success);
629  }
630 }
631 
632 
633 void NamedLoadHandlerCompiler::GenerateLoadCallback(
634  Register reg, Handle<ExecutableAccessorInfo> callback) {
635  // Insert additional parameters into the stack frame above return address.
636  DCHECK(!scratch3().is(reg));
637  __ pop(scratch3()); // Get return address to place it below.
638 
639  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
640  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
641  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
642  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
643  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
644  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
645  __ push(receiver()); // receiver
646  // Push data from ExecutableAccessorInfo.
647  if (isolate()->heap()->InNewSpace(callback->data())) {
648  DCHECK(!scratch2().is(reg));
649  __ mov(scratch2(), Immediate(callback));
650  __ push(FieldOperand(scratch2(), ExecutableAccessorInfo::kDataOffset));
651  } else {
652  __ push(Immediate(Handle<Object>(callback->data(), isolate())));
653  }
654  __ push(Immediate(isolate()->factory()->undefined_value())); // ReturnValue
655  // ReturnValue default value
656  __ push(Immediate(isolate()->factory()->undefined_value()));
657  __ push(Immediate(reinterpret_cast<int>(isolate())));
658  __ push(reg); // holder
659 
660  // Save a pointer to where we pushed the arguments. This will be
661  // passed as the const PropertyAccessorInfo& to the C++ callback.
662  __ push(esp);
663 
664  __ push(name()); // name
665 
666  __ push(scratch3()); // Restore return address.
667 
668  // Abi for CallApiGetter
669  Register getter_address = ApiGetterDescriptor::function_address();
670  Address function_address = v8::ToCData<Address>(callback->getter());
671  __ mov(getter_address, Immediate(function_address));
672 
673  CallApiGetterStub stub(isolate());
674  __ TailCallStub(&stub);
675 }
676 
677 
678 void NamedLoadHandlerCompiler::GenerateLoadConstant(Handle<Object> value) {
679  // Return the constant value.
680  __ LoadObject(eax, value);
681  __ ret(0);
682 }
683 
684 
685 void NamedLoadHandlerCompiler::GenerateLoadInterceptorWithFollowup(
686  LookupIterator* it, Register holder_reg) {
687  DCHECK(holder()->HasNamedInterceptor());
688  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());
689 
690  // Compile the interceptor call, followed by inline code to load the
691  // property from further up the prototype chain if the call fails.
692  // Check that the maps haven't changed.
693  DCHECK(holder_reg.is(receiver()) || holder_reg.is(scratch1()));
694 
695  // Preserve the receiver register explicitly whenever it is different from the
696  // holder and it is needed should the interceptor return without any result.
697  // The ACCESSOR case needs the receiver to be passed into C++ code, the FIELD
698  // case might cause a miss during the prototype check.
699  bool must_perform_prototype_check =
700  !holder().is_identical_to(it->GetHolder<JSObject>());
701  bool must_preserve_receiver_reg =
702  !receiver().is(holder_reg) &&
703  (it->state() == LookupIterator::ACCESSOR || must_perform_prototype_check);
704 
705  // Save necessary data before invoking an interceptor.
706  // Requires a frame to make GC aware of pushed pointers.
707  {
708  FrameScope frame_scope(masm(), StackFrame::INTERNAL);
709 
710  if (must_preserve_receiver_reg) {
711  __ push(receiver());
712  }
713  __ push(holder_reg);
714  __ push(this->name());
715 
716  // Invoke an interceptor. Note: map checks from receiver to
717  // interceptor's holder have been compiled before (see a caller
718  // of this method).
719  CompileCallLoadPropertyWithInterceptor(
720  masm(), receiver(), holder_reg, this->name(), holder(),
721  IC::kLoadPropertyWithInterceptorOnly);
722 
723  // Check if interceptor provided a value for property. If it's
724  // the case, return immediately.
725  Label interceptor_failed;
726  __ cmp(eax, factory()->no_interceptor_result_sentinel());
727  __ j(equal, &interceptor_failed);
728  frame_scope.GenerateLeaveFrame();
729  __ ret(0);
730 
731  // Clobber registers when generating debug-code to provoke errors.
732  __ bind(&interceptor_failed);
733  if (FLAG_debug_code) {
734  __ mov(receiver(), Immediate(bit_cast<int32_t>(kZapValue)));
735  __ mov(holder_reg, Immediate(bit_cast<int32_t>(kZapValue)));
736  __ mov(this->name(), Immediate(bit_cast<int32_t>(kZapValue)));
737  }
738 
739  __ pop(this->name());
740  __ pop(holder_reg);
741  if (must_preserve_receiver_reg) {
742  __ pop(receiver());
743  }
744 
745  // Leave the internal frame.
746  }
747 
748  GenerateLoadPostInterceptor(it, holder_reg);
749 }
750 
751 
752 void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg) {
753  DCHECK(holder()->HasNamedInterceptor());
754  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());
755  // Call the runtime system to load the interceptor.
756  __ pop(scratch2()); // save old return address
757  PushInterceptorArguments(masm(), receiver(), holder_reg, this->name(),
758  holder());
759  __ push(scratch2()); // restore old return address
760 
761  ExternalReference ref = ExternalReference(
762  IC_Utility(IC::kLoadPropertyWithInterceptor), isolate());
763  __ TailCallExternalReference(
764  ref, NamedLoadHandlerCompiler::kInterceptorArgsLength, 1);
765 }
766 
767 
768 Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
769  Handle<JSObject> object, Handle<Name> name,
770  Handle<ExecutableAccessorInfo> callback) {
771  Register holder_reg = Frontend(receiver(), name);
772 
773  __ pop(scratch1()); // remove the return address
774  __ push(receiver());
775  __ push(holder_reg);
776  __ Push(callback);
777  __ Push(name);
778  __ push(value());
779  __ push(scratch1()); // restore return address
780 
781  // Do tail-call to the runtime system.
782  ExternalReference store_callback_property =
783  ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
784  __ TailCallExternalReference(store_callback_property, 5, 1);
785 
786  // Return the generated code.
787  return GetCode(kind(), Code::FAST, name);
788 }
789 
790 
791 Handle<Code> NamedStoreHandlerCompiler::CompileStoreInterceptor(
792  Handle<Name> name) {
793  __ pop(scratch1()); // remove the return address
794  __ push(receiver());
795  __ push(this->name());
796  __ push(value());
797  __ push(scratch1()); // restore return address
798 
799  // Do tail-call to the runtime system.
800  ExternalReference store_ic_property = ExternalReference(
801  IC_Utility(IC::kStorePropertyWithInterceptor), isolate());
802  __ TailCallExternalReference(store_ic_property, 3, 1);
803 
804  // Return the generated code.
805  return GetCode(kind(), Code::FAST, name);
806 }
807 
808 
809 Register NamedStoreHandlerCompiler::value() {
810  return StoreDescriptor::ValueRegister();
811 }
812 
813 
814 Handle<Code> NamedLoadHandlerCompiler::CompileLoadGlobal(
815  Handle<PropertyCell> cell, Handle<Name> name, bool is_configurable) {
816  Label miss;
817 
818  FrontendHeader(receiver(), name, &miss);
819  // Get the value from the cell.
820  Register result = StoreDescriptor::ValueRegister();
821  if (masm()->serializer_enabled()) {
822  __ mov(result, Immediate(cell));
823  __ mov(result, FieldOperand(result, PropertyCell::kValueOffset));
824  } else {
825  __ mov(result, Operand::ForCell(cell));
826  }
827 
828  // Check for deleted property if property can actually be deleted.
829  if (is_configurable) {
830  __ cmp(result, factory()->the_hole_value());
831  __ j(equal, &miss);
832  } else if (FLAG_debug_code) {
833  __ cmp(result, factory()->the_hole_value());
834  __ Check(not_equal, kDontDeleteCellsCannotContainTheHole);
835  }
836 
837  Counters* counters = isolate()->counters();
838  __ IncrementCounter(counters->named_load_global_stub(), 1);
839  // The code above already loads the result into the return register.
840  __ ret(0);
841 
842  FrontendFooter(name, &miss);
843 
844  // Return the generated code.
845  return GetCode(kind(), Code::NORMAL, name);
846 }
847 
848 
849 #undef __
850 }
851 } // namespace v8::internal
852 
853 #endif // V8_TARGET_ARCH_IA32