DCHECK(!masm->has_frame());
masm->set_has_frame(true);

masm->set_has_frame(false);
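// The two set_has_frame() calls above are only fragments. A minimal sketch of
// how they are presumably paired, based on the BeforeCall/AfterCall hooks
// declared further down; the frame setup in between is elided in this listing
// and is assumed here:
//
//   void BeforeCall(MacroAssembler* masm) const {
//     DCHECK(!masm->has_frame());
//     masm->set_has_frame(true);   // a frame is now live for the call
//   }
//
//   void AfterCall(MacroAssembler* masm) const {
//     masm->set_has_frame(false);  // the frame has been torn down again
//   }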
#define __ ACCESS_MASM(masm)

enum Direction { FORWARD, BACKWARD };
enum Alignment { MOVE_ALIGNED, MOVE_UNALIGNED };

void MemMoveEmitPopAndReturn(MacroAssembler* masm) {
class LabelConverter {
  explicit LabelConverter(byte* buffer) : buffer_(buffer) {}
  // Turn a bound label's position into an absolute address inside buffer_.
  int32_t address(Label* l) const {
    return reinterpret_cast<int32_t>(buffer_) + l->pos();
  }
MemMoveFunction CreateMemMoveFunction() {
  MacroAssembler masm(NULL, buffer, static_cast<int>(actual_size));
  LabelConverter conv(buffer);

  int stack_offset = 0;
  Label backward, backward_much_overlap;
  Label forward_much_overlap, small_size, medium_size, pop_and_return;

  // Load the three arguments (destination, source, size) from the stack.
  Register count = ecx;
  __ mov(dst, Operand(esp, stack_offset + kDestinationOffset));
  __ mov(src, Operand(esp, stack_offset + kSourceOffset));
  __ mov(count, Operand(esp, stack_offset + kSizeOffset));
  // Forward copy: a four-byte-at-a-time loop, then a byte-at-a-time loop for
  // the remaining tail.
  Label forward_loop_1byte, forward_loop_4byte;
  __ bind(&forward_loop_4byte);
  __ mov(eax, Operand(src, 0));
  __ sub(count, Immediate(4));
  __ add(src, Immediate(4));
  __ mov(Operand(dst, 0), eax);
  __ add(dst, Immediate(4));
  __ j(above, &forward_loop_4byte);
  __ bind(&forward_loop_1byte);
  __ mov_b(eax, Operand(src, 0));
  __ mov_b(Operand(dst, 0), eax);
  __ jmp(&forward_loop_1byte);
  // Backward copy (needed when the ranges overlap with dst above src): walk
  // from the end, again four bytes at a time first, then byte by byte.
  Label backward_loop_1byte, backward_loop_4byte, entry_shortcut;
  __ bind(&backward_loop_4byte);
  __ sub(src, Immediate(4));
  __ sub(count, Immediate(4));
  __ mov(eax, Operand(src, 0));
  __ sub(dst, Immediate(4));
  __ mov(Operand(dst, 0), eax);
  __ j(above, &backward_loop_4byte);
  __ bind(&backward_loop_1byte);
  __ bind(&entry_shortcut);
  __ mov_b(eax, Operand(src, 0));
  __ mov_b(Operand(dst, 0), eax);
  __ jmp(&backward_loop_1byte);
  __ bind(&pop_and_return);
  MemMoveEmitPopAndReturn(&masm);

  DCHECK(!RelocInfo::RequiresRelocation(desc));
  return FUNCTION_CAST<MemMoveFunction>(buffer);
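// Usage sketch. The MemMoveFunction typedef itself is not visible in this
// listing; the kDestinationOffset/kSourceOffset/kSizeOffset loads above
// suggest a memmove-style (dest, src, size) signature, which is what this
// sketch assumes:
//
//   MemMoveFunction mem_move = CreateMemMoveFunction();
//   char buf[16] = "overlapping";
//   // Overlapping ranges are fine, as with memmove(); the stub picks the
//   // forward or backward loop depending on how dst and src overlap.
//   mem_move(buf + 2, buf, 10);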
#define __ ACCESS_MASM(masm)

// Fragment of GenerateMapChangeElementsTransition (declared below): bail out
// to allocation_memento_found if the receiver still carries an
// AllocationMemento.
    MacroAssembler* masm,
    Label* allocation_memento_found) {
  Register scratch = edi;
  __ JumpIfJSArrayHasAllocationMemento(receiver, scratch,
                                       allocation_memento_found);
// Fragments of GenerateSmiToDouble (declared below): transition a Smi-only
// elements backing store to a FixedDoubleArray.
    MacroAssembler* masm,
  Label loop, entry, convert_hole, gc_required, only_change_map;
  __ JumpIfJSArrayHasAllocationMemento(edx, edi, fail);
  __ cmp(edi, Immediate(masm->isolate()->factory()->empty_fixed_array()));
         Immediate(masm->isolate()->factory()->fixed_double_array_map()));
  ExternalReference canonical_the_hole_nan_reference =
      ExternalReference::address_of_the_hole_nan();
  __ bind(&gc_required);
  __ JumpIfNotSmi(ebx, &convert_hole);
  __ fild_s(Operand(esp, 0));
  __ bind(&convert_hole);
  if (FLAG_debug_code) {
    __ cmp(ebx, masm->isolate()->factory()->the_hole_value());
    __ Assert(equal, kObjectFoundInSmiOnlyArray);
  }
  __ fld_d(Operand::StaticVariable(canonical_the_hole_nan_reference));
  __ bind(&only_change_map);
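// Per-element conversion sketch (an assumption about the elided loop body,
// based on the fild_s/fld_d pair above and on 31-bit ia32 Smis):
//
//   int32_t untagged = tagged >> 1;                     // drop the Smi tag bit
//   double as_double = static_cast<double>(untagged);   // what fild_s yields
//   // The hole is not a Smi; it is written out as the canonical hole NaN
//   // loaded via canonical_the_hole_nan_reference instead.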
// Fragments of GenerateDoubleToObject (declared below): the reverse
// transition, back to a FixedArray of tagged values.
    MacroAssembler* masm,
  Label loop, entry, convert_hole, gc_required, only_change_map, success;
  __ JumpIfJSArrayHasAllocationMemento(edx, edi, fail);
  __ cmp(edi, Immediate(masm->isolate()->factory()->empty_fixed_array()));
         Immediate(masm->isolate()->factory()->fixed_array_map()));
  __ bind(&only_change_map);
  __ bind(&gc_required);
  __ jmp(&entry, Label::kNear);
  __ bind(&convert_hole);
         masm->isolate()->factory()->the_hole_value());
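// Hole handling sketch for the double-to-object direction (illustrative only;
// kHoleNanUpper32/kHoleNanLower32 are the constants listed at the end of this
// file, and the elided loop presumably compares each slot against them):
//
//   bool IsHoleNan(uint32_t upper32, uint32_t lower32) {
//     return upper32 == kHoleNanUpper32 && lower32 == kHoleNanLower32;
//   }
//   // A hole slot becomes the_hole_value again; any other double is boxed
//   // as a new heap number (gc_required handles allocation failure).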
// Fragments of the string character load helper, Generate(masm, string,
// index, result, call_runtime) (declared below).
                                      Label* call_runtime) {
  Label check_sequential;
  __ j(zero, &check_sequential, Label::kNear);

  __ j(zero, &cons_string, Label::kNear);

  // Sliced string: add the slice offset to the index and continue with the
  // parent string.
  Label indirect_string_loaded;
  __ add(index, result);
  __ jmp(&indirect_string_loaded, Label::kNear);

  // Cons string: only flat cons strings (second part == empty string) are
  // handled here.
  __ bind(&cons_string);
         Immediate(factory->empty_string()));
  __ bind(&indirect_string_loaded);

  __ bind(&check_sequential);
  __ j(zero, &seq_string, Label::kNear);

  // External string: load the character from the external resource data,
  // two bytes or one byte per character depending on the encoding.
  Label one_byte_external, done;
  if (FLAG_debug_code) {
    __ Assert(zero, kExternalStringExpectedButNotFound);
  }
  __ movzx_w(result, Operand(result, index, times_2, 0));
  __ jmp(&done, Label::kNear);
  __ bind(&one_byte_external);
  __ movzx_b(result, Operand(result, index, times_1, 0));
  __ jmp(&done, Label::kNear);

  // Sequential string: read the character straight from the string object.
  __ bind(&seq_string);
  __ jmp(&done, Label::kNear);
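// Control-flow sketch of the helper above (the tests feeding the conditional
// jumps are elided in this listing; this is a reading aid, not the exact
// emitted code):
//
//   if (string is indirect) {
//     if (sliced)   { index += slice offset; string = parent; }
//     else /*cons*/ { if (second != empty_string) goto call_runtime;
//                     string = first; }
//   }
//   if (string is external) {
//     if (short external) goto call_runtime;
//     result = two_byte ? data[index * 2] : data[index];
//   } else {  // sequential
//     result = character at index in the in-object character data;
//   }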
// Code aging: the "young" sequence is the standard function prologue.
  patcher.masm()->push(ebp);
  patcher.masm()->mov(ebp, esp);
  patcher.masm()->push(esi);
  patcher.masm()->push(edi);

bool CodeAgingHelper::IsOld(byte* candidate) const {

  bool result = isolate->code_aging_helper()->IsYoung(sequence);
  DCHECK(result || isolate->code_aging_helper()->IsOld(sequence));

  // Aged code begins with a call; its displacement is decoded here to find
  // the age stub that was patched in.
  Address target_address = sequence + *reinterpret_cast<int*>(sequence) +

  uint32_t young_length = isolate->code_aging_helper()->young_sequence_length();
  isolate->code_aging_helper()->CopyYoungSequenceTo(sequence);
  CodePatcher patcher(sequence, young_length);
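// How the pieces above fit together, as far as the fragments show: the young
// sequence kept in young_sequence_ is the prologue emitted by the patcher
// calls above; IsYoung()/IsOld() compare a code object's first bytes against
// it, and rejuvenation copies it back over the aged call. A minimal,
// illustrative version of that byte comparison (not the exact implementation):
//
//   bool LooksYoung(const byte* candidate, const byte* young, size_t length) {
//     return memcmp(candidate, young, length) == 0;
//   }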
static void * Allocate(const size_t requested, size_t *allocated, bool is_executable)
static void ProtectCode(void *address, const size_t size)
static const int kCallTargetAddressOffset
const EmbeddedVector<byte, kNoCodeAgeSequenceLength> young_sequence_
static Code * GetCodeAgeStub(Isolate *isolate, Age age, MarkingParity parity)
static Code * GetCodeFromTargetAddress(Address address)
static void PatchPlatformCodeAge(Isolate *isolate, byte *sequence, Age age, MarkingParity parity)
static bool IsYoungSequence(Isolate *isolate, byte *sequence)
static void GetCodeAgeAndParity(Code *code, Age *age, MarkingParity *parity)
static const int kFirstOffset
static const int kSecondOffset
static void FlushICache(void *start, size_t size)
static void GenerateSmiToDouble(MacroAssembler *masm, Register receiver, Register key, Register value, Register target_map, AllocationSiteMode mode, Label *fail)
static void GenerateMapChangeElementsTransition(MacroAssembler *masm, Register receiver, Register key, Register value, Register target_map, AllocationSiteMode mode, Label *allocation_memento_found)
static void GenerateDoubleToObject(MacroAssembler *masm, Register receiver, Register key, Register value, Register target_map, AllocationSiteMode mode, Label *fail)
static const int kResourceDataOffset
static const int kLengthOffset
static const int kHeaderSize
static const int kValueOffset
static const int kMapOffset
static const int kElementsOffset
static const int kInstanceTypeOffset
static const int kHeaderSize
static const int kParentOffset
static const int kOffsetOffset
static Smi * FromInt(int value)
static const int kContextOffset
static void Generate(MacroAssembler *masm, Register string, Register index, Register result, Label *call_runtime)
virtual void AfterCall(MacroAssembler *masm) const
virtual void BeforeCall(MacroAssembler *masm) const
#define DCHECK(condition)
const uint32_t kStringEncodingMask
bool AreAliased(const CPURegister &reg1, const CPURegister &reg2, const CPURegister &reg3=NoReg, const CPURegister &reg4=NoReg, const CPURegister &reg5=NoReg, const CPURegister &reg6=NoReg, const CPURegister &reg7=NoReg, const CPURegister &reg8=NoReg)
const uint32_t kTwoByteStringTag
const uint32_t kShortExternalStringTag
Operand FieldOperand(Register object, int offset)
const uint32_t kOneByteStringTag
UnaryMathFunction CreateExpFunction()
const uint32_t kShortExternalStringMask
const uint32_t kStringRepresentationMask
const uint32_t kSlicedNotConsMask
OStream & dec(OStream &os)
static const byte kCallOpcode
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
UnaryMathFunction CreateSqrtFunction()
double (*UnaryMathFunction)(double x)
static const int kNoCodeAgeSequenceLength
const uint32_t kHoleNanLower32
const uint32_t kIsIndirectStringMask
const uint32_t kHoleNanUpper32