#if V8_TARGET_ARCH_IA32
// StubRuntimeCallHelper::BeforeCall (fragment):
DCHECK(!masm->has_frame());
masm->set_has_frame(true);

// StubRuntimeCallHelper::AfterCall (fragment):
masm->set_has_frame(false);

UnaryMathFunction CreateExpFunction() {
  if (!FLAG_fast_math) return &std::exp;
  // ... (allocate an executable buffer) ...
  if (buffer == NULL) return &std::exp;
  ExternalReference::InitializeMathExpData();

  MacroAssembler masm(NULL, buffer, static_cast<int>(actual_size));
  XMMRegister input = xmm1;
  XMMRegister result = xmm2;
  // ... (load the argument, call MathExpGenerator::EmitMathExp, return) ...

  DCHECK(!RelocInfo::RequiresRelocation(desc));
  // ... (flush the instruction cache, write-protect the buffer) ...
  return FUNCTION_CAST<UnaryMathFunction>(buffer);
}
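
// Usage sketch (hypothetical caller, not part of this file):
//
//   UnaryMathFunction fast_exp = CreateExpFunction();
//   double y = fast_exp(1.0);  // ~2.71828; falls back to std::exp when
//                              // fast math is off or allocation failed.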

UnaryMathFunction CreateSqrtFunction() {
  // ... (allocate an executable buffer) ...
  if (buffer == NULL) return &std::sqrt;
  MacroAssembler masm(NULL, buffer, static_cast<int>(actual_size));
  // ... (emit sqrtsd on the double argument and return it) ...

  DCHECK(!RelocInfo::RequiresRelocation(desc));
  // ... (flush the instruction cache, write-protect the buffer) ...
  return FUNCTION_CAST<UnaryMathFunction>(buffer);
}
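
// Both Create*Function() stubs take their double argument from the stack and
// return the result in x87 st(0), per the ia32 cdecl convention for functions
// returning double; FUNCTION_CAST just reinterprets the buffer's start as a
// C function pointer of type UnaryMathFunction (double (*)(double)).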

// Helper functions for CreateMemMoveFunction.
#undef __
#define __ ACCESS_MASM(masm)

enum Direction { FORWARD, BACKWARD };
enum Alignment { MOVE_ALIGNED, MOVE_UNALIGNED };
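
// The movdq() macro below takes a boolean first argument; judging from the
// call sites, it emits an aligned movdqa when the flag is true and an
// unaligned movdqu otherwise (an assumption about the macro-assembler helper,
// not verified here). Direction selects front-to-back vs. back-to-front
// copying so that overlapping regions are handled correctly, as in memmove().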

// Copies in 64/32/16-byte chunks. Expects: esi - source, edi - destination,
// ecx - count (copy size in bytes), edx - loop count (number of 64-byte
// chunks). At most 15 bytes are left over for the caller's move_last_15
// handler.
void MemMoveEmitMainLoop(MacroAssembler* masm, Label* move_last_15,
                         Direction direction, Alignment alignment) {
  Register src = esi;
  Register dst = edi;
  Register count = ecx;
  Register loop_count = edx;
  Label loop, move_last_31, move_last_63;
  __ cmp(loop_count, 0);
  __ j(equal, &move_last_63);
  // Main loop: copy one 64-byte chunk per iteration.
  __ bind(&loop);
  if (direction == BACKWARD) __ sub(src, Immediate(0x40));
  __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0x00));
  __ movdq(alignment == MOVE_ALIGNED, xmm1, Operand(src, 0x10));
  __ movdq(alignment == MOVE_ALIGNED, xmm2, Operand(src, 0x20));
  __ movdq(alignment == MOVE_ALIGNED, xmm3, Operand(src, 0x30));
  if (direction == FORWARD) __ add(src, Immediate(0x40));
  if (direction == BACKWARD) __ sub(dst, Immediate(0x40));
  __ movdqa(Operand(dst, 0x00), xmm0);
  __ movdqa(Operand(dst, 0x10), xmm1);
  __ movdqa(Operand(dst, 0x20), xmm2);
  __ movdqa(Operand(dst, 0x30), xmm3);
  if (direction == FORWARD) __ add(dst, Immediate(0x40));
  __ dec(loop_count);
  __ j(not_zero, &loop);
  // At most 63 bytes left: copy 32 if bit 5 of count is set.
  __ bind(&move_last_63);
  __ test(count, Immediate(0x20));
  __ j(zero, &move_last_31);
  if (direction == BACKWARD) __ sub(src, Immediate(0x20));
  __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0x00));
  __ movdq(alignment == MOVE_ALIGNED, xmm1, Operand(src, 0x10));
  if (direction == FORWARD) __ add(src, Immediate(0x20));
  if (direction == BACKWARD) __ sub(dst, Immediate(0x20));
  __ movdqa(Operand(dst, 0x00), xmm0);
  __ movdqa(Operand(dst, 0x10), xmm1);
  if (direction == FORWARD) __ add(dst, Immediate(0x20));
  // At most 31 bytes left: copy 16 if bit 4 of count is set.
  __ bind(&move_last_31);
  __ test(count, Immediate(0x10));
  __ j(zero, move_last_15);
  if (direction == BACKWARD) __ sub(src, Immediate(0x10));
  __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0));
  if (direction == FORWARD) __ add(src, Immediate(0x10));
  if (direction == BACKWARD) __ sub(dst, Immediate(0x10));
  __ movdqa(Operand(dst, 0), xmm0);
  if (direction == FORWARD) __ add(dst, Immediate(0x10));
}


void MemMoveEmitPopAndReturn(MacroAssembler* masm) {
  // Restore the callee-saved registers pushed in CreateMemMoveFunction's
  // prologue and return.
  __ pop(esi);
  __ pop(edi);
  __ ret(0);
}
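
// Worked example for MemMoveEmitMainLoop (illustrative): for count == 87
// (0x57), the caller sets loop_count = 87 >> 6 = 1, so the main loop copies
// one 64-byte chunk. Then 0x57 & 0x20 == 0 skips the 32-byte step,
// 0x57 & 0x10 != 0 copies 16 bytes, and the remaining 87 - 64 - 16 = 7 bytes
// are left for the caller's move_last_15 handler.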

class LabelConverter {
 public:
  explicit LabelConverter(byte* buffer) : buffer_(buffer) {}
  int32_t address(Label* l) const {
    return reinterpret_cast<int32_t>(buffer_) + l->pos();
  }

 private:
  byte* buffer_;
};
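
// A bound label's pos() is its offset within the code buffer, so address()
// yields the absolute address the instruction will have at run time. That is
// what allows the generator below to emit jump tables as raw __ dd() entries;
// it is safe only because the buffer is allocated once and never relocated.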

MemMoveFunction CreateMemMoveFunction() {
  // ... (allocate an executable buffer) ...
  MacroAssembler masm(NULL, buffer, static_cast<int>(actual_size));
  LabelConverter conv(buffer);

  // Copies of up to kSmallCopySize bytes use the small handlers below.
  const size_t kSmallCopySize = 8;
  // Copies of up to kMediumCopySize bytes use the medium handlers below.
  const size_t kMediumCopySize = 63;
  // Minimum distance between source and destination for the main
  // 16-bytes-at-a-time loop to be safe on overlapping regions.
  const size_t kMinMoveDistance = 16;

  int stack_offset = 0;  // Update if we change the stack height.

  Label backward, backward_much_overlap;
  Label forward_much_overlap, small_size, medium_size, pop_and_return;
  __ push(edi);
  __ push(esi);
  stack_offset += 2 * kPointerSize;
  Register dst = edi;
  Register src = esi;
  Register count = ecx;
  Register loop_count = edx;
  __ mov(dst, Operand(esp, stack_offset + kDestinationOffset));
  __ mov(src, Operand(esp, stack_offset + kSourceOffset));
  __ mov(count, Operand(esp, stack_offset + kSizeOffset));

  __ cmp(dst, src);
  __ j(equal, &pop_and_return);

  __ prefetch(Operand(src, 0), 1);
  __ cmp(count, kSmallCopySize);
  __ j(below_equal, &small_size);
  __ cmp(count, kMediumCopySize);
  __ j(below_equal, &medium_size);
  __ cmp(dst, src);
  __ j(above, &backward);

  // Forward copy: |dst| is a lower address than |src|.
  Label unaligned_source, move_last_15, skip_last_move;
  __ mov(eax, src);
  __ sub(eax, dst);
  __ cmp(eax, kMinMoveDistance);
  __ j(below, &forward_much_overlap);
  // Copy the first 16 bytes unconditionally, then align dst.
  __ movdqu(xmm0, Operand(src, 0));
  __ movdqu(Operand(dst, 0), xmm0);
  __ mov(edx, dst);
  __ and_(edx, 0xF);
  __ neg(edx);
  __ add(edx, Immediate(16));
  __ add(dst, edx);
  __ add(src, edx);
  __ sub(count, edx);
  // dst is now aligned; decide whether src is too.
  __ mov(loop_count, count);
  __ shr(loop_count, 6);
  __ test(src, Immediate(0xF));
  __ j(not_zero, &unaligned_source);
  // Copy loop for aligned source and destination.
  MemMoveEmitMainLoop(&masm, &move_last_15, FORWARD, MOVE_ALIGNED);
  // At most 15 bytes left; finish with one unaligned 16-byte move that ends
  // exactly at dst + count.
  __ bind(&move_last_15);
  __ and_(count, 0xF);
  __ j(zero, &skip_last_move, Label::kNear);
  __ movdqu(xmm0, Operand(src, count, times_1, -0x10));
  __ movdqu(Operand(dst, count, times_1, -0x10), xmm0);
  __ bind(&skip_last_move);
  MemMoveEmitPopAndReturn(&masm);

  // Copy loop for unaligned source and aligned destination.
  __ bind(&unaligned_source);
  MemMoveEmitMainLoop(&masm, &move_last_15, FORWARD, MOVE_UNALIGNED);
  __ jmp(&move_last_15);

  // Forward copy with less than kMinMoveDistance between dst and src:
  // copy single bytes until dst is aligned, then run the unaligned-source
  // main loop.
  Label loop_until_aligned, last_15_much_overlap;
  __ bind(&loop_until_aligned);
  __ mov_b(eax, Operand(src, 0));
  __ inc(src);
  __ mov_b(Operand(dst, 0), eax);
  __ inc(dst);
  __ dec(count);
  __ bind(&forward_much_overlap);  // Entry point into this block.
  __ test(dst, Immediate(0xF));
  __ j(not_zero, &loop_until_aligned);
  // dst is now aligned; src can't be. Main copy loop.
  __ mov(loop_count, count);
  __ shr(loop_count, 6);
  MemMoveEmitMainLoop(&masm, &last_15_much_overlap, FORWARD, MOVE_UNALIGNED);
  __ bind(&last_15_much_overlap);
  __ and_(count, 0xF);
  __ j(zero, &pop_and_return);
  __ cmp(count, kSmallCopySize);
  __ j(below_equal, &small_size);
  __ jmp(&medium_size);
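
  // Control-flow note: the byte-copy loop body is bound *before* its entry
  // point. Execution enters at forward_much_overlap, j(not_zero) branches
  // backward to loop_until_aligned, and the body falls through into the
  // alignment test again, which thus doubles as loop condition and entry.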

  // Backward copy: |dst| is a higher address than |src|; dst and src are
  // advanced to the end and always point just past what is left to copy.
  Label unaligned_source, move_first_15, skip_last_move;
  __ bind(&backward);
  __ add(dst, count);
  __ add(src, count);
  __ mov(eax, dst);
  __ sub(eax, src);
  __ cmp(eax, kMinMoveDistance);
  __ j(below, &backward_much_overlap);
  // Copy the last 16 bytes unconditionally, then align dst downward.
  __ movdqu(xmm0, Operand(src, -0x10));
  __ movdqu(Operand(dst, -0x10), xmm0);
  __ mov(edx, dst);
  __ and_(edx, 0xF);
  __ sub(dst, edx);
  __ sub(src, edx);
  __ sub(count, edx);
  // dst is now aligned; decide whether src is too.
  __ mov(loop_count, count);
  __ shr(loop_count, 6);
  __ test(src, Immediate(0xF));
  __ j(not_zero, &unaligned_source);
  // Copy loop for aligned source and destination.
  MemMoveEmitMainLoop(&masm, &move_first_15, BACKWARD, MOVE_ALIGNED);
  // At most 15 bytes left; finish with one unaligned 16-byte move.
  __ bind(&move_first_15);
  __ and_(count, 0xF);
  __ j(zero, &skip_last_move, Label::kNear);
  __ sub(src, count);
  __ sub(dst, count);
  __ movdqu(xmm0, Operand(src, 0));
  __ movdqu(Operand(dst, 0), xmm0);
  __ bind(&skip_last_move);
  MemMoveEmitPopAndReturn(&masm);

  // Copy loop for unaligned source and aligned destination.
  __ bind(&unaligned_source);
  MemMoveEmitMainLoop(&masm, &move_first_15, BACKWARD, MOVE_UNALIGNED);
  __ jmp(&move_first_15);

  // Backward copy with less than kMinMoveDistance between dst and src.
  Label loop_until_aligned, first_15_much_overlap;
  __ bind(&loop_until_aligned);
  __ dec(src);
  __ dec(dst);
  __ mov_b(eax, Operand(src, 0));
  __ mov_b(Operand(dst, 0), eax);
  __ dec(count);
  __ bind(&backward_much_overlap);  // Entry point into this block.
  __ test(dst, Immediate(0xF));
  __ j(not_zero, &loop_until_aligned);
  // dst is now aligned; src can't be. Main copy loop.
  __ mov(loop_count, count);
  __ shr(loop_count, 6);
  MemMoveEmitMainLoop(&masm, &first_15_much_overlap, BACKWARD, MOVE_UNALIGNED);
  __ bind(&first_15_much_overlap);
  __ and_(count, 0xF);
  __ j(zero, &pop_and_return);
  // Let the small/medium handlers finish; they expect dst and src to point
  // at the start of the remaining bytes.
  __ sub(dst, count);
  __ sub(src, count);
  __ cmp(count, kSmallCopySize);
  __ j(below_equal, &small_size);
  __ jmp(&medium_size);

  // Medium-size handlers (9..63 bytes). Reads can overlap writes, so all
  // loads from src happen before the stores to dst.
  Label medium_handlers, f9_16, f17_32, f33_48, f49_63;

  __ bind(&f9_16);
  __ movsd(xmm0, Operand(src, 0));
  __ movsd(xmm1, Operand(src, count, times_1, -8));
  __ movsd(Operand(dst, 0), xmm0);
  __ movsd(Operand(dst, count, times_1, -8), xmm1);
  MemMoveEmitPopAndReturn(&masm);

  __ bind(&f17_32);
  __ movdqu(xmm0, Operand(src, 0));
  __ movdqu(xmm1, Operand(src, count, times_1, -0x10));
  __ movdqu(Operand(dst, 0x00), xmm0);
  __ movdqu(Operand(dst, count, times_1, -0x10), xmm1);
  MemMoveEmitPopAndReturn(&masm);

  __ bind(&f33_48);
  __ movdqu(xmm0, Operand(src, 0x00));
  __ movdqu(xmm1, Operand(src, 0x10));
  __ movdqu(xmm2, Operand(src, count, times_1, -0x10));
  __ movdqu(Operand(dst, 0x00), xmm0);
  __ movdqu(Operand(dst, 0x10), xmm1);
  __ movdqu(Operand(dst, count, times_1, -0x10), xmm2);
  MemMoveEmitPopAndReturn(&masm);

  __ bind(&f49_63);
  __ movdqu(xmm0, Operand(src, 0x00));
  __ movdqu(xmm1, Operand(src, 0x10));
  __ movdqu(xmm2, Operand(src, 0x20));
  __ movdqu(xmm3, Operand(src, count, times_1, -0x10));
  __ movdqu(Operand(dst, 0x00), xmm0);
  __ movdqu(Operand(dst, 0x10), xmm1);
  __ movdqu(Operand(dst, 0x20), xmm2);
  __ movdqu(Operand(dst, count, times_1, -0x10), xmm3);
  MemMoveEmitPopAndReturn(&masm);

  __ bind(&medium_handlers);
  __ dd(conv.address(&f9_16));
  __ dd(conv.address(&f17_32));
  __ dd(conv.address(&f33_48));
  __ dd(conv.address(&f49_63));

  __ bind(&medium_size);  // Entry point into this block.
  __ mov(eax, count);
  __ dec(eax);
  __ shr(eax, 4);  // Index: 9..16 -> 0, 17..32 -> 1, 33..48 -> 2, 49..63 -> 3.
  if (FLAG_debug_code) {
    Label ok;
    __ cmp(eax, 3);
    __ j(below_equal, &ok);
    __ int3();
    __ bind(&ok);
  }
  __ mov(eax, Operand(eax, times_4, conv.address(&medium_handlers)));
  __ jmp(eax);
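
  // The __ dd() directives emit data, not instructions: the four handler
  // addresses are laid down inline in the code buffer, and the indexed mov
  // above then loads medium_handlers[(count - 1) >> 4] and jumps to it.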

  // Small-size handlers (0..8 bytes), dispatched directly on count.
  Label small_handlers, f0, f1, f2, f3, f4, f5_8;

  __ bind(&f0);
  MemMoveEmitPopAndReturn(&masm);

  __ bind(&f1);
  __ mov_b(eax, Operand(src, 0));
  __ mov_b(Operand(dst, 0), eax);
  MemMoveEmitPopAndReturn(&masm);

  __ bind(&f2);
  __ mov_w(eax, Operand(src, 0));
  __ mov_w(Operand(dst, 0), eax);
  MemMoveEmitPopAndReturn(&masm);

  __ bind(&f3);
  __ mov_w(eax, Operand(src, 0));
  __ mov_b(edx, Operand(src, 2));
  __ mov_w(Operand(dst, 0), eax);
  __ mov_b(Operand(dst, 2), edx);
  MemMoveEmitPopAndReturn(&masm);

  __ bind(&f4);
  __ mov(eax, Operand(src, 0));
  __ mov(Operand(dst, 0), eax);
  MemMoveEmitPopAndReturn(&masm);

  __ bind(&f5_8);
  __ mov(eax, Operand(src, 0));
  __ mov(edx, Operand(src, count, times_1, -4));
  __ mov(Operand(dst, 0), eax);
  __ mov(Operand(dst, count, times_1, -4), edx);
  MemMoveEmitPopAndReturn(&masm);

  __ bind(&small_handlers);
  __ dd(conv.address(&f0));
  __ dd(conv.address(&f1));
  __ dd(conv.address(&f2));
  __ dd(conv.address(&f3));
  __ dd(conv.address(&f4));
  __ dd(conv.address(&f5_8));
  __ dd(conv.address(&f5_8));
  __ dd(conv.address(&f5_8));
  __ dd(conv.address(&f5_8));

  __ bind(&small_size);  // Entry point into this block.
  if (FLAG_debug_code) {
    Label ok;
    __ cmp(count, 8);
    __ j(below_equal, &ok);
    __ int3();
    __ bind(&ok);
  }
  __ mov(eax, Operand(count, times_4, conv.address(&small_handlers)));
  __ jmp(eax);
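
  // Nine table entries, one per count value 0..8; counts 5 through 8 share
  // the f5_8 handler, whose two 4-byte moves may overlap (the second one
  // ends exactly at dst + count), so every size is covered with at most
  // two loads and two stores.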

  __ bind(&pop_and_return);
  MemMoveEmitPopAndReturn(&masm);

  // ...
  DCHECK(!RelocInfo::RequiresRelocation(desc));
  // ...
  return FUNCTION_CAST<MemMoveFunction>(buffer);
}
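
// Usage sketch (hypothetical caller, not part of this file): the generated
// stub is called through the MemMoveFunction pointer exactly like memmove.
// In V8 proper the result is cached and a C++ fallback is used when stub
// generation is unavailable (an assumption here, not shown in this file).
//
//   MemMoveFunction fast_memmove = CreateMemMoveFunction();
//   if (fast_memmove != NULL) fast_memmove(dst_ptr, src_ptr, byte_count);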

#define __ ACCESS_MASM(masm)

void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
    MacroAssembler* masm, Register receiver, Register key, Register value,
    Register target_map, AllocationSiteMode mode,
    Label* allocation_memento_found) {
  Register scratch = edi;
  if (mode == TRACK_ALLOCATION_SITE) {
    __ JumpIfJSArrayHasAllocationMemento(
        receiver, scratch, allocation_memento_found);
  }
  // Set transitioned map.
  __ mov(FieldOperand(receiver, HeapObject::kMapOffset), target_map);
  __ RecordWriteField(receiver, HeapObject::kMapOffset, target_map, scratch,
                      kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
}

void ElementsTransitionGenerator::GenerateSmiToDouble(
    MacroAssembler* masm, Register receiver, Register key, Register value,
    Register target_map, AllocationSiteMode mode, Label* fail) {
  // The ia32 version pins its arguments: receiver is edx, key is ecx,
  // value is eax, target_map is ebx; edi serves as scratch.
  Label loop, entry, convert_hole, gc_required, only_change_map;

  if (mode == TRACK_ALLOCATION_SITE) {
    __ JumpIfJSArrayHasAllocationMemento(edx, edi, fail);
  }

  // An empty backing store needs only a map transition.
  __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
  __ cmp(edi, Immediate(masm->isolate()->factory()->empty_fixed_array()));
  __ j(equal, &only_change_map);

  // ... (allocate the FixedDoubleArray in eax) ...
  __ mov(FieldOperand(eax, HeapObject::kMapOffset),
         Immediate(masm->isolate()->factory()->fixed_double_array_map()));
  // ... (replace the receiver's elements pointer) ...
  __ RecordWriteField(edx, JSObject::kElementsOffset, eax, edi,
                      kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);

  // Prepare for conversion loop.
  ExternalReference canonical_the_hole_nan_reference =
      ExternalReference::address_of_the_hole_nan();
  XMMRegister the_hole_nan = xmm1;
  __ movsd(the_hole_nan,
           Operand::StaticVariable(canonical_the_hole_nan_reference));
  // ...

  // Call into runtime if GC is required.
  __ bind(&gc_required);
  // ...

  // Conversion loop: smis convert to doubles; anything else must be the hole.
  __ bind(&loop);
  // ...
  __ JumpIfNotSmi(ebx, &convert_hole);
  // ...

  __ bind(&convert_hole);
  if (FLAG_debug_code) {
    __ cmp(ebx, masm->isolate()->factory()->the_hole_value());
    __ Assert(equal, kObjectFoundInSmiOnlyArray);
  }
  __ movsd(FieldOperand(eax, edi, times_4, FixedDoubleArray::kHeaderSize),
           the_hole_nan);
  // ...

  __ bind(&only_change_map);
  // Set transitioned map.
  __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx);
  __ RecordWriteField(edx, HeapObject::kMapOffset, ebx, edi,
                      kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
}

void ElementsTransitionGenerator::GenerateDoubleToObject(
    MacroAssembler* masm, Register receiver, Register key, Register value,
    Register target_map, AllocationSiteMode mode, Label* fail) {
  // Registers are pinned as in GenerateSmiToDouble above.
  Label loop, entry, convert_hole, gc_required, only_change_map, success;

  if (mode == TRACK_ALLOCATION_SITE) {
    __ JumpIfJSArrayHasAllocationMemento(edx, edi, fail);
  }

  // An empty backing store needs only a map transition.
  __ cmp(edi, Immediate(masm->isolate()->factory()->empty_fixed_array()));
  __ j(equal, &only_change_map);

  // ... (allocate the destination FixedArray in eax) ...
  __ mov(FieldOperand(eax, HeapObject::kMapOffset),
         Immediate(masm->isolate()->factory()->fixed_array_map()));
  // ...

  __ bind(&only_change_map);
  __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx);
  __ RecordWriteField(edx, HeapObject::kMapOffset, ebx, edi,
                      kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  // ...

  __ bind(&gc_required);
  // ...

  // Conversion loop: box each double into a new HeapNumber.
  // ...
  __ RecordWriteArray(eax, edx, ebx,
                      kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  __ jmp(&entry, Label::kNear);

  // Replace the hole NaN with the canonical hole pointer.
  __ bind(&convert_hole);
  __ mov(FieldOperand(eax, ebx, times_2, FixedArray::kHeaderSize),
         masm->isolate()->factory()->the_hole_value());
  // ...

  // Restore the receiver's elements pointer and write the transitioned map.
  __ RecordWriteField(edx, JSObject::kElementsOffset, eax, edi,
                      kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  // ...
  __ RecordWriteField(edx, HeapObject::kMapOffset, ebx, edi,
                      kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  // ...
}
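
// A note on the write barriers above: RecordWriteField and RecordWriteArray
// emit the remembered-set bookkeeping the GC needs after a heap pointer is
// written into another object. The elements-pointer stores use
// EMIT_REMEMBERED_SET, while the map stores use OMIT_REMEMBERED_SET,
// presumably because a map pointer can never point into the young generation
// (an inference from the flag names, not stated in this file).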

void StringCharLoadGenerator::Generate(MacroAssembler* masm,
                                       Factory* factory,
                                       Register string,
                                       Register index,
                                       Register result,
                                       Label* call_runtime) {
  // Fetch the instance type of the receiver into the result register.
  __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));

  // We need special handling for indirect strings.
  Label check_sequential;
  __ test(result, Immediate(kIsIndirectStringMask));
  __ j(zero, &check_sequential, Label::kNear);

  // Dispatch on the indirect string shape: slice or cons.
  Label cons_string;
  __ test(result, Immediate(kSlicedNotConsMask));
  __ j(zero, &cons_string, Label::kNear);

  // Handle slices: add the slice offset to the index, continue with parent.
  Label indirect_string_loaded;
  __ mov(result, FieldOperand(string, SlicedString::kOffsetOffset));
  __ SmiUntag(result);
  __ add(index, result);
  __ mov(string, FieldOperand(string, SlicedString::kParentOffset));
  __ jmp(&indirect_string_loaded, Label::kNear);

  // Handle cons strings: only flattened cons strings (second part empty)
  // are handled here; anything else goes to the runtime.
  __ bind(&cons_string);
  __ cmp(FieldOperand(string, ConsString::kSecondOffset),
         Immediate(factory->empty_string()));
  __ j(not_equal, call_runtime);
  __ mov(string, FieldOperand(string, ConsString::kFirstOffset));

  __ bind(&indirect_string_loaded);
  __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));

  // Distinguish sequential and external strings.
  Label seq_string;
  __ bind(&check_sequential);
  __ test(result, Immediate(kStringRepresentationMask));
  __ j(zero, &seq_string, Label::kNear);

  // Handle external strings.
  Label one_byte_external, done;
  if (FLAG_debug_code) {
    // Indirect strings were handled above; sequential are ruled out too.
    __ test(result, Immediate(kIsIndirectStringMask));
    __ Assert(zero, kExternalStringExpectedButNotFound);
  }
  // ... (short external strings bail out; encoding selects the path) ...
  __ movzx_w(result, Operand(result, index, times_2, 0));
  __ jmp(&done, Label::kNear);
  __ bind(&one_byte_external);
  __ movzx_b(result, Operand(result, index, times_1, 0));
  __ jmp(&done, Label::kNear);

  // Sequential strings: same encoding dispatch on the in-object data.
  __ bind(&seq_string);
  // ...
  __ jmp(&done, Label::kNear);
  // ...
  __ bind(&done);
}
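
// Shape dispatch summary (restating the emitted control flow in C-like
// pseudocode, for orientation only):
//
//   if (string is slice)  { index += slice.offset; string = slice.parent; }
//   else if (string is cons) {
//     if (cons.second != "") goto call_runtime;  // not flattened
//     string = cons.first;
//   }
//   ch = string is external ? external_data[index] : seq_data[index];
//   // one-byte vs. two-byte decides byte vs. word loads throughout.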

static Operand ExpConstant(int index) {
  return Operand::StaticVariable(ExternalReference::math_exp_constants(index));
}
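
// math_exp_constants() exposes a small table of doubles set up by
// ExternalReference::InitializeMathExpData() (called from CreateExpFunction
// above); ExpConstant(i) is just a memory operand for the i-th entry. The
// constants themselves are defined elsewhere in V8; the code below only
// needs their indices.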

void MathExpGenerator::EmitMathExp(MacroAssembler* masm,
                                   XMMRegister input,
                                   XMMRegister result,
                                   XMMRegister double_scratch,
                                   Register temp1,
                                   Register temp2) {
  DCHECK(!input.is(double_scratch));
  DCHECK(!input.is(result));
  DCHECK(!result.is(double_scratch));
  DCHECK(!temp1.is(temp2));
  DCHECK(ExternalReference::math_exp_constants(0).address() != NULL);
  DCHECK(!masm->serializer_enabled());  // External references embedded below.

  Label done;

  __ movsd(double_scratch, ExpConstant(0));
  __ xorpd(result, result);
  __ ucomisd(double_scratch, input);
  __ j(above_equal, &done);
  __ ucomisd(input, ExpConstant(1));
  __ movsd(result, ExpConstant(2));
  __ j(above_equal, &done);
  __ movsd(double_scratch, ExpConstant(3));
  __ movsd(result, ExpConstant(4));
  __ mulsd(double_scratch, input);
  __ addsd(double_scratch, result);
  __ movd(temp2, double_scratch);
  __ subsd(double_scratch, result);
  __ movsd(result, ExpConstant(6));
  __ mulsd(double_scratch, ExpConstant(5));
  __ subsd(double_scratch, input);
  __ subsd(result, double_scratch);
  __ movsd(input, double_scratch);
  __ mulsd(input, double_scratch);
  __ mulsd(result, input);
  __ mov(temp1, temp2);
  __ mulsd(result, ExpConstant(7));
  __ subsd(result, double_scratch);
  __ add(temp1, Immediate(0x1ff800));
  __ addsd(result, ExpConstant(8));
  __ and_(temp2, Immediate(0x7ff));
  __ shr(temp1, 11);
  __ shl(temp1, 20);
  __ movd(input, temp1);
  __ pshufd(input, input, static_cast<uint8_t>(0xe1));  // Order: 11 10 00 01
  __ movsd(double_scratch, Operand::StaticArray(
      temp2, times_8, ExternalReference::math_exp_log_table()));
  __ orps(input, double_scratch);
  __ mulsd(result, input);

  __ bind(&done);
}
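
// Algorithm sketch (an inference from the constant usage, not a comment from
// the original file): the code computes exp(x) as 2^(x * log2(e)) and splits
// the scaled argument into an 11-bit table index plus an integer exponent.
// temp1 becomes the IEEE-754 exponent bits (shr 11 / shl 20 positions them in
// the high word, with pshufd moving that word into place), temp2 indexes
// math_exp_log_table() for the fractional part, and the polynomial built in
// result/double_scratch corrects the remaining error before the final mulsd.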

CodeAgingHelper::CodeAgingHelper() {
  DCHECK(young_sequence_.length() == kNoCodeAgeSequenceLength);
  CodePatcher patcher(young_sequence_.start(), young_sequence_.length());
  patcher.masm()->push(ebp);
  patcher.masm()->mov(ebp, esp);
  patcher.masm()->push(esi);
  patcher.masm()->push(edi);
}
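
// The four instructions above are the standard ia32 function prologue
// (push ebp; mov ebp, esp; push esi; push edi). A function whose entry still
// matches this byte sequence is "young"; aging overwrites it with a call to a
// code-age stub, which is why IsOld() below can test for the call opcode.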

#ifdef DEBUG
bool CodeAgingHelper::IsOld(byte* candidate) const {
  return *candidate == kCallOpcode;
}
#endif


bool Code::IsYoungSequence(Isolate* isolate, byte* sequence) {
  bool result = isolate->code_aging_helper()->IsYoung(sequence);
  DCHECK(result || isolate->code_aging_helper()->IsOld(sequence));
  return result;
}


// From Code::GetCodeAgeAndParity: an old sequence starts with a call whose
// 32-bit displacement recovers the code-age stub that was patched in:
Address target_address = sequence + *reinterpret_cast<int*>(sequence) +
                         Assembler::kCallTargetAddressOffset;


// From Code::PatchPlatformCodeAge:
uint32_t young_length = isolate->code_aging_helper()->young_sequence_length();
// Making code young again re-emits the prologue bytes...
isolate->code_aging_helper()->CopyYoungSequenceTo(sequence);
// ...while aging it patches in a call to the age stub:
CodePatcher patcher(sequence, young_length);


#endif  // V8_TARGET_ARCH_IA32