25 class SafepointGenerator
FINAL :
public CallWrapper {
27 SafepointGenerator(LCodeGen* codegen,
28 LPointerMap* pointers,
29 Safepoint::DeoptMode
mode)
33 virtual ~SafepointGenerator() {}
35 virtual void BeforeCall(
int call_size)
const OVERRIDE {}
37 virtual void AfterCall() const
OVERRIDE {
38 codegen_->RecordSafepoint(pointers_, deopt_mode_);
43 LPointerMap* pointers_;
44 Safepoint::DeoptMode deopt_mode_;
51 LPhase phase(
"Z_Code generation",
chunk());
58 FrameScope frame_scope(masm_, StackFrame::MANUAL);
63 ((
chunk()->num_double_slots() > 2 &&
64 !
chunk()->graph()->is_recursive()) ||
65 !info()->osr_ast_id().IsNone());
78 code->set_safepoint_table_offset(
safepoints_.GetCodeOffset());
79 if (code->is_optimized_code()) RegisterWeakObjectsInOptimizedCode(code);
81 if (!info()->IsStub()) {
88 void LCodeGen::MakeSureStackPagesMapped(
int offset) {
89 const int kPageSize = 4 *
KB;
90 for (offset -= kPageSize; offset > 0; offset -= kPageSize) {
100 if (info()->IsOptimizing()) {
104 if (strlen(FLAG_stop_at) > 0 &&
105 info_->function()->name()->IsUtf8EqualTo(
CStrVector(FLAG_stop_at))) {
113 if (info_->this_has_uses() &&
114 info_->strict_mode() ==
SLOPPY &&
115 !info_->is_native()) {
119 __ mov(
ecx, Operand(
esp, receiver_offset));
121 __ cmp(
ecx, isolate()->factory()->undefined_value());
127 __ mov(Operand(
esp, receiver_offset),
ecx);
136 Label do_not_pad, align_loop;
141 __ push(Immediate(0));
145 __ mov(
ecx, Immediate(
scope()->num_parameters() + 2));
147 __ bind(&align_loop);
154 __ bind(&do_not_pad);
158 info()->set_prologue_offset(masm_->pc_offset());
162 if (info()->IsStub()) {
165 __ Prologue(info()->IsCodePreAgingActive());
167 info()->AddNoFrameRange(0, masm_->pc_offset());
170 if (info()->IsOptimizing() &&
174 __ Assert(
zero, kFrameIsExpectedToBeAligned);
179 DCHECK(slots != 0 || !info()->IsOptimizing());
188 if (FLAG_debug_code) {
194 __ mov(Operand(
eax), Immediate(slots));
210 Comment(
";;; Store dynamic frame alignment tag for spilled doubles");
224 if (heap_slots > 0) {
225 Comment(
";;; Allocate local context");
226 bool need_write_barrier =
true;
228 if (heap_slots <= FastNewContextStub::kMaximumSlots) {
229 FastNewContextStub stub(isolate(), heap_slots);
232 need_write_barrier =
false;
245 for (
int i = 0;
i < num_parameters;
i++) {
247 if (var->IsContextSlot()) {
251 __ mov(
eax, Operand(
ebp, parameter_offset));
254 __ mov(Operand(
esi, context_offset),
eax);
256 if (need_write_barrier) {
257 __ RecordWriteContextSlot(
esi, context_offset,
eax,
ebx,
259 }
else if (FLAG_debug_code) {
261 __ JumpIfInNewSpace(
esi,
eax, &done, Label::kNear);
262 __ Abort(kExpectedNewSpaceObject);
267 Comment(
";;; End allocate local context");
273 if (FLAG_trace && info()->IsOptimizing()) {
278 return !is_aborted();
293 Label do_not_pad, align_loop;
296 __ j(
zero, &do_not_pad, Label::kNear);
297 __ push(Immediate(0));
304 __ mov(
ecx, Immediate(
scope()->num_parameters() +
305 5 +
graph()->osr()->UnoptimizedFrameSlots()));
307 __ bind(&align_loop);
315 __ bind(&do_not_pad);
320 __ push(alignment_loc);
323 __ mov(alignment_loc,
edx);
337 if (instr->IsCall()) {
340 if (!instr->IsLazyBailout() && !instr->IsGap()) {
349 if (instr->IsCall() && instr->ClobbersDoubleRegisters(isolate())) {
350 bool double_result = instr->HasDoubleRegisterResult();
353 __ fstp_d(Operand(
esp, 0));
357 __ fld_d(Operand(
esp, 0));
361 if (instr->IsGoto()) {
364 !instr->IsGap() && !instr->IsReturn()) {
365 if (instr->ClobbersDoubleRegisters(isolate())) {
366 if (instr->HasDoubleRegisterResult()) {
380 Comment(
";;; -------------------- Jump table --------------------");
383 Deoptimizer::JumpTableEntry* table_entry = &
jump_table_[
i];
384 __ bind(&table_entry->label);
385 Address entry = table_entry->address;
386 DeoptComment(table_entry->reason);
387 if (table_entry->needs_frame) {
388 DCHECK(!info()->saves_caller_doubles());
389 __ push(Immediate(ExternalReference::ForDeoptEntry(entry)));
390 if (needs_frame.is_bound()) {
391 __ jmp(&needs_frame);
393 __ bind(&needs_frame);
404 Label push_approx_pc;
405 __ call(&push_approx_pc);
406 __ bind(&push_approx_pc);
418 return !is_aborted();
425 for (
int i = 0; !is_aborted() &&
i <
deferred_.length();
i++) {
431 instructions_->at(code->instruction_index())->hydrogen_value();
433 chunk()->
graph()->SourcePositionToScriptPosition(value->position()));
435 Comment(
";;; <@%d,#%d> "
436 "-------------------- Deferred %s --------------------",
437 code->instruction_index(),
438 code->instr()->hydrogen_value()->id(),
439 code->instr()->Mnemonic());
440 __ bind(code->entry());
442 Comment(
";;; Build frame");
451 Comment(
";;; Deferred code");
455 __ bind(code->done());
456 Comment(
";;; Destroy frame");
462 __ jmp(code->exit());
468 if (!is_aborted()) status_ =
DONE;
469 return !is_aborted();
475 if (!info()->IsStub()) {
480 while (masm()->pc_offset() < target_offset) {
485 return !is_aborted();
535 DCHECK(Contains(reg) && stack_depth_ > other_slot);
536 int i = ArrayIndex(reg);
538 if (st != other_slot) {
539 int other_i = st2idx(other_slot);
540 X87Register other = stack_[other_i];
541 stack_[other_i] = reg;
545 }
else if (other_slot == 0) {
557 return stack_depth_ - pos - 1;
562 for (
int i = 0;
i < stack_depth_;
i++) {
563 if (stack_[
i].is(reg))
return i;
571 for (
int i = 0;
i < stack_depth_;
i++) {
572 if (stack_[
i].is(reg))
return true;
581 int i = ArrayIndex(reg);
585 int tos_i = st2idx(0);
586 stack_[
i] = stack_[tos_i];
621 DCHECK(!src.is_reg_only());
639 DCHECK(!dst.is_reg_only());
663 stack_[stack_depth_] = reg;
670 DCHECK(!Contains(reg) && stack_[stack_depth_].is(reg) &&
677 X87Register left, X87Register right, X87Register result) {
686 if (stack_depth_ > 0 && instr->ClobbersDoubleRegisters(isolate())) {
687 bool double_inputs = instr->HasDoubleRegisterInput();
690 for (
int i = stack_depth_-1;
i >= 0;
i--) {
691 X87Register reg = stack_[
i];
694 if (double_inputs && instr->IsDoubleInput(reg, cgen)) {
698 if (
i < stack_depth_-1)
i++;
701 if (instr->IsReturn()) {
702 while (stack_depth_ > 0) {
717 int goto_block_id = goto_instr->block_id();
718 if (current_block_id + 1 != goto_block_id) {
724 if (FLAG_unreachable_code_elimination) {
725 int length = goto_instr->block()->predecessors()->length();
726 bool has_unreachable_last_predecessor =
false;
727 for (
int i = 0;
i < length;
i++) {
728 HBasicBlock* block = goto_instr->block()->predecessors()->at(
i);
729 if (block->IsUnreachable() &&
730 (block->block_id() + 1) == goto_block_id) {
731 has_unreachable_last_predecessor =
true;
734 if (has_unreachable_last_predecessor) {
735 if (cgen->x87_stack_map_.find(goto_block_id) ==
736 cgen->x87_stack_map_.end()) {
738 cgen->x87_stack_map_.insert(std::make_pair(goto_block_id, stack));
771 DCHECK(op->IsDoubleRegister());
782 const Representation& r)
const {
783 HConstant* constant = chunk_->LookupConstant(op);
784 int32_t value = constant->Integer32Value();
785 if (r.IsInteger32())
return value;
786 DCHECK(r.IsSmiOrTagged());
792 HConstant* constant = chunk_->LookupConstant(op);
793 DCHECK(chunk_->LookupLiteralRepresentation(op).IsSmiOrTagged());
794 return constant->handle(isolate());
799 HConstant* constant = chunk_->LookupConstant(op);
800 DCHECK(constant->HasDoubleValue());
801 return constant->DoubleValue();
806 HConstant* constant = chunk_->LookupConstant(op);
807 DCHECK(constant->HasExternalReferenceValue());
808 return constant->ExternalReferenceValue();
813 return chunk_->LookupLiteralRepresentation(op).IsSmiOrInteger32();
818 return chunk_->LookupLiteralRepresentation(op).IsSmi();
829 if (op->IsRegister())
return Operand(
ToRegister(op));
830 DCHECK(!op->IsDoubleRegister());
831 DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
843 DCHECK(op->IsDoubleStackSlot());
856 Translation* translation) {
857 if (environment ==
NULL)
return;
860 int translation_size = environment->translation_size();
862 int height = translation_size - environment->parameter_count();
865 bool has_closure_id = !info()->closure().is_null() &&
866 !info()->closure().is_identical_to(environment->closure());
867 int closure_id = has_closure_id
869 : Translation::kSelfLiteralId;
870 switch (environment->frame_type()) {
872 translation->BeginJSFrame(environment->ast_id(), closure_id, height);
875 translation->BeginConstructStubFrame(closure_id, translation_size);
878 DCHECK(translation_size == 1);
880 translation->BeginGetterStubFrame(closure_id);
883 DCHECK(translation_size == 2);
885 translation->BeginSetterStubFrame(closure_id);
888 translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
891 translation->BeginCompiledStubFrame();
897 int object_index = 0;
898 int dematerialized_index = 0;
899 for (
int i = 0;
i < translation_size; ++
i) {
900 LOperand* value = environment->values()->at(
i);
904 environment->HasTaggedValueAt(
i),
905 environment->HasUint32ValueAt(
i),
907 &dematerialized_index);
913 Translation* translation,
917 int* object_index_pointer,
918 int* dematerialized_index_pointer) {
919 if (op == LEnvironment::materialization_marker()) {
920 int object_index = (*object_index_pointer)++;
921 if (environment->ObjectIsDuplicateAt(object_index)) {
922 int dupe_of = environment->ObjectDuplicateOfAt(object_index);
923 translation->DuplicateObject(dupe_of);
926 int object_length = environment->ObjectLengthAt(object_index);
927 if (environment->ObjectIsArgumentsAt(object_index)) {
928 translation->BeginArgumentsObject(object_length);
930 translation->BeginCapturedObject(object_length);
932 int dematerialized_index = *dematerialized_index_pointer;
933 int env_offset = environment->translation_size() + dematerialized_index;
934 *dematerialized_index_pointer += object_length;
935 for (
int i = 0;
i < object_length; ++
i) {
936 LOperand* value = environment->values()->at(env_offset +
i);
940 environment->HasTaggedValueAt(env_offset +
i),
941 environment->HasUint32ValueAt(env_offset +
i),
942 object_index_pointer,
943 dematerialized_index_pointer);
948 if (op->IsStackSlot()) {
950 translation->StoreStackSlot(op->index());
951 }
else if (is_uint32) {
952 translation->StoreUint32StackSlot(op->index());
954 translation->StoreInt32StackSlot(op->index());
956 }
else if (op->IsDoubleStackSlot()) {
957 translation->StoreDoubleStackSlot(op->index());
958 }
else if (op->IsRegister()) {
961 translation->StoreRegister(reg);
962 }
else if (is_uint32) {
963 translation->StoreUint32Register(reg);
965 translation->StoreInt32Register(reg);
967 }
else if (op->IsDoubleRegister()) {
969 translation->StoreDoubleRegister(reg);
970 }
else if (op->IsConstantOperand()) {
971 HConstant* constant =
chunk()->LookupConstant(LConstantOperand::cast(op));
973 translation->StoreLiteral(src_index);
990 if (code->kind() == Code::BINARY_OP_IC ||
991 code->kind() == Code::COMPARE_IC) {
999 LInstruction* instr) {
1007 DCHECK(instr->HasPointerMap());
1013 DCHECK(info()->is_calling());
1018 if (context->IsRegister()) {
1022 }
else if (context->IsStackSlot()) {
1024 }
else if (context->IsConstantOperand()) {
1025 HConstant* constant =
1026 chunk_->LookupConstant(LConstantOperand::cast(context));
1035 LInstruction* instr,
1036 LOperand* context) {
1039 __ CallRuntimeSaveDoubles(
id);
1041 instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);
1043 DCHECK(info()->is_calling());
1049 environment->set_has_been_used();
1050 if (!environment->HasBeenRegistered()) {
1064 int frame_count = 0;
1065 int jsframe_count = 0;
1072 Translation translation(&
translations_, frame_count, jsframe_count, zone());
1075 int pc_offset = masm()->pc_offset();
1076 environment->Register(deoptimization_index,
1077 translation.index(),
1078 (
mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
1089 DCHECK(environment->HasBeenRegistered());
1090 int id = environment->deoptimization_index();
1091 DCHECK(info()->IsOptimizing() || info()->IsStub());
1094 if (entry ==
NULL) {
1095 Abort(kBailoutWasNotPrepared);
1100 ExternalReference count = ExternalReference::stress_deopt_count(isolate());
1104 __ mov(
eax, Operand::StaticVariable(count));
1105 __ sub(
eax, Immediate(1));
1107 if (FLAG_trap_on_deopt)
__ int3();
1108 __ mov(
eax, Immediate(FLAG_deopt_every_n_times));
1109 __ mov(Operand::StaticVariable(count),
eax);
1121 __ mov(Operand::StaticVariable(count),
eax);
1134 __ push(Immediate(x87_stack_layout));
1141 if (info()->ShouldTrapOnDeopt()) {
1148 Deoptimizer::Reason reason(instr->hydrogen_value()->position().raw(),
1149 instr->Mnemonic(), detail);
1152 DeoptComment(reason);
1155 Deoptimizer::JumpTableEntry table_entry(entry, reason, bailout_type,
1160 !table_entry.IsEquivalentTo(
jump_table_.last())) {
1173 const char* detail) {
1183 if (length == 0)
return;
1184 Handle<DeoptimizationInputData> data =
1187 Handle<ByteArray> translations =
1189 data->SetTranslationByteArray(*translations);
1191 data->SetOptimizationId(
Smi::FromInt(info_->optimization_id()));
1192 if (info_->IsOptimizing()) {
1195 data->SetSharedFunctionInfo(*info_->shared_info());
1209 data->SetOsrAstId(
Smi::FromInt(info_->osr_ast_id().ToInt()));
1213 for (
int i = 0;
i < length;
i++) {
1215 data->SetAstId(
i, env->ast_id());
1216 data->SetTranslationIndex(
i,
Smi::FromInt(env->translation_index()));
1217 data->SetArgumentsStackHeight(
i,
1221 code->set_deoptimization_data(*data);
1238 const ZoneList<Handle<JSFunction> >* inlined_closures =
1239 chunk()->inlined_closures();
1241 for (
int i = 0, length = inlined_closures->length();
1258 instr->pointer_map(), 0, Safepoint::kLazyDeopt);
1264 LPointerMap* pointers,
1265 Safepoint::Kind kind,
1267 Safepoint::DeoptMode deopt_mode) {
1269 const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands();
1270 Safepoint safepoint =
1271 safepoints_.DefineSafepoint(masm(), kind, arguments, deopt_mode);
1272 for (
int i = 0;
i < operands->length();
i++) {
1273 LOperand* pointer = operands->at(
i);
1274 if (pointer->IsStackSlot()) {
1275 safepoint.DefinePointerSlot(pointer->index(), zone());
1276 }
else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
1277 safepoint.DefinePointerRegister(
ToRegister(pointer), zone());
1284 Safepoint::DeoptMode
mode) {
1290 LPointerMap empty_pointers(zone());
1297 Safepoint::DeoptMode
mode) {
1304 masm()->positions_recorder()->RecordPosition(position);
1305 masm()->positions_recorder()->WriteRecordedPositions();
1309 static const char*
LabelType(LLabel* label) {
1310 if (label->is_loop_header())
return " (loop header)";
1311 if (label->is_osr_entry())
return " (OSR entry)";
1316 void LCodeGen::DoLabel(LLabel* label) {
1317 Comment(
";;; <@%d,#%d> -------------------- B%d%s --------------------",
1318 current_instruction_,
1319 label->hydrogen_value()->id(),
1322 __ bind(label->label());
1323 current_block_ = label->block_id();
1324 if (label->block()->predecessors()->length() > 1) {
1328 X87StackMap::const_iterator it =
x87_stack_map_.find(current_block_);
1348 LParallelMove* move = gap->GetParallelMove(inner_pos);
1354 void LCodeGen::DoInstructionGap(LInstructionGap* instr) {
1359 void LCodeGen::DoParameter(LParameter* instr) {
1364 void LCodeGen::DoCallStub(LCallStub* instr) {
1367 switch (instr->hydrogen()->major_key()) {
1368 case CodeStub::RegExpExec: {
1369 RegExpExecStub stub(isolate());
1373 case CodeStub::SubString: {
1374 SubStringStub stub(isolate());
1378 case CodeStub::StringCompare: {
1379 StringCompareStub stub(isolate());
1389 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
1394 void LCodeGen::DoModByPowerOf2I(LModByPowerOf2I* instr) {
// Emits dividend % 2^k in place. For a power-of-two divisor, |divisor|-1 is
// the bit mask selecting the remainder; the sign of the dividend decides
// which path is taken below.
// NOTE(review): this excerpt elides original lines 1397-1404, 1408,
// 1411-1417 and 1419-1420 (incl. the deopt check on the negative path and
// the final bind of `done`) — verify against the full file.
1395 Register dividend =
ToRegister(instr->dividend());
1396 int32_t divisor = instr->divisor();
1405 HMod* hmod = instr->hydrogen();
// Mask of the low bits: works for both positive and negative divisors.
1406 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1);
1407 Label dividend_is_not_negative, done;
1409 __ test(dividend, dividend);
// FIX: the label reference had been corrupted by a mis-decoded HTML entity
// ("&divide;nd_..." rendered as "÷nd_..."); restored to the declared label.
1410 __ j(
not_sign, &dividend_is_not_negative, Label::kNear);
// Negative dividend: mask the magnitude (negate/mask/negate in elided lines).
1413 __ and_(dividend, mask);
1418 __ jmp(&done, Label::kNear);
1421 __ bind(&dividend_is_not_negative);
// Non-negative dividend: the remainder is simply the masked low bits.
1422 __ and_(dividend, mask);
1427 void LCodeGen::DoModByConstI(LModByConstI* instr) {
// Emits dividend % constant using multiply-based truncating division
// (TruncatingDiv) followed by a multiply-and-subtract to recover the
// remainder.
// NOTE(review): excerpt elides original lines 1430-1436, 1438, 1440-1442,
// 1444, 1448 and the tail of the function — the bailout for divisor == 0
// and the minus-zero deopt are not visible here; confirm in the full file.
1428 Register dividend =
ToRegister(instr->dividend());
1429 int32_t divisor = instr->divisor();
// Quotient of |dividend| / |divisor| via reciprocal multiplication.
1437 __ TruncatingDiv(dividend,
Abs(divisor));
1439 __ mov(
eax, dividend);
1443 HMod* hmod = instr->hydrogen();
// Check for minus zero: a zero remainder with a negative dividend must
// presumably deopt (the DeoptimizeIf call is in the elided lines).
1445 Label remainder_not_zero;
1446 __ j(
not_zero, &remainder_not_zero, Label::kNear);
1447 __ cmp(dividend, Immediate(0));
1449 __ bind(&remainder_not_zero);
1454 void LCodeGen::DoModI(LModI* instr) {
1455 HMod* hmod = instr->hydrogen();
1457 Register left_reg =
ToRegister(instr->left());
1459 Register right_reg =
ToRegister(instr->right());
1462 Register result_reg =
ToRegister(instr->result());
1469 __ test(right_reg, Operand(right_reg));
1476 Label no_overflow_possible;
1478 __ j(
not_equal, &no_overflow_possible, Label::kNear);
1479 __ cmp(right_reg, -1);
1483 __ j(
not_equal, &no_overflow_possible, Label::kNear);
1484 __ Move(result_reg, Immediate(0));
1485 __ jmp(&done, Label::kNear);
1487 __ bind(&no_overflow_possible);
1495 Label positive_left;
1496 __ test(left_reg, Operand(left_reg));
1497 __ j(
not_sign, &positive_left, Label::kNear);
1499 __ test(result_reg, Operand(result_reg));
1501 __ jmp(&done, Label::kNear);
1502 __ bind(&positive_left);
1509 void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) {
// Emits dividend / 2^k into a separate result register using the standard
// shift-with-bias sequence for signed division.
// NOTE(review): excerpt elides original lines 1513, 1515-1516, 1518,
// 1520-1528, 1532-1533, 1535-1537, 1539, 1541-1542 and the function tail —
// the zero/minus-zero/overflow deopts and the actual sar by `shift` are
// among the missing lines.
1510 Register dividend =
ToRegister(instr->dividend());
1511 int32_t divisor = instr->divisor();
1512 Register result =
ToRegister(instr->result());
// Result must not alias the dividend: the sequence reads dividend after
// writing result.
1514 DCHECK(!result.is(dividend));
1517 HDiv* hdiv = instr->hydrogen();
1519 __ test(dividend, dividend);
// Exactness check: any bit inside the mask means a non-zero remainder
// (only relevant when the division must be exact — condition partly elided).
1529 divisor != 1 && divisor != -1) {
1530 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1);
1531 __ test(dividend, Immediate(mask));
1534 __ Move(result, dividend);
// Bias: add (2^k - 1) sign-correction so the shift rounds toward zero;
// sar(result, 31) materializes the sign mask.
1538 if (
shift > 1)
__ sar(result, 31);
1540 __ add(result, dividend);
// Negative power-of-two divisor: negate the shifted quotient.
1543 if (divisor < 0)
__ neg(result);
1547 void LCodeGen::DoDivByConstI(LDivByConstI* instr) {
1548 Register dividend =
ToRegister(instr->dividend());
1549 int32_t divisor = instr->divisor();
1558 HDiv* hdiv = instr->hydrogen();
1560 __ test(dividend, dividend);
1564 __ TruncatingDiv(dividend,
Abs(divisor));
1565 if (divisor < 0)
__ neg(
edx);
1570 __ sub(
eax, dividend);
1577 void LCodeGen::DoDivI(LDivI* instr) {
// Emits a general signed 32-bit division (idiv) with the usual guard
// sequence: divide-by-zero check, minus-zero check, and the
// kMinInt / -1 overflow check.
// NOTE(review): excerpt elides original lines 1582-1589, 1591-1595, 1600,
// 1602-1605, 1607, 1610, 1612-1619 and the tail (the DeoptimizeIf calls,
// the cdq/idiv themselves, and the remainder-based deopt) — verify against
// the full file.
1578 HBinaryOperation* hdiv = instr->hydrogen();
1579 Register dividend =
ToRegister(instr->dividend());
1580 Register divisor =
ToRegister(instr->divisor());
1581 Register remainder =
ToRegister(instr->temp());
// Divide-by-zero guard (deopt elided).
1590 __ test(divisor, divisor);
// Minus-zero guard: 0 / negative would produce -0 in JS semantics.
// FIX: restored label references corrupted by mis-decoded HTML entities
// ("&divide;nd_..." rendered as "÷nd_...").
1596 Label dividend_not_zero;
1597 __ test(dividend, dividend);
1598 __ j(
not_zero, &dividend_not_zero, Label::kNear);
1599 __ test(divisor, divisor);
1601 __ bind(&dividend_not_zero);
// Overflow guard: kMinInt / -1 overflows idiv.
1606 Label dividend_not_min_int;
1608 __ j(
not_zero, &dividend_not_min_int, Label::kNear);
1609 __ cmp(divisor, -1);
1611 __ bind(&dividend_not_min_int);
// Non-zero remainder handling (deopt for inexact division elided).
1620 __ test(remainder, remainder);
1626 void LCodeGen::DoFlooringDivByPowerOf2I(LFlooringDivByPowerOf2I* instr) {
// Emits Math.floor(dividend / 2^k) in place.
// NOTE(review): excerpt elides original lines 1629-1632, 1634-1646,
// 1648-1659, 1661 and 1665 onward — the negative-dividend rounding
// adjustment, the overflow deopt, and the bind of `done` are among the
// missing lines.
1627 Register dividend =
ToRegister(instr->dividend());
1628 int32_t divisor = instr->divisor();
// divisor == 1: result is the dividend itself; nothing to emit.
1633 if (divisor == 1)
return;
// divisor == -1: special-case negation with overflow handling; the elided
// lines appear to branch to not_kmin_int for the non-overflowing case.
1647 if (divisor == -1) {
1660 Label not_kmin_int, done;
// kMinInt dividend: the quotient is a compile-time constant.
1662 __ mov(dividend, Immediate(
kMinInt / divisor));
1663 __ jmp(&done, Label::kNear);
// FIX: restored label reference corrupted by a mis-decoded HTML entity
// ("&not;_kmin_int" rendered as "¬_kmin_int").
1664 __ bind(&not_kmin_int);
1670 void LCodeGen::DoFlooringDivByConstI(LFlooringDivByConstI* instr) {
1671 Register dividend =
ToRegister(instr->dividend());
1672 int32_t divisor = instr->divisor();
1681 HMathFloorOfDiv* hdiv = instr->hydrogen();
1683 __ test(dividend, dividend);
1691 __ TruncatingDiv(dividend,
Abs(divisor));
1692 if (divisor < 0)
__ neg(
edx);
1699 DCHECK(!temp.is(dividend) && !temp.is(
eax) && !temp.is(
edx));
1700 Label needs_adjustment, done;
1701 __ cmp(dividend, Immediate(0));
1702 __ j(divisor > 0 ?
less :
greater, &needs_adjustment, Label::kNear);
1703 __ TruncatingDiv(dividend,
Abs(divisor));
1704 if (divisor < 0)
__ neg(
edx);
1705 __ jmp(&done, Label::kNear);
1706 __ bind(&needs_adjustment);
1707 __ lea(temp, Operand(dividend, divisor > 0 ? 1 : -1));
1708 __ TruncatingDiv(temp,
Abs(divisor));
1709 if (divisor < 0)
__ neg(
edx);
1716 void LCodeGen::DoFlooringDivI(LFlooringDivI* instr) {
// Emits Math.floor(dividend / divisor) for a register divisor: idiv with
// the standard guards, then a sign-based quotient adjustment when the
// remainder is non-zero and the signs differ.
// NOTE(review): excerpt elides original lines 1722-1729, 1731-1735, 1740,
// 1742-1745, 1747, 1750, 1752-1758 and the tail (the DeoptimizeIf calls,
// cdq/idiv, and the declaration/bind of `done`) — verify against the full
// file.
1717 HBinaryOperation* hdiv = instr->hydrogen();
1718 Register dividend =
ToRegister(instr->dividend());
1719 Register divisor =
ToRegister(instr->divisor());
1720 Register remainder =
ToRegister(instr->temp());
1721 Register result =
ToRegister(instr->result());
// Divide-by-zero guard (deopt elided).
1730 __ test(divisor, divisor);
// Minus-zero guard.
// FIX: restored label references corrupted by mis-decoded HTML entities
// ("&divide;nd_..." rendered as "÷nd_...").
1736 Label dividend_not_zero;
1737 __ test(dividend, dividend);
1738 __ j(
not_zero, &dividend_not_zero, Label::kNear);
1739 __ test(divisor, divisor);
1741 __ bind(&dividend_not_zero);
// kMinInt / -1 overflow guard.
1746 Label dividend_not_min_int;
1748 __ j(
not_zero, &dividend_not_min_int, Label::kNear);
1749 __ cmp(divisor, -1);
1751 __ bind(&dividend_not_min_int);
// Floor adjustment: if remainder != 0 and sign(dividend) != sign(divisor),
// subtract one from the truncated quotient. xor+sar(31) yields -1 exactly
// when the signs differ, 0 otherwise.
1759 __ test(remainder, remainder);
1760 __ j(
zero, &done, Label::kNear);
1761 __ xor_(remainder, divisor);
1762 __ sar(remainder, 31);
1763 __ add(result, remainder);
1768 void LCodeGen::DoMulI(LMulI* instr) {
1770 LOperand* right = instr->right();
1776 if (right->IsConstantOperand()) {
1780 int constant =
ToInteger32(LConstantOperand::cast(right));
1781 if (constant == -1) {
1783 }
else if (constant == 0) {
1784 __ xor_(left, Operand(left));
1785 }
else if (constant == 2) {
1786 __ add(left, Operand(left));
1796 __ lea(left, Operand(left, left,
times_2, 0));
1802 __ lea(left, Operand(left, left,
times_4, 0));
1808 __ lea(left, Operand(left, left,
times_8, 0));
1814 __ imul(left, left, constant);
1818 __ imul(left, left, constant);
1821 if (instr->hydrogen()->representation().IsSmi()) {
1834 __ test(left, Operand(left));
1836 if (right->IsConstantOperand()) {
1837 if (
ToInteger32(LConstantOperand::cast(right)) < 0) {
1839 }
else if (
ToInteger32(LConstantOperand::cast(right)) == 0) {
1853 void LCodeGen::DoBitI(LBitI* instr) {
1854 LOperand* left = instr->left();
1855 LOperand* right = instr->right();
1856 DCHECK(left->Equals(instr->result()));
1857 DCHECK(left->IsRegister());
1859 if (right->IsConstantOperand()) {
1862 instr->hydrogen()->representation());
1863 switch (instr->op()) {
1864 case Token::BIT_AND:
1870 case Token::BIT_XOR:
1871 if (right_operand ==
int32_t(~0)) {
1882 switch (instr->op()) {
1883 case Token::BIT_AND:
1889 case Token::BIT_XOR:
1900 void LCodeGen::DoShiftI(LShiftI* instr) {
1901 LOperand* left = instr->left();
1902 LOperand* right = instr->right();
1903 DCHECK(left->Equals(instr->result()));
1904 DCHECK(left->IsRegister());
1905 if (right->IsRegister()) {
1908 switch (instr->op()) {
1917 if (instr->can_deopt()) {
1930 int value =
ToInteger32(LConstantOperand::cast(right));
1931 uint8_t shift_count =
static_cast<uint8_t
>(value & 0x1F);
1932 switch (instr->op()) {
1934 if (shift_count == 0 && instr->can_deopt()) {
1942 if (shift_count != 0) {
1947 if (shift_count != 0) {
1949 }
else if (instr->can_deopt()) {
1955 if (shift_count != 0) {
1956 if (instr->hydrogen_value()->representation().IsSmi() &&
1957 instr->can_deopt()) {
1958 if (shift_count != 1) {
1976 void LCodeGen::DoSubI(LSubI* instr) {
1977 LOperand* left = instr->left();
1978 LOperand* right = instr->right();
1979 DCHECK(left->Equals(instr->result()));
1981 if (right->IsConstantOperand()) {
1983 ToImmediate(right, instr->hydrogen()->representation()));
1993 void LCodeGen::DoConstantI(LConstantI* instr) {
// Materializes an integer constant into the result register.
// NOTE(review): the closing brace (original line 1995) is elided in this
// excerpt.
1994 __ Move(
ToRegister(instr->result()), Immediate(instr->value()));
1998 void LCodeGen::DoConstantS(LConstantS* instr) {
// Materializes a Smi constant into the result register (same Move-immediate
// shape as DoConstantI).
// NOTE(review): the closing brace (original line 2000) is elided in this
// excerpt.
1999 __ Move(
ToRegister(instr->result()), Immediate(instr->value()));
2003 void LCodeGen::DoConstantD(LConstantD* instr) {
2004 double v = instr->value();
2005 uint64_t int_val = bit_cast<uint64_t, double>(v);
2008 DCHECK(instr->result()->IsDoubleRegister());
2010 __ push(Immediate(upper));
2011 __ push(Immediate(lower));
2018 void LCodeGen::DoConstantE(LConstantE* instr) {
// Materializes an external-reference constant: lea of the static variable's
// address into the result register.
// NOTE(review): the closing brace (original line 2020) is elided in this
// excerpt.
2019 __ lea(
ToRegister(instr->result()), Operand::StaticVariable(instr->value()));
2023 void LCodeGen::DoConstantT(LConstantT* instr) {
2025 Handle<Object>
object = instr->value(isolate());
2027 __ LoadObject(reg,
object);
2031 void LCodeGen::DoMapEnumLength(LMapEnumLength* instr) {
2032 Register result =
ToRegister(instr->result());
2034 __ EnumLength(result,
map);
2038 void LCodeGen::DoDateField(LDateField* instr) {
2040 Register result =
ToRegister(instr->result());
2041 Register scratch =
ToRegister(instr->temp());
2042 Smi* index = instr->index();
2043 Label runtime, done;
2044 DCHECK(
object.is(result));
2052 if (index->value() == 0) {
2056 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
2057 __ mov(scratch, Operand::StaticVariable(stamp));
2062 __ jmp(&done, Label::kNear);
2065 __ PrepareCallCFunction(2, scratch);
2066 __ mov(Operand(
esp, 0),
object);
2068 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
2077 if (index->IsConstantOperand()) {
2093 void LCodeGen::DoSeqStringGetChar(LSeqStringGetChar* instr) {
2095 Register result =
ToRegister(instr->result());
2096 Register
string =
ToRegister(instr->string());
2098 if (FLAG_debug_code) {
2107 ? one_byte_seq_type : two_byte_seq_type));
2108 __ Check(
equal, kUnexpectedStringType);
2114 __ movzx_b(result, operand);
2116 __ movzx_w(result, operand);
2121 void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) {
2123 Register
string =
ToRegister(instr->string());
2125 if (FLAG_debug_code) {
2132 ? one_byte_seq_type : two_byte_seq_type;
2133 __ EmitSeqStringSetCharCheck(
string, index, value, encoding_mask);
2137 if (instr->value()->IsConstantOperand()) {
2143 __ mov_b(operand,
static_cast<int8_t
>(value));
2146 __ mov_w(operand,
static_cast<int16_t>(value));
2151 __ mov_b(operand, value);
2153 __ mov_w(operand, value);
2159 void LCodeGen::DoAddI(LAddI* instr) {
2160 LOperand* left = instr->left();
2161 LOperand* right = instr->right();
2163 if (LAddI::UseLea(instr->hydrogen()) && !left->Equals(instr->result())) {
2164 if (right->IsConstantOperand()) {
2166 instr->hydrogen()->representation());
2173 if (right->IsConstantOperand()) {
2175 ToImmediate(right, instr->hydrogen()->representation()));
2186 void LCodeGen::DoMathMinMax(LMathMinMax* instr) {
2187 LOperand* left = instr->left();
2188 LOperand* right = instr->right();
2189 DCHECK(left->Equals(instr->result()));
2190 HMathMinMax::Operation operation = instr->hydrogen()->operation();
2191 if (instr->hydrogen()->representation().IsSmiOrInteger32()) {
2193 Condition condition = (operation == HMathMinMax::kMathMin)
2196 if (right->IsConstantOperand()) {
2198 Immediate immediate =
ToImmediate(LConstantOperand::cast(instr->right()),
2199 instr->hydrogen()->representation());
2200 __ cmp(left_op, immediate);
2201 __ j(condition, &return_left, Label::kNear);
2202 __ mov(left_op, immediate);
2206 __ cmp(left_reg, right_op);
2207 __ j(condition, &return_left, Label::kNear);
2208 __ mov(left_reg, right_op);
2210 __ bind(&return_left);
2212 DCHECK(instr->hydrogen()->representation().IsDouble());
2213 Label check_nan_left, check_zero, return_left, return_right;
2223 __ j(
equal, &check_zero, Label::kNear);
2224 __ j(condition, &return_left, Label::kNear);
2225 __ jmp(&return_right, Label::kNear);
2227 __ bind(&check_zero);
2233 if (operation == HMathMinMax::kMathMin) {
2236 Register scratch_reg =
ToRegister(instr->temp());
2242 __ pop(scratch_reg);
2245 __ pop(scratch_reg);
2251 __ jmp(&return_left, Label::kNear);
2253 __ bind(&check_nan_left);
2259 __ bind(&return_right);
2261 X87Mov(left_reg, right_reg);
2263 __ bind(&return_left);
2268 void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
2272 if (instr->op() != Token::MOD) {
2275 switch (instr->op()) {
2290 __ PrepareCallCFunction(4,
eax);
2297 ExternalReference::mod_two_doubles_operation(isolate()),
2312 __ fstp_d(Operand(
esp, 0));
2313 __ fld_d(Operand(
esp, 0));
2318 void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
2325 CodeFactory::BinaryOpIC(isolate(), instr->op(),
NO_OVERWRITE).code();
// EmitBranch / EmitFalseBranch (presumably -- the template signatures are
// partially elided): emit a conditional jump to the true/false destination
// blocks, falling through when the target is the next emitted block.
// NOTE(review): lossy fragment; intervening original lines are missing.
2330 template<
class InstrType>
2332 int left_block = instr->TrueDestination(chunk_);
2333 int right_block = instr->FalseDestination(chunk_);
2335 int next_block = GetNextEmittedBlock();
2339 }
else if (left_block == next_block) {
2341 }
else if (right_block == next_block) {
2342 __ j(
cc, chunk_->GetAssemblyLabel(left_block));
2344 __ j(
cc, chunk_->GetAssemblyLabel(left_block));
2345 __ jmp(chunk_->GetAssemblyLabel(right_block));
// Second template: unconditional false-branch helper (cc == no_condition
// case jumps directly; otherwise conditional jump to the false block).
2350 template<
class InstrType>
2352 int false_block = instr->FalseDestination(chunk_);
2354 __ jmp(chunk_->GetAssemblyLabel(false_block));
2356 __ j(
cc, chunk_->GetAssemblyLabel(false_block));
// DoBranch: branch on the ToBoolean value of the input, specialized by the
// hydrogen representation/type where known, falling back to the generic
// ToBooleanStub::Types dispatch for tagged values.
// NOTE(review): lossy fragment; many deopt/branch lines are elided.
2361 void LCodeGen::DoBranch(LBranch* instr) {
2362 Representation r = instr->hydrogen()->value()->representation();
2363 if (r.IsSmiOrInteger32()) {
// Integer/Smi: branch on zero/non-zero.
2365 __ test(reg, Operand(reg));
2367 }
else if (r.IsDouble()) {
2376 HType type = instr->hydrogen()->value()->type();
2377 if (type.IsBoolean()) {
2378 DCHECK(!info()->IsStub());
2379 __ cmp(reg, factory()->true_value());
2381 }
else if (type.IsSmi()) {
2382 DCHECK(!info()->IsStub());
2383 __ test(reg, Operand(reg));
2385 }
else if (type.IsJSArray()) {
2386 DCHECK(!info()->IsStub());
2388 }
else if (type.IsHeapNumber()) {
2390 }
else if (type.IsString()) {
2391 DCHECK(!info()->IsStub());
// Generic path: test each ToBoolean case the type-feedback says can occur.
2395 ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
2400 __ cmp(reg, factory()->undefined_value());
2401 __ j(
equal, instr->FalseLabel(chunk_));
2405 __ cmp(reg, factory()->true_value());
2406 __ j(
equal, instr->TrueLabel(chunk_));
2408 __ cmp(reg, factory()->false_value());
2409 __ j(
equal, instr->FalseLabel(chunk_));
2413 __ cmp(reg, factory()->null_value());
2414 __ j(
equal, instr->FalseLabel(chunk_));
// Smi case: zero is false, any other Smi is true.
2419 __ test(reg, Operand(reg));
2420 __ j(
equal, instr->FalseLabel(chunk_));
2421 __ JumpIfSmi(reg, instr->TrueLabel(chunk_));
2422 }
else if (expected.NeedsMap()) {
2429 if (expected.NeedsMap()) {
2434 if (expected.CanBeUndetectable()) {
2455 __ jmp(instr->FalseLabel(chunk_));
// NOTE(review): "¬_string" below is mojibake for "&not_string"
// (HTML entity &not;) -- left untouched; fix at re-extraction.
2456 __ bind(¬_string);
2462 __ j(
equal, instr->TrueLabel(chunk_));
2467 Label not_heap_number;
2469 factory()->heap_number_map());
2474 __ j(
zero, instr->FalseLabel(chunk_));
2475 __ jmp(instr->TrueLabel(chunk_));
// NOTE(review): "¬_heap_number" is mojibake for "&not_heap_number".
2476 __ bind(¬_heap_number);
2479 if (!expected.IsGeneric()) {
// DoClobberDoubles / DoGoto: bodies fully elided by the lossy extraction.
// The trailing case labels presumably belong to TokenToCondition
// (mapping comparison tokens to x86 condition codes) -- TODO confirm.
2496 void LCodeGen::DoClobberDoubles(LClobberDoubles* instr) {
2500 void LCodeGen::DoGoto(LGoto* instr) {
2509 case Token::EQ_STRICT:
2513 case Token::NE_STRICT:
2529 case Token::INSTANCEOF:
// DoCompareNumericAndBranch: numeric compare-and-branch. When both sides
// are constants the comparison is evaluated at compile time and an
// unconditional jump to the winning block is emitted; otherwise a runtime
// cmp is emitted (constant side folded into an Immediate when possible).
// NOTE(review): lossy fragment; double path and the jumps are elided.
2537 void LCodeGen::DoCompareNumericAndBranch(LCompareNumericAndBranch* instr) {
2538 LOperand* left = instr->left();
2539 LOperand* right = instr->right();
2541 instr->is_double() ||
2546 if (left->IsConstantOperand() && right->IsConstantOperand()) {
2548 double left_val =
ToDouble(LConstantOperand::cast(left));
2549 double right_val =
ToDouble(LConstantOperand::cast(right));
2550 int next_block =
EvalComparison(instr->op(), left_val, right_val) ?
2551 instr->TrueDestination(chunk_) : instr->FalseDestination(chunk_);
2554 if (instr->is_double()) {
2561 if (right->IsConstantOperand()) {
2563 ToImmediate(right, instr->hydrogen()->representation()));
2564 }
else if (left->IsConstantOperand()) {
// Constant on the left: operands swapped, so the condition is
// (presumably) reversed in the elided lines -- TODO confirm.
2566 ToImmediate(left, instr->hydrogen()->representation()));
// DoCmpObjectEqAndBranch: reference-equality branch; compares against a
// handle constant via CmpObject, or a register/memory operand via cmp.
// NOTE(review): lossy fragment; the EmitBranch calls are elided.
2578 void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
2581 if (instr->right()->IsConstantOperand()) {
2582 Handle<Object> right =
ToHandle(LConstantOperand::cast(instr->right()));
2583 __ CmpObject(left, right);
2585 Operand right =
ToOperand(instr->right());
2586 __ cmp(left, right);
// DoCmpHoleAndBranch: tagged values are compared against the_hole directly.
2592 void LCodeGen::DoCmpHoleAndBranch(LCmpHoleAndBranch* instr) {
2593 if (instr->hydrogen()->representation().IsTagged()) {
2594 Register input_reg =
ToRegister(instr->object());
2595 __ cmp(input_reg, factory()->the_hole_value());
// DoCompareMinusZeroAndBranch: tests for -0.0. For tagged input the map is
// checked against heap_number_map and the mantissa word against 0.
// NOTE(review): lossy fragment; the double-register path is elided.
2623 void LCodeGen::DoCompareMinusZeroAndBranch(LCompareMinusZeroAndBranch* instr) {
2624 Representation rep = instr->hydrogen()->value()->representation();
2625 DCHECK(!rep.IsInteger32());
2627 if (rep.IsDouble()) {
2634 Handle<Map>
map = masm()->isolate()->factory()->heap_number_map();
2640 Immediate(0x00000000));
// EmitIsObject helper fragment: Smis and null are handled up front.
2648 Label* is_not_object,
2650 __ JumpIfSmi(input, is_not_object);
2652 __ cmp(input, isolate()->factory()->null_value());
// DoIsObjectAndBranch / EmitIsString / DoIsStringAndBranch: object- and
// string-ness branches built on the Emit* helpers above (bodies elided).
// NOTE(review): lossy fragment.
2669 void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
2674 reg, temp, instr->FalseLabel(chunk_), instr->TrueLabel(chunk_));
2682 Label* is_not_string,
2685 __ JumpIfSmi(input, is_not_string);
// IsObjectStringType sets a condition code the caller branches on.
2688 Condition cond = masm_->IsObjectStringType(input, temp1, temp1);
2694 void LCodeGen::DoIsStringAndBranch(LIsStringAndBranch* instr) {
2699 instr->hydrogen()->value()->type().IsHeapObject()
2703 reg, temp, instr->FalseLabel(chunk_), check_needed);
// DoIsSmiAndBranch / DoIsUndetectableAndBranch / DoStringCompareAndBranch:
// small predicate branches; string comparison goes through the CompareIC.
// NOTE(review): lossy fragment; the Token::EQ_STRICT case label presumably
// belongs to an elided ComputeCompareCondition helper -- TODO confirm.
2709 void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
2710 Operand input =
ToOperand(instr->value());
2717 void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
2721 if (!instr->hydrogen()->value()->type().IsHeapObject()) {
// A Smi can never be undetectable.
2723 __ JumpIfSmi(input, instr->FalseLabel(chunk_));
2734 case Token::EQ_STRICT:
2752 void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
2755 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
// DoHasInstanceTypeAndBranch: branch on the instance type of a heap object
// (Smis go straight to the false label when not statically excluded).
// NOTE(review): lossy fragment throughout this region.
2785 void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
2789 if (!instr->hydrogen()->value()->type().IsHeapObject()) {
2790 __ JumpIfSmi(input, instr->FalseLabel(chunk_));
2793 __ CmpObjectType(input,
TestType(instr->hydrogen()), temp);
// DoGetCachedArrayIndex: extracts the array index cached in a string hash.
2798 void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
2800 Register result =
ToRegister(instr->result());
2802 __ AssertString(input);
2805 __ IndexFromHash(result, result);
2809 void LCodeGen::DoHasCachedArrayIndexAndBranch(
2810 LHasCachedArrayIndexAndBranch* instr) {
// EmitClassOfTest fragment: classifies an object by class name, with
// special-casing for "Function" and "Object" class names; ends comparing
// the constructor's shared name against class_name.
// NOTE(review): lossy fragment; the helper's signature is partially elided.
2823 Handle<String>class_name,
2828 DCHECK(!input.is(temp2));
2830 __ JumpIfSmi(input, is_false);
2832 if (
String::Equals(isolate()->factory()->Function_string(), class_name)) {
2862 if (
String::Equals(class_name, isolate()->factory()->Object_string())) {
2879 __ cmp(temp, class_name);
2884 void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
2889 Handle<String> class_name = instr->hydrogen()->class_name();
2892 class_name, input, temp, temp2);
2898 void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
// DoInstanceOf: calls the (elided) InstanceofStub, then materializes a
// boolean: stub result zero => true_value, otherwise false_value.
// NOTE(review): lossy fragment; stub setup lines are missing.
2905 void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
2911 Label true_value, done;
2913 __ j(
zero, &true_value, Label::kNear);
2914 __ mov(
ToRegister(instr->result()), factory()->false_value());
2915 __ jmp(&done, Label::kNear);
2916 __ bind(&true_value);
2917 __ mov(
ToRegister(instr->result()), factory()->true_value());
// DoInstanceOfKnownGlobal: instanceof against a known global function,
// with an inline map-based cache (patched cell) and a deferred slow path.
// Null and string inputs short-circuit to false; everything else falls
// through to the deferred stub call.
// NOTE(review): lossy fragment; the deferred-class boilerplate and several
// branches are elided.
2922 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
2925 DeferredInstanceOfKnownGlobal(
LCodeGen* codegen,
2926 LInstanceOfKnownGlobal* instr,
2930 codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
2932 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
2933 Label* map_check() {
return &map_check_; }
2935 LInstanceOfKnownGlobal* instr_;
2939 DeferredInstanceOfKnownGlobal* deferred;
2940 deferred =
new(zone()) DeferredInstanceOfKnownGlobal(
this, instr,
x87_stack_);
2942 Label done, false_result;
2943 Register
object =
ToRegister(instr->value());
2947 __ JumpIfSmi(
object, &false_result, Label::kNear);
// Inline cache: compare the object's map against a patchable cell; the
// cell is created holding the_hole and patched by the deferred code.
2955 __ bind(deferred->map_check());
2956 Handle<Cell> cache_cell = factory()->NewCell(factory()->the_hole_value());
2957 __ cmp(
map, Operand::ForCell(cache_cell));
2959 __ mov(
eax, factory()->the_hole_value());
2960 __ jmp(&done, Label::kNear);
2964 __ bind(&cache_miss);
2966 __ cmp(
object, factory()->null_value());
2967 __ j(
equal, &false_result, Label::kNear);
// Strings are never instances of a function.
2970 Condition is_string = masm_->IsObjectStringType(
object, temp, temp);
2971 __ j(is_string, &false_result, Label::kNear);
2974 __ jmp(deferred->entry());
2976 __ bind(&false_result);
2977 __ mov(
ToRegister(instr->result()), factory()->false_value());
2981 __ bind(deferred->exit());
// DoDeferredInstanceOfKnownGlobal: slow path for the inline instanceof
// cache above. Saves registers, computes the delta used to patch the
// inline map-check site, calls InstanceofStub, and stores the result
// (eax) back into its safepoint slot.
// NOTE(review): lossy fragment; flags setup and the actual call are elided.
2988 PushSafepointRegistersScope
scope(
this);
2997 InstanceofStub stub(isolate(),
flags);
// kAdditionalDelta accounts for the code between this point and the
// patch site -- presumably instruction-size dependent; TODO confirm.
3006 static const int kAdditionalDelta = 13;
3007 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
3008 __ mov(temp, Immediate(delta));
3009 __ StoreToSafepointRegisterSlot(temp, temp);
3016 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
3017 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
3020 __ StoreToSafepointRegisterSlot(
eax,
eax);
// DoCmpT: tagged comparison through the CompareIC, then materializes the
// boolean result (true_value/false_value) from the IC's condition.
// NOTE(review): lossy fragment; the IC call itself is elided.
3024 void LCodeGen::DoCmpT(LCmpT* instr) {
3027 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
3031 Label true_value, done;
3033 __ j(condition, &true_value, Label::kNear);
3034 __ mov(
ToRegister(instr->result()), factory()->false_value());
3035 __ jmp(&done, Label::kNear);
3036 __ bind(&true_value);
3037 __ mov(
ToRegister(instr->result()), factory()->true_value());
// EmitReturn fragment (presumably -- the signature starts before these
// lines are resumed): pops the frame and returns, handling both constant
// and dynamic parameter counts, plus the optional dynamic-frame-alignment
// word (checked against kAlignmentZapValue under FLAG_debug_code).
// NOTE(review): lossy fragment.
3043 int extra_value_count = dynamic_frame_alignment ? 2 : 1;
3045 if (instr->has_constant_parameter_count()) {
3046 int parameter_count =
ToInteger32(instr->constant_parameter_count());
3047 if (dynamic_frame_alignment && FLAG_debug_code) {
3051 __ Assert(
equal, kExpectedAlignmentMarker);
3055 Register reg =
ToRegister(instr->parameter_count());
// Pick a scratch for the return address that cannot alias the count reg.
3058 Register return_addr_reg = reg.is(
ecx) ?
ebx :
ecx;
3059 if (dynamic_frame_alignment && FLAG_debug_code) {
3060 DCHECK(extra_value_count == 2);
3064 __ Assert(
equal, kExpectedAlignmentMarker);
3068 __ pop(return_addr_reg);
3069 if (dynamic_frame_alignment) {
3074 __ jmp(return_addr_reg);
// DoReturn: optional trace call, frame teardown (recording the no-frame
// range for the deoptimizer), and padding check before returning.
// NOTE(review): lossy fragment; the EmitReturn call is elided.
3079 void LCodeGen::DoReturn(LReturn* instr) {
3080 if (FLAG_trace && info()->IsOptimizing()) {
3094 int no_frame_start = -1;
3098 no_frame_start = masm_->pc_offset();
3103 __ j(
equal, &no_padding, Label::kNear);
3106 __ bind(&no_padding);
3110 if (no_frame_start != -1) {
3111 info()->AddNoFrameRange(no_frame_start, masm_->pc_offset());
// DoLoadGlobalCell: loads a global from its property cell, optionally
// deopting on the_hole (hole check lines elided).
// NOTE(review): lossy fragment.
3116 void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
3117 Register result =
ToRegister(instr->result());
3118 __ mov(result, Operand::ForCell(instr->hydrogen()->cell().handle()));
3119 if (instr->hydrogen()->RequiresHoleCheck()) {
3120 __ cmp(result, factory()->the_hole_value());
// EmitVectorLoadICRegisters<T> fragment: materializes the feedback vector
// for vector-IC loads.
3129 Register vector =
ToRegister(instr->temp_vector());
3131 __ mov(vector, instr->hydrogen()->feedback_vector());
3139 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
3146 if (FLAG_vector_ics) {
3147 EmitVectorLoadICRegisters<LLoadGlobalGeneric>(instr);
3150 Handle<Code> ic = CodeFactory::LoadIC(isolate(),
mode).code();
// DoStoreGlobalCell: stores into a global property cell; when a hole check
// is required, deopts (elided) if the cell currently holds the_hole.
// NOTE(review): lossy fragment.
3155 void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
3157 Handle<PropertyCell> cell_handle = instr->hydrogen()->cell().handle();
3163 if (instr->hydrogen()->RequiresHoleCheck()) {
3164 __ cmp(Operand::ForCell(cell_handle), factory()->the_hole_value());
3169 __ mov(Operand::ForCell(cell_handle), value);
// DoLoadContextSlot: loads a context slot, with hole handling that either
// deopts or substitutes undefined depending on DeoptimizesOnHole().
3174 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
3175 Register context =
ToRegister(instr->context());
3176 Register result =
ToRegister(instr->result());
3179 if (instr->hydrogen()->RequiresHoleCheck()) {
3180 __ cmp(result, factory()->the_hole_value());
3181 if (instr->hydrogen()->DeoptimizesOnHole()) {
3186 __ mov(result, factory()->undefined_value());
3187 __ bind(&is_not_hole);
// DoStoreContextSlot: stores into a context slot with optional hole check
// and a write barrier (RecordWriteContextSlot) when required.
// NOTE(review): lossy fragment; target computation lines are elided.
3193 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
3194 Register context =
ToRegister(instr->context());
3197 Label skip_assignment;
3200 if (instr->hydrogen()->RequiresHoleCheck()) {
3201 __ cmp(target, factory()->the_hole_value());
3202 if (instr->hydrogen()->DeoptimizesOnHole()) {
3209 __ mov(target, value);
3210 if (instr->hydrogen()->NeedsWriteBarrier()) {
// Smi check can be omitted when the value is statically a heap object.
3212 instr->hydrogen()->value()->type().IsHeapObject()
3216 __ RecordWriteContextSlot(context, offset, value, temp,
kSaveFPRegs,
3220 __ bind(&skip_assignment);
// DoLoadNamedField: loads a named field -- external memory, double field,
// or in-object/out-of-object tagged field, per the HObjectAccess.
// NOTE(review): lossy fragment.
3224 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
3225 HObjectAccess access = instr->hydrogen()->access();
3226 int offset = access.offset();
3228 if (access.IsExternalMemory()) {
3229 Register result =
ToRegister(instr->result());
3230 MemOperand operand = instr->object()->IsConstantOperand()
3232 LConstantOperand::cast(instr->object())))
3234 __ Load(result, operand, access.representation());
3238 Register
object =
ToRegister(instr->object());
3239 if (instr->hydrogen()->representation().IsDouble()) {
3244 Register result =
ToRegister(instr->result());
3245 if (!access.IsInobject()) {
3249 __ Load(result,
FieldOperand(
object, offset), access.representation());
// EmitPushTaggedOperand fragment (presumably -- signature elided): pushes
// a constant handle, register, or stack operand.
3254 DCHECK(!operand->IsDoubleRegister());
3255 if (operand->IsConstantOperand()) {
3256 Handle<Object>
object =
ToHandle(LConstantOperand::cast(operand));
3258 if (object->IsSmi()) {
3263 }
else if (operand->IsRegister()) {
// DoLoadNamedGeneric / DoLoadFunctionPrototype / DoLoadRoot: IC-based
// named load, prototype load with hole comparison, and root-list load.
// NOTE(review): lossy fragment.
3271 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
3277 if (FLAG_vector_ics) {
3278 EmitVectorLoadICRegisters<LLoadNamedGeneric>(instr);
3280 Handle<Code> ic = CodeFactory::LoadIC(isolate(),
NOT_CONTEXTUAL).code();
3285 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
3286 Register
function =
ToRegister(instr->function());
3288 Register result =
ToRegister(instr->result());
3295 __ cmp(Operand(result), Immediate(factory()->the_hole_value()));
3311 void LCodeGen::DoLoadRoot(LLoadRoot* instr) {
3312 Register result =
ToRegister(instr->result());
3313 __ LoadRoot(result, instr->index());
// DoAccessArgumentsAt: reads an element of the (reversed) arguments area.
// Constant index+length are folded into a fixed offset; otherwise the
// offset is computed as length - index at runtime.
// NOTE(review): lossy fragment; the actual loads are elided.
3317 void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
3318 Register arguments =
ToRegister(instr->arguments());
3319 Register result =
ToRegister(instr->result());
3320 if (instr->length()->IsConstantOperand() &&
3321 instr->index()->IsConstantOperand()) {
3322 int const_index =
ToInteger32(LConstantOperand::cast(instr->index()));
3323 int const_length =
ToInteger32(LConstantOperand::cast(instr->length()));
// +1 skips the receiver slot in the arguments area.
3324 int index = (const_length - const_index) + 1;
3327 Register length =
ToRegister(instr->length());
3328 Operand index =
ToOperand(instr->index());
3331 __ sub(length, index);
// DoLoadKeyedExternalArray fragment (presumably -- the signature resumes
// above this point): loads from a typed/external array with per-kind
// movsx/movzx/mov, deopting (elided) on uint32 overflow after the test.
// Followed by DoLoadKeyedFixedDoubleArray / FixedArray fragments with
// their hole checks.
// NOTE(review): lossy fragment.
3339 LOperand* key = instr->key();
3340 if (!key->IsConstantOperand() &&
3348 instr->hydrogen()->key()->representation(),
3350 instr->base_offset()));
3358 Register result(
ToRegister(instr->result()));
3359 switch (elements_kind) {
3362 __ movsx_b(result, operand);
3368 __ movzx_b(result, operand);
3372 __ movsx_w(result, operand);
3376 __ movzx_w(result, operand);
3380 __ mov(result, operand);
3384 __ mov(result, operand);
// uint32 result with sign bit set cannot be represented as int32.
3386 __ test(result, Operand(result));
3410 if (instr->hydrogen()->RequiresHoleCheck()) {
3412 instr->elements(), instr->key(),
3413 instr->hydrogen()->key()->representation(),
3423 instr->hydrogen()->key()->representation(),
3425 instr->base_offset());
3431 Register result =
ToRegister(instr->result());
3436 instr->hydrogen()->key()->representation(),
3440 if (instr->hydrogen()->RequiresHoleCheck()) {
3445 __ cmp(result, factory()->the_hole_value());
// DoLoadKeyed: dispatches to the typed/double/tagged keyed-load helpers.
// BuildFastArrayOperand (presumably -- name elided with the signature):
// builds an Operand for element access; constant keys are folded into the
// displacement (aborting when the index is too large), Smi keys adjust the
// scale by one.
// NOTE(review): lossy fragment.
3452 void LCodeGen::DoLoadKeyed(LLoadKeyed* instr) {
3453 if (instr->is_typed_elements()) {
3455 }
else if (instr->hydrogen()->representation().IsDouble()) {
3464 LOperand* elements_pointer,
3466 Representation key_representation,
3469 Register elements_pointer_reg =
ToRegister(elements_pointer);
3471 int shift_size = element_shift_size;
3472 if (key->IsConstantOperand()) {
3473 int constant_value =
ToInteger32(LConstantOperand::cast(key));
// Indices needing the top nibble would overflow the displacement.
3474 if (constant_value & 0xF0000000) {
3475 Abort(kArrayIndexConstantValueTooBig);
3477 return Operand(elements_pointer_reg,
3478 ((constant_value) << shift_size)
3482 if (key_representation.IsSmi() && (shift_size >= 1)) {
3486 return Operand(elements_pointer_reg,
// DoLoadKeyedGeneric: keyed load via the KeyedLoadIC.
// DoArgumentsElements: finds the arguments frame -- uses ebp directly
// unless an arguments-adaptor frame is detected (compare elided).
// NOTE(review): lossy fragment.
3494 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
3499 if (FLAG_vector_ics) {
3500 EmitVectorLoadICRegisters<LLoadKeyedGeneric>(instr);
3503 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
3508 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
3509 Register result =
ToRegister(instr->result());
3511 if (instr->hydrogen()->from_inlined()) {
3515 Label done, adapted;
3518 __ cmp(Operand(result),
3520 __ j(
equal, &adapted, Label::kNear);
3523 __ mov(result, Operand(
ebp));
3524 __ jmp(&done, Label::kNear);
// DoArgumentsLength: the formal parameter count is used unless the frame
// is an adaptor frame, in which case the actual (Smi) count is loaded and
// untagged.
// NOTE(review): lossy fragment; the frame-type compare is elided.
3537 void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
3538 Operand elem =
ToOperand(instr->elements());
3539 Register result =
ToRegister(instr->result());
3545 __ mov(result, Immediate(
scope()->num_parameters()));
3546 __ j(
equal, &done, Label::kNear);
3550 __ mov(result, Operand(result,
3552 __ SmiUntag(result);
// DoWrapReceiver: sloppy-mode receiver normalization -- strict/native
// functions skip wrapping; null/undefined receivers are replaced by the
// global receiver; other primitives go through the (elided) object check.
// NOTE(review): lossy fragment.
3559 void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
3560 Register receiver =
ToRegister(instr->receiver());
3561 Register
function =
ToRegister(instr->function());
3566 Label receiver_ok, global_object;
3568 Register scratch =
ToRegister(instr->temp());
3570 if (!instr->hydrogen()->known_function()) {
// Strict-mode or native functions take the receiver as-is.
3575 __ test_b(
FieldOperand(scratch, SharedFunctionInfo::kStrictModeByteOffset),
3580 __ test_b(
FieldOperand(scratch, SharedFunctionInfo::kNativeByteOffset),
3586 __ cmp(receiver, factory()->null_value());
3587 __ j(
equal, &global_object, Label::kNear);
3588 __ cmp(receiver, factory()->undefined_value());
3589 __ j(
equal, &global_object, Label::kNear);
3597 __ jmp(&receiver_ok, Label::kNear);
3598 __ bind(&global_object);
3601 __ mov(receiver, Operand(receiver, global_offset));
3604 __ bind(&receiver_ok);
// DoApplyArguments: Function.prototype.apply support -- checks the
// argument count against kArgumentsLimit (deopt elided), pushes the
// arguments in a loop (elided), and invokes the function with a lazy-deopt
// safepoint generator.
// NOTE(review): lossy fragment.
3608 void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
3609 Register receiver =
ToRegister(instr->receiver());
3610 Register
function =
ToRegister(instr->function());
3611 Register length =
ToRegister(instr->length());
3612 Register elements =
ToRegister(instr->elements());
3620 __ cmp(length, kArgumentsLimit);
3624 __ mov(receiver, length);
// length doubles as the loop counter; zero arguments skip the copy loop.
3630 __ test(length, Operand(length));
3631 __ j(
zero, &invoke, Label::kNear);
3639 DCHECK(instr->HasPointerMap());
3640 LPointerMap* pointers = instr->pointer_map();
3642 this, pointers, Safepoint::kLazyDeopt);
3643 ParameterCount actual(
eax);
3644 __ InvokeFunction(
function, actual,
CALL_FUNCTION, safepoint_generator);
// Small one-liner LIR ops: DoDebugBreak, DoPushArgument, DoDrop,
// DoThisFunction, DoContext (context comes from the frame when
// optimizing), DoDeclareGlobals (pushes the declaration pairs for the
// elided runtime call).
// NOTE(review): lossy fragment; most bodies are elided.
3648 void LCodeGen::DoDebugBreak(LDebugBreak* instr) {
3653 void LCodeGen::DoPushArgument(LPushArgument* instr) {
3654 LOperand* argument = instr->value();
3659 void LCodeGen::DoDrop(LDrop* instr) {
3660 __ Drop(instr->count());
3664 void LCodeGen::DoThisFunction(LThisFunction* instr) {
3665 Register result =
ToRegister(instr->result());
3670 void LCodeGen::DoContext(LContext* instr) {
3671 Register result =
ToRegister(instr->result());
3672 if (info()->IsOptimizing()) {
3681 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
3684 __ push(Immediate(instr->hydrogen()->pairs()));
// CallKnownFunction (presumably -- signature partially elided): calls a
// statically-known JSFunction. Direct call when arity matches or the
// function does not adapt arguments (self-calls special-cased); otherwise
// goes through InvokeFunction with an adaptor.
// NOTE(review): lossy fragment.
3691 int formal_parameter_count,
3693 LInstruction* instr,
3695 bool dont_adapt_arguments =
3697 bool can_invoke_directly =
3698 dont_adapt_arguments || formal_parameter_count == arity;
3700 if (can_invoke_directly) {
3702 __ LoadHeapObject(
edi,
function);
3710 if (dont_adapt_arguments) {
// A self-call can reuse the current code object (elided).
3715 if (
function.is_identical_to(info()->closure())) {
3723 LPointerMap* pointers = instr->pointer_map();
3725 this, pointers, Safepoint::kLazyDeopt);
3726 ParameterCount count(arity);
3727 ParameterCount expected(formal_parameter_count);
3728 __ InvokeFunction(
function, expected, count,
CALL_FUNCTION, generator);
// DoTailCallThroughMegamorphicCache: probes the megamorphic stub cache
// with fixed scratch registers (ebx/eax), tearing down the frame first
// when required.
// NOTE(review): lossy fragment; the miss path is elided.
3733 void LCodeGen::DoTailCallThroughMegamorphicCache(
3734 LTailCallThroughMegamorphicCache* instr) {
3735 Register receiver =
ToRegister(instr->receiver());
3740 Register scratch =
ebx;
3741 Register extra =
eax;
3742 DCHECK(!scratch.is(receiver) && !scratch.is(
name));
3743 DCHECK(!extra.is(receiver) && !extra.is(
name));
3749 isolate()->stub_cache()->GenerateProbe(masm(), instr->hydrogen()->flags(),
3750 must_teardown_frame, receiver,
name,
3754 if (must_teardown_frame)
__ leave();
// DoCallWithDescriptor: calls through a call descriptor -- constant code
// target (path elided) or indirect through a register, bracketed by the
// safepoint generator's Before/AfterCall.
// NOTE(review): lossy fragment.
3759 void LCodeGen::DoCallWithDescriptor(LCallWithDescriptor* instr) {
3762 LPointerMap* pointers = instr->pointer_map();
3765 if (instr->target()->IsConstantOperand()) {
3766 LConstantOperand* target = LConstantOperand::cast(instr->target());
3771 DCHECK(instr->target()->IsRegister());
3772 Register target =
ToRegister(instr->target());
3773 generator.BeforeCall(
__ CallSize(Operand(target)));
3777 generator.AfterCall();
// DoCallJSFunction: calls a JS function already in its register, passing
// the arity in eax when requested; detects self-calls so the current code
// object can be reused (call emission elided).
// NOTE(review): lossy fragment.
3781 void LCodeGen::DoCallJSFunction(LCallJSFunction* instr) {
3785 if (instr->hydrogen()->pass_argument_count()) {
3786 __ mov(
eax, instr->arity());
3792 bool is_self_call =
false;
3793 if (instr->hydrogen()->function()->IsConstant()) {
3794 HConstant* fun_const = HConstant::cast(instr->hydrogen()->function());
3795 Handle<JSFunction> jsfun =
3797 is_self_call = jsfun.is_identical_to(info()->closure());
// DoDeferredMathAbsTaggedHeapNumber (presumably -- signature elided):
// Math.abs slow path for tagged heap numbers. Clears the sign bit of the
// exponent word; if the number was already positive (zero result of the
// elided and/test) it is returned unchanged, otherwise a fresh HeapNumber
// is allocated (runtime fallback under &slow) and both words copied.
// NOTE(review): lossy fragment.
3811 Register input_reg =
ToRegister(instr->value());
3813 factory()->heap_number_map());
3816 Label slow, allocated, done;
// tmp must differ from the fixed runtime result register eax.
3817 Register tmp = input_reg.is(
eax) ?
ecx :
eax;
3821 PushSafepointRegistersScope
scope(
this);
3823 __ mov(tmp,
FieldOperand(input_reg, HeapNumber::kExponentOffset));
3829 __ j(
zero, &done, Label::kNear);
3831 __ AllocateHeapNumber(tmp, tmp2,
no_reg, &slow);
3832 __ jmp(&allocated, Label::kNear);
// Slow path: allocate via the runtime (call elided), result in eax.
3837 instr, instr->context());
3839 if (!tmp.is(
eax))
__ mov(tmp,
eax);
3841 __ LoadFromSafepointRegisterSlot(input_reg, input_reg);
3843 __ bind(&allocated);
3844 __ mov(tmp2,
FieldOperand(input_reg, HeapNumber::kExponentOffset));
3847 __ mov(tmp2,
FieldOperand(input_reg, HeapNumber::kMantissaOffset));
3849 __ StoreToSafepointRegisterSlot(input_reg, tmp);
// EmitIntegerMathAbs fragment: negate when the sign test is set (elided),
// deopt on overflow.
3856 Register input_reg =
ToRegister(instr->value());
3857 __ test(input_reg, Operand(input_reg));
3862 __ bind(&is_positive);
// DoMathAbs: dispatches on representation -- double (elided), integer
// (elided), or tagged via the deferred heap-number path; tagged Smis are
// handled inline, non-Smis jump to the deferred code.
// NOTE(review): lossy fragment; the deferred-class boilerplate is elided.
3866 void LCodeGen::DoMathAbs(LMathAbs* instr) {
3870 DeferredMathAbsTaggedHeapNumber(
LCodeGen* codegen,
3875 codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
3877 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
3882 DCHECK(instr->value()->Equals(instr->result()));
3883 Representation r = instr->hydrogen()->value()->representation();
3889 }
else if (r.IsSmiOrInteger32()) {
3892 DeferredMathAbsTaggedHeapNumber* deferred =
3893 new(zone()) DeferredMathAbsTaggedHeapNumber(
this, instr,
x87_stack_);
3894 Register input_reg =
ToRegister(instr->value());
3896 __ JumpIfNotSmi(input_reg, deferred->entry());
3898 __ bind(deferred->exit());
// DoMathFloor: floor via the x87 FPU -- rounding control is temporarily
// set to round-down (0x0400), the value is stored with fist_s, then RC is
// restored to round-to-nearest (0x0000). -0 handling elided.
// NOTE(review): lossy fragment.
3903 void LCodeGen::DoMathFloor(LMathFloor* instr) {
3904 Register output_reg =
ToRegister(instr->result());
3908 Label not_minus_zero, done;
3914 __ j(
below, ¬_minus_zero, Label::kNear);
3923 __ Move(output_reg, Immediate(0));
3924 __ jmp(&done, Label::kFar);
// NOTE(review): "¬_minus_zero" is mojibake for "&not_minus_zero".
3929 __ bind(¬_minus_zero);
3931 __ X87SetRC(0x0400);
3933 __ fist_s(Operand(
esp, 0));
3938 __ X87SetRC(0x0000);
// DoMathRound: round-half-up via x87 -- adds 0.5 (or -0.5 on the negative
// side) and truncates/floors with the appropriate rounding-control bits
// (0x0c00 = truncate, 0x0400 = round down), restoring RC to 0x0000 after.
// NOTE(review): lossy fragment; compares and deopts are elided.
3943 void LCodeGen::DoMathRound(LMathRound* instr) {
3945 Register result =
ToRegister(instr->result());
3947 Label below_one_half, below_minus_one_half, done;
3949 ExternalReference one_half = ExternalReference::address_of_one_half();
3950 ExternalReference minus_one_half =
3951 ExternalReference::address_of_minus_one_half();
3953 __ fld_d(Operand::StaticVariable(one_half));
3960 __ fadd_d(Operand::StaticVariable(one_half));
3962 __ X87SetRC(0x0c00);
3973 __ X87SetRC(0x0000);
3976 __ bind(&below_one_half);
3977 __ fld_d(Operand::StaticVariable(minus_one_half));
3980 __ j(
carry, &below_minus_one_half);
3989 __ Move(result, Immediate(0));
3992 __ bind(&below_minus_one_half);
3994 __ fadd_d(Operand::StaticVariable(one_half));
3996 __ X87SetRC(0x0400);
4007 __ X87SetRC(0x0000);
// DoMathFround (body elided) and DoMathSqrt: sqrt boxes the operand in a
// HeapNumber (inline allocation with runtime fallback under a safepoint
// scope), calls Runtime::kMathSqrtRT, then unboxes the result (Smi or
// heap number) back onto the x87 stack.
// NOTE(review): lossy fragment.
4013 void LCodeGen::DoMathFround(LMathFround* instr) {
4023 void LCodeGen::DoMathSqrt(LMathSqrt* instr) {
4026 Register temp_result =
ToRegister(instr->temp1());
4028 Label slow, done, smi, finish;
4029 DCHECK(result_reg.is(input));
4032 if (FLAG_inline_new) {
4033 __ AllocateHeapNumber(temp_result, temp,
no_reg, &slow);
4034 __ jmp(&done, Label::kNear);
4043 __ Move(temp_result, Immediate(0));
4046 PushSafepointRegistersScope
scope(
this);
4049 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
4051 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
4052 __ StoreToSafepointRegisterSlot(temp_result,
eax);
4060 PushSafepointRegistersScope
scope(
this);
4063 __ push(temp_result);
4064 __ CallRuntimeSaveDoubles(Runtime::kMathSqrtRT);
4066 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
4067 __ StoreToSafepointRegisterSlot(temp_result,
eax);
4071 __ JumpIfSmi(temp_result, &smi);
// Smi result: untag and push through memory to reload as an x87 value.
4077 __ SmiUntag(temp_result);
4078 __ push(temp_result);
4080 __ pop(temp_result);
// DoMathPowHalf: x^0.5. The visible and/cmp against 0x4700/0x0700 tests
// x87 status-word condition bits -- presumably the -Infinity special case;
// TODO confirm against the full source.
// NOTE(review): lossy fragment.
4086 void LCodeGen::DoMathPowHalf(LMathPowHalf* instr) {
4098 __ and_(
eax, Immediate(0x4700));
4099 __ cmp(
eax, Immediate(0x0700));
4103 __ jmp(&done, Label::kNear);
// DoPower: Math.pow with the exponent in Smi/tagged/int32/double form.
// Tagged exponents are checked for Smi-ness (heap-number deopt path
// elided) then untagged; the general case calls the C power function.
// NOTE(review): lossy fragment.
4115 void LCodeGen::DoPower(LPower* instr) {
4116 Representation exponent_type = instr->hydrogen()->right()->representation();
4120 ExternalReference one_half = ExternalReference::address_of_one_half();
4122 if (exponent_type.IsSmi()) {
4123 Register exponent =
ToRegister(instr->right());
4125 __ SmiUntag(exponent);
4129 }
else if (exponent_type.IsTagged()) {
4130 Register exponent =
ToRegister(instr->right());
4131 Register temp = exponent.is(
ecx) ?
eax :
ecx;
4132 Label no_deopt, done;
4134 __ JumpIfSmi(exponent, &no_deopt);
4142 __ SmiUntag(exponent);
4147 }
else if (exponent_type.IsInteger32()) {
4148 Register exponent =
ToRegister(instr->right());
4154 DCHECK(exponent_type.IsDouble());
4155 X87Register exponent_double =
ToX87Register(instr->right());
// NOTE(review): "¬_plus_0" is mojibake for "&not_plus_0".
4171 __ bind(¬_plus_0);
4174 __ PrepareCallCFunction(4,
eax);
4178 __ CallCFunction(ExternalReference::power_double_double_function(isolate()),
// DoMathLog: in-place log; negative inputs produce the canonical NaN,
// zero produces -Infinity (both loaded from external references).
// DoMathClz32: bsr-based count-leading-zeros; bsr leaves the result
// undefined on zero input, so 63 is moved in first and xor 31 converts
// the bit index into the leading-zero count.
// NOTE(review): lossy fragment.
4186 void LCodeGen::DoMathLog(LMathLog* instr) {
4187 DCHECK(instr->value()->Equals(instr->result()));
4195 __ j(
below, &nan_result, Label::kNear);
4204 __ jmp(&done, Label::kNear);
4206 __ bind(&nan_result);
4207 ExternalReference nan =
4208 ExternalReference::address_of_canonical_non_hole_nan();
4210 __ fld_d(Operand::StaticVariable(nan));
4212 __ jmp(&done, Label::kNear);
4215 ExternalReference ninf = ExternalReference::address_of_negative_infinity();
4217 __ fld_d(Operand::StaticVariable(ninf));
4224 void LCodeGen::DoMathClz32(LMathClz32* instr) {
4226 Register result =
ToRegister(instr->result());
4227 Label not_zero_input;
4228 __ bsr(result, input);
4231 __ Move(result, Immediate(63));
// NOTE(review): "¬_zero_input" is mojibake for "&not_zero_input".
4233 __ bind(¬_zero_input);
4234 __ xor_(result, Immediate(31));
// DoMathExp: same box-call-unbox structure as DoMathSqrt -- allocate a
// HeapNumber (runtime fallback), call Runtime::kMathExpRT under a
// safepoint scope, then unbox the Smi/heap-number result.
// NOTE(review): lossy fragment.
4238 void LCodeGen::DoMathExp(LMathExp* instr) {
4241 Register temp_result =
ToRegister(instr->temp1());
4243 Label slow, done, smi, finish;
4244 DCHECK(result_reg.is(input));
4247 if (FLAG_inline_new) {
4248 __ AllocateHeapNumber(temp_result, temp,
no_reg, &slow);
4249 __ jmp(&done, Label::kNear);
4258 __ Move(temp_result, Immediate(0));
4261 PushSafepointRegistersScope
scope(
this);
4264 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
4266 Safepoint::kNoLazyDeopt);
4267 __ StoreToSafepointRegisterSlot(temp_result,
eax);
4275 PushSafepointRegistersScope
scope(
this);
4278 __ push(temp_result);
4279 __ CallRuntimeSaveDoubles(Runtime::kMathExpRT);
4281 Safepoint::kNoLazyDeopt);
4282 __ StoreToSafepointRegisterSlot(temp_result,
eax);
4286 __ JumpIfSmi(temp_result, &smi);
4292 __ SmiUntag(temp_result);
4293 __ push(temp_result);
4295 __ pop(temp_result);
// DoInvokeFunction: generic invoke when the target is unknown (safepoint
// generator path) or CallKnownFunction when it is (elided). DoCallFunction
// uses CallFunctionStub; DoCallNew seeds ebx with undefined and the arity
// in eax for the (elided) construct stub.
// NOTE(review): lossy fragment.
4301 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
4304 DCHECK(instr->HasPointerMap());
4306 Handle<JSFunction> known_function = instr->hydrogen()->known_function();
4307 if (known_function.is_null()) {
4308 LPointerMap* pointers = instr->pointer_map();
4310 this, pointers, Safepoint::kLazyDeopt);
4311 ParameterCount count(instr->arity());
4315 instr->hydrogen()->formal_parameter_count(),
4323 void LCodeGen::DoCallFunction(LCallFunction* instr) {
4328 int arity = instr->arity();
4329 CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags());
4334 void LCodeGen::DoCallNew(LCallNew* instr) {
4340 __ mov(
ebx, isolate()->factory()->undefined_value());
4342 __ Move(
eax, Immediate(instr->arity()));
// DoCallNewArray: Array construction -- selects the no-argument, single-
// argument (with a packed/holey split on the argument value), or N-
// argument constructor stub by arity and elements kind.
// NOTE(review): lossy fragment; stub calls are elided.
4347 void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
4352 __ Move(
eax, Immediate(instr->arity()));
4353 __ mov(
ebx, isolate()->factory()->undefined_value());
4354 ElementsKind kind = instr->hydrogen()->elements_kind();
4360 if (instr->arity() == 0) {
4361 ArrayNoArgumentConstructorStub stub(isolate(), kind, override_mode);
4363 }
else if (instr->arity() == 1) {
// A zero-length single argument keeps the packed kind.
4371 __ j(
zero, &packed_case, Label::kNear);
4374 ArraySingleArgumentConstructorStub stub(isolate(),
4378 __ jmp(&done, Label::kNear);
4379 __ bind(&packed_case);
4382 ArraySingleArgumentConstructorStub stub(isolate(), kind, override_mode);
4386 ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode);
4392 void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
4394 CallRuntime(instr->function(), instr->arity(), instr, instr->save_doubles());
// DoStoreCodeEntry: stores a code object's entry into a function (store
// elided). DoInnerAllocatedObject: computes base + offset, via lea with a
// register offset or (elided) constant displacement.
// NOTE(review): lossy fragment.
4398 void LCodeGen::DoStoreCodeEntry(LStoreCodeEntry* instr) {
4399 Register
function =
ToRegister(instr->function());
4400 Register code_object =
ToRegister(instr->code_object());
4406 void LCodeGen::DoInnerAllocatedObject(LInnerAllocatedObject* instr) {
4407 Register result =
ToRegister(instr->result());
4408 Register base =
ToRegister(instr->base_object());
4409 if (instr->offset()->IsConstantOperand()) {
4410 LConstantOperand* offset = LConstantOperand::cast(instr->offset());
4413 Register offset =
ToRegister(instr->offset());
4414 __ lea(result, Operand(base, offset,
times_1, 0));
4419 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
4420 Representation representation = instr->hydrogen()->field_representation();
4422 HObjectAccess access = instr->hydrogen()->access();
4423 int offset = access.offset();
4425 if (access.IsExternalMemory()) {
4426 DCHECK(!instr->hydrogen()->NeedsWriteBarrier());
4427 MemOperand operand = instr->object()->IsConstantOperand()
4428 ? MemOperand::StaticVariable(
4431 if (instr->value()->IsConstantOperand()) {
4432 LConstantOperand* operand_value = LConstantOperand::cast(instr->value());
4436 __ Store(value, operand, representation);
4441 Register
object =
ToRegister(instr->object());
4442 __ AssertNotSmi(
object);
4443 DCHECK(!representation.IsSmi() ||
4444 !instr->value()->IsConstantOperand() ||
4445 IsSmi(LConstantOperand::cast(instr->value())));
4446 if (representation.IsDouble()) {
4447 DCHECK(access.IsInobject());
4448 DCHECK(!instr->hydrogen()->has_transition());
4449 DCHECK(!instr->hydrogen()->NeedsWriteBarrier());
4455 if (instr->hydrogen()->has_transition()) {
4456 Handle<Map> transition = instr->hydrogen()->transition_map();
4457 AddDeprecationDependency(transition);
4459 if (instr->hydrogen()->NeedsWriteBarrierForMap()) {
4461 Register temp_map =
ToRegister(instr->temp_map());
4462 __ mov(temp_map, transition);
4465 __ RecordWriteForMap(
object, transition, temp_map, temp,
kSaveFPRegs);
4470 Register write_register = object;
4471 if (!access.IsInobject()) {
4477 if (instr->value()->IsConstantOperand()) {
4478 LConstantOperand* operand_value = LConstantOperand::cast(instr->value());
4479 if (operand_value->IsRegister()) {
4481 __ Store(value, operand, representation);
4482 }
else if (representation.IsInteger32()) {
4483 Immediate immediate =
ToImmediate(operand_value, representation);
4484 DCHECK(!instr->hydrogen()->NeedsWriteBarrier());
4485 __ mov(operand, immediate);
4487 Handle<Object> handle_value =
ToHandle(operand_value);
4488 DCHECK(!instr->hydrogen()->NeedsWriteBarrier());
4489 __ mov(operand, handle_value);
4493 __ Store(value, operand, representation);
4496 if (instr->hydrogen()->NeedsWriteBarrier()) {
4498 Register temp = access.IsInobject() ?
ToRegister(instr->temp()) : object;
4500 __ RecordWriteField(write_register, offset, value, temp,
kSaveFPRegs,
4502 instr->hydrogen()->SmiCheckForWriteBarrier(),
4503 instr->hydrogen()->PointersToHereCheckForValue());
4508 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
4519 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
4521 if (instr->index()->IsConstantOperand()) {
4523 ToImmediate(LConstantOperand::cast(instr->index()),
4524 instr->hydrogen()->length()->representation()));
4526 }
else if (instr->length()->IsConstantOperand()) {
4528 ToImmediate(LConstantOperand::cast(instr->length()),
4529 instr->hydrogen()->index()->representation()));
4533 if (FLAG_debug_code && instr->hydrogen()->skip_check()) {
4546 LOperand* key = instr->key();
4547 if (!key->IsConstantOperand() &&
4555 instr->hydrogen()->key()->representation(),
4557 instr->base_offset()));
4566 switch (elements_kind) {
4573 __ mov_b(operand, value);
4579 __ mov_w(operand, value);
4585 __ mov(operand, value);
4607 ExternalReference canonical_nan_reference =
4608 ExternalReference::address_of_canonical_non_hole_nan();
4612 instr->hydrogen()->key()->representation(),
4614 instr->base_offset());
4617 if (instr->hydrogen()->IsConstantHoleStore()) {
4621 uint64_t int_val = bit_cast<uint64_t, double>(nan_double);
4625 __ mov(double_store_operand, Immediate(lower));
4629 instr->hydrogen()->key()->representation(),
4632 __ mov(double_store_operand2, Immediate(upper));
4634 Label no_special_nan_handling;
4638 if (instr->NeedsCanonicalization()) {
4643 __ j(
parity_odd, &no_special_nan_handling, Label::kNear);
4651 __ jmp(&no_special_nan_handling, Label::kNear);
4652 __ bind(&canonicalize);
4654 __ fld_d(Operand::StaticVariable(canonical_nan_reference));
4657 __ bind(&no_special_nan_handling);
4658 __ fst_d(double_store_operand);
4664 Register elements =
ToRegister(instr->elements());
4665 Register key = instr->key()->IsRegister() ?
ToRegister(instr->key()) :
no_reg;
4670 instr->hydrogen()->key()->representation(),
4672 instr->base_offset());
4673 if (instr->value()->IsRegister()) {
4676 LConstantOperand* operand_value = LConstantOperand::cast(instr->value());
4677 if (
IsSmi(operand_value)) {
4679 __ mov(operand, immediate);
4682 Handle<Object> handle_value =
ToHandle(operand_value);
4683 __ mov(operand, handle_value);
4687 if (instr->hydrogen()->NeedsWriteBarrier()) {
4688 DCHECK(instr->value()->IsRegister());
4690 DCHECK(!instr->key()->IsConstantOperand());
4692 instr->hydrogen()->value()->type().IsHeapObject()
4695 __ lea(key, operand);
4698 instr->hydrogen()->PointersToHereCheckForValue());
4703 void LCodeGen::DoStoreKeyed(LStoreKeyed* instr) {
4705 if (instr->is_typed_elements()) {
4707 }
else if (instr->hydrogen()->value()->representation().IsDouble()) {
4715 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
4722 CodeFactory::KeyedStoreIC(isolate(), instr->strict_mode()).code();
4727 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) {
4728 Register
object =
ToRegister(instr->object());
4730 Label no_memento_found;
4731 __ TestJSArrayForAllocationMemento(
object, temp, &no_memento_found);
4733 __ bind(&no_memento_found);
4737 void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
4738 Register object_reg =
ToRegister(instr->object());
4740 Handle<Map> from_map = instr->original_map();
4741 Handle<Map> to_map = instr->transitioned_map();
4745 Label not_applicable;
4746 bool is_simple_map_transition =
4748 Label::Distance branch_distance =
4749 is_simple_map_transition ? Label::kNear : Label::kFar;
4751 __ j(
not_equal, ¬_applicable, branch_distance);
4752 if (is_simple_map_transition) {
4753 Register new_map_reg =
ToRegister(instr->new_map_temp());
4758 __ RecordWriteForMap(object_reg, to_map, new_map_reg,
4763 PushSafepointRegistersScope
scope(
this);
4764 __ mov(
ebx, to_map);
4765 bool is_js_array = from_map->instance_type() ==
JS_ARRAY_TYPE;
4766 TransitionElementsKindStub stub(isolate(), from_kind, to_kind, is_js_array);
4771 __ bind(¬_applicable);
4775 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
4778 DeferredStringCharCodeAt(
LCodeGen* codegen,
4779 LStringCharCodeAt* instr,
4783 codegen()->DoDeferredStringCharCodeAt(instr_);
4785 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
4787 LStringCharCodeAt* instr_;
4790 DeferredStringCharCodeAt* deferred =
4791 new(zone()) DeferredStringCharCodeAt(
this, instr,
x87_stack_);
4799 __ bind(deferred->exit());
4804 Register
string =
ToRegister(instr->string());
4805 Register result =
ToRegister(instr->result());
4810 __ Move(result, Immediate(0));
4812 PushSafepointRegistersScope
scope(
this);
4817 if (instr->index()->IsConstantOperand()) {
4818 Immediate immediate =
ToImmediate(LConstantOperand::cast(instr->index()),
4827 instr, instr->context());
4830 __ StoreToSafepointRegisterSlot(result,
eax);
4834 void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
4837 DeferredStringCharFromCode(
LCodeGen* codegen,
4838 LStringCharFromCode* instr,
4842 codegen()->DoDeferredStringCharFromCode(instr_);
4844 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
4846 LStringCharFromCode* instr_;
4849 DeferredStringCharFromCode* deferred =
4850 new(zone()) DeferredStringCharFromCode(
this, instr,
x87_stack_);
4852 DCHECK(instr->hydrogen()->value()->representation().IsInteger32());
4853 Register char_code =
ToRegister(instr->char_code());
4854 Register result =
ToRegister(instr->result());
4855 DCHECK(!char_code.is(result));
4858 __ j(
above, deferred->entry());
4859 __ Move(result, Immediate(factory()->single_character_string_cache()));
4863 __ cmp(result, factory()->undefined_value());
4864 __ j(
equal, deferred->entry());
4865 __ bind(deferred->exit());
4870 Register char_code =
ToRegister(instr->char_code());
4871 Register result =
ToRegister(instr->result());
4876 __ Move(result, Immediate(0));
4878 PushSafepointRegistersScope
scope(
this);
4879 __ SmiTag(char_code);
4882 __ StoreToSafepointRegisterSlot(result,
eax);
4886 void LCodeGen::DoStringAdd(LStringAdd* instr) {
4890 StringAddStub stub(isolate(),
4891 instr->hydrogen()->flags(),
4892 instr->hydrogen()->pretenure_flag());
4897 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
4898 LOperand* input = instr->value();
4899 LOperand* output = instr->result();
4900 DCHECK(input->IsRegister() || input->IsStackSlot());
4901 DCHECK(output->IsDoubleRegister());
4902 if (input->IsRegister()) {
4913 void LCodeGen::DoUint32ToDouble(LUint32ToDouble* instr) {
4914 LOperand* input = instr->value();
4915 LOperand* output = instr->result();
4923 void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
4926 DeferredNumberTagI(
LCodeGen* codegen,
4931 codegen()->DoDeferredNumberTagIU(instr_, instr_->value(), instr_->temp(),
4934 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
4936 LNumberTagI* instr_;
4939 LOperand* input = instr->value();
4940 DCHECK(input->IsRegister() && input->Equals(instr->result()));
4943 DeferredNumberTagI* deferred =
4944 new(zone()) DeferredNumberTagI(
this, instr,
x87_stack_);
4947 __ bind(deferred->exit());
4951 void LCodeGen::DoNumberTagU(LNumberTagU* instr) {
4954 DeferredNumberTagU(
LCodeGen* codegen,
4959 codegen()->DoDeferredNumberTagIU(instr_, instr_->value(), instr_->temp(),
4962 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
4964 LNumberTagU* instr_;
4967 LOperand* input = instr->value();
4968 DCHECK(input->IsRegister() && input->Equals(instr->result()));
4971 DeferredNumberTagU* deferred =
4972 new(zone()) DeferredNumberTagU(
this, instr,
x87_stack_);
4974 __ j(
above, deferred->entry());
4976 __ bind(deferred->exit());
4993 __ xor_(reg, 0x80000000);
4995 __ fild_s(Operand(
esp, 0));
5000 __ push(Immediate(0));
5002 __ fild_d(Operand(
esp, 0));
5007 if (FLAG_inline_new) {
5008 __ AllocateHeapNumber(reg, tmp,
no_reg, &slow);
5009 __ jmp(&done, Label::kNear);
5018 __ Move(reg, Immediate(0));
5021 PushSafepointRegistersScope
scope(
this);
5029 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
5031 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
5032 __ StoreToSafepointRegisterSlot(reg,
eax);
5040 void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
5043 DeferredNumberTagD(
LCodeGen* codegen,
5048 codegen()->DoDeferredNumberTagD(instr_);
5050 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
5052 LNumberTagD* instr_;
5063 DeferredNumberTagD* deferred =
5064 new(zone()) DeferredNumberTagD(
this, instr,
x87_stack_);
5065 if (FLAG_inline_new) {
5067 __ AllocateHeapNumber(reg, tmp,
no_reg, deferred->entry());
5069 __ jmp(deferred->entry());
5071 __ bind(deferred->exit());
5081 __ Move(reg, Immediate(0));
5083 PushSafepointRegistersScope
scope(
this);
5090 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
5092 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
5093 __ StoreToSafepointRegisterSlot(reg,
eax);
5097 void LCodeGen::DoSmiTag(LSmiTag* instr) {
5098 HChange* hchange = instr->hydrogen();
5102 __ test(input, Immediate(0xc0000000));
5113 void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
5114 LOperand* input = instr->value();
5116 DCHECK(input->IsRegister() && input->Equals(instr->result()));
5117 if (instr->needs_check()) {
5121 __ AssertSmi(result);
5123 __ SmiUntag(result);
5128 Register temp_reg, X87Register res_reg,
5130 bool can_convert_undefined_to_nan =
5131 instr->hydrogen()->can_convert_undefined_to_nan();
5132 bool deoptimize_on_minus_zero = instr->hydrogen()->deoptimize_on_minus_zero();
5134 Label load_smi, done;
5139 __ JumpIfSmi(input_reg, &load_smi);
5143 factory()->heap_number_map());
5144 if (!can_convert_undefined_to_nan) {
5147 Label heap_number, convert;
5151 __ cmp(input_reg, factory()->undefined_value());
5155 ExternalReference nan =
5156 ExternalReference::address_of_canonical_non_hole_nan();
5157 __ fld_d(Operand::StaticVariable(nan));
5158 __ jmp(&done, Label::kNear);
5160 __ bind(&heap_number);
5164 if (deoptimize_on_minus_zero) {
5171 __ mov(temp_reg,
FieldOperand(input_reg, HeapNumber::kExponentOffset));
5173 __ j(
zero, &done, Label::kNear);
5179 __ jmp(&done, Label::kNear);
5187 __ mov(temp_reg, input_reg);
5188 __ SmiUntag(temp_reg);
5190 __ fild_s(Operand(
esp, 0));
5198 Register input_reg =
ToRegister(instr->value());
5204 if (instr->truncating()) {
5205 Label no_heap_number, check_bools, check_false;
5209 factory()->heap_number_map());
5211 __ TruncateHeapNumberToI(input_reg, input_reg);
5214 __ bind(&no_heap_number);
5217 __ cmp(input_reg, factory()->undefined_value());
5219 __ Move(input_reg, Immediate(0));
5222 __ bind(&check_bools);
5223 __ cmp(input_reg, factory()->true_value());
5225 __ Move(input_reg, Immediate(1));
5228 __ bind(&check_false);
5229 __ cmp(input_reg, factory()->false_value());
5231 __ Move(input_reg, Immediate(0));
5236 isolate()->factory()->heap_number_map());
5243 Label no_precision_lost, not_nan, zero_check;
5251 __ j(
equal, &no_precision_lost, Label::kNear);
5254 __ bind(&no_precision_lost);
5261 __ test(input_reg, Operand(input_reg));
5262 __ j(
zero, &zero_check, Label::kNear);
5266 __ bind(&zero_check);
5270 __ fstp_s(Operand(
esp, 0));
5272 __ test(input_reg, Operand(input_reg));
5286 void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
5289 DeferredTaggedToI(
LCodeGen* codegen,
5294 codegen()->DoDeferredTaggedToI(instr_, done());
5296 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
5301 LOperand* input = instr->value();
5302 DCHECK(input->IsRegister());
5306 if (instr->hydrogen()->value()->representation().IsSmi()) {
5307 __ SmiUntag(input_reg);
5309 DeferredTaggedToI* deferred =
5310 new(zone()) DeferredTaggedToI(
this, instr,
x87_stack_);
5314 __ SmiUntag(input_reg);
5317 __ j(
carry, deferred->entry());
5318 __ bind(deferred->exit());
5323 void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
5324 LOperand* input = instr->value();
5325 DCHECK(input->IsRegister());
5326 LOperand* temp = instr->temp();
5327 DCHECK(temp->IsRegister());
5328 LOperand* result = instr->result();
5329 DCHECK(result->IsDoubleRegister());
5334 HValue* value = instr->hydrogen()->value();
5343 void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
5344 LOperand* input = instr->value();
5345 DCHECK(input->IsDoubleRegister());
5346 LOperand* result = instr->result();
5347 DCHECK(result->IsRegister());
5350 if (instr->truncating()) {
5353 __ TruncateX87TOSToI(result_reg);
5355 Label lost_precision, is_nan, minus_zero, done;
5359 __ X87TOSToI(result_reg, instr->hydrogen()->GetMinusZeroMode(),
5360 &lost_precision, &is_nan, &minus_zero, dist);
5362 __ bind(&lost_precision);
5366 __ bind(&minus_zero);
5373 void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) {
5374 LOperand* input = instr->value();
5375 DCHECK(input->IsDoubleRegister());
5376 LOperand* result = instr->result();
5377 DCHECK(result->IsRegister());
5380 Label lost_precision, is_nan, minus_zero, done;
5384 __ X87TOSToI(result_reg, instr->hydrogen()->GetMinusZeroMode(),
5385 &lost_precision, &is_nan, &minus_zero, dist);
5387 __ bind(&lost_precision);
5391 __ bind(&minus_zero);
5394 __ SmiTag(result_reg);
5399 void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
5400 LOperand* input = instr->value();
5406 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
5407 if (!instr->hydrogen()->value()->type().IsHeapObject()) {
5408 LOperand* input = instr->value();
5415 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
5421 if (instr->hydrogen()->is_interval_check()) {
5424 instr->hydrogen()->GetCheckInterval(&first, &last);
5427 static_cast<int8_t
>(first));
5430 if (first == last) {
5437 static_cast<int8_t
>(last));
5444 instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag);
5452 __ and_(temp, mask);
5460 void LCodeGen::DoCheckValue(LCheckValue* instr) {
5461 Handle<HeapObject>
object = instr->hydrogen()->object().handle();
5462 if (instr->hydrogen()->object_in_new_space()) {
5464 Handle<Cell> cell = isolate()->factory()->NewCell(
object);
5465 __ cmp(reg, Operand::ForCell(cell));
5467 Operand operand =
ToOperand(instr->value());
5468 __ cmp(operand,
object);
5476 PushSafepointRegistersScope
scope(
this);
5479 __ CallRuntimeSaveDoubles(Runtime::kTryMigrateInstance);
5481 instr->pointer_map(), 1, Safepoint::kNoLazyDeopt);
5489 void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
5492 DeferredCheckMaps(
LCodeGen* codegen,
5496 :
LDeferredCode(codegen, x87_stack), instr_(instr), object_(object) {
5497 SetExit(check_maps());
5500 codegen()->DoDeferredInstanceMigration(instr_, object_);
5502 Label* check_maps() {
return &check_maps_; }
5503 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
5510 if (instr->hydrogen()->IsStabilityCheck()) {
5511 const UniqueSet<Map>* maps = instr->hydrogen()->maps();
5512 for (
int i = 0;
i < maps->size(); ++
i) {
5513 AddStabilityDependency(maps->at(
i).handle());
5518 LOperand* input = instr->value();
5519 DCHECK(input->IsRegister());
5522 DeferredCheckMaps* deferred =
NULL;
5523 if (instr->hydrogen()->HasMigrationTarget()) {
5524 deferred =
new(zone()) DeferredCheckMaps(
this, instr, reg,
x87_stack_);
5525 __ bind(deferred->check_maps());
5528 const UniqueSet<Map>* maps = instr->hydrogen()->maps();
5530 for (
int i = 0;
i < maps->size() - 1;
i++) {
5531 Handle<Map>
map = maps->at(
i).handle();
5532 __ CompareMap(reg,
map);
5533 __ j(
equal, &success, Label::kNear);
5536 Handle<Map>
map = maps->at(maps->size() - 1).handle();
5537 __ CompareMap(reg,
map);
5538 if (instr->hydrogen()->HasMigrationTarget()) {
5548 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
5550 Register result_reg =
ToRegister(instr->result());
5552 __ ClampTOSToUint8(result_reg);
5556 void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) {
5557 DCHECK(instr->unclamped()->Equals(instr->result()));
5558 Register value_reg =
ToRegister(instr->result());
5559 __ ClampUint8(value_reg);
5563 void LCodeGen::DoClampTToUint8NoSSE2(LClampTToUint8NoSSE2* instr) {
5564 Register input_reg =
ToRegister(instr->unclamped());
5565 Register result_reg =
ToRegister(instr->result());
5566 Register scratch =
ToRegister(instr->scratch());
5567 Register scratch2 =
ToRegister(instr->scratch2());
5568 Register scratch3 =
ToRegister(instr->scratch3());
5569 Label is_smi, done, heap_number, valid_exponent,
5570 largest_value, zero_result, maybe_nan_or_infinity;
5572 __ JumpIfSmi(input_reg, &is_smi);
5576 factory()->heap_number_map());
5577 __ j(
equal, &heap_number, Label::kNear);
5581 __ cmp(input_reg, factory()->undefined_value());
5583 __ jmp(&zero_result, Label::kNear);
5586 __ bind(&heap_number);
5593 __ mov(scratch,
FieldOperand(input_reg, HeapNumber::kExponentOffset));
5594 __ mov(scratch3,
FieldOperand(input_reg, HeapNumber::kMantissaOffset));
5597 __ test(scratch, scratch);
5601 __ mov(scratch2, scratch);
5604 __ j(
zero, &zero_result, Label::kNear);
5608 const uint32_t non_int8_exponent = 7;
5609 __ cmp(scratch2, Immediate(non_int8_exponent + 1));
5611 __ j(
greater, &maybe_nan_or_infinity, Label::kNear);
5613 __ bind(&valid_exponent);
5631 __ mov(scratch2, scratch);
5632 const uint32_t one_half_bit_shift = 30 -
sizeof(uint8_t) * 8;
5633 const uint32_t one_bit_shift = one_half_bit_shift + 1;
5634 __ and_(scratch2, Immediate((1 << one_bit_shift) - 1));
5635 __ cmp(scratch2, Immediate(1 << one_half_bit_shift));
5637 __ j(
less, &no_round, Label::kNear);
5639 __ mov(scratch2, Immediate(1 << one_half_bit_shift));
5641 __ test(scratch3, scratch3);
5643 __ mov(scratch2, scratch);
5644 __ and_(scratch2, Immediate(1 << one_bit_shift));
5645 __ shr(scratch2, 1);
5647 __ add(scratch, scratch2);
5648 __ j(
overflow, &largest_value, Label::kNear);
5650 __ shr(scratch, 23);
5651 __ mov(result_reg, scratch);
5652 __ jmp(&done, Label::kNear);
5654 __ bind(&maybe_nan_or_infinity);
5662 __ or_(scratch,
FieldOperand(input_reg, HeapNumber::kMantissaOffset));
5666 __ bind(&largest_value);
5667 __ mov(result_reg, Immediate(255));
5668 __ jmp(&done, Label::kNear);
5670 __ bind(&zero_result);
5671 __ xor_(result_reg, result_reg);
5672 __ jmp(&done, Label::kNear);
5676 if (!input_reg.is(result_reg)) {
5677 __ mov(result_reg, input_reg);
5679 __ SmiUntag(result_reg);
5680 __ ClampUint8(result_reg);
5685 void LCodeGen::DoDoubleBits(LDoubleBits* instr) {
5687 Register result_reg =
ToRegister(instr->result());
5690 __ fst_d(Operand(
esp, 0));
5691 if (instr->hydrogen()->bits() == HDoubleBits::HIGH) {
5694 __ mov(result_reg, Operand(
esp, 0));
5700 void LCodeGen::DoConstructDouble(LConstructDouble* instr) {
5707 __ mov(Operand(
esp, 0), lo_reg);
5709 __ fld_d(Operand(
esp, 0));
5715 void LCodeGen::DoAllocate(LAllocate* instr) {
5718 DeferredAllocate(
LCodeGen* codegen,
5723 codegen()->DoDeferredAllocate(instr_);
5725 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
5730 DeferredAllocate* deferred =
5731 new(zone()) DeferredAllocate(
this, instr,
x87_stack_);
5733 Register result =
ToRegister(instr->result());
5738 if (instr->hydrogen()->MustAllocateDoubleAligned()) {
5741 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
5742 DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
5743 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
5745 }
else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
5746 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
5750 if (instr->size()->IsConstantOperand()) {
5755 __ jmp(deferred->entry());
5762 __ bind(deferred->exit());
5764 if (instr->hydrogen()->MustPrefillWithFiller()) {
5765 if (instr->size()->IsConstantOperand()) {
5776 isolate()->factory()->one_pointer_filler_map());
5784 Register result =
ToRegister(instr->result());
5791 PushSafepointRegistersScope
scope(
this);
5792 if (instr->size()->IsRegister()) {
5809 instr->hydrogen()->MustAllocateDoubleAligned());
5810 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
5811 DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
5812 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
5814 }
else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
5815 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
5823 Runtime::kAllocateInTargetSpace, 2, instr, instr->context());
5824 __ StoreToSafepointRegisterSlot(result,
eax);
5828 void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
5831 CallRuntime(Runtime::kToFastProperties, 1, instr);
5835 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
5843 int literal_offset =
5845 __ LoadHeapObject(
ecx, instr->hydrogen()->literals());
5847 __ cmp(
ebx, factory()->undefined_value());
5853 __ push(Immediate(
Smi::FromInt(instr->hydrogen()->literal_index())));
5854 __ push(Immediate(instr->hydrogen()->pattern()));
5855 __ push(Immediate(instr->hydrogen()->flags()));
5856 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
5859 __ bind(&materialized);
5861 Label allocated, runtime_allocate;
5863 __ jmp(&allocated, Label::kNear);
5865 __ bind(&runtime_allocate);
5868 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
5871 __ bind(&allocated);
5887 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
5891 bool pretenure = instr->hydrogen()->pretenure();
5892 if (!pretenure && instr->hydrogen()->has_no_literals()) {
5893 FastNewClosureStub stub(isolate(), instr->hydrogen()->strict_mode(),
5894 instr->hydrogen()->kind());
5895 __ mov(
ebx, Immediate(instr->hydrogen()->shared_info()));
5899 __ push(Immediate(instr->hydrogen()->shared_info()));
5900 __ push(Immediate(pretenure ? factory()->true_value()
5901 : factory()->false_value()));
5907 void LCodeGen::DoTypeof(LTypeof* instr) {
5909 LOperand* input = instr->value();
5915 void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
5925 Label* true_label = instr->TrueLabel(chunk_);
5926 Label* false_label = instr->FalseLabel(chunk_);
5927 Handle<String> type_name = instr->type_literal();
5928 int left_block = instr->TrueDestination(chunk_);
5929 int right_block = instr->FalseDestination(chunk_);
5930 int next_block = GetNextEmittedBlock();
5932 Label::Distance true_distance = left_block == next_block ? Label::kNear
5934 Label::Distance false_distance = right_block == next_block ? Label::kNear
5938 __ JumpIfSmi(input, true_label, true_distance);
5940 factory()->heap_number_map());
5941 final_branch_condition =
equal;
5943 }
else if (
String::Equals(type_name, factory()->string_string())) {
5944 __ JumpIfSmi(input, false_label, false_distance);
5949 final_branch_condition =
zero;
5951 }
else if (
String::Equals(type_name, factory()->symbol_string())) {
5952 __ JumpIfSmi(input, false_label, false_distance);
5954 final_branch_condition =
equal;
5956 }
else if (
String::Equals(type_name, factory()->boolean_string())) {
5957 __ cmp(input, factory()->true_value());
5958 __ j(
equal, true_label, true_distance);
5959 __ cmp(input, factory()->false_value());
5960 final_branch_condition =
equal;
5962 }
else if (
String::Equals(type_name, factory()->undefined_string())) {
5963 __ cmp(input, factory()->undefined_value());
5964 __ j(
equal, true_label, true_distance);
5965 __ JumpIfSmi(input, false_label, false_distance);
5972 }
else if (
String::Equals(type_name, factory()->function_string())) {
5974 __ JumpIfSmi(input, false_label, false_distance);
5976 __ j(
equal, true_label, true_distance);
5978 final_branch_condition =
equal;
5980 }
else if (
String::Equals(type_name, factory()->object_string())) {
5981 __ JumpIfSmi(input, false_label, false_distance);
5982 __ cmp(input, factory()->null_value());
5983 __ j(
equal, true_label, true_distance);
5985 __ j(
below, false_label, false_distance);
5987 __ j(
above, false_label, false_distance);
5991 final_branch_condition =
zero;
5994 __ jmp(false_label, false_distance);
5996 return final_branch_condition;
6000 void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
6013 Label check_frame_marker;
6016 __ j(
not_equal, &check_frame_marker, Label::kNear);
6020 __ bind(&check_frame_marker);
6027 if (!info()->IsStub()) {
6030 int current_pc = masm()->pc_offset();
6031 if (current_pc < last_lazy_deopt_pc_ + space_needed) {
6032 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
6033 __ Nop(padding_size);
6036 last_lazy_deopt_pc_ = masm()->pc_offset();
6040 void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
6041 last_lazy_deopt_pc_ = masm()->pc_offset();
6042 DCHECK(instr->HasEnvironment());
6045 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
6049 void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
6062 void LCodeGen::DoDummy(LDummy* instr) {
6067 void LCodeGen::DoDummyUse(LDummyUse* instr) {
6073 PushSafepointRegistersScope
scope(
this);
6075 __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
6078 DCHECK(instr->HasEnvironment());
6080 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
6084 void LCodeGen::DoStackCheck(LStackCheck* instr) {
6087 DeferredStackCheck(
LCodeGen* codegen,
6092 codegen()->DoDeferredStackCheck(instr_);
6094 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
6096 LStackCheck* instr_;
6099 DCHECK(instr->HasEnvironment());
6103 if (instr->hydrogen()->is_function_entry()) {
6106 ExternalReference stack_limit =
6107 ExternalReference::address_of_stack_limit(isolate());
6108 __ cmp(
esp, Operand::StaticVariable(stack_limit));
6111 DCHECK(instr->context()->IsRegister());
6113 CallCode(isolate()->builtins()->StackCheck(),
6118 DCHECK(instr->hydrogen()->is_backwards_branch());
6120 DeferredStackCheck* deferred_stack_check =
6121 new(zone()) DeferredStackCheck(
this, instr,
x87_stack_);
6122 ExternalReference stack_limit =
6123 ExternalReference::address_of_stack_limit(isolate());
6124 __ cmp(
esp, Operand::StaticVariable(stack_limit));
6125 __ j(
below, deferred_stack_check->entry());
6127 __ bind(instr->done_label());
6128 deferred_stack_check->SetExit(instr->done_label());
6137 void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
6145 DCHECK(!environment->HasBeenRegistered());
6152 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
6154 __ cmp(
eax, isolate()->factory()->undefined_value());
6157 __ cmp(
eax, isolate()->factory()->null_value());
6167 Label use_cache, call_runtime;
6168 __ CheckEnumCache(&call_runtime);
6171 __ jmp(&use_cache, Label::kNear);
6174 __ bind(&call_runtime);
6176 CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr);
6179 isolate()->factory()->meta_map());
6181 __ bind(&use_cache);
6185 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
6187 Register result =
ToRegister(instr->result());
6188 Label load_cache, done;
6189 __ EnumLength(result,
map);
6192 __ mov(result, isolate()->factory()->empty_fixed_array());
6193 __ jmp(&done, Label::kNear);
6195 __ bind(&load_cache);
6196 __ LoadInstanceDescriptors(
map, result);
6202 __ test(result, result);
6207 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
6208 Register
object =
ToRegister(instr->value());
6218 PushSafepointRegistersScope
scope(
this);
6222 __ CallRuntimeSaveDoubles(Runtime::kLoadMutableDouble);
6224 instr->pointer_map(), 2, Safepoint::kNoLazyDeopt);
6225 __ StoreToSafepointRegisterSlot(
object,
eax);
6229 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
6232 DeferredLoadMutableDouble(
LCodeGen* codegen,
6233 LLoadFieldByIndex* instr,
6243 codegen()->DoDeferredLoadMutableDouble(instr_, object_, index_);
6245 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
6247 LLoadFieldByIndex* instr_;
6252 Register
object =
ToRegister(instr->object());
6255 DeferredLoadMutableDouble* deferred;
6256 deferred =
new(zone()) DeferredLoadMutableDouble(
6259 Label out_of_object, done;
6265 __ cmp(index, Immediate(0));
6266 __ j(
less, &out_of_object, Label::kNear);
6271 __ jmp(&done, Label::kNear);
6273 __ bind(&out_of_object);
6281 __ bind(deferred->exit());
6286 void LCodeGen::DoStoreFrameContext(LStoreFrameContext* instr) {
6287 Register context =
ToRegister(instr->context());
6292 void LCodeGen::DoAllocateBlockContext(LAllocateBlockContext* instr) {
6293 Handle<ScopeInfo> scope_info = instr->scope_info();
6296 CallRuntime(Runtime::kPushBlockContext, 2, instr);
static AllocationSiteMode GetMode(ElementsKind boilerplate_elements_kind)
static const int kLengthOffset
static U update(U previous, T value)
static const int kHeaderSize
static int SlotOffset(int index)
static void EnsureRelocSpaceForLazyDeoptimization(Handle< Code > code)
static Address GetDeoptimizationEntry(Isolate *isolate, int id, BailoutType type, GetEntryMode mode=ENSURE_ENTRY_CODE)
static const int kEnumCacheOffset
static const int kHeaderSize
static int OffsetOfElementAt(int index)
static int SizeFor(int length)
static double hole_nan_as_double()
static const int kGlobalProxyOffset
@ kAllUsesTruncatingToInt32
static Handle< T > cast(Handle< S > that)
static const int kInfinityOrNanExponent
static const uint32_t kSignMask
static const int kValueOffset
static const uint32_t kMantissaMask
static const uint32_t kExponentMask
static const int kExponentBias
static const int kExponentShift
static const int kMapOffset
static const int kValueOffset
static const int kCacheStampOffset
static const int kSharedFunctionInfoOffset
static const int kContextOffset
static const int kCodeEntryOffset
static const int kPrototypeOrInitialMapOffset
static const int kHeaderSize
static const int kPropertiesOffset
static const int kInObjectFieldCount
static const int kDynamicAlignmentStateOffset
static const int kFunctionOffset
void FlushIfNecessary(LInstruction *instr, LCodeGen *cgen)
void Free(X87Register reg)
void push(X87Register reg)
void CommitWrite(X87Register reg)
X87Register stack_[X87Register::kMaxNumAllocatableRegisters]
bool Contains(X87Register reg)
int ArrayIndex(X87Register reg)
void Fxch(X87Register reg, int other_slot=0)
void LeavingBlock(int current_block_id, LGoto *goto_instr, LCodeGen *cgen)
void PrepareToWrite(X87Register reg)
void X87Fxch(X87Register reg, int other_slot=0)
bool IsNextEmittedBlock(int block_id) const
void X87PrepareBinaryOp(X87Register left, X87Register right, X87Register result)
void DoStoreKeyedFixedArray(LStoreKeyed *instr)
void RecordSafepointWithRegisters(LPointerMap *pointers, int arguments, Safepoint::DeoptMode mode)
@ RECORD_SIMPLE_SAFEPOINT
@ RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
int inlined_function_count_
bool IsSmi(LConstantOperand *op) const
friend class SafepointGenerator
TranslationBuffer translations_
MemOperand BuildSeqStringOperand(Register string, LOperand *index, String::Encoding encoding)
Condition EmitIsString(Register input, Register temp1, Label *is_not_string, SmiCheck check_needed)
void DoDeferredStackCheck(LStackCheck *instr)
X87Register ToX87Register(LOperand *op) const
SafepointTableBuilder safepoints_
void EmitVectorLoadICRegisters(T *instr)
static Condition TokenToCondition(Token::Value op, bool is_unsigned)
ZoneList< Handle< Object > > deoptimization_literals_
void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal *instr, Label *map_check, Label *bool_load)
void X87CommitWrite(X87Register reg)
void X87PrepareToWrite(X87Register reg)
bool dynamic_frame_alignment_
void PopulateDeoptimizationLiteralsWithInlinedFunctions()
void AddToTranslation(LEnvironment *environment, Translation *translation, LOperand *op, bool is_tagged, bool is_uint32, int *object_index_pointer, int *dematerialized_index_pointer)
ZoneList< LEnvironment * > deoptimizations_
void EmitIntegerMathAbs(LMathAbs *instr)
int32_t ToRepresentation(LConstantOperand *op, const Representation &r) const
void CallRuntimeFromDeferred(Runtime::FunctionId id, int argc, LInstruction *instr, LOperand *context)
void EmitIsConstructCall(Register temp1, Register temp2)
void EmitPushTaggedOperand(LOperand *operand)
void X87Mov(X87Register reg, Operand src, X87OperandType operand=kX87DoubleOperand)
int32_t ToInteger32(LConstantOperand *op) const
LPlatformChunk * chunk() const
void FinishCode(Handle< Code > code)
ExternalReference ToExternalReference(LConstantOperand *op) const
int LookupDestination(int block_id) const
Condition EmitTypeofIs(Label *true_label, Label *false_label, Register input, Handle< String > type_name)
void DoDeferredAllocate(LAllocate *instr)
void RecordSafepoint(LPointerMap *pointers, Safepoint::Kind kind, int arguments, Safepoint::DeoptMode mode)
void DoDeferredTaggedToI(LTaggedToI *instr)
void CallCodeGeneric(Handle< Code > code, RelocInfo::Mode mode, LInstruction *instr, SafepointMode safepoint_mode, TargetAddressStorageMode storage_mode=CAN_INLINE_TARGET_ADDRESS)
void DoDeferredStringCharCodeAt(LStringCharCodeAt *instr)
void CallCode(Handle< Code > code, RelocInfo::Mode mode, LInstruction *instr, TargetAddressStorageMode storage_mode=CAN_INLINE_TARGET_ADDRESS)
Safepoint::Kind expected_safepoint_kind_
ZoneList< LDeferredCode * > deferred_
Operand HighOperand(LOperand *op)
bool GenerateDeferredCode()
void DoDeferredNumberTagIU(LInstruction *instr, LOperand *value, LOperand *temp1, LOperand *temp2, IntegerSignedness signedness)
void X87Fld(Operand src, X87OperandType opts)
Handle< Object > ToHandle(LConstantOperand *op) const
bool NeedsEagerFrame() const
void GenerateBodyInstructionPost(LInstruction *instr) OVERRIDE
void RegisterEnvironmentForDeoptimization(LEnvironment *environment, Safepoint::DeoptMode mode)
friend class LDeferredCode
void LoadContextFromDeferred(LOperand *context)
void GenerateOsrPrologue()
void EmitFlushX87ForDeopt()
Operand BuildFastArrayOperand(LOperand *elements_pointer, LOperand *key, Representation key_representation, ElementsKind elements_kind, uint32_t base_offset)
void X87Free(X87Register reg)
bool NeedsDeferredFrame() const
void DoDeferredInstanceMigration(LCheckMaps *instr, Register object)
bool support_aligned_spilled_doubles_
void DoDeferredLoadMutableDouble(LLoadFieldByIndex *instr, Register result, Register object, Register index)
int DefineDeoptimizationLiteral(Handle< Object > literal)
void DeoptimizeIf(Condition condition, LInstruction *instr, const char *detail, Deoptimizer::BailoutType bailout_type)
void X87LoadForUsage(X87Register reg)
int GetStackSlotCount() const
void CallKnownFunction(Handle< JSFunction > function, int formal_parameter_count, int arity, LInstruction *instr, R1State r1_state)
X87StackMap x87_stack_map_
void WriteTranslation(LEnvironment *environment, Translation *translation)
void DoDeferredMathAbsTaggedHeapNumber(LMathAbs *instr)
void DoLoadKeyedFixedDoubleArray(LLoadKeyed *instr)
bool GenerateSafepointTable()
Operand ToOperand(LOperand *op)
void EmitClassOfTest(Label *if_true, Label *if_false, Handle< String > class_name, Register input, Register temporary, Register temporary2)
void DoLoadKeyedExternalArray(LLoadKeyed *instr)
void EmitReturn(LReturn *instr, bool dynamic_frame_alignment)
void EmitNumberUntagDNoSSE2(LNumberUntagD *instr, Register input, Register temp, X87Register res_reg, NumberUntagDMode mode)
Immediate ToImmediate(LOperand *op, const Representation &r) const
double ToDouble(LConstantOperand *op) const
Register ToRegister(LOperand *op) const
void DoStoreKeyedExternalArray(LStoreKeyed *instr)
void RecordAndWritePosition(int position) OVERRIDE
bool IsInteger32(LConstantOperand *op) const
void PopulateDeoptimizationData(Handle< Code > code)
void DoParallelMove(LParallelMove *move)
void CallRuntime(const Runtime::Function *function, int num_arguments, LInstruction *instr, SaveFPRegsMode save_doubles=kDontSaveFPRegs)
void DoDeferredStringCharFromCode(LStringCharFromCode *instr)
void FlushX87StackIfNecessary(LInstruction *instr)
ZoneList< Deoptimizer::JumpTableEntry > jump_table_
Condition EmitIsObject(Register input, Register temp1, Label *is_not_object, Label *is_object)
void EnsureSpaceForLazyDeopt(int space_needed) OVERRIDE
void GenerateBodyInstructionPre(LInstruction *instr) OVERRIDE
void RecordSafepointWithLazyDeopt(LInstruction *instr, SafepointMode safepoint_mode)
void EmitFalseBranch(InstrType instr, Condition condition)
void DoLoadKeyedFixedArray(LLoadKeyed *instr)
LCodeGen(LChunk *chunk, MacroAssembler *assembler, CompilationInfo *info)
void EmitBranch(InstrType instr, Condition condition)
void DoDeferredNumberTagD(LNumberTagD *instr)
void DoStoreKeyedFixedDoubleArray(LStoreKeyed *instr)
friend class LEnvironment
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
static const Register ReceiverRegister()
static const Register NameRegister()
static void GenerateMiss(MacroAssembler *masm)
static int SafepointRegisterStackIndex(int reg_code)
static const int kIsUndetectable
static const int kBitFieldOffset
static const int kInstanceTypeOffset
static const int kConstructorOffset
static const int kPrototypeOffset
static const int kHashFieldOffset
static const int kMaxRegularHeapObjectSize
static void MaybeCallEntryHook(MacroAssembler *masm)
static const int kNoPosition
static Representation Smi()
static Representation Integer32()
int num_parameters() const
Variable * parameter(int index) const
static const int kHeaderSize
static const int kDontAdaptArgumentsSentinel
static const int kInstanceClassNameOffset
static const int kNativeBitWithinByte
static const int kStrictModeBitWithinByte
static const int kMaxValue
static Smi * FromInt(int value)
static const int kContextOffset
static const int kCallerSPOffset
static const int kMarkerOffset
static const int kCallerFPOffset
static const Register ReceiverRegister()
static const Register NameRegister()
static const Register ValueRegister()
static Handle< Code > initialize_stub(Isolate *isolate, StrictMode strict_mode)
static void Generate(MacroAssembler *masm, Register string, Register index, Register result, Label *call_runtime)
static const unsigned int kContainsCachedArrayIndexMask
static const int32_t kMaxOneByteCharCode
static const int kMaxLength
static const int kLengthOffset
static const int kMaxUtf16CodeUnit
bool Equals(String *other)
static const Register VectorRegister()
static const Register SlotRegister()
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf map
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be expose gc extension under the specified name show built in functions in stack traces use random jit cookie to mask large constants minimum length for automatic enable preparsing CPU profiler sampling interval in microseconds trace out of bounds accesses to external arrays default size of stack region v8 is allowed to maximum length of function source code printed in a stack trace min size of a semi the new space consists of two semi spaces print one trace line following each garbage collection do not print 
trace line after scavenger collection print cumulative GC statistics in only print modified registers Trace simulator debug messages Implied by trace sim abort randomize hashes to avoid predictable hash Fixed seed to use to hash property Print the time it takes to deserialize the snapshot A filename with extra code to be included in the A file to write the raw snapshot bytes to(mksnapshot only)") DEFINE_STRING(raw_context_file
enable harmony numeric enable harmony object literal extensions Optimize object size
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be expose gc extension under the specified name show built in functions in stack traces use random jit cookie to mask large constants minimum length for automatic enable preparsing CPU profiler sampling interval in microseconds trace out of bounds accesses to external arrays default size of stack region v8 is allowed to maximum length of function source code printed in a stack trace min size of a semi the new space consists of two semi spaces print one trace line following each garbage collection do not print 
trace line after scavenger collection print cumulative GC statistics in name
enable harmony numeric literals(0o77, 0b11)") DEFINE_BOOL(harmony_object_literals
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long mode(MIPS only)") DEFINE_BOOL(enable_always_align_csp
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be NULL
enable harmony numeric enable harmony object literal extensions Optimize object Array shift
#define DCHECK_LE(v1, v2)
#define DCHECK_NE(v1, v2)
#define DCHECK(condition)
#define DCHECK_EQ(v1, v2)
@ PRETENURE_OLD_POINTER_SPACE
@ PRETENURE_OLD_DATA_SPACE
bool IsPowerOfTwo32(uint32_t value)
static int Push(SpecialRPOStackFrame *stack, int depth, BasicBlock *child, int unvisited)
Vector< const char > CStrVector(const char *data)
const uint32_t kStringEncodingMask
MemOperand ContextOperand(Register context, int index)
const int kAlignmentPaddingPushed
static bool ExternalArrayOpRequiresTemp(Representation key_representation, ElementsKind elements_kind)
Condition CommuteCondition(Condition cond)
bool EvalComparison(Token::Value op, double op1, double op2)
const uint32_t kTwoByteStringTag
Operand FieldOperand(Register object, int offset)
const int kPointerSizeLog2
@ LAST_NONCALLABLE_SPEC_OBJECT_TYPE
@ NUM_OF_CALLABLE_SPEC_OBJECT_TYPES
@ FIRST_NONCALLABLE_SPEC_OBJECT_TYPE
@ EXTERNAL_UINT16_ELEMENTS
@ EXTERNAL_INT16_ELEMENTS
@ EXTERNAL_UINT8_ELEMENTS
@ EXTERNAL_INT32_ELEMENTS
@ FAST_HOLEY_DOUBLE_ELEMENTS
@ SLOPPY_ARGUMENTS_ELEMENTS
@ EXTERNAL_FLOAT32_ELEMENTS
@ EXTERNAL_FLOAT64_ELEMENTS
@ FAST_HOLEY_SMI_ELEMENTS
@ EXTERNAL_UINT32_ELEMENTS
@ EXTERNAL_UINT8_CLAMPED_ELEMENTS
bool IsSimpleMapChangeTransition(ElementsKind from_kind, ElementsKind to_kind)
const uint32_t kOneByteStringTag
int ElementsKindToShiftSize(ElementsKind elements_kind)
int32_t WhichPowerOf2Abs(int32_t x)
int StackSlotOffset(int index)
bool IsFastPackedElementsKind(ElementsKind kind)
const bool FLAG_enable_slow_asserts
@ NUMBER_CANDIDATE_IS_SMI
@ NUMBER_CANDIDATE_IS_ANY_TAGGED
ElementsKind GetHoleyElementsKind(ElementsKind packed_kind)
AllocationSiteOverrideMode
@ DISABLE_ALLOCATION_SITES
Condition NegateCondition(Condition cond)
static InstanceType TestType(HHasInstanceTypeAndBranch *instr)
@ times_half_pointer_size
const uint32_t kStringRepresentationMask
static Condition BranchCondition(HHasInstanceTypeAndBranch *instr)
OStream & dec(OStream &os)
static int ArgumentsOffsetWithoutFrame(int index)
static Condition ComputeCompareCondition(Token::Value op)
static const char * LabelType(LLabel *label)
const int kAlignmentZapValue
MemOperand GlobalObjectOperand()
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
const intptr_t kSmiTagMask
@ NO_CALL_CONSTRUCTOR_FLAGS
const int kNoAlignmentPadding
bool IsFastSmiElementsKind(ElementsKind kind)
const uint32_t kHoleNanLower32
const uint32_t kSlotsZapValue
const uint32_t kHoleNanUpper32
PerThreadAssertScopeDebugOnly< DEFERRED_HANDLE_DEREFERENCE_ASSERT, true > AllowDeferredHandleDereference
static intptr_t Free(PagedSpace *space, FreeList *free_list, Address start, int size)
Debugger support for the V8 JavaScript engine.
static Register FromAllocationIndex(int index)
bool is(Register reg) const
static const int kMaxNumAllocatableRegisters
static X87Register FromAllocationIndex(int index)
bool is(X87Register reg) const
#define T(name, string, precedence)