44 class SafepointGenerator
FINAL :
public CallWrapper {
47 LPointerMap* pointers,
48 Safepoint::DeoptMode
mode)
57 codegen_->RecordSafepoint(pointers_, deopt_mode_);
62 LPointerMap* pointers_;
63 Safepoint::DeoptMode deopt_mode_;
70 LPhase phase(
"Z_Code generation",
chunk());
87 code->set_safepoint_table_offset(
safepoints_.GetCodeOffset());
88 if (code->is_optimized_code()) RegisterWeakObjectsInOptimizedCode(code);
94 DCHECK(info()->saves_caller_doubles());
96 Comment(
";;; Save clobbered callee double registers");
98 BitVector* doubles =
chunk()->allocated_double_registers();
99 BitVector::Iterator save_iterator(doubles);
100 while (!save_iterator.Done()) {
103 save_iterator.Advance();
110 DCHECK(info()->saves_caller_doubles());
112 Comment(
";;; Restore clobbered callee double registers");
113 BitVector* doubles =
chunk()->allocated_double_registers();
114 BitVector::Iterator save_iterator(doubles);
116 while (!save_iterator.Done()) {
119 save_iterator.Advance();
128 if (info()->IsOptimizing()) {
132 if (strlen(FLAG_stop_at) > 0 &&
133 info_->function()->name()->IsUtf8EqualTo(
CStrVector(FLAG_stop_at))) {
146 if (info_->this_has_uses() &&
147 info_->strict_mode() ==
SLOPPY &&
148 !info_->is_native()) {
150 int receiver_offset = info_->scope()->num_parameters() *
kPointerSize;
151 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
153 __ Branch(&ok,
ne, a2, Operand(at));
164 info()->set_prologue_offset(masm_->pc_offset());
166 if (info()->IsStub()) {
169 __ Prologue(info()->IsCodePreAgingActive());
172 info_->AddNoFrameRange(0, masm_->pc_offset());
178 if (FLAG_debug_code) {
187 __ Branch(&loop,
ne, a0, Operand(
sp));
194 if (info()->saves_caller_doubles()) {
200 if (heap_slots > 0) {
201 Comment(
";;; Allocate local context");
202 bool need_write_barrier =
true;
204 if (heap_slots <= FastNewContextStub::kMaximumSlots) {
205 FastNewContextStub stub(isolate(), heap_slots);
208 need_write_barrier =
false;
220 for (
int i = 0;
i < num_parameters;
i++) {
222 if (var->IsContextSlot()) {
231 if (need_write_barrier) {
232 __ RecordWriteContextSlot(
234 }
else if (FLAG_debug_code) {
236 __ JumpIfInNewSpace(
cp, a0, &done);
237 __ Abort(kExpectedNewSpaceObject);
242 Comment(
";;; End allocate local context");
246 if (FLAG_trace && info()->IsOptimizing()) {
251 return !is_aborted();
271 if (instr->IsCall()) {
274 if (!instr->IsLazyBailout() && !instr->IsGap()) {
283 for (
int i = 0; !is_aborted() &&
i <
deferred_.length();
i++) {
287 instructions_->at(code->instruction_index())->hydrogen_value();
289 chunk()->
graph()->SourcePositionToScriptPosition(value->position()));
291 Comment(
";;; <@%d,#%d> "
292 "-------------------- Deferred %s --------------------",
293 code->instruction_index(),
294 code->instr()->hydrogen_value()->id(),
295 code->instr()->Mnemonic());
296 __ bind(code->entry());
298 Comment(
";;; Build frame");
306 Comment(
";;; Deferred code");
310 Comment(
";;; Destroy frame");
316 __ jmp(code->exit());
321 if (!is_aborted()) status_ =
DONE;
322 return !is_aborted();
328 Label needs_frame, call_deopt_entry;
330 Comment(
";;; -------------------- Jump table --------------------");
333 Register entry_offset = t9;
336 for (
int i = 0;
i < length;
i++) {
337 Deoptimizer::JumpTableEntry* table_entry = &
jump_table_[
i];
338 __ bind(&table_entry->label);
341 Address entry = table_entry->address;
342 DeoptComment(table_entry->reason);
347 __ li(entry_offset, Operand(entry - base));
349 if (table_entry->needs_frame) {
350 DCHECK(!info()->saves_caller_doubles());
351 if (needs_frame.is_bound()) {
352 __ Branch(&needs_frame);
354 __ bind(&needs_frame);
355 Comment(
";;; call deopt with frame");
365 __ bind(&call_deopt_entry);
368 __ Addu(entry_offset, entry_offset,
369 Operand(ExternalReference::ForDeoptEntry(base)));
370 __ Call(entry_offset);
375 bool need_branch = ((
i + 1) != length) || call_deopt_entry.is_bound();
377 if (need_branch)
__ Branch(&call_deopt_entry);
381 if (!call_deopt_entry.is_bound()) {
382 Comment(
";;; call deopt");
383 __ bind(&call_deopt_entry);
385 if (info()->saves_caller_doubles()) {
391 __ Addu(entry_offset, entry_offset,
392 Operand(ExternalReference::ForDeoptEntry(base)));
393 __ Call(entry_offset);
396 __ RecordComment(
"]");
400 if (!is_aborted()) status_ =
DONE;
401 return !is_aborted();
408 return !is_aborted();
429 if (op->IsRegister()) {
431 }
else if (op->IsConstantOperand()) {
432 LConstantOperand* const_op = LConstantOperand::cast(op);
433 HConstant* constant = chunk_->LookupConstant(const_op);
434 Handle<Object> literal = constant->handle(isolate());
435 Representation r = chunk_->LookupLiteralRepresentation(const_op);
436 if (r.IsInteger32()) {
437 DCHECK(literal->IsNumber());
438 __ li(scratch, Operand(
static_cast<int32_t>(literal->Number())));
439 }
else if (r.IsSmi()) {
440 DCHECK(constant->HasSmiValue());
441 __ li(scratch, Operand(
Smi::FromInt(constant->Integer32Value())));
442 }
else if (r.IsDouble()) {
443 Abort(kEmitLoadRegisterUnsupportedDoubleImmediate);
445 DCHECK(r.IsSmiOrTagged());
446 __ li(scratch, literal);
449 }
else if (op->IsStackSlot()) {
459 DCHECK(op->IsDoubleRegister());
467 if (op->IsDoubleRegister()) {
469 }
else if (op->IsConstantOperand()) {
470 LConstantOperand* const_op = LConstantOperand::cast(op);
471 HConstant* constant = chunk_->LookupConstant(const_op);
475 DCHECK(literal->IsNumber());
477 __ mtc1(at, flt_scratch);
478 __ cvt_d_w(dbl_scratch, flt_scratch);
481 Abort(kUnsupportedDoubleImmediate);
483 Abort(kUnsupportedTaggedImmediate);
485 }
else if (op->IsStackSlot()) {
487 __ ldc1(dbl_scratch, mem_op);
496 HConstant* constant = chunk_->LookupConstant(op);
497 DCHECK(chunk_->LookupLiteralRepresentation(op).IsSmiOrTagged());
498 return constant->handle(isolate());
503 return chunk_->LookupLiteralRepresentation(op).IsSmiOrInteger32();
508 return chunk_->LookupLiteralRepresentation(op).IsSmi();
518 const Representation& r)
const {
519 HConstant* constant = chunk_->LookupConstant(op);
520 int32_t value = constant->Integer32Value();
521 if (r.IsInteger32())
return value;
522 DCHECK(r.IsSmiOrTagged());
528 HConstant* constant = chunk_->LookupConstant(op);
534 HConstant* constant = chunk_->LookupConstant(op);
535 DCHECK(constant->HasDoubleValue());
536 return constant->DoubleValue();
541 if (op->IsConstantOperand()) {
542 LConstantOperand* const_op = LConstantOperand::cast(op);
543 HConstant* constant =
chunk()->LookupConstant(const_op);
544 Representation r = chunk_->LookupLiteralRepresentation(const_op);
546 DCHECK(constant->HasSmiValue());
547 return Operand(
Smi::FromInt(constant->Integer32Value()));
548 }
else if (r.IsInteger32()) {
549 DCHECK(constant->HasInteger32Value());
550 return Operand(constant->Integer32Value());
551 }
else if (r.IsDouble()) {
552 Abort(kToOperandUnsupportedDoubleImmediate);
555 return Operand(constant->handle(isolate()));
556 }
else if (op->IsRegister()) {
558 }
else if (op->IsDoubleRegister()) {
559 Abort(kToOperandIsDoubleRegisterUnimplemented);
575 DCHECK(!op->IsRegister());
576 DCHECK(!op->IsDoubleRegister());
577 DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
589 DCHECK(op->IsDoubleStackSlot());
602 Translation* translation) {
603 if (environment ==
NULL)
return;
606 int translation_size = environment->translation_size();
608 int height = translation_size - environment->parameter_count();
611 bool has_closure_id = !info()->closure().is_null() &&
612 !info()->closure().is_identical_to(environment->closure());
613 int closure_id = has_closure_id
615 : Translation::kSelfLiteralId;
617 switch (environment->frame_type()) {
619 translation->BeginJSFrame(environment->ast_id(), closure_id, height);
622 translation->BeginConstructStubFrame(closure_id, translation_size);
625 DCHECK(translation_size == 1);
627 translation->BeginGetterStubFrame(closure_id);
630 DCHECK(translation_size == 2);
632 translation->BeginSetterStubFrame(closure_id);
635 translation->BeginCompiledStubFrame();
638 translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
642 int object_index = 0;
643 int dematerialized_index = 0;
644 for (
int i = 0;
i < translation_size; ++
i) {
645 LOperand* value = environment->values()->at(
i);
649 environment->HasTaggedValueAt(
i),
650 environment->HasUint32ValueAt(
i),
652 &dematerialized_index);
658 Translation* translation,
662 int* object_index_pointer,
663 int* dematerialized_index_pointer) {
664 if (op == LEnvironment::materialization_marker()) {
665 int object_index = (*object_index_pointer)++;
666 if (environment->ObjectIsDuplicateAt(object_index)) {
667 int dupe_of = environment->ObjectDuplicateOfAt(object_index);
668 translation->DuplicateObject(dupe_of);
671 int object_length = environment->ObjectLengthAt(object_index);
672 if (environment->ObjectIsArgumentsAt(object_index)) {
673 translation->BeginArgumentsObject(object_length);
675 translation->BeginCapturedObject(object_length);
677 int dematerialized_index = *dematerialized_index_pointer;
678 int env_offset = environment->translation_size() + dematerialized_index;
679 *dematerialized_index_pointer += object_length;
680 for (
int i = 0;
i < object_length; ++
i) {
681 LOperand* value = environment->values()->at(env_offset +
i);
685 environment->HasTaggedValueAt(env_offset +
i),
686 environment->HasUint32ValueAt(env_offset +
i),
687 object_index_pointer,
688 dematerialized_index_pointer);
693 if (op->IsStackSlot()) {
695 translation->StoreStackSlot(op->index());
696 }
else if (is_uint32) {
697 translation->StoreUint32StackSlot(op->index());
699 translation->StoreInt32StackSlot(op->index());
701 }
else if (op->IsDoubleStackSlot()) {
702 translation->StoreDoubleStackSlot(op->index());
703 }
else if (op->IsRegister()) {
706 translation->StoreRegister(reg);
707 }
else if (is_uint32) {
708 translation->StoreUint32Register(reg);
710 translation->StoreInt32Register(reg);
712 }
else if (op->IsDoubleRegister()) {
714 translation->StoreDoubleRegister(reg);
715 }
else if (op->IsConstantOperand()) {
716 HConstant* constant =
chunk()->LookupConstant(LConstantOperand::cast(op));
718 translation->StoreLiteral(src_index);
727 LInstruction* instr) {
735 SafepointMode safepoint_mode) {
755 if (context->IsRegister()) {
757 }
else if (context->IsStackSlot()) {
759 }
else if (context->IsConstantOperand()) {
760 HConstant* constant =
761 chunk_->LookupConstant(LConstantOperand::cast(context));
774 __ CallRuntimeSaveDoubles(
id);
776 instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);
781 Safepoint::DeoptMode
mode) {
782 environment->set_has_been_used();
783 if (!environment->HasBeenRegistered()) {
798 int jsframe_count = 0;
805 Translation translation(&
translations_, frame_count, jsframe_count, zone());
808 int pc_offset = masm()->pc_offset();
809 environment->Register(deoptimization_index,
811 (
mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
823 DCHECK(environment->HasBeenRegistered());
824 int id = environment->deoptimization_index();
825 DCHECK(info()->IsOptimizing() || info()->IsStub());
829 Abort(kBailoutWasNotPrepared);
833 if (FLAG_deopt_every_n_times != 0 && !info()->IsStub()) {
835 ExternalReference count = ExternalReference::stress_deopt_count(isolate());
842 __ li(a1,
Operand(FLAG_deopt_every_n_times));
852 if (info()->ShouldTrapOnDeopt()) {
854 if (condition !=
al) {
857 __ stop(
"trap_on_deopt");
867 !info()->saves_caller_doubles()) {
868 DeoptComment(reason);
890 DeoptimizeIf(condition, instr, bailout_type, detail, src1, src2);
896 if (length == 0)
return;
902 data->SetTranslationByteArray(*translations);
904 data->SetOptimizationId(
Smi::FromInt(info_->optimization_id()));
905 if (info_->IsOptimizing()) {
908 data->SetSharedFunctionInfo(*info_->shared_info());
922 data->SetOsrAstId(
Smi::FromInt(info_->osr_ast_id().ToInt()));
926 for (
int i = 0;
i < length;
i++) {
928 data->SetAstId(
i, env->ast_id());
929 data->SetTranslationIndex(
i,
Smi::FromInt(env->translation_index()));
930 data->SetArgumentsStackHeight(
i,
934 code->set_deoptimization_data(*data);
951 const ZoneList<Handle<JSFunction> >* inlined_closures =
952 chunk()->inlined_closures();
954 for (
int i = 0, length = inlined_closures->length();
965 LInstruction* instr, SafepointMode safepoint_mode) {
971 instr->pointer_map(), 0, Safepoint::kLazyDeopt);
977 LPointerMap* pointers,
978 Safepoint::Kind kind,
980 Safepoint::DeoptMode deopt_mode) {
983 const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands();
984 Safepoint safepoint =
safepoints_.DefineSafepoint(masm(),
985 kind, arguments, deopt_mode);
986 for (
int i = 0;
i < operands->length();
i++) {
987 LOperand* pointer = operands->at(
i);
988 if (pointer->IsStackSlot()) {
989 safepoint.DefinePointerSlot(pointer->index(), zone());
990 }
else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
991 safepoint.DefinePointerRegister(
ToRegister(pointer), zone());
998 Safepoint::DeoptMode deopt_mode) {
1004 LPointerMap empty_pointers(zone());
1011 Safepoint::DeoptMode deopt_mode) {
1013 pointers, Safepoint::kWithRegisters, arguments, deopt_mode);
1019 masm()->positions_recorder()->RecordPosition(position);
1020 masm()->positions_recorder()->WriteRecordedPositions();
1025 if (label->is_loop_header())
return " (loop header)";
1026 if (label->is_osr_entry())
return " (OSR entry)";
1031 void LCodeGen::DoLabel(LLabel* label) {
1032 Comment(
";;; <@%d,#%d> -------------------- B%d%s --------------------",
1033 current_instruction_,
1034 label->hydrogen_value()->id(),
1037 __ bind(label->label());
1038 current_block_ = label->block_id();
1053 LParallelMove* move = gap->GetParallelMove(inner_pos);
1059 void LCodeGen::DoInstructionGap(LInstructionGap* instr) {
1064 void LCodeGen::DoParameter(LParameter* instr) {
1069 void LCodeGen::DoCallStub(LCallStub* instr) {
1072 switch (instr->hydrogen()->major_key()) {
1073 case CodeStub::RegExpExec: {
1074 RegExpExecStub stub(isolate());
1078 case CodeStub::SubString: {
1079 SubStringStub stub(isolate());
1083 case CodeStub::StringCompare: {
1084 StringCompareStub stub(isolate());
1094 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
1099 void LCodeGen::DoModByPowerOf2I(LModByPowerOf2I* instr) {
1100 Register dividend =
ToRegister(instr->dividend());
1101 int32_t divisor = instr->divisor();
1110 HMod* hmod = instr->hydrogen();
1111 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1);
1112 Label dividend_is_not_negative, done;
1115 __ Branch(÷nd_is_not_negative,
ge, dividend, Operand(zero_reg));
1117 __ subu(dividend, zero_reg, dividend);
1118 __ And(dividend, dividend, Operand(mask));
1120 DeoptimizeIf(
eq, instr,
"minus zero", dividend, Operand(zero_reg));
1123 __ subu(dividend, zero_reg, dividend);
1126 __ bind(÷nd_is_not_negative);
1127 __ And(dividend, dividend, Operand(mask));
1132 void LCodeGen::DoModByConstI(LModByConstI* instr) {
1133 Register dividend =
ToRegister(instr->dividend());
1134 int32_t divisor = instr->divisor();
1135 Register result =
ToRegister(instr->result());
1136 DCHECK(!dividend.is(result));
1143 __ TruncatingDiv(result, dividend,
Abs(divisor));
1144 __ Mul(result, result, Operand(
Abs(divisor)));
1145 __ Subu(result, dividend, Operand(result));
1148 HMod* hmod = instr->hydrogen();
1150 Label remainder_not_zero;
1151 __ Branch(&remainder_not_zero,
ne, result, Operand(zero_reg));
1152 DeoptimizeIf(
lt, instr,
"minus zero", dividend, Operand(zero_reg));
1153 __ bind(&remainder_not_zero);
1158 void LCodeGen::DoModI(LModI* instr) {
1159 HMod* hmod = instr->hydrogen();
1160 const Register left_reg =
ToRegister(instr->left());
1161 const Register right_reg =
ToRegister(instr->right());
1162 const Register result_reg =
ToRegister(instr->result());
1165 __ Mod(result_reg, left_reg, right_reg);
1171 DeoptimizeIf(
eq, instr,
"division by zero", right_reg, Operand(zero_reg));
1177 Label no_overflow_possible;
1178 __ Branch(&no_overflow_possible,
ne, left_reg, Operand(
kMinInt));
1182 __ Branch(&no_overflow_possible,
ne, right_reg, Operand(-1));
1184 __ mov(result_reg, zero_reg);
1186 __ bind(&no_overflow_possible);
1190 __ Branch(&done,
ge, left_reg, Operand(zero_reg));
1192 DeoptimizeIf(
eq, instr,
"minus zero", result_reg, Operand(zero_reg));
1198 void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) {
1199 Register dividend =
ToRegister(instr->dividend());
1200 int32_t divisor = instr->divisor();
1201 Register result =
ToRegister(instr->result());
1203 DCHECK(!result.is(dividend));
1206 HDiv* hdiv = instr->hydrogen();
1208 DeoptimizeIf(
eq, instr,
"minus zero", dividend, Operand(zero_reg));
1216 divisor != 1 && divisor != -1) {
1217 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1);
1218 __ And(at, dividend, Operand(mask));
1219 DeoptimizeIf(
ne, instr,
"lost precision", at, Operand(zero_reg));
1222 if (divisor == -1) {
1223 __ Subu(result, zero_reg, dividend);
1228 __ Move(result, dividend);
1229 }
else if (
shift == 1) {
1230 __ srl(result, dividend, 31);
1231 __ Addu(result, dividend, Operand(result));
1233 __ sra(result, dividend, 31);
1234 __ srl(result, result, 32 -
shift);
1235 __ Addu(result, dividend, Operand(result));
1238 if (divisor < 0)
__ Subu(result, zero_reg, result);
1242 void LCodeGen::DoDivByConstI(LDivByConstI* instr) {
1243 Register dividend =
ToRegister(instr->dividend());
1244 int32_t divisor = instr->divisor();
1245 Register result =
ToRegister(instr->result());
1246 DCHECK(!dividend.is(result));
1254 HDiv* hdiv = instr->hydrogen();
1256 DeoptimizeIf(
eq, instr,
"minus zero", dividend, Operand(zero_reg));
1259 __ TruncatingDiv(result, dividend,
Abs(divisor));
1260 if (divisor < 0)
__ Subu(result, zero_reg, result);
1263 __ Mul(
scratch0(), result, Operand(divisor));
1271 void LCodeGen::DoDivI(LDivI* instr) {
1272 HBinaryOperation* hdiv = instr->hydrogen();
1273 Register dividend =
ToRegister(instr->dividend());
1274 Register divisor =
ToRegister(instr->divisor());
1275 const Register result =
ToRegister(instr->result());
1276 Register remainder =
ToRegister(instr->temp());
1280 __ Div(remainder, result, dividend, divisor);
1284 DeoptimizeIf(
eq, instr,
"division by zero", divisor, Operand(zero_reg));
1289 Label left_not_zero;
1290 __ Branch(&left_not_zero,
ne, dividend, Operand(zero_reg));
1291 DeoptimizeIf(
lt, instr,
"minus zero", divisor, Operand(zero_reg));
1292 __ bind(&left_not_zero);
1298 Label left_not_min_int;
1299 __ Branch(&left_not_min_int,
ne, dividend, Operand(
kMinInt));
1301 __ bind(&left_not_min_int);
1305 DeoptimizeIf(
ne, instr,
"lost precision", remainder, Operand(zero_reg));
1310 void LCodeGen::DoMultiplyAddD(LMultiplyAddD* instr) {
1318 __ madd_d(addend, addend, multiplier, multiplicand);
1322 void LCodeGen::DoFlooringDivByPowerOf2I(LFlooringDivByPowerOf2I* instr) {
1323 Register dividend =
ToRegister(instr->dividend());
1324 Register result =
ToRegister(instr->result());
1325 int32_t divisor = instr->divisor();
1326 Register scratch = result.is(dividend) ?
scratch0() : dividend;
1327 DCHECK(!result.is(dividend) || !scratch.is(dividend));
1331 __ Move(result, dividend);
1339 __ sra(result, dividend,
shift);
1347 __ Move(scratch, dividend);
1349 __ Subu(result, zero_reg, dividend);
1351 DeoptimizeIf(
eq, instr,
"minus zero", result, Operand(zero_reg));
1355 __ Xor(scratch, scratch, result);
1356 if (divisor == -1) {
1365 __ sra(result, result,
shift);
1371 __ li(result, Operand(
kMinInt / divisor));
1374 __ sra(result, result,
shift);
1379 void LCodeGen::DoFlooringDivByConstI(LFlooringDivByConstI* instr) {
1380 Register dividend =
ToRegister(instr->dividend());
1381 int32_t divisor = instr->divisor();
1382 Register result =
ToRegister(instr->result());
1383 DCHECK(!dividend.is(result));
1391 HMathFloorOfDiv* hdiv = instr->hydrogen();
1393 DeoptimizeIf(
eq, instr,
"minus zero", dividend, Operand(zero_reg));
1400 __ TruncatingDiv(result, dividend,
Abs(divisor));
1401 if (divisor < 0)
__ Subu(result, zero_reg, result);
1408 DCHECK(!temp.is(dividend) && !temp.is(result));
1409 Label needs_adjustment, done;
1410 __ Branch(&needs_adjustment, divisor > 0 ?
lt :
gt,
1411 dividend, Operand(zero_reg));
1412 __ TruncatingDiv(result, dividend,
Abs(divisor));
1413 if (divisor < 0)
__ Subu(result, zero_reg, result);
1415 __ bind(&needs_adjustment);
1416 __ Addu(temp, dividend, Operand(divisor > 0 ? 1 : -1));
1417 __ TruncatingDiv(result, temp,
Abs(divisor));
1418 if (divisor < 0)
__ Subu(result, zero_reg, result);
1419 __ Subu(result, result, Operand(1));
1425 void LCodeGen::DoFlooringDivI(LFlooringDivI* instr) {
1426 HBinaryOperation* hdiv = instr->hydrogen();
1427 Register dividend =
ToRegister(instr->dividend());
1428 Register divisor =
ToRegister(instr->divisor());
1429 const Register result =
ToRegister(instr->result());
1433 __ Div(remainder, result, dividend, divisor);
1437 DeoptimizeIf(
eq, instr,
"division by zero", divisor, Operand(zero_reg));
1442 Label left_not_zero;
1443 __ Branch(&left_not_zero,
ne, dividend, Operand(zero_reg));
1444 DeoptimizeIf(
lt, instr,
"minus zero", divisor, Operand(zero_reg));
1445 __ bind(&left_not_zero);
1451 Label left_not_min_int;
1452 __ Branch(&left_not_min_int,
ne, dividend, Operand(
kMinInt));
1454 __ bind(&left_not_min_int);
1460 __ Xor(remainder, remainder, Operand(divisor));
1461 __ Branch(&done,
ge, remainder, Operand(zero_reg));
1462 __ Subu(result, result, Operand(1));
1467 void LCodeGen::DoMulI(LMulI* instr) {
1469 Register result =
ToRegister(instr->result());
1472 LOperand* right_op = instr->right();
1474 bool bailout_on_minus_zero =
1478 if (right_op->IsConstantOperand()) {
1481 if (bailout_on_minus_zero && (constant < 0)) {
1490 __ SubuAndCheckForOverflow(result, zero_reg, left, scratch);
1493 __ Subu(result, zero_reg, left);
1497 if (bailout_on_minus_zero) {
1502 __ mov(result, zero_reg);
1506 __ Move(result, left);
1512 int32_t mask = constant >> 31;
1513 uint32_t constant_abs = (constant + mask) ^ mask;
1519 if (constant < 0)
__ Subu(result, zero_reg, result);
1523 __ Addu(result, scratch, left);
1525 if (constant < 0)
__ Subu(result, zero_reg, result);
1529 __ Subu(result, scratch, left);
1531 if (constant < 0)
__ Subu(result, zero_reg, result);
1534 __ li(at, constant);
1535 __ Mul(result, left, at);
1540 DCHECK(right_op->IsRegister());
1545 if (instr->hydrogen()->representation().IsSmi()) {
1546 __ SmiUntag(result, left);
1547 __ Mul(scratch, result, result, right);
1549 __ Mul(scratch, result, left, right);
1551 __ sra(at, result, 31);
1554 if (instr->hydrogen()->representation().IsSmi()) {
1555 __ SmiUntag(result, left);
1556 __ Mul(result, result, right);
1558 __ Mul(result, left, right);
1562 if (bailout_on_minus_zero) {
1564 __ Xor(at, left, right);
1565 __ Branch(&done,
ge, at, Operand(zero_reg));
1567 DeoptimizeIf(
eq, instr,
"minus zero", result, Operand(zero_reg));
1574 void LCodeGen::DoBitI(LBitI* instr) {
1575 LOperand* left_op = instr->left();
1576 LOperand* right_op = instr->right();
1577 DCHECK(left_op->IsRegister());
1579 Register result =
ToRegister(instr->result());
1582 if (right_op->IsStackSlot()) {
1585 DCHECK(right_op->IsRegister() || right_op->IsConstantOperand());
1589 switch (instr->op()) {
1590 case Token::BIT_AND:
1591 __ And(result, left, right);
1594 __ Or(result, left, right);
1596 case Token::BIT_XOR:
1597 if (right_op->IsConstantOperand() && right.immediate() ==
int32_t(~0)) {
1598 __ Nor(result, zero_reg, left);
1600 __ Xor(result, left, right);
1610 void LCodeGen::DoShiftI(LShiftI* instr) {
1613 LOperand* right_op = instr->right();
1615 Register result =
ToRegister(instr->result());
1618 if (right_op->IsRegister()) {
1621 switch (instr->op()) {
1630 if (instr->can_deopt()) {
1631 DeoptimizeIf(
lt, instr,
"negative value", result, Operand(zero_reg));
1643 int value =
ToInteger32(LConstantOperand::cast(right_op));
1644 uint8_t shift_count =
static_cast<uint8_t
>(value & 0x1F);
1645 switch (instr->op()) {
1647 if (shift_count != 0) {
1648 __ Ror(result, left, Operand(shift_count));
1650 __ Move(result, left);
1654 if (shift_count != 0) {
1655 __ sra(result, left, shift_count);
1657 __ Move(result, left);
1661 if (shift_count != 0) {
1662 __ srl(result, left, shift_count);
1664 if (instr->can_deopt()) {
1665 __ And(at, left, Operand(0x80000000));
1666 DeoptimizeIf(
ne, instr,
"negative value", at, Operand(zero_reg));
1668 __ Move(result, left);
1672 if (shift_count != 0) {
1673 if (instr->hydrogen_value()->representation().IsSmi() &&
1674 instr->can_deopt()) {
1675 if (shift_count != 1) {
1676 __ sll(result, left, shift_count - 1);
1677 __ SmiTagCheckOverflow(result, result, scratch);
1679 __ SmiTagCheckOverflow(result, left, scratch);
1683 __ sll(result, left, shift_count);
1686 __ Move(result, left);
1697 void LCodeGen::DoSubI(LSubI* instr) {
1698 LOperand* left = instr->left();
1699 LOperand* right = instr->right();
1700 LOperand* result = instr->result();
1703 if (!can_overflow) {
1704 if (right->IsStackSlot()) {
1708 DCHECK(right->IsRegister() || right->IsConstantOperand());
1714 if (right->IsStackSlot() || right->IsConstantOperand()) {
1721 DCHECK(right->IsRegister());
1734 void LCodeGen::DoConstantI(LConstantI* instr) {
1735 __ li(
ToRegister(instr->result()), Operand(instr->value()));
1739 void LCodeGen::DoConstantS(LConstantS* instr) {
1740 __ li(
ToRegister(instr->result()), Operand(instr->value()));
1744 void LCodeGen::DoConstantD(LConstantD* instr) {
1745 DCHECK(instr->result()->IsDoubleRegister());
1747 double v = instr->value();
1752 void LCodeGen::DoConstantE(LConstantE* instr) {
1753 __ li(
ToRegister(instr->result()), Operand(instr->value()));
1757 void LCodeGen::DoConstantT(LConstantT* instr) {
1758 Handle<Object>
object = instr->value(isolate());
1764 void LCodeGen::DoMapEnumLength(LMapEnumLength* instr) {
1765 Register result =
ToRegister(instr->result());
1767 __ EnumLength(result,
map);
1771 void LCodeGen::DoDateField(LDateField* instr) {
1773 Register result =
ToRegister(instr->result());
1774 Register scratch =
ToRegister(instr->temp());
1775 Smi* index = instr->index();
1776 Label runtime, done;
1780 DCHECK(!scratch.is(
object));
1782 __ SmiTst(
object, at);
1784 __ GetObjectType(
object, scratch, scratch);
1787 if (index->value() == 0) {
1791 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
1792 __ li(scratch, Operand(stamp));
1801 __ PrepareCallCFunction(2, scratch);
1802 __ li(a1, Operand(index));
1803 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
1812 if (index->IsConstantOperand()) {
1813 int offset =
ToInteger32(LConstantOperand::cast(index));
1821 DCHECK(!scratch.is(
string));
1828 __ Addu(scratch,
string, scratch);
1834 void LCodeGen::DoSeqStringGetChar(LSeqStringGetChar* instr) {
1836 Register
string =
ToRegister(instr->string());
1837 Register result =
ToRegister(instr->result());
1839 if (FLAG_debug_code) {
1844 __ And(scratch, scratch,
1849 ? one_byte_seq_type : two_byte_seq_type));
1850 __ Check(
eq, kUnexpectedStringType, at, Operand(zero_reg));
1855 __ lbu(result, operand);
1857 __ lhu(result, operand);
1862 void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) {
1864 Register
string =
ToRegister(instr->string());
1867 if (FLAG_debug_code) {
1874 ? one_byte_seq_type : two_byte_seq_type;
1875 __ EmitSeqStringSetCharCheck(
string, index, value, scratch, encoding_mask);
1880 __ sb(value, operand);
1882 __ sh(value, operand);
1887 void LCodeGen::DoAddI(LAddI* instr) {
1888 LOperand* left = instr->left();
1889 LOperand* right = instr->right();
1890 LOperand* result = instr->result();
1893 if (!can_overflow) {
1894 if (right->IsStackSlot()) {
1898 DCHECK(right->IsRegister() || right->IsConstantOperand());
1904 if (right->IsStackSlot() ||
1905 right->IsConstantOperand()) {
1912 DCHECK(right->IsRegister());
1925 void LCodeGen::DoMathMinMax(LMathMinMax* instr) {
1926 LOperand* left = instr->left();
1927 LOperand* right = instr->right();
1928 HMathMinMax::Operation operation = instr->hydrogen()->operation();
1929 Condition condition = (operation == HMathMinMax::kMathMin) ?
le :
ge;
1930 if (instr->hydrogen()->representation().IsSmiOrInteger32()) {
1933 Register result_reg =
ToRegister(instr->result());
1934 Label return_right, done;
1936 __ Slt(scratch, left_reg, Operand(right_reg));
1937 if (condition ==
ge) {
1938 __ Movz(result_reg, left_reg, scratch);
1939 __ Movn(result_reg, right_reg, scratch);
1942 __ Movn(result_reg, left_reg, scratch);
1943 __ Movz(result_reg, right_reg, scratch);
1946 DCHECK(instr->hydrogen()->representation().IsDouble());
1950 Label check_nan_left, check_zero, return_left, return_right, done;
1951 __ BranchF(&check_zero, &check_nan_left,
eq, left_reg, right_reg);
1952 __ BranchF(&return_left,
NULL, condition, left_reg, right_reg);
1953 __ Branch(&return_right);
1955 __ bind(&check_zero);
1959 if (operation == HMathMinMax::kMathMin) {
1960 __ neg_d(left_reg, left_reg);
1961 __ sub_d(result_reg, left_reg, right_reg);
1962 __ neg_d(result_reg, result_reg);
1964 __ add_d(result_reg, left_reg, right_reg);
1968 __ bind(&check_nan_left);
1970 __ BranchF(
NULL, &return_left,
eq, left_reg, left_reg);
1971 __ bind(&return_right);
1972 if (!right_reg.is(result_reg)) {
1973 __ mov_d(result_reg, right_reg);
1977 __ bind(&return_left);
1978 if (!left_reg.is(result_reg)) {
1979 __ mov_d(result_reg, left_reg);
1986 void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
1990 switch (instr->op()) {
1992 __ add_d(result, left, right);
1995 __ sub_d(result, left, right);
1998 __ mul_d(result, left, right);
2001 __ div_d(result, left, right);
2005 RegList saved_regs = a0.bit() | a1.bit() | a2.bit() | a3.bit();
2006 __ MultiPush(saved_regs);
2009 __ MovToFloatParameters(left, right);
2011 ExternalReference::mod_two_doubles_operation(isolate()),
2014 __ MovFromFloatResult(result);
2017 __ MultiPop(saved_regs);
2027 void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
2034 CodeFactory::BinaryOpIC(isolate(), instr->op(),
NO_OVERWRITE).code();
2042 template<
class InstrType>
2047 int left_block = instr->TrueDestination(chunk_);
2048 int right_block = instr->FalseDestination(chunk_);
2050 int next_block = GetNextEmittedBlock();
2051 if (right_block == left_block || condition ==
al) {
2053 }
else if (left_block == next_block) {
2054 __ Branch(chunk_->GetAssemblyLabel(right_block),
2056 }
else if (right_block == next_block) {
2057 __ Branch(chunk_->GetAssemblyLabel(left_block), condition, src1, src2);
2059 __ Branch(chunk_->GetAssemblyLabel(left_block), condition, src1, src2);
2060 __ Branch(chunk_->GetAssemblyLabel(right_block));
2065 template<
class InstrType>
2070 int right_block = instr->FalseDestination(chunk_);
2071 int left_block = instr->TrueDestination(chunk_);
2073 int next_block = GetNextEmittedBlock();
2074 if (right_block == left_block) {
2076 }
else if (left_block == next_block) {
2077 __ BranchF(chunk_->GetAssemblyLabel(right_block),
NULL,
2079 }
else if (right_block == next_block) {
2080 __ BranchF(chunk_->GetAssemblyLabel(left_block),
NULL,
2081 condition, src1, src2);
2083 __ BranchF(chunk_->GetAssemblyLabel(left_block),
NULL,
2084 condition, src1, src2);
2085 __ Branch(chunk_->GetAssemblyLabel(right_block));
2090 template<
class InstrType>
2095 int false_block = instr->FalseDestination(chunk_);
2096 __ Branch(chunk_->GetAssemblyLabel(false_block), condition, src1, src2);
2100 template<
class InstrType>
2105 int false_block = instr->FalseDestination(chunk_);
2106 __ BranchF(chunk_->GetAssemblyLabel(false_block),
NULL,
2107 condition, src1, src2);
2111 void LCodeGen::DoDebugBreak(LDebugBreak* instr) {
2112 __ stop(
"LDebugBreak");
2116 void LCodeGen::DoBranch(LBranch* instr) {
2117 Representation r = instr->hydrogen()->value()->representation();
2118 if (r.IsInteger32() || r.IsSmi()) {
2119 DCHECK(!info()->IsStub());
2122 }
else if (r.IsDouble()) {
2123 DCHECK(!info()->IsStub());
2130 HType type = instr->hydrogen()->value()->type();
2131 if (type.IsBoolean()) {
2132 DCHECK(!info()->IsStub());
2133 __ LoadRoot(at, Heap::kTrueValueRootIndex);
2135 }
else if (type.IsSmi()) {
2136 DCHECK(!info()->IsStub());
2138 }
else if (type.IsJSArray()) {
2139 DCHECK(!info()->IsStub());
2141 }
else if (type.IsHeapNumber()) {
2142 DCHECK(!info()->IsStub());
2147 }
else if (type.IsString()) {
2148 DCHECK(!info()->IsStub());
2152 ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
2158 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
2159 __ Branch(instr->FalseLabel(chunk_),
eq, reg, Operand(at));
2163 __ LoadRoot(at, Heap::kTrueValueRootIndex);
2164 __ Branch(instr->TrueLabel(chunk_),
eq, reg, Operand(at));
2165 __ LoadRoot(at, Heap::kFalseValueRootIndex);
2166 __ Branch(instr->FalseLabel(chunk_),
eq, reg, Operand(at));
2170 __ LoadRoot(at, Heap::kNullValueRootIndex);
2171 __ Branch(instr->FalseLabel(chunk_),
eq, reg, Operand(at));
2176 __ Branch(instr->FalseLabel(chunk_),
eq, reg, Operand(zero_reg));
2177 __ JumpIfSmi(reg, instr->TrueLabel(chunk_));
2178 }
else if (expected.NeedsMap()) {
2185 if (expected.NeedsMap()) {
2187 if (expected.CanBeUndetectable()) {
2191 __ Branch(instr->FalseLabel(chunk_),
ne, at, Operand(zero_reg));
2198 __ Branch(instr->TrueLabel(chunk_),
2208 __ Branch(instr->TrueLabel(chunk_),
ne, at, Operand(zero_reg));
2209 __ Branch(instr->FalseLabel(chunk_));
2210 __ bind(¬_string);
2215 const Register scratch =
scratch1();
2217 __ Branch(instr->TrueLabel(chunk_),
eq, scratch, Operand(
SYMBOL_TYPE));
2223 Label not_heap_number;
2224 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
2225 __ Branch(¬_heap_number,
ne,
map, Operand(at));
2227 __ BranchF(instr->TrueLabel(chunk_), instr->FalseLabel(chunk_),
2230 __ Branch(instr->FalseLabel(chunk_));
2231 __ bind(¬_heap_number);
2234 if (!expected.IsGeneric()) {
2252 void LCodeGen::DoGoto(LGoto* instr) {
2261 case Token::EQ_STRICT:
2265 case Token::NE_STRICT:
2269 cond = is_unsigned ?
lo :
lt;
2272 cond = is_unsigned ?
hi :
gt;
2275 cond = is_unsigned ?
ls :
le;
2278 cond = is_unsigned ?
hs :
ge;
2281 case Token::INSTANCEOF:
2289 void LCodeGen::DoCompareNumericAndBranch(LCompareNumericAndBranch* instr) {
2290 LOperand* left = instr->left();
2291 LOperand* right = instr->right();
2297 if (left->IsConstantOperand() && right->IsConstantOperand()) {
2299 double left_val =
ToDouble(LConstantOperand::cast(left));
2300 double right_val =
ToDouble(LConstantOperand::cast(right));
2301 int next_block =
EvalComparison(instr->op(), left_val, right_val) ?
2302 instr->TrueDestination(chunk_) : instr->FalseDestination(chunk_);
2305 if (instr->is_double()) {
2313 __ BranchF(
NULL, instr->FalseLabel(chunk_),
eq,
2314 left_reg, right_reg);
2319 Operand cmp_right = Operand(0);
2321 if (right->IsConstantOperand()) {
2323 if (instr->hydrogen_value()->representation().IsSmi()) {
2328 cmp_right = Operand(value);
2330 }
else if (left->IsConstantOperand()) {
2332 if (instr->hydrogen_value()->representation().IsSmi()) {
2337 cmp_right = Operand(value);
2346 EmitBranch(instr, cond, cmp_left, cmp_right);
2352 void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
2360 void LCodeGen::DoCmpHoleAndBranch(LCmpHoleAndBranch* instr) {
2361 if (instr->hydrogen()->representation().IsTagged()) {
2362 Register input_reg =
ToRegister(instr->object());
2363 __ li(at, Operand(factory()->the_hole_value()));
2372 __ FmoveHigh(scratch, input_reg);
2377 void LCodeGen::DoCompareMinusZeroAndBranch(LCompareMinusZeroAndBranch* instr) {
2378 Representation rep = instr->hydrogen()->value()->representation();
2379 DCHECK(!rep.IsInteger32());
2380 Register scratch =
ToRegister(instr->temp());
2382 if (rep.IsDouble()) {
2385 __ FmoveHigh(scratch, value);
2386 __ li(at, 0x80000000);
2391 Heap::kHeapNumberMapRootIndex,
2392 instr->FalseLabel(
chunk()),
2397 __ mov(at, zero_reg);
2406 Label* is_not_object,
2408 __ JumpIfSmi(input, is_not_object);
2410 __ LoadRoot(temp2, Heap::kNullValueRootIndex);
2418 __ Branch(is_not_object,
ne, temp2,
Operand(zero_reg));
2422 __ Branch(is_not_object,
2429 void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
2436 instr->FalseLabel(chunk_), instr->TrueLabel(chunk_));
2445 Label* is_not_string,
2448 __ JumpIfSmi(input, is_not_string);
2450 __ GetObjectType(input, temp1, temp1);
2456 void LCodeGen::DoIsStringAndBranch(LIsStringAndBranch* instr) {
2461 instr->hydrogen()->value()->type().IsHeapObject()
2464 EmitIsString(reg, temp1, instr->FalseLabel(chunk_), check_needed);
2471 void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
2478 void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
2482 if (!instr->hydrogen()->value()->type().IsHeapObject()) {
2483 __ JumpIfSmi(input, instr->FalseLabel(chunk_));
2494 case Token::EQ_STRICT:
2512 void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
2516 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
2521 EmitBranch(instr, condition, v0, Operand(zero_reg));
2537 if (from ==
to)
return eq;
2545 void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
2549 if (!instr->hydrogen()->value()->type().IsHeapObject()) {
2550 __ JumpIfSmi(input, instr->FalseLabel(chunk_));
2553 __ GetObjectType(input, scratch, scratch);
2557 Operand(
TestType(instr->hydrogen())));
2561 void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
2563 Register result =
ToRegister(instr->result());
2565 __ AssertString(input);
2568 __ IndexFromHash(result, result);
2572 void LCodeGen::DoHasCachedArrayIndexAndBranch(
2573 LHasCachedArrayIndexAndBranch* instr) {
2588 Handle<String>class_name,
2593 DCHECK(!input.is(temp2));
2596 __ JumpIfSmi(input, is_false);
2598 if (
String::Equals(isolate()->factory()->Function_string(), class_name)) {
2608 __ GetObjectType(input, temp, temp2);
2615 __ GetObjectType(input, temp, temp2);
2626 __ GetObjectType(temp, temp2, temp2);
2627 if (
String::Equals(class_name, isolate()->factory()->Object_string())) {
2650 void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
2654 Handle<String> class_name = instr->hydrogen()->class_name();
2657 class_name, input, temp, temp2);
2663 void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
2672 void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
2674 Label true_label, done;
2677 Register result =
ToRegister(instr->result());
2683 __ Branch(&true_label,
eq, result, Operand(zero_reg));
2684 __ li(result, Operand(factory()->false_value()));
2686 __ bind(&true_label);
2687 __ li(result, Operand(factory()->true_value()));
2692 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
2695 DeferredInstanceOfKnownGlobal(
LCodeGen* codegen,
2696 LInstanceOfKnownGlobal* instr)
2699 codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
2701 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
2702 Label* map_check() {
return &map_check_; }
2705 LInstanceOfKnownGlobal* instr_;
2709 DeferredInstanceOfKnownGlobal* deferred;
2710 deferred =
new(zone()) DeferredInstanceOfKnownGlobal(
this, instr);
2712 Label done, false_result;
2713 Register
object =
ToRegister(instr->value());
2715 Register result =
ToRegister(instr->result());
2721 __ JumpIfSmi(
object, &false_result);
2727 Register
map = temp;
2731 __ bind(deferred->map_check());
2735 Handle<Cell> cell = factory()->NewCell(factory()->the_hole_value());
2736 __ li(at, Operand(Handle<Object>(cell)));
2738 __ BranchShort(&cache_miss,
ne,
map, Operand(at));
2747 __ bind(&cache_miss);
2749 __ LoadRoot(temp, Heap::kNullValueRootIndex);
2750 __ Branch(&false_result,
eq,
object, Operand(temp));
2754 __ Branch(&false_result,
cc, temp, Operand(zero_reg));
2757 __ Branch(deferred->entry());
2759 __ bind(&false_result);
2760 __ LoadRoot(result, Heap::kFalseValueRootIndex);
2764 __ bind(deferred->exit());
2783 PushSafepointRegistersScope
scope(
this);
2792 static const int kAdditionalDelta = 7;
2793 int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta;
2794 Label before_push_delta;
2795 __ bind(&before_push_delta);
2799 __ StoreToSafepointRegisterSlot(temp, temp);
2805 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
2806 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
2809 __ StoreToSafepointRegisterSlot(result, result);
2813 void LCodeGen::DoCmpT(LCmpT* instr) {
2817 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
2828 __ LoadRoot(
ToRegister(instr->result()), Heap::kTrueValueRootIndex);
2829 DCHECK_EQ(1, masm()->InstructionsGeneratedSince(&check));
2830 __ LoadRoot(
ToRegister(instr->result()), Heap::kFalseValueRootIndex);
2835 void LCodeGen::DoReturn(LReturn* instr) {
2836 if (FLAG_trace && info()->IsOptimizing()) {
2845 if (info()->saves_caller_doubles()) {
2848 int no_frame_start = -1;
2851 no_frame_start = masm_->pc_offset();
2854 if (instr->has_constant_parameter_count()) {
2855 int parameter_count =
ToInteger32(instr->constant_parameter_count());
2857 if (sp_delta != 0) {
2858 __ Addu(
sp,
sp, Operand(sp_delta));
2861 Register reg =
ToRegister(instr->parameter_count());
2870 if (no_frame_start != -1) {
2871 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
2876 void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
2877 Register result =
ToRegister(instr->result());
2878 __ li(at, Operand(Handle<Object>(instr->hydrogen()->cell().handle())));
2880 if (instr->hydrogen()->RequiresHoleCheck()) {
2881 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2890 Register vector =
ToRegister(instr->temp_vector());
2892 __ li(vector, instr->hydrogen()->feedback_vector());
2900 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
2907 if (FLAG_vector_ics) {
2908 EmitVectorLoadICRegisters<LLoadGlobalGeneric>(instr);
2911 Handle<Code> ic = CodeFactory::LoadIC(isolate(),
mode).code();
2916 void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
2921 __ li(cell, Operand(instr->hydrogen()->cell().handle()));
2927 if (instr->hydrogen()->RequiresHoleCheck()) {
2929 Register payload =
ToRegister(instr->temp());
2931 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2942 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
2943 Register context =
ToRegister(instr->context());
2944 Register result =
ToRegister(instr->result());
2947 if (instr->hydrogen()->RequiresHoleCheck()) {
2948 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2950 if (instr->hydrogen()->DeoptimizesOnHole()) {
2954 __ Branch(&is_not_hole,
ne, result, Operand(at));
2955 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
2956 __ bind(&is_not_hole);
2962 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
2963 Register context =
ToRegister(instr->context());
2968 Label skip_assignment;
2970 if (instr->hydrogen()->RequiresHoleCheck()) {
2971 __ lw(scratch, target);
2972 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2974 if (instr->hydrogen()->DeoptimizesOnHole()) {
2977 __ Branch(&skip_assignment,
ne, scratch, Operand(at));
2981 __ sw(value, target);
2982 if (instr->hydrogen()->NeedsWriteBarrier()) {
2984 instr->hydrogen()->value()->type().IsHeapObject()
2986 __ RecordWriteContextSlot(context,
2996 __ bind(&skip_assignment);
3000 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
3001 HObjectAccess access = instr->hydrogen()->access();
3002 int offset = access.offset();
3003 Register
object =
ToRegister(instr->object());
3005 if (access.IsExternalMemory()) {
3006 Register result =
ToRegister(instr->result());
3008 __ Load(result, operand, access.representation());
3012 if (instr->hydrogen()->representation().IsDouble()) {
3018 Register result =
ToRegister(instr->result());
3019 if (!access.IsInobject()) {
3024 __ Load(result, operand, access.representation());
3028 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
3035 if (FLAG_vector_ics) {
3036 EmitVectorLoadICRegisters<LLoadNamedGeneric>(instr);
3038 Handle<Code> ic = CodeFactory::LoadIC(isolate(),
NOT_CONTEXTUAL).code();
3043 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
3045 Register
function =
ToRegister(instr->function());
3046 Register result =
ToRegister(instr->result());
3053 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
3058 __ GetObjectType(result, scratch, scratch);
3069 void LCodeGen::DoLoadRoot(LLoadRoot* instr) {
3070 Register result =
ToRegister(instr->result());
3071 __ LoadRoot(result, instr->index());
3075 void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
3076 Register arguments =
ToRegister(instr->arguments());
3077 Register result =
ToRegister(instr->result());
3080 if (instr->length()->IsConstantOperand()) {
3081 int const_length =
ToInteger32(LConstantOperand::cast(instr->length()));
3082 if (instr->index()->IsConstantOperand()) {
3083 int const_index =
ToInteger32(LConstantOperand::cast(instr->index()));
3084 int index = (const_length - const_index) + 1;
3088 __ li(at, Operand(const_length + 1));
3089 __ Subu(result, at, index);
3091 __ Addu(at, arguments, at);
3094 }
else if (instr->index()->IsConstantOperand()) {
3095 Register length =
ToRegister(instr->length());
3096 int const_index =
ToInteger32(LConstantOperand::cast(instr->index()));
3097 int loc = const_index - 1;
3099 __ Subu(result, length, Operand(loc));
3101 __ Addu(at, arguments, at);
3105 __ Addu(at, arguments, at);
3109 Register length =
ToRegister(instr->length());
3111 __ Subu(result, length, index);
3112 __ Addu(result, result, 1);
3114 __ Addu(at, arguments, at);
3121 Register external_pointer =
ToRegister(instr->elements());
3124 bool key_is_constant = instr->key()->IsConstantOperand();
3125 int constant_key = 0;
3126 if (key_is_constant) {
3127 constant_key =
ToInteger32(LConstantOperand::cast(instr->key()));
3128 if (constant_key & 0xF0000000) {
3129 Abort(kArrayIndexConstantValueTooBig);
3135 int shift_size = (instr->hydrogen()->key()->representation().IsSmi())
3136 ? (element_size_shift -
kSmiTagSize) : element_size_shift;
3137 int base_offset = instr->base_offset();
3143 int base_offset = instr->base_offset();
3145 if (key_is_constant) {
3146 __ Addu(
scratch0(), external_pointer, constant_key << element_size_shift);
3154 __ cvt_d_s(result, result);
3159 Register result =
ToRegister(instr->result());
3161 key, external_pointer, key_is_constant, constant_key,
3162 element_size_shift, shift_size, base_offset);
3163 switch (elements_kind) {
3166 __ lb(result, mem_operand);
3172 __ lbu(result, mem_operand);
3176 __ lh(result, mem_operand);
3180 __ lhu(result, mem_operand);
3184 __ lw(result, mem_operand);
3188 __ lw(result, mem_operand);
3191 Operand(0x80000000));
3214 Register elements =
ToRegister(instr->elements());
3215 bool key_is_constant = instr->key()->IsConstantOperand();
3222 int base_offset = instr->base_offset();
3223 if (key_is_constant) {
3224 int constant_key =
ToInteger32(LConstantOperand::cast(instr->key()));
3225 if (constant_key & 0xF0000000) {
3226 Abort(kArrayIndexConstantValueTooBig);
3230 __ Addu(scratch, elements, Operand(base_offset));
3232 if (!key_is_constant) {
3234 int shift_size = (instr->hydrogen()->key()->representation().IsSmi())
3235 ? (element_size_shift -
kSmiTagSize) : element_size_shift;
3236 __ sll(at, key, shift_size);
3237 __ Addu(scratch, scratch, at);
3242 if (instr->hydrogen()->RequiresHoleCheck()) {
3243 __ lw(scratch,
MemOperand(scratch, kHoleNanUpper32Offset));
3250 Register elements =
ToRegister(instr->elements());
3251 Register result =
ToRegister(instr->result());
3253 Register store_base = scratch;
3254 int offset = instr->base_offset();
3256 if (instr->key()->IsConstantOperand()) {
3257 LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
3259 store_base = elements;
3266 if (instr->hydrogen()->key()->representation().IsSmi()) {
3268 __ addu(scratch, elements, scratch);
3271 __ addu(scratch, elements, scratch);
3277 if (instr->hydrogen()->RequiresHoleCheck()) {
3279 __ SmiTst(result, scratch);
3280 DeoptimizeIf(
ne, instr,
"not a Smi", scratch, Operand(zero_reg));
3282 __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
3289 void LCodeGen::DoLoadKeyed(LLoadKeyed* instr) {
3290 if (instr->is_typed_elements()) {
3292 }
else if (instr->hydrogen()->representation().IsDouble()) {
3302 bool key_is_constant,
3307 if (key_is_constant) {
3308 return MemOperand(base, (constant_key << element_size) + base_offset);
3311 if (base_offset == 0) {
3312 if (shift_size >= 0) {
3324 if (shift_size >= 0) {
3337 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
3342 if (FLAG_vector_ics) {
3343 EmitVectorLoadICRegisters<LLoadKeyedGeneric>(instr);
3346 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
3351 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
3354 Register result =
ToRegister(instr->result());
3356 if (instr->hydrogen()->from_inlined()) {
3360 Label done, adapted;
3367 __ Movn(result,
fp, temp);
3368 __ Movz(result, scratch, temp);
3373 void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
3374 Register elem =
ToRegister(instr->elements());
3375 Register result =
ToRegister(instr->result());
3380 __ Addu(result, zero_reg, Operand(
scope()->num_parameters()));
3381 __ Branch(&done,
eq,
fp, Operand(elem));
3387 __ SmiUntag(result);
3394 void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
3395 Register receiver =
ToRegister(instr->receiver());
3396 Register
function =
ToRegister(instr->function());
3397 Register result =
ToRegister(instr->result());
3403 Label global_object, result_in_receiver;
3405 if (!instr->hydrogen()->known_function()) {
3414 int32_t strict_mode_function_mask =
3417 __ And(scratch, scratch, Operand(strict_mode_function_mask | native_mask));
3418 __ Branch(&result_in_receiver,
ne, scratch, Operand(zero_reg));
3422 __ LoadRoot(scratch, Heap::kNullValueRootIndex);
3423 __ Branch(&global_object,
eq, receiver, Operand(scratch));
3424 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
3425 __ Branch(&global_object,
eq, receiver, Operand(scratch));
3428 __ SmiTst(receiver, scratch);
3431 __ GetObjectType(receiver, scratch, scratch);
3435 __ Branch(&result_in_receiver);
3436 __ bind(&global_object);
3443 if (result.is(receiver)) {
3444 __ bind(&result_in_receiver);
3447 __ Branch(&result_ok);
3448 __ bind(&result_in_receiver);
3449 __ mov(result, receiver);
3450 __ bind(&result_ok);
3455 void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
3456 Register receiver =
ToRegister(instr->receiver());
3457 Register
function =
ToRegister(instr->function());
3458 Register length =
ToRegister(instr->length());
3459 Register elements =
ToRegister(instr->elements());
3469 Operand(kArgumentsLimit));
3474 __ Move(receiver, length);
3483 __ sll(scratch, length, 2);
3485 __ Addu(scratch, elements, scratch);
3488 __ Subu(length, length, Operand(1));
3490 __ sll(scratch, length, 2);
3493 DCHECK(instr->HasPointerMap());
3494 LPointerMap* pointers = instr->pointer_map();
3496 this, pointers, Safepoint::kLazyDeopt);
3499 ParameterCount actual(receiver);
3500 __ InvokeFunction(
function, actual,
CALL_FUNCTION, safepoint_generator);
3504 void LCodeGen::DoPushArgument(LPushArgument* instr) {
3505 LOperand* argument = instr->value();
3506 if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
3507 Abort(kDoPushArgumentNotImplementedForDoubleType);
3510 __ push(argument_reg);
3515 void LCodeGen::DoDrop(LDrop* instr) {
3516 __ Drop(instr->count());
3520 void LCodeGen::DoThisFunction(LThisFunction* instr) {
3521 Register result =
ToRegister(instr->result());
3526 void LCodeGen::DoContext(LContext* instr) {
3528 Register result =
ToRegister(instr->result());
3529 if (info()->IsOptimizing()) {
3538 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
3540 __ li(
scratch0(), instr->hydrogen()->pairs());
3549 int formal_parameter_count,
3553 bool dont_adapt_arguments =
3555 bool can_invoke_directly =
3556 dont_adapt_arguments || formal_parameter_count == arity;
3560 if (can_invoke_directly) {
3562 __ li(a1,
function);
3570 if (dont_adapt_arguments) {
3582 ParameterCount count(arity);
3583 ParameterCount expected(formal_parameter_count);
3584 __ InvokeFunction(
function, expected, count,
CALL_FUNCTION, generator);
3598 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
3607 __ Move(result, input);
3614 PushSafepointRegistersScope
scope(
this);
3625 Label allocated, slow;
3626 __ LoadRoot(tmp4, Heap::kHeapNumberMapRootIndex);
3627 __ AllocateHeapNumber(tmp1, tmp2, tmp3, tmp4, &slow);
3628 __ Branch(&allocated);
3639 __ LoadFromSafepointRegisterSlot(input, input);
3642 __ bind(&allocated);
3650 __ StoreToSafepointRegisterSlot(tmp1, result);
3659 Register result =
ToRegister(instr->result());
3663 __ mov(result, input);
3664 __ subu(result, zero_reg, input);
3671 void LCodeGen::DoMathAbs(LMathAbs* instr) {
3675 DeferredMathAbsTaggedHeapNumber(
LCodeGen* codegen, LMathAbs* instr)
3678 codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
3680 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
3685 Representation r = instr->hydrogen()->value()->representation();
3689 __ abs_d(result, input);
3690 }
else if (r.IsSmiOrInteger32()) {
3694 DeferredMathAbsTaggedHeapNumber* deferred =
3695 new(zone()) DeferredMathAbsTaggedHeapNumber(
this, instr);
3698 __ JumpIfNotSmi(input, deferred->entry());
3701 __ bind(deferred->exit());
3706 void LCodeGen::DoMathFloor(LMathFloor* instr) {
3708 Register result =
ToRegister(instr->result());
3710 Register except_flag =
ToRegister(instr->temp());
3726 __ Branch(&done,
ne, result, Operand(zero_reg));
3735 void LCodeGen::DoMathRound(LMathRound* instr) {
3737 Register result =
ToRegister(instr->result());
3740 Label done, check_sign_on_zero;
3743 __ Mfhc1(result, input);
3752 __ mov(result, zero_reg);
3754 __ Branch(&check_sign_on_zero);
3774 __ Xor(result, result, Operand(scratch));
3777 DeoptimizeIf(
lt, instr,
"minus zero", result, Operand(zero_reg));
3782 __ Branch(&skip2,
ge, result, Operand(zero_reg));
3783 __ mov(result, zero_reg);
3788 Register except_flag = scratch;
3801 __ Branch(&done,
ne, result, Operand(zero_reg));
3802 __ bind(&check_sign_on_zero);
3803 __ Mfhc1(scratch, input);
3805 DeoptimizeIf(
ne, instr,
"minus zero", scratch, Operand(zero_reg));
3811 void LCodeGen::DoMathFround(LMathFround* instr) {
3814 __ cvt_s_d(result.low(), input);
3815 __ cvt_d_s(result, result.low());
3819 void LCodeGen::DoMathSqrt(LMathSqrt* instr) {
3822 __ sqrt_d(result, input);
3826 void LCodeGen::DoMathPowHalf(LMathPowHalf* instr) {
3831 DCHECK(!input.is(result));
3841 __ neg_d(result, temp);
3845 __ sqrt_d(result, result);
3850 void LCodeGen::DoPower(LPower* instr) {
3851 Representation exponent_type = instr->hydrogen()->right()->representation();
3855 DCHECK(!instr->right()->IsDoubleRegister() ||
3857 DCHECK(!instr->right()->IsRegister() ||
3862 if (exponent_type.IsSmi()) {
3865 }
else if (exponent_type.IsTagged()) {
3867 __ JumpIfSmi(tagged_exponent, &no_deopt);
3868 DCHECK(!t3.is(tagged_exponent));
3870 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
3875 }
else if (exponent_type.IsInteger32()) {
3879 DCHECK(exponent_type.IsDouble());
3886 void LCodeGen::DoMathExp(LMathExp* instr) {
3895 masm(), input, result, double_scratch1, double_scratch2,
3900 void LCodeGen::DoMathLog(LMathLog* instr) {
3903 __ CallCFunction(ExternalReference::math_log_double_function(isolate()),
3909 void LCodeGen::DoMathClz32(LMathClz32* instr) {
3911 Register result =
ToRegister(instr->result());
3912 __ Clz(result, input);
3916 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
3919 DCHECK(instr->HasPointerMap());
3921 Handle<JSFunction> known_function = instr->hydrogen()->known_function();
3922 if (known_function.is_null()) {
3923 LPointerMap* pointers = instr->pointer_map();
3925 ParameterCount count(instr->arity());
3929 instr->hydrogen()->formal_parameter_count(),
3937 void LCodeGen::DoTailCallThroughMegamorphicCache(
3938 LTailCallThroughMegamorphicCache* instr) {
3939 Register receiver =
ToRegister(instr->receiver());
3946 Register scratch = a3;
3947 Register extra = t0;
3948 Register extra2 = t1;
3949 Register extra3 = t2;
3955 isolate()->stub_cache()->GenerateProbe(masm(), instr->hydrogen()->flags(),
3956 must_teardown_frame, receiver,
name,
3957 scratch, extra, extra2, extra3);
3965 void LCodeGen::DoCallWithDescriptor(LCallWithDescriptor* instr) {
3968 LPointerMap* pointers = instr->pointer_map();
3971 if (instr->target()->IsConstantOperand()) {
3972 LConstantOperand* target = LConstantOperand::cast(instr->target());
3977 DCHECK(instr->target()->IsRegister());
3978 Register target =
ToRegister(instr->target());
3979 generator.BeforeCall(
__ CallSize(target));
3983 generator.AfterCall();
3987 void LCodeGen::DoCallJSFunction(LCallJSFunction* instr) {
3991 if (instr->hydrogen()->pass_argument_count()) {
3992 __ li(a0, Operand(instr->arity()));
4006 void LCodeGen::DoCallFunction(LCallFunction* instr) {
4011 int arity = instr->arity();
4012 CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags());
4017 void LCodeGen::DoCallNew(LCallNew* instr) {
4022 __ li(a0, Operand(instr->arity()));
4024 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
4030 void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
4035 __ li(a0, Operand(instr->arity()));
4036 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
4037 ElementsKind kind = instr->hydrogen()->elements_kind();
4043 if (instr->arity() == 0) {
4044 ArrayNoArgumentConstructorStub stub(isolate(), kind, override_mode);
4046 }
else if (instr->arity() == 1) {
4053 __ Branch(&packed_case,
eq, t1, Operand(zero_reg));
4056 ArraySingleArgumentConstructorStub stub(isolate(),
4061 __ bind(&packed_case);
4064 ArraySingleArgumentConstructorStub stub(isolate(), kind, override_mode);
4068 ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode);
4074 void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
4075 CallRuntime(instr->function(), instr->arity(), instr);
4079 void LCodeGen::DoStoreCodeEntry(LStoreCodeEntry* instr) {
4080 Register
function =
ToRegister(instr->function());
4081 Register code_object =
ToRegister(instr->code_object());
4082 __ Addu(code_object, code_object,
4089 void LCodeGen::DoInnerAllocatedObject(LInnerAllocatedObject* instr) {
4090 Register result =
ToRegister(instr->result());
4091 Register base =
ToRegister(instr->base_object());
4092 if (instr->offset()->IsConstantOperand()) {
4093 LConstantOperand* offset = LConstantOperand::cast(instr->offset());
4096 Register offset =
ToRegister(instr->offset());
4097 __ Addu(result, base, offset);
4102 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
4103 Representation representation = instr->representation();
4105 Register
object =
ToRegister(instr->object());
4107 HObjectAccess access = instr->hydrogen()->access();
4108 int offset = access.offset();
4110 if (access.IsExternalMemory()) {
4113 __ Store(value, operand, representation);
4117 __ AssertNotSmi(
object);
4119 DCHECK(!representation.IsSmi() ||
4120 !instr->value()->IsConstantOperand() ||
4121 IsSmi(LConstantOperand::cast(instr->value())));
4122 if (representation.IsDouble()) {
4123 DCHECK(access.IsInobject());
4124 DCHECK(!instr->hydrogen()->has_transition());
4125 DCHECK(!instr->hydrogen()->NeedsWriteBarrier());
4131 if (instr->hydrogen()->has_transition()) {
4132 Handle<Map> transition = instr->hydrogen()->transition_map();
4133 AddDeprecationDependency(transition);
4134 __ li(scratch, Operand(transition));
4136 if (instr->hydrogen()->NeedsWriteBarrierForMap()) {
4139 __ RecordWriteForMap(
object,
4149 if (access.IsInobject()) {
4151 __ Store(value, operand, representation);
4152 if (instr->hydrogen()->NeedsWriteBarrier()) {
4154 __ RecordWriteField(
object,
4161 instr->hydrogen()->SmiCheckForWriteBarrier(),
4162 instr->hydrogen()->PointersToHereCheckForValue());
4167 __ Store(value, operand, representation);
4168 if (instr->hydrogen()->NeedsWriteBarrier()) {
4171 __ RecordWriteField(scratch,
4178 instr->hydrogen()->SmiCheckForWriteBarrier(),
4179 instr->hydrogen()->PointersToHereCheckForValue());
4185 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
4196 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
4200 if (instr->index()->IsConstantOperand()) {
4208 if (FLAG_debug_code && instr->hydrogen()->skip_check()) {
4211 __ stop(
"eliminated bounds check failed");
4220 Register external_pointer =
ToRegister(instr->elements());
4223 bool key_is_constant = instr->key()->IsConstantOperand();
4224 int constant_key = 0;
4225 if (key_is_constant) {
4226 constant_key =
ToInteger32(LConstantOperand::cast(instr->key()));
4227 if (constant_key & 0xF0000000) {
4228 Abort(kArrayIndexConstantValueTooBig);
4234 int shift_size = (instr->hydrogen()->key()->representation().IsSmi())
4235 ? (element_size_shift -
kSmiTagSize) : element_size_shift;
4236 int base_offset = instr->base_offset();
4244 if (key_is_constant) {
4245 if (constant_key != 0) {
4246 __ Addu(address, external_pointer,
4247 Operand(constant_key << element_size_shift));
4249 address = external_pointer;
4252 __ sll(address, key, shift_size);
4253 __ Addu(address, external_pointer, address);
4266 key, external_pointer, key_is_constant, constant_key,
4267 element_size_shift, shift_size,
4269 switch (elements_kind) {
4276 __ sb(value, mem_operand);
4282 __ sh(value, mem_operand);
4288 __ sw(value, mem_operand);
4311 Register elements =
ToRegister(instr->elements());
4314 bool key_is_constant = instr->key()->IsConstantOperand();
4315 int base_offset = instr->base_offset();
4316 Label not_nan, done;
4321 if (key_is_constant) {
4322 int constant_key =
ToInteger32(LConstantOperand::cast(instr->key()));
4323 if (constant_key & 0xF0000000) {
4324 Abort(kArrayIndexConstantValueTooBig);
4326 __ Addu(scratch, elements,
4327 Operand((constant_key << element_size_shift) + base_offset));
4329 int shift_size = (instr->hydrogen()->key()->representation().IsSmi())
4330 ? (element_size_shift -
kSmiTagSize) : element_size_shift;
4331 __ Addu(scratch, elements, Operand(base_offset));
4333 __ Addu(scratch, scratch, at);
4336 if (instr->NeedsCanonicalization()) {
4339 __ BranchF(
NULL, &is_nan,
eq, value, value);
4340 __ Branch(¬_nan);
4344 __ LoadRoot(at, Heap::kNanValueRootIndex);
4358 Register elements =
ToRegister(instr->elements());
4359 Register key = instr->key()->IsRegister() ?
ToRegister(instr->key())
4362 Register store_base = scratch;
4363 int offset = instr->base_offset();
4366 if (instr->key()->IsConstantOperand()) {
4367 DCHECK(!instr->hydrogen()->NeedsWriteBarrier());
4368 LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
4370 store_base = elements;
4376 if (instr->hydrogen()->key()->representation().IsSmi()) {
4378 __ addu(scratch, elements, scratch);
4381 __ addu(scratch, elements, scratch);
4386 if (instr->hydrogen()->NeedsWriteBarrier()) {
4388 instr->hydrogen()->value()->type().IsHeapObject()
4391 __ Addu(key, store_base, Operand(offset));
4392 __ RecordWrite(elements,
4399 instr->hydrogen()->PointersToHereCheckForValue());
4404 void LCodeGen::DoStoreKeyed(LStoreKeyed* instr) {
4406 if (instr->is_typed_elements()) {
4408 }
else if (instr->hydrogen()->value()->representation().IsDouble()) {
4416 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
4423 CodeFactory::KeyedStoreIC(isolate(), instr->strict_mode()).code();
4428 void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
4429 Register object_reg =
ToRegister(instr->object());
4432 Handle<Map> from_map = instr->original_map();
4433 Handle<Map> to_map = instr->transitioned_map();
4437 Label not_applicable;
4439 __ Branch(¬_applicable,
ne, scratch, Operand(from_map));
4442 Register new_map_reg =
ToRegister(instr->new_map_temp());
4443 __ li(new_map_reg, Operand(to_map));
4446 __ RecordWriteForMap(object_reg,
4452 DCHECK(object_reg.is(a0));
4454 PushSafepointRegistersScope
scope(
this);
4455 __ li(a1, Operand(to_map));
4456 bool is_js_array = from_map->instance_type() ==
JS_ARRAY_TYPE;
4457 TransitionElementsKindStub stub(isolate(), from_kind, to_kind, is_js_array);
4460 instr->pointer_map(), 0, Safepoint::kLazyDeopt);
4462 __ bind(¬_applicable);
4466 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) {
4467 Register
object =
ToRegister(instr->object());
4469 Label no_memento_found;
4470 __ TestJSArrayForAllocationMemento(
object, temp, &no_memento_found,
4471 ne, &no_memento_found);
4473 __ bind(&no_memento_found);
4477 void LCodeGen::DoStringAdd(LStringAdd* instr) {
4481 StringAddStub stub(isolate(),
4482 instr->hydrogen()->flags(),
4483 instr->hydrogen()->pretenure_flag());
4488 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
4491 DeferredStringCharCodeAt(
LCodeGen* codegen, LStringCharCodeAt* instr)
4494 codegen()->DoDeferredStringCharCodeAt(instr_);
4496 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
4498 LStringCharCodeAt* instr_;
4501 DeferredStringCharCodeAt* deferred =
4502 new(zone()) DeferredStringCharCodeAt(
this, instr);
4508 __ bind(deferred->exit());
4513 Register
string =
ToRegister(instr->string());
4514 Register result =
ToRegister(instr->result());
4520 __ mov(result, zero_reg);
4522 PushSafepointRegistersScope
scope(
this);
4526 if (instr->index()->IsConstantOperand()) {
4527 int const_index =
ToInteger32(LConstantOperand::cast(instr->index()));
4539 __ StoreToSafepointRegisterSlot(v0, result);
4543 void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
4546 DeferredStringCharFromCode(
LCodeGen* codegen, LStringCharFromCode* instr)
4549 codegen()->DoDeferredStringCharFromCode(instr_);
4551 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
4553 LStringCharFromCode* instr_;
4556 DeferredStringCharFromCode* deferred =
4557 new(zone()) DeferredStringCharFromCode(
this, instr);
4559 DCHECK(instr->hydrogen()->value()->representation().IsInteger32());
4560 Register char_code =
ToRegister(instr->char_code());
4561 Register result =
ToRegister(instr->result());
4563 DCHECK(!char_code.is(result));
4565 __ Branch(deferred->entry(),
hi,
4567 __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex);
4569 __ Addu(result, result, scratch);
4571 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
4572 __ Branch(deferred->entry(),
eq, result, Operand(scratch));
4573 __ bind(deferred->exit());
4578 Register char_code =
ToRegister(instr->char_code());
4579 Register result =
ToRegister(instr->result());
4584 __ mov(result, zero_reg);
4586 PushSafepointRegistersScope
scope(
this);
4587 __ SmiTag(char_code);
4590 __ StoreToSafepointRegisterSlot(v0, result);
4594 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
4595 LOperand* input = instr->value();
4596 DCHECK(input->IsRegister() || input->IsStackSlot());
4597 LOperand* output = instr->result();
4598 DCHECK(output->IsDoubleRegister());
4600 if (input->IsStackSlot()) {
4603 __ mtc1(scratch, single_scratch);
4611 void LCodeGen::DoUint32ToDouble(LUint32ToDouble* instr) {
4612 LOperand* input = instr->value();
4613 LOperand* output = instr->result();
4621 void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
4624 DeferredNumberTagI(
LCodeGen* codegen, LNumberTagI* instr)
4627 codegen()->DoDeferredNumberTagIU(instr_,
4633 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
4635 LNumberTagI* instr_;
4642 DeferredNumberTagI* deferred =
new(zone()) DeferredNumberTagI(
this, instr);
4644 __ BranchOnOverflow(deferred->entry(),
overflow);
4645 __ bind(deferred->exit());
4649 void LCodeGen::DoNumberTagU(LNumberTagU* instr) {
4652 DeferredNumberTagU(
LCodeGen* codegen, LNumberTagU* instr)
4655 codegen()->DoDeferredNumberTagIU(instr_,
4661 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
4663 LNumberTagU* instr_;
4667 Register result =
ToRegister(instr->result());
4669 DeferredNumberTagU* deferred =
new(zone()) DeferredNumberTagU(
this, instr);
4671 __ SmiTag(result, input);
4672 __ bind(deferred->exit());
4680 IntegerSignedness signedness) {
4694 __ SmiUntag(src, dst);
4695 __ Xor(src, src, Operand(0x80000000));
4697 __ mtc1(src, dbl_scratch);
4698 __ cvt_d_w(dbl_scratch, dbl_scratch);
4700 __ mtc1(src, dbl_scratch);
4701 __ Cvt_d_uw(dbl_scratch, dbl_scratch,
f22);
4704 if (FLAG_inline_new) {
4705 __ LoadRoot(tmp3, Heap::kHeapNumberMapRootIndex);
4716 __ mov(dst, zero_reg);
4719 PushSafepointRegistersScope
scope(
this);
4727 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
4729 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
4731 __ StoreToSafepointRegisterSlot(v0, dst);
4743 void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
4746 DeferredNumberTagD(
LCodeGen* codegen, LNumberTagD* instr)
4749 codegen()->DoDeferredNumberTagD(instr_);
4751 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
4753 LNumberTagD* instr_;
4762 DeferredNumberTagD* deferred =
new(zone()) DeferredNumberTagD(
this, instr);
4763 if (FLAG_inline_new) {
4764 __ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex);
4766 __ AllocateHeapNumber(reg, temp1, temp2, scratch, deferred->entry(),
4769 __ Branch(deferred->entry());
4771 __ bind(deferred->exit());
4783 __ mov(reg, zero_reg);
4785 PushSafepointRegistersScope
scope(
this);
4792 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
4794 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
4796 __ StoreToSafepointRegisterSlot(v0, reg);
4800 void LCodeGen::DoSmiTag(LSmiTag* instr) {
4801 HChange* hchange = instr->hydrogen();
4803 Register output =
ToRegister(instr->result());
4806 __ And(at, input, Operand(0xc0000000));
4811 __ SmiTagCheckOverflow(output, input, at);
4814 __ SmiTag(output, input);
4819 void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
4822 Register result =
ToRegister(instr->result());
4823 if (instr->needs_check()) {
4827 __ SmiUntag(result, input);
4828 DeoptimizeIf(
ne, instr,
"not a Smi", scratch, Operand(zero_reg));
4830 __ SmiUntag(result, input);
4838 bool can_convert_undefined_to_nan =
4839 instr->hydrogen()->can_convert_undefined_to_nan();
4840 bool deoptimize_on_minus_zero = instr->hydrogen()->deoptimize_on_minus_zero();
4843 Label convert, load_smi, done;
4846 __ UntagAndJumpIfSmi(scratch, input_reg, &load_smi);
4849 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
4850 if (can_convert_undefined_to_nan) {
4851 __ Branch(&convert,
ne, scratch, Operand(at));
4853 DeoptimizeIf(
ne, instr,
"not a heap number", scratch, Operand(at));
4857 if (deoptimize_on_minus_zero) {
4858 __ mfc1(at, result_reg.low());
4859 __ Branch(&done,
ne, at, Operand(zero_reg));
4860 __ Mfhc1(scratch, result_reg);
4865 if (can_convert_undefined_to_nan) {
4868 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
4869 DeoptimizeIf(
ne, instr,
"not a heap number/undefined", input_reg,
4871 __ LoadRoot(scratch, Heap::kNanValueRootIndex);
4876 __ SmiUntag(scratch, input_reg);
4882 __ mtc1(scratch, result_reg);
4883 __ cvt_d_w(result_reg, result_reg);
4889 Register input_reg =
ToRegister(instr->value());
4891 Register scratch2 =
ToRegister(instr->temp());
4903 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
4907 if (instr->truncating()) {
4910 Label no_heap_number, check_bools, check_false;
4913 __ mov(scratch2, input_reg);
4914 __ TruncateHeapNumberToI(input_reg, scratch2);
4919 __ bind(&no_heap_number);
4920 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
4921 __ Branch(&check_bools,
ne, input_reg, Operand(at));
4924 __ mov(input_reg, zero_reg);
4926 __ bind(&check_bools);
4927 __ LoadRoot(at, Heap::kTrueValueRootIndex);
4928 __ Branch(&check_false,
ne, scratch2, Operand(at));
4930 __ li(input_reg, Operand(1));
4932 __ bind(&check_false);
4933 __ LoadRoot(at, Heap::kFalseValueRootIndex);
4934 DeoptimizeIf(
ne, instr,
"not a heap number/undefined/true/false", scratch2,
4937 __ mov(input_reg, zero_reg);
4945 Register except_flag = scratch2;
4958 __ Branch(&done,
ne, input_reg, Operand(zero_reg));
4969 void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
4972 DeferredTaggedToI(
LCodeGen* codegen, LTaggedToI* instr)
4975 codegen()->DoDeferredTaggedToI(instr_);
4977 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
4982 LOperand* input = instr->value();
4983 DCHECK(input->IsRegister());
4984 DCHECK(input->Equals(instr->result()));
4988 if (instr->hydrogen()->value()->representation().IsSmi()) {
4989 __ SmiUntag(input_reg);
4991 DeferredTaggedToI* deferred =
new(zone()) DeferredTaggedToI(
this, instr);
4994 __ JumpIfNotSmi(input_reg, deferred->entry());
4997 __ SmiUntag(input_reg);
4998 __ bind(deferred->exit());
5003 void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
5004 LOperand* input = instr->value();
5005 DCHECK(input->IsRegister());
5006 LOperand* result = instr->result();
5007 DCHECK(result->IsDoubleRegister());
5012 HValue* value = instr->hydrogen()->value();
5020 void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
5021 Register result_reg =
ToRegister(instr->result());
5025 if (instr->truncating()) {
5026 __ TruncateDoubleToI(result_reg, double_input);
5044 __ Branch(&done,
ne, result_reg, Operand(zero_reg));
5054 void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) {
5055 Register result_reg =
ToRegister(instr->result());
5059 if (instr->truncating()) {
5060 __ TruncateDoubleToI(result_reg, double_input);
5078 __ Branch(&done,
ne, result_reg, Operand(zero_reg));
5085 __ SmiTagCheckOverflow(result_reg, result_reg,
scratch1);
5090 void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
5091 LOperand* input = instr->value();
5097 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
5098 if (!instr->hydrogen()->value()->type().IsHeapObject()) {
5099 LOperand* input = instr->value();
5106 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
5110 __ GetObjectType(input, scratch, scratch);
5112 if (instr->hydrogen()->is_interval_check()) {
5115 instr->hydrogen()->GetCheckInterval(&first, &last);
5118 if (first == last) {
5119 DeoptimizeIf(
ne, instr,
"wrong instance type", scratch, Operand(first));
5121 DeoptimizeIf(
lo, instr,
"wrong instance type", scratch, Operand(first));
5124 DeoptimizeIf(
hi, instr,
"wrong instance type", scratch, Operand(last));
5130 instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag);
5134 __ And(at, scratch, mask);
5138 __ And(scratch, scratch, Operand(mask));
5139 DeoptimizeIf(
ne, instr,
"wrong instance type", scratch, Operand(tag));
5145 void LCodeGen::DoCheckValue(LCheckValue* instr) {
5147 Handle<HeapObject>
object = instr->hydrogen()->object().handle();
5149 if (isolate()->heap()->InNewSpace(*
object)) {
5151 Handle<Cell> cell = isolate()->factory()->NewCell(
object);
5152 __ li(at, Operand(Handle<Object>(cell)));
5163 PushSafepointRegistersScope
scope(
this);
5165 __ mov(
cp, zero_reg);
5166 __ CallRuntimeSaveDoubles(Runtime::kTryMigrateInstance);
5168 instr->pointer_map(), 1, Safepoint::kNoLazyDeopt);
5169 __ StoreToSafepointRegisterSlot(v0,
scratch0());
5172 DeoptimizeIf(
eq, instr,
"instance migration failed", at, Operand(zero_reg));
5176 void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
5179 DeferredCheckMaps(
LCodeGen* codegen, LCheckMaps* instr, Register
object)
5181 SetExit(check_maps());
5184 codegen()->DoDeferredInstanceMigration(instr_, object_);
5186 Label* check_maps() {
return &check_maps_; }
5187 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
5194 if (instr->hydrogen()->IsStabilityCheck()) {
5195 const UniqueSet<Map>* maps = instr->hydrogen()->maps();
5196 for (
int i = 0;
i < maps->size(); ++
i) {
5197 AddStabilityDependency(maps->at(
i).handle());
5203 LOperand* input = instr->value();
5204 DCHECK(input->IsRegister());
5208 DeferredCheckMaps* deferred =
NULL;
5209 if (instr->hydrogen()->HasMigrationTarget()) {
5210 deferred =
new(zone()) DeferredCheckMaps(
this, instr, reg);
5211 __ bind(deferred->check_maps());
5214 const UniqueSet<Map>* maps = instr->hydrogen()->maps();
5216 for (
int i = 0;
i < maps->size() - 1;
i++) {
5217 Handle<Map>
map = maps->at(
i).handle();
5218 __ CompareMapAndBranch(map_reg,
map, &success,
eq, &success);
5220 Handle<Map>
map = maps->at(maps->size() - 1).handle();
5222 if (instr->hydrogen()->HasMigrationTarget()) {
5223 __ Branch(deferred->entry(),
ne, map_reg, Operand(
map));
5232 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
5234 Register result_reg =
ToRegister(instr->result());
5236 __ ClampDoubleToUint8(result_reg, value_reg, temp_reg);
5240 void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) {
5241 Register unclamped_reg =
ToRegister(instr->unclamped());
5242 Register result_reg =
ToRegister(instr->result());
5243 __ ClampUint8(result_reg, unclamped_reg);
5247 void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
5249 Register input_reg =
ToRegister(instr->unclamped());
5250 Register result_reg =
ToRegister(instr->result());
5252 Label is_smi, done, heap_number;
5255 __ UntagAndJumpIfSmi(scratch, input_reg, &is_smi);
5259 __ Branch(&heap_number,
eq, scratch, Operand(factory()->heap_number_map()));
5263 DeoptimizeIf(
ne, instr,
"not a heap number/undefined", input_reg,
5264 Operand(factory()->undefined_value()));
5265 __ mov(result_reg, zero_reg);
5269 __ bind(&heap_number);
5276 __ ClampUint8(result_reg, scratch);
5282 void LCodeGen::DoDoubleBits(LDoubleBits* instr) {
5284 Register result_reg =
ToRegister(instr->result());
5285 if (instr->hydrogen()->bits() == HDoubleBits::HIGH) {
5286 __ FmoveHigh(result_reg, value_reg);
5288 __ FmoveLow(result_reg, value_reg);
5293 void LCodeGen::DoConstructDouble(LConstructDouble* instr) {
5297 __ Move(result_reg, lo_reg, hi_reg);
5301 void LCodeGen::DoAllocate(LAllocate* instr) {
5304 DeferredAllocate(
LCodeGen* codegen, LAllocate* instr)
5307 codegen()->DoDeferredAllocate(instr_);
5309 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
5314 DeferredAllocate* deferred =
5315 new(zone()) DeferredAllocate(
this, instr);
5317 Register result =
ToRegister(instr->result());
5318 Register scratch =
ToRegister(instr->temp1());
5319 Register scratch2 =
ToRegister(instr->temp2());
5323 if (instr->hydrogen()->MustAllocateDoubleAligned()) {
5326 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
5327 DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
5328 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
5330 }
else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
5331 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
5334 if (instr->size()->IsConstantOperand()) {
5337 __ Allocate(
size, result, scratch, scratch2, deferred->entry(),
flags);
5339 __ jmp(deferred->entry());
5343 __ Allocate(
size, result, scratch, scratch2, deferred->entry(),
flags);
5346 __ bind(deferred->exit());
5348 if (instr->hydrogen()->MustPrefillWithFiller()) {
5350 if (instr->size()->IsConstantOperand()) {
5356 __ li(scratch2, Operand(isolate()->factory()->one_pointer_filler_map()));
5360 __ Addu(at, result, Operand(scratch));
5362 __ Branch(&loop,
ge, scratch, Operand(zero_reg));
5368 Register result =
ToRegister(instr->result());
5373 __ mov(result, zero_reg);
5375 PushSafepointRegistersScope
scope(
this);
5376 if (instr->size()->IsRegister()) {
5387 __ stop(
"invalid allocation size");
5393 instr->hydrogen()->MustAllocateDoubleAligned());
5394 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
5395 DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
5396 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
5398 }
else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
5399 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
5407 Runtime::kAllocateInTargetSpace, 2, instr, instr->context());
5408 __ StoreToSafepointRegisterSlot(v0, result);
5412 void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
5416 CallRuntime(Runtime::kToFastProperties, 1, instr);
5420 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
5428 int literal_offset =
5430 __ li(t3, instr->hydrogen()->literals());
5432 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
5433 __ Branch(&materialized,
ne, a1, Operand(at));
5437 __ li(t2, Operand(
Smi::FromInt(instr->hydrogen()->literal_index())));
5438 __ li(t1, Operand(instr->hydrogen()->pattern()));
5439 __ li(t0, Operand(instr->hydrogen()->flags()));
5441 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
5444 __ bind(&materialized);
5446 Label allocated, runtime_allocate;
5451 __ bind(&runtime_allocate);
5454 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
5457 __ bind(&allocated);
5473 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
5477 bool pretenure = instr->hydrogen()->pretenure();
5478 if (!pretenure && instr->hydrogen()->has_no_literals()) {
5479 FastNewClosureStub stub(isolate(), instr->hydrogen()->strict_mode(),
5480 instr->hydrogen()->kind());
5481 __ li(a2, Operand(instr->hydrogen()->shared_info()));
5484 __ li(a2, Operand(instr->hydrogen()->shared_info()));
5485 __ li(a1, Operand(pretenure ? factory()->true_value()
5486 : factory()->false_value()));
5493 void LCodeGen::DoTypeof(LTypeof* instr) {
5501 void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
5505 Operand cmp2 = Operand(
no_reg);
5508 instr->FalseLabel(chunk_),
5510 instr->type_literal(),
5515 DCHECK(!cmp2.is_reg() || cmp2.rm().is_valid());
5518 EmitBranch(instr, final_branch_condition, cmp1, cmp2);
5534 Factory* factory = isolate()->factory();
5536 __ JumpIfSmi(input, true_label);
5538 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
5541 final_branch_condition =
eq;
5543 }
else if (
String::Equals(type_name, factory->string_string())) {
5544 __ JumpIfSmi(input, false_label);
5545 __ GetObjectType(input, input, scratch);
5554 final_branch_condition =
eq;
5556 }
else if (
String::Equals(type_name, factory->symbol_string())) {
5557 __ JumpIfSmi(input, false_label);
5558 __ GetObjectType(input, input, scratch);
5561 final_branch_condition =
eq;
5563 }
else if (
String::Equals(type_name, factory->boolean_string())) {
5564 __ LoadRoot(at, Heap::kTrueValueRootIndex);
5566 __ LoadRoot(at, Heap::kFalseValueRootIndex);
5569 final_branch_condition =
eq;
5571 }
else if (
String::Equals(type_name, factory->undefined_string())) {
5572 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
5576 __ JumpIfSmi(input, false_label);
5583 final_branch_condition =
ne;
5585 }
else if (
String::Equals(type_name, factory->function_string())) {
5587 __ JumpIfSmi(input, false_label);
5588 __ GetObjectType(input, scratch, input);
5592 final_branch_condition =
eq;
5594 }
else if (
String::Equals(type_name, factory->object_string())) {
5595 __ JumpIfSmi(input, false_label);
5596 __ LoadRoot(at, Heap::kNullValueRootIndex);
5599 __ GetObjectType(input,
map, scratch);
5600 __ Branch(false_label,
5610 final_branch_condition =
eq;
5615 __ Branch(false_label);
5618 return final_branch_condition;
5622 void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
5633 DCHECK(!temp1.is(temp2));
5638 Label check_frame_marker;
5640 __ Branch(&check_frame_marker,
ne, temp2,
5645 __ bind(&check_frame_marker);
5651 if (!info()->IsStub()) {
5654 int current_pc = masm()->pc_offset();
5655 if (current_pc < last_lazy_deopt_pc_ + space_needed) {
5656 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
5658 while (padding_size > 0) {
5664 last_lazy_deopt_pc_ = masm()->pc_offset();
5668 void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
5669 last_lazy_deopt_pc_ = masm()->pc_offset();
5670 DCHECK(instr->HasEnvironment());
5673 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
5677 void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
5687 DeoptimizeIf(
al, instr, type, instr->hydrogen()->reason(), zero_reg,
5692 void LCodeGen::DoDummy(LDummy* instr) {
5697 void LCodeGen::DoDummyUse(LDummyUse* instr) {
5703 PushSafepointRegistersScope
scope(
this);
5705 __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
5708 DCHECK(instr->HasEnvironment());
5710 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
5714 void LCodeGen::DoStackCheck(LStackCheck* instr) {
5717 DeferredStackCheck(
LCodeGen* codegen, LStackCheck* instr)
5720 codegen()->DoDeferredStackCheck(instr_);
5722 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
5724 LStackCheck* instr_;
5727 DCHECK(instr->HasEnvironment());
5731 if (instr->hydrogen()->is_function_entry()) {
5734 __ LoadRoot(at, Heap::kStackLimitRootIndex);
5735 __ Branch(&done,
hs,
sp, Operand(at));
5736 DCHECK(instr->context()->IsRegister());
5738 CallCode(isolate()->builtins()->StackCheck(),
5743 DCHECK(instr->hydrogen()->is_backwards_branch());
5745 DeferredStackCheck* deferred_stack_check =
5746 new(zone()) DeferredStackCheck(
this, instr);
5747 __ LoadRoot(at, Heap::kStackLimitRootIndex);
5748 __ Branch(deferred_stack_check->entry(),
lo,
sp, Operand(at));
5750 __ bind(instr->done_label());
5751 deferred_stack_check->SetExit(instr->done_label());
5760 void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
5768 DCHECK(!environment->HasBeenRegistered());
5775 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
5776 Register result =
ToRegister(instr->result());
5777 Register
object =
ToRegister(instr->object());
5778 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
5781 Register null_value = t1;
5782 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
5789 __ GetObjectType(
object, a1, a1);
5793 Label use_cache, call_runtime;
5795 __ CheckEnumCache(null_value, &call_runtime);
5798 __ Branch(&use_cache);
5801 __ bind(&call_runtime);
5803 CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr);
5807 __ LoadRoot(at, Heap::kMetaMapRootIndex);
5809 __ bind(&use_cache);
5813 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
5815 Register result =
ToRegister(instr->result());
5816 Label load_cache, done;
5817 __ EnumLength(result,
map);
5819 __ li(result, Operand(isolate()->factory()->empty_fixed_array()));
5822 __ bind(&load_cache);
5823 __ LoadInstanceDescriptors(
map, result);
5834 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
5835 Register
object =
ToRegister(instr->value());
5846 PushSafepointRegistersScope
scope(
this);
5848 __ mov(
cp, zero_reg);
5849 __ CallRuntimeSaveDoubles(Runtime::kLoadMutableDouble);
5851 instr->pointer_map(), 2, Safepoint::kNoLazyDeopt);
5852 __ StoreToSafepointRegisterSlot(v0, result);
5856 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
5859 DeferredLoadMutableDouble(
LCodeGen* codegen,
5860 LLoadFieldByIndex* instr,
5871 codegen()->DoDeferredLoadMutableDouble(instr_, result_, object_, index_);
5873 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
5875 LLoadFieldByIndex* instr_;
5881 Register
object =
ToRegister(instr->object());
5883 Register result =
ToRegister(instr->result());
5886 DeferredLoadMutableDouble* deferred;
5887 deferred =
new(zone()) DeferredLoadMutableDouble(
5888 this, instr, result,
object, index);
5890 Label out_of_object, done;
5893 __ Branch(deferred->entry(),
ne, scratch, Operand(zero_reg));
5894 __ sra(index, index, 1);
5900 __ Addu(scratch,
object, scratch);
5905 __ bind(&out_of_object);
5908 __ Subu(scratch, result, scratch);
5911 __ bind(deferred->exit());
5916 void LCodeGen::DoStoreFrameContext(LStoreFrameContext* instr) {
5917 Register context =
ToRegister(instr->context());
5922 void LCodeGen::DoAllocateBlockContext(LAllocateBlockContext* instr) {
5923 Handle<ScopeInfo> scope_info = instr->scope_info();
5924 __ li(at, scope_info);
5926 CallRuntime(Runtime::kPushBlockContext, 2, instr);
An object reference managed by the v8 garbage collector.
static AllocationSiteMode GetMode(ElementsKind boilerplate_elements_kind)
static const int kLengthOffset
static const int kInstrSize
friend class BlockTrampolinePoolScope
static U update(U previous, T value)
static const int kValueOffset
static const int kHeaderSize
static Address GetDeoptimizationEntry(Isolate *isolate, int id, BailoutType type, GetEntryMode mode=ENSURE_ENTRY_CODE)
static const int kEnumCacheOffset
virtual void BeforeCall(int call_size) const OVERRIDE
virtual ~SafepointGenerator()
SafepointGenerator(LCodeGen *codegen, LPointerMap *pointers, Safepoint::DeoptMode mode)
virtual void AfterCall() const OVERRIDE
static const int kHeaderSize
static int OffsetOfElementAt(int index)
static int SizeFor(int length)
static const int kGlobalProxyOffset
@ kAllUsesTruncatingToInt32
virtual HSourcePosition position() const
static Handle< T > cast(Handle< S > that)
static const uint32_t kSignMask
static const int kValueOffset
static const int kExponentBits
static const int kExponentBias
static const int kExponentShift
static const int kMapOffset
static const int kValueOffset
static const int kCacheStampOffset
static const int kSharedFunctionInfoOffset
static const int kContextOffset
static const int kCodeEntryOffset
static const int kPrototypeOrInitialMapOffset
static const int kHeaderSize
static const int kPropertiesOffset
static const int kInObjectFieldCount
static const int kFunctionOffset
bool IsNextEmittedBlock(int block_id) const
void RestoreCallerDoubles()
void DoStoreKeyedFixedArray(LStoreKeyed *instr)
DwVfpRegister ToDoubleRegister(LOperand *op) const
void RecordSafepointWithRegisters(LPointerMap *pointers, int arguments, Safepoint::DeoptMode mode)
@ RECORD_SIMPLE_SAFEPOINT
@ RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
void EmitBranchF(InstrType instr, Condition condition, FPURegister src1, FPURegister src2)
int inlined_function_count_
bool IsSmi(LConstantOperand *op) const
friend class SafepointGenerator
TranslationBuffer translations_
MemOperand BuildSeqStringOperand(Register string, LOperand *index, String::Encoding encoding)
Condition EmitIsString(Register input, Register temp1, Label *is_not_string, SmiCheck check_needed)
DwVfpRegister EmitLoadDoubleRegister(LOperand *op, SwVfpRegister flt_scratch, DwVfpRegister dbl_scratch)
void DoDeferredStackCheck(LStackCheck *instr)
SafepointTableBuilder safepoints_
void EmitVectorLoadICRegisters(T *instr)
static Condition TokenToCondition(Token::Value op, bool is_unsigned)
ZoneList< Handle< Object > > deoptimization_literals_
void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal *instr, Label *map_check, Label *bool_load)
MemOperand PrepareKeyedOperand(Register key, Register base, bool key_is_constant, int constant_key, int element_size, int shift_size, int base_offset)
void PopulateDeoptimizationLiteralsWithInlinedFunctions()
void AddToTranslation(LEnvironment *environment, Translation *translation, LOperand *op, bool is_tagged, bool is_uint32, int *object_index_pointer, int *dematerialized_index_pointer)
ZoneList< LEnvironment * > deoptimizations_
void EmitIntegerMathAbs(LMathAbs *instr)
int32_t ToRepresentation(LConstantOperand *op, const Representation &r) const
void CallRuntimeFromDeferred(Runtime::FunctionId id, int argc, LInstruction *instr, LOperand *context)
void EmitIsConstructCall(Register temp1, Register temp2)
void EmitFalseBranchF(InstrType instr, Condition condition, FPURegister src1, FPURegister src2)
int32_t ToInteger32(LConstantOperand *op) const
LPlatformChunk * chunk() const
void FinishCode(Handle< Code > code)
int LookupDestination(int block_id) const
Condition EmitTypeofIs(Label *true_label, Label *false_label, Register input, Handle< String > type_name)
void DoDeferredAllocate(LAllocate *instr)
void RecordSafepoint(LPointerMap *pointers, Safepoint::Kind kind, int arguments, Safepoint::DeoptMode mode)
void DoDeferredTaggedToI(LTaggedToI *instr)
LowDwVfpRegister double_scratch0()
void CallCodeGeneric(Handle< Code > code, RelocInfo::Mode mode, LInstruction *instr, SafepointMode safepoint_mode, TargetAddressStorageMode storage_mode=CAN_INLINE_TARGET_ADDRESS)
void DoDeferredStringCharCodeAt(LStringCharCodeAt *instr)
void CallCode(Handle< Code > code, RelocInfo::Mode mode, LInstruction *instr, TargetAddressStorageMode storage_mode=CAN_INLINE_TARGET_ADDRESS)
Safepoint::Kind expected_safepoint_kind_
ZoneList< LDeferredCode * > deferred_
bool GenerateDeferredCode()
void DoDeferredNumberTagIU(LInstruction *instr, LOperand *value, LOperand *temp1, LOperand *temp2, IntegerSignedness signedness)
Handle< Object > ToHandle(LConstantOperand *op) const
bool NeedsEagerFrame() const
DoubleRegister double_scratch()
void RegisterEnvironmentForDeoptimization(LEnvironment *environment, Safepoint::DeoptMode mode)
friend class LDeferredCode
void LoadContextFromDeferred(LOperand *context)
void GenerateOsrPrologue()
bool NeedsDeferredFrame() const
void DoDeferredInstanceMigration(LCheckMaps *instr, Register object)
void DoDeferredLoadMutableDouble(LLoadFieldByIndex *instr, Register result, Register object, Register index)
int DefineDeoptimizationLiteral(Handle< Object > literal)
void DeoptimizeIf(Condition condition, LInstruction *instr, const char *detail, Deoptimizer::BailoutType bailout_type)
int GetStackSlotCount() const
void CallKnownFunction(Handle< JSFunction > function, int formal_parameter_count, int arity, LInstruction *instr, R1State r1_state)
void WriteTranslation(LEnvironment *environment, Translation *translation)
void DoDeferredMathAbsTaggedHeapNumber(LMathAbs *instr)
void DoLoadKeyedFixedDoubleArray(LLoadKeyed *instr)
bool GenerateSafepointTable()
Operand ToOperand(LOperand *op)
Register EmitLoadRegister(LOperand *op, Register scratch)
void EmitClassOfTest(Label *if_true, Label *if_false, Handle< String > class_name, Register input, Register temporary, Register temporary2)
void DoLoadKeyedExternalArray(LLoadKeyed *instr)
double ToDouble(LConstantOperand *op) const
Register ToRegister(LOperand *op) const
void DoStoreKeyedExternalArray(LStoreKeyed *instr)
void RecordAndWritePosition(int position) OVERRIDE
bool IsInteger32(LConstantOperand *op) const
void PopulateDeoptimizationData(Handle< Code > code)
void DoParallelMove(LParallelMove *move)
Smi * ToSmi(LConstantOperand *op) const
void CallRuntime(const Runtime::Function *function, int num_arguments, LInstruction *instr, SaveFPRegsMode save_doubles=kDontSaveFPRegs)
void DoDeferredStringCharFromCode(LStringCharFromCode *instr)
ZoneList< Deoptimizer::JumpTableEntry > jump_table_
Condition EmitIsObject(Register input, Register temp1, Label *is_not_object, Label *is_object)
void EnsureSpaceForLazyDeopt(int space_needed) OVERRIDE
void EmitNumberUntagD(LNumberUntagD *instr, Register input, DwVfpRegister result, NumberUntagDMode mode)
MemOperand ToMemOperand(LOperand *op) const
void GenerateBodyInstructionPre(LInstruction *instr) OVERRIDE
MemOperand ToHighMemOperand(LOperand *op) const
void RecordSafepointWithLazyDeopt(LInstruction *instr, SafepointMode safepoint_mode)
void EmitFalseBranch(InstrType instr, Condition condition)
RAStatus GetRAState() const
void DoLoadKeyedFixedArray(LLoadKeyed *instr)
LCodeGen(LChunk *chunk, MacroAssembler *assembler, CompilationInfo *info)
void EmitBranch(InstrType instr, Condition condition)
void DoDeferredNumberTagD(LNumberTagD *instr)
void DoStoreKeyedFixedDoubleArray(LStoreKeyed *instr)
friend class LEnvironment
virtual const char * Mnemonic() const =0
LEnvironment * environment() const
HValue * hydrogen_value() const
LPointerMap * pointer_map() const
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
static const Register ReceiverRegister()
static const Register NameRegister()
static void GenerateMiss(MacroAssembler *masm)
static const int kIsUndetectable
static const int kBitFieldOffset
static const int kInstanceTypeOffset
static const int kConstructorOffset
static const int kPrototypeOffset
static void EmitMathExp(MacroAssembler *masm, DwVfpRegister input, DwVfpRegister result, DwVfpRegister double_scratch1, DwVfpRegister double_scratch2, Register temp1, Register temp2, Register temp3)
static const Register exponent()
static const int kHashFieldOffset
static const int kMaxRegularHeapObjectSize
static void MaybeCallEntryHook(MacroAssembler *masm)
static const int kNoPosition
static Representation Integer32()
int num_parameters() const
Variable * parameter(int index) const
static const int kHeaderSize
static const int kDontAdaptArgumentsSentinel
static const int kInstanceClassNameOffset
static const int kCompilerHintsOffset
static const int kMaxValue
static Smi * FromInt(int value)
static const int kFixedFrameSizeFromFp
static const int kContextOffset
static const int kCallerSPOffset
static const int kMarkerOffset
static const int kCallerFPOffset
static const Register ReceiverRegister()
static const Register NameRegister()
static const Register ValueRegister()
static Handle< Code > initialize_stub(Isolate *isolate, StrictMode strict_mode)
static void Generate(MacroAssembler *masm, Register string, Register index, Register result, Label *call_runtime)
static const unsigned int kContainsCachedArrayIndexMask
static const int32_t kMaxOneByteCharCode
static const int kLengthOffset
bool Equals(String *other)
static const Register VectorRegister()
static const Register SlotRegister()
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf map
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be expose gc extension under the specified name show built in functions in stack traces use random jit cookie to mask large constants minimum length for automatic enable preparsing CPU profiler sampling interval in microseconds trace out of bounds accesses to external arrays default size of stack region v8 is allowed to maximum length of function source code printed in a stack trace min size of a semi the new space consists of two semi spaces print one trace line following each garbage collection do not print trace line after scavenger collection print cumulative GC statistics in only print modified registers Trace simulator debug messages Implied by trace sim abort randomize hashes to avoid predictable hash Fixed seed to use to hash property Print the time it takes to deserialize the snapshot A filename with extra code to be included in the A file to write the raw snapshot bytes to(mksnapshot only)") DEFINE_STRING(raw_context_file
enable harmony numeric enable harmony object literal extensions Optimize object size
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be expose gc extension under the specified name show built in functions in stack traces use random jit cookie to mask large constants minimum length for automatic enable preparsing CPU profiler sampling interval in microseconds trace out of bounds accesses to external arrays default size of stack region v8 is allowed to maximum length of function source code printed in a stack trace min size of a semi the new space consists of two semi spaces print one trace line following each garbage collection do not print trace line after scavenger collection print cumulative GC statistics in name
enable harmony numeric literals(0o77, 0b11)") DEFINE_BOOL(harmony_object_literals
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long mode(MIPS only)") DEFINE_BOOL(enable_always_align_csp
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be NULL
enable harmony numeric enable harmony object literal extensions Optimize object Array shift
#define DCHECK(condition)
#define DCHECK_EQ(v1, v2)
@ PRETENURE_OLD_POINTER_SPACE
@ PRETENURE_OLD_DATA_SPACE
bool IsPowerOfTwo32(uint32_t value)
static int Push(SpecialRPOStackFrame *stack, int depth, BasicBlock *child, int unvisited)
int WhichPowerOf2(uint32_t x)
Vector< const char > CStrVector(const char *data)
const uint32_t kStringEncodingMask
MemOperand ContextOperand(Register context, int index)
Condition CommuteCondition(Condition cond)
bool EvalComparison(Token::Value op, double op1, double op2)
@ kCheckForInexactConversion
const uint32_t kTwoByteStringTag
DwVfpRegister DoubleRegister
const int kPointerSizeLog2
@ LAST_NONCALLABLE_SPEC_OBJECT_TYPE
@ NUM_OF_CALLABLE_SPEC_OBJECT_TYPES
@ FIRST_NONCALLABLE_SPEC_OBJECT_TYPE
@ EXTERNAL_UINT16_ELEMENTS
@ EXTERNAL_INT16_ELEMENTS
@ EXTERNAL_UINT8_ELEMENTS
@ EXTERNAL_INT32_ELEMENTS
@ FAST_HOLEY_DOUBLE_ELEMENTS
@ SLOPPY_ARGUMENTS_ELEMENTS
@ EXTERNAL_FLOAT32_ELEMENTS
@ EXTERNAL_FLOAT64_ELEMENTS
@ FAST_HOLEY_SMI_ELEMENTS
@ EXTERNAL_UINT32_ELEMENTS
@ EXTERNAL_UINT8_CLAMPED_ELEMENTS
bool IsSimpleMapChangeTransition(ElementsKind from_kind, ElementsKind to_kind)
const uint32_t kOneByteStringTag
int ElementsKindToShiftSize(ElementsKind elements_kind)
MemOperand FieldMemOperand(Register object, int offset)
int32_t WhichPowerOf2Abs(int32_t x)
int StackSlotOffset(int index)
bool IsFastPackedElementsKind(ElementsKind kind)
@ NUMBER_CANDIDATE_IS_SMI
@ NUMBER_CANDIDATE_IS_ANY_TAGGED
ElementsKind GetHoleyElementsKind(ElementsKind packed_kind)
AllocationSiteOverrideMode
@ DISABLE_ALLOCATION_SITES
Condition NegateCondition(Condition cond)
static InstanceType TestType(HHasInstanceTypeAndBranch *instr)
const uint32_t kStringRepresentationMask
static Condition BranchCondition(HHasInstanceTypeAndBranch *instr)
static int ArgumentsOffsetWithoutFrame(int index)
static Condition ComputeCompareCondition(Token::Value op)
static const char * LabelType(LLabel *label)
MemOperand GlobalObjectOperand()
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
const intptr_t kSmiTagMask
@ NO_CALL_CONSTRUCTOR_FLAGS
bool IsFastSmiElementsKind(ElementsKind kind)
const uint32_t kSlotsZapValue
const uint32_t kHoleNanUpper32
PerThreadAssertScopeDebugOnly< DEFERRED_HANDLE_DEREFERENCE_ASSERT, true > AllowDeferredHandleDereference
Debugger support for the V8 JavaScript engine.
bool IsEquivalentTo(const JumpTableEntry &other) const
bool is(DwVfpRegister reg) const
static DwVfpRegister FromAllocationIndex(int index)
SwVfpRegister low() const
static Register FromAllocationIndex(int index)
bool is(Register reg) const
#define T(name, string, precedence)