V8 Project
assembler.cc
1 // Copyright (c) 1994-2006 Sun Microsystems Inc.
2 // All Rights Reserved.
3 //
4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions are
6 // met:
7 //
8 // - Redistributions of source code must retain the above copyright notice,
9 // this list of conditions and the following disclaimer.
10 //
11 // - Redistribution in binary form must reproduce the above copyright
12 // notice, this list of conditions and the following disclaimer in the
13 // documentation and/or other materials provided with the distribution.
14 //
15 // - Neither the name of Sun Microsystems or the names of contributors may
16 // be used to endorse or promote products derived from this software without
17 // specific prior written permission.
18 //
19 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
20 // IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
21 // THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
22 // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
23 // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
24 // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
25 // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
26 // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
27 // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
28 // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
29 // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 
31 // The original source code covered by the above license has been
32 // modified significantly by Google Inc.
33 // Copyright 2012 the V8 project authors. All rights reserved.
34 
35 #include "src/assembler.h"
36 
37 #include <cmath>
38 #include "src/api.h"
39 #include "src/base/cpu.h"
40 #include "src/base/lazy-instance.h"
42 #include "src/builtins.h"
43 #include "src/codegen.h"
44 #include "src/counters.h"
45 #include "src/cpu-profiler.h"
46 #include "src/debug.h"
47 #include "src/deoptimizer.h"
48 #include "src/execution.h"
49 #include "src/ic/ic.h"
50 #include "src/ic/stub-cache.h"
51 #include "src/isolate-inl.h"
52 #include "src/jsregexp.h"
54 #include "src/regexp-stack.h"
55 #include "src/runtime/runtime.h"
56 #include "src/serialize.h"
57 #include "src/token.h"
58 
59 #if V8_TARGET_ARCH_IA32
60 #include "src/ia32/assembler-ia32-inl.h" // NOLINT
61 #elif V8_TARGET_ARCH_X64
62 #include "src/x64/assembler-x64-inl.h" // NOLINT
63 #elif V8_TARGET_ARCH_ARM64
64 #include "src/arm64/assembler-arm64-inl.h" // NOLINT
65 #elif V8_TARGET_ARCH_ARM
66 #include "src/arm/assembler-arm-inl.h" // NOLINT
67 #elif V8_TARGET_ARCH_MIPS
68 #include "src/mips/assembler-mips-inl.h" // NOLINT
69 #elif V8_TARGET_ARCH_MIPS64
70 #include "src/mips64/assembler-mips64-inl.h" // NOLINT
71 #elif V8_TARGET_ARCH_X87
72 #include "src/x87/assembler-x87-inl.h" // NOLINT
73 #else
74 #error "Unknown architecture."
75 #endif
76 
77 // Include native regexp-macro-assembler.
78 #ifndef V8_INTERPRETED_REGEXP
79 #if V8_TARGET_ARCH_IA32
80 #include "src/ia32/regexp-macro-assembler-ia32.h" // NOLINT
81 #elif V8_TARGET_ARCH_X64
82 #include "src/x64/regexp-macro-assembler-x64.h" // NOLINT
83 #elif V8_TARGET_ARCH_ARM64
84 #include "src/arm64/regexp-macro-assembler-arm64.h" // NOLINT
85 #elif V8_TARGET_ARCH_ARM
86 #include "src/arm/regexp-macro-assembler-arm.h" // NOLINT
87 #elif V8_TARGET_ARCH_MIPS
88 #include "src/mips/regexp-macro-assembler-mips.h" // NOLINT
89 #elif V8_TARGET_ARCH_MIPS64
90 #include "src/mips64/regexp-macro-assembler-mips64.h" // NOLINT
91 #elif V8_TARGET_ARCH_X87
92 #include "src/x87/regexp-macro-assembler-x87.h" // NOLINT
93 #else // Unknown architecture.
94 #error "Unknown architecture."
95 #endif // Target architecture.
96 #endif // V8_INTERPRETED_REGEXP
97 
98 namespace v8 {
99 namespace internal {
100 
101 // -----------------------------------------------------------------------------
102 // Common double constants.
103 
104 struct DoubleConstant BASE_EMBEDDED {
105 double min_int;
106 double one_half;
107 double minus_one_half;
108 double negative_infinity;
109 double canonical_non_hole_nan;
110 double the_hole_nan;
111 double uint32_bias;
112 };
113 
114 static DoubleConstant double_constants;
115 
116 const char* const RelocInfo::kFillerCommentString = "DEOPTIMIZATION PADDING";
117 
118 static bool math_exp_data_initialized = false;
119 static base::Mutex* math_exp_data_mutex = NULL;
120 static double* math_exp_constants_array = NULL;
121 static double* math_exp_log_table_array = NULL;
122 
123 // -----------------------------------------------------------------------------
124 // Implementation of AssemblerBase
125 
126 AssemblerBase::AssemblerBase(Isolate* isolate, void* buffer, int buffer_size)
127  : isolate_(isolate),
128  jit_cookie_(0),
129  enabled_cpu_features_(0),
130  emit_debug_code_(FLAG_debug_code),
131  predictable_code_size_(false),
132  // We may use the assembler without an isolate.
133  serializer_enabled_(isolate && isolate->serializer_enabled()) {
134  if (FLAG_mask_constants_with_cookie && isolate != NULL) {
135  jit_cookie_ = isolate->random_number_generator()->NextInt();
136  }
137  own_buffer_ = buffer == NULL;
138  if (buffer_size == 0) buffer_size = kMinimalBufferSize;
139  DCHECK(buffer_size > 0);
140  if (own_buffer_) buffer = NewArray<byte>(buffer_size);
141  buffer_ = static_cast<byte*>(buffer);
142  buffer_size_ = buffer_size;
143 
144  pc_ = buffer_;
145 }
146 
147 
148 AssemblerBase::~AssemblerBase() {
149  if (own_buffer_) DeleteArray(buffer_);
150 }
151 
152 
153 // -----------------------------------------------------------------------------
154 // Implementation of PredictableCodeSizeScope
155 
156 PredictableCodeSizeScope::PredictableCodeSizeScope(AssemblerBase* assembler,
157  int expected_size)
158  : assembler_(assembler),
159  expected_size_(expected_size),
160  start_offset_(assembler->pc_offset()),
161  old_value_(assembler->predictable_code_size()) {
162  assembler_->set_predictable_code_size(true);
163 }
164 
165 
166 PredictableCodeSizeScope::~PredictableCodeSizeScope() {
167  // TODO(svenpanne) Remove the 'if' when everything works.
168  if (expected_size_ >= 0) {
169  CHECK_EQ(expected_size_, assembler_->pc_offset() - start_offset_);
170  }
171  assembler_->set_predictable_code_size(old_value_);
172 }
173 
174 
175 // -----------------------------------------------------------------------------
176 // Implementation of CpuFeatureScope
177 
178 #ifdef DEBUG
179 CpuFeatureScope::CpuFeatureScope(AssemblerBase* assembler, CpuFeature f)
180  : assembler_(assembler) {
181  DCHECK(CpuFeatures::IsSupported(f));
182  old_enabled_ = assembler_->enabled_cpu_features();
183  uint64_t mask = static_cast<uint64_t>(1) << f;
184  // TODO(svenpanne) This special case below doesn't belong here!
185 #if V8_TARGET_ARCH_ARM
186  // ARMv7 is implied by VFP3.
187  if (f == VFP3) {
188  mask |= static_cast<uint64_t>(1) << ARMv7;
189  }
190 #endif
191  assembler_->set_enabled_cpu_features(old_enabled_ | mask);
192 }
193 
194 
195 CpuFeatureScope::~CpuFeatureScope() {
196  assembler_->set_enabled_cpu_features(old_enabled_);
197 }
198 #endif
199 
200 
201 bool CpuFeatures::initialized_ = false;
202 unsigned CpuFeatures::supported_ = 0;
203 unsigned CpuFeatures::cache_line_size_ = 0;
204 
205 
206 // -----------------------------------------------------------------------------
207 // Implementation of Label
208 
209 int Label::pos() const {
210  if (pos_ < 0) return -pos_ - 1;
211  if (pos_ > 0) return pos_ - 1;
212  UNREACHABLE();
213  return 0;
214 }
215 
216 
217 // -----------------------------------------------------------------------------
218 // Implementation of RelocInfoWriter and RelocIterator
219 //
220 // Relocation information is written backwards in memory, from high addresses
221 // towards low addresses, byte by byte. Therefore, in the encodings listed
222 // below, the first byte listed it at the highest address, and successive
223 // bytes in the record are at progressively lower addresses.
224 //
225 // Encoding
226 //
227 // The most common modes are given single-byte encodings. Also, it is
228 // easy to identify the type of reloc info and skip unwanted modes in
229 // an iteration.
230 //
231 // The encoding relies on the fact that there are fewer than 14
232 // different relocation modes using standard non-compact encoding.
233 //
234 // The first byte of a relocation record has a tag in its low 2 bits:
235 // Here are the record schemes, depending on the low tag and optional higher
236 // tags.
237 //
238 // Low tag:
239 // 00: embedded_object: [6-bit pc delta] 00
240 //
241 // 01: code_target: [6-bit pc delta] 01
242 //
243 // 10: short_data_record: [6-bit pc delta] 10 followed by
244 // [6-bit data delta] [2-bit data type tag]
245 //
246 // 11: long_record [2-bit high tag][4 bit middle_tag] 11
247 // followed by variable data depending on type.
248 //
249 // 2-bit data type tags, used in short_data_record and data_jump long_record:
250 // code_target_with_id: 00
251 // position: 01
252 // statement_position: 10
253 // comment: 11 (not used in short_data_record)
254 //
255 // Long record format:
256 // 4-bit middle_tag:
257 // 0000 - 1100 : Short record for RelocInfo::Mode middle_tag + 2
258 // (The middle_tag encodes rmode - RelocInfo::LAST_COMPACT_ENUM,
259 // and is between 0000 and 1100)
260 // The format is:
261 // 00 [4 bit middle_tag] 11 followed by
262 // 00 [6 bit pc delta]
263 //
264 // 1101: constant or veneer pool. Used only on ARM and ARM64 for now.
265 // The format is: [2-bit sub-type] 1101 11
266 // signed int (size of the pool).
267 // The 2-bit sub-types are:
268 // 00: constant pool
269 // 01: veneer pool
270 // 1110: long_data_record
271 // The format is: [2-bit data_type_tag] 1110 11
272 // signed intptr_t, lowest byte written first
273 // (except data_type code_target_with_id, which
274 // is followed by a signed int, not intptr_t.)
275 //
276 // 1111: long_pc_jump
277 // The format is:
278 // pc-jump: 00 1111 11,
279 // 00 [6 bits pc delta]
280 // or
281 // pc-jump (variable length):
282 // 01 1111 11,
283 // [7 bits data] 0
284 // ...
285 // [7 bits data] 1
286 // (Bits 6..31 of pc delta, with leading zeroes
287 // dropped, and last non-zero chunk tagged with 1.)
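// --- Editor's illustrative sketch (not part of the original assembler.cc) ---
// A worked example of the single-byte records described above, assuming the
// [6-bit pc delta][2-bit low tag] layout. The constants below are local to the
// sketch and simply mirror kTagBits / kEmbeddedObjectTag defined further down
// in this file.
static inline byte TaggedPCEncodingExample() {
  const int kExampleTagBits = 2;
  const byte kExampleEmbeddedObjectTag = 0;  // low tag 00
  uint32_t pc_delta = 12;                    // fits in the 6-bit field
  byte record = static_cast<byte>((pc_delta << kExampleTagBits) |
                                  kExampleEmbeddedObjectTag);
  DCHECK(record == 0x30);  // 0b001100_00
  DCHECK(static_cast<uint32_t>(record >> kExampleTagBits) == pc_delta);
  return record;
}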
288 
289 
290 #ifdef DEBUG
291 const int kMaxStandardNonCompactModes = 14;
292 #endif
293 
294 const int kTagBits = 2;
295 const int kTagMask = (1 << kTagBits) - 1;
296 const int kExtraTagBits = 4;
297 const int kLocatableTypeTagBits = 2;
298 const int kSmallDataBits = kBitsPerByte - kLocatableTypeTagBits;
299 
300 const int kEmbeddedObjectTag = 0;
301 const int kCodeTargetTag = 1;
302 const int kLocatableTag = 2;
303 const int kDefaultTag = 3;
304 
305 const int kPCJumpExtraTag = (1 << kExtraTagBits) - 1;
306 
307 const int kSmallPCDeltaBits = kBitsPerByte - kTagBits;
308 const int kSmallPCDeltaMask = (1 << kSmallPCDeltaBits) - 1;
309 const int RelocInfo::kMaxSmallPCDelta = kSmallPCDeltaMask;
310 
311 const int kVariableLengthPCJumpTopTag = 1;
312 const int kChunkBits = 7;
313 const int kChunkMask = (1 << kChunkBits) - 1;
314 const int kLastChunkTagBits = 1;
315 const int kLastChunkTagMask = 1;
316 const int kLastChunkTag = 1;
317 
318 
319 const int kDataJumpExtraTag = kPCJumpExtraTag - 1;
320 
321 const int kCodeWithIdTag = 0;
322 const int kNonstatementPositionTag = 1;
323 const int kStatementPositionTag = 2;
324 const int kCommentTag = 3;
325 
326 const int kPoolExtraTag = kPCJumpExtraTag - 2;
327 const int kConstPoolTag = 0;
328 const int kVeneerPoolTag = 1;
329 
330 
331 uint32_t RelocInfoWriter::WriteVariableLengthPCJump(uint32_t pc_delta) {
332  // Return if the pc_delta can fit in kSmallPCDeltaBits bits.
333  // Otherwise write a variable length PC jump for the bits that do
334  // not fit in the kSmallPCDeltaBits bits.
335  if (is_uintn(pc_delta, kSmallPCDeltaBits)) return pc_delta;
336  WriteExtraTag(kPCJumpExtraTag, kVariableLengthPCJumpTopTag);
337  uint32_t pc_jump = pc_delta >> kSmallPCDeltaBits;
338  DCHECK(pc_jump > 0);
339  // Write kChunkBits size chunks of the pc_jump.
340  for (; pc_jump > 0; pc_jump = pc_jump >> kChunkBits) {
341  byte b = pc_jump & kChunkMask;
342  *--pos_ = b << kLastChunkTagBits;
343  }
344  // Tag the last chunk so it can be identified.
345  *pos_ = *pos_ | kLastChunkTag;
346  // Return the remaining kSmallPCDeltaBits of the pc_delta.
347  return pc_delta & kSmallPCDeltaMask;
348 }
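// --- Editor's illustrative sketch (not part of the original assembler.cc) ---
// Worked example of WriteVariableLengthPCJump above: pc_delta = 0x12345 keeps
// its low 6 bits (0x05) for the tagged pc byte and emits the remaining bits
// (0x12345 >> 6 == 0x48D) as 7-bit chunks, least significant chunk first, with
// the most significant chunk tagged in its low bit. All names below are local
// to the sketch; only the constants defined above are reused.
static inline uint32_t VariableLengthPCJumpExample() {
  uint32_t pc_delta = 0x12345;
  uint32_t pc_jump = pc_delta >> kSmallPCDeltaBits;  // 0x48D
  byte chunks[8];
  int count = 0;
  for (; pc_jump > 0; pc_jump >>= kChunkBits) {
    chunks[count++] =
        static_cast<byte>((pc_jump & kChunkMask) << kLastChunkTagBits);
  }
  chunks[count - 1] |= kLastChunkTag;  // Tag the most significant chunk.
  DCHECK(count == 2 && chunks[0] == 0x1A && chunks[1] == 0x13);
  // Decoding (cf. AdvanceReadVariableLengthPCJump below) reassembles the jump.
  uint32_t decoded = 0;
  for (int i = 0; i < count; i++) {
    decoded |= static_cast<uint32_t>(chunks[i] >> kLastChunkTagBits)
               << (i * kChunkBits);
    if ((chunks[i] & kLastChunkTagMask) == kLastChunkTag) break;
  }
  DCHECK(((decoded << kSmallPCDeltaBits) | (pc_delta & kSmallPCDeltaMask)) ==
         pc_delta);
  return decoded;
}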
349 
350 
351 void RelocInfoWriter::WriteTaggedPC(uint32_t pc_delta, int tag) {
352  // Write a byte of tagged pc-delta, possibly preceded by var. length pc-jump.
353  pc_delta = WriteVariableLengthPCJump(pc_delta);
354  *--pos_ = pc_delta << kTagBits | tag;
355 }
356 
357 
358 void RelocInfoWriter::WriteTaggedData(intptr_t data_delta, int tag) {
359  *--pos_ = static_cast<byte>(data_delta << kLocatableTypeTagBits | tag);
360 }
361 
362 
363 void RelocInfoWriter::WriteExtraTag(int extra_tag, int top_tag) {
364  *--pos_ = static_cast<int>(top_tag << (kTagBits + kExtraTagBits) |
365  extra_tag << kTagBits |
366  kDefaultTag);
367 }
368 
369 
370 void RelocInfoWriter::WriteExtraTaggedPC(uint32_t pc_delta, int extra_tag) {
371  // Write two-byte tagged pc-delta, possibly preceded by var. length pc-jump.
372  pc_delta = WriteVariableLengthPCJump(pc_delta);
373  WriteExtraTag(extra_tag, 0);
374  *--pos_ = pc_delta;
375 }
376 
377 
378 void RelocInfoWriter::WriteExtraTaggedIntData(int data_delta, int top_tag) {
379  WriteExtraTag(kDataJumpExtraTag, top_tag);
380  for (int i = 0; i < kIntSize; i++) {
381  *--pos_ = static_cast<byte>(data_delta);
382  // Signed right shift is arithmetic shift. Tested in test-utils.cc.
383  data_delta = data_delta >> kBitsPerByte;
384  }
385 }
386 
387 
388 void RelocInfoWriter::WriteExtraTaggedPoolData(int data, int pool_type) {
389  WriteExtraTag(kPoolExtraTag, pool_type);
390  for (int i = 0; i < kIntSize; i++) {
391  *--pos_ = static_cast<byte>(data);
392  // Signed right shift is arithmetic shift. Tested in test-utils.cc.
393  data = data >> kBitsPerByte;
394  }
395 }
396 
397 
398 void RelocInfoWriter::WriteExtraTaggedData(intptr_t data_delta, int top_tag) {
399  WriteExtraTag(kDataJumpExtraTag, top_tag);
400  for (int i = 0; i < kIntptrSize; i++) {
401  *--pos_ = static_cast<byte>(data_delta);
402  // Signed right shift is arithmetic shift. Tested in test-utils.cc.
403  data_delta = data_delta >> kBitsPerByte;
404  }
405 }
406 
407 
408 void RelocInfoWriter::Write(const RelocInfo* rinfo) {
409 #ifdef DEBUG
410  byte* begin_pos = pos_;
411 #endif
412  DCHECK(rinfo->rmode() < RelocInfo::NUMBER_OF_MODES);
413  DCHECK(rinfo->pc() - last_pc_ >= 0);
414  DCHECK(RelocInfo::LAST_STANDARD_NONCOMPACT_ENUM - RelocInfo::LAST_COMPACT_ENUM
415  <= kMaxStandardNonCompactModes);
416  // Use unsigned delta-encoding for pc.
417  uint32_t pc_delta = static_cast<uint32_t>(rinfo->pc() - last_pc_);
418  RelocInfo::Mode rmode = rinfo->rmode();
419 
420  // The two most common modes are given small tags, and usually fit in a byte.
421  if (rmode == RelocInfo::EMBEDDED_OBJECT) {
422  WriteTaggedPC(pc_delta, kEmbeddedObjectTag);
423  } else if (rmode == RelocInfo::CODE_TARGET) {
424  WriteTaggedPC(pc_delta, kCodeTargetTag);
425  DCHECK(begin_pos - pos_ <= RelocInfo::kMaxCallSize);
426  } else if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
427  // Use signed delta-encoding for id.
428  DCHECK(static_cast<int>(rinfo->data()) == rinfo->data());
429  int id_delta = static_cast<int>(rinfo->data()) - last_id_;
430  // Check if delta is small enough to fit in a tagged byte.
431  if (is_intn(id_delta, kSmallDataBits)) {
432  WriteTaggedPC(pc_delta, kLocatableTag);
433  WriteTaggedData(id_delta, kCodeWithIdTag);
434  } else {
435  // Otherwise, use costly encoding.
436  WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
437  WriteExtraTaggedIntData(id_delta, kCodeWithIdTag);
438  }
439  last_id_ = static_cast<int>(rinfo->data());
440  } else if (RelocInfo::IsPosition(rmode)) {
441  // Use signed delta-encoding for position.
442  DCHECK(static_cast<int>(rinfo->data()) == rinfo->data());
443  int pos_delta = static_cast<int>(rinfo->data()) - last_position_;
444  int pos_type_tag = (rmode == RelocInfo::POSITION) ? kNonstatementPositionTag
445  : kStatementPositionTag;
446  // Check if delta is small enough to fit in a tagged byte.
447  if (is_intn(pos_delta, kSmallDataBits)) {
448  WriteTaggedPC(pc_delta, kLocatableTag);
449  WriteTaggedData(pos_delta, pos_type_tag);
450  } else {
451  // Otherwise, use costly encoding.
452  WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
453  WriteExtraTaggedIntData(pos_delta, pos_type_tag);
454  }
455  last_position_ = static_cast<int>(rinfo->data());
456  } else if (RelocInfo::IsComment(rmode)) {
457  // Comments are normally not generated, so we use the costly encoding.
458  WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
459  WriteExtraTaggedData(rinfo->data(), kCommentTag);
460  DCHECK(begin_pos - pos_ >= RelocInfo::kMinRelocCommentSize);
461  } else if (RelocInfo::IsConstPool(rmode) || RelocInfo::IsVeneerPool(rmode)) {
462  WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
463  WriteExtraTaggedPoolData(static_cast<int>(rinfo->data()),
464  RelocInfo::IsConstPool(rmode) ? kConstPoolTag
465  : kVeneerPoolTag);
466  } else {
467  DCHECK(rmode > RelocInfo::LAST_COMPACT_ENUM);
468  int saved_mode = rmode - RelocInfo::LAST_COMPACT_ENUM;
469  // For all other modes we simply use the mode as the extra tag.
470  // None of these modes need a data component.
471  DCHECK(saved_mode < kPCJumpExtraTag && saved_mode < kDataJumpExtraTag);
472  WriteExtraTaggedPC(pc_delta, saved_mode);
473  }
474  last_pc_ = rinfo->pc();
475 #ifdef DEBUG
476  DCHECK(begin_pos - pos_ <= kMaxSize);
477 #endif
478 }
479 
480 
481 inline int RelocIterator::AdvanceGetTag() {
482  return *--pos_ & kTagMask;
483 }
484 
485 
486 inline int RelocIterator::GetExtraTag() {
487  return (*pos_ >> kTagBits) & ((1 << kExtraTagBits) - 1);
488 }
489 
490 
491 inline int RelocIterator::GetTopTag() {
492  return *pos_ >> (kTagBits + kExtraTagBits);
493 }
494 
495 
496 inline void RelocIterator::ReadTaggedPC() {
497  rinfo_.pc_ += *pos_ >> kTagBits;
498 }
499 
500 
501 inline void RelocIterator::AdvanceReadPC() {
502  rinfo_.pc_ += *--pos_;
503 }
504 
505 
506 void RelocIterator::AdvanceReadId() {
507  int x = 0;
508  for (int i = 0; i < kIntSize; i++) {
509  x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
510  }
511  last_id_ += x;
512  rinfo_.data_ = last_id_;
513 }
514 
515 
516 void RelocIterator::AdvanceReadPoolData() {
517  int x = 0;
518  for (int i = 0; i < kIntSize; i++) {
519  x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
520  }
521  rinfo_.data_ = x;
522 }
523 
524 
525 void RelocIterator::AdvanceReadPosition() {
526  int x = 0;
527  for (int i = 0; i < kIntSize; i++) {
528  x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
529  }
530  last_position_ += x;
531  rinfo_.data_ = last_position_;
532 }
533 
534 
535 void RelocIterator::AdvanceReadData() {
536  intptr_t x = 0;
537  for (int i = 0; i < kIntptrSize; i++) {
538  x |= static_cast<intptr_t>(*--pos_) << i * kBitsPerByte;
539  }
540  rinfo_.data_ = x;
541 }
542 
543 
544 void RelocIterator::AdvanceReadVariableLengthPCJump() {
545  // Read the 32-kSmallPCDeltaBits most significant bits of the
546  // pc jump in kChunkBits bit chunks and shift them into place.
547  // Stop when the last chunk is encountered.
548  uint32_t pc_jump = 0;
549  for (int i = 0; i < kIntSize; i++) {
550  byte pc_jump_part = *--pos_;
551  pc_jump |= (pc_jump_part >> kLastChunkTagBits) << i * kChunkBits;
552  if ((pc_jump_part & kLastChunkTagMask) == 1) break;
553  }
554  // The least significant kSmallPCDeltaBits bits will be added
555  // later.
556  rinfo_.pc_ += pc_jump << kSmallPCDeltaBits;
557 }
558 
559 
560 inline int RelocIterator::GetLocatableTypeTag() {
561  return *pos_ & ((1 << kLocatableTypeTagBits) - 1);
562 }
563 
564 
565 inline void RelocIterator::ReadTaggedId() {
566  int8_t signed_b = *pos_;
567  // Signed right shift is arithmetic shift. Tested in test-utils.cc.
568  last_id_ += signed_b >> kLocatableTypeTagBits;
569  rinfo_.data_ = last_id_;
570 }
571 
572 
573 inline void RelocIterator::ReadTaggedPosition() {
574  int8_t signed_b = *pos_;
575  // Signed right shift is arithmetic shift. Tested in test-utils.cc.
576  last_position_ += signed_b >> kLocatableTypeTagBits;
577  rinfo_.data_ = last_position_;
578 }
579 
580 
581 static inline RelocInfo::Mode GetPositionModeFromTag(int tag) {
582  DCHECK(tag == kNonstatementPositionTag ||
583  tag == kStatementPositionTag);
584  return (tag == kNonstatementPositionTag) ?
585  RelocInfo::POSITION :
586  RelocInfo::STATEMENT_POSITION;
587 }
588 
589 
590 void RelocIterator::next() {
591  DCHECK(!done());
592  // Basically, do the opposite of RelocInfoWriter::Write.
593  // Reading of data is as far as possible avoided for unwanted modes,
594  // but we must always update the pc.
595  //
596  // We exit this loop by returning when we find a mode we want.
597  while (pos_ > end_) {
598  int tag = AdvanceGetTag();
599  if (tag == kEmbeddedObjectTag) {
600  ReadTaggedPC();
601  if (SetMode(RelocInfo::EMBEDDED_OBJECT)) return;
602  } else if (tag == kCodeTargetTag) {
603  ReadTaggedPC();
604  if (SetMode(RelocInfo::CODE_TARGET)) return;
605  } else if (tag == kLocatableTag) {
606  ReadTaggedPC();
607  Advance();
608  int locatable_tag = GetLocatableTypeTag();
609  if (locatable_tag == kCodeWithIdTag) {
610  if (SetMode(RelocInfo::CODE_TARGET_WITH_ID)) {
611  ReadTaggedId();
612  return;
613  }
614  } else {
615  // Compact encoding is never used for comments,
616  // so it must be a position.
617  DCHECK(locatable_tag == kNonstatementPositionTag ||
618  locatable_tag == kStatementPositionTag);
619  if (mode_mask_ & RelocInfo::kPositionMask) {
620  ReadTaggedPosition();
621  if (SetMode(GetPositionModeFromTag(locatable_tag))) return;
622  }
623  }
624  } else {
625  DCHECK(tag == kDefaultTag);
626  int extra_tag = GetExtraTag();
627  if (extra_tag == kPCJumpExtraTag) {
628  if (GetTopTag() == kVariableLengthPCJumpTopTag) {
629  AdvanceReadVariableLengthPCJump();
630  } else {
631  AdvanceReadPC();
632  }
633  } else if (extra_tag == kDataJumpExtraTag) {
634  int locatable_tag = GetTopTag();
635  if (locatable_tag == kCodeWithIdTag) {
636  if (SetMode(RelocInfo::CODE_TARGET_WITH_ID)) {
637  AdvanceReadId();
638  return;
639  }
640  Advance(kIntSize);
641  } else if (locatable_tag != kCommentTag) {
642  DCHECK(locatable_tag == kNonstatementPositionTag ||
643  locatable_tag == kStatementPositionTag);
644  if (mode_mask_ & RelocInfo::kPositionMask) {
645  AdvanceReadPosition();
646  if (SetMode(GetPositionModeFromTag(locatable_tag))) return;
647  } else {
648  Advance(kIntSize);
649  }
650  } else {
651  DCHECK(locatable_tag == kCommentTag);
652  if (SetMode(RelocInfo::COMMENT)) {
653  AdvanceReadData();
654  return;
655  }
656  Advance(kIntptrSize);
657  }
658  } else if (extra_tag == kPoolExtraTag) {
659  int pool_type = GetTopTag();
660  DCHECK(pool_type == kConstPoolTag || pool_type == kVeneerPoolTag);
661  RelocInfo::Mode rmode = (pool_type == kConstPoolTag) ?
662  RelocInfo::CONST_POOL : RelocInfo::VENEER_POOL;
663  if (SetMode(rmode)) {
664  AdvanceReadPoolData();
665  return;
666  }
667  Advance(kIntSize);
668  } else {
669  AdvanceReadPC();
670  int rmode = extra_tag + RelocInfo::LAST_COMPACT_ENUM;
671  if (SetMode(static_cast<RelocInfo::Mode>(rmode))) return;
672  }
673  }
674  }
675  if (code_age_sequence_ != NULL) {
676  byte* old_code_age_sequence = code_age_sequence_;
677  code_age_sequence_ = NULL;
678  if (SetMode(RelocInfo::CODE_AGE_SEQUENCE)) {
679  rinfo_.data_ = 0;
680  rinfo_.pc_ = old_code_age_sequence;
681  return;
682  }
683  }
684  done_ = true;
685 }
686 
687 
688 RelocIterator::RelocIterator(Code* code, int mode_mask) {
689  rinfo_.host_ = code;
690  rinfo_.pc_ = code->instruction_start();
691  rinfo_.data_ = 0;
692  // Relocation info is read backwards.
693  pos_ = code->relocation_start() + code->relocation_size();
694  end_ = code->relocation_start();
695  done_ = false;
696  mode_mask_ = mode_mask;
697  last_id_ = 0;
698  last_position_ = 0;
699  byte* sequence = code->FindCodeAgeSequence();
700  // We get the isolate from the map, because at serialization time
701  // the code pointer has been cloned and isn't really in heap space.
702  Isolate* isolate = code->map()->GetIsolate();
703  if (sequence != NULL && !Code::IsYoungSequence(isolate, sequence)) {
704  code_age_sequence_ = sequence;
705  } else {
706  code_age_sequence_ = NULL;
707  }
708  if (mode_mask_ == 0) pos_ = end_;
709  next();
710 }
711 
712 
713 RelocIterator::RelocIterator(const CodeDesc& desc, int mode_mask) {
714  rinfo_.pc_ = desc.buffer;
715  rinfo_.data_ = 0;
716  // Relocation info is read backwards.
717  pos_ = desc.buffer + desc.buffer_size;
718  end_ = pos_ - desc.reloc_size;
719  done_ = false;
720  mode_mask_ = mode_mask;
721  last_id_ = 0;
722  last_position_ = 0;
723  code_age_sequence_ = NULL;
724  if (mode_mask_ == 0) pos_ = end_;
725  next();
726 }
727 
728 
729 // -----------------------------------------------------------------------------
730 // Implementation of RelocInfo
731 
732 
733 #ifdef DEBUG
734 bool RelocInfo::RequiresRelocation(const CodeDesc& desc) {
735  // Ensure there are no code targets or embedded objects present in the
736  // deoptimization entries, they would require relocation after code
737  // generation.
738  int mode_mask = RelocInfo::kCodeTargetMask |
739  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
740  RelocInfo::ModeMask(RelocInfo::CELL) |
741  RelocInfo::kApplyMask;
742  RelocIterator it(desc, mode_mask);
743  return !it.done();
744 }
745 #endif
746 
747 
748 #ifdef ENABLE_DISASSEMBLER
749 const char* RelocInfo::RelocModeName(RelocInfo::Mode rmode) {
750  switch (rmode) {
751  case RelocInfo::NONE32:
752  return "no reloc 32";
753  case RelocInfo::NONE64:
754  return "no reloc 64";
755  case RelocInfo::EMBEDDED_OBJECT:
756  return "embedded object";
757  case RelocInfo::CONSTRUCT_CALL:
758  return "code target (js construct call)";
759  case RelocInfo::DEBUG_BREAK:
760  return "debug break";
761  case RelocInfo::CODE_TARGET:
762  return "code target";
763  case RelocInfo::CODE_TARGET_WITH_ID:
764  return "code target with id";
765  case RelocInfo::CELL:
766  return "property cell";
767  case RelocInfo::RUNTIME_ENTRY:
768  return "runtime entry";
769  case RelocInfo::JS_RETURN:
770  return "js return";
771  case RelocInfo::COMMENT:
772  return "comment";
773  case RelocInfo::POSITION:
774  return "position";
775  case RelocInfo::STATEMENT_POSITION:
776  return "statement position";
777  case RelocInfo::EXTERNAL_REFERENCE:
778  return "external reference";
779  case RelocInfo::INTERNAL_REFERENCE:
780  return "internal reference";
781  case RelocInfo::CONST_POOL:
782  return "constant pool";
783  case RelocInfo::VENEER_POOL:
784  return "veneer pool";
785  case RelocInfo::DEBUG_BREAK_SLOT:
786  return "debug break slot";
787  case RelocInfo::CODE_AGE_SEQUENCE:
788  return "code_age_sequence";
789  case RelocInfo::NUMBER_OF_MODES:
790  UNREACHABLE();
791  return "number_of_modes";
792  }
793  return "unknown relocation type";
794 }
795 
796 
797 void RelocInfo::Print(Isolate* isolate, OStream& os) { // NOLINT
798  os << pc_ << " " << RelocModeName(rmode_);
799  if (IsComment(rmode_)) {
800  os << " (" << reinterpret_cast<char*>(data_) << ")";
801  } else if (rmode_ == EMBEDDED_OBJECT) {
802  os << " (" << Brief(target_object()) << ")";
803  } else if (rmode_ == EXTERNAL_REFERENCE) {
804  ExternalReferenceEncoder ref_encoder(isolate);
805  os << " (" << ref_encoder.NameOfAddress(target_reference()) << ") ("
806  << target_reference() << ")";
807  } else if (IsCodeTarget(rmode_)) {
808  Code* code = Code::GetCodeFromTargetAddress(target_address());
809  os << " (" << Code::Kind2String(code->kind()) << ") (" << target_address()
810  << ")";
811  if (rmode_ == CODE_TARGET_WITH_ID) {
812  os << " (id=" << static_cast<int>(data_) << ")";
813  }
814  } else if (IsPosition(rmode_)) {
815  os << " (" << data() << ")";
816  } else if (IsRuntimeEntry(rmode_) &&
817  isolate->deoptimizer_data() != NULL) {
818  // Deoptimization bailouts are stored as runtime entries.
819  int id = Deoptimizer::GetDeoptimizationId(
820  isolate, target_address(), Deoptimizer::EAGER);
821  if (id != Deoptimizer::kNotDeoptimizationEntry) {
822  os << " (deoptimization bailout " << id << ")";
823  }
824  }
825 
826  os << "\n";
827 }
828 #endif // ENABLE_DISASSEMBLER
829 
830 
831 #ifdef VERIFY_HEAP
832 void RelocInfo::Verify(Isolate* isolate) {
833  switch (rmode_) {
834  case EMBEDDED_OBJECT:
835  Object::VerifyPointer(target_object());
836  break;
837  case CELL:
838  Object::VerifyPointer(target_cell());
839  break;
840  case DEBUG_BREAK:
841  case CONSTRUCT_CALL:
842  case CODE_TARGET_WITH_ID:
843  case CODE_TARGET: {
844  // convert inline target address to code object
845  Address addr = target_address();
846  CHECK(addr != NULL);
847  // Check that we can find the right code object.
848  Code* code = Code::GetCodeFromTargetAddress(addr);
849  Object* found = isolate->FindCodeObject(addr);
850  CHECK(found->IsCode());
851  CHECK(code->address() == HeapObject::cast(found)->address());
852  break;
853  }
854  case RUNTIME_ENTRY:
855  case JS_RETURN:
856  case COMMENT:
857  case POSITION:
858  case STATEMENT_POSITION:
859  case EXTERNAL_REFERENCE:
860  case INTERNAL_REFERENCE:
861  case CONST_POOL:
862  case VENEER_POOL:
863  case DEBUG_BREAK_SLOT:
864  case NONE32:
865  case NONE64:
866  break;
867  case NUMBER_OF_MODES:
868  UNREACHABLE();
869  break;
870  case CODE_AGE_SEQUENCE:
871  DCHECK(Code::IsYoungSequence(isolate, pc_) || code_age_stub()->IsCode());
872  break;
873  }
874 }
875 #endif // VERIFY_HEAP
876 
877 
878 // -----------------------------------------------------------------------------
879 // Implementation of ExternalReference
880 
881 void ExternalReference::SetUp() {
882  double_constants.min_int = kMinInt;
883  double_constants.one_half = 0.5;
884  double_constants.minus_one_half = -0.5;
885  double_constants.canonical_non_hole_nan = base::OS::nan_value();
886  double_constants.the_hole_nan = bit_cast<double>(kHoleNanInt64);
887  double_constants.negative_infinity = -V8_INFINITY;
888  double_constants.uint32_bias =
889  static_cast<double>(static_cast<uint32_t>(0xFFFFFFFF)) + 1;
890 
891  math_exp_data_mutex = new base::Mutex();
892 }
893 
894 
895 void ExternalReference::InitializeMathExpData() {
896  // Early return?
897  if (math_exp_data_initialized) return;
898 
899  base::LockGuard<base::Mutex> lock_guard(math_exp_data_mutex);
900  if (!math_exp_data_initialized) {
901  // If this is changed, generated code must be adapted too.
902  const int kTableSizeBits = 11;
903  const int kTableSize = 1 << kTableSizeBits;
904  const double kTableSizeDouble = static_cast<double>(kTableSize);
905 
906  math_exp_constants_array = new double[9];
907  // Input values smaller than this always return 0.
908  math_exp_constants_array[0] = -708.39641853226408;
909  // Input values larger than this always return +Infinity.
910  math_exp_constants_array[1] = 709.78271289338397;
911  math_exp_constants_array[2] = V8_INFINITY;
912  // The rest is black magic. Do not attempt to understand it. It is
913  // loosely based on the "expd" function published at:
914  // http://herumi.blogspot.com/2011/08/fast-double-precision-exponential.html
915  const double constant3 = (1 << kTableSizeBits) / std::log(2.0);
916  math_exp_constants_array[3] = constant3;
917  math_exp_constants_array[4] =
918  static_cast<double>(static_cast<int64_t>(3) << 51);
919  math_exp_constants_array[5] = 1 / constant3;
920  math_exp_constants_array[6] = 3.0000000027955394;
921  math_exp_constants_array[7] = 0.16666666685227835;
923 
924  math_exp_log_table_array = new double[kTableSize];
925  for (int i = 0; i < kTableSize; i++) {
926  double value = std::pow(2, i / kTableSizeDouble);
927  uint64_t bits = bit_cast<uint64_t, double>(value);
928  bits &= (static_cast<uint64_t>(1) << 52) - 1;
929  double mantissa = bit_cast<double, uint64_t>(bits);
930  math_exp_log_table_array[i] = mantissa;
931  }
932 
933  math_exp_data_initialized = true;
934  }
935 }
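// --- Editor's illustrative sketch (not part of the original assembler.cc) ---
// What one entry of the table built above holds: for index i the stored value
// is 2^(i/2048) with its sign and exponent bits cleared, i.e. only the 52
// mantissa bits survive (the generated MathExp code is then expected to supply
// the exponent itself; this reading follows the "expd" reference above). The
// check below is self-contained and does not touch the table.
static inline void MathExpTableEntryExample() {
  double value = std::pow(2, 1.0 / 2048.0);  // entry for i == 1
  uint64_t bits = bit_cast<uint64_t, double>(value);
  DCHECK((bits >> 52) == 0x3FF);  // value in [1, 2): sign 0, exponent 1023
  uint64_t mantissa_bits = bits & ((static_cast<uint64_t>(1) << 52) - 1);
  double stored = bit_cast<double, uint64_t>(mantissa_bits);
  DCHECK((bit_cast<uint64_t, double>(stored) >> 52) == 0);  // exponent cleared
}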
936 
937 
938 void ExternalReference::TearDownMathExpData() {
939  delete[] math_exp_constants_array;
940  math_exp_constants_array = NULL;
941  delete[] math_exp_log_table_array;
942  math_exp_log_table_array = NULL;
943  delete math_exp_data_mutex;
944  math_exp_data_mutex = NULL;
945 }
946 
947 
948 ExternalReference::ExternalReference(Builtins::CFunctionId id, Isolate* isolate)
949  : address_(Redirect(isolate, Builtins::c_function_address(id))) {}
950 
951 
952 ExternalReference::ExternalReference(
953  ApiFunction* fun,
954  Type type = ExternalReference::BUILTIN_CALL,
955  Isolate* isolate = NULL)
956  : address_(Redirect(isolate, fun->address(), type)) {}
957 
958 
959 ExternalReference::ExternalReference(Builtins::Name name, Isolate* isolate)
960  : address_(isolate->builtins()->builtin_address(name)) {}
961 
962 
963 ExternalReference::ExternalReference(Runtime::FunctionId id,
964  Isolate* isolate)
965  : address_(Redirect(isolate, Runtime::FunctionForId(id)->entry)) {}
966 
967 
968 ExternalReference::ExternalReference(const Runtime::Function* f,
969  Isolate* isolate)
970  : address_(Redirect(isolate, f->entry)) {}
971 
972 
973 ExternalReference ExternalReference::isolate_address(Isolate* isolate) {
974  return ExternalReference(isolate);
975 }
976 
977 
978 ExternalReference::ExternalReference(const IC_Utility& ic_utility,
979  Isolate* isolate)
980  : address_(Redirect(isolate, ic_utility.address())) {}
981 
982 
983 ExternalReference::ExternalReference(StatsCounter* counter)
984  : address_(reinterpret_cast<Address>(counter->GetInternalPointer())) {}
985 
986 
987 ExternalReference::ExternalReference(Isolate::AddressId id, Isolate* isolate)
988  : address_(isolate->get_address_from_id(id)) {}
989 
990 
991 ExternalReference::ExternalReference(const SCTableReference& table_ref)
992  : address_(table_ref.address()) {}
993 
994 
995 ExternalReference ExternalReference::
996  incremental_marking_record_write_function(Isolate* isolate) {
997  return ExternalReference(Redirect(
998  isolate,
999  FUNCTION_ADDR(IncrementalMarking::RecordWriteFromCode)));
1000 }
1001 
1002 
1003 ExternalReference ExternalReference::
1004  store_buffer_overflow_function(Isolate* isolate) {
1005  return ExternalReference(Redirect(
1006  isolate,
1007  FUNCTION_ADDR(StoreBuffer::StoreBufferOverflow)));
1008 }
1009 
1010 
1011 ExternalReference ExternalReference::flush_icache_function(Isolate* isolate) {
1012  return ExternalReference(
1013  Redirect(isolate, FUNCTION_ADDR(CpuFeatures::FlushICache)));
1014 }
1015 
1016 
1017 ExternalReference ExternalReference::delete_handle_scope_extensions(
1018  Isolate* isolate) {
1019  return ExternalReference(Redirect(
1020  isolate,
1021  FUNCTION_ADDR(HandleScope::DeleteExtensions)));
1022 }
1023 
1024 
1025 ExternalReference ExternalReference::get_date_field_function(
1026  Isolate* isolate) {
1027  return ExternalReference(Redirect(isolate, FUNCTION_ADDR(JSDate::GetField)));
1028 }
1029 
1030 
1031 ExternalReference ExternalReference::get_make_code_young_function(
1032  Isolate* isolate) {
1033  return ExternalReference(Redirect(
1034  isolate, FUNCTION_ADDR(Code::MakeCodeAgeSequenceYoung)));
1035 }
1036 
1037 
1038 ExternalReference ExternalReference::get_mark_code_as_executed_function(
1039  Isolate* isolate) {
1040  return ExternalReference(Redirect(
1041  isolate, FUNCTION_ADDR(Code::MarkCodeAsExecuted)));
1042 }
1043 
1044 
1045 ExternalReference ExternalReference::date_cache_stamp(Isolate* isolate) {
1046  return ExternalReference(isolate->date_cache()->stamp_address());
1047 }
1048 
1049 
1050 ExternalReference ExternalReference::stress_deopt_count(Isolate* isolate) {
1051  return ExternalReference(isolate->stress_deopt_count_address());
1052 }
1053 
1054 
1055 ExternalReference ExternalReference::new_deoptimizer_function(
1056  Isolate* isolate) {
1057  return ExternalReference(
1058  Redirect(isolate, FUNCTION_ADDR(Deoptimizer::New)));
1059 }
1060 
1061 
1062 ExternalReference ExternalReference::compute_output_frames_function(
1063  Isolate* isolate) {
1064  return ExternalReference(
1065  Redirect(isolate, FUNCTION_ADDR(Deoptimizer::ComputeOutputFrames)));
1066 }
1067 
1068 
1069 ExternalReference ExternalReference::log_enter_external_function(
1070  Isolate* isolate) {
1071  return ExternalReference(
1072  Redirect(isolate, FUNCTION_ADDR(Logger::EnterExternal)));
1073 }
1074 
1075 
1076 ExternalReference ExternalReference::log_leave_external_function(
1077  Isolate* isolate) {
1078  return ExternalReference(
1079  Redirect(isolate, FUNCTION_ADDR(Logger::LeaveExternal)));
1080 }
1081 
1082 
1083 ExternalReference ExternalReference::keyed_lookup_cache_keys(Isolate* isolate) {
1084  return ExternalReference(isolate->keyed_lookup_cache()->keys_address());
1085 }
1086 
1087 
1088 ExternalReference ExternalReference::keyed_lookup_cache_field_offsets(
1089  Isolate* isolate) {
1090  return ExternalReference(
1091  isolate->keyed_lookup_cache()->field_offsets_address());
1092 }
1093 
1094 
1095 ExternalReference ExternalReference::roots_array_start(Isolate* isolate) {
1096  return ExternalReference(isolate->heap()->roots_array_start());
1097 }
1098 
1099 
1100 ExternalReference ExternalReference::allocation_sites_list_address(
1101  Isolate* isolate) {
1102  return ExternalReference(isolate->heap()->allocation_sites_list_address());
1103 }
1104 
1105 
1106 ExternalReference ExternalReference::address_of_stack_limit(Isolate* isolate) {
1107  return ExternalReference(isolate->stack_guard()->address_of_jslimit());
1108 }
1109 
1110 
1111 ExternalReference ExternalReference::address_of_real_stack_limit(
1112  Isolate* isolate) {
1113  return ExternalReference(isolate->stack_guard()->address_of_real_jslimit());
1114 }
1115 
1116 
1117 ExternalReference ExternalReference::address_of_regexp_stack_limit(
1118  Isolate* isolate) {
1119  return ExternalReference(isolate->regexp_stack()->limit_address());
1120 }
1121 
1122 
1123 ExternalReference ExternalReference::new_space_start(Isolate* isolate) {
1124  return ExternalReference(isolate->heap()->NewSpaceStart());
1125 }
1126 
1127 
1128 ExternalReference ExternalReference::store_buffer_top(Isolate* isolate) {
1129  return ExternalReference(isolate->heap()->store_buffer()->TopAddress());
1130 }
1131 
1132 
1133 ExternalReference ExternalReference::new_space_mask(Isolate* isolate) {
1134  return ExternalReference(reinterpret_cast<Address>(
1135  isolate->heap()->NewSpaceMask()));
1136 }
1137 
1138 
1139 ExternalReference ExternalReference::new_space_allocation_top_address(
1140  Isolate* isolate) {
1141  return ExternalReference(isolate->heap()->NewSpaceAllocationTopAddress());
1142 }
1143 
1144 
1145 ExternalReference ExternalReference::new_space_allocation_limit_address(
1146  Isolate* isolate) {
1147  return ExternalReference(isolate->heap()->NewSpaceAllocationLimitAddress());
1148 }
1149 
1150 
1151 ExternalReference ExternalReference::old_pointer_space_allocation_top_address(
1152  Isolate* isolate) {
1153  return ExternalReference(
1154  isolate->heap()->OldPointerSpaceAllocationTopAddress());
1155 }
1156 
1157 
1158 ExternalReference ExternalReference::old_pointer_space_allocation_limit_address(
1159  Isolate* isolate) {
1160  return ExternalReference(
1161  isolate->heap()->OldPointerSpaceAllocationLimitAddress());
1162 }
1163 
1164 
1165 ExternalReference ExternalReference::old_data_space_allocation_top_address(
1166  Isolate* isolate) {
1167  return ExternalReference(
1168  isolate->heap()->OldDataSpaceAllocationTopAddress());
1169 }
1170 
1171 
1172 ExternalReference ExternalReference::old_data_space_allocation_limit_address(
1173  Isolate* isolate) {
1174  return ExternalReference(
1175  isolate->heap()->OldDataSpaceAllocationLimitAddress());
1176 }
1177 
1178 
1179 ExternalReference ExternalReference::handle_scope_level_address(
1180  Isolate* isolate) {
1181  return ExternalReference(HandleScope::current_level_address(isolate));
1182 }
1183 
1184 
1185 ExternalReference ExternalReference::handle_scope_next_address(
1186  Isolate* isolate) {
1187  return ExternalReference(HandleScope::current_next_address(isolate));
1188 }
1189 
1190 
1191 ExternalReference ExternalReference::handle_scope_limit_address(
1192  Isolate* isolate) {
1193  return ExternalReference(HandleScope::current_limit_address(isolate));
1194 }
1195 
1196 
1197 ExternalReference ExternalReference::scheduled_exception_address(
1198  Isolate* isolate) {
1199  return ExternalReference(isolate->scheduled_exception_address());
1200 }
1201 
1202 
1203 ExternalReference ExternalReference::address_of_pending_message_obj(
1204  Isolate* isolate) {
1205  return ExternalReference(isolate->pending_message_obj_address());
1206 }
1207 
1208 
1209 ExternalReference ExternalReference::address_of_has_pending_message(
1210  Isolate* isolate) {
1211  return ExternalReference(isolate->has_pending_message_address());
1212 }
1213 
1214 
1215 ExternalReference ExternalReference::address_of_pending_message_script(
1216  Isolate* isolate) {
1217  return ExternalReference(isolate->pending_message_script_address());
1218 }
1219 
1220 
1221 ExternalReference ExternalReference::address_of_min_int() {
1222  return ExternalReference(reinterpret_cast<void*>(&double_constants.min_int));
1223 }
1224 
1225 
1226 ExternalReference ExternalReference::address_of_one_half() {
1227  return ExternalReference(reinterpret_cast<void*>(&double_constants.one_half));
1228 }
1229 
1230 
1231 ExternalReference ExternalReference::address_of_minus_one_half() {
1232  return ExternalReference(
1233  reinterpret_cast<void*>(&double_constants.minus_one_half));
1234 }
1235 
1236 
1237 ExternalReference ExternalReference::address_of_negative_infinity() {
1238  return ExternalReference(
1239  reinterpret_cast<void*>(&double_constants.negative_infinity));
1240 }
1241 
1242 
1243 ExternalReference ExternalReference::address_of_canonical_non_hole_nan() {
1244  return ExternalReference(
1245  reinterpret_cast<void*>(&double_constants.canonical_non_hole_nan));
1246 }
1247 
1248 
1249 ExternalReference ExternalReference::address_of_the_hole_nan() {
1250  return ExternalReference(
1251  reinterpret_cast<void*>(&double_constants.the_hole_nan));
1252 }
1253 
1254 
1255 ExternalReference ExternalReference::address_of_uint32_bias() {
1256  return ExternalReference(
1257  reinterpret_cast<void*>(&double_constants.uint32_bias));
1258 }
1259 
1260 
1261 ExternalReference ExternalReference::is_profiling_address(Isolate* isolate) {
1262  return ExternalReference(isolate->cpu_profiler()->is_profiling_address());
1263 }
1264 
1265 
1266 ExternalReference ExternalReference::invoke_function_callback(
1267  Isolate* isolate) {
1268  Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback);
1269  ExternalReference::Type thunk_type = ExternalReference::PROFILING_API_CALL;
1270  ApiFunction thunk_fun(thunk_address);
1271  return ExternalReference(&thunk_fun, thunk_type, isolate);
1272 }
1273 
1274 
1275 ExternalReference ExternalReference::invoke_accessor_getter_callback(
1276  Isolate* isolate) {
1277  Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback);
1278  ExternalReference::Type thunk_type =
1279  ExternalReference::PROFILING_GETTER_CALL;
1280  ApiFunction thunk_fun(thunk_address);
1281  return ExternalReference(&thunk_fun, thunk_type, isolate);
1282 }
1283 
1284 
1285 #ifndef V8_INTERPRETED_REGEXP
1286 
1287 ExternalReference ExternalReference::re_check_stack_guard_state(
1288  Isolate* isolate) {
1289  Address function;
1290 #if V8_TARGET_ARCH_X64
1291  function = FUNCTION_ADDR(RegExpMacroAssemblerX64::CheckStackGuardState);
1292 #elif V8_TARGET_ARCH_IA32
1293  function = FUNCTION_ADDR(RegExpMacroAssemblerIA32::CheckStackGuardState);
1294 #elif V8_TARGET_ARCH_ARM64
1295  function = FUNCTION_ADDR(RegExpMacroAssemblerARM64::CheckStackGuardState);
1296 #elif V8_TARGET_ARCH_ARM
1297  function = FUNCTION_ADDR(RegExpMacroAssemblerARM::CheckStackGuardState);
1298 #elif V8_TARGET_ARCH_MIPS
1299  function = FUNCTION_ADDR(RegExpMacroAssemblerMIPS::CheckStackGuardState);
1300 #elif V8_TARGET_ARCH_MIPS64
1301  function = FUNCTION_ADDR(RegExpMacroAssemblerMIPS::CheckStackGuardState);
1302 #elif V8_TARGET_ARCH_X87
1303  function = FUNCTION_ADDR(RegExpMacroAssemblerX87::CheckStackGuardState);
1304 #else
1305  UNREACHABLE();
1306 #endif
1307  return ExternalReference(Redirect(isolate, function));
1308 }
1309 
1310 
1311 ExternalReference ExternalReference::re_grow_stack(Isolate* isolate) {
1312  return ExternalReference(
1313  Redirect(isolate, FUNCTION_ADDR(NativeRegExpMacroAssembler::GrowStack)));
1314 }
1315 
1316 ExternalReference ExternalReference::re_case_insensitive_compare_uc16(
1317  Isolate* isolate) {
1318  return ExternalReference(Redirect(
1319  isolate,
1320  FUNCTION_ADDR(NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16)));
1321 }
1322 
1323 
1324 ExternalReference ExternalReference::re_word_character_map() {
1325  return ExternalReference(
1326  reinterpret_cast<Address>(NativeRegExpMacroAssembler::word_character_map_address()));
1327 }
1328 
1329 ExternalReference ExternalReference::address_of_static_offsets_vector(
1330  Isolate* isolate) {
1331  return ExternalReference(
1332  reinterpret_cast<Address>(isolate->jsregexp_static_offsets_vector()));
1333 }
1334 
1335 ExternalReference ExternalReference::address_of_regexp_stack_memory_address(
1336  Isolate* isolate) {
1337  return ExternalReference(
1338  isolate->regexp_stack()->memory_address());
1339 }
1340 
1341 ExternalReference ExternalReference::address_of_regexp_stack_memory_size(
1342  Isolate* isolate) {
1343  return ExternalReference(isolate->regexp_stack()->memory_size_address());
1344 }
1345 
1346 #endif // V8_INTERPRETED_REGEXP
1347 
1348 
1349 ExternalReference ExternalReference::math_log_double_function(
1350  Isolate* isolate) {
1351  typedef double (*d2d)(double x);
1352  return ExternalReference(Redirect(isolate,
1353  FUNCTION_ADDR(static_cast<d2d>(std::log)),
1354  BUILTIN_FP_CALL));
1355 }
1356 
1357 
1358 ExternalReference ExternalReference::math_exp_constants(int constant_index) {
1359  DCHECK(math_exp_data_initialized);
1360  return ExternalReference(
1361  reinterpret_cast<void*>(math_exp_constants_array + constant_index));
1362 }
1363 
1364 
1365 ExternalReference ExternalReference::math_exp_log_table() {
1366  DCHECK(math_exp_data_initialized);
1367  return ExternalReference(reinterpret_cast<void*>(math_exp_log_table_array));
1368 }
1369 
1370 
1371 ExternalReference ExternalReference::page_flags(Page* page) {
1372  return ExternalReference(reinterpret_cast<Address>(page) +
1373  MemoryChunk::kFlagsOffset);
1374 }
1375 
1376 
1377 ExternalReference ExternalReference::ForDeoptEntry(Address entry) {
1378  return ExternalReference(entry);
1379 }
1380 
1381 
1382 ExternalReference ExternalReference::cpu_features() {
1383  DCHECK(CpuFeatures::initialized_);
1384  return ExternalReference(&CpuFeatures::supported_);
1385 }
1386 
1387 
1388 ExternalReference ExternalReference::debug_is_active_address(
1389  Isolate* isolate) {
1390  return ExternalReference(isolate->debug()->is_active_address());
1391 }
1392 
1393 
1394 ExternalReference ExternalReference::debug_after_break_target_address(
1395  Isolate* isolate) {
1396  return ExternalReference(isolate->debug()->after_break_target_address());
1397 }
1398 
1399 
1400 ExternalReference
1401  ExternalReference::debug_restarter_frame_function_pointer_address(
1402  Isolate* isolate) {
1403  return ExternalReference(
1404  isolate->debug()->restarter_frame_function_pointer_address());
1405 }
1406 
1407 
1408 double power_helper(double x, double y) {
1409  int y_int = static_cast<int>(y);
1410  if (y == y_int) {
1411  return power_double_int(x, y_int); // Returns 1 if exponent is 0.
1412  }
1413  if (y == 0.5) {
1414  return (std::isinf(x)) ? V8_INFINITY
1415  : fast_sqrt(x + 0.0); // Convert -0 to +0.
1416  }
1417  if (y == -0.5) {
1418  return (std::isinf(x)) ? 0 : 1.0 / fast_sqrt(x + 0.0); // Convert -0 to +0.
1419  }
1420  return power_double_double(x, y);
1421 }
1422 
1423 
1424 // Helper function to compute x^y, where y is known to be an
1425 // integer. Uses binary decomposition to limit the number of
1426 // multiplications; see the discussion in "Hacker's Delight" by Henry
1427 // S. Warren, Jr., figure 11-6, page 213.
1428 double power_double_int(double x, int y) {
1429  double m = (y < 0) ? 1 / x : x;
1430  unsigned n = (y < 0) ? -y : y;
1431  double p = 1;
1432  while (n != 0) {
1433  if ((n & 1) != 0) p *= m;
1434  m *= m;
1435  if ((n & 2) != 0) p *= m;
1436  m *= m;
1437  n >>= 2;
1438  }
1439  return p;
1440 }
1441 
1442 
1443 double power_double_double(double x, double y) {
1444 #if defined(__MINGW64_VERSION_MAJOR) && \
1445  (!defined(__MINGW64_VERSION_RC) || __MINGW64_VERSION_RC < 1)
1446  // MinGW64 has a custom implementation for pow. This handles certain
1447  // special cases that are different.
1448  if ((x == 0.0 || std::isinf(x)) && std::isfinite(y)) {
1449  double f;
1450  if (std::modf(y, &f) != 0.0) {
1451  return ((x == 0.0) ^ (y > 0)) ? V8_INFINITY : 0;
1452  }
1453  }
1454 
1455  if (x == 2.0) {
1456  int y_int = static_cast<int>(y);
1457  if (y == y_int) {
1458  return std::ldexp(1.0, y_int);
1459  }
1460  }
1461 #endif
1462 
1463  // The checks for special cases can be dropped in ia32 because it has already
1464  // been done in generated code before bailing out here.
1465  if (std::isnan(y) || ((x == 1 || x == -1) && std::isinf(y))) {
1466  return base::OS::nan_value();
1467  }
1468  return std::pow(x, y);
1469 }
1470 
1471 
1472 ExternalReference ExternalReference::power_double_double_function(
1473  Isolate* isolate) {
1474  return ExternalReference(Redirect(isolate,
1475  FUNCTION_ADDR(power_double_double),
1476  BUILTIN_FP_FP_CALL));
1477 }
1478 
1479 
1480 ExternalReference ExternalReference::power_double_int_function(
1481  Isolate* isolate) {
1482  return ExternalReference(Redirect(isolate,
1483  FUNCTION_ADDR(power_double_int),
1484  BUILTIN_FP_INT_CALL));
1485 }
1486 
1487 
1488 bool EvalComparison(Token::Value op, double op1, double op2) {
1489  DCHECK(Token::IsCompareOp(op));
1490  switch (op) {
1491  case Token::EQ:
1492  case Token::EQ_STRICT: return (op1 == op2);
1493  case Token::NE: return (op1 != op2);
1494  case Token::LT: return (op1 < op2);
1495  case Token::GT: return (op1 > op2);
1496  case Token::LTE: return (op1 <= op2);
1497  case Token::GTE: return (op1 >= op2);
1498  default:
1499  UNREACHABLE();
1500  return false;
1501  }
1502 }
1503 
1504 
1505 ExternalReference ExternalReference::mod_two_doubles_operation(
1506  Isolate* isolate) {
1507  return ExternalReference(Redirect(isolate,
1508  FUNCTION_ADDR(modulo),
1509  BUILTIN_FP_FP_CALL));
1510 }
1511 
1512 
1513 ExternalReference ExternalReference::debug_break(Isolate* isolate) {
1514  return ExternalReference(Redirect(isolate, FUNCTION_ADDR(Debug_Break)));
1515 }
1516 
1517 
1518 ExternalReference ExternalReference::debug_step_in_fp_address(
1519  Isolate* isolate) {
1520  return ExternalReference(isolate->debug()->step_in_fp_addr());
1521 }
1522 
1523 
1524 void PositionsRecorder::RecordPosition(int pos) {
1525  DCHECK(pos != RelocInfo::kNoPosition);
1526  DCHECK(pos >= 0);
1527  state_.current_position = pos;
1528  LOG_CODE_EVENT(assembler_->isolate(),
1529  CodeLinePosInfoAddPositionEvent(jit_handler_data_,
1530  assembler_->pc_offset(),
1531  pos));
1532 }
1533 
1534 
1535 void PositionsRecorder::RecordStatementPosition(int pos) {
1536  DCHECK(pos != RelocInfo::kNoPosition);
1537  DCHECK(pos >= 0);
1538  state_.current_statement_position = pos;
1539  LOG_CODE_EVENT(assembler_->isolate(),
1540  CodeLinePosInfoAddStatementPositionEvent(
1541  jit_handler_data_,
1542  assembler_->pc_offset(),
1543  pos));
1544 }
1545 
1546 
1547 bool PositionsRecorder::WriteRecordedPositions() {
1548  bool written = false;
1549 
1550  // Write the statement position if it is different from what was written last
1551  // time.
1552  if (state_.current_statement_position != state_.written_statement_position) {
1553  EnsureSpace ensure_space(assembler_);
1554  assembler_->RecordRelocInfo(RelocInfo::STATEMENT_POSITION,
1555  state_.current_statement_position);
1556  state_.written_statement_position = state_.current_statement_position;
1557  written = true;
1558  }
1559 
1560  // Write the position if it is different from what was written last time and
1561  // also different from the written statement position.
1562  if (state_.current_position != state_.written_position &&
1563  state_.current_position != state_.written_statement_position) {
1564  EnsureSpace ensure_space(assembler_);
1565  assembler_->RecordRelocInfo(RelocInfo::POSITION, state_.current_position);
1566  state_.written_position = state_.current_position;
1567  written = true;
1568  }
1569 
1570  // Return whether something was written.
1571  return written;
1572 }
1573 
1574 } } // namespace v8::internal