// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/execution/isolate.h"
#include <stdlib.h>
#include <atomic>
#include <cstdint>
#include <fstream>
#include <memory>
#include <sstream>
#include <string>
#include <unordered_map>
#include <utility>
#include "include/v8-template.h"
#include "src/api/api-inl.h"
#include "src/ast/ast-value-factory.h"
#include "src/ast/scopes.h"
#include "src/base/hashmap.h"
#include "src/base/logging.h"
#include "src/base/platform/mutex.h"
#include "src/base/platform/platform.h"
#include "src/base/platform/wrappers.h"
#include "src/base/sys-info.h"
#include "src/base/utils/random-number-generator.h"
#include "src/baseline/baseline-batch-compiler.h"
#include "src/bigint/bigint.h"
#include "src/builtins/builtins-promise.h"
#include "src/builtins/constants-table-builder.h"
#include "src/codegen/assembler-inl.h"
#include "src/codegen/compilation-cache.h"
#include "src/codegen/flush-instruction-cache.h"
#include "src/common/assert-scope.h"
#include "src/common/globals.h"
#include "src/common/ptr-compr-inl.h"
#include "src/compiler-dispatcher/lazy-compile-dispatcher.h"
#include "src/compiler-dispatcher/optimizing-compile-dispatcher.h"
#include "src/date/date.h"
#include "src/debug/debug-frames.h"
#include "src/debug/debug.h"
#include "src/deoptimizer/deoptimizer.h"
#include "src/deoptimizer/materialized-object-store.h"
#include "src/diagnostics/basic-block-profiler.h"
#include "src/diagnostics/compilation-statistics.h"
#include "src/execution/frames-inl.h"
#include "src/execution/frames.h"
#include "src/execution/isolate-inl.h"
#include "src/execution/local-isolate.h"
#include "src/execution/messages.h"
#include "src/execution/microtask-queue.h"
#include "src/execution/protectors-inl.h"
#include "src/execution/simulator.h"
#include "src/execution/tiering-manager.h"
#include "src/execution/v8threads.h"
#include "src/execution/vm-state-inl.h"
#include "src/handles/global-handles-inl.h"
#include "src/handles/persistent-handles.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap-verifier.h"
#include "src/heap/local-heap.h"
#include "src/heap/parked-scope.h"
#include "src/heap/read-only-heap.h"
#include "src/heap/safepoint.h"
#include "src/ic/stub-cache.h"
#include "src/init/bootstrapper.h"
#include "src/init/setup-isolate.h"
#include "src/init/v8.h"
#include "src/interpreter/interpreter.h"
#include "src/libsampler/sampler.h"
#include "src/logging/counters.h"
#include "src/logging/log.h"
#include "src/logging/metrics.h"
#include "src/logging/runtime-call-stats-scope.h"
#include "src/numbers/hash-seed-inl.h"
#include "src/objects/backing-store.h"
#include "src/objects/call-site-info-inl.h"
#include "src/objects/elements.h"
#include "src/objects/feedback-vector.h"
#include "src/objects/hash-table-inl.h"
#include "src/objects/instance-type-inl.h"
#include "src/objects/js-array-buffer-inl.h"
#include "src/objects/js-array-inl.h"
#include "src/objects/js-generator-inl.h"
#include "src/objects/js-weak-refs-inl.h"
#include "src/objects/managed-inl.h"
#include "src/objects/module-inl.h"
#include "src/objects/promise-inl.h"
#include "src/objects/property-descriptor.h"
#include "src/objects/prototype.h"
#include "src/objects/slots.h"
#include "src/objects/smi.h"
#include "src/objects/source-text-module-inl.h"
#include "src/objects/string-set-inl.h"
#include "src/objects/visitors.h"
#include "src/profiler/heap-profiler.h"
#include "src/profiler/tracing-cpu-profiler.h"
#include "src/regexp/regexp-stack.h"
#include "src/roots/static-roots.h"
#include "src/snapshot/embedded/embedded-data-inl.h"
#include "src/snapshot/embedded/embedded-file-writer-interface.h"
#include "src/snapshot/read-only-deserializer.h"
#include "src/snapshot/shared-heap-deserializer.h"
#include "src/snapshot/snapshot.h"
#include "src/snapshot/startup-deserializer.h"
#include "src/strings/string-builder-inl.h"
#include "src/strings/string-stream.h"
#include "src/tasks/cancelable-task.h"
#include "src/tracing/tracing-category-observer.h"
#include "src/utils/address-map.h"
#include "src/utils/ostreams.h"
#include "src/utils/version.h"
#include "src/zone/accounting-allocator.h"
#include "src/zone/type-stats.h"
#ifdef V8_INTL_SUPPORT
#include "src/objects/intl-objects.h"
#include "unicode/locid.h"
#include "unicode/uobject.h"
#endif // V8_INTL_SUPPORT
#if V8_ENABLE_MAGLEV
#include "src/maglev/maglev-concurrent-dispatcher.h"
#endif // V8_ENABLE_MAGLEV
#if V8_ENABLE_WEBASSEMBLY
#include "src/debug/debug-wasm-objects.h"
#include "src/trap-handler/trap-handler.h"
#include "src/wasm/stacks.h"
#include "src/wasm/wasm-code-manager.h"
#include "src/wasm/wasm-engine.h"
#include "src/wasm/wasm-module.h"
#include "src/wasm/wasm-objects.h"
#endif // V8_ENABLE_WEBASSEMBLY
#if defined(V8_OS_WIN) && defined(V8_ENABLE_ETW_STACK_WALKING)
#include "src/diagnostics/etw-jit-win.h"
#endif
#if defined(V8_OS_WIN64)
#include "src/diagnostics/unwinding-info-win64.h"
#endif // V8_OS_WIN64
#if USE_SIMULATOR
#include "src/execution/simulator-base.h"
#endif
extern "C" const uint8_t v8_Default_embedded_blob_code_[];
extern "C" uint32_t v8_Default_embedded_blob_code_size_;
extern "C" const uint8_t v8_Default_embedded_blob_data_[];
extern "C" uint32_t v8_Default_embedded_blob_data_size_;
namespace v8 {
namespace internal {
#ifdef DEBUG
#define TRACE_ISOLATE(tag) \
do { \
if (v8_flags.trace_isolates) { \
PrintF("Isolate %p (id %d)" #tag "\n", reinterpret_cast<void*>(this), \
id()); \
} \
} while (false)
#else
#define TRACE_ISOLATE(tag)
#endif
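// Illustrative usage (a sketch, not prescriptive): isolate lifecycle methods
// invoke the macro with a bare tag, e.g. TRACE_ISOLATE(constructor), so that
// running with --trace-isolates logs the isolate's address and id together
// with the tag.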
const uint8_t* DefaultEmbeddedBlobCode() {
return v8_Default_embedded_blob_code_;
}
uint32_t DefaultEmbeddedBlobCodeSize() {
return v8_Default_embedded_blob_code_size_;
}
const uint8_t* DefaultEmbeddedBlobData() {
return v8_Default_embedded_blob_data_;
}
uint32_t DefaultEmbeddedBlobDataSize() {
return v8_Default_embedded_blob_data_size_;
}
namespace {
// These variables provide access to the current embedded blob without requiring
// an isolate instance. This is needed e.g. by
// InstructionStream::InstructionStart, which may not have access to an isolate
// but still needs to access the embedded blob. The variables are initialized by
// each isolate in Init(). Writes and reads are relaxed since we can guarantee
// that the current thread has initialized these variables before accessing
// them. Different threads may race, but this is fine since they all attempt to
// set the same values of the blob pointer and size.
std::atomic<const uint8_t*> current_embedded_blob_code_(nullptr);
std::atomic<uint32_t> current_embedded_blob_code_size_(0);
std::atomic<const uint8_t*> current_embedded_blob_data_(nullptr);
std::atomic<uint32_t> current_embedded_blob_data_size_(0);
// The various workflows around embedded snapshots are fairly complex. We need
// to support plain old snapshot builds, nosnap builds, and the requirements of
// subtly different serialization tests. There are two related knobs to
// twiddle:
//
// - The default embedded blob may be overridden by setting the sticky embedded
// blob. This is set automatically whenever we create a new embedded blob.
//
// - Lifecycle management can be either manual or set to refcounting.
//
// A few situations to demonstrate their use:
//
// - A plain old snapshot build neither overrides the default blob nor
// refcounts.
//
// - mksnapshot sets the sticky blob and manually frees the embedded
// blob once done.
//
// - Most serializer tests do the same.
//
// - Nosnapshot builds set the sticky blob and enable refcounting.
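//
// Illustrative sketch (not verbatim from any particular caller): a
// mksnapshot-style embedder drives these knobs roughly as
//
//   DisableEmbeddedBlobRefcounting();  // switch to manual lifecycle management
//   /* ... create isolates, serialize the snapshot ... */
//   FreeCurrentEmbeddedBlob();         // free the off-heap instruction streams
//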
// This mutex protects access to the following variables:
// - sticky_embedded_blob_code_
// - sticky_embedded_blob_code_size_
// - sticky_embedded_blob_data_
// - sticky_embedded_blob_data_size_
// - enable_embedded_blob_refcounting_
// - current_embedded_blob_refs_
base::LazyMutex current_embedded_blob_refcount_mutex_ = LAZY_MUTEX_INITIALIZER;
const uint8_t* sticky_embedded_blob_code_ = nullptr;
uint32_t sticky_embedded_blob_code_size_ = 0;
const uint8_t* sticky_embedded_blob_data_ = nullptr;
uint32_t sticky_embedded_blob_data_size_ = 0;
bool enable_embedded_blob_refcounting_ = true;
int current_embedded_blob_refs_ = 0;
const uint8_t* StickyEmbeddedBlobCode() { return sticky_embedded_blob_code_; }
uint32_t StickyEmbeddedBlobCodeSize() {
return sticky_embedded_blob_code_size_;
}
const uint8_t* StickyEmbeddedBlobData() { return sticky_embedded_blob_data_; }
uint32_t StickyEmbeddedBlobDataSize() {
return sticky_embedded_blob_data_size_;
}
void SetStickyEmbeddedBlob(const uint8_t* code, uint32_t code_size,
const uint8_t* data, uint32_t data_size) {
sticky_embedded_blob_code_ = code;
sticky_embedded_blob_code_size_ = code_size;
sticky_embedded_blob_data_ = data;
sticky_embedded_blob_data_size_ = data_size;
}
} // namespace
void DisableEmbeddedBlobRefcounting() {
base::MutexGuard guard(current_embedded_blob_refcount_mutex_.Pointer());
enable_embedded_blob_refcounting_ = false;
}
void FreeCurrentEmbeddedBlob() {
CHECK(!enable_embedded_blob_refcounting_);
base::MutexGuard guard(current_embedded_blob_refcount_mutex_.Pointer());
if (StickyEmbeddedBlobCode() == nullptr) return;
CHECK_EQ(StickyEmbeddedBlobCode(), Isolate::CurrentEmbeddedBlobCode());
CHECK_EQ(StickyEmbeddedBlobData(), Isolate::CurrentEmbeddedBlobData());
OffHeapInstructionStream::FreeOffHeapOffHeapInstructionStream(
const_cast<uint8_t*>(Isolate::CurrentEmbeddedBlobCode()),
Isolate::CurrentEmbeddedBlobCodeSize(),
const_cast<uint8_t*>(Isolate::CurrentEmbeddedBlobData()),
Isolate::CurrentEmbeddedBlobDataSize());
current_embedded_blob_code_.store(nullptr, std::memory_order_relaxed);
current_embedded_blob_code_size_.store(0, std::memory_order_relaxed);
current_embedded_blob_data_.store(nullptr, std::memory_order_relaxed);
current_embedded_blob_data_size_.store(0, std::memory_order_relaxed);
sticky_embedded_blob_code_ = nullptr;
sticky_embedded_blob_code_size_ = 0;
sticky_embedded_blob_data_ = nullptr;
sticky_embedded_blob_data_size_ = 0;
}
// static
bool Isolate::CurrentEmbeddedBlobIsBinaryEmbedded() {
// In some situations, we must be able to rely on the embedded blob being
// immortal immovable. This is the case if the blob is binary-embedded.
// See blob lifecycle controls above for descriptions of when the current
// embedded blob may change (e.g. in tests or mksnapshot). If the blob is
// binary-embedded, it is immortal immovable.
const uint8_t* code =
current_embedded_blob_code_.load(std::memory_order_relaxed);
if (code == nullptr) return false;
return code == DefaultEmbeddedBlobCode();
}
void Isolate::SetEmbeddedBlob(const uint8_t* code, uint32_t code_size,
const uint8_t* data, uint32_t data_size) {
CHECK_NOT_NULL(code);
CHECK_NOT_NULL(data);
embedded_blob_code_ = code;
embedded_blob_code_size_ = code_size;
embedded_blob_data_ = data;
embedded_blob_data_size_ = data_size;
current_embedded_blob_code_.store(code, std::memory_order_relaxed);
current_embedded_blob_code_size_.store(code_size, std::memory_order_relaxed);
current_embedded_blob_data_.store(data, std::memory_order_relaxed);
current_embedded_blob_data_size_.store(data_size, std::memory_order_relaxed);
#ifdef DEBUG
// Verify that the contents of the embedded blob are unchanged from
// serialization-time, just to ensure the compiler isn't messing with us.
EmbeddedData d = EmbeddedData::FromBlob();
if (d.EmbeddedBlobDataHash() != d.CreateEmbeddedBlobDataHash()) {
FATAL(
"Embedded blob data section checksum verification failed. This "
"indicates that the embedded blob has been modified since compilation "
"time.");
}
if (v8_flags.text_is_readable) {
if (d.EmbeddedBlobCodeHash() != d.CreateEmbeddedBlobCodeHash()) {
FATAL(
"Embedded blob code section checksum verification failed. This "
"indicates that the embedded blob has been modified since "
"compilation time. A common cause is a debugging breakpoint set "
"within builtin code.");
}
}
#endif // DEBUG
}
void Isolate::ClearEmbeddedBlob() {
CHECK(enable_embedded_blob_refcounting_);
CHECK_EQ(embedded_blob_code_, CurrentEmbeddedBlobCode());
CHECK_EQ(embedded_blob_code_, StickyEmbeddedBlobCode());
CHECK_EQ(embedded_blob_data_, CurrentEmbeddedBlobData());
CHECK_EQ(embedded_blob_data_, StickyEmbeddedBlobData());
embedded_blob_code_ = nullptr;
embedded_blob_code_size_ = 0;
embedded_blob_data_ = nullptr;
embedded_blob_data_size_ = 0;
current_embedded_blob_code_.store(nullptr, std::memory_order_relaxed);
current_embedded_blob_code_size_.store(0, std::memory_order_relaxed);
current_embedded_blob_data_.store(nullptr, std::memory_order_relaxed);
current_embedded_blob_data_size_.store(0, std::memory_order_relaxed);
sticky_embedded_blob_code_ = nullptr;
sticky_embedded_blob_code_size_ = 0;
sticky_embedded_blob_data_ = nullptr;
sticky_embedded_blob_data_size_ = 0;
}
const uint8_t* Isolate::embedded_blob_code() const {
return embedded_blob_code_;
}
uint32_t Isolate::embedded_blob_code_size() const {
return embedded_blob_code_size_;
}
const uint8_t* Isolate::embedded_blob_data() const {
return embedded_blob_data_;
}
uint32_t Isolate::embedded_blob_data_size() const {
return embedded_blob_data_size_;
}
// static
const uint8_t* Isolate::CurrentEmbeddedBlobCode() {
return current_embedded_blob_code_.load(std::memory_order_relaxed);
}
// static
uint32_t Isolate::CurrentEmbeddedBlobCodeSize() {
return current_embedded_blob_code_size_.load(std::memory_order_relaxed);
}
// static
const uint8_t* Isolate::CurrentEmbeddedBlobData() {
return current_embedded_blob_data_.load(std::memory_order_relaxed);
}
// static
uint32_t Isolate::CurrentEmbeddedBlobDataSize() {
return current_embedded_blob_data_size_.load(std::memory_order_relaxed);
}
// static
base::AddressRegion Isolate::GetShortBuiltinsCallRegion() {
// Update calculations below if the assert fails.
static_assert(kMaxPCRelativeCodeRangeInMB <= 4096);
if (kMaxPCRelativeCodeRangeInMB == 0) {
// Return empty region if pc-relative calls/jumps are not supported.
return base::AddressRegion(kNullAddress, 0);
}
constexpr size_t max_size = std::numeric_limits<size_t>::max();
if (uint64_t{kMaxPCRelativeCodeRangeInMB} * MB > max_size) {
// The whole addressable space is reachable with pc-relative calls/jumps.
return base::AddressRegion(kNullAddress, max_size);
}
constexpr size_t radius = kMaxPCRelativeCodeRangeInMB * MB;
DCHECK_LT(CurrentEmbeddedBlobCodeSize(), radius);
Address embedded_blob_code_start =
reinterpret_cast<Address>(CurrentEmbeddedBlobCode());
if (embedded_blob_code_start == kNullAddress) {
// Return empty region if there's no embedded blob.
return base::AddressRegion(kNullAddress, 0);
}
Address embedded_blob_code_end =
embedded_blob_code_start + CurrentEmbeddedBlobCodeSize();
Address region_start =
(embedded_blob_code_end > radius) ? (embedded_blob_code_end - radius) : 0;
Address region_end = embedded_blob_code_start + radius;
if (region_end < embedded_blob_code_start) {
region_end = static_cast<Address>(-1);
}
return base::AddressRegion(region_start, region_end - region_start);
}
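// Worked example for the region computed above (illustrative numbers only):
// with a pc-relative radius of 128 MB and an embedded blob spanning
// [blob_start, blob_end), the returned region is
// [blob_end - 128 MB, blob_start + 128 MB), i.e. the set of addresses from
// which the entire blob is within pc-relative call/jump range.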
size_t Isolate::HashIsolateForEmbeddedBlob() {
DCHECK(builtins_.is_initialized());
DCHECK(Builtins::AllBuiltinsAreIsolateIndependent());
DisallowGarbageCollection no_gc;
static constexpr size_t kSeed = 0;
size_t hash = kSeed;
// Hash static entries of the roots table.
hash = base::hash_combine(hash, V8_STATIC_ROOTS_BOOL);
#if V8_STATIC_ROOTS_BOOL
hash = base::hash_combine(hash,
static_cast<int>(RootIndex::kReadOnlyRootsCount));
RootIndex i = RootIndex::kFirstReadOnlyRoot;
for (auto ptr : StaticReadOnlyRootsPointerTable) {
hash = base::hash_combine(ptr, hash);
++i;
}
#endif // V8_STATIC_ROOTS_BOOL
// Hash data sections of builtin code objects.
for (Builtin builtin = Builtins::kFirst; builtin <= Builtins::kLast;
++builtin) {
Code code = builtins()->code(builtin);
DCHECK(Internals::HasHeapObjectTag(code.ptr()));
uint8_t* const code_ptr = reinterpret_cast<uint8_t*>(code.address());
// These static asserts ensure we don't miss relevant fields. We don't hash
// instruction_start, but other data fields must remain the same.
static_assert(Code::kEndOfStrongFieldsOffset ==
Code::kInstructionStartOffset);
static_assert(Code::kInstructionStartOffsetEnd + 1 == Code::kFlagsOffset);
static_assert(Code::kFlagsOffsetEnd + 1 == Code::kInstructionSizeOffset);
static_assert(Code::kInstructionSizeOffsetEnd + 1 ==
Code::kMetadataSizeOffset);
static_assert(Code::kMetadataSizeOffsetEnd + 1 ==
Code::kInlinedBytecodeSizeOffset);
static_assert(Code::kInlinedBytecodeSizeOffsetEnd + 1 ==
Code::kOsrOffsetOffset);
static_assert(Code::kOsrOffsetOffsetEnd + 1 ==
Code::kHandlerTableOffsetOffset);
static_assert(Code::kHandlerTableOffsetOffsetEnd + 1 ==
Code::kUnwindingInfoOffsetOffset);
static_assert(Code::kUnwindingInfoOffsetOffsetEnd + 1 ==
Code::kConstantPoolOffsetOffset);
static_assert(Code::kConstantPoolOffsetOffsetEnd + 1 ==
Code::kCodeCommentsOffsetOffset);
static_assert(Code::kCodeCommentsOffsetOffsetEnd + 1 ==
Code::kBuiltinIdOffset);
static_assert(Code::kBuiltinIdOffsetEnd + 1 == Code::kUnalignedSize);
static constexpr int kStartOffset = Code::kFlagsOffset;
for (int j = kStartOffset; j < Code::kUnalignedSize; j++) {
hash = base::hash_combine(hash, size_t{code_ptr[j]});
}
}
// The builtins constants table is also tightly tied to embedded builtins.
hash = base::hash_combine(
hash, static_cast<size_t>(heap_.builtins_constants_table().length()));
return hash;
}
Isolate* Isolate::process_wide_shared_space_isolate_{nullptr};
thread_local Isolate::PerIsolateThreadData* g_current_per_isolate_thread_data_
V8_CONSTINIT = nullptr;
thread_local Isolate* g_current_isolate_ V8_CONSTINIT = nullptr;
namespace {
// A global counter for all generated Isolates; it might overflow.
std::atomic<int> isolate_counter{0};
} // namespace
Isolate::PerIsolateThreadData*
Isolate::FindOrAllocatePerThreadDataForThisThread() {
ThreadId thread_id = ThreadId::Current();
PerIsolateThreadData* per_thread = nullptr;
{
base::MutexGuard lock_guard(&thread_data_table_mutex_);
per_thread = thread_data_table_.Lookup(thread_id);
if (per_thread == nullptr) {
if (v8_flags.adjust_os_scheduling_parameters) {
base::OS::AdjustSchedulingParams();
}
per_thread = new PerIsolateThreadData(this, thread_id);
thread_data_table_.Insert(per_thread);
}
DCHECK(thread_data_table_.Lookup(thread_id) == per_thread);
}
return per_thread;
}
void Isolate::DiscardPerThreadDataForThisThread() {
ThreadId thread_id = ThreadId::TryGetCurrent();
if (thread_id.IsValid()) {
DCHECK_NE(thread_manager_->mutex_owner_.load(std::memory_order_relaxed),
thread_id);
base::MutexGuard lock_guard(&thread_data_table_mutex_);
PerIsolateThreadData* per_thread = thread_data_table_.Lookup(thread_id);
if (per_thread) {
DCHECK(!per_thread->thread_state_);
thread_data_table_.Remove(per_thread);
}
}
}
Isolate::PerIsolateThreadData* Isolate::FindPerThreadDataForThisThread() {
ThreadId thread_id = ThreadId::Current();
return FindPerThreadDataForThread(thread_id);
}
Isolate::PerIsolateThreadData* Isolate::FindPerThreadDataForThread(
ThreadId thread_id) {
PerIsolateThreadData* per_thread = nullptr;
{
base::MutexGuard lock_guard(&thread_data_table_mutex_);
per_thread = thread_data_table_.Lookup(thread_id);
}
return per_thread;
}
void Isolate::InitializeOncePerProcess() { Heap::InitializeOncePerProcess(); }
Address Isolate::get_address_from_id(IsolateAddressId id) {
return isolate_addresses_[id];
}
char* Isolate::Iterate(RootVisitor* v, char* thread_storage) {
ThreadLocalTop* thread = reinterpret_cast<ThreadLocalTop*>(thread_storage);
Iterate(v, thread);
return thread_storage + sizeof(ThreadLocalTop);
}
void Isolate::IterateThread(ThreadVisitor* v, char* t) {
ThreadLocalTop* thread = reinterpret_cast<ThreadLocalTop*>(t);
v->VisitThread(this, thread);
}
void Isolate::Iterate(RootVisitor* v, ThreadLocalTop* thread) {
// Visit the roots from the top for a given thread.
v->VisitRootPointer(Root::kStackRoots, nullptr,
FullObjectSlot(&thread->pending_exception_));
v->VisitRootPointer(Root::kStackRoots, nullptr,
FullObjectSlot(&thread->pending_message_));
v->VisitRootPointer(Root::kStackRoots, nullptr,
FullObjectSlot(&thread->context_));
v->VisitRootPointer(Root::kStackRoots, nullptr,
FullObjectSlot(&thread->scheduled_exception_));
for (v8::TryCatch* block = thread->try_catch_handler_; block != nullptr;
block = block->next_) {
// TODO(3770): Make TryCatch::exception_ an Address (and message_obj_ too).
v->VisitRootPointer(
Root::kStackRoots, nullptr,
FullObjectSlot(reinterpret_cast<Address>(&(block->exception_))));
v->VisitRootPointer(
Root::kStackRoots, nullptr,
FullObjectSlot(reinterpret_cast<Address>(&(block->message_obj_))));
}
// Iterate over pointers on native execution stack.
#if V8_ENABLE_WEBASSEMBLY
wasm::WasmCodeRefScope wasm_code_ref_scope;
if (v8_flags.experimental_wasm_stack_switching) {
wasm::StackMemory* current = wasm_stacks_;
DCHECK_NOT_NULL(current);
do {
if (current->IsActive()) {
// The active stack's jump buffer does not match the current state; use
// the thread info below instead.
current = current->next();
continue;
}
for (StackFrameIterator it(this, current); !it.done(); it.Advance()) {
it.frame()->Iterate(v);
}
current = current->next();
} while (current != wasm_stacks_);
}
#endif // V8_ENABLE_WEBASSEMBLY
for (StackFrameIterator it(this, thread); !it.done(); it.Advance()) {
it.frame()->Iterate(v);
}
}
void Isolate::Iterate(RootVisitor* v) {
ThreadLocalTop* current_t = thread_local_top();
Iterate(v, current_t);
}
void Isolate::RegisterTryCatchHandler(v8::TryCatch* that) {
thread_local_top()->try_catch_handler_ = that;
}
void Isolate::UnregisterTryCatchHandler(v8::TryCatch* that) {
DCHECK(thread_local_top()->try_catch_handler_ == that);
thread_local_top()->try_catch_handler_ = that->next_;
}
Handle<String> Isolate::StackTraceString() {
if (stack_trace_nesting_level_ == 0) {
stack_trace_nesting_level_++;
HeapStringAllocator allocator;
StringStream::ClearMentionedObjectCache(this);
StringStream accumulator(&allocator);
incomplete_message_ = &accumulator;
PrintStack(&accumulator);
Handle<String> stack_trace = accumulator.ToString(this);
incomplete_message_ = nullptr;
stack_trace_nesting_level_ = 0;
return stack_trace;
} else if (stack_trace_nesting_level_ == 1) {
stack_trace_nesting_level_++;
base::OS::PrintError(
"\n\nAttempt to print stack while printing stack (double fault)\n");
base::OS::PrintError(
"If you are lucky you may find a partial stack dump on stdout.\n\n");
incomplete_message_->OutputToStdOut();
return factory()->empty_string();
} else {
base::OS::Abort();
}
}
void Isolate::PushStackTraceAndDie(void* ptr1, void* ptr2, void* ptr3,
void* ptr4) {
StackTraceFailureMessage message(this,
StackTraceFailureMessage::kIncludeStackTrace,
ptr1, ptr2, ptr3, ptr4);
message.Print();
base::OS::Abort();
}
void Isolate::PushParamsAndDie(void* ptr1, void* ptr2, void* ptr3, void* ptr4,
void* ptr5, void* ptr6) {
StackTraceFailureMessage message(
this, StackTraceFailureMessage::kDontIncludeStackTrace, ptr1, ptr2, ptr3,
ptr4, ptr5, ptr6);
message.Print();
base::OS::Abort();
}
void StackTraceFailureMessage::Print() volatile {
// Print the details of this failure message object, including its own address
// to force stack allocation.
base::OS::PrintError(
"Stacktrace:\n ptr1=%p\n ptr2=%p\n ptr3=%p\n ptr4=%p\n "
"ptr5=%p\n ptr6=%p\n failure_message_object=%p\n%s",
ptr1_, ptr2_, ptr3_, ptr4_, ptr5_, ptr6_, this, &js_stack_trace_[0]);
}
StackTraceFailureMessage::StackTraceFailureMessage(
Isolate* isolate, StackTraceFailureMessage::StackTraceMode mode, void* ptr1,
void* ptr2, void* ptr3, void* ptr4, void* ptr5, void* ptr6) {
isolate_ = isolate;
ptr1_ = ptr1;
ptr2_ = ptr2;
ptr3_ = ptr3;
ptr4_ = ptr4;
ptr5_ = ptr5;
ptr6_ = ptr6;
// Write a stack trace into the {js_stack_trace_} buffer.
const size_t buffer_length = arraysize(js_stack_trace_);
memset(&js_stack_trace_, 0, buffer_length);
memset(&code_objects_, 0, sizeof(code_objects_));
if (mode == kIncludeStackTrace) {
FixedStringAllocator fixed(&js_stack_trace_[0], buffer_length - 1);
StringStream accumulator(&fixed, StringStream::kPrintObjectConcise);
isolate->PrintStack(&accumulator, Isolate::kPrintStackVerbose);
// Keep a reference to the last code objects to increase the likelihood
// that they get included in the minidump.
const size_t code_objects_length = arraysize(code_objects_);
size_t i = 0;
StackFrameIterator it(isolate);
for (; !it.done() && i < code_objects_length; it.Advance()) {
code_objects_[i++] =
reinterpret_cast<void*>(it.frame()->unchecked_code().ptr());
}
}
}
bool NoExtension(const v8::FunctionCallbackInfo<v8::Value>&) { return false; }
namespace {
class CallSiteBuilder {
public:
CallSiteBuilder(Isolate* isolate, FrameSkipMode mode, int limit,
Handle<Object> caller)
: isolate_(isolate),
mode_(mode),
limit_(limit),
caller_(caller),
skip_next_frame_(mode != SKIP_NONE) {
DCHECK_IMPLIES(mode_ == SKIP_UNTIL_SEEN, caller_->IsJSFunction());
// Modern web applications are usually built with multiple layers of
// framework and library code, and stack depth tends to be more than
// a dozen frames, so we over-allocate a bit here to avoid growing
// the elements array in the common case.
elements_ = isolate->factory()->NewFixedArray(std::min(64, limit));
}
bool Visit(FrameSummary const& summary) {
if (Full()) return false;
#if V8_ENABLE_WEBASSEMBLY
if (summary.IsWasm()) {
AppendWasmFrame(summary.AsWasm());
return true;
}
if (summary.IsWasmInlined()) {
AppendWasmInlinedFrame(summary.AsWasmInlined());
return true;
}
if (summary.IsBuiltin()) {
AppendBuiltinFrame(summary.AsBuiltin());
return true;
}
#endif // V8_ENABLE_WEBASSEMBLY
AppendJavaScriptFrame(summary.AsJavaScript());
return true;
}
void AppendAsyncFrame(Handle<JSGeneratorObject> generator_object) {
Handle<JSFunction> function(generator_object->function(), isolate_);
if (!IsVisibleInStackTrace(function)) return;
int flags = CallSiteInfo::kIsAsync;
if (IsStrictFrame(function)) flags |= CallSiteInfo::kIsStrict;
Handle<Object> receiver(generator_object->receiver(), isolate_);
Handle<BytecodeArray> code(function->shared().GetBytecodeArray(isolate_),
isolate_);
// The stored bytecode offset is relative to a different base than what
// is used in the source position table, hence the subtraction.
int offset = Smi::ToInt(generator_object->input_or_debug_pos()) -
(BytecodeArray::kHeaderSize - kHeapObjectTag);
Handle<FixedArray> parameters = isolate_->factory()->empty_fixed_array();
if (V8_UNLIKELY(v8_flags.detailed_error_stack_trace)) {
parameters = isolate_->factory()->CopyFixedArrayUpTo(
handle(generator_object->parameters_and_registers(), isolate_),
function->shared()
.internal_formal_parameter_count_without_receiver());
}
AppendFrame(receiver, function, code, offset, flags, parameters);
}
void AppendPromiseCombinatorFrame(Handle<JSFunction> element_function,
Handle<JSFunction> combinator) {
if (!IsVisibleInStackTrace(combinator)) return;
int flags =
CallSiteInfo::kIsAsync | CallSiteInfo::kIsSourcePositionComputed;
Handle<Object> receiver(combinator->native_context().promise_function(),
isolate_);
Handle<Code> code(combinator->code(), isolate_);
// TODO(mmarchini) save Promises list from the Promise combinator
Handle<FixedArray> parameters = isolate_->factory()->empty_fixed_array();
// We store the offset of the promise into the element function's
// hash field for element callbacks.
int promise_index = Smi::ToInt(element_function->GetIdentityHash()) - 1;
AppendFrame(receiver, combinator, code, promise_index, flags, parameters);
}
void AppendJavaScriptFrame(
FrameSummary::JavaScriptFrameSummary const& summary) {
// Filter out internal frames that we do not want to show.
if (!IsVisibleInStackTrace(summary.function())) return;
int flags = 0;
Handle<JSFunction> function = summary.function();
if (IsStrictFrame(function)) flags |= CallSiteInfo::kIsStrict;
if (summary.is_constructor()) flags |= CallSiteInfo::kIsConstructor;
AppendFrame(summary.receiver(), function, summary.abstract_code(),
summary.code_offset(), flags, summary.parameters());
}
#if V8_ENABLE_WEBASSEMBLY
void AppendWasmFrame(FrameSummary::WasmFrameSummary const& summary) {
if (summary.code()->kind() != wasm::WasmCode::kWasmFunction) return;
Handle<WasmInstanceObject> instance = summary.wasm_instance();
int flags = CallSiteInfo::kIsWasm;
if (instance->module_object().is_asm_js()) {
flags |= CallSiteInfo::kIsAsmJsWasm;
if (summary.at_to_number_conversion()) {
flags |= CallSiteInfo::kIsAsmJsAtNumberConversion;
}
}
auto code = Managed<wasm::GlobalWasmCodeRef>::Allocate(
isolate_, 0, summary.code(),
instance->module_object().shared_native_module());
AppendFrame(instance,
handle(Smi::FromInt(summary.function_index()), isolate_), code,
summary.code_offset(), flags,
isolate_->factory()->empty_fixed_array());
}
void AppendWasmInlinedFrame(
FrameSummary::WasmInlinedFrameSummary const& summary) {
Handle<HeapObject> code = isolate_->factory()->undefined_value();
int flags = CallSiteInfo::kIsWasm;
AppendFrame(summary.wasm_instance(),
handle(Smi::FromInt(summary.function_index()), isolate_), code,
summary.code_offset(), flags,
isolate_->factory()->empty_fixed_array());
}
void AppendBuiltinFrame(FrameSummary::BuiltinFrameSummary const& summary) {
Builtin builtin = summary.builtin();
Handle<Code> code = isolate_->builtins()->code_handle(builtin);
Handle<Object> function(Smi::FromInt(static_cast<int>(builtin)), isolate_);
int flags = CallSiteInfo::kIsBuiltin;
AppendFrame(summary.receiver(), function, code, summary.code_offset(),
flags, isolate_->factory()->empty_fixed_array());
}
#endif // V8_ENABLE_WEBASSEMBLY
bool Full() { return index_ >= limit_; }
Handle<FixedArray> Build() {
return FixedArray::ShrinkOrEmpty(isolate_, elements_, index_);
}
private:
// Poison stack frames below the first strict mode frame.
// The stack trace API should not expose receivers and function
// objects on frames deeper than the top-most one with a strict mode
// function.
bool IsStrictFrame(Handle<JSFunction> function) {
if (!encountered_strict_function_) {
encountered_strict_function_ =
is_strict(function->shared().language_mode());
}
return encountered_strict_function_;
}
// Determines whether the given stack frame should be displayed in a stack
// trace.
bool IsVisibleInStackTrace(Handle<JSFunction> function) {
return ShouldIncludeFrame(function) && IsNotHidden(function);
}
// This mechanism excludes a number of uninteresting frames from the stack
// trace. This can be the first frame (which will be a builtin-exit frame
// for the error constructor builtin) or every frame until encountering a
// user-specified function.
bool ShouldIncludeFrame(Handle<JSFunction> function) {
switch (mode_) {
case SKIP_NONE:
return true;
case SKIP_FIRST:
if (!skip_next_frame_) return true;
skip_next_frame_ = false;
return false;
case SKIP_UNTIL_SEEN:
if (skip_next_frame_ && (*function == *caller_)) {
skip_next_frame_ = false;
return false;
}
return !skip_next_frame_;
}
UNREACHABLE();
}
bool IsNotHidden(Handle<JSFunction> function) {
// TODO(szuend): Remove this check once the flag is enabled
// by default.
if (!v8_flags.experimental_stack_trace_frames &&
function->shared().IsApiFunction()) {
return false;
}
// Functions not defined in user scripts are not visible unless directly
// exposed, in which case the native flag is set.
// The --builtins-in-stack-traces command line flag allows including
// internal call sites in the stack trace for debugging purposes.
if (!v8_flags.builtins_in_stack_traces &&
!function->shared().IsUserJavaScript()) {
return function->shared().native() || function->shared().IsApiFunction();
}
return true;
}
void AppendFrame(Handle<Object> receiver_or_instance, Handle<Object> function,
Handle<HeapObject> code, int offset, int flags,
Handle<FixedArray> parameters) {
if (receiver_or_instance->IsTheHole(isolate_)) {
// TODO(jgruber): Fix all cases in which frames give us a hole value
// (e.g. the receiver in RegExp constructor frames).
receiver_or_instance = isolate_->factory()->undefined_value();
}
auto info = isolate_->factory()->NewCallSiteInfo(
receiver_or_instance, function, code, offset, flags, parameters);
elements_ = FixedArray::SetAndGrow(isolate_, elements_, index_++, info);
}
Isolate* isolate_;
const FrameSkipMode mode_;
int index_ = 0;
const int limit_;
const Handle<Object> caller_;
bool skip_next_frame_;
bool encountered_strict_function_ = false;
Handle<FixedArray> elements_;
};
bool GetStackTraceLimit(Isolate* isolate, int* result) {
if (v8_flags.correctness_fuzzer_suppressions) return false;
Handle<JSObject> error = isolate->error_function();
Handle<String> key = isolate->factory()->stackTraceLimit_string();
Handle<Object> stack_trace_limit =
JSReceiver::GetDataProperty(isolate, error, key);
if (!stack_trace_limit->IsNumber()) return false;
// Ensure that limit is not negative.
*result = std::max(FastD2IChecked(stack_trace_limit->Number()), 0);
if (*result != v8_flags.stack_trace_limit) {
isolate->CountUsage(v8::Isolate::kErrorStackTraceLimit);
}
return true;
}
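// For illustration (JavaScript-level behavior; hypothetical values):
//
//   Error.stackTraceLimit = 5;       // at most 5 call sites are captured
//   Error.stackTraceLimit = "none";  // non-number: capture is skipped entirely
//   delete Error.stackTraceLimit;    // likewise skips capture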
bool IsBuiltinFunction(Isolate* isolate, HeapObject object, Builtin builtin) {
if (!object.IsJSFunction()) return false;
JSFunction const function = JSFunction::cast(object);
return function.code() == isolate->builtins()->code(builtin);
}
void CaptureAsyncStackTrace(Isolate* isolate, Handle<JSPromise> promise,
CallSiteBuilder* builder) {
while (!builder->Full()) {
// Check that the {promise} is not settled.
if (promise->status() != Promise::kPending) return;
// Check that we have exactly one PromiseReaction on the {promise}.
if (!promise->reactions().IsPromiseReaction()) return;
Handle<PromiseReaction> reaction(
PromiseReaction::cast(promise->reactions()), isolate);
if (!reaction->next().IsSmi()) return;
// Check if the {reaction} has one of the known async function or
// async generator continuations as its fulfill handler.
if (IsBuiltinFunction(isolate, reaction->fulfill_handler(),
Builtin::kAsyncFunctionAwaitResolveClosure) ||
IsBuiltinFunction(isolate, reaction->fulfill_handler(),
Builtin::kAsyncGeneratorAwaitResolveClosure) ||
IsBuiltinFunction(
isolate, reaction->fulfill_handler(),
Builtin::kAsyncGeneratorYieldWithAwaitResolveClosure)) {
// Now peek into the handlers' AwaitContext to get to
// the JSGeneratorObject for the async function.
Handle<Context> context(
JSFunction::cast(reaction->fulfill_handler()).context(), isolate);
Handle<JSGeneratorObject> generator_object(
JSGeneratorObject::cast(context->extension()), isolate);
CHECK(generator_object->is_suspended());
// Append async frame corresponding to the {generator_object}.
builder->AppendAsyncFrame(generator_object);
// Try to continue from here.
if (generator_object->IsJSAsyncFunctionObject()) {
Handle<JSAsyncFunctionObject> async_function_object =
Handle<JSAsyncFunctionObject>::cast(generator_object);
promise = handle(async_function_object->promise(), isolate);
} else {
Handle<JSAsyncGeneratorObject> async_generator_object =
Handle<JSAsyncGeneratorObject>::cast(generator_object);
if (async_generator_object->queue().IsUndefined(isolate)) return;
Handle<AsyncGeneratorRequest> async_generator_request(
AsyncGeneratorRequest::cast(async_generator_object->queue()),
isolate);
promise = handle(JSPromise::cast(async_generator_request->promise()),
isolate);
}
} else if (IsBuiltinFunction(isolate, reaction->fulfill_handler(),
Builtin::kPromiseAllResolveElementClosure)) {
Handle<JSFunction> function(JSFunction::cast(reaction->fulfill_handler()),
isolate);
Handle<Context> context(function->context(), isolate);
Handle<JSFunction> combinator(context->native_context().promise_all(),
isolate);
builder->AppendPromiseCombinatorFrame(function, combinator);
// Now peek into the Promise.all() resolve element context to
// find the promise capability that's being resolved when all
// the concurrent promises resolve.
int const index =
PromiseBuiltins::kPromiseAllResolveElementCapabilitySlot;
Handle<PromiseCapability> capability(
PromiseCapability::cast(context->get(index)), isolate);
if (!capability->promise().IsJSPromise()) return;
promise = handle(JSPromise::cast(capability->promise()), isolate);
} else if (IsBuiltinFunction(
isolate, reaction->fulfill_handler(),
Builtin::kPromiseAllSettledResolveElementClosure)) {
Handle<JSFunction> function(JSFunction::cast(reaction->fulfill_handler()),
isolate);
Handle<Context> context(function->context(), isolate);
Handle<JSFunction> combinator(
context->native_context().promise_all_settled(), isolate);
builder->AppendPromiseCombinatorFrame(function, combinator);
// Now peek into the Promise.allSettled() resolve element context to
// find the promise capability that's being resolved when all
// the concurrent promises resolve.
int const index =
PromiseBuiltins::kPromiseAllResolveElementCapabilitySlot;
Handle<PromiseCapability> capability(
PromiseCapability::cast(context->get(index)), isolate);
if (!capability->promise().IsJSPromise()) return;
promise = handle(JSPromise::cast(capability->promise()), isolate);
} else if (IsBuiltinFunction(isolate, reaction->reject_handler(),
Builtin::kPromiseAnyRejectElementClosure)) {
Handle<JSFunction> function(JSFunction::cast(reaction->reject_handler()),
isolate);
Handle<Context> context(function->context(), isolate);
Handle<JSFunction> combinator(context->native_context().promise_any(),
isolate);
builder->AppendPromiseCombinatorFrame(function, combinator);
// Now peek into the Promise.any() reject element context to
// find the promise capability that's being resolved when any of
// the concurrent promises resolve.
int const index = PromiseBuiltins::kPromiseAnyRejectElementCapabilitySlot;
Handle<PromiseCapability> capability(
PromiseCapability::cast(context->get(index)), isolate);
if (!capability->promise().IsJSPromise()) return;
promise = handle(JSPromise::cast(capability->promise()), isolate);
} else if (IsBuiltinFunction(isolate, reaction->fulfill_handler(),
Builtin::kPromiseCapabilityDefaultResolve)) {
Handle<JSFunction> function(JSFunction::cast(reaction->fulfill_handler()),
isolate);
Handle<Context> context(function->context(), isolate);
promise =
handle(JSPromise::cast(context->get(PromiseBuiltins::kPromiseSlot)),
isolate);
} else {
// We have some generic promise chain here, so try to
// continue with the chained promise on the reaction
// (only works for native promise chains).
Handle<HeapObject> promise_or_capability(
reaction->promise_or_capability(), isolate);
if (promise_or_capability->IsJSPromise()) {
promise = Handle<JSPromise>::cast(promise_or_capability);
} else if (promise_or_capability->IsPromiseCapability()) {
Handle<PromiseCapability> capability =
Handle<PromiseCapability>::cast(promise_or_capability);
if (!capability->promise().IsJSPromise()) return;
promise = handle(JSPromise::cast(capability->promise()), isolate);
} else {
// Otherwise the {promise_or_capability} must be undefined here.
CHECK(promise_or_capability->IsUndefined(isolate));
return;
}
}
}
}
void CaptureAsyncStackTrace(Isolate* isolate, CallSiteBuilder* builder) {
Handle<Object> current_microtask = isolate->factory()->current_microtask();
if (current_microtask->IsPromiseReactionJobTask()) {
Handle<PromiseReactionJobTask> promise_reaction_job_task =
Handle<PromiseReactionJobTask>::cast(current_microtask);
// Check if the {reaction} has one of the known async function or
// async generator continuations as its fulfill handler.
if (IsBuiltinFunction(isolate, promise_reaction_job_task->handler(),
Builtin::kAsyncFunctionAwaitResolveClosure) ||
IsBuiltinFunction(isolate, promise_reaction_job_task->handler(),
Builtin::kAsyncGeneratorAwaitResolveClosure) ||
IsBuiltinFunction(
isolate, promise_reaction_job_task->handler(),
Builtin::kAsyncGeneratorYieldWithAwaitResolveClosure) ||
IsBuiltinFunction(isolate, promise_reaction_job_task->handler(),
Builtin::kAsyncFunctionAwaitRejectClosure) ||
IsBuiltinFunction(isolate, promise_reaction_job_task->handler(),
Builtin::kAsyncGeneratorAwaitRejectClosure)) {
// Now peek into the handlers' AwaitContext to get to
// the JSGeneratorObject for the async function.
Handle<Context> context(
JSFunction::cast(promise_reaction_job_task->handler()).context(),
isolate);
Handle<JSGeneratorObject> generator_object(
JSGeneratorObject::cast(context->extension()), isolate);
if (generator_object->is_executing()) {
if (generator_object->IsJSAsyncFunctionObject()) {
Handle<JSAsyncFunctionObject> async_function_object =
Handle<JSAsyncFunctionObject>::cast(generator_object);
Handle<JSPromise> promise(async_function_object->promise(), isolate);
CaptureAsyncStackTrace(isolate, promise, builder);
} else {
Handle<JSAsyncGeneratorObject> async_generator_object =
Handle<JSAsyncGeneratorObject>::cast(generator_object);
Handle<Object> queue(async_generator_object->queue(), isolate);
if (!queue->IsUndefined(isolate)) {
Handle<AsyncGeneratorRequest> async_generator_request =
Handle<AsyncGeneratorRequest>::cast(queue);
Handle<JSPromise> promise(
JSPromise::cast(async_generator_request->promise()), isolate);
CaptureAsyncStackTrace(isolate, promise, builder);
}
}
}
} else {
// The {promise_reaction_job_task} doesn't belong to an await (or
// yield inside an async generator), but we might still be able to
// find an async frame if we follow along the chain of promises on
// the {promise_reaction_job_task}.
Handle<HeapObject> promise_or_capability(
promise_reaction_job_task->promise_or_capability(), isolate);
if (promise_or_capability->IsJSPromise()) {
Handle<JSPromise> promise =
Handle<JSPromise>::cast(promise_or_capability);
CaptureAsyncStackTrace(isolate, promise, builder);
}
}
}
}
template <typename Visitor>
void VisitStack(Isolate* isolate, Visitor* visitor,
StackTrace::StackTraceOptions options = StackTrace::kDetailed) {
DisallowJavascriptExecution no_js(isolate);
for (StackFrameIterator it(isolate); !it.done(); it.Advance()) {
StackFrame* frame = it.frame();
switch (frame->type()) {
case StackFrame::BUILTIN_EXIT:
case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION:
case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH:
case StackFrame::TURBOFAN:
case StackFrame::MAGLEV:
case StackFrame::INTERPRETED:
case StackFrame::BASELINE:
case StackFrame::BUILTIN:
#if V8_ENABLE_WEBASSEMBLY
case StackFrame::STUB:
case StackFrame::WASM:
#endif // V8_ENABLE_WEBASSEMBLY
{
// A standard frame may include many summarized frames (due to
// inlining).
std::vector<FrameSummary> summaries;
CommonFrame::cast(frame)->Summarize(&summaries);
for (auto rit = summaries.rbegin(); rit != summaries.rend(); ++rit) {
FrameSummary& summary = *rit;
// Skip frames from other origins when asked to do so.
if (!(options & StackTrace::kExposeFramesAcrossSecurityOrigins) &&
!summary.native_context()->HasSameSecurityTokenAs(
isolate->context())) {
continue;
}
if (!visitor->Visit(summary)) return;
}
break;
}
default:
break;
}
}
}
Handle<FixedArray> CaptureSimpleStackTrace(Isolate* isolate, int limit,
FrameSkipMode mode,
Handle<Object> caller) {
TRACE_EVENT_BEGIN1(TRACE_DISABLED_BY_DEFAULT("v8.stack_trace"), __func__,
"maxFrameCount", limit);
#if V8_ENABLE_WEBASSEMBLY
wasm::WasmCodeRefScope code_ref_scope;
#endif // V8_ENABLE_WEBASSEMBLY
CallSiteBuilder builder(isolate, mode, limit, caller);
VisitStack(isolate, &builder);
// If --async-stack-traces is enabled and the "current microtask" is a
// PromiseReactionJobTask, we try to enrich the stack trace with async
// frames.
if (v8_flags.async_stack_traces) {
CaptureAsyncStackTrace(isolate, &builder);
}
Handle<FixedArray> stack_trace = builder.Build();
TRACE_EVENT_END1(TRACE_DISABLED_BY_DEFAULT("v8.stack_trace"), __func__,
"frameCount", stack_trace->length());
return stack_trace;
}
} // namespace
MaybeHandle<JSObject> Isolate::CaptureAndSetErrorStack(
Handle<JSObject> error_object, FrameSkipMode mode, Handle<Object> caller) {
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.stack_trace"), __func__);
Handle<Object> error_stack = factory()->undefined_value();
// Capture the "simple stack trace" for the error.stack property,
// which can be disabled by setting Error.stackTraceLimit to a non-number
// value or simply deleting the property. If the inspector
// is active, and requests more stack frames than the JavaScript
// program itself, we collect up to the maximum.
int stack_trace_limit = 0;
if (GetStackTraceLimit(this, &stack_trace_limit)) {
int limit = stack_trace_limit;
if (capture_stack_trace_for_uncaught_exceptions_ &&
!(stack_trace_for_uncaught_exceptions_options_ &
StackTrace::kExposeFramesAcrossSecurityOrigins)) {
// Collect up to the maximum of what the JavaScript program and
// the inspector want. There's a special case here where the API
// can ask the stack traces to also include cross-origin frames,
// in which case we collect a separate trace below. Note that
// the inspector doesn't use this option, so we could as well
// just deprecate this in the future.
if (limit < stack_trace_for_uncaught_exceptions_frame_limit_) {
limit = stack_trace_for_uncaught_exceptions_frame_limit_;
}
}
error_stack = CaptureSimpleStackTrace(this, limit, mode, caller);
}
// Next is the inspector part: Depending on whether we got a "simple
// stack trace" above and whether that's usable (meaning the API
// didn't request to include cross-origin frames), we remember the
// cap for the stack trace (either a positive limit indicating that
// the Error.stackTraceLimit value was below what was requested via
// the API, or a negative limit to indicate the opposite), or we
// collect a "detailed stack trace" eagerly and stash that away.
if (capture_stack_trace_for_uncaught_exceptions_) {
Handle<Object> limit_or_stack_frame_infos;
if (error_stack->IsUndefined(this) ||
(stack_trace_for_uncaught_exceptions_options_ &
StackTrace::kExposeFramesAcrossSecurityOrigins)) {
limit_or_stack_frame_infos = CaptureDetailedStackTrace(
stack_trace_for_uncaught_exceptions_frame_limit_,
stack_trace_for_uncaught_exceptions_options_);
} else {
int limit =
stack_trace_limit > stack_trace_for_uncaught_exceptions_frame_limit_
? -stack_trace_for_uncaught_exceptions_frame_limit_
: stack_trace_limit;
limit_or_stack_frame_infos = handle(Smi::FromInt(limit), this);
}
error_stack =
factory()->NewErrorStackData(error_stack, limit_or_stack_frame_infos);
}
RETURN_ON_EXCEPTION(
this,
JSObject::SetProperty(this, error_object, factory()->error_stack_symbol(),
error_stack, StoreOrigin::kMaybeKeyed,
Just(ShouldThrow::kThrowOnError)),
JSObject);
return error_object;
}
Handle<FixedArray> Isolate::GetDetailedStackTrace(
Handle<JSReceiver> maybe_error_object) {
ErrorUtils::StackPropertyLookupResult lookup =
ErrorUtils::GetErrorStackProperty(this, maybe_error_object);
if (!lookup.error_stack->IsErrorStackData()) return {};
Handle<ErrorStackData> error_stack_data =
Handle<ErrorStackData>::cast(lookup.error_stack);
ErrorStackData::EnsureStackFrameInfos(this, error_stack_data);
if (!error_stack_data->limit_or_stack_frame_infos().IsFixedArray()) return {};
return handle(
FixedArray::cast(error_stack_data->limit_or_stack_frame_infos()), this);
}
Handle<FixedArray> Isolate::GetSimpleStackTrace(
Handle<JSReceiver> maybe_error_object) {
ErrorUtils::StackPropertyLookupResult lookup =
ErrorUtils::GetErrorStackProperty(this, maybe_error_object);
if (lookup.error_stack->IsFixedArray()) {
return Handle<FixedArray>::cast(lookup.error_stack);
}
if (!lookup.error_stack->IsErrorStackData()) {
return factory()->empty_fixed_array();
}
Handle<ErrorStackData> error_stack_data =
Handle<ErrorStackData>::cast(lookup.error_stack);
if (!error_stack_data->HasCallSiteInfos()) {
return factory()->empty_fixed_array();
}
return handle(error_stack_data->call_site_infos(), this);
}
Address Isolate::GetAbstractPC(int* line, int* column) {
JavaScriptStackFrameIterator it(this);
if (it.done()) {
*line = -1;
*column = -1;
return kNullAddress;
}
JavaScriptFrame* frame = it.frame();
DCHECK(!frame->is_builtin());
Handle<SharedFunctionInfo> shared = handle(frame->function().shared(), this);
SharedFunctionInfo::EnsureSourcePositionsAvailable(this, shared);
int position = frame->position();
Object maybe_script = frame->function().shared().script();
if (maybe_script.IsScript()) {
Handle<Script> script(Script::cast(maybe_script), this);
Script::PositionInfo info;
Script::GetPositionInfo(script, position, &info);
*line = info.line + 1;
*column = info.column + 1;
} else {
*line = position;
*column = -1;
}
if (frame->is_unoptimized()) {
UnoptimizedFrame* iframe = static_cast<UnoptimizedFrame*>(frame);
Address bytecode_start =
iframe->GetBytecodeArray().GetFirstBytecodeAddress();
return bytecode_start + iframe->GetBytecodeOffset();
}
return frame->pc();
}
namespace {
class StackFrameBuilder {
public:
StackFrameBuilder(Isolate* isolate, int limit)
: isolate_(isolate),
frames_(isolate_->factory()->empty_fixed_array()),
index_(0),
limit_(limit) {}
bool Visit(FrameSummary& summary) {
// Check if we have enough capacity left.
if (index_ >= limit_) return false;
// Skip frames that aren't subject to debugging.
if (!summary.is_subject_to_debugging()) return true;
Handle<StackFrameInfo> frame = summary.CreateStackFrameInfo();
frames_ = FixedArray::SetAndGrow(isolate_, frames_, index_++, frame);
return true;
}
Handle<FixedArray> Build() {
return FixedArray::ShrinkOrEmpty(isolate_, frames_, index_);
}
private:
Isolate* isolate_;
Handle<FixedArray> frames_;
int index_;
int limit_;
};
} // namespace
Handle<FixedArray> Isolate::CaptureDetailedStackTrace(
int limit, StackTrace::StackTraceOptions options) {
TRACE_EVENT_BEGIN1(TRACE_DISABLED_BY_DEFAULT("v8.stack_trace"), __func__,
"maxFrameCount", limit);
StackFrameBuilder builder(this, limit);
VisitStack(this, &builder, options);
Handle<FixedArray> stack_trace = builder.Build();
TRACE_EVENT_END1(TRACE_DISABLED_BY_DEFAULT("v8.stack_trace"), __func__,
"frameCount", stack_trace->length());
return stack_trace;
}
namespace {
class CurrentScriptNameStackVisitor {
public:
explicit CurrentScriptNameStackVisitor(Isolate* isolate)
: isolate_(isolate) {}
bool Visit(FrameSummary& summary) {
// Skip frames that aren't subject to debugging. Keep this in sync with
// StackFrameBuilder::Visit so both visitors visit the same frames.
if (!summary.is_subject_to_debugging()) return true;
// Frames that are subject to debugging always have a valid script object.
Handle<Script> script = Handle<Script>::cast(summary.script());
Handle<Object> name_or_url_obj =
handle(script->GetNameOrSourceURL(), isolate_);
if (!name_or_url_obj->IsString()) return true;
Handle<String> name_or_url = Handle<String>::cast(name_or_url_obj);
if (!name_or_url->length()) return true;
name_or_url_ = name_or_url;
return false;
}
Handle<String> CurrentScriptNameOrSourceURL() const { return name_or_url_; }
private:
Isolate* const isolate_;
Handle<String> name_or_url_;
};
} // namespace
Handle<String> Isolate::CurrentScriptNameOrSourceURL() {
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.stack_trace"), __func__);
CurrentScriptNameStackVisitor visitor(this);
VisitStack(this, &visitor);
return visitor.CurrentScriptNameOrSourceURL();
}
void Isolate::PrintStack(FILE* out, PrintStackMode mode) {
if (stack_trace_nesting_level_ == 0) {
stack_trace_nesting_level_++;
StringStream::ClearMentionedObjectCache(this);
HeapStringAllocator allocator;
StringStream accumulator(&allocator);
incomplete_message_ = &accumulator;
PrintStack(&accumulator, mode);
accumulator.OutputToFile(out);
InitializeLoggingAndCounters();
accumulator.Log(this);
incomplete_message_ = nullptr;
stack_trace_nesting_level_ = 0;
} else if (stack_trace_nesting_level_ == 1) {
stack_trace_nesting_level_++;
base::OS::PrintError(
"\n\nAttempt to print stack while printing stack (double fault)\n");
base::OS::PrintError(
"If you are lucky you may find a partial stack dump on stdout.\n\n");
incomplete_message_->OutputToFile(out);
}
}
static void PrintFrames(Isolate* isolate, StringStream* accumulator,
StackFrame::PrintMode mode) {
StackFrameIterator it(isolate);
for (int i = 0; !it.done(); it.Advance()) {
it.frame()->Print(accumulator, mode, i++);
}
}
void Isolate::PrintStack(StringStream* accumulator, PrintStackMode mode) {
HandleScope scope(this);
DCHECK(accumulator->IsMentionedObjectCacheClear(this));
// Avoid printing anything if there are no frames.
if (c_entry_fp(thread_local_top()) == 0) return;
accumulator->Add(
"\n==== JS stack trace =========================================\n\n");
PrintFrames(this, accumulator, StackFrame::OVERVIEW);
if (mode == kPrintStackVerbose) {
accumulator->Add(
"\n==== Details ================================================\n\n");
PrintFrames(this, accumulator, StackFrame::DETAILS);
accumulator->PrintMentionedObjectCache(this);
}
accumulator->Add("=====================\n\n");
}
void Isolate::SetFailedAccessCheckCallback(
v8::FailedAccessCheckCallback callback) {
thread_local_top()->failed_access_check_callback_ = callback;
}
void Isolate::ReportFailedAccessCheck(Handle<JSObject> receiver) {
if (!thread_local_top()->failed_access_check_callback_) {
return ScheduleThrow(*factory()->NewTypeError(MessageTemplate::kNoAccess));
}
DCHECK(receiver->IsAccessCheckNeeded());
DCHECK(!context().is_null());
// Get the data object from access check info.
HandleScope scope(this);
Handle<Object> data;
{
DisallowGarbageCollection no_gc;
AccessCheckInfo access_check_info = AccessCheckInfo::Get(this, receiver);
if (access_check_info.is_null()) {
no_gc.Release();
return ScheduleThrow(
*factory()->NewTypeError(MessageTemplate::kNoAccess));
}
data = handle(access_check_info.data(), this);
}
// Leaving JavaScript.
VMState<EXTERNAL> state(this);
thread_local_top()->failed_access_check_callback_(
v8::Utils::ToLocal(receiver), v8::ACCESS_HAS, v8::Utils::ToLocal(data));
}
bool Isolate::MayAccess(Handle<Context> accessing_context,
Handle<JSObject> receiver) {
DCHECK(receiver->IsJSGlobalProxy() || receiver->IsAccessCheckNeeded());
// Check for compatibility between the security tokens in the
// current lexical context and the accessed object.
// During bootstrapping, callback functions are not enabled yet.
if (bootstrapper()->IsActive()) return true;
{
DisallowGarbageCollection no_gc;
if (receiver->IsJSGlobalProxy()) {
Object receiver_context = JSGlobalProxy::cast(*receiver).native_context();
if (!receiver_context.IsContext()) return false;
// Get the native context of the current top context.
// Avoid using Isolate::native_context() because it uses a Handle.
Context native_context = accessing_context->native_context();
if (receiver_context == native_context) return true;
if (Context::cast(receiver_context).security_token() ==
native_context.security_token())
return true;
}
}
HandleScope scope(this);
Handle<Object> data;
v8::AccessCheckCallback callback = nullptr;
{
DisallowGarbageCollection no_gc;
AccessCheckInfo access_check_info = AccessCheckInfo::Get(this, receiver);
if (access_check_info.is_null()) return false;
Object fun_obj = access_check_info.callback();
callback = v8::ToCData<v8::AccessCheckCallback>(fun_obj);
data = handle(access_check_info.data(), this);
}
{
// Leaving JavaScript.
VMState<EXTERNAL> state(this);
return callback(v8::Utils::ToLocal(accessing_context),
v8::Utils::ToLocal(receiver), v8::Utils::ToLocal(data));
}
}
Object Isolate::StackOverflow() {
// Whoever calls this method should not have overflowed the stack limit by
// too much. Otherwise we risk actually running out of stack space.
// We allow for up to 8 KB of overflow, because we typically allow up to 4 KB
// of overflow per frame in generated code, but we might call through several
// smaller frames until we reach this method.
// If this DCHECK fails, one of the frames on the stack should be augmented by
// an additional stack check.
#if defined(V8_USE_ADDRESS_SANITIZER) || defined(MEMORY_SANITIZER)
// Allow for a bit more overflow in sanitizer builds, because C++ frames take
// significantly more space there.
DCHECK_GE(GetCurrentStackPosition(), stack_guard()->real_climit() - 32 * KB);
#else
DCHECK_GE(GetCurrentStackPosition(), stack_guard()->real_climit() - 8 * KB);
#endif
if (v8_flags.correctness_fuzzer_suppressions) {
FATAL("Aborting on stack overflow");
}
DisallowJavascriptExecution no_js(this);
HandleScope scope(this);
Handle<JSFunction> fun = range_error_function();
Handle<Object> msg = factory()->NewStringFromAsciiChecked(
MessageFormatter::TemplateString(MessageTemplate::kStackOverflow));
Handle<Object> options = factory()->undefined_value();
Handle<Object> no_caller;
Handle<JSObject> exception;
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
this, exception,
ErrorUtils::Construct(this, fun, fun, msg, options, SKIP_NONE, no_caller,
ErrorUtils::StackTraceCollection::kEnabled));
JSObject::AddProperty(this, exception, factory()->wasm_uncatchable_symbol(),
factory()->true_value(), NONE);
Throw(*exception);
#ifdef VERIFY_HEAP
if (v8_flags.verify_heap && v8_flags.stress_compaction) {
heap()->CollectAllGarbage(Heap::kNoGCFlags,
GarbageCollectionReason::kTesting);
}
#endif // VERIFY_HEAP
return ReadOnlyRoots(heap()).exception();
}
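// Throws |exception| after recording its source location (start position, end
// position and script) on the exception object under internal error symbols,
// so the location can later be recovered by ComputeLocationFromException.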
Object Isolate::ThrowAt(Handle<JSObject> exception, MessageLocation* location) {
Handle<Name> key_start_pos = factory()->error_start_pos_symbol();
Object::SetProperty(this, exception, key_start_pos,
handle(Smi::FromInt(location->start_pos()), this),
StoreOrigin::kMaybeKeyed,
Just(ShouldThrow::kThrowOnError))
.Check();
Handle<Name> key_end_pos = factory()->error_end_pos_symbol();
Object::SetProperty(this, exception, key_end_pos,
handle(Smi::FromInt(location->end_pos()), this),
StoreOrigin::kMaybeKeyed,
Just(ShouldThrow::kThrowOnError))
.Check();
Handle<Name> key_script = factory()->error_script_symbol();
Object::SetProperty(this, exception, key_script, location->script(),
StoreOrigin::kMaybeKeyed,
Just(ShouldThrow::kThrowOnError))
.Check();
return ThrowInternal(*exception, location);
}
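// Requests termination of JavaScript execution by throwing the special,
// uncatchable termination exception.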
Object Isolate::TerminateExecution() {
return Throw(ReadOnlyRoots(this).termination_exception());
}
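// Clears a previously requested termination from an active v8::TryCatch
// handler as well as from the pending and scheduled exception slots.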
void Isolate::CancelTerminateExecution() {
if (try_catch_handler()) {
try_catch_handler()->has_terminated_ = false;
}
if (has_pending_exception() && is_execution_termination_pending()) {
thread_local_top()->external_caught_exception_ = false;
clear_pending_exception();
}
if (has_scheduled_exception() && is_execution_terminating()) {
thread_local_top()->external_caught_exception_ = false;
clear_scheduled_exception();
}
}
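// Queues an embedder-provided interrupt callback and asks the stack guard to
// deliver it; the callback runs on this isolate's thread the next time API
// interrupts are serviced (see InvokeApiInterruptCallbacks below).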
void Isolate::RequestInterrupt(InterruptCallback callback, void* data) {
ExecutionAccess access(this);
api_interrupts_queue_.push(InterruptEntry(callback, data));
stack_guard()->RequestApiInterrupt();
}
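// A minimal usage sketch from the embedder side, via the public API
// (|MyData| is a hypothetical embedder type):
//   isolate->RequestInterrupt(
//       [](v8::Isolate* isolate, void* data) {
//         static_cast<MyData*>(data)->OnInterrupt(isolate);
//       },
//       my_data);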
void Isolate::InvokeApiInterruptCallbacks() {
RCS_SCOPE(this, RuntimeCallCounterId::kInvokeApiInterruptCallbacks);
// Note: the callback below should be called outside of the execution access
// lock.
while (true) {
InterruptEntry entry;
{
ExecutionAccess access(this);
if (api_interrupts_queue_.empty()) return;
entry = api_interrupts_queue_.front();
api_interrupts_queue_.pop();
}
VMState<EXTERNAL> state(this);
HandleScope handle_scope(this);
entry.first(reinterpret_cast<v8::Isolate*>(this), entry.second);
}
}
namespace {
void ReportBootstrappingException(Handle<Object> exception,
MessageLocation* location) {
base::OS::PrintError("Exception thrown during bootstrapping\n");
if (location == nullptr || location->script().is_null()) return;
// We are bootstrapping and caught an error for which the location is set and
// a script is available. In this case we could have an extension (or an
// internal error somewhere), so we print the line number at which the error
// occurred to the console for easier debugging.
int line_number =
location->script()->GetLineNumber(location->start_pos()) + 1;
if (exception->IsString() && location->script()->name().IsString()) {
base::OS::PrintError(
"Extension or internal compilation error: %s in %s at line %d.\n",
String::cast(*exception).ToCString().get(),
String::cast(location->script()->name()).ToCString().get(),
line_number);
} else if (location->script()->name().IsString()) {
base::OS::PrintError(
"Extension or internal compilation error in %s at line %d.\n",
String::cast(location->script()->name()).ToCString().get(),
line_number);
} else if (exception->IsString()) {
base::OS::PrintError("Extension or internal compilation error: %s.\n",
String::cast(*exception).ToCString().get());
} else {
base::OS::PrintError("Extension or internal compilation error.\n");
}
#ifdef OBJECT_PRINT
// Since comments and empty lines have been stripped from the source of
// builtins, print the actual source here so that line numbers match.
if (location->script()->source().IsString()) {
Handle<String> src(String::cast(location->script()->source()),
location->script()->GetIsolate());
PrintF("Failing script:");
int len = src->length();
if (len == 0) {
PrintF(" <not available>\n");
} else {
PrintF("\n");
line_number = 1;
PrintF("%5d: ", line_number);
for (int i = 0; i < len; i++) {
uint16_t character = src->Get(i);
PrintF("%c", character);
if (character == '\n' && i < len - 2) {
PrintF("%5d: ", ++line_number);
}
}
PrintF("\n");
}
}
#endif
}
} // anonymous namespace
Handle<JSMessageObject> Isolate::CreateMessageOrAbort(
Handle<Object> exception, MessageLocation* location) {
Handle<JSMessageObject> message_obj = CreateMessage(exception, location);
// If the abort-on-uncaught-exception flag is specified, and if the
// embedder didn't specify a custom uncaught exception callback,
// or if the custom callback determined that V8 should abort, then
// abort.
// Cache the flag on a static so that we can modify the value looked up below
// in the presence of read-only flags.
static bool abort_on_uncaught_exception =
v8_flags.abort_on_uncaught_exception;
if (abort_on_uncaught_exception) {
CatchType prediction = PredictExceptionCatcher();
if ((prediction == NOT_CAUGHT || prediction == CAUGHT_BY_EXTERNAL) &&
(!abort_on_uncaught_exception_callback_ ||
abort_on_uncaught_exception_callback_(
reinterpret_cast<v8::Isolate*>(this)))) {
// Prevent endless recursion.
abort_on_uncaught_exception = false;
// This flag is intended for use by JavaScript developers, so
// print a user-friendly stack trace (not an internal one).
PrintF(stderr, "%s\n\nFROM\n",
MessageHandler::GetLocalizedMessage(this, message_obj).get());
std::ostringstream stack_trace_stream;
PrintCurrentStackTrace(stack_trace_stream);
PrintF(stderr, "%s", stack_trace_stream.str().c_str());
base::OS::Abort();
}
}
return message_obj;
}
Object Isolate::ThrowInternal(Object raw_exception, MessageLocation* location) {
DCHECK(!has_pending_exception());
IF_WASM(DCHECK_IMPLIES, trap_handler::IsTrapHandlerEnabled(),
!trap_handler::IsThreadInWasm());
HandleScope scope(this);
Handle<Object> exception(raw_exception, this);
if (v8_flags.print_all_exceptions) {
PrintF("=========================================================\n");
PrintF("Exception thrown:\n");
if (location) {
Handle<Script> script = location->script();
Handle<Object> name(script->GetNameOrSourceURL(), this);
PrintF("at ");
if (name->IsString() && String::cast(*name).length() > 0)
String::cast(*name).PrintOn(stdout);
else
PrintF("<anonymous>");
// Script::GetLineNumber and Script::GetColumnNumber can allocate on the heap
// to initialize the line_ends array, so be careful when calling them.
#ifdef DEBUG
if (AllowGarbageCollection::IsAllowed()) {
#else
if ((false)) {
#endif
Script::PositionInfo start_pos;
Script::PositionInfo end_pos;
Script::GetPositionInfo(script, location->start_pos(), &start_pos);
Script::GetPositionInfo(script, location->end_pos(), &end_pos);
PrintF(", %d:%d - %d:%d\n", start_pos.line + 1, start_pos.column + 1,
end_pos.line + 1, end_pos.column + 1);
// Make sure to update the raw exception pointer in case it moved.
raw_exception = *exception;
} else {
PrintF(", line %d\n", script->GetLineNumber(location->start_pos()) + 1);
}
}
raw_exception.Print();
PrintF("Stack Trace:\n");
PrintStack(stdout);
PrintF("=========================================================\n");
}
// Determine whether a message needs to be created for the given exception
// depending on the following criteria:
// 1) External v8::TryCatch missing: Always create a message because any
// JavaScript handler for a finally-block might re-throw to top-level.
// 2) External v8::TryCatch exists: Only create a message if the handler
// captures messages or is verbose (which reports despite the catch).
// 3) ReThrow from v8::TryCatch: The message from a previous throw still
// exists and we preserve it instead of creating a new message.
bool requires_message = try_catch_handler() == nullptr ||
try_catch_handler()->is_verbose_ ||
try_catch_handler()->capture_message_;
bool rethrowing_message = thread_local_top()->rethrowing_message_;
thread_local_top()->rethrowing_message_ = false;
// Notify debugger of exception.
if (is_catchable_by_javascript(raw_exception)) {
base::Optional<Object> maybe_exception = debug()->OnThrow(exception);
if (maybe_exception.has_value()) {
return *maybe_exception;
}
}
// Generate the message if required.
if (requires_message && !rethrowing_message) {
MessageLocation computed_location;
// If no location was specified we try to use a computed one instead.
if (location == nullptr && ComputeLocation(&computed_location)) {
location = &computed_location;
}
if (bootstrapper()->IsActive()) {
// It's not safe to try to make message objects or collect stack traces
// while the bootstrapper is active since the infrastructure may not have
// been properly initialized.
ReportBootstrappingException(exception, location);
} else {
Handle<Object> message_obj = CreateMessageOrAbort(exception, location);
set_pending_message(*message_obj);
}
}
// Set the exception being thrown.
set_pending_exception(*exception);
return ReadOnlyRoots(heap()).exception();
}
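// Re-throws an already-reported exception without creating a new message
// object. The two-argument overload additionally restores the pending message
// that accompanied the original throw.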
Object Isolate::ReThrow(Object exception) {
DCHECK(!has_pending_exception());
// Set the exception being re-thrown.
set_pending_exception(exception);
return ReadOnlyRoots(heap()).exception();
}
Object Isolate::ReThrow(Object exception, Object message) {
DCHECK(!has_pending_exception());
DCHECK(!has_pending_message());
set_pending_message(message);
return ReThrow(exception);
}
namespace {
#if V8_ENABLE_WEBASSEMBLY
// This scope will set the thread-in-wasm flag after the execution of all
// destructors. The thread-in-wasm flag is only set when the scope gets enabled.
class SetThreadInWasmFlagScope {
public:
SetThreadInWasmFlagScope() {
DCHECK_IMPLIES(trap_handler::IsTrapHandlerEnabled(),
!trap_handler::IsThreadInWasm());
}
~SetThreadInWasmFlagScope() {
if (enabled_) trap_handler::SetThreadInWasm();
}
void Enable() { enabled_ = true; }
private:
bool enabled_ = false;
};
#endif // V8_ENABLE_WEBASSEMBLY
} // namespace
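// Walks the stack until a frame that can handle the pending exception is
// found, records the handler's context, entry point, frame pointer and stack
// pointer in the thread-local top (to be consumed by CEntry), clears the
// pending exception and returns it so generated code can carry it in the
// return register.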
Object Isolate::UnwindAndFindHandler() {
// TODO(v8:12676): Fix gcmole failures in this function.
DisableGCMole no_gcmole;
DisallowGarbageCollection no_gc;
#if V8_ENABLE_WEBASSEMBLY
// Create the {SetThreadInWasmFlagScope} first in this function so that its
// destructor gets called after all the other destructors. It is important
// that the destructor sets the thread-in-wasm flag after all other
// destructors. The other destructors may cause exceptions, e.g. ASan on
// Windows, which would invalidate the thread-in-wasm flag when the wasm trap
// handler handles such non-wasm exceptions.
SetThreadInWasmFlagScope set_thread_in_wasm_flag_scope;
#endif // V8_ENABLE_WEBASSEMBLY
Object exception = pending_exception();
auto FoundHandler = [&](Context context, Address instruction_start,
intptr_t handler_offset,
Address constant_pool_address, Address handler_sp,
Address handler_fp, int num_frames_above_handler) {
// Store information to be consumed by the CEntry.
thread_local_top()->pending_handler_context_ = context;
thread_local_top()->pending_handler_entrypoint_ =
instruction_start + handler_offset;
thread_local_top()->pending_handler_constant_pool_ = constant_pool_address;
thread_local_top()->pending_handler_fp_ = handler_fp;
thread_local_top()->pending_handler_sp_ = handler_sp;
thread_local_top()->num_frames_above_pending_handler_ =
num_frames_above_handler;
// Return and clear pending exception. The contract is that:
// (1) the pending exception is stored in one place (no duplication), and
// (2) within generated-code land, that one place is the return register.
// If/when we unwind back into C++ (returning to the JSEntry stub,
// or to Execution::CallWasm), the returned exception will be sent
// back to isolate->set_pending_exception(...).
clear_pending_exception();
return exception;
};
// Special handling of termination exceptions: they are uncatchable by
// JavaScript and Wasm code, so we unwind the handlers until the top ENTRY
// handler is found.
bool catchable_by_js = is_catchable_by_javascript(exception);
if (!catchable_by_js && !context().is_null()) {
// Because the array join stack will not pop the elements when throwing the
// uncatchable terminate exception, we need to clear the array join stack to
// avoid leaving the stack in an invalid state.
// See also CycleProtectedArrayJoin.
raw_native_context().set_array_join_stack(
ReadOnlyRoots(this).undefined_value());
}
int visited_frames = 0;
#if V8_ENABLE_WEBASSEMBLY
// Iterate the chain of stack segments for wasm stack switching.
WasmContinuationObject current_stack;
if (v8_flags.experimental_wasm_stack_switching) {
current_stack =
WasmContinuationObject::cast(root(RootIndex::kActiveContinuation));
}
#endif
// Compute handler and stack unwinding information by performing a full walk
// over the stack and dispatching according to the frame type.
for (StackFrameIterator iter(this);; iter.Advance(), visited_frames++) {
#if V8_ENABLE_WEBASSEMBLY
if (v8_flags.experimental_wasm_stack_switching && iter.done()) {
// We reached the end of the current stack segment. Follow the linked-list
// of stacks to find the next frame, and perform the implicit stack
// switch.
auto stack = Managed<wasm::StackMemory>::cast(current_stack.stack());
// Mark this stack as empty.
DCHECK_EQ(stack.get()->jmpbuf()->state, wasm::JumpBuffer::Active);
stack.get()->jmpbuf()->state = wasm::JumpBuffer::Retired;
HeapObject parent = current_stack.parent();
DCHECK(!parent.IsUndefined());
current_stack = WasmContinuationObject::cast(parent);
wasm::StackMemory* parent_stack =
Managed<wasm::StackMemory>::cast(current_stack.stack()).get().get();
DCHECK_EQ(parent_stack->jmpbuf()->state, wasm::JumpBuffer::Inactive);
parent_stack->jmpbuf()->state = wasm::JumpBuffer::Active;
iter.Reset(thread_local_top(), parent_stack);
// Update the continuation and suspender state.
roots_table().slot(RootIndex::kActiveContinuation).store(current_stack);
WasmSuspenderObject suspender =
WasmSuspenderObject::cast(root(RootIndex::kActiveSuspender));
if (!suspender.parent().IsUndefined()) {
suspender.set_state(WasmSuspenderObject::State::kInactive);
auto parent_suspender = WasmSuspenderObject::cast(suspender.parent());
parent_suspender.set_state(WasmSuspenderObject::State::kActive);
// For now, assume that a suspender contains a single continuation.
// TODO(thibaudm): When core stack-switching is added, only update the
// suspender when we exit its outermost stack.
DCHECK_EQ(current_stack, parent_suspender.continuation());
}
roots_table().slot(RootIndex::kActiveSuspender).store(suspender.parent());
if (v8_flags.trace_wasm_stack_switching) {
PrintF("Switch to stack #%d (unwind)\n", parent_stack->id());
}
uintptr_t limit =
reinterpret_cast<uintptr_t>(parent_stack->jmpbuf()->stack_limit);
stack_guard()->SetStackLimit(limit);
}
#endif
// Handler must exist.
DCHECK(!iter.done());
StackFrame* frame = iter.frame();
// The debugger implements the "restart frame" feature by throwing a
// terminate exception. Check and if we need to restart `frame`,
// jump into the `RestartFrameTrampoline` builtin instead of
// a catch handler.
// Optimized frames take a detour via the deoptimizer before also jumping
// to the `RestartFrameTrampoline` builtin.
if (debug()->ShouldRestartFrame(frame->id())) {
CHECK(!catchable_by_js);
CHECK(frame->is_java_script());
if (frame->is_turbofan()) {
Code code = frame->LookupCode();
// The debugger triggers lazy deopt for the "to-be-restarted" frame
// immediately when the CDP event arrives while paused.
CHECK(code.marked_for_deoptimization());
set_deoptimizer_lazy_throw(true);
// Jump directly to the optimized frame's return address, to immediately
// fall into the deoptimizer.
const int offset =
static_cast<int>(frame->pc() - code.instruction_start());
// Compute the stack pointer from the frame pointer. This ensures that
// argument slots on the stack are dropped as returning would.
// Note: Needed by the deoptimizer to rematerialize frames.
Address return_sp = frame->fp() +
StandardFrameConstants::kFixedFrameSizeAboveFp -
code.stack_slots() * kSystemPointerSize;
return FoundHandler(Context(), code.instruction_start(), offset,
code.constant_pool(), return_sp, frame->fp(),
visited_frames);
}
DCHECK(!frame->is_maglev());
debug()->clear_restart_frame();
Code code = *BUILTIN_CODE(this, RestartFrameTrampoline);
return FoundHandler(Context(), code.instruction_start(), 0,
code.constant_pool(), kNullAddress, frame->fp(),
visited_frames);
}
switch (frame->type()) {
case StackFrame::ENTRY:
case StackFrame::CONSTRUCT_ENTRY: {
// For JSEntry frames we always have a handler.
StackHandler* handler = frame->top_handler();
// Restore the next handler.
thread_local_top()->handler_ = handler->next_address();
// Gather information from the handler.
Code code = frame->LookupCode();
HandlerTable table(code);
return FoundHandler(Context(), code.InstructionStart(this, frame->pc()),
table.LookupReturn(0), code.constant_pool(),
handler->address() + StackHandlerConstants::kSize,
0, visited_frames);
}
#if V8_ENABLE_WEBASSEMBLY
case StackFrame::C_WASM_ENTRY: {
StackHandler* handler = frame->top_handler();
thread_local_top()->handler_ = handler->next_address();
Code code = frame->LookupCode();
HandlerTable table(code);
Address instruction_start = code.instruction_start();
int return_offset = static_cast<int>(frame->pc() - instruction_start);
int handler_offset = table.LookupReturn(return_offset);
DCHECK_NE(-1, handler_offset);
// Compute the stack pointer from the frame pointer. This ensures that
// argument slots on the stack are dropped as returning would.
Address return_sp = frame->fp() +
StandardFrameConstants::kFixedFrameSizeAboveFp -
code.stack_slots() * kSystemPointerSize;
return FoundHandler(Context(), instruction_start, handler_offset,
code.constant_pool(), return_sp, frame->fp(),
visited_frames);
}
case StackFrame::WASM: {
if (!is_catchable_by_wasm(exception)) break;
// For WebAssembly frames we perform a lookup in the handler table.
// This code ref scope is here to avoid a check failure when looking up
// the code. It's not actually necessary to keep the code alive as it's
// currently being executed.
wasm::WasmCodeRefScope code_ref_scope;
WasmFrame* wasm_frame = static_cast<WasmFrame*>(frame);
wasm::WasmCode* wasm_code =
wasm::GetWasmCodeManager()->LookupCode(frame->pc());
int offset = wasm_frame->LookupExceptionHandlerInTable();
if (offset < 0) break;
wasm::GetWasmEngine()->SampleCatchEvent(this);
// Compute the stack pointer from the frame pointer. This ensures that
// argument slots on the stack are dropped as returning would.
Address return_sp = frame->fp() +
StandardFrameConstants::kFixedFrameSizeAboveFp -
wasm_code->stack_slots() * kSystemPointerSize;
// This is going to be handled by WebAssembly, so we need to set the TLS
// flag. The {SetThreadInWasmFlagScope} will set the flag after all
// destructors have been executed.
set_thread_in_wasm_flag_scope.Enable();
return FoundHandler(Context(), wasm_code->instruction_start(), offset,
wasm_code->constant_pool(), return_sp, frame->fp(),
visited_frames);
}
case StackFrame::WASM_LIFTOFF_SETUP: {
// The WasmLiftoffFrameSetup builtin doesn't throw, and doesn't call
// out to user code that could throw.
UNREACHABLE();
}
case StackFrame::WASM_TO_JS:
if (v8_flags.experimental_wasm_stack_switching) {
// Decrement the Wasm-to-JS counter.
Object suspender_obj = root(RootIndex::kActiveSuspender);
if (!suspender_obj.IsUndefined()) {
WasmSuspenderObject suspender =
WasmSuspenderObject::cast(suspender_obj);
int wasm_to_js_counter = suspender.wasm_to_js_counter();
DCHECK_LT(0, wasm_to_js_counter);
suspender.set_wasm_to_js_counter(wasm_to_js_counter - 1);
}
}
break;
#endif // V8_ENABLE_WEBASSEMBLY
case StackFrame::MAGLEV:
case StackFrame::TURBOFAN: {
// For optimized frames we perform a lookup in the handler table.
if (!catchable_by_js) break;
OptimizedFrame* opt_frame = static_cast<OptimizedFrame*>(frame);
int offset = opt_frame->LookupExceptionHandlerInTable(nullptr, nullptr);
if (offset < 0) break;
// The code might be optimized code or a turbofanned builtin.
Code code = frame->LookupCode();
// Compute the stack pointer from the frame pointer. This ensures
// that argument slots on the stack are dropped as returning would.
Address return_sp = frame->fp() +
StandardFrameConstants::kFixedFrameSizeAboveFp -
code.stack_slots() * kSystemPointerSize;
// TODO(bmeurer): Turbofanned BUILTIN frames appear as TURBOFAN,
// but do not have a code kind of TURBOFAN.
if (CodeKindCanDeoptimize(code.kind()) &&
code.marked_for_deoptimization()) {
// If the target code is lazy deoptimized, we jump to the original
// return address, but we make a note that we are throwing, so
// that the deoptimizer can do the right thing.
offset = static_cast<int>(frame->pc() - code.instruction_start());
set_deoptimizer_lazy_throw(true);
}
return FoundHandler(Context(), code.InstructionStart(this, frame->pc()),
offset, code.constant_pool(), return_sp,
frame->fp(), visited_frames);
}
case StackFrame::STUB: {
// Some stubs are able to handle exceptions.
if (!catchable_by_js) break;
StubFrame* stub_frame = static_cast<StubFrame*>(frame);
#if defined(DEBUG) && V8_ENABLE_WEBASSEMBLY
wasm::WasmCodeRefScope code_ref_scope;
DCHECK_NULL(wasm::GetWasmCodeManager()->LookupCode(frame->pc()));
#endif // defined(DEBUG) && V8_ENABLE_WEBASSEMBLY
// The code might be a dynamically generated stub or a turbofanned
// embedded builtin.
Code code = stub_frame->LookupCode();
if (code.kind() != CodeKind::BUILTIN || !code.is_turbofanned() ||
!code.has_handler_table()) {
break;
}
int offset = stub_frame->LookupExceptionHandlerInTable();
if (offset < 0) break;
// Compute the stack pointer from the frame pointer. This ensures
// that argument slots on the stack are dropped as returning would.
Address return_sp = frame->fp() +
StandardFrameConstants::kFixedFrameSizeAboveFp -
code.stack_slots() * kSystemPointerSize;
return FoundHandler(Context(), code.InstructionStart(this, frame->pc()),
offset, code.constant_pool(), return_sp,
frame->fp(), visited_frames);
}
case StackFrame::INTERPRETED:
case StackFrame::BASELINE: {
// For interpreted frames we perform a range lookup in the handler table.
if (!catchable_by_js) break;
UnoptimizedFrame* js_frame = UnoptimizedFrame::cast(frame);
int register_slots = UnoptimizedFrameConstants::RegisterStackSlotCount(
js_frame->GetBytecodeArray().register_count());
int context_reg = 0; // Will contain register index holding context.
int offset =
js_frame->LookupExceptionHandlerInTable(&context_reg, nullptr);
if (offset < 0) break;
// Compute the stack pointer from the frame pointer. This ensures that
// argument slots on the stack are dropped as returning would.
// Note: This is only needed for interpreted frames that have been
// materialized by the deoptimizer. If there is a handler frame
// in between then {frame->sp()} would already be correct.
Address return_sp = frame->fp() -
InterpreterFrameConstants::kFixedFrameSizeFromFp -
register_slots * kSystemPointerSize;
// Patch the bytecode offset in the interpreted frame to reflect the
// position of the exception handler. The special builtin below will
// take care of continuing to dispatch at that position. Also restore
// the correct context for the handler from the interpreter register.
Context context =
Context::cast(js_frame->ReadInterpreterRegister(context_reg));
DCHECK(context.IsContext());
if (frame->is_baseline()) {
BaselineFrame* sp_frame = BaselineFrame::cast(js_frame);
Code code = sp_frame->LookupCode();
intptr_t pc_offset = sp_frame->GetPCForBytecodeOffset(offset);
// Patch the context register directly on the frame, so that we don't
// need to have a context read + write in the baseline code.
sp_frame->PatchContext(context);
return FoundHandler(Context(), code.instruction_start(), pc_offset,
code.constant_pool(), return_sp, sp_frame->fp(),
visited_frames);
} else {
InterpretedFrame::cast(js_frame)->PatchBytecodeOffset(
static_cast<int>(offset));
Code code = *BUILTIN_CODE(this, InterpreterEnterAtBytecode);
// We subtract a frame from visited_frames because otherwise the
// shadow stack will drop the underlying interpreter entry trampoline
// in which the handler runs.
//
// An interpreted frame cannot be the first frame we look at
// because at a minimum, an exit frame into C++ has to separate
// it from the context in which this C++ code runs.
CHECK_GE(visited_frames, 1);
return FoundHandler(context, code.instruction_start(), 0,
code.constant_pool(), return_sp, frame->fp(),
visited_frames - 1);
}
}
case StackFrame::BUILTIN:
// For builtin frames we are guaranteed not to find a handler.
if (catchable_by_js) {
CHECK_EQ(-1, BuiltinFrame::cast(frame)->LookupExceptionHandlerInTable(
nullptr, nullptr));
}
break;
case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH: {
// Builtin continuation frames with catch can handle exceptions.
if (!catchable_by_js) break;
JavaScriptBuiltinContinuationWithCatchFrame* js_frame =
JavaScriptBuiltinContinuationWithCatchFrame::cast(frame);
js_frame->SetException(exception);
// Reconstruct the stack pointer from the frame pointer.
Address return_sp = js_frame->fp() - js_frame->GetSPToFPDelta();
Code code = js_frame->LookupCode();
return FoundHandler(Context(), code.instruction_start(), 0,
code.constant_pool(), return_sp, frame->fp(),
visited_frames);
}
default:
// All other frame types cannot handle exceptions.
break;
}
if (frame->is_turbofan()) {
// Remove per-frame stored materialized objects.
bool removed = materialized_object_store_->Remove(frame->fp());
USE(removed);
// If there were any materialized objects, the code should be
// marked for deopt.
DCHECK_IMPLIES(removed, frame->LookupCode().marked_for_deoptimization());
}
}
UNREACHABLE();
}
namespace {
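// Returns the PROMISE catch prediction for builtins in the promise rejection
// prediction list; all other builtins are treated as UNCAUGHT.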
HandlerTable::CatchPrediction CatchPredictionFor(Builtin builtin_id) {
switch (builtin_id) {
#define CASE(Name) \
case Builtin::k##Name: \
return HandlerTable::PROMISE;
BUILTIN_PROMISE_REJECTION_PREDICTION_LIST(CASE)
#undef CASE
default:
return HandlerTable::UNCAUGHT;
}
}
HandlerTable::CatchPrediction PredictException(JavaScriptFrame* frame) {
HandlerTable::CatchPrediction prediction;
if (frame->is_optimized()) {
if (frame->LookupExceptionHandlerInTable(nullptr, nullptr) > 0) {
// This optimized frame will catch. Its handler table does not include
// exception prediction, and we need to use the corresponding handler
// tables on the unoptimized code objects.
std::vector<FrameSummary> summaries;
frame->Summarize(&summaries);
PtrComprCageBase cage_base(frame->isolate());
for (size_t i = summaries.size(); i != 0; i--) {
const FrameSummary& summary = summaries[i - 1];
Handle<AbstractCode> code = summary.AsJavaScript().abstract_code();
if (code->kind(cage_base) == CodeKind::BUILTIN) {
auto prediction = CatchPredictionFor(code->GetCode().builtin_id());
if (prediction == HandlerTable::UNCAUGHT) continue;
return prediction;
}
// Must have been constructed from a bytecode array.
CHECK_EQ(CodeKind::INTERPRETED_FUNCTION, code->kind(cage_base));
int code_offset = summary.code_offset();
HandlerTable table(code->GetBytecodeArray());
int index = table.LookupRange(code_offset, nullptr, &prediction);
if (index <= 0) continue;
if (prediction == HandlerTable::UNCAUGHT) continue;
return prediction;
}
}
} else if (frame->LookupExceptionHandlerInTable(nullptr, &prediction) > 0) {
return prediction;
}
return HandlerTable::UNCAUGHT;
}
Isolate::CatchType ToCatchType(HandlerTable::CatchPrediction prediction) {
switch (prediction) {
case HandlerTable::UNCAUGHT:
return Isolate::NOT_CAUGHT;
case HandlerTable::CAUGHT:
return Isolate::CAUGHT_BY_JAVASCRIPT;
case HandlerTable::PROMISE:
return Isolate::CAUGHT_BY_PROMISE;
case HandlerTable::UNCAUGHT_ASYNC_AWAIT:
case HandlerTable::ASYNC_AWAIT:
return Isolate::CAUGHT_BY_ASYNC_AWAIT;
default:
UNREACHABLE();
}
}
} // anonymous namespace
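// Predicts whether, and by what kind of handler, a thrown exception would be
// caught by walking the stack and consulting handler tables and builtin catch
// predictions, without actually unwinding any frames.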
Isolate::CatchType Isolate::PredictExceptionCatcher() {
Address external_handler = thread_local_top()->try_catch_handler_address();
if (TopExceptionHandlerType(Object()) ==
ExceptionHandlerType::kExternalTryCatch) {
return CAUGHT_BY_EXTERNAL;
}
// Search for an exception handler by performing a full walk over the stack.
for (StackFrameIterator iter(this); !iter.done(); iter.Advance()) {
StackFrame* frame = iter.frame();
switch (frame->type()) {
case StackFrame::ENTRY:
case StackFrame::CONSTRUCT_ENTRY: {
Address entry_handler = frame->top_handler()->next_address();
// The exception has been externally caught if and only if there is an
// external handler which is on top of the top-most JS_ENTRY handler.
if (external_handler != kNullAddress &&
!try_catch_handler()->is_verbose_) {
if (entry_handler == kNullAddress ||
entry_handler > external_handler) {
return CAUGHT_BY_EXTERNAL;
}
}
} break;
// For JavaScript frames we perform a lookup in the handler table.
case StackFrame::INTERPRETED:
case StackFrame::BASELINE:
case StackFrame::TURBOFAN:
case StackFrame::MAGLEV:
case StackFrame::BUILTIN: {
JavaScriptFrame* js_frame = JavaScriptFrame::cast(frame);
Isolate::CatchType prediction = ToCatchType(PredictException(js_frame));
if (prediction == NOT_CAUGHT) break;
return prediction;
}
case StackFrame::STUB: {
base::Optional<Code> code = frame->LookupCode();
if (code->kind() != CodeKind::BUILTIN || !code->has_handler_table() ||
!code->is_turbofanned()) {
break;
}
auto prediction = ToCatchType(CatchPredictionFor(code->builtin_id()));
if (prediction != NOT_CAUGHT) return prediction;
break;
}
case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH: {
base::Optional<Code> code = frame->LookupCode();
auto prediction = ToCatchType(CatchPredictionFor(code->builtin_id()));
if (prediction != NOT_CAUGHT) return prediction;
break;
}
default:
// All other frame types cannot handle exceptions.
break;
}
}
// Handler not found.
return NOT_CAUGHT;
}
Object Isolate::ThrowIllegalOperation() {
if (v8_flags.stack_trace_on_illegal) PrintStack(stdout);
return Throw(ReadOnlyRoots(heap()).illegal_access_string());
}
void Isolate::ScheduleThrow(Object exception) {
// When scheduling a throw we first throw the exception so that error
// reporting happens if it is uncaught, before rescheduling it.
Throw(exception);
PropagatePendingExceptionToExternalTryCatch(
TopExceptionHandlerType(pending_exception()));
if (has_pending_exception()) {
set_scheduled_exception(pending_exception());
thread_local_top()->external_caught_exception_ = false;
clear_pending_exception();
}
}
void Isolate::RestorePendingMessageFromTryCatch(v8::TryCatch* handler) {
DCHECK(handler == try_catch_handler());
DCHECK(handler->HasCaught());
DCHECK(handler->rethrow_);
DCHECK(handler->capture_message_);
Object message(reinterpret_cast<Address>(handler->message_obj_));
DCHECK(message.IsJSMessageObject() || message.IsTheHole(this));
set_pending_message(message);
}
void Isolate::CancelScheduledExceptionFromTryCatch(v8::TryCatch* handler) {
DCHECK(has_scheduled_exception());
if (reinterpret_cast<void*>(scheduled_exception().ptr()) ==
handler->exception_) {
DCHECK_IMPLIES(v8_flags.strict_termination_checks,
!is_execution_terminating());
clear_scheduled_exception();
} else {
DCHECK_IMPLIES(v8_flags.strict_termination_checks,
is_execution_terminating());
// Clear termination once we have returned from all V8 frames.
if (thread_local_top()->CallDepthIsZero()) {
thread_local_top()->external_caught_exception_ = false;
clear_scheduled_exception();
}
}
if (reinterpret_cast<void*>(thread_local_top()->pending_message_.ptr()) ==
handler->message_obj_) {
clear_pending_message();
}
}
Object Isolate::PromoteScheduledException() {
Object thrown = scheduled_exception();
clear_scheduled_exception();
// Re-throw the exception to avoid getting repeated error reporting.
return ReThrow(thrown);
}
void Isolate::PrintCurrentStackTrace(std::ostream& out) {
Handle<FixedArray> frames = CaptureSimpleStackTrace(
this, FixedArray::kMaxLength, SKIP_NONE, factory()->undefined_value());
IncrementalStringBuilder builder(this);
for (int i = 0; i < frames->length(); ++i) {
Handle<CallSiteInfo> frame(CallSiteInfo::cast(frames->get(i)), this);
SerializeCallSiteInfo(this, frame, &builder);
}
Handle<String> stack_trace = builder.Finish().ToHandleChecked();
stack_trace->PrintOn(out);
}
bool Isolate::ComputeLocation(MessageLocation* target) {
DebuggableStackFrameIterator it(this);
if (it.done()) return false;
// Compute the location from the function and the relocation info of the
// baseline code. For optimized code this will use the deoptimization
// information to get canonical location information.
#if V8_ENABLE_WEBASSEMBLY
wasm::WasmCodeRefScope code_ref_scope;
#endif // V8_ENABLE_WEBASSEMBLY
FrameSummary summary = it.GetTopValidFrame();
Handle<SharedFunctionInfo> shared;
Handle<Object> script = summary.script();
if (!script->IsScript() || Script::cast(*script).source().IsUndefined(this)) {
return false;
}
if (summary.IsJavaScript()) {
shared = handle(summary.AsJavaScript().function()->shared(), this);
}
if (summary.AreSourcePositionsAvailable()) {
int pos = summary.SourcePosition();
*target =
MessageLocation(Handle<Script>::cast(script), pos, pos + 1, shared);
} else {
*target = MessageLocation(Handle<Script>::cast(script), shared,
summary.code_offset());
}
return true;
}
bool Isolate::ComputeLocationFromException(MessageLocation* target,
Handle<Object> exception) {
if (!exception->IsJSObject()) return false;
Handle<Name> start_pos_symbol = factory()->error_start_pos_symbol();
Handle<Object> start_pos = JSReceiver::GetDataProperty(
this, Handle<JSObject>::cast(exception), start_pos_symbol);
if (!start_pos->IsSmi()) return false;
int start_pos_value = Handle<Smi>::cast(start_pos)->value();
Handle<Name> end_pos_symbol = factory()->error_end_pos_symbol();
Handle<Object> end_pos = JSReceiver::GetDataProperty(
this, Handle<JSObject>::cast(exception), end_pos_symbol);
if (!end_pos->IsSmi()) return false;
int end_pos_value = Handle<Smi>::cast(end_pos)->value();
Handle<Name> script_symbol = factory()->error_script_symbol();
Handle<Object> script = JSReceiver::GetDataProperty(
this, Handle<JSObject>::cast(exception), script_symbol);
if (!script->IsScript()) return false;
Handle<Script> cast_script(Script::cast(*script), this);
*target = MessageLocation(cast_script, start_pos_value, end_pos_value);
return true;
}
bool Isolate::ComputeLocationFromSimpleStackTrace(MessageLocation* target,
Handle<Object> exception) {
if (!exception->IsJSReceiver()) {
return false;
}
Handle<FixedArray> call_site_infos =
GetSimpleStackTrace(Handle<JSReceiver>::cast(exception));
for (int i = 0; i < call_site_infos->length(); ++i) {
Handle<CallSiteInfo> call_site_info(
CallSiteInfo::cast(call_site_infos->get(i)), this);
if (CallSiteInfo::ComputeLocation(call_site_info, target)) {
return true;
}
}
return false;
}
bool Isolate::ComputeLocationFromDetailedStackTrace(MessageLocation* target,
Handle<Object> exception) {
if (!exception->IsJSReceiver()) return false;
Handle<FixedArray> stack_frame_infos =
GetDetailedStackTrace(Handle<JSReceiver>::cast(exception));
if (stack_frame_infos.is_null() || stack_frame_infos->length() == 0) {
return false;
}
Handle<StackFrameInfo> info(StackFrameInfo::cast(stack_frame_infos->get(0)),
this);
const int pos = StackFrameInfo::GetSourcePosition(info);
*target = MessageLocation(handle(info->script(), this), pos, pos + 1);
return true;
}
Handle<JSMessageObject> Isolate::CreateMessage(Handle<Object> exception,
MessageLocation* location) {
Handle<FixedArray> stack_trace_object;
if (capture_stack_trace_for_uncaught_exceptions_) {
if (exception->IsJSError()) {
// We fetch the stack trace that corresponds to this error object.
// If the lookup fails, the exception is probably not a valid Error
// object. In that case, we fall through and capture the stack trace
// at this throw site.
stack_trace_object =
GetDetailedStackTrace(Handle<JSObject>::cast(exception));
}
if (stack_trace_object.is_null()) {
// Not an error object; we capture the stack and location at the throw site.
stack_trace_object = CaptureDetailedStackTrace(
stack_trace_for_uncaught_exceptions_frame_limit_,
stack_trace_for_uncaught_exceptions_options_);
}
}
MessageLocation computed_location;
if (location == nullptr &&
(ComputeLocationFromException(&computed_location, exception) ||
ComputeLocationFromSimpleStackTrace(&computed_location, exception) ||
ComputeLocation(&computed_location))) {
location = &computed_location;
}
return MessageHandler::MakeMessageObject(
this, MessageTemplate::kUncaughtException, location, exception,
stack_trace_object);
}
Handle<JSMessageObject> Isolate::CreateMessageFromException(
Handle<Object> exception) {
Handle<FixedArray> stack_trace_object;
if (exception->IsJSError()) {
stack_trace_object =
GetDetailedStackTrace(Handle<JSObject>::cast(exception));
}
MessageLocation* location = nullptr;
MessageLocation computed_location;
if (ComputeLocationFromException(&computed_location, exception) ||
ComputeLocationFromDetailedStackTrace(&computed_location, exception)) {
location = &computed_location;
}
return MessageHandler::MakeMessageObject(
this, MessageTemplate::kPlaceholderOnly, location, exception,
stack_trace_object);
}
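// Determines which handler is closest to the top of the stack for the given
// exception: a JavaScript handler, an external v8::TryCatch, or none at all.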
Isolate::ExceptionHandlerType Isolate::TopExceptionHandlerType(
Object exception) {
DCHECK_NE(ReadOnlyRoots(heap()).the_hole_value(), exception);
Address js_handler = Isolate::handler(thread_local_top());
Address external_handler = thread_local_top()->try_catch_handler_address();
// A handler cannot be on top if it doesn't exist. For uncatchable exceptions,
// the JavaScript handler cannot be on top.
if (js_handler == kNullAddress || !is_catchable_by_javascript(exception)) {
if (external_handler == kNullAddress) {
return ExceptionHandlerType::kNone;
}
return ExceptionHandlerType::kExternalTryCatch;
}
if (external_handler == kNullAddress) {
return ExceptionHandlerType::kJavaScriptHandler;
}
// The exception has been externally caught if and only if there is an
// external handler which is on top of the top-most JS_ENTRY handler.
//
// Note that finally clauses would re-throw an exception unless it's aborted
// by jumps in control flow (like return, break, etc.), in which case we'll
// have another chance to set a proper v8::TryCatch later.
DCHECK_NE(kNullAddress, external_handler);
DCHECK_NE(kNullAddress, js_handler);
if (external_handler < js_handler) {
return ExceptionHandlerType::kExternalTryCatch;
}
return ExceptionHandlerType::kJavaScriptHandler;
}
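// The code-pages vector is published with release semantics and read with
// acquire semantics so that concurrent readers (e.g. a sampling profiler on
// another thread) observe a consistent snapshot.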
std::vector<MemoryRange>* Isolate::GetCodePages() const {
return code_pages_.load(std::memory_order_acquire);
}
void Isolate::SetCodePages(std::vector<MemoryRange>* new_code_pages) {
code_pages_.store(new_code_pages, std::memory_order_release);
}
void Isolate::ReportPendingMessages() {
DCHECK(AllowExceptions::IsAllowed(this));
// The embedder might run script in response to an exception.
AllowJavascriptExecutionDebugOnly allow_script(this);
Object exception_obj = pending_exception();
ExceptionHandlerType top_handler = TopExceptionHandlerType(exception_obj);
// Try to propagate the exception to an external v8::TryCatch handler. If
// propagation was unsuccessful, then we will get another chance at reporting
// the pending message if the exception is re-thrown.
bool has_been_propagated =
PropagatePendingExceptionToExternalTryCatch(top_handler);
if (!has_been_propagated) return;
// Clear the pending message object early to avoid endless recursion.
Object message_obj = pending_message();
clear_pending_message();
// For uncatchable exceptions we do nothing. If needed, the exception and the
// message have already been propagated to v8::TryCatch.
if (!is_catchable_by_javascript(exception_obj)) return;
// Determine whether the message needs to be reported to all message handlers
// depending on whether the topmost external v8::TryCatch is verbose. We know
// there's no JavaScript handler on top; if there was, we would've returned
// early.
DCHECK_NE(ExceptionHandlerType::kJavaScriptHandler, top_handler);
bool should_report_exception;
if (top_handler == ExceptionHandlerType::kExternalTryCatch) {
should_report_exception = try_catch_handler()->is_verbose_;
} else {
should_report_exception = true;
}
// Actually report the pending message to all message handlers.
if (!message_obj.IsTheHole(this) && should_report_exception) {
HandleScope scope(this);
Handle<JSMessageObject> message(JSMessageObject::cast(message_obj), this);
Handle<Object> exception(exception_obj, this);
Handle<Script> script(message->script(), this);
// Clear the exception and restore it afterwards, otherwise
// CollectSourcePositions will abort.
clear_pending_exception();
JSMessageObject::EnsureSourcePositionsAvailable(this, message);
set_pending_exception(*exception);
int start_pos = message->GetStartPosition();
int end_pos = message->GetEndPosition();
MessageLocation location(script, start_pos, end_pos);
MessageHandler::ReportMessage(this, &location, message);
}
}
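// Clears the pending exception when requested, or when it was externally
// caught with no JavaScript frames below the external handler; otherwise
// reschedules it as a scheduled exception. Returns true iff rescheduled.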
bool Isolate::OptionalRescheduleException(bool clear_exception) {
DCHECK(has_pending_exception());
PropagatePendingExceptionToExternalTryCatch(
TopExceptionHandlerType(pending_exception()));
if (is_execution_termination_pending()) {
if (clear_exception) {
thread_local_top()->external_caught_exception_ = false;
clear_pending_exception();
return false;
}
} else if (thread_local_top()->external_caught_exception_) {
// If the exception is externally caught, clear it if there are no
// JavaScript frames on the way to the C++ frame that has the
// external handler.
DCHECK_NE(thread_local_top()->try_catch_handler_address(), kNullAddress);
Address external_handler_address =
thread_local_top()->try_catch_handler_address();
JavaScriptStackFrameIterator it(this);
if (it.done() || (it.frame()->sp() > external_handler_address)) {
clear_exception = true;
}
}
// Clear the exception if needed.
if (clear_exception) {
thread_local_top()->external_caught_exception_ = false;
clear_pending_exception();
return false;
}
// Reschedule the exception.
set_scheduled_exception(pending_exception());
clear_pending_exception();
return true;
}
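// The functions below maintain a stack of promises currently being handled,
// stored in the debugger's thread-local state.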
void Isolate::PushPromise(Handle<JSObject> promise) {
Handle<Object> promise_on_stack(debug()->thread_local_.promise_stack_, this);
promise_on_stack = factory()->NewPromiseOnStack(promise_on_stack, promise);
debug()->thread_local_.promise_stack_ = *promise_on_stack;
}
void Isolate::PopPromise() {
if (!IsPromiseStackEmpty()) {
debug()->thread_local_.promise_stack_ =
PromiseOnStack::cast(debug()->thread_local_.promise_stack_).prev();
}
}
bool Isolate::IsPromiseStackEmpty() const {
DCHECK_IMPLIES(!debug()->thread_local_.promise_stack_.IsSmi(),
debug()->thread_local_.promise_stack_.IsPromiseOnStack());
return debug()->thread_local_.promise_stack_.IsSmi();
}
namespace {
bool PromiseIsRejectHandler(Isolate* isolate, Handle<JSReceiver> handler) {
// Recurse to the forwarding Promise (e.g. return false) due to
// - await reaction forwarding to the throwaway Promise, which has
// a dependency edge to the outer Promise.
// - PromiseIdResolveHandler forwarding to the output of .then
// - Promise.all/Promise.race forwarding to a throwaway Promise, which
// has a dependency edge to the generated outer Promise.
// Otherwise, this is a real reject handler for the Promise.
Handle<Symbol> key = isolate->factory()->promise_forwarding_handler_symbol();
Handle<Object> forwarding_handler =
JSReceiver::GetDataProperty(isolate, handler, key);
return forwarding_handler->IsUndefined(isolate);
}
bool PromiseHasUserDefinedRejectHandlerInternal(Isolate* isolate,
Handle