blob: b079f7ecb2f3c0f08ff1bdcc72d660640c2dea5a [file] [log] [blame]
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/execution/frames.h"
#include <cstdint>
#include <memory>
#include <sstream>
#include "src/base/bits.h"
#include "src/codegen/interface-descriptors.h"
#include "src/codegen/macro-assembler.h"
#include "src/codegen/maglev-safepoint-table.h"
#include "src/codegen/register-configuration.h"
#include "src/codegen/safepoint-table.h"
#include "src/common/globals.h"
#include "src/deoptimizer/deoptimizer.h"
#include "src/execution/arguments.h"
#include "src/execution/frame-constants.h"
#include "src/execution/frames-inl.h"
#include "src/execution/vm-state-inl.h"
#include "src/ic/ic-stats.h"
#include "src/logging/counters.h"
#include "src/objects/code.h"
#include "src/objects/slots.h"
#include "src/objects/smi.h"
#include "src/objects/visitors.h"
#include "src/snapshot/embedded/embedded-data-inl.h"
#include "src/strings/string-stream.h"
#include "src/zone/zone-containers.h"
#if V8_ENABLE_WEBASSEMBLY
#include "src/debug/debug-wasm-objects.h"
#include "src/wasm/stacks.h"
#include "src/wasm/wasm-code-manager.h"
#include "src/wasm/wasm-engine.h"
#include "src/wasm/wasm-objects-inl.h"
#endif // V8_ENABLE_WEBASSEMBLY
namespace v8 {
namespace internal {
// Process-wide resolver for stored return-address slots; installed at most
// once via StackFrame::SetReturnAddressLocationResolver.
ReturnAddressLocationResolver StackFrame::return_address_location_resolver_ =
    nullptr;
namespace {

// Returns the stack address represented by {handler}, compensating for
// ASan's fake-stack relocation when necessary.
Address AddressOf(const StackHandler* handler) {
  const Address handler_address = handler->address();
#ifdef V8_USE_ADDRESS_SANITIZER
  // ASan puts C++-allocated StackHandler markers onto its fake stack.
  // We work around that by storing the real stack address in the "padding"
  // field. StackHandlers allocated from generated code have 0 as padding.
  const Address real_address = base::Memory<Address>(
      handler_address + StackHandlerConstants::kPaddingOffset);
  if (real_address != 0) return real_address;
#endif
  return handler_address;
}

}  // namespace
// Iterator that supports traversing the stack handlers of a
// particular frame. Needs to know the top of the handler chain.
class StackHandlerIterator {
public:
StackHandlerIterator(const StackFrame* frame, StackHandler* handler)
: limit_(frame->fp()), handler_(handler) {
#if V8_ENABLE_WEBASSEMBLY
// Make sure the handler has already been unwound to this frame. With stack
// switching this is not equivalent to the inequality below, because the
// frame and the handler could be in different stacks.
DCHECK_IMPLIES(!v8_flags.experimental_wasm_stack_switching,
frame->sp() <= AddressOf(handler));
// For CWasmEntry frames, the handler was registered by the last C++
// frame (Execution::CallWasm), so even though its address is already
// beyond the limit, we know we always want to unwind one handler.
if (frame->is_c_wasm_entry()) handler_ = handler_->next();
#else
// Make sure the handler has already been unwound to this frame.
DCHECK_LE(frame->sp(), AddressOf(handler));
#endif // V8_ENABLE_WEBASSEMBLY
}
StackHandler* handler() const { return handler_; }
bool done() { return handler_ == nullptr || AddressOf(handler_) > limit_; }
void Advance() {
DCHECK(!done());
handler_ = handler_->next();
}
private:
const Address limit_;
StackHandler* handler_;
};
// -------------------------------------------------------------------------
// The iterator owns one pre-allocated singleton frame object per frame type
// (one member per STACK_FRAME_TYPE_LIST entry); iteration reuses these
// singletons instead of allocating frames.
#define INITIALIZE_SINGLETON(type, field) field##_(this),
StackFrameIteratorBase::StackFrameIteratorBase(Isolate* isolate)
    : isolate_(isolate),
      STACK_FRAME_TYPE_LIST(INITIALIZE_SINGLETON) frame_(nullptr),
      handler_(nullptr) {}
#undef INITIALIZE_SINGLETON
// Convenience constructor: iterate the current thread's stack.
StackFrameIterator::StackFrameIterator(Isolate* isolate)
    : StackFrameIterator(isolate, isolate->thread_local_top()) {}
// Iterates the stack described by the given ThreadLocalTop.
StackFrameIterator::StackFrameIterator(Isolate* isolate, ThreadLocalTop* t)
    : StackFrameIteratorBase(isolate) {
  Reset(t);
}
#if V8_ENABLE_WEBASSEMBLY
// Iterates a wasm stack segment (used with wasm stack switching).
StackFrameIterator::StackFrameIterator(Isolate* isolate,
                                       wasm::StackMemory* stack)
    : StackFrameIteratorBase(isolate) {
  Reset(isolate->thread_local_top(), stack);
}
#endif
// Moves the iterator to the caller frame, unwinding the stack handlers that
// belong to the frame being left behind.
void StackFrameIterator::Advance() {
  DCHECK(!done());
  // Compute the state of the calling frame before restoring
  // callee-saved registers and unwinding handlers. This allows the
  // frame code that computes the caller state to access the top
  // handler and the value of any callee-saved register if needed.
  StackFrame::State state;
  StackFrame::Type type = frame_->GetCallerState(&state);

  // Unwind handlers corresponding to the current frame.
  StackHandlerIterator it(frame_, handler_);
  while (!it.done()) it.Advance();
  handler_ = it.handler();

  // Advance to the calling frame.
  frame_ = SingletonFor(type, &state);

  // When we're done iterating over the stack frames, the handler
  // chain must have been completely unwound. Except for wasm stack-switching:
  // we stop at the end of the current segment.
#if V8_ENABLE_WEBASSEMBLY
  DCHECK_IMPLIES(done() && !v8_flags.experimental_wasm_stack_switching,
                 handler_ == nullptr);
#else
  DCHECK_IMPLIES(done(), handler_ == nullptr);
#endif
}
// Re-classifies the current frame from its saved state and swaps in the
// singleton for the (possibly different) type, without advancing.
StackFrame* StackFrameIterator::Reframe() {
  StackFrame::State* const state = &frame_->state_;
  frame_ = SingletonFor(ComputeStackFrameType(state), state);
  return frame();
}
// Points the iterator at the outermost frame of the given thread, as recorded
// by the last C entry frame pointer, and resets the handler chain.
void StackFrameIterator::Reset(ThreadLocalTop* top) {
  StackFrame::State state;
  StackFrame::Type type =
      ExitFrame::GetStateForFramePointer(Isolate::c_entry_fp(top), &state);
  handler_ = StackHandler::FromAddress(Isolate::handler(top));
  frame_ = SingletonFor(type, &state);
}
#if V8_ENABLE_WEBASSEMBLY
// Points the iterator at the top of the given wasm stack segment. A retired
// stack (already switched away from) has nothing left to iterate.
void StackFrameIterator::Reset(ThreadLocalTop* top, wasm::StackMemory* stack) {
  if (stack->jmpbuf()->state == wasm::JumpBuffer::Retired) {
    return;
  }
  StackFrame::State state;
  StackSwitchFrame::GetStateForJumpBuffer(stack->jmpbuf(), &state);
  handler_ = StackHandler::FromAddress(Isolate::handler(top));
  frame_ = SingletonFor(StackFrame::STACK_SWITCH, &state);
}
#endif
// Looks up the singleton frame object for {type} and, when one exists, seeds
// it with {state}. Returns nullptr exactly for NO_FRAME_TYPE.
StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type,
                                                 StackFrame::State* state) {
  StackFrame* frame = SingletonFor(type);
  DCHECK((!frame) == (type == StackFrame::NO_FRAME_TYPE));
  if (frame != nullptr) frame->state_ = *state;
  return frame;
}
// Returns the pre-allocated singleton frame member for {type}; nullptr for
// NO_FRAME_TYPE and for any marker value outside the known type list.
StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type) {
#define FRAME_TYPE_CASE(type, field) \
  case StackFrame::type:             \
    return &field##_;
  switch (type) {
    case StackFrame::NO_FRAME_TYPE:
      return nullptr;
      STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
    default:
      break;
  }
  return nullptr;
#undef FRAME_TYPE_CASE
}
// -------------------------------------------------------------------------
// Visits this frame's GC roots: the expression stack plus the on-stack pc
// (which may need rewriting if the code object moves).
void TypedFrameWithJSLinkage::Iterate(RootVisitor* v) const {
  IterateExpressions(v);
  IteratePc(v, pc_address(), constant_pool_address(), GcSafeLookupCode());
}
// -------------------------------------------------------------------------
// Advances past at least one frame, stopping at the next JavaScript frame
// (or when iteration is exhausted).
void JavaScriptStackFrameIterator::Advance() {
  for (iterator_.Advance(); !iterator_.done(); iterator_.Advance()) {
    if (iterator_.frame()->is_java_script()) return;
  }
}
// -------------------------------------------------------------------------
// Starts at the top of the stack and, if needed, skips ahead to the first
// frame the debugger can meaningfully inspect.
DebuggableStackFrameIterator::DebuggableStackFrameIterator(Isolate* isolate)
    : iterator_(isolate) {
  if (!done() && !IsValidFrame(iterator_.frame())) Advance();
}
// Positions the iterator at the debuggable frame with the given id, or at the
// end of iteration if no such frame exists.
DebuggableStackFrameIterator::DebuggableStackFrameIterator(Isolate* isolate,
                                                           StackFrameId id)
    : DebuggableStackFrameIterator(isolate) {
  while (!done() && frame()->id() != id) Advance();
}
// Skips forward (at least one frame) to the next debuggable frame.
void DebuggableStackFrameIterator::Advance() {
  for (iterator_.Advance(); !done(); iterator_.Advance()) {
    if (IsValidFrame(iterator_.frame())) return;
  }
}
int DebuggableStackFrameIterator::FrameFunctionCount() const {
DCHECK(!done());
if (!iterator_.frame()->is_optimized()) return 1;
std::vector<SharedFunctionInfo> infos;
TurbofanFrame::cast(iterator_.frame())->GetFunctions(&infos);
return static_cast<int>(infos.size());
}
FrameSummary DebuggableStackFrameIterator::GetTopValidFrame() const {
  DCHECK(!done());
  // Like FrameSummary::GetTop, but additionally observes
  // DebuggableStackFrameIterator filtering semantics.
  std::vector<FrameSummary> frames;
  frame()->Summarize(&frames);
  if (is_javascript()) {
    // Walk from the top (last summary) downwards to the first summary the
    // debugger cares about.
    for (auto it = frames.rbegin(); it != frames.rend(); ++it) {
      if (it->is_subject_to_debugging()) return *it;
    }
    UNREACHABLE();
  }
#if V8_ENABLE_WEBASSEMBLY
  if (is_wasm()) return frames.back();
#endif  // V8_ENABLE_WEBASSEMBLY
  UNREACHABLE();
}
// static
// A frame is debugger-visible if it is a JS frame whose function is subject
// to debugging, or (with wasm enabled) any wasm frame.
bool DebuggableStackFrameIterator::IsValidFrame(StackFrame* frame) {
  if (frame->is_java_script()) {
#if V8_ENABLE_WEBASSEMBLY
    Object maybe_func(
        Memory<Address>(frame->fp() + StandardFrameConstants::kFunctionOffset));
    // Some builtins are called by their ID from optimized code.
    if (maybe_func.IsSmi()) {
      DCHECK(Builtins::IsBuiltinId(Smi::cast(maybe_func).value()));
      return false;
    }
#endif
    JSFunction function = static_cast<JavaScriptFrame*>(frame)->function();
    return function.shared().IsSubjectToDebugging();
  }
#if V8_ENABLE_WEBASSEMBLY
  if (frame->is_wasm()) return true;
#endif  // V8_ENABLE_WEBASSEMBLY
  return false;
}
// -------------------------------------------------------------------------
namespace {
// Heuristically determines whether {pc} belongs to interpreter entry/enter
// code, i.e. whether the frame described by {state} should be treated as an
// interpreted frame.
bool IsInterpreterFramePc(Isolate* isolate, Address pc,
                          StackFrame::State* state) {
  Builtin builtin = OffHeapInstructionStream::TryLookupCode(isolate, pc);
  if (builtin != Builtin::kNoBuiltinId &&
      (builtin == Builtin::kInterpreterEntryTrampoline ||
       builtin == Builtin::kInterpreterEnterAtBytecode ||
       builtin == Builtin::kInterpreterEnterAtNextBytecode ||
       builtin == Builtin::kBaselineOrInterpreterEnterAtBytecode ||
       builtin == Builtin::kBaselineOrInterpreterEnterAtNextBytecode)) {
    return true;
  } else if (v8_flags.interpreted_frames_native_stack) {
    // With interpreted_frames_native_stack, the trampoline may live on the
    // heap; fall back to a code-space lookup below.
    intptr_t marker = Memory<intptr_t>(
        state->fp + CommonFrameConstants::kContextOrFrameTypeOffset);
    MSAN_MEMORY_IS_INITIALIZED(
        state->fp + StandardFrameConstants::kFunctionOffset,
        kSystemPointerSize);
    Object maybe_function = Object(
        Memory<Address>(state->fp + StandardFrameConstants::kFunctionOffset));
    // There's no need to run a full ContainsSlow if we know the frame can't be
    // an InterpretedFrame, so we do these fast checks first
    if (StackFrame::IsTypeMarker(marker) || maybe_function.IsSmi()) {
      return false;
    } else if (!isolate->heap()->InSpaceSlow(pc, CODE_SPACE)) {
      return false;
    }
    Code interpreter_entry_trampoline =
        isolate->heap()->FindCodeForInnerPointer(pc);
    return interpreter_entry_trampoline.is_interpreter_trampoline_builtin();
  } else {
    return false;
  }
}
}  // namespace
// Returns true when {pc} is inside a bytecode handler that did not set up a
// stack frame of its own (so {fp} still belongs to the interpreted frame).
bool StackFrameIteratorForProfiler::IsNoFrameBytecodeHandlerPc(
    Isolate* isolate, Address pc, Address fp) const {
  const EmbeddedData embedded = EmbeddedData::FromBlob(isolate);
  const bool pc_in_bytecode_handlers =
      pc >= embedded.InstructionStartOfBytecodeHandlers() &&
      pc < embedded.InstructionEndOfBytecodeHandlers();
  if (!pc_in_bytecode_handlers) return false;

  const Address frame_type_address =
      fp + CommonFrameConstants::kContextOrFrameTypeOffset;
  if (!IsValidStackAddress(frame_type_address)) return false;

  // Check if top stack frame is a bytecode handler stub frame.
  MSAN_MEMORY_IS_INITIALIZED(frame_type_address, kSystemPointerSize);
  const intptr_t marker = Memory<intptr_t>(frame_type_address);
  const bool handler_built_frame =
      StackFrame::IsTypeMarker(marker) &&
      StackFrame::MarkerToType(marker) == StackFrame::STUB;
  // A STUB marker means the bytecode handler did build a frame.
  return !handler_built_frame;
}
// Builds a profiler-safe stack iterator from sampled register values. All
// memory reads are bounds-checked against [low_bound_, high_bound_] since the
// sample may catch the mutator mid-frame-setup.
//
// Fix: {top_location} can legitimately remain nullptr below (no link register
// on e.g. x64 AND {sp} outside the sampled stack bounds); previously it was
// dereferenced unconditionally, crashing the sampler.
StackFrameIteratorForProfiler::StackFrameIteratorForProfiler(
    Isolate* isolate, Address pc, Address fp, Address sp, Address lr,
    Address js_entry_sp)
    : StackFrameIteratorBase(isolate),
      low_bound_(sp),
      high_bound_(js_entry_sp),
      top_frame_type_(StackFrame::NO_FRAME_TYPE),
      external_callback_scope_(isolate->external_callback_scope()),
      top_link_register_(lr) {
  if (!isolate->isolate_data()->stack_is_iterable()) {
    // The stack is not iterable in a short time interval during deoptimization.
    // See also: ExternalReference::stack_is_iterable_address.
    DCHECK(done());
    return;
  }

  // For Advance below, we need frame_ to be set; and that only happens if the
  // type is not NO_FRAME_TYPE.
  // TODO(jgruber): Clean this up.
  static constexpr StackFrame::Type kTypeForAdvance = StackFrame::TURBOFAN;

  StackFrame::State state;
  StackFrame::Type type;
  ThreadLocalTop* const top = isolate->thread_local_top();
  bool advance_frame = true;
  const Address fast_c_fp = isolate->isolate_data()->fast_c_call_caller_fp();
  if (fast_c_fp != kNullAddress) {
    // 'Fast C calls' are a special type of C call where we call directly from
    // JS to C without an exit frame in between. The CEntryStub is responsible
    // for setting Isolate::c_entry_fp, meaning that it won't be set for fast C
    // calls. To keep the stack iterable, we store the FP and PC of the caller
    // of the fast C call on the isolate. This is guaranteed to be the topmost
    // JS frame, because fast C calls cannot call back into JS. We start
    // iterating the stack from this topmost JS frame.
    DCHECK_NE(kNullAddress, isolate->isolate_data()->fast_c_call_caller_pc());
    state.fp = fast_c_fp;
    state.sp = sp;
    state.pc_address = reinterpret_cast<Address*>(
        isolate->isolate_data()->fast_c_call_caller_pc_address());

    // ComputeStackFrameType will read both kContextOffset and
    // kFunctionOffset, we check only that kFunctionOffset is within the stack
    // bounds and do a compile time check that kContextOffset slot is pushed on
    // the stack before kFunctionOffset.
    static_assert(StandardFrameConstants::kFunctionOffset <
                  StandardFrameConstants::kContextOffset);
    if (IsValidStackAddress(state.fp +
                            StandardFrameConstants::kFunctionOffset)) {
      type = ComputeStackFrameType(&state);
      if (IsValidFrameType(type)) {
        top_frame_type_ = type;
        advance_frame = false;
      }
    } else {
      // Cannot determine the actual type; the frame will be skipped below.
      type = kTypeForAdvance;
    }
  } else if (IsValidTop(top)) {
    type = ExitFrame::GetStateForFramePointer(Isolate::c_entry_fp(top), &state);
    top_frame_type_ = type;
  } else if (IsValidStackAddress(fp)) {
    DCHECK_NE(fp, kNullAddress);
    state.fp = fp;
    state.sp = sp;
    state.pc_address =
        StackFrame::ResolveReturnAddressLocation(reinterpret_cast<Address*>(
            fp + StandardFrameConstants::kCallerPCOffset));

    // If the current PC is in a bytecode handler, the top stack frame isn't
    // the bytecode handler's frame and the top of stack or link register is a
    // return address into the interpreter entry trampoline, then we are likely
    // in a bytecode handler with elided frame. In that case, set the PC
    // properly and make sure we do not drop the frame.
    bool is_no_frame_bytecode_handler = false;
    if (IsNoFrameBytecodeHandlerPc(isolate, pc, fp)) {
      Address* top_location = nullptr;
      if (top_link_register_) {
        top_location = &top_link_register_;
      } else if (IsValidStackAddress(sp)) {
        MSAN_MEMORY_IS_INITIALIZED(sp, kSystemPointerSize);
        top_location = reinterpret_cast<Address*>(sp);
      }
      // {top_location} stays null when there is no link register and {sp}
      // lies outside the sampled stack; don't dereference it in that case.
      if (top_location != nullptr &&
          IsInterpreterFramePc(isolate, *top_location, &state)) {
        state.pc_address = top_location;
        is_no_frame_bytecode_handler = true;
        advance_frame = false;
      }
    }

    // ComputeStackFrameType will read both kContextOffset and
    // kFunctionOffset, we check only that kFunctionOffset is within the stack
    // bounds and do a compile time check that kContextOffset slot is pushed on
    // the stack before kFunctionOffset.
    static_assert(StandardFrameConstants::kFunctionOffset <
                  StandardFrameConstants::kContextOffset);
    Address function_slot = fp + StandardFrameConstants::kFunctionOffset;
    if (IsValidStackAddress(function_slot)) {
      if (is_no_frame_bytecode_handler) {
        type = StackFrame::INTERPRETED;
      } else {
        type = ComputeStackFrameType(&state);
      }
      top_frame_type_ = type;
    } else {
      // Cannot determine the actual type; the frame will be skipped below.
      type = kTypeForAdvance;
    }
  } else {
    // Not iterable.
    DCHECK(done());
    return;
  }

  frame_ = SingletonFor(type, &state);
  if (advance_frame && !done()) {
    Advance();
  }
}
// A thread top is usable if its recorded C entry frame is valid and the
// handler chain sits above it (i.e. no JS frames above the native frames).
bool StackFrameIteratorForProfiler::IsValidTop(ThreadLocalTop* top) const {
  const Address c_entry_fp = Isolate::c_entry_fp(top);
  if (!IsValidExitFrame(c_entry_fp)) return false;
  // There should be at least one JS_ENTRY stack handler, and no js frames
  // on top of the native frames.
  const Address handler = Isolate::handler(top);
  return handler != kNullAddress && c_entry_fp < handler;
}
void StackFrameIteratorForProfiler::AdvanceOneFrame() {
DCHECK(!done());
StackFrame* last_frame = frame_;
Address last_sp = last_frame->sp(), last_fp = last_frame->fp();
// Before advancing to the next stack frame, perform pointer validity tests.
if (!IsValidFrame(last_frame) || !IsValidCaller(last_frame)) {
frame_ = nullptr;
return;
}
// Advance to the previous frame.
StackFrame::State state;
StackFrame::Type type = frame_->GetCallerState(&state);
frame_ = SingletonFor(type, &state);
if (!frame_) return;
// Check that we have actually moved to the previous frame in the stack.
if (frame_->sp() <= last_sp || frame_->fp() <= last_fp) {
frame_ = nullptr;
}
}
// A sampled frame is plausible only if both sp and fp lie within the
// sampled stack bounds.
bool StackFrameIteratorForProfiler::IsValidFrame(StackFrame* frame) const {
  if (!IsValidStackAddress(frame->sp())) return false;
  return IsValidStackAddress(frame->fp());
}
// Checks that the caller state derived from {frame} looks sane before the
// profiler advances to it; a bogus pointer here would crash the sampler.
bool StackFrameIteratorForProfiler::IsValidCaller(StackFrame* frame) {
  StackFrame::State state;
  if (frame->is_entry() || frame->is_construct_entry()) {
    // See EntryFrame::GetCallerState. It computes the caller FP address
    // and calls ExitFrame::GetStateForFramePointer on it. We need to be
    // sure that caller FP address is valid.
    Address next_exit_frame_fp = Memory<Address>(
        frame->fp() + EntryFrameConstants::kNextExitFrameFPOffset);
    if (!IsValidExitFrame(next_exit_frame_fp)) return false;
  }
  frame->ComputeCallerState(&state);
  return IsValidStackAddress(state.sp) && IsValidStackAddress(state.fp) &&
         SingletonFor(frame->GetCallerState(&state)) != nullptr;
}
// Validates that {fp} plausibly points at an exit frame: fp and the sp it
// records are in bounds, and the recorded return pc is non-null.
bool StackFrameIteratorForProfiler::IsValidExitFrame(Address fp) const {
  if (!IsValidStackAddress(fp)) return false;
  const Address recorded_sp = ExitFrame::ComputeStackPointer(fp);
  if (!IsValidStackAddress(recorded_sp)) return false;
  StackFrame::State exit_state;
  ExitFrame::FillState(fp, recorded_sp, &exit_state);
  MSAN_MEMORY_IS_INITIALIZED(exit_state.pc_address,
                             sizeof(exit_state.pc_address));
  return *exit_state.pc_address != kNullAddress;
}
// Advances to the next frame the profiler can report: a JS frame, a wasm
// frame, or an exit frame (whose pc may be redirected to the entry point of
// an external callback currently running).
void StackFrameIteratorForProfiler::Advance() {
  while (true) {
    AdvanceOneFrame();
    if (done()) break;
    ExternalCallbackScope* last_callback_scope = nullptr;
    while (external_callback_scope_ != nullptr &&
           external_callback_scope_->scope_address() < frame_->fp()) {
      // As long as the setup of a frame is not atomic, we may happen to be
      // in an interval where an ExternalCallbackScope is already created,
      // but the frame is not yet entered. So we are actually observing
      // the previous frame.
      // Skip all the ExternalCallbackScope's that are below the current fp.
      last_callback_scope = external_callback_scope_;
      external_callback_scope_ = external_callback_scope_->previous();
    }
    if (frame_->is_java_script()) break;
#if V8_ENABLE_WEBASSEMBLY
    if (frame_->is_wasm() || frame_->is_wasm_to_js() ||
        frame_->is_js_to_wasm()) {
      break;
    }
#endif  // V8_ENABLE_WEBASSEMBLY
    if (frame_->is_exit() || frame_->is_builtin_exit()) {
      // Some of the EXIT frames may have ExternalCallbackScope allocated on
      // top of them. In that case the scope corresponds to the first EXIT
      // frame beneath it. There may be other EXIT frames on top of the
      // ExternalCallbackScope, just skip them as we cannot collect any useful
      // information about them.
      if (last_callback_scope) {
        frame_->state_.pc_address =
            last_callback_scope->callback_entrypoint_address();
      }
      break;
    }
  }
}
// -------------------------------------------------------------------------
namespace {
// Returns the GcSafeCode containing {pc} via the isolate's
// inner-pointer-to-code cache; empty if {pc} is not inside V8 code.
base::Optional<GcSafeCode> GetContainingCode(Isolate* isolate, Address pc) {
  return isolate->inner_pointer_to_code_cache()->GetCacheEntry(pc)->code;
}
}  // namespace
// Looks up the code object containing this frame's pc; safe to call during
// GC. The pc must lie inside the returned code's instruction area.
GcSafeCode StackFrame::GcSafeLookupCode() const {
  base::Optional<GcSafeCode> code = GetContainingCode(isolate(), pc());
  DCHECK_GE(pc(), code->InstructionStart(isolate(), pc()));
  DCHECK_LT(pc(), code->InstructionEnd(isolate(), pc()));
  return code.value();
}
// Non-GC-safe code lookup; must not be called while a mark-compact GC is
// running (use GcSafeLookupCode there instead).
Code StackFrame::LookupCode() const {
  DCHECK_NE(isolate()->heap()->gc_state(), Heap::MARK_COMPACT);
  return GcSafeLookupCode().UnsafeCastToCode();
}
// Visits the on-stack pc (and constant pool slot, if any) as a GC root. If
// the visitor moves the holder's InstructionStream, the pc is rewritten to
// the same offset inside the moved code.
void StackFrame::IteratePc(RootVisitor* v, Address* pc_address,
                           Address* constant_pool_address,
                           GcSafeCode holder) const {
  const Address old_pc = ReadPC(pc_address);
  DCHECK_GE(old_pc, holder.InstructionStart(isolate(), old_pc));
  DCHECK_LT(old_pc, holder.InstructionEnd(isolate(), old_pc));

  // Keep the old pc offset before visiting the code since we need it to
  // calculate the new pc after a potential InstructionStream move.
  const uintptr_t pc_offset_from_start = old_pc - holder.instruction_start();

  // Visit.
  GcSafeCode visited_holder = holder;
  PtrComprCageBase code_cage_base{isolate()->code_cage_base()};
  const Object old_istream = holder.raw_instruction_stream(code_cage_base);
  Object visited_istream = old_istream;
  v->VisitRunningCode(FullObjectSlot{&visited_holder},
                      FullObjectSlot{&visited_istream});
  if (visited_istream == old_istream) {
    // Note this covers two important cases:
    // 1. the associated InstructionStream object did not move, and
    // 2. `holder` is an embedded builtin and has no InstructionStream.
    return;
  }

  DCHECK(visited_holder.has_instruction_stream());

  InstructionStream istream =
      InstructionStream::unchecked_cast(visited_istream);
  const Address new_pc = istream.instruction_start() + pc_offset_from_start;
  // TODO(v8:10026): avoid replacing a signed pointer.
  PointerAuthentication::ReplacePC(pc_address, new_pc, kSystemPointerSize);
  if (V8_EMBEDDED_CONSTANT_POOL_BOOL && constant_pool_address != nullptr) {
    *constant_pool_address = visited_holder.constant_pool(istream);
  }
}
// Installs the process-wide return-address-location resolver; may only be
// set once (DCHECK enforces the slot is still null).
void StackFrame::SetReturnAddressLocationResolver(
    ReturnAddressLocationResolver resolver) {
  DCHECK_NULL(return_address_location_resolver_);
  return_address_location_resolver_ = resolver;
}
namespace {

// Maps a builtin code object (with JS linkage) to the stack frame type its
// frame should be treated as.
StackFrame::Type ComputeBuiltinFrameType(GcSafeCode code) {
  if (code.is_interpreter_trampoline_builtin() ||
      code.is_baseline_trampoline_builtin()) {
    // Frames for baseline entry trampolines on the stack are still interpreted
    // frames.
    return StackFrame::INTERPRETED;
  }
  if (code.is_baseline_leave_frame_builtin()) {
    return StackFrame::BASELINE;
  }
  if (code.is_turbofanned()) {
    // TODO(bmeurer): We treat frames for BUILTIN Code objects as
    // OptimizedFrame for now (all the builtins with JavaScript linkage are
    // actually generated with TurboFan currently, so this is sound).
    return StackFrame::TURBOFAN;
  }
  return StackFrame::BUILTIN;
}
// Sanitizes a frame type decoded from an on-stack marker: types that can
// legitimately appear as markers are passed through, everything else is
// treated as NATIVE.
StackFrame::Type SafeStackFrameType(StackFrame::Type candidate) {
  DCHECK_LE(static_cast<uintptr_t>(candidate), StackFrame::NUMBER_OF_TYPES);
  switch (candidate) {
    case StackFrame::BUILTIN_CONTINUATION:
    case StackFrame::BUILTIN_EXIT:
    case StackFrame::CONSTRUCT:
    case StackFrame::CONSTRUCT_ENTRY:
    case StackFrame::ENTRY:
    case StackFrame::EXIT:
    case StackFrame::INTERNAL:
    case StackFrame::IRREGEXP:
    case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION:
    case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH:
    case StackFrame::STUB:
      return candidate;

#if V8_ENABLE_WEBASSEMBLY
    case StackFrame::JS_TO_WASM:
    case StackFrame::STACK_SWITCH:
    case StackFrame::WASM:
    case StackFrame::WASM_DEBUG_BREAK:
    case StackFrame::WASM_EXIT:
    case StackFrame::WASM_LIFTOFF_SETUP:
    case StackFrame::WASM_TO_JS:
      return candidate;
#endif  // V8_ENABLE_WEBASSEMBLY

    // Any other marker value is likely to be a bogus stack frame when being
    // called from the profiler (in particular, JavaScript frames, including
    // interpreted frames, should never have a StackFrame::Type marker).
    // Consider these frames "native".
    // TODO(jgruber): For the StackFrameIterator, I'm not sure this fallback
    // makes sense. Shouldn't we know how to handle all frames we encounter
    // there?
    case StackFrame::BASELINE:
    case StackFrame::BUILTIN:
    case StackFrame::INTERPRETED:
    case StackFrame::MAGLEV:
    case StackFrame::MANUAL:
    case StackFrame::NATIVE:
    case StackFrame::NO_FRAME_TYPE:
    case StackFrame::NUMBER_OF_TYPES:
    case StackFrame::TURBOFAN:
    case StackFrame::TURBOFAN_STUB_WITH_CONTEXT:
#if V8_ENABLE_WEBASSEMBLY
    case StackFrame::C_WASM_ENTRY:
    case StackFrame::WASM_TO_JS_FUNCTION:
#endif  // V8_ENABLE_WEBASSEMBLY
      return StackFrame::NATIVE;
  }
  UNREACHABLE();
}
}  // namespace
// Classifies the frame described by {state}: first by wasm code lookup, then
// by the kind of the containing Code object, finally falling back to the
// on-stack frame-type marker.
StackFrame::Type StackFrameIterator::ComputeStackFrameType(
    StackFrame::State* state) const {
#if V8_ENABLE_WEBASSEMBLY
  if (state->fp == kNullAddress) {
    // A null fp is only produced at the end of a stack-switching segment.
    DCHECK(v8_flags.experimental_wasm_stack_switching);
    return StackFrame::NO_FRAME_TYPE;
  }
#endif

  const Address pc = StackFrame::ReadPC(state->pc_address);

#if V8_ENABLE_WEBASSEMBLY
  // If the {pc} does not point into WebAssembly code we can rely on the
  // returned {wasm_code} to be null and fall back to {GetContainingCode}.
  wasm::WasmCodeRefScope code_ref_scope;
  if (wasm::WasmCode* wasm_code = wasm::GetWasmCodeManager()->LookupCode(pc)) {
    switch (wasm_code->kind()) {
      case wasm::WasmCode::kWasmFunction:
        return StackFrame::WASM;
      case wasm::WasmCode::kWasmToCapiWrapper:
        return StackFrame::WASM_EXIT;
      case wasm::WasmCode::kWasmToJsWrapper:
        return StackFrame::WASM_TO_JS;
      default:
        UNREACHABLE();
    }
  }
#endif  // V8_ENABLE_WEBASSEMBLY

  // Look up the code object to figure out the type of the stack frame.
  base::Optional<GcSafeCode> lookup_result = GetContainingCode(isolate(), pc);
  if (!lookup_result.has_value()) return StackFrame::NATIVE;

  MSAN_MEMORY_IS_INITIALIZED(
      state->fp + CommonFrameConstants::kContextOrFrameTypeOffset,
      kSystemPointerSize);
  const intptr_t marker = Memory<intptr_t>(
      state->fp + CommonFrameConstants::kContextOrFrameTypeOffset);
  switch (lookup_result->kind()) {
    case CodeKind::BUILTIN: {
      // With an explicit type marker present, defer to the marker below.
      if (StackFrame::IsTypeMarker(marker)) break;
      return ComputeBuiltinFrameType(lookup_result.value());
    }
    case CodeKind::BASELINE:
      return StackFrame::BASELINE;
    case CodeKind::MAGLEV:
      if (StackFrame::IsTypeMarker(marker)) {
        // An INTERNAL frame can be set up with an associated Maglev code
        // object when calling into runtime to handle tiering. In this case,
        // all stack slots are tagged pointers and should be visited through
        // the usual logic.
        DCHECK_EQ(StackFrame::MarkerToType(marker), StackFrame::INTERNAL);
        return StackFrame::INTERNAL;
      }
      return StackFrame::MAGLEV;
    case CodeKind::TURBOFAN:
      return StackFrame::TURBOFAN;
#if V8_ENABLE_WEBASSEMBLY
    case CodeKind::JS_TO_WASM_FUNCTION:
      if (lookup_result->builtin_id() == Builtin::kGenericJSToWasmWrapper) {
        return StackFrame::JS_TO_WASM;
      }
      return StackFrame::TURBOFAN_STUB_WITH_CONTEXT;
    case CodeKind::JS_TO_JS_FUNCTION:
      return StackFrame::TURBOFAN_STUB_WITH_CONTEXT;
    case CodeKind::C_WASM_ENTRY:
      return StackFrame::C_WASM_ENTRY;
    case CodeKind::WASM_TO_JS_FUNCTION:
      return StackFrame::WASM_TO_JS_FUNCTION;
    case CodeKind::WASM_FUNCTION:
    case CodeKind::WASM_TO_CAPI_FUNCTION:
      // These never appear as on-heap Code objects.
      UNREACHABLE();
#else
    case CodeKind::C_WASM_ENTRY:
    case CodeKind::JS_TO_JS_FUNCTION:
    case CodeKind::JS_TO_WASM_FUNCTION:
    case CodeKind::WASM_FUNCTION:
    case CodeKind::WASM_TO_CAPI_FUNCTION:
    case CodeKind::WASM_TO_JS_FUNCTION:
      UNREACHABLE();
#endif  // V8_ENABLE_WEBASSEMBLY
    case CodeKind::BYTECODE_HANDLER:
    case CodeKind::FOR_TESTING:
    case CodeKind::REGEXP:
    case CodeKind::INTERPRETED_FUNCTION:
      // Fall back to the marker.
      break;
  }

  return SafeStackFrameType(StackFrame::MarkerToType(marker));
}
// Profiler-safe frame classification: relies only on the type marker and a
// few heuristic memory probes, since the sample may be mid-frame-setup.
StackFrame::Type StackFrameIteratorForProfiler::ComputeStackFrameType(
    StackFrame::State* state) const {
#if V8_ENABLE_WEBASSEMBLY
  if (state->fp == kNullAddress) {
    // A null fp is only produced at the end of a stack-switching segment.
    DCHECK(v8_flags.experimental_wasm_stack_switching);
    return StackFrame::NO_FRAME_TYPE;
  }
#endif

  MSAN_MEMORY_IS_INITIALIZED(
      state->fp + CommonFrameConstants::kContextOrFrameTypeOffset,
      kSystemPointerSize);
  const intptr_t marker = Memory<intptr_t>(
      state->fp + CommonFrameConstants::kContextOrFrameTypeOffset);
  if (StackFrame::IsTypeMarker(marker)) {
    if (static_cast<uintptr_t>(marker) > StackFrame::NUMBER_OF_TYPES) {
      // We've read some bogus value from the stack.
      return StackFrame::NATIVE;
    }
    return SafeStackFrameType(StackFrame::MarkerToType(marker));
  }

  // We use unauthenticated_pc because it may come from
  // fast_c_call_caller_pc_address, for which authentication does not work.
  const Address pc = StackFrame::unauthenticated_pc(state->pc_address);

  MSAN_MEMORY_IS_INITIALIZED(
      state->fp + StandardFrameConstants::kFunctionOffset, kSystemPointerSize);
  Object maybe_function = Object(
      Memory<Address>(state->fp + StandardFrameConstants::kFunctionOffset));
  if (maybe_function.IsSmi()) {
    // A Smi in the function slot means this is not a JS frame.
    return StackFrame::NATIVE;
  } else if (IsInterpreterFramePc(isolate(), pc, state)) {
    return StackFrame::INTERPRETED;
  }
  return StackFrame::TURBOFAN;
}
// Computes the caller's register state and classifies the caller's frame.
StackFrame::Type StackFrame::GetCallerState(State* state) const {
  ComputeCallerState(state);
  return iterator_->ComputeStackFrameType(state);
}
// The caller's sp is directly above this frame's fixed header.
Address CommonFrame::GetCallerStackPointer() const {
  return fp() + CommonFrameConstants::kCallerSPOffset;
}
// Derives the caller's state from the standard frame links; native frames
// record no callee pc and no constant pool.
void NativeFrame::ComputeCallerState(State* state) const {
  state->sp = caller_sp();
  state->fp = Memory<Address>(fp() + CommonFrameConstants::kCallerFPOffset);
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(fp() + CommonFrameConstants::kCallerPCOffset));
  state->callee_pc_address = nullptr;
  state->constant_pool_address = nullptr;
}
// Entry frames always execute the JSEntry builtin.
HeapObject EntryFrame::unchecked_code() const {
  return isolate()->builtins()->code(Builtin::kJSEntry);
}
// For entry frames the caller state is fully determined by GetCallerState.
void EntryFrame::ComputeCallerState(State* state) const {
  GetCallerState(state);
}
// The caller of an entry frame is the exit frame whose fp was saved in the
// entry frame when JS was entered; derive the caller state from that fp.
StackFrame::Type EntryFrame::GetCallerState(State* state) const {
  const Address caller_exit_fp =
      Memory<Address>(fp() + EntryFrameConstants::kNextExitFrameFPOffset);
  return ExitFrame::GetStateForFramePointer(caller_exit_fp, state);
}
#if V8_ENABLE_WEBASSEMBLY
// The caller of a CWasmEntry frame is the exit frame whose fp was saved at
// kCEntryFPOffset when entering wasm from C++.
StackFrame::Type CWasmEntryFrame::GetCallerState(State* state) const {
  const int offset = CWasmEntryFrameConstants::kCEntryFPOffset;
  Address fp = Memory<Address>(this->fp() + offset);
  return ExitFrame::GetStateForFramePointer(fp, state);
}
#endif  // V8_ENABLE_WEBASSEMBLY
// Construct entry frames always execute the JSConstructEntry builtin.
HeapObject ConstructEntryFrame::unchecked_code() const {
  return isolate()->builtins()->code(Builtin::kJSConstructEntry);
}
// Restores the caller's state from the slots the exit frame saved on entry.
void ExitFrame::ComputeCallerState(State* state) const {
  // Set up the caller state.
  state->sp = caller_sp();
  state->fp = Memory<Address>(fp() + ExitFrameConstants::kCallerFPOffset);
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(fp() + ExitFrameConstants::kCallerPCOffset));
  state->callee_pc_address = nullptr;
  if (V8_EMBEDDED_CONSTANT_POOL_BOOL) {
    state->constant_pool_address = reinterpret_cast<Address*>(
        fp() + ExitFrameConstants::kConstantPoolOffset);
  }
}
// The only root the exit frame itself owns is its pc.
void ExitFrame::Iterate(RootVisitor* v) const {
  // The arguments are traversed as part of the expression stack of
  // the calling frame.
  IteratePc(v, pc_address(), constant_pool_address(), GcSafeLookupCode());
}
// Classifies and fills in the state of the exit frame at {fp}; a zero fp
// means there is no exit frame (NO_FRAME_TYPE).
StackFrame::Type ExitFrame::GetStateForFramePointer(Address fp, State* state) {
  if (fp == 0) return NO_FRAME_TYPE;
  StackFrame::Type type = ComputeFrameType(fp);
#if V8_ENABLE_WEBASSEMBLY
  // WASM_EXIT frames locate their sp differently (see WasmExitFrame).
  Address sp = type == WASM_EXIT ? WasmExitFrame::ComputeStackPointer(fp)
                                 : ExitFrame::ComputeStackPointer(fp);
#else
  Address sp = ExitFrame::ComputeStackPointer(fp);
#endif  // V8_ENABLE_WEBASSEMBLY
  FillState(fp, sp, state);
  DCHECK_NE(*state->pc_address, kNullAddress);
  return type;
}
// Classifies the exit frame at {fp} by reading its frame-type slot.
StackFrame::Type ExitFrame::ComputeFrameType(Address fp) {
  // Distinguish between regular and builtin exit frames.
  // Default to EXIT in all hairy cases (e.g., when called from profiler).
  const int offset = ExitFrameConstants::kFrameTypeOffset;
  Object marker(Memory<Address>(fp + offset));

  if (!marker.IsSmi()) {
    return EXIT;
  }

  intptr_t marker_int = base::bit_cast<intptr_t>(marker);

  // Recover the type from the Smi-encoded marker.
  StackFrame::Type frame_type = static_cast<StackFrame::Type>(marker_int >> 1);
  switch (frame_type) {
    case BUILTIN_EXIT:
#if V8_ENABLE_WEBASSEMBLY
    case WASM_EXIT:
    case STACK_SWITCH:
#endif  // V8_ENABLE_WEBASSEMBLY
      return frame_type;
    default:
      return EXIT;
  }
}
// Reads the sp that the exit frame recorded at kSPOffset.
Address ExitFrame::ComputeStackPointer(Address fp) {
  const Address sp_slot = fp + ExitFrameConstants::kSPOffset;
  MSAN_MEMORY_IS_INITIALIZED(sp_slot, kSystemPointerSize);
  return Memory<Address>(sp_slot);
}
#if V8_ENABLE_WEBASSEMBLY
// Computes the sp of a WASM_EXIT frame from fixed frame constants rather
// than from a saved slot.
Address WasmExitFrame::ComputeStackPointer(Address fp) {
  // For WASM_EXIT frames, {sp} is only needed for finding the PC slot,
  // everything else is handled via safepoint information.
  Address sp = fp + WasmExitFrameConstants::kWasmInstanceOffset;
  DCHECK_EQ(sp - 1 * kPCOnStackSize,
            fp + WasmExitFrameConstants::kCallingPCOffset);
  return sp;
}
#endif  // V8_ENABLE_WEBASSEMBLY
// Fills {state} for an exit frame at {fp}/{sp}: the pc is loaded from the
// slot just below {sp}; callee pc and constant pool are left empty.
void ExitFrame::FillState(Address fp, Address sp, State* state) {
  state->sp = sp;
  state->fp = fp;
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(sp - 1 * kPCOnStackSize));
  state->callee_pc_address = nullptr;
  // The constant pool recorded in the exit frame is not associated
  // with the pc in this state (the return address into a C entry
  // stub). ComputeCallerState will retrieve the constant pool
  // together with the associated caller pc.
  state->constant_pool_address = nullptr;
}
// Produces exactly one JavaScriptFrameSummary describing the builtin call
// this frame represents. {frames} must be empty on entry.
void BuiltinExitFrame::Summarize(std::vector<FrameSummary>* frames) const {
  DCHECK(frames->empty());
  // GetParameters may allocate (it creates a FixedArray when detailed stack
  // traces are enabled), so it must happen before DisallowGarbageCollection.
  Handle<FixedArray> parameters = GetParameters();
  DisallowGarbageCollection no_gc;
  Code code = LookupCode();
  int code_offset = code.GetOffsetFromInstructionStart(isolate(), pc());
  FrameSummary::JavaScriptFrameSummary summary(
      isolate(), receiver(), function(), AbstractCode::cast(code), code_offset,
      IsConstructor(), *parameters);
  frames->push_back(summary);
}
// The callee, read from the frame's target slot.
JSFunction BuiltinExitFrame::function() const {
  return JSFunction::cast(target_slot_object());
}
// The receiver, read from the frame's receiver slot.
Object BuiltinExitFrame::receiver() const { return receiver_slot_object(); }
// Returns argument {i} (0-based, receiver excluded) from the on-stack
// argument area above the frame pointer.
Object BuiltinExitFrame::GetParameter(int i) const {
  DCHECK(i >= 0 && i < ComputeParametersCount());
  int offset =
      BuiltinExitFrameConstants::kFirstArgumentOffset + i * kSystemPointerSize;
  return Object(Memory<Address>(fp() + offset));
}
// Number of actual JS arguments passed to the builtin, excluding the
// implicit slots described below.
int BuiltinExitFrame::ComputeParametersCount() const {
  Object argc_slot = argc_slot_object();
  DCHECK(argc_slot.IsSmi());
  // Argc also counts the receiver, target, new target, and argc itself as args,
  // therefore the real argument count is argc - 4.
  int argc = Smi::ToInt(argc_slot) - 4;
  DCHECK_GE(argc, 0);
  return argc;
}
// Materializes the frame's arguments as a FixedArray. To keep the common
// path cheap, this returns the (shared) empty array unless
// --detailed-error-stack-trace is enabled.
Handle<FixedArray> BuiltinExitFrame::GetParameters() const {
  if (V8_LIKELY(!v8_flags.detailed_error_stack_trace)) {
    return isolate()->factory()->empty_fixed_array();
  }
  int param_count = ComputeParametersCount();
  auto parameters = isolate()->factory()->NewFixedArray(param_count);
  for (int i = 0; i < param_count; i++) {
    parameters->set(i, GetParameter(i));
  }
  return parameters;
}
// A builtin exit frame is a construct call iff the new.target slot holds
// something other than undefined.
bool BuiltinExitFrame::IsConstructor() const {
  return !new_target_slot_object().IsUndefined(isolate());
}
namespace {
// Prints the frame-index prefix; OVERVIEW mode uses a fixed-width column so
// successive frames line up.
void PrintIndex(StringStream* accumulator, StackFrame::PrintMode mode,
                int index) {
  accumulator->Add((mode == StackFrame::OVERVIEW) ? "%5d: " : "[%d]: ", index);
}
// Maps a StackFrame::Type to its printable name, generated from
// STACK_FRAME_TYPE_LIST so the mapping stays in sync with the type list.
const char* StringForStackFrameType(StackFrame::Type type) {
  switch (type) {
#define CASE(value, name) \
  case StackFrame::value: \
    return #name;
    STACK_FRAME_TYPE_LIST(CASE)
#undef CASE
    default:
      UNREACHABLE();
  }
}
}  // namespace
// Generic fallback printer: "<index>: <type name> [pc: <pc>]".
// Subclasses (e.g. BuiltinExitFrame) override this with richer output.
void StackFrame::Print(StringStream* accumulator, PrintMode mode,
                       int index) const {
  DisallowGarbageCollection no_gc;
  PrintIndex(accumulator, mode, index);
  accumulator->Add(StringForStackFrameType(type()));
  accumulator->Add(" [pc: %p]\n", reinterpret_cast<void*>(pc()));
}
// Prints a human-readable description of this builtin exit frame, e.g.
// "builtin exit frame: new Foo(this=..., arg0, ...)".
void BuiltinExitFrame::Print(StringStream* accumulator, PrintMode mode,
                             int index) const {
  DisallowGarbageCollection no_gc;
  Object receiver = this->receiver();
  JSFunction function = this->function();
  accumulator->PrintSecurityTokenIfChanged(function);
  PrintIndex(accumulator, mode, index);
  accumulator->Add("builtin exit frame: ");
  if (IsConstructor()) accumulator->Add("new ");
  accumulator->PrintFunction(function, receiver);
  accumulator->Add("(this=%o", receiver);
  // Print the parameters.
  int parameters_count = ComputeParametersCount();
  for (int i = 0; i < parameters_count; i++) {
    accumulator->Add(",%o", GetParameter(i));
  }
  accumulator->Add(")\n\n");
}
// Address of expression-stack slot {n}. Slot 0 sits at the fixed expressions
// offset; higher slot numbers live at lower addresses (the stack grows down).
Address CommonFrame::GetExpressionAddress(int n) const {
  return fp() + StandardFrameConstants::kExpressionsOffset -
         n * kSystemPointerSize;
}
// Same as CommonFrame::GetExpressionAddress, but anchored at the unoptimized
// frame's expressions offset.
Address UnoptimizedFrame::GetExpressionAddress(int n) const {
  return fp() + UnoptimizedFrameConstants::kExpressionsOffset -
         n * kSystemPointerSize;
}
// Plain CommonFrames carry no JS context; report undefined. Subclasses with
// JS linkage override this to read the real context slot.
Object CommonFrame::context() const {
  return ReadOnlyRoots(isolate()).undefined_value();
}
// Source position corresponding to the frame's current pc, obtained via the
// owning code object's position tables.
int CommonFrame::position() const {
  Code code = LookupCode();
  int code_offset = code.GetOffsetFromInstructionStart(isolate(), pc());
  return AbstractCode::cast(code).SourcePosition(isolate(), code_offset);
}
int CommonFrame::ComputeExpressionsCount() const {
Address base = GetExpressionAddress(0);
Address limit = sp() - kSystemPointerSize;
DCHECK(base >= limit); // stack grows downwards
// Include register-allocated locals in number of expressions.
return static_cast<int>((base - limit) / kSystemPointerSize);
}
// Fills {state} with the caller's register state, read from this frame's
// fixed slots (caller fp/pc and, if used, the constant pool slot).
void CommonFrame::ComputeCallerState(State* state) const {
  state->fp = caller_fp();
#if V8_ENABLE_WEBASSEMBLY
  if (state->fp == kNullAddress) {
    // An empty FP signals the first frame of a stack segment. The caller is
    // on a different stack, or is unbound (suspended stack).
    DCHECK(v8_flags.experimental_wasm_stack_switching);
    return;
  }
#endif
  state->sp = caller_sp();
  state->pc_address = ResolveReturnAddressLocation(reinterpret_cast<Address*>(
      fp() + StandardFrameConstants::kCallerPCOffset));
  state->callee_fp = fp();
  state->callee_pc_address = pc_address();
  state->constant_pool_address = reinterpret_cast<Address*>(
      fp() + StandardFrameConstants::kConstantPoolOffset);
}
// Base implementation is deliberately unreachable: only frame kinds that can
// appear in stack traces override Summarize with a real implementation.
void CommonFrame::Summarize(std::vector<FrameSummary>* functions) const {
  // This should only be called on frames which override this method.
  UNREACHABLE();
}
namespace {
// Visits one stack spill slot. With pointer compression enabled, a slot that
// holds a compressed value is temporarily widened to a full pointer so the
// RootVisitor always sees full values, then re-compressed afterwards.
void VisitSpillSlot(Isolate* isolate, RootVisitor* v,
                    FullObjectSlot spill_slot) {
#ifdef V8_COMPRESS_POINTERS
  PtrComprCageBase cage_base(isolate);
  bool was_compressed = false;
  // Spill slots may contain compressed values in which case the upper
  // 32-bits will contain zeros. In order to simplify handling of such
  // slots in GC we ensure that the slot always contains full value.
  // The spill slot may actually contain weak references so we load/store
  // values using spill_slot.location() in order to avoid dealing with
  // FullMaybeObjectSlots here.
  if (V8_EXTERNAL_CODE_SPACE_BOOL) {
    // When external code space is enabled the spill slot could contain both
    // InstructionStream and non-InstructionStream references, which have
    // different cage bases. So unconditional decompression of the value might
    // corrupt InstructionStream pointers. However, given that
    // 1) the InstructionStream pointers are never compressed by design
    //    (because otherwise we wouldn't know which cage base to apply for
    //    decompression, see respective DCHECKs in
    //    RelocInfo::target_object()),
    // 2) there's no need to update the upper part of the full pointer
    //    because if it was there then it'll stay the same,
    // we can avoid updating upper part of the spill slot if it already
    // contains full value.
    // TODO(v8:11880): Remove this special handling by enforcing builtins
    // to use CodeTs instead of InstructionStream objects.
    Address value = *spill_slot.location();
    if (!HAS_SMI_TAG(value) && value <= 0xffffffff) {
      // We don't need to update smi values or full pointers.
      was_compressed = true;
      *spill_slot.location() = V8HeapCompressionScheme::DecompressTagged(
          cage_base, static_cast<Tagged_t>(value));
      if (DEBUG_BOOL) {
        // Ensure that the spill slot contains correct heap object.
        HeapObject raw = HeapObject::cast(Object(*spill_slot.location()));
        MapWord map_word = raw.map_word(cage_base, kRelaxedLoad);
        // Follow a forwarding pointer if the object has already been moved.
        HeapObject forwarded = map_word.IsForwardingAddress()
                                   ? map_word.ToForwardingAddress(raw)
                                   : raw;
        bool is_self_forwarded =
            forwarded.map_word(cage_base, kRelaxedLoad) ==
            MapWord::FromForwardingAddress(forwarded, forwarded);
        if (is_self_forwarded) {
          // The object might be in a self-forwarding state if it's located
          // in new large object space. GC will fix this at a later stage.
          CHECK(BasicMemoryChunk::FromHeapObject(forwarded)
                    ->InNewLargeObjectSpace());
        } else {
          HeapObject forwarded_map = forwarded.map(cage_base);
          // The map might be forwarded as well.
          MapWord fwd_map_map_word =
              forwarded_map.map_word(cage_base, kRelaxedLoad);
          if (fwd_map_map_word.IsForwardingAddress()) {
            forwarded_map = fwd_map_map_word.ToForwardingAddress(forwarded_map);
          }
          CHECK(forwarded_map.IsMap(cage_base));
        }
      }
    }
  } else {
    Address slot_contents = *spill_slot.location();
    Tagged_t compressed_value = static_cast<Tagged_t>(slot_contents);
    if (!HAS_SMI_TAG(compressed_value)) {
      // A zero upper half means the slot held a compressed value that must
      // be restored to compressed form after visiting.
      was_compressed = slot_contents <= 0xFFFFFFFF;
      // We don't need to update smi values.
      *spill_slot.location() = V8HeapCompressionScheme::DecompressTagged(
          cage_base, compressed_value);
    }
  }
#endif
  v->VisitRootPointer(Root::kStackRoots, nullptr, spill_slot);
#if V8_COMPRESS_POINTERS
  if (was_compressed) {
    // Restore compression. Generated code should be able to trust that
    // compressed spill slots remain compressed.
    *spill_slot.location() =
        V8HeapCompressionScheme::CompressObject(*spill_slot.location());
  }
#endif
}
// Visits every spill slot whose bit is set in {tagged_slots}, a bitmap with
// one bit per slot, starting at {first_slot_offset}.
void VisitSpillSlots(Isolate* isolate, RootVisitor* v,
                     FullObjectSlot first_slot_offset,
                     base::Vector<const uint8_t> tagged_slots) {
  FullObjectSlot slot_offset = first_slot_offset;
  for (uint8_t bits : tagged_slots) {
    // Iterate the set bits of this byte, lowest first.
    while (bits) {
      const int bit = base::bits::CountTrailingZeros(bits);
      bits &= ~(1 << bit);
      FullObjectSlot spill_slot = slot_offset + bit;
      VisitSpillSlot(isolate, v, spill_slot);
    }
    slot_offset += kBitsPerByte;
  }
}
// Returns the Turbofan safepoint entry for {inner_pointer}, computing and
// caching it in {entry} on first use. In debug builds a cached entry is
// re-validated against a fresh table lookup.
SafepointEntry GetSafepointEntryFromCodeCache(
    Isolate* isolate, Address inner_pointer,
    InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* entry) {
  if (!entry->safepoint_entry.is_initialized()) {
    entry->safepoint_entry =
        SafepointTable::FindEntry(isolate, entry->code.value(), inner_pointer);
    DCHECK(entry->safepoint_entry.is_initialized());
  } else {
    DCHECK_EQ(
        entry->safepoint_entry,
        SafepointTable::FindEntry(isolate, entry->code.value(), inner_pointer));
  }
  return entry->safepoint_entry;
}
// Maglev analogue of GetSafepointEntryFromCodeCache, using the Maglev
// safepoint table format.
MaglevSafepointEntry GetMaglevSafepointEntryFromCodeCache(
    Isolate* isolate, Address inner_pointer,
    InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* entry) {
  if (!entry->maglev_safepoint_entry.is_initialized()) {
    entry->maglev_safepoint_entry = MaglevSafepointTable::FindEntry(
        isolate, entry->code.value(), inner_pointer);
    DCHECK(entry->maglev_safepoint_entry.is_initialized());
  } else {
    DCHECK_EQ(entry->maglev_safepoint_entry,
              MaglevSafepointTable::FindEntry(isolate, entry->code.value(),
                                              inner_pointer));
  }
  return entry->maglev_safepoint_entry;
}
}  // namespace
#ifdef V8_ENABLE_WEBASSEMBLY
// GC root visiting for Wasm frames: visits tagged outgoing parameters,
// tagged spill slots (per safepoint bitmap), tagged incoming parameters, and
// the fixed frame header (instance, and feedback slot if present).
void WasmFrame::Iterate(RootVisitor* v) const {
  DCHECK(!iterator_->IsStackFrameIteratorForProfiler());
  //  ===  WasmFrame ===
  //  +-----------------+-----------------------------------------
  //  |   out_param n   |  <-- parameters_base / sp
  //  |       ...       |
  //  |   out_param 0   |  (these can be tagged or untagged)
  //  +-----------------+-----------------------------------------
  //  |   spill_slot n  |  <-- parameters_limit                  ^
  //  |       ...       |                               spill_slot_space
  //  |   spill_slot 0  |                                        v
  //  +-----------------+-----------------------------------------
  //  | WasmFeedback(*) |  <-- frame_header_base                 ^
  //  |- - - - - - - - -|                                        |
  //  |  WasmInstance   |                                        |
  //  |- - - - - - - - -|                                        |
  //  |   Type Marker   |                                        |
  //  |- - - - - - - - -|                              frame_header_size
  //  | [Constant Pool] |                                        |
  //  |- - - - - - - - -|                                        |
  //  | saved frame ptr |  <-- fp                                |
  //  |- - - - - - - - -|                                        |
  //  |  return addr    |  <- tagged_parameter_limit             v
  //  +-----------------+-----------------------------------------
  //  |    in_param n   |
  //  |       ...       |
  //  |    in_param 0   |  <-- first_tagged_parameter_slot
  //  +-----------------+-----------------------------------------
  //
  // (*) Only if compiled by Liftoff and with --experimental-wasm-inlining.
  auto* wasm_code = wasm::GetWasmCodeManager()->LookupCode(pc());
  DCHECK(wasm_code);
  SafepointTable table(wasm_code);
  SafepointEntry safepoint_entry = table.FindEntry(pc());
#ifdef DEBUG
  // Sanity-check the frame's type marker.
  intptr_t marker =
      Memory<intptr_t>(fp() + CommonFrameConstants::kContextOrFrameTypeOffset);
  DCHECK(StackFrame::IsTypeMarker(marker));
  StackFrame::Type type = StackFrame::MarkerToType(marker);
  DCHECK(type == WASM_TO_JS || type == WASM || type == WASM_EXIT);
#endif
  // Determine the fixed header and spill slot area size.
  // The last value in the frame header is the calling PC, which should
  // not be visited.
  static_assert(WasmExitFrameConstants::kFixedSlotCountFromFp ==
                    WasmFrameConstants::kFixedSlotCountFromFp + 1,
                "WasmExitFrame has one slot more than WasmFrame");
  int frame_header_size = WasmFrameConstants::kFixedFrameSizeFromFp;
  if (wasm_code->is_liftoff() && wasm_code->frame_has_feedback_slot()) {
    // Frame has Wasm feedback slot.
    frame_header_size += kSystemPointerSize;
  }
  // Everything between the header and the outgoing parameters is spill space.
  int spill_slot_space =
      wasm_code->stack_slots() * kSystemPointerSize -
      (frame_header_size + StandardFrameConstants::kFixedFrameSizeAboveFp);
  // Fixed frame slots.
  FullObjectSlot frame_header_base(&Memory<Address>(fp() - frame_header_size));
  FullObjectSlot frame_header_limit(
      &Memory<Address>(fp() - StandardFrameConstants::kCPSlotSize));
  // Parameters passed to the callee.
  FullObjectSlot parameters_base(&Memory<Address>(sp()));
  FullObjectSlot parameters_limit(frame_header_base.address() -
                                  spill_slot_space);
  // Visit the rest of the parameters if they are tagged.
  bool has_tagged_outgoing_params =
      wasm_code->kind() != wasm::WasmCode::kWasmFunction &&
      wasm_code->kind() != wasm::WasmCode::kWasmToCapiWrapper;
  if (has_tagged_outgoing_params) {
    v->VisitRootPointers(Root::kStackRoots, nullptr, parameters_base,
                         parameters_limit);
  }
  // Visit pointer spill slots and locals.
  DCHECK_GE((wasm_code->stack_slots() + kBitsPerByte) / kBitsPerByte,
            safepoint_entry.tagged_slots().size());
  VisitSpillSlots(isolate(), v, parameters_limit,
                  safepoint_entry.tagged_slots());
  // Visit tagged parameters that have been passed to the function of this
  // frame. Conceptionally these parameters belong to the parent frame. However,
  // the exact count is only known by this frame (in the presence of tail calls,
  // this information cannot be derived from the call site).
  if (wasm_code->num_tagged_parameter_slots() > 0) {
    FullObjectSlot tagged_parameter_base(&Memory<Address>(caller_sp()));
    tagged_parameter_base += wasm_code->first_tagged_parameter_slot();
    FullObjectSlot tagged_parameter_limit =
        tagged_parameter_base + wasm_code->num_tagged_parameter_slots();
    v->VisitRootPointers(Root::kStackRoots, nullptr, tagged_parameter_base,
                         tagged_parameter_limit);
  }
  // Visit the instance object.
  v->VisitRootPointers(Root::kStackRoots, nullptr, frame_header_base,
                       frame_header_limit);
}
#endif // V8_ENABLE_WEBASSEMBLY
// GC root visiting for typed (marker-carrying Turbofan) frames: visits
// outgoing parameters (if tagged), spill slots per the safepoint bitmap, the
// fixed header, and the return pc.
void TypedFrame::Iterate(RootVisitor* v) const {
  DCHECK(!iterator_->IsStackFrameIteratorForProfiler());
  //  ===  TypedFrame ===
  //  +-----------------+-----------------------------------------
  //  |   out_param n   |  <-- parameters_base / sp
  //  |       ...       |
  //  |   out_param 0   |
  //  +-----------------+-----------------------------------------
  //  |   spill_slot n  |  <-- parameters_limit                  ^
  //  |       ...       |                                  spill_slot_count
  //  |   spill_slot 0  |                                        v
  //  +-----------------+-----------------------------------------
  //  |   Type Marker   |  <-- frame_header_base                 ^
  //  |- - - - - - - - -|                                        |
  //  | [Constant Pool] |                                        |
  //  |- - - - - - - - -|                               kFixedSlotCount
  //  | saved frame ptr |  <-- fp                                |
  //  |- - - - - - - - -|                                        |
  //  |  return addr    |                                        v
  //  +-----------------+-----------------------------------------
  // Find the code and compute the safepoint information.
  Address inner_pointer = pc();
  InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* entry =
      isolate()->inner_pointer_to_code_cache()->GetCacheEntry(inner_pointer);
  CHECK(entry->code.has_value());
  GcSafeCode code = entry->code.value();
  DCHECK(code.is_turbofanned());
  SafepointEntry safepoint_entry =
      GetSafepointEntryFromCodeCache(isolate(), inner_pointer, entry);
#ifdef DEBUG
  // Typed frames always carry a type marker (unlike standard JS frames).
  intptr_t marker =
      Memory<intptr_t>(fp() + CommonFrameConstants::kContextOrFrameTypeOffset);
  DCHECK(StackFrame::IsTypeMarker(marker));
#endif  // DEBUG
  // Determine the fixed header and spill slot area size.
  int frame_header_size = TypedFrameConstants::kFixedFrameSizeFromFp;
  int spill_slots_size =
      code.stack_slots() * kSystemPointerSize -
      (frame_header_size + StandardFrameConstants::kFixedFrameSizeAboveFp);
  // Fixed frame slots.
  FullObjectSlot frame_header_base(&Memory<Address>(fp() - frame_header_size));
  FullObjectSlot frame_header_limit(
      &Memory<Address>(fp() - StandardFrameConstants::kCPSlotSize));
  // Parameters passed to the callee.
  FullObjectSlot parameters_base(&Memory<Address>(sp()));
  FullObjectSlot parameters_limit(frame_header_base.address() -
                                  spill_slots_size);
  // Visit the rest of the parameters.
  if (HasTaggedOutgoingParams(code)) {
    v->VisitRootPointers(Root::kStackRoots, nullptr, parameters_base,
                         parameters_limit);
  }
  // Visit pointer spill slots and locals.
  DCHECK_GE((code.stack_slots() + kBitsPerByte) / kBitsPerByte,
            safepoint_entry.tagged_slots().size());
  VisitSpillSlots(isolate(), v, parameters_limit,
                  safepoint_entry.tagged_slots());
  // Visit fixed header region.
  v->VisitRootPointers(Root::kStackRoots, nullptr, frame_header_base,
                       frame_header_limit);
  // Visit the return address in the callee and incoming arguments.
  IteratePc(v, pc_address(), constant_pool_address(), code);
}
// GC root visiting for Maglev frames. Unlike Turbofan, Maglev partitions the
// spill area into a tagged region followed by an untagged region, and may
// additionally push registers (some tagged) right before a call.
void MaglevFrame::Iterate(RootVisitor* v) const {
  DCHECK(!iterator_->IsStackFrameIteratorForProfiler());
  //  ===  MaglevFrame ===
  //  +-----------------+-----------------------------------------
  //  |   out_param n   |  <-- parameters_base / sp
  //  |       ...       |
  //  |   out_param 0   |
  //  +-----------------+-----------------------------------------
  //  | pushed_double n |  <-- parameters_limit                  ^
  //  |       ...       |                                        |
  //  | pushed_double 0 |                                        |
  //  +- - - - - - - - -+                     num_pushed_registers
  //  |   pushed_reg n  |                                        |
  //  |       ...       |                                        |
  //  |   pushed_reg 0  |  <-- pushed_register_base              v
  //  +-----------------+-----------------------------------------
  //  | untagged_slot n |                                        ^
  //  |       ...       |                                        |
  //  | untagged_slot 0 |                                        |
  //  +- - - - - - - - -+                         spill_slot_count
  //  |  tagged_slot n  |                                        |
  //  |       ...       |                                        |
  //  |  tagged_slot 0  |                                        v
  //  +-----------------+-----------------------------------------
  //  |      argc       |  <-- frame_header_base                 ^
  //  |- - - - - - - - -|                                        |
  //  |   JSFunction    |                                        |
  //  |- - - - - - - - -|                                        |
  //  |    Context      |                                        |
  //  |- - - - - - - - -|                          kFixedSlotCount
  //  | [Constant Pool] |                                        |
  //  |- - - - - - - - -|                                        |
  //  | saved frame ptr |  <-- fp                                |
  //  |- - - - - - - - -|                                        |
  //  |  return addr    |                                        v
  //  +-----------------+-----------------------------------------
  // Find the code and compute the safepoint information.
  Address inner_pointer = pc();
  InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* entry =
      isolate()->inner_pointer_to_code_cache()->GetCacheEntry(inner_pointer);
  CHECK(entry->code.has_value());
  GcSafeCode code = entry->code.value();
  DCHECK(code.is_maglevved());
  MaglevSafepointEntry maglev_safepoint_entry =
      GetMaglevSafepointEntryFromCodeCache(isolate(), inner_pointer, entry);
#ifdef DEBUG
  // Assert that it is a JS frame and it has a context.
  intptr_t marker =
      Memory<intptr_t>(fp() + CommonFrameConstants::kContextOrFrameTypeOffset);
  DCHECK(!StackFrame::IsTypeMarker(marker));
#endif  // DEBUG
  // Fixed frame slots.
  FullObjectSlot frame_header_base(
      &Memory<Address>(fp() - StandardFrameConstants::kFixedFrameSizeFromFp));
  FullObjectSlot frame_header_limit(
      &Memory<Address>(fp() - StandardFrameConstants::kCPSlotSize));
  // Determine spill slot area count.
  uint32_t tagged_slot_count = maglev_safepoint_entry.num_tagged_slots();
  uint32_t spill_slot_count =
      tagged_slot_count + maglev_safepoint_entry.num_untagged_slots();
  DCHECK_EQ(code.stack_slots(),
            StandardFrameConstants::kFixedSlotCount +
                maglev_safepoint_entry.num_tagged_slots() +
                maglev_safepoint_entry.num_untagged_slots());
  // Visit the outgoing parameters if they are tagged.
  DCHECK(code.has_tagged_outgoing_params());
  FullObjectSlot parameters_base(&Memory<Address>(sp()));
  FullObjectSlot parameters_limit =
      frame_header_base - spill_slot_count -
      maglev_safepoint_entry.num_pushed_registers();
  v->VisitRootPointers(Root::kStackRoots, nullptr, parameters_base,
                       parameters_limit);
  // Maglev can also spill registers, tagged and untagged, just before making
  // a call. These are distinct from normal spill slots and live between the
  // normal spill slots and the pushed parameters. Some of these are tagged,
  // as indicated by the tagged register indexes, and should be visited too.
  if (maglev_safepoint_entry.num_pushed_registers() > 0) {
    FullObjectSlot pushed_register_base =
        frame_header_base - spill_slot_count - 1;
    uint32_t tagged_register_indexes =
        maglev_safepoint_entry.tagged_register_indexes();
    // Iterate the set bits of the tagged-register bitmap, lowest first.
    while (tagged_register_indexes != 0) {
      int index = base::bits::CountTrailingZeros(tagged_register_indexes);
      tagged_register_indexes &= ~(1 << index);
      FullObjectSlot spill_slot = pushed_register_base - index;
      VisitSpillSlot(isolate(), v, spill_slot);
    }
  }
  // Visit tagged spill slots.
  for (uint32_t i = 0; i < tagged_slot_count; ++i) {
    FullObjectSlot spill_slot = frame_header_base - 1 - i;
    VisitSpillSlot(isolate(), v, spill_slot);
  }
  // Visit fixed header region (the context and JSFunction), skipping the
  // argument count since it is stored untagged.
  v->VisitRootPointers(Root::kStackRoots, nullptr, frame_header_base + 1,
                       frame_header_limit);
  // Visit the return address in the callee and incoming arguments.
  IteratePc(v, pc_address(), constant_pool_address(), code);
}
// Returns the function of the innermost frame summary (i.e. the most deeply
// inlined function at the current pc).
Handle<JSFunction> MaglevFrame::GetInnermostFunction() const {
  std::vector<FrameSummary> summaries;
  Summarize(&summaries);
  const FrameSummary& innermost = summaries.back();
  return innermost.AsJavaScript().function();
}
// Returns the bytecode offset to resume at when this Maglev frame is used
// for on-stack replacement, read from the code's deoptimization data at the
// current safepoint. Fatally aborts if no deopt info exists for the pc.
BytecodeOffset MaglevFrame::GetBytecodeOffsetForOSR() const {
  int deopt_index = SafepointEntry::kNoDeoptIndex;
  const DeoptimizationData data = GetDeoptimizationData(&deopt_index);
  if (deopt_index == SafepointEntry::kNoDeoptIndex) {
    CHECK(data.is_null());
    // Fixed: the message used to reference OptimizedFrame::Summarize, a
    // copy-paste artifact from the Turbofan implementation.
    FATAL(
        "Missing deoptimization information for "
        "MaglevFrame::GetBytecodeOffsetForOSR.");
  }
  return data.GetBytecodeOffset(deopt_index);
}
// Whether this frame's outgoing parameter area may contain tagged values
// that this frame (rather than the callee) must visit during GC.
bool CommonFrame::HasTaggedOutgoingParams(GcSafeCode code_lookup) const {
#if V8_ENABLE_WEBASSEMBLY
  // With inlined JS-to-Wasm calls, we can be in an OptimizedFrame and
  // directly call a Wasm function from JavaScript. In this case the Wasm frame
  // is responsible for visiting incoming potentially tagged parameters.
  // (This is required for tail-call support: If the direct callee tail-called
  // another function which then caused a GC, the caller would not be able to
  // determine where there might be tagged parameters.)
  wasm::WasmCode* wasm_callee =
      wasm::GetWasmCodeManager()->LookupCode(callee_pc());
  return (wasm_callee == nullptr) && code_lookup.has_tagged_outgoing_params();
#else
  return code_lookup.has_tagged_outgoing_params();
#endif  // V8_ENABLE_WEBASSEMBLY
}
// Returns the code object containing pc(), or a default-constructed (null)
// HeapObject when the pc does not fall into any known code object.
HeapObject TurbofanStubWithContextFrame::unchecked_code() const {
  base::Optional<GcSafeCode> lookup =
      isolate()->heap()->GcSafeTryFindCodeForInnerPointer(pc());
  if (lookup.has_value()) return lookup.value();
  return {};
}
// Shared GC root visiting for Turbofan-optimized JS-linkage frames (used by
// both TurbofanFrame and TurbofanStubWithContextFrame).
void CommonFrame::IterateTurbofanOptimizedFrame(RootVisitor* v) const {
  DCHECK(!iterator_->IsStackFrameIteratorForProfiler());
  //  ===  TurbofanFrame ===
  //  +-----------------+-----------------------------------------
  //  |   out_param n   |  <-- parameters_base / sp
  //  |       ...       |
  //  |   out_param 0   |
  //  +-----------------+-----------------------------------------
  //  |   spill_slot n  |  <-- parameters_limit                  ^
  //  |       ...       |                                 spill_slot_count
  //  |   spill_slot 0  |                                        v
  //  +-----------------+-----------------------------------------
  //  |      argc       |  <-- frame_header_base                 ^
  //  |- - - - - - - - -|                                        |
  //  |   JSFunction    |                                        |
  //  |- - - - - - - - -|                                        |
  //  |    Context      |                                        |
  //  |- - - - - - - - -|                           kFixedSlotCount
  //  | [Constant Pool] |                                        |
  //  |- - - - - - - - -|                                        |
  //  | saved frame ptr |  <-- fp                                |
  //  |- - - - - - - - -|                                        |
  //  |  return addr    |                                        v
  //  +-----------------+-----------------------------------------
  // Find the code and compute the safepoint information.
  Address inner_pointer = pc();
  InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* entry =
      isolate()->inner_pointer_to_code_cache()->GetCacheEntry(inner_pointer);
  CHECK(entry->code.has_value());
  GcSafeCode code = entry->code.value();
  DCHECK(code.is_turbofanned());
  SafepointEntry safepoint_entry =
      GetSafepointEntryFromCodeCache(isolate(), inner_pointer, entry);
#ifdef DEBUG
  // Assert that it is a JS frame and it has a context.
  intptr_t marker =
      Memory<intptr_t>(fp() + CommonFrameConstants::kContextOrFrameTypeOffset);
  DCHECK(!StackFrame::IsTypeMarker(marker));
#endif  // DEBUG
  // Determine the fixed header and spill slot area size.
  int frame_header_size = StandardFrameConstants::kFixedFrameSizeFromFp;
  int spill_slot_count =
      code.stack_slots() - StandardFrameConstants::kFixedSlotCount;
  // Fixed frame slots.
  FullObjectSlot frame_header_base(&Memory<Address>(fp() - frame_header_size));
  FullObjectSlot frame_header_limit(
      &Memory<Address>(fp() - StandardFrameConstants::kCPSlotSize));
  // Parameters passed to the callee.
  FullObjectSlot parameters_base(&Memory<Address>(sp()));
  FullObjectSlot parameters_limit = frame_header_base - spill_slot_count;
  // Visit the outgoing parameters if they are tagged.
  if (HasTaggedOutgoingParams(code)) {
    v->VisitRootPointers(Root::kStackRoots, nullptr, parameters_base,
                         parameters_limit);
  }
  // Spill slots are in the region ]frame_header_base, parameters_limit];
  // Visit pointer spill slots and locals.
  DCHECK_GE((code.stack_slots() + kBitsPerByte) / kBitsPerByte,
            safepoint_entry.tagged_slots().size());
  VisitSpillSlots(isolate(), v, parameters_limit,
                  safepoint_entry.tagged_slots());
  // Visit fixed header region (the context and JSFunction), skipping the
  // argument count since it is stored untagged.
  v->VisitRootPointers(Root::kStackRoots, nullptr, frame_header_base + 1,
                       frame_header_limit);
  // Visit the return address in the callee and incoming arguments.
  IteratePc(v, pc_address(), constant_pool_address(), code);
}
// This frame kind is laid out like any Turbofan-optimized frame, so GC
// visiting is delegated to the shared implementation.
void TurbofanStubWithContextFrame::Iterate(RootVisitor* v) const {
  IterateTurbofanOptimizedFrame(v);
}
// GC visiting for plain Turbofan frames is the shared optimized-frame walk.
void TurbofanFrame::Iterate(RootVisitor* v) const {
  IterateTurbofanOptimizedFrame(v);
}
// Returns the code object containing pc(), or a default-constructed (null)
// HeapObject when the pc does not fall into any known code object.
HeapObject StubFrame::unchecked_code() const {
  base::Optional<GcSafeCode> lookup =
      isolate()->heap()->GcSafeTryFindCodeForInnerPointer(pc());
  if (lookup.has_value()) return lookup.value();
  return {};
}
// Looks up the handler-table entry for the current pc in the builtin's
// handler table and returns the result of HandlerTable::LookupReturn.
int StubFrame::LookupExceptionHandlerInTable() {
  Code code = LookupCode();
  // Only Turbofan-compiled builtins carry a handler table reachable here.
  DCHECK(code.is_turbofanned());
  DCHECK_EQ(code.kind(), CodeKind::BUILTIN);
  HandlerTable table(code);
  int pc_offset = code.GetOffsetFromInstructionStart(isolate(), pc());
  return table.LookupReturn(pc_offset);
}
// Most stub frames are omitted from stack traces entirely; this only emits a
// summary for a small allowlist of builtins that stand in for a throwing
// builtin. No-op when WebAssembly support is compiled out.
void StubFrame::Summarize(std::vector<FrameSummary>* frames) const {
#if V8_ENABLE_WEBASSEMBLY
  Code code = LookupCode();
  if (code.kind() != CodeKind::BUILTIN) return;
  // We skip most stub frames from stack traces, but a few builtins
  // specifically exist to pretend to be another builtin throwing an
  // exception.
  switch (code.builtin_id()) {
    case Builtin::kThrowIndexOfCalledOnNull:
    case Builtin::kThrowToLowerCaseCalledOnNull:
    case Builtin::kWasmIntToString: {
      // When adding builtins here, also implement naming support for them.
      DCHECK_NE(nullptr, Builtins::NameForStackTrace(code.builtin_id()));
      FrameSummary::BuiltinFrameSummary summary(isolate(), code.builtin_id());
      frames->push_back(summary);
      break;
    }
    default:
      break;
  }
#endif  // V8_ENABLE_WEBASSEMBLY
}
// Overwrites the on-stack argument slot {index} with {value}.
void JavaScriptFrame::SetParameterValue(int index, Object value) const {
  Memory<Address>(GetParameterSlot(index)) = value.ptr();
}
// True when the caller frame is a construct frame (i.e. this call was made
// via `new`).
bool JavaScriptFrame::IsConstructor() const {
  return IsConstructFrame(caller_fp());
}
// The code currently attached to the frame's function.
HeapObject CommonFrameWithJSLinkage::unchecked_code() const {
  return function().code();
}
int TurbofanFrame::ComputeParametersCount() const {
if (GcSafeLookupCode().kind() == CodeKind::BUILTIN) {
return static_cast<int>(
Memory<intptr_t>(fp() + StandardFrameConstants::kArgCOffset)) -
kJSArgcReceiverSlots;
} else {
return JavaScriptFrame::ComputeParametersCount();
}
}
// The caller's stack pointer, derived from this frame's fp plus the fixed
// caller-SP offset.
Address JavaScriptFrame::GetCallerStackPointer() const {
  return fp() + StandardFrameConstants::kCallerSPOffset;
}
// Collects the SharedFunctionInfo of this frame's function. Subclasses that
// support inlining may push multiple entries; here there is exactly one.
void JavaScriptFrame::GetFunctions(
    std::vector<SharedFunctionInfo>* functions) const {
  DCHECK(functions->empty());
  functions->push_back(function().shared());
}
// Handle-based variant: wraps each SharedFunctionInfo produced by the raw
// overload in a Handle.
void JavaScriptFrame::GetFunctions(
    std::vector<Handle<SharedFunctionInfo>>* functions) const {
  DCHECK(functions->empty());
  std::vector<SharedFunctionInfo> infos;
  GetFunctions(&infos);
  for (const auto& info : infos) {
    functions->push_back(
        Handle<SharedFunctionInfo>(info, function().GetIsolate()));
  }
}
// True when the caller frame is a construct frame (call made via `new`).
bool CommonFrameWithJSLinkage::IsConstructor() const {
  return IsConstructFrame(caller_fp());
}
// Produces a single frame summary: either a BuiltinFrameSummary (when the
// frame's function slot holds a builtin id Smi under wasm support) or a
// regular JavaScriptFrameSummary. {functions} must be empty on entry.
void CommonFrameWithJSLinkage::Summarize(
    std::vector<FrameSummary>* functions) const {
  DCHECK(functions->empty());
  GcSafeCode code = GcSafeLookupCode();
  int offset = code.GetOffsetFromInstructionStart(isolate(), pc());
  Handle<AbstractCode> abstract_code(
      AbstractCode::cast(code.UnsafeCastToCode()), isolate());
#if V8_ENABLE_WEBASSEMBLY
  // A Smi in the function slot of builtin code encodes a Builtin id rather
  // than a JSFunction.
  Object maybe_func(
      base::Memory<Address>(fp() + StandardFrameConstants::kFunctionOffset));
  if (code.kind() == CodeKind::BUILTIN && maybe_func.IsSmi()) {
    FrameSummary::BuiltinFrameSummary summary(
        isolate(), static_cast<Builtin>(Smi::cast(maybe_func).value()));
    functions->push_back(summary);
    return;
  }
#endif
  Handle<FixedArray> params = GetParameters();
  FrameSummary::JavaScriptFrameSummary summary(
      isolate(), receiver(), function(), *abstract_code, offset,
      IsConstructor(), *params);
  functions->push_back(summary);
}
// The frame's JSFunction, read from the function slot.
JSFunction JavaScriptFrame::function() const {
  return JSFunction::cast(function_slot_object());
}
// Like function(), but without the JSFunction cast: during deoptimization
// the slot may temporarily hold the arguments marker instead.
Object JavaScriptFrame::unchecked_function() const {
  // During deoptimization of an optimized function, we may have yet to
  // materialize some closures on the stack. The arguments marker object
  // marks this case.
  DCHECK(function_slot_object().IsJSFunction() ||
         ReadOnlyRoots(isolate()).arguments_marker() == function_slot_object());
  return function_slot_object();
}
// The receiver lives one slot before the first argument, hence index -1.
Object CommonFrameWithJSLinkage::receiver() const {
  // TODO(cbruni): document this better
  return GetParameter(-1);
}
// The frame's context, read from the fixed context slot. A Smi here would
// indicate a type marker (i.e. not a JS frame), hence the DCHECK.
Object JavaScriptFrame::context() const {
  const int offset = StandardFrameConstants::kContextOffset;
  Object maybe_result(Memory<Address>(fp() + offset));
  DCHECK(!maybe_result.IsSmi());
  return maybe_result;
}
// The script this frame's function was compiled from.
Script JavaScriptFrame::script() const {
  return Script::cast(function().shared().script());
}
// Frames of this kind never carry a handler table, so there is never a
// handler: always returns -1. Debug builds assert the assumption.
int CommonFrameWithJSLinkage::LookupExceptionHandlerInTable(
    int* stack_depth, HandlerTable::CatchPrediction* prediction) {
  if (DEBUG_BOOL) {
    Code code_lookup_result = LookupCode();
    CHECK(!code_lookup_result.has_handler_table());
    CHECK(!code_lookup_result.is_optimized_code() ||
          code_lookup_result.kind() == CodeKind::BASELINE);
  }
  return -1;
}
// Prints "<code-kind marker><function name>+<offset>" to {file}, optionally
// followed by " at <script>:<line>" when {print_line_number} is set and the
// script/line can be resolved. Used for runtime tracing output.
void JavaScriptFrame::PrintFunctionAndOffset(JSFunction function,
                                             AbstractCode code, int code_offset,
                                             FILE* file,
                                             bool print_line_number) {
  PtrComprCageBase cage_base = GetPtrComprCageBase(function);
  PrintF(file, "%s", CodeKindToMarker(code.kind(cage_base)));
  function.PrintName(file);
  PrintF(file, "+%d", code_offset);
  if (print_line_number) {
    SharedFunctionInfo shared = function.shared();
    int source_pos = code.SourcePosition(cage_base, code_offset);
    Object maybe_script = shared.script();
    if (maybe_script.IsScript()) {
      Script script = Script::cast(maybe_script);
      int line = script.GetLineNumber(source_pos) + 1;
      Object script_name_raw = script.name();
      if (script_name_raw.IsString()) {
        // Reuse the already-loaded name instead of re-reading script.name().
        String script_name = String::cast(script_name_raw);
        std::unique_ptr<char[]> c_script_name =
            script_name.ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
        PrintF(file, " at %s:%d", c_script_name.get(), line);
      } else {
        // The script has no string name (e.g. dynamically created source).
        PrintF(file, " at <unknown>:%d", line);
      }
    } else {
      PrintF(file, " at <unknown>:<unknown>");
    }
  }
}
// Prints the topmost JavaScript frame of {isolate}'s stack to {file}:
// "[new ]<marker><name>+<offset>[ at script:line][(this=..., args...)]".
// Stops after the first JS frame found.
void JavaScriptFrame::PrintTop(Isolate* isolate, FILE* file, bool print_args,
                               bool print_line_number) {
  // constructor calls
  DisallowGarbageCollection no_gc;
  JavaScriptStackFrameIterator it(isolate);
  while (!it.done()) {
    if (it.frame()->is_java_script()) {
      JavaScriptFrame* frame = it.frame();
      if (frame->IsConstructor()) PrintF(file, "new ");
      JSFunction function = frame->function();
      int code_offset = 0;
      AbstractCode abstract_code = function.abstract_code(isolate);
      // Pick the bytecode/code offset appropriate to the frame's tier.
      if (frame->is_interpreted()) {
        InterpretedFrame* iframe = reinterpret_cast<InterpretedFrame*>(frame);
        code_offset = iframe->GetBytecodeOffset();
      } else if (frame->is_baseline()) {
        // TODO(pthier): AbstractCode should fully support Baseline code.
        BaselineFrame* baseline_frame = BaselineFrame::cast(frame);
        code_offset = baseline_frame->GetBytecodeOffset();
        abstract_code = AbstractCode::cast(baseline_frame->GetBytecodeArray());
      } else {
        code_offset = frame->LookupCode().GetOffsetFromInstructionStart(
            isolate, frame->pc());
      }
      PrintFunctionAndOffset(function, abstract_code, code_offset, file,
                             print_line_number);
      if (print_args) {
        // function arguments
        // (we are intentionally only printing the actually
        // supplied parameters, not all parameters required)
        PrintF(file, "(this=");
        frame->receiver().ShortPrint(file);
        const int length = frame->ComputeParametersCount();
        for (int i = 0; i < length; i++) {
          PrintF(file, ", ");
          frame->GetParameter(i).ShortPrint(file);
        }
        PrintF(file, ")");
      }
      break;
    }
    it.Advance();
  }
}
// static
// Records function name, code offset and (when a script is attached)
// script name plus 1-based line/column into the current ICStats entry.
void JavaScriptFrame::CollectFunctionAndOffsetForICStats(JSFunction function,
                                                         AbstractCode code,
                                                         int code_offset) {
  auto ic_stats = ICStats::instance();
  ICInfo& ic_info = ic_stats->Current();
  PtrComprCageBase cage_base = GetPtrComprCageBase(function);
  SharedFunctionInfo shared = function.shared(cage_base);
  ic_info.function_name = ic_stats->GetOrCacheFunctionName(function);
  ic_info.script_offset = code_offset;
  int source_pos = code.SourcePosition(cage_base, code_offset);
  Object maybe_script = shared.script(cage_base);
  if (maybe_script.IsScript(cage_base)) {
    Script script = Script::cast(maybe_script);
    Script::PositionInfo info;
    script.GetPositionInfo(source_pos, &info);
    // PositionInfo is 0-based; IC stats use 1-based line/column numbers.
    ic_info.line_num = info.line + 1;
    ic_info.column_num = info.column + 1;
    ic_info.script_name = ic_stats->GetOrCacheScriptName(script);
  }
}
// Reads the tagged value stored in the frame's parameter slot |index|.
Object CommonFrameWithJSLinkage::GetParameter(int index) const {
  const Address slot = GetParameterSlot(index);
  return Object(Memory<Address>(slot));
}
// Returns the declared (formal) parameter count of the frame's function,
// excluding the receiver.
int CommonFrameWithJSLinkage::ComputeParametersCount() const {
  // Only safe outside GC and for non-profiler iterators, since it reads
  // through the function object.
  DCHECK(!iterator_->IsStackFrameIteratorForProfiler() &&
         isolate()->heap()->gc_state() == Heap::NOT_IN_GC);
  return function().shared().internal_formal_parameter_count_without_receiver();
}
// Number of arguments actually passed at the call site, excluding the
// receiver slot(s).
int JavaScriptFrame::GetActualArgumentCount() const {
  const intptr_t raw_argc =
      Memory<intptr_t>(fp() + StandardFrameConstants::kArgCOffset);
  return static_cast<int>(raw_argc) - kJSArgcReceiverSlots;
}
// Materializes the frame's actual parameters into a FixedArray. Returns the
// shared empty array unless detailed stack traces are enabled.
Handle<FixedArray> CommonFrameWithJSLinkage::GetParameters() const {
  if (V8_LIKELY(!v8_flags.detailed_error_stack_trace)) {
    return isolate()->factory()->empty_fixed_array();
  }
  const int count = ComputeParametersCount();
  Handle<FixedArray> result = isolate()->factory()->NewFixedArray(count);
  for (int slot = 0; slot < count; ++slot) {
    result->set(slot, GetParameter(slot));
  }
  return result;
}
// Loads the JSFunction stored at the frame's function slot.
JSFunction JavaScriptBuiltinContinuationFrame::function() const {
  Object raw(base::Memory<Address>(
      fp() + BuiltinContinuationFrameConstants::kFunctionOffset));
  return JSFunction::cast(raw);
}
// Reads the argument count spilled into the continuation frame (a Smi) and
// subtracts the receiver slot(s).
int JavaScriptBuiltinContinuationFrame::ComputeParametersCount() const {
  // Assert that the first allocatable register is also the argument count
  // register.
  DCHECK_EQ(RegisterConfiguration::Default()->GetAllocatableGeneralCode(0),
            kJavaScriptCallArgCountRegister.code());
  Object argc_object(
      Memory<Address>(fp() + BuiltinContinuationFrameConstants::kArgCOffset));
  return Smi::ToInt(argc_object) - kJSArgcReceiverSlots;
}
intptr_t JavaScriptBuiltinContinuationFrame::GetSPToFPDelta() const {
Address height_slot =
fp() + BuiltinContinuationFrameConstants::kFrameSPtoFPDeltaAtDeoptimize;
intptr_t height = Smi::ToInt(Smi(Memory<Address>(height_slot)));
return height;
}
// Loads the builtin context stored in the continuation frame.
Object JavaScriptBuiltinContinuationFrame::context() const {
  const Address slot =
      fp() + BuiltinContinuationFrameConstants::kBuiltinContextOffset;
  return Object(Memory<Address>(slot));
}
// Stores |exception| into the frame's dedicated exception argument slot
// (the last pushed argument above the fixed frame).
void JavaScriptBuiltinContinuationWithCatchFrame::SetException(
    Object exception) {
  int argc = ComputeParametersCount();
  Address exception_argument_slot =
      fp() + BuiltinContinuationFrameConstants::kFixedFrameSizeAboveFp +
      (argc - 1) * kSystemPointerSize;
  // Only allow setting exception if previous value was the hole.
  CHECK_EQ(ReadOnlyRoots(isolate()).the_hole_value(),
           Object(Memory<Address>(exception_argument_slot)));
  Memory<Address>(exception_argument_slot) = exception.ptr();
}
// Captures a snapshot of a JavaScript frame (receiver, function, code and
// offset, constructor-ness, parameters) in handles owned by |isolate|.
FrameSummary::JavaScriptFrameSummary::JavaScriptFrameSummary(
    Isolate* isolate, Object receiver, JSFunction function,
    AbstractCode abstract_code, int code_offset, bool is_constructor,
    FixedArray parameters)
    : FrameSummaryBase(isolate, FrameSummary::JAVA_SCRIPT),
      receiver_(receiver, isolate),
      function_(function, isolate),
      abstract_code_(abstract_code, isolate),
      code_offset_(code_offset),
      is_constructor_(is_constructor),
      parameters_(parameters, isolate) {
  // Summaries always refer to unoptimized (bytecode/baseline) code.
  DCHECK(!CodeKindIsOptimizedJSFunction(abstract_code.kind(isolate)));
}
// Forwards to the JS variant; other summary kinds always have positions.
void FrameSummary::EnsureSourcePositionsAvailable() {
  if (!IsJavaScript()) return;
  java_script_summary_.EnsureSourcePositionsAvailable();
}
// Non-JS summaries always report available source positions.
bool FrameSummary::AreSourcePositionsAvailable() const {
  if (!IsJavaScript()) return true;
  return java_script_summary_.AreSourcePositionsAvailable();
}
// Forces (lazy) source position tables for this frame's function to exist.
void FrameSummary::JavaScriptFrameSummary::EnsureSourcePositionsAvailable() {
  Handle<SharedFunctionInfo> shared(function()->shared(), isolate());
  SharedFunctionInfo::EnsureSourcePositionsAvailable(isolate(), shared);
}
// True unless lazy source positions are enabled and the bytecode array has
// not yet been given a source position table.
bool FrameSummary::JavaScriptFrameSummary::AreSourcePositionsAvailable() const {
  return !v8_flags.enable_lazy_source_positions ||
         function()
             ->shared()
             .GetBytecodeArray(isolate())
             .HasSourcePositionTable();
}
bool FrameSummary::JavaScriptFrameSummary::is_subject_to_debugging() const {
return function()->shared().IsSubjectToDebugging();
}
// Source position corresponding to the recorded code offset.
int FrameSummary::JavaScriptFrameSummary::SourcePosition() const {
  const int offset = code_offset();
  return abstract_code()->SourcePosition(isolate(), offset);
}
// Statement-level source position for the recorded code offset.
int FrameSummary::JavaScriptFrameSummary::SourceStatementPosition() const {
  const int offset = code_offset();
  return abstract_code()->SourceStatementPosition(isolate(), offset);
}
// The script object attached to the function's SharedFunctionInfo.
Handle<Object> FrameSummary::JavaScriptFrameSummary::script() const {
  Object raw_script = function_->shared().script();
  return handle(raw_script, isolate());
}
// The native context the summarized function was created in.
Handle<Context> FrameSummary::JavaScriptFrameSummary::native_context() const {
  Context context = function_->native_context();
  return handle(context, isolate());
}
// Builds a StackFrameInfo for this frame. Eval frames without a name get
// the synthetic "eval" name; the function-entry sentinel offset is resolved
// eagerly to a source position.
Handle<StackFrameInfo>
FrameSummary::JavaScriptFrameSummary::CreateStackFrameInfo() const {
  Handle<SharedFunctionInfo> shared(function_->shared(), isolate());
  Handle<Script> script(Script::cast(shared->script()), isolate());
  Handle<String> function_name = JSFunction::GetDebugName(function_);
  if (function_name->length() == 0 &&
      script->compilation_type() == Script::CompilationType::kEval) {
    function_name = isolate()->factory()->eval_string();
  }
  int bytecode_offset = code_offset();
  if (bytecode_offset == kFunctionEntryBytecodeOffset) {
    // For the special function entry bytecode offset (-1), which signals
    // that the stack trace was captured while the function entry was
    // executing (i.e. during the interrupt check), we cannot store this
    // sentinel in the bit field, so we just eagerly lookup the source
    // position within the script.
    SharedFunctionInfo::EnsureSourcePositionsAvailable(isolate(), shared);
    int source_position =
        abstract_code()->SourcePosition(isolate(), bytecode_offset);
    return isolate()->factory()->NewStackFrameInfo(
        script, source_position, function_name, is_constructor());
  }
  return isolate()->factory()->NewStackFrameInfo(
      shared, bytecode_offset, function_name, is_constructor());
}
#if V8_ENABLE_WEBASSEMBLY
// Snapshot of a Wasm frame: instance, code object, wire-byte offset,
// function index, and whether execution sits at a ToNumber conversion.
FrameSummary::WasmFrameSummary::WasmFrameSummary(
    Isolate* isolate, Handle<WasmInstanceObject> instance, wasm::WasmCode* code,
    int byte_offset, int function_index, bool at_to_number_conversion)
    : FrameSummaryBase(isolate, WASM),
      wasm_instance_(instance),
      at_to_number_conversion_(at_to_number_conversion),
      code_(code),
      byte_offset_(byte_offset),
      function_index_(function_index) {}
// Wasm frames have no JS receiver; report the global proxy instead.
Handle<Object> FrameSummary::WasmFrameSummary::receiver() const {
  Isolate* owning_isolate = wasm_instance_->GetIsolate();
  return owning_isolate->global_proxy();
}
// Index of the wasm function within its module.
uint32_t FrameSummary::WasmFrameSummary::function_index() const {
  return function_index_;
}
// Maps the recorded code offset (and possible ToNumber-conversion state)
// to a wire-byte source position within the module.
int FrameSummary::WasmFrameSummary::SourcePosition() const {
  const wasm::WasmModule* module = wasm_instance()->module_object().module();
  return GetSourcePosition(module, function_index(), code_offset(),
                           at_to_number_conversion());
}
// The script backing this wasm instance's module object.
Handle<Script> FrameSummary::WasmFrameSummary::script() const {
  Script wasm_script = wasm_instance()->module_object().script();
  return handle(wasm_script, wasm_instance()->GetIsolate());
}
// The native context associated with the wasm instance.
Handle<Context> FrameSummary::WasmFrameSummary::native_context() const {
  Context context = wasm_instance()->native_context();
  return handle(context, isolate());
}
// Builds a StackFrameInfo using the wasm function's debug name; wasm frames
// are never constructor calls (last argument false).
Handle<StackFrameInfo> FrameSummary::WasmFrameSummary::CreateStackFrameInfo()
    const {
  Handle<String> function_name =
      GetWasmFunctionDebugName(isolate(), wasm_instance(), function_index());
  return isolate()->factory()->NewStackFrameInfo(script(), SourcePosition(),
                                                 function_name, false);
}
// Snapshot of a wasm function inlined into JS-compiled code: instance,
// function index, and the wire-byte offset of the current operation.
FrameSummary::WasmInlinedFrameSummary::WasmInlinedFrameSummary(
    Isolate* isolate, Handle<WasmInstanceObject> instance, int function_index,
    int op_wire_bytes_offset)
    : FrameSummaryBase(isolate, WASM_INLINED),
      wasm_instance_(instance),
      function_index_(function_index),
      op_wire_bytes_offset_(op_wire_bytes_offset) {}
// Inlined wasm frames have no JS receiver; report the global proxy.
Handle<Object> FrameSummary::WasmInlinedFrameSummary::receiver() const {
  Isolate* owning_isolate = wasm_instance_->GetIsolate();
  return owning_isolate->global_proxy();
}
// Index of the inlined wasm function within its module.
uint32_t FrameSummary::WasmInlinedFrameSummary::function_index() const {
  return function_index_;
}
// Maps the recorded wire-byte offset to a source position; inlined frames
// are never at a ToNumber conversion (last argument false).
int FrameSummary::WasmInlinedFrameSummary::SourcePosition() const {
  const wasm::WasmModule* module = wasm_instance()->module_object().module();
  return GetSourcePosition(module, function_index(), code_offset(), false);
}
// The script backing the wasm instance's module object.
Handle<Script> FrameSummary::WasmInlinedFrameSummary::script() const {
  Script wasm_script = wasm_instance()->module_object().script();
  return handle(wasm_script, wasm_instance()->GetIsolate());
}
// The native context associated with the wasm instance.
Handle<Context> FrameSummary::WasmInlinedFrameSummary::native_context() const {
  Context context = wasm_instance()->native_context();
  return handle(context, isolate());
}
// Builds a StackFrameInfo using the wasm function's debug name; never a
// constructor call (last argument false).
Handle<StackFrameInfo>
FrameSummary::WasmInlinedFrameSummary::CreateStackFrameInfo() const {
  Handle<String> function_name =
      GetWasmFunctionDebugName(isolate(), wasm_instance(), function_index());
  return isolate()->factory()->NewStackFrameInfo(script(), SourcePosition(),
                                                 function_name, false);
}
// Summary for a builtin frame; only the builtin id needs to be captured.
FrameSummary::BuiltinFrameSummary::BuiltinFrameSummary(Isolate* isolate,
                                                       Builtin builtin)
    : FrameSummaryBase(isolate, FrameSummary::BUILTIN), builtin_(builtin) {}
// Builtin frames expose no receiver; report undefined.
Handle<Object> FrameSummary::BuiltinFrameSummary::receiver() const {
  Factory* factory = isolate()->factory();
  return factory->undefined_value();
}
// Builtins have no backing script; report undefined.
Handle<Object> FrameSummary::BuiltinFrameSummary::script() const {
  Factory* factory = isolate()->factory();
  return factory->undefined_value();
}
// Builtin frames run in the isolate's current native context.
Handle<Context> FrameSummary::BuiltinFrameSummary::native_context() const {
  return isolate()->native_context();
}
// Builds a StackFrameInfo whose name is the builtin's stack-trace name and
// whose script is undefined (see script() above).
Handle<StackFrameInfo> FrameSummary::BuiltinFrameSummary::CreateStackFrameInfo()
    const {
  Handle<String> name_str = isolate()->factory()->NewStringFromAsciiChecked(
      Builtins::NameForStackTrace(builtin_));
  return isolate()->factory()->NewStackFrameInfo(
      Handle<HeapObject>::cast(script()), SourcePosition(), name_str, false);
}
#endif // V8_ENABLE_WEBASSEMBLY
// FrameSummary is a tagged union; manually invoke the destructor of the
// variant member that is active according to base_.kind().
FrameSummary::~FrameSummary() {
#define FRAME_SUMMARY_DESTR(kind, type, field, desc) \
  case kind:                                         \
    field.~type();                                   \
    break;
  switch (base_.kind()) {
    FRAME_SUMMARY_VARIANTS(FRAME_SUMMARY_DESTR)
    default:
      UNREACHABLE();
  }
#undef FRAME_SUMMARY_DESTR
}
// Returns the top-most (innermost) summary of |frame|.
FrameSummary FrameSummary::GetTop(const CommonFrame* frame) {
  std::vector<FrameSummary> summaries;
  frame->Summarize(&summaries);
  DCHECK_LT(0, summaries.size());
  return summaries.back();
}
// Returns the bottom-most (outermost) summary, i.e. index 0.
FrameSummary FrameSummary::GetBottom(const CommonFrame* frame) {
  constexpr int kBottomIndex = 0;
  return Get(frame, kBottomIndex);
}
// Returns the only summary of |frame|; asserts there is exactly one.
FrameSummary FrameSummary::GetSingle(const CommonFrame* frame) {
  std::vector<FrameSummary> summaries;
  frame->Summarize(&summaries);
  DCHECK_EQ(1, summaries.size());
  return summaries.front();
}
// Returns the |index|-th summary of |frame| (0 = outermost).
FrameSummary FrameSummary::Get(const CommonFrame* frame, int index) {
  DCHECK_LE(0, index);
  std::vector<FrameSummary> summaries;
  frame->Summarize(&summaries);
  DCHECK_GT(summaries.size(), index);
  return summaries[index];
}
#if V8_ENABLE_WEBASSEMBLY
// Generates a FrameSummary accessor that dispatches to the variant member
// currently active in the tagged union (see FRAME_SUMMARY_VARIANTS).
#define FRAME_SUMMARY_DISPATCH(ret, name)   \
  ret FrameSummary::name() const {          \
    switch (base_.kind()) {                 \
      case JAVA_SCRIPT:                     \
        return java_script_summary_.name(); \
      case WASM:                            \
        return wasm_summary_.name();        \
      case WASM_INLINED:                    \
        return wasm_inlined_summary_.name(); \
      case BUILTIN:                         \
        return builtin_summary_.name();     \
      default:                              \
        UNREACHABLE();                      \
    }                                       \
  }
#else
// Without wasm support only the JavaScript variant exists, so no switch is
// needed.
#define FRAME_SUMMARY_DISPATCH(ret, name) \
  ret FrameSummary::name() const {        \
    DCHECK_EQ(JAVA_SCRIPT, base_.kind()); \
    return java_script_summary_.name();   \
  }
#endif  // V8_ENABLE_WEBASSEMBLY
// Instantiate the dispatching accessors for every shared summary method.
FRAME_SUMMARY_DISPATCH(Handle<Object>, receiver)
FRAME_SUMMARY_DISPATCH(int, code_offset)
FRAME_SUMMARY_DISPATCH(bool, is_constructor)
FRAME_SUMMARY_DISPATCH(bool, is_subject_to_debugging)
FRAME_SUMMARY_DISPATCH(Handle<Object>, script)
FRAME_SUMMARY_DISPATCH(int, SourcePosition)
FRAME_SUMMARY_DISPATCH(int, SourceStatementPosition)
FRAME_SUMMARY_DISPATCH(Handle<Context>, native_context)
FRAME_SUMMARY_DISPATCH(Handle<StackFrameInfo>, CreateStackFrameInfo)
#undef FRAME_SUMMARY_DISPATCH
// Produces one FrameSummary per (possibly inlined) frame represented by this
// optimized frame, using the deoptimization translation. Falls back to the
// plain JS-frame summary for builtins and for maglev function entry, which
// carries no lazy-deopt information.
void OptimizedFrame::Summarize(std::vector<FrameSummary>* frames) const {
  DCHECK(frames->empty());
  DCHECK(is_optimized());
  // Delegate to JS frame in absence of deoptimization info.
  // TODO(turbofan): Revisit once we support deoptimization across the board.
  Handle<GcSafeCode> code(GcSafeLookupCode(), isolate());
  if (code->kind() == CodeKind::BUILTIN) {
    return JavaScriptFrame::Summarize(frames);
  }
  int deopt_index = SafepointEntry::kNoDeoptIndex;
  DeoptimizationData const data = GetDeoptimizationData(&deopt_index);
  if (deopt_index == SafepointEntry::kNoDeoptIndex) {
    // Hack: For maglevved function entry, we don't emit lazy deopt information,
    // so create an extra special summary here.
    //
    // TODO(leszeks): Remove this hack, by having a maglev-specific frame
    // summary which is a bit more aware of maglev behaviour and can e.g. handle
    // more compact safepointed frame information for both function entry and
    // loop stack checks.
    if (code->is_maglevved()) {
      DCHECK(frames->empty());
      Handle<AbstractCode> abstract_code(
          AbstractCode::cast(function().shared().GetBytecodeArray(isolate())),
          isolate());
      Handle<FixedArray> params = GetParameters();
      FrameSummary::JavaScriptFrameSummary summary(
          isolate(), receiver(), function(), *abstract_code,
          kFunctionEntryBytecodeOffset, IsConstructor(), *params);
      frames->push_back(summary);
      return;
    }
    CHECK(data.is_null());
    FATAL("Missing deoptimization information for OptimizedFrame::Summarize.");
  }
  // Prepare iteration over translation. We must not materialize values here
  // because we do not deoptimize the function.
  TranslatedState translated(this);
  translated.Prepare(fp());
  // We create the summary in reverse order because the frames
  // in the deoptimization translation are ordered bottom-to-top.
  bool is_constructor = IsConstructor();
  for (auto it = translated.begin(); it != translated.end(); it++) {
    if (it->kind() == TranslatedFrame::kUnoptimizedFunction ||
        it->kind() == TranslatedFrame::kJavaScriptBuiltinContinuation ||
        it->kind() ==
            TranslatedFrame::kJavaScriptBuiltinContinuationWithCatch) {
      Handle<SharedFunctionInfo> shared_info = it->shared_info();
      // The translation commands are ordered and the function is always
      // at the first position, and the receiver is next.
      TranslatedFrame::iterator translated_values = it->begin();
      // Get the correct function in the optimized frame.
      CHECK(!translated_values->IsMaterializedObject());
      Handle<JSFunction> function =
          Handle<JSFunction>::cast(translated_values->GetValue());
      translated_values++;
      // Get the correct receiver in the optimized frame.
      CHECK(!translated_values->IsMaterializedObject());
      Handle<Object> receiver = translated_values->GetValue();
      translated_values++;
      // Determine the underlying code object and the position within it from
      // the translation corresponding to the frame type in question.
      Handle<AbstractCode> abstract_code;
      unsigned code_offset;
      if (it->kind() == TranslatedFrame::kJavaScriptBuiltinContinuation ||
          it->kind() ==
              TranslatedFrame::kJavaScriptBuiltinContinuationWithCatch) {
        code_offset = 0;
        abstract_code =
            Handle<AbstractCode>::cast(isolate()->builtins()->code_handle(
                Builtins::GetBuiltinFromBytecodeOffset(it->bytecode_offset())));
      } else {
        DCHECK_EQ(it->kind(), TranslatedFrame::kUnoptimizedFunction);
        code_offset = it->bytecode_offset().ToInt();
        abstract_code =
            handle(shared_info->abstract_code(isolate()), isolate());
      }
      // Append full summary of the encountered JS frame.
      Handle<FixedArray> params = GetParameters();
      FrameSummary::JavaScriptFrameSummary summary(
          isolate(), *receiver, *function, *abstract_code, code_offset,
          is_constructor, *params);
      frames->push_back(summary);
      is_constructor = false;
    } else if (it->kind() == TranslatedFrame::kConstructStub) {
      // The next encountered JS frame will be marked as a constructor call.
      DCHECK(!is_constructor);
      is_constructor = true;
#if V8_ENABLE_WEBASSEMBLY
    } else if (it->kind() == TranslatedFrame::kWasmInlinedIntoJS) {
      Handle<SharedFunctionInfo> shared_info = it->shared_info();
      DCHECK_NE(isolate()->heap()->gc_state(), Heap::MARK_COMPACT);
      Handle<Code> js_code = handle(code->UnsafeCastToCode(), isolate());
      SourcePositionTableIterator iter(js_code->source_position_table());
      const int offset = code->GetOffsetFromInstructionStart(isolate(), pc());
      SourcePosition pos;
      // Search for the source position before or at the current pc.
      while (!iter.done() && iter.code_offset() < offset) {
        pos = iter.source_position();
        iter.Advance();
      }
      if (pos.IsKnown()) {
        DCHECK_EQ(*shared_info,
                  DeoptimizationData::cast(js_code->deoptimization_data())
                      .GetInlinedFunction(pos.InliningId()));
        DCHECK(shared_info->HasWasmExportedFunctionData());
        WasmExportedFunctionData function_data =
            shared_info->wasm_exported_function_data();
        Handle<WasmInstanceObject> instance =
            handle(function_data.instance(), isolate());
        int func_index = function_data.function_index();
        FrameSummary::WasmInlinedFrameSummary summary(
            isolate(), instance, func_index, pos.ScriptOffset());
        frames->push_back(summary);
      } else {
        DCHECK(false && "Missing source position for inlined wasm frame");
      }
#endif  // V8_ENABLE_WEBASSEMBLY
    }
  }
}
// Looks up a return-address-based handler table entry for the current pc.
// Returns the handler offset, or -1 if there is no handler table entry.
int OptimizedFrame::LookupExceptionHandlerInTable(
    int* data, HandlerTable::CatchPrediction* prediction) {
  // We cannot perform exception prediction on optimized code. Instead, we need
  // to use FrameSummary to find the corresponding code offset in unoptimized
  // code to perform prediction there.
  DCHECK_NULL(prediction);
  Code code = LookupCode();
  HandlerTable table(code);
  if (table.NumberOfReturnEntries() == 0) return -1;
  int pc_offset = code.GetOffsetFromInstructionStart(isolate(), pc());
  DCHECK_NULL(data);  // Data is not used and will not return a value.
  // When the return pc has been replaced by a trampoline there won't be
  // a handler for this trampoline. Thus we need to use the return pc that
  // _used to be_ on the stack to get the right ExceptionHandler.
  if (CodeKindCanDeoptimize(code.kind()) && code.marked_for_deoptimization()) {
    pc_offset = FindReturnPCForTrampoline(code, pc_offset);
  }
  return table.LookupReturn(pc_offset);
}
// Recovers the original return pc that the deopt trampoline replaced, using
// the maglev safepoint table.
int MaglevFrame::FindReturnPCForTrampoline(Code code, int trampoline_pc) const {
  DCHECK_EQ(code.kind(), CodeKind::MAGLEV);
  DCHECK(code.marked_for_deoptimization());
  MaglevSafepointTable table(isolate(), pc(), code);
  return table.find_return_pc(trampoline_pc);
}
// Recovers the original return pc that the deopt trampoline replaced, using
// the turbofan safepoint table.
int TurbofanFrame::FindReturnPCForTrampoline(Code code,
                                             int trampoline_pc) const {
  DCHECK_EQ(code.kind(), CodeKind::TURBOFAN);
  DCHECK(code.marked_for_deoptimization());
  SafepointTable table(isolate(), pc(), code);
  return table.find_return_pc(trampoline_pc);
}
// Finds the deoptimization data and deopt index for the current pc via the
// safepoint table. Returns an empty DeoptimizationData and sets *deopt_index
// to kNoDeoptIndex when the safepoint carries no deopt information.
DeoptimizationData OptimizedFrame::GetDeoptimizationData(
    int* deopt_index) const {
  DCHECK(is_optimized());
  JSFunction opt_function = function();
  Code code = opt_function.code();
  // The code object may have been replaced by lazy deoptimization. Fall back
  // to a slow search in this case to find the original optimized code object.
  if (!code.contains(isolate(), pc())) {
    code = isolate()
               ->heap()
               ->GcSafeFindCodeForInnerPointer(pc())
               .UnsafeCastToCode();
  }
  DCHECK(!code.is_null());
  DCHECK(CodeKindCanDeoptimize(code.kind()));
  // Maglev and turbofan code use different safepoint table formats.
  if (code.is_maglevved()) {
    MaglevSafepointEntry safepoint_entry =
        code.GetMaglevSafepointEntry(isolate(), pc());
    if (safepoint_entry.has_deoptimization_index()) {
      *deopt_index = safepoint_entry.deoptimization_index();
      return DeoptimizationData::cast(code.deoptimization_data());
    }
  } else {
    SafepointEntry safepoint_entry = code.GetSafepointEntry(isolate(), pc());
    if (safepoint_entry.has_deoptimization_index()) {
      *deopt_index = safepoint_entry.deoptimization_index();
      return DeoptimizationData::cast(code.deoptimization_data());
    }
  }
  *deopt_index = SafepointEntry::kNoDeoptIndex;
  return DeoptimizationData();
}
// Collects the SharedFunctionInfos of all (possibly inlined) JS frames
// represented by this optimized frame by walking the deoptimization
// translation, innermost last.
void OptimizedFrame::GetFunctions(
    std::vector<SharedFunctionInfo>* functions) const {
  DCHECK(functions->empty());
  DCHECK(is_optimized());
  // Delegate to JS frame in absence of turbofan deoptimization.
  // TODO(turbofan): Revisit once we support deoptimization across the board.
  Code code = LookupCode();
  if (code.kind() == CodeKind::BUILTIN) {
    return JavaScriptFrame::GetFunctions(functions);
  }
  DisallowGarbageCollection no_gc;
  int deopt_index = SafepointEntry::kNoDeoptIndex;
  DeoptimizationData const data = GetDeoptimizationData(&deopt_index);
  DCHECK(!data.is_null());
  DCHECK_NE(SafepointEntry::kNoDeoptIndex, deopt_index);
  DeoptimizationLiteralArray const literal_array = data.LiteralArray();
  TranslationArrayIterator it(data.TranslationByteArray(),
                              data.TranslationIndex(deopt_index).value());
  TranslationOpcode opcode = it.NextOpcode();
  DCHECK(TranslationOpcodeIsBegin(opcode));
  it.NextOperand();  // Skip lookback distance.
  it.NextOperand();  // Skip frame count.
  int jsframe_count = it.NextOperand();
  // We insert the frames in reverse order because the frames
  // in the deoptimization translation are ordered bottom-to-top.
  while (jsframe_count != 0) {
    opcode = it.NextOpcode();
    if (opcode == TranslationOpcode::INTERPRETED_FRAME_WITH_RETURN ||
        opcode == TranslationOpcode::INTERPRETED_FRAME_WITHOUT_RETURN ||
        opcode == TranslationOpcode::JAVA_SCRIPT_BUILTIN_CONTINUATION_FRAME ||
        opcode == TranslationOpcode::
                      JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH_FRAME) {
      it.NextOperand();  // Skip bailout id.
      jsframe_count--;
      // The second operand of the frame points to the function.
      Object shared = literal_array.get(it.NextOperand());
      functions->push_back(SharedFunctionInfo::cast(shared));
      // Skip over remaining operands to advance to the next opcode.
      it.SkipOperands(TranslationOpcodeOperandCount(opcode) - 2);
    } else {
      // Skip over operands to advance to the next opcode.
      it.SkipOperands(TranslationOpcodeOperandCount(opcode));
    }
  }
}
// FP-relative offset of spill slot |slot_index| (slots grow downward from
// the caller SP).
int OptimizedFrame::StackSlotOffsetRelativeToFp(int slot_index) {
  const int slot_bytes = (slot_index + 1) * kSystemPointerSize;
  return StandardFrameConstants::kCallerSPOffset - slot_bytes;
}
int UnoptimizedFrame::position() const {
AbstractCode code = AbstractCode::cast(GetBytecodeArray());
int code_offset = GetBytecodeOffset();
return code.SourcePosition(isolate(), code_offset);
}
int UnoptimizedFrame::LookupExceptionHandlerInTable(
int* context_register, HandlerTable::CatchPrediction* prediction) {
HandlerTable table(GetBytecodeArray());
return table.LookupRange(GetBytecodeOffset(), context_register, prediction);
}
// Loads the BytecodeArray from its fixed expression-stack slot.
BytecodeArray UnoptimizedFrame::GetBytecodeArray() const {
  const int index = UnoptimizedFrameConstants::kBytecodeArrayExpressionIndex;
  // The expression index must line up with the fp-relative slot layout.
  DCHECK_EQ(UnoptimizedFrameConstants::kBytecodeArrayFromFp,
            UnoptimizedFrameConstants::kExpressionsOffset -
                index * kSystemPointerSize);
  return BytecodeArray::cast(GetExpression(index));
}
// Reads interpreter register |register_index| from the register file that
// lives on the expression stack.
Object UnoptimizedFrame::ReadInterpreterRegister(int register_index) const {
  const int index = UnoptimizedFrameConstants::kRegisterFileExpressionIndex;
  // The expression index must line up with the fp-relative slot layout.
  DCHECK_EQ(UnoptimizedFrameConstants::kRegisterFileFromFp,
            UnoptimizedFrameConstants::kExpressionsOffset -
                index * kSystemPointerSize);
  return GetExpression(index + register_index);
}
// An unoptimized frame corresponds to exactly one JS frame; produce its
// single summary.
void UnoptimizedFrame::Summarize(std::vector<FrameSummary>* functions) const {
  DCHECK(functions->empty());
  Handle<AbstractCode> abstract_code(AbstractCode::cast(GetBytecodeArray()),
                                     isolate());
  Handle<FixedArray> params = GetParameters();
  FrameSummary::JavaScriptFrameSummary summary(
      isolate(), receiver(), function(), *abstract_code, GetBytecodeOffset(),
      IsConstructor(), *params);
  functions->push_back(summary);
}
// Returns the current bytecode offset. The frame stores the offset as a Smi
// biased by the BytecodeArray header so that it doubles as an untagged
// pointer into the bytecode; un-bias it here.
int InterpretedFrame::GetBytecodeOffset() const {
  const int index = InterpreterFrameConstants::kBytecodeOffsetExpressionIndex;
  DCHECK_EQ(InterpreterFrameConstants::kBytecodeOffsetFromFp,
            InterpreterFrameConstants::kExpressionsOffset -
                index * kSystemPointerSize);
  int raw_offset = Smi::ToInt(GetExpression(index));
  return raw_offset - BytecodeArray::kHeaderSize + kHeapObjectTag;
}
// static
// Same as the instance method above, but reads directly from a raw frame
// pointer (used where no frame object is available).
int InterpretedFrame::GetBytecodeOffset(Address fp) {
  const int offset = InterpreterFrameConstants::kExpressionsOffset;
  const int index = InterpreterFrameConstants::kBytecodeOffsetExpressionIndex;
  DCHECK_EQ(InterpreterFrameConstants::kBytecodeOffsetFromFp,
            InterpreterFrameConstants::kExpressionsOffset -
                index * kSystemPointerSize);
  Address expression_offset = fp + offset - index * kSystemPointerSize;
  int raw_offset = Smi::ToInt(Object(Memory<Address>(expression_offset)));
  // Remove the BytecodeArray-header bias (see GetBytecodeOffset()).
  return raw_offset - BytecodeArray::kHeaderSize + kHeapObjectTag;
}
// Stores |new_offset| back into the frame, re-applying the
// BytecodeArray-header bias expected by the interpreter (inverse of
// GetBytecodeOffset()).
void InterpretedFrame::PatchBytecodeOffset(int new_offset) {
  const int index = InterpreterFrameConstants::kBytecodeOffsetExpressionIndex;
  DCHECK_EQ(InterpreterFrameConstants::kBytecodeOffsetFromFp,
            InterpreterFrameConstants::kExpressionsOffset -
                index * kSystemPointerSize);
  int raw_offset = BytecodeArray::kHeaderSize - kHeapObjectTag + new_offset;
  SetExpression(index, Smi::FromInt(raw_offset));
}
// Replaces the BytecodeArray in this frame (e.g. when the debugger swaps in
// instrumented bytecode).
void InterpretedFrame::PatchBytecodeArray(BytecodeArray bytecode_array) {
  const int index = InterpreterFrameConstants::kBytecodeArrayExpressionIndex;
  DCHECK_EQ(InterpreterFrameConstants::kBytecodeArrayFromFp,
            InterpreterFrameConstants::kExpressionsOffset -
                index * kSystemPointerSize);
  SetExpression(index, bytecode_array);
}
int BaselineFrame::GetBytecodeOffset() const {
Code code = LookupCode();
return code.GetBytecodeOffsetForBaselinePC(this->pc(), GetBytecodeArray());
}
intptr_t BaselineFrame::GetPCForBytecodeOffset(int bytecode_offset) const {
Code code = LookupCode();
return code.GetBaselineStartPCForBytecodeOffset(bytecode_offset,
GetBytecodeArray());
}
// Overwrites the context slot of this baseline frame with |value|.
void BaselineFrame::PatchContext(Context value) {
  const Address slot = fp() + BaselineFrameConstants::kContextOffset;
  base::Memory<Address>(slot) = value.ptr();
}
// Loads the JSFunction from the builtin frame's function slot.
JSFunction BuiltinFrame::function() const {
  Object raw(
      base::Memory<Address>(fp() + BuiltinFrameConstants::kFunctionOffset));
  return JSFunction::cast(raw);
}
int BuiltinFrame::ComputeParametersCount() const {
const int offset = BuiltinFrameConstants::kLengthOffset;
return Smi::ToInt(Object(base::Memory<Address>(fp() + offset))) -
kJSArgcReceiverSlots;
}
#if V8_ENABLE_WEBASSEMBLY
// Prints a human-readable description of this wasm frame: script name,
// function index/name (truncated to 64 chars), pc, and source position.
void WasmFrame::Print(StringStream* accumulator, PrintMode mode,
                      int index) const {
  PrintIndex(accumulator, mode, index);
  if (function_index() == wasm::kAnonymousFuncIndex) {
    accumulator->Add("Anonymous wasm wrapper [pc: %p]\n",
                     reinterpret_cast<void*>(pc()));
    return;
  }
  wasm::WasmCodeRefScope code_ref_scope;
  accumulator->Add(is_wasm_to_js() ? "Wasm-to-JS [" : "Wasm [");
  accumulator->PrintName(script().name());
  Address instruction_start = wasm_code()->instruction_start();
  base::Vector<const uint8_t> raw_func_name =
      module_object().GetRawFunctionName(function_index());
  const int kMaxPrintedFunctionName = 64;
  char func_name[kMaxPrintedFunctionName + 1];
  // Copy at most kMaxPrintedFunctionName bytes and NUL-terminate.
  int func_name_len = std::min(kMaxPrintedFunctionName, raw_func_name.length());
  memcpy(func_name, raw_func_name.begin(), func_name_len);
  func_name[func_name_len] = '\0';
  int pos = position();
  const wasm::WasmModule* module = wasm_instance().module_object().module();
  int func_index = function_index();
  int func_code_offset = module->functions[func_index].code.offset();
  accumulator->Add("], function #%u ('%s'), pc=%p (+0x%x), pos=%d (+%d)\n",
                   func_index, func_name, reinterpret_cast<void*>(pc()),
                   static_cast<int>(pc() - instruction_start), pos,
                   pos - func_code_offset);
  if (mode != OVERVIEW) accumulator->Add("\n");
}
// Looks up the wasm code object containing this frame's pc.
wasm::WasmCode* WasmFrame::wasm_code() const {
  wasm::WasmCodeManager* manager = wasm::GetWasmCodeManager();
  return manager->LookupCode(pc());
}
// Loads the WasmInstanceObject from the frame's instance slot.
WasmInstanceObject WasmFrame::wasm_instance() const {
  Object raw(
      Memory<Address>(fp() + WasmFrameConstants::kWasmInstanceOffset));
  return WasmInstanceObject::cast(raw);
}
// The NativeModule backing this frame's module object.
wasm::NativeModule* WasmFrame::native_module() const {
  WasmModuleObject module = module_object();
  return module.native_module();
}
// The module object of the instance this frame executes in.
WasmModuleObject WasmFrame::module_object() const {
  WasmInstanceObject instance = wasm_instance();
  return instance.module_object();
}
int WasmFrame::function_index() const {
wasm::WasmCodeRefScope code_ref_scope;
return wasm_code()->index();
}
// The script that owns this frame's wasm module.
Script WasmFrame::script() const {
  return module_object().script();
}
// Wire-byte source position of this frame's current code offset, taking a
// possible ToNumber conversion in the callee into account.
int WasmFrame::position() const {
  wasm::WasmCodeRefScope code_ref_scope;
  const wasm::WasmModule* module = wasm_instance().module_object().module();
  return GetSourcePosition(module, function_index(), generated_code_offset(),
                           at_to_number_conversion());
}
// Code offset of the last source-position entry at or before the pc.
int WasmFrame::generated_code_offset() const {
  wasm::WasmCode* code = wasm_code();
  const int pc_offset = static_cast<int>(pc() - code->instruction_start());
  return code->GetSourceOffsetBefore(pc_offset);
}
bool WasmFrame::is_inspectable() const {
wasm::WasmCodeRefScope code_ref_scope;
return wasm_code()->is_inspectable();
}
// Wasm frames report the instance's native context.
Object WasmFrame::context() const {
  return wasm_instance().native_context();
}
// Produces summaries for this wasm frame and any functions inlined at the
// current position, ordered caller-first.
void WasmFrame::Summarize(std::vector<FrameSummary>* functions) const {
  DCHECK(functions->empty());
  // The {WasmCode*} escapes this scope via the {FrameSummary}, which is fine,
  // since this code object is part of our stack.
  wasm::WasmCodeRefScope code_ref_scope;
  wasm::WasmCode* code = wasm_code();
  int offset = static_cast<int>(pc() - code->instruction_start());
  Handle<WasmInstanceObject> instance(wasm_instance(), isolate());
  // Push regular non-inlined summary.
  SourcePosition pos = code->GetSourcePositionBefore(offset);
  bool at_conversion = at_to_number_conversion();
  // Add summaries for each inlined function at the current location.
  while (pos.isInlined()) {
    // Use current pc offset as the code offset for inlined functions.
    // This is not fully correct but there isn't a real code offset of a stack
    // frame for an inlined function as the inlined function is not a true
    // function with a defined start and end in the generated code.
    //
    const auto [func_index, caller_pos] =
        code->GetInliningPosition(pos.InliningId());
    FrameSummary::WasmFrameSummary summary(isolate(), instance, code,
                                           pos.ScriptOffset(), func_index,
                                           at_conversion);
    functions->push_back(summary);
    pos = caller_pos;
    // Only the innermost frame can be at a ToNumber conversion.
    at_conversion = false;
  }
  int func_index = code->index();
  FrameSummary::WasmFrameSummary summary(
      isolate(), instance, code, pos.ScriptOffset(), func_index, at_conversion);
  functions->push_back(summary);
  // The caller has to be on top.
  std::reverse(functions->begin(), functions->end());
}
// True when execution in this frame is currently inside the ToNumber
// conversion performed by a Wasm-to-JS wrapper callee.
bool WasmFrame::at_to_number_conversion() const {
  // Check whether our callee is a WASM_TO_JS frame, and this frame is at the
  // ToNumber conversion call.
  wasm::WasmCode* code =
      callee_pc() != kNullAddress
          ? wasm::GetWasmCodeManager()->LookupCode(callee_pc())
          : nullptr;
  if (!code || code->kind() != wasm::WasmCode::kWasmToJsWrapper) return false;
  int offset = static_cast<int>(callee_pc() - code->instruction_start());
  int pos = code->GetSourceOffsetBefore(offset);
  // The imported call has position 0, ToNumber has position 1.
  // If there is no source position available, this is also not a ToNumber call.
  DCHECK(pos == wasm::kNoCodePosition || pos == 0 || pos == 1);
  return pos == 1;
}
// Looks up a handler for the current pc in the wasm code's handler table.
// Returns the handler offset, or -1 when there is no table or no entry.
int WasmFrame::LookupExceptionHandlerInTable() {
  wasm::WasmCode* code = wasm::GetWasmCodeManager()->LookupCode(pc());
  if (!code->IsAnonymous() && code->handler_table_size() > 0) {
    HandlerTable table(code);
    int pc_offset = static_cast<int>(pc() - code->instruction_start());
    return table.LookupReturn(pc_offset);
  }
  return -1;
}
void WasmDebugBreakFrame::Iterate(RootVisitor* v) const {
  // Visit all GP register spill slots that the safepoint table marks as
  // holding tagged values, so the GC can see (and update) them.
  DCHECK(caller_pc());
  wasm::WasmCode* code = wasm::GetWasmCodeManager()->LookupCode(caller_pc());
  DCHECK(code);
  SafepointTable table(code);
  SafepointEntry safepoint_entry = table.FindEntry(caller_pc());
  uint32_t tagged_register_indexes = safepoint_entry.tagged_register_indexes();

  while (tagged_register_indexes != 0) {
    int reg_code = base::bits::CountTrailingZeros(tagged_register_indexes);
    // Clear the bit with an unsigned one-bit: {1 << reg_code} on a signed
    // int would be undefined behavior for reg_code == 31 (pre-C++20).
    tagged_register_indexes &= ~(uint32_t{1} << reg_code);
    FullObjectSlot spill_slot(&Memory<Address>(
        fp() +
        WasmDebugBreakFrameConstants::GetPushedGpRegisterOffset(reg_code)));

    v->VisitRootPointer(Root::kStackRoots, nullptr, spill_slot);
  }
}
void WasmDebugBreakFrame::Print(StringStream* accumulator, PrintMode mode,
                                int index) const {
  // Print a one-line description of this frame; OVERVIEW mode omits the
  // trailing newline.
  PrintIndex(accumulator, mode, index);
  accumulator->Add("WasmDebugBreak");
  if (mode == OVERVIEW) return;
  accumulator->Add("\n");
}
WasmInstanceObject WasmToJsFrame::wasm_instance() const {
  // WasmToJsFrames hold the {WasmApiFunctionRef} object in the instance slot.
  // Load the instance from there.
  Object ref_obj(
      Memory<Address>(fp() + WasmFrameConstants::kWasmInstanceOffset));
  return WasmInstanceObject::cast(
      WasmApiFunctionRef::cast(ref_obj).instance());
}
void JsToWasmFrame::Iterate(RootVisitor* v) const {
DCHECK_EQ(GetContainingCode(isolate(), pc())->builtin_id(),
Builtin::kGenericJSToWasmWrapper);
// GenericJSToWasmWrapper stack layout
// ------+-----------------+----------------------
// | return addr |
// fp |- - - - - - - - -| -------------------|
// | fp | |
// fp-p |- - - - - - - - -| |
// | frame marker | | no GC scan
// fp-2p |- - - - - - - - -| |
// | scan_count | |
// fp-3p |- - - - - - - - -| -------------------|
// | .... | <- spill_slot_limit |
// | spill slots | | GC scan scan_count slots
// | .... | <- spill_slot_base--|
// |- - - - - - - - -| |
// The [fp + BuiltinFrameConstants::kGCScanSlotCount] on the stack is a value
// indicating how many values should be scanned from the top.
intptr_t scan_count = *reinterpret_cast<intptr_t*>(
fp() + BuiltinWasmWrapperConstants::kGCScanSlotCountOffset);
Full