[cleanup] Avoid {Object::operator->}
This CL was generated by an automatic clang AST rewriter using this
matcher expression:
callExpr(
callee(
cxxMethodDecl(
hasName("operator->"),
ofClass(isSameOrDerivedFrom("v8::internal::Object"))
)
),
argumentCountIs(1)
)
The "->" at the expression location was then rewritten to ".".
R=jkummerow@chromium.org
TBR=mstarzinger@chromium.org,verwaest@chromium.org,yangguo@chromium.org
Bug: v8:9183, v8:3770
No-Try: true
No-Tree-Checks: true
Change-Id: I0a7ecabdeafe51d0cf427f5280af0c7cab96869e
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1624209
Reviewed-by: Clemens Hammacher <clemensh@chromium.org>
Reviewed-by: Jakob Kummerow <jkummerow@chromium.org>
Reviewed-by: Toon Verwaest <verwaest@chromium.org>
Commit-Queue: Yang Guo <yangguo@chromium.org>
Commit-Queue: Clemens Hammacher <clemensh@chromium.org>
Cr-Commit-Position: refs/heads/master@{#61764}
diff --git a/src/address-map.cc b/src/address-map.cc
index db3f548..35c46fc 100644
--- a/src/address-map.cc
+++ b/src/address-map.cc
@@ -18,7 +18,7 @@
for (RootIndex root_index = RootIndex::kFirstStrongOrReadOnlyRoot;
root_index <= RootIndex::kLastStrongOrReadOnlyRoot; ++root_index) {
Object root = isolate->root(root_index);
- if (!root->IsHeapObject()) continue;
+ if (!root.IsHeapObject()) continue;
// Omit root entries that can be written after initialization. They must
// not be referenced through the root list in the snapshot.
// Since we map the raw address of an root item to its root list index, the
diff --git a/src/allocation-site-scopes-inl.h b/src/allocation-site-scopes-inl.h
index 6500e9e..937c4a5 100644
--- a/src/allocation-site-scopes-inl.h
+++ b/src/allocation-site-scopes-inl.h
@@ -39,7 +39,7 @@
}
bool AllocationSiteUsageContext::ShouldCreateMemento(Handle<JSObject> object) {
- if (activated_ && AllocationSite::CanTrack(object->map()->instance_type())) {
+ if (activated_ && AllocationSite::CanTrack(object->map().instance_type())) {
if (FLAG_allocation_site_pretenuring ||
AllocationSite::ShouldTrack(object->GetElementsKind())) {
if (FLAG_trace_creation_allocation_sites) {
diff --git a/src/allocation-site-scopes.h b/src/allocation-site-scopes.h
index 898dcf0..df6156b 100644
--- a/src/allocation-site-scopes.h
+++ b/src/allocation-site-scopes.h
@@ -30,7 +30,7 @@
protected:
void update_current_site(AllocationSite site) {
- *(current_.location()) = site->ptr();
+ *(current_.location()) = site.ptr();
}
inline void InitializeTraversal(Handle<AllocationSite> site);
diff --git a/src/api/api-arguments-inl.h b/src/api/api-arguments-inl.h
index 71966e1..05bb357 100644
--- a/src/api/api-arguments-inl.h
+++ b/src/api/api-arguments-inl.h
@@ -43,7 +43,7 @@
// Check the ReturnValue.
FullObjectSlot slot = slot_at(kReturnValueOffset);
// Nothing was set, return empty handle as per previous behaviour.
- if ((*slot)->IsTheHole(isolate)) return Handle<V>();
+ if ((*slot).IsTheHole(isolate)) return Handle<V>();
Handle<V> result = Handle<V>::cast(Handle<Object>(slot.location()));
result->VerifyApiCallResultType();
return result;
@@ -144,7 +144,7 @@
LOG(isolate, ApiObjectAccess("call", holder()));
RuntimeCallTimerScope timer(isolate, RuntimeCallCounterId::kFunctionCallback);
v8::FunctionCallback f =
- v8::ToCData<v8::FunctionCallback>(handler->callback());
+ v8::ToCData<v8::FunctionCallback>(handler.callback());
Handle<Object> receiver_check_unsupported;
if (isolate->debug_execution_mode() == DebugInfo::kSideEffects &&
!isolate->debug()->PerformSideEffectCheckForCallback(
diff --git a/src/api/api-arguments.cc b/src/api/api-arguments.cc
index 69561fd..51317fd 100644
--- a/src/api/api-arguments.cc
+++ b/src/api/api-arguments.cc
@@ -28,8 +28,8 @@
HeapObject the_hole = ReadOnlyRoots(isolate).the_hole_value();
slot_at(T::kReturnValueDefaultValueIndex).store(the_hole);
slot_at(T::kReturnValueIndex).store(the_hole);
- DCHECK((*slot_at(T::kHolderIndex))->IsHeapObject());
- DCHECK((*slot_at(T::kIsolateIndex))->IsSmi());
+ DCHECK((*slot_at(T::kHolderIndex)).IsHeapObject());
+ DCHECK((*slot_at(T::kIsolateIndex)).IsSmi());
}
FunctionCallbackArguments::FunctionCallbackArguments(
@@ -46,8 +46,8 @@
HeapObject the_hole = ReadOnlyRoots(isolate).the_hole_value();
slot_at(T::kReturnValueDefaultValueIndex).store(the_hole);
slot_at(T::kReturnValueIndex).store(the_hole);
- DCHECK((*slot_at(T::kHolderIndex))->IsHeapObject());
- DCHECK((*slot_at(T::kIsolateIndex))->IsSmi());
+ DCHECK((*slot_at(T::kHolderIndex)).IsHeapObject());
+ DCHECK((*slot_at(T::kIsolateIndex)).IsSmi());
}
} // namespace internal
diff --git a/src/api/api-inl.h b/src/api/api-inl.h
index 202c097..4d4dc40 100644
--- a/src/api/api-inl.h
+++ b/src/api/api-inl.h
@@ -18,13 +18,13 @@
STATIC_ASSERT(sizeof(T) == sizeof(v8::internal::Address));
if (obj == v8::internal::Smi::kZero) return nullptr;
return reinterpret_cast<T>(
- v8::internal::Foreign::cast(obj)->foreign_address());
+ v8::internal::Foreign::cast(obj).foreign_address());
}
template <>
inline v8::internal::Address ToCData(v8::internal::Object obj) {
if (obj == v8::internal::Smi::kZero) return v8::internal::kNullAddress;
- return v8::internal::Foreign::cast(obj)->foreign_address();
+ return v8::internal::Foreign::cast(obj).foreign_address();
}
template <typename T>
@@ -117,7 +117,7 @@
DCHECK(that == nullptr || \
v8::internal::Object( \
*reinterpret_cast<const v8::internal::Address*>(that)) \
- ->Is##To()); \
+ .Is##To()); \
return v8::internal::Handle<v8::internal::To>( \
reinterpret_cast<v8::internal::Address*>( \
const_cast<v8::From*>(that))); \
diff --git a/src/api/api-natives.cc b/src/api/api-natives.cc
index 2f2ccf2..53b9dc6 100644
--- a/src/api/api-natives.cc
+++ b/src/api/api-natives.cc
@@ -65,12 +65,12 @@
Handle<Object> getter, Handle<Object> setter, PropertyAttributes attributes,
bool force_instantiate) {
DCHECK(!getter->IsFunctionTemplateInfo() ||
- !FunctionTemplateInfo::cast(*getter)->do_not_cache());
+ !FunctionTemplateInfo::cast(*getter).do_not_cache());
DCHECK(!setter->IsFunctionTemplateInfo() ||
- !FunctionTemplateInfo::cast(*setter)->do_not_cache());
+ !FunctionTemplateInfo::cast(*setter).do_not_cache());
if (getter->IsFunctionTemplateInfo()) {
if (force_instantiate ||
- FunctionTemplateInfo::cast(*getter)->BreakAtEntry()) {
+ FunctionTemplateInfo::cast(*getter).BreakAtEntry()) {
ASSIGN_RETURN_ON_EXCEPTION(
isolate, getter,
InstantiateFunction(isolate,
@@ -80,7 +80,7 @@
}
if (setter->IsFunctionTemplateInfo()) {
if (force_instantiate ||
- FunctionTemplateInfo::cast(*setter)->BreakAtEntry()) {
+ FunctionTemplateInfo::cast(*setter).BreakAtEntry()) {
ASSIGN_RETURN_ON_EXCEPTION(
isolate, setter,
InstantiateFunction(isolate,
@@ -145,7 +145,7 @@
public:
AccessCheckDisableScope(Isolate* isolate, Handle<JSObject> obj)
: isolate_(isolate),
- disabled_(obj->map()->is_access_check_needed()),
+ disabled_(obj->map().is_access_check_needed()),
obj_(obj) {
if (disabled_) {
DisableAccessChecks(isolate_, obj_);
@@ -188,11 +188,11 @@
int max_number_of_properties = 0;
TemplateInfoT info = *data;
while (!info.is_null()) {
- Object props = info->property_accessors();
- if (!props->IsUndefined(isolate)) {
- max_number_of_properties += TemplateList::cast(props)->length();
+ Object props = info.property_accessors();
+ if (!props.IsUndefined(isolate)) {
+ max_number_of_properties += TemplateList::cast(props).length();
}
- info = info->GetParent(isolate);
+ info = info.GetParent(isolate);
}
if (max_number_of_properties > 0) {
@@ -205,7 +205,7 @@
temp = handle(temp->GetParent(isolate), isolate)) {
// Accumulate accessors.
Object maybe_properties = temp->property_accessors();
- if (!maybe_properties->IsUndefined(isolate)) {
+ if (!maybe_properties.IsUndefined(isolate)) {
valid_descriptors = AccessorInfo::AppendUnique(
isolate, handle(maybe_properties, isolate), array,
valid_descriptors);
@@ -223,7 +223,7 @@
}
Object maybe_property_list = data->property_list();
- if (maybe_property_list->IsUndefined(isolate)) return obj;
+ if (maybe_property_list.IsUndefined(isolate)) return obj;
Handle<TemplateList> properties(TemplateList::cast(maybe_property_list),
isolate);
if (properties->length() == 0) return obj;
@@ -232,7 +232,7 @@
for (int c = 0; c < data->number_of_properties(); c++) {
auto name = handle(Name::cast(properties->get(i++)), isolate);
Object bit = properties->get(i++);
- if (bit->IsSmi()) {
+ if (bit.IsSmi()) {
PropertyDetails details(Smi::cast(bit));
PropertyAttributes attributes = details.attributes();
PropertyKind kind = details.kind();
@@ -336,7 +336,7 @@
if (serial_number <= TemplateInfo::kFastTemplateInstantiationsCacheSize) {
Handle<FixedArray> fast_cache =
isolate->fast_template_instantiations_cache();
- DCHECK(!fast_cache->get(serial_number - 1)->IsUndefined(isolate));
+ DCHECK(!fast_cache->get(serial_number - 1).IsUndefined(isolate));
fast_cache->set_undefined(serial_number - 1);
} else if (caching_mode == CachingMode::kUnlimited ||
(serial_number <=
@@ -354,11 +354,11 @@
JSReceiver new_target) {
DisallowHeapAllocation no_gc;
- if (!new_target->IsJSFunction()) return false;
+ if (!new_target.IsJSFunction()) return false;
JSFunction fun = JSFunction::cast(new_target);
- if (fun->shared()->function_data() != info->constructor()) return false;
- if (info->immutable_proto()) return false;
- return fun->context()->native_context() == isolate->raw_native_context();
+ if (fun.shared().function_data() != info.constructor()) return false;
+ if (info.immutable_proto()) return false;
+ return fun.context().native_context() == isolate->raw_native_context();
}
MaybeHandle<JSObject> InstantiateObject(Isolate* isolate,
@@ -387,7 +387,7 @@
if (constructor.is_null()) {
Object maybe_constructor_info = info->constructor();
- if (maybe_constructor_info->IsUndefined(isolate)) {
+ if (maybe_constructor_info.IsUndefined(isolate)) {
constructor = isolate->object_function();
} else {
// Enter a new scope. Recursion could otherwise create a lot of handles.
@@ -471,9 +471,9 @@
Handle<Object> prototype;
if (!data->remove_prototype()) {
Object prototype_templ = data->GetPrototypeTemplate();
- if (prototype_templ->IsUndefined(isolate)) {
+ if (prototype_templ.IsUndefined(isolate)) {
Object protoype_provider_templ = data->GetPrototypeProviderTemplate();
- if (protoype_provider_templ->IsUndefined(isolate)) {
+ if (protoype_provider_templ.IsUndefined(isolate)) {
prototype = isolate->factory()->NewJSObject(isolate->object_function());
} else {
ASSIGN_RETURN_ON_EXCEPTION(
@@ -490,7 +490,7 @@
JSFunction);
}
Object parent = data->GetParentTemplate();
- if (!parent->IsUndefined(isolate)) {
+ if (!parent.IsUndefined(isolate)) {
Handle<Object> parent_prototype;
ASSIGN_RETURN_ON_EXCEPTION(isolate, parent_prototype,
GetInstancePrototype(isolate, parent),
@@ -502,8 +502,8 @@
}
InstanceType function_type =
(!data->needs_access_check() &&
- data->GetNamedPropertyHandler()->IsUndefined(isolate) &&
- data->GetIndexedPropertyHandler()->IsUndefined(isolate))
+ data->GetNamedPropertyHandler().IsUndefined(isolate) &&
+ data->GetIndexedPropertyHandler().IsUndefined(isolate))
? JS_API_OBJECT_TYPE
: JS_SPECIAL_API_OBJECT_TYPE;
@@ -531,7 +531,7 @@
int length, Handle<Object>* data) {
Object maybe_list = templ->property_list();
Handle<TemplateList> list;
- if (maybe_list->IsUndefined(isolate)) {
+ if (maybe_list.IsUndefined(isolate)) {
list = TemplateList::New(isolate, length);
} else {
list = handle(TemplateList::cast(maybe_list), isolate);
@@ -623,7 +623,7 @@
Handle<AccessorInfo> property) {
Object maybe_list = info->property_accessors();
Handle<TemplateList> list;
- if (maybe_list->IsUndefined(isolate)) {
+ if (maybe_list.IsUndefined(isolate)) {
list = TemplateList::New(isolate, 1);
} else {
list = handle(TemplateList::cast(maybe_list), isolate);
@@ -647,7 +647,7 @@
if (obj->remove_prototype()) {
DCHECK(prototype.is_null());
- DCHECK(result->shared()->IsApiFunction());
+ DCHECK(result->shared().IsApiFunction());
DCHECK(!result->IsConstructor());
DCHECK(!result->has_prototype_slot());
return result;
@@ -663,7 +663,7 @@
if (prototype->IsTheHole(isolate)) {
prototype = isolate->factory()->NewFunctionPrototype(result);
- } else if (obj->GetPrototypeProviderTemplate()->IsUndefined(isolate)) {
+ } else if (obj->GetPrototypeProviderTemplate().IsUndefined(isolate)) {
JSObject::AddProperty(isolate, Handle<JSObject>::cast(prototype),
isolate->factory()->constructor_string(), result,
DONT_ENUM);
@@ -671,7 +671,7 @@
int embedder_field_count = 0;
bool immutable_proto = false;
- if (!obj->GetInstanceTemplate()->IsUndefined(isolate)) {
+ if (!obj->GetInstanceTemplate().IsUndefined(isolate)) {
Handle<ObjectTemplateInfo> GetInstanceTemplate = Handle<ObjectTemplateInfo>(
ObjectTemplateInfo::cast(obj->GetInstanceTemplate()), isolate);
embedder_field_count = GetInstanceTemplate->embedder_field_count();
@@ -694,7 +694,7 @@
// undetectable and callable. If we ever see the need to have an object
// that is undetectable but not callable, we need to update the types.h
// to allow encoding this.
- CHECK(!obj->GetInstanceCallHandler()->IsUndefined(isolate));
+ CHECK(!obj->GetInstanceCallHandler().IsUndefined(isolate));
map->set_is_undetectable(true);
}
@@ -705,16 +705,16 @@
}
// Set interceptor information in the map.
- if (!obj->GetNamedPropertyHandler()->IsUndefined(isolate)) {
+ if (!obj->GetNamedPropertyHandler().IsUndefined(isolate)) {
map->set_has_named_interceptor(true);
map->set_may_have_interesting_symbols(true);
}
- if (!obj->GetIndexedPropertyHandler()->IsUndefined(isolate)) {
+ if (!obj->GetIndexedPropertyHandler().IsUndefined(isolate)) {
map->set_has_indexed_interceptor(true);
}
// Mark instance as callable in the map.
- if (!obj->GetInstanceCallHandler()->IsUndefined(isolate)) {
+ if (!obj->GetInstanceCallHandler().IsUndefined(isolate)) {
map->set_is_callable(true);
map->set_is_constructor(!obj->undetectable());
}
diff --git a/src/api/api.cc b/src/api/api.cc
index 54931f9..3eb1db6 100644
--- a/src/api/api.cc
+++ b/src/api/api.cc
@@ -286,7 +286,7 @@
i::Handle<i::Context> env = Utils::OpenHandle(*context);
i::HandleScopeImplementer* impl = isolate->handle_scope_implementer();
if (!isolate->context().is_null() &&
- isolate->context()->native_context() == env->native_context()) {
+ isolate->context().native_context() == env->native_context()) {
context_ = Local<Context>();
} else {
impl->SaveContext(isolate->context());
@@ -302,7 +302,7 @@
isolate_->set_context(impl->RestoreContext());
i::Handle<i::Context> env = Utils::OpenHandle(*context_);
- microtask_queue = env->native_context()->microtask_queue();
+ microtask_queue = env->native_context().microtask_queue();
}
if (!escaped_) isolate_->handle_scope_implementer()->DecrementCallDepth();
if (do_callback) isolate_->FireCallCompletedCallback(microtask_queue);
@@ -643,7 +643,7 @@
i::HandleScope scope(isolate);
i::Handle<i::Object> obj(i::Object(object), isolate);
i::Handle<i::ArrayList> list;
- if (!isolate->heap()->serialized_objects()->IsArrayList()) {
+ if (!isolate->heap()->serialized_objects().IsArrayList()) {
list = i::ArrayList::New(isolate, 1);
} else {
list = i::Handle<i::ArrayList>(
@@ -663,7 +663,7 @@
i::HandleScope scope(isolate);
i::Handle<i::Object> obj(i::Object(object), isolate);
i::Handle<i::ArrayList> list;
- if (!ctx->serialized_objects()->IsArrayList()) {
+ if (!ctx->serialized_objects().IsArrayList()) {
list = i::ArrayList::New(isolate, 1);
} else {
list = i::Handle<i::ArrayList>(
@@ -679,7 +679,7 @@
void ConvertSerializedObjectsToFixedArray(Local<Context> context) {
i::Handle<i::Context> ctx = Utils::OpenHandle(*context);
i::Isolate* isolate = ctx->GetIsolate();
- if (!ctx->serialized_objects()->IsArrayList()) {
+ if (!ctx->serialized_objects().IsArrayList()) {
ctx->set_serialized_objects(i::ReadOnlyRoots(isolate).empty_fixed_array());
} else {
i::Handle<i::ArrayList> list(i::ArrayList::cast(ctx->serialized_objects()),
@@ -690,7 +690,7 @@
}
void ConvertSerializedObjectsToFixedArray(i::Isolate* isolate) {
- if (!isolate->heap()->serialized_objects()->IsArrayList()) {
+ if (!isolate->heap()->serialized_objects().IsArrayList()) {
isolate->heap()->SetSerializedObjects(
i::ReadOnlyRoots(isolate).empty_fixed_array());
} else {
@@ -732,7 +732,7 @@
i::Handle<i::Context> context =
v8::Utils::OpenHandle(*data->contexts_.Get(i));
global_proxy_sizes->set(i,
- i::Smi::FromInt(context->global_proxy()->Size()));
+ i::Smi::FromInt(context->global_proxy().Size()));
}
isolate->heap()->SetSerializedGlobalProxySizes(*global_proxy_sizes);
}
@@ -767,16 +767,16 @@
i::HeapIterator heap_iterator(isolate->heap());
for (i::HeapObject current_obj = heap_iterator.next();
!current_obj.is_null(); current_obj = heap_iterator.next()) {
- if (current_obj->IsSharedFunctionInfo()) {
+ if (current_obj.IsSharedFunctionInfo()) {
i::SharedFunctionInfo shared =
i::SharedFunctionInfo::cast(current_obj);
- if (shared->CanDiscardCompiled()) {
+ if (shared.CanDiscardCompiled()) {
sfis_to_clear.emplace_back(shared, isolate);
}
- } else if (current_obj->IsJSRegExp()) {
+ } else if (current_obj.IsJSRegExp()) {
i::JSRegExp regexp = i::JSRegExp::cast(current_obj);
- if (regexp->HasCompiledCode()) {
- regexp->DiscardCompiledCodeForSerialization();
+ if (regexp.HasCompiledCode()) {
+ regexp.DiscardCompiledCodeForSerialization();
}
}
}
@@ -813,23 +813,22 @@
i::HeapIterator heap_iterator(isolate->heap());
for (i::HeapObject current_obj = heap_iterator.next(); !current_obj.is_null();
current_obj = heap_iterator.next()) {
- if (current_obj->IsJSFunction()) {
+ if (current_obj.IsJSFunction()) {
i::JSFunction fun = i::JSFunction::cast(current_obj);
// Complete in-object slack tracking for all functions.
- fun->CompleteInobjectSlackTrackingIfActive();
+ fun.CompleteInobjectSlackTrackingIfActive();
// Also, clear out feedback vectors, or any optimized code.
- if (!fun->raw_feedback_cell()->value()->IsUndefined()) {
- fun->raw_feedback_cell()->set_value(
+ if (!fun.raw_feedback_cell().value().IsUndefined()) {
+ fun.raw_feedback_cell().set_value(
i::ReadOnlyRoots(isolate).undefined_value());
- fun->set_code(isolate->builtins()->builtin(i::Builtins::kCompileLazy));
+ fun.set_code(isolate->builtins()->builtin(i::Builtins::kCompileLazy));
}
if (function_code_handling == FunctionCodeHandling::kClear) {
- DCHECK(fun->shared()->HasWasmExportedFunctionData() ||
- fun->shared()->HasBuiltinId() ||
- fun->shared()->IsApiFunction() ||
- fun->shared()->HasUncompiledDataWithoutPreparseData());
+ DCHECK(fun.shared().HasWasmExportedFunctionData() ||
+ fun.shared().HasBuiltinId() || fun.shared().IsApiFunction() ||
+ fun.shared().HasUncompiledDataWithoutPreparseData());
}
}
}
@@ -1007,7 +1006,7 @@
i::Handle<i::Object> result = isolate->global_handles()->Create(*obj);
#ifdef VERIFY_HEAP
if (i::FLAG_verify_heap) {
- i::Object(*obj)->ObjectVerify(isolate);
+ i::Object(*obj).ObjectVerify(isolate);
}
#endif // VERIFY_HEAP
return result.location();
@@ -1020,7 +1019,7 @@
isolate->global_handles()->CreateTraced(*obj, slot);
#ifdef VERIFY_HEAP
if (i::FLAG_verify_heap) {
- i::Object(*obj)->ObjectVerify(isolate);
+ i::Object(*obj).ObjectVerify(isolate);
}
#endif // VERIFY_HEAP
return result.location();
@@ -1141,16 +1140,16 @@
EscapableHandleScope::EscapableHandleScope(Isolate* v8_isolate) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate);
escape_slot_ =
- CreateHandle(isolate, i::ReadOnlyRoots(isolate).the_hole_value()->ptr());
+ CreateHandle(isolate, i::ReadOnlyRoots(isolate).the_hole_value().ptr());
Initialize(v8_isolate);
}
i::Address* EscapableHandleScope::Escape(i::Address* escape_value) {
i::Heap* heap = reinterpret_cast<i::Isolate*>(GetIsolate())->heap();
- Utils::ApiCheck(i::Object(*escape_slot_)->IsTheHole(heap->isolate()),
+ Utils::ApiCheck(i::Object(*escape_slot_).IsTheHole(heap->isolate()),
"EscapableHandleScope::Escape", "Escape value set twice");
if (escape_value == nullptr) {
- *escape_slot_ = i::ReadOnlyRoots(heap).undefined_value()->ptr();
+ *escape_slot_ = i::ReadOnlyRoots(heap).undefined_value().ptr();
return nullptr;
}
*escape_slot_ = *escape_value;
@@ -1262,7 +1261,7 @@
CHECK(context->IsNativeContext());
// TODO(ishell): remove cast once embedder_data slot has a proper type.
return static_cast<uint32_t>(
- i::EmbedderDataArray::cast(context->embedder_data())->length());
+ i::EmbedderDataArray::cast(context->embedder_data()).length());
}
v8::Local<v8::Value> Context::SlowGetEmbedderData(int index) {
@@ -1390,8 +1389,8 @@
ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate);
i::Handle<i::Object> result = Utils::OpenHandle(*prototype_provider);
auto info = Utils::OpenHandle(this);
- CHECK(info->GetPrototypeTemplate()->IsUndefined(i_isolate));
- CHECK(info->GetParentTemplate()->IsUndefined(i_isolate));
+ CHECK(info->GetPrototypeTemplate().IsUndefined(i_isolate));
+ CHECK(info->GetParentTemplate().IsUndefined(i_isolate));
i::FunctionTemplateInfo::SetPrototypeProviderTemplate(i_isolate, info,
result);
}
@@ -1407,7 +1406,7 @@
EnsureNotInstantiated(info, "v8::FunctionTemplate::Inherit");
i::Isolate* i_isolate = info->GetIsolate();
ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate);
- CHECK(info->GetPrototypeProviderTemplate()->IsUndefined(i_isolate));
+ CHECK(info->GetPrototypeProviderTemplate().IsUndefined(i_isolate));
i::FunctionTemplateInfo::SetParentTemplate(i_isolate, info,
Utils::OpenHandle(*value));
}
@@ -1465,9 +1464,9 @@
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
i::FixedArray serialized_objects = i_isolate->heap()->serialized_objects();
int int_index = static_cast<int>(index);
- if (int_index < serialized_objects->length()) {
- i::Object info = serialized_objects->get(int_index);
- if (info->IsFunctionTemplateInfo()) {
+ if (int_index < serialized_objects.length()) {
+ i::Object info = serialized_objects.get(int_index);
+ if (info.IsFunctionTemplateInfo()) {
return Utils::ToLocal(i::Handle<i::FunctionTemplateInfo>(
i::FunctionTemplateInfo::cast(info), i_isolate));
}
@@ -1573,7 +1572,7 @@
}
i::Isolate* isolate = handle->GetIsolate();
ENTER_V8_NO_SCRIPT_NO_EXCEPTION(isolate);
- if (handle->GetInstanceTemplate()->IsUndefined(isolate)) {
+ if (handle->GetInstanceTemplate().IsUndefined(isolate)) {
Local<ObjectTemplate> templ =
ObjectTemplate::New(isolate, ToApiHandle<FunctionTemplate>(handle));
i::FunctionTemplateInfo::SetInstanceTemplate(isolate, handle,
@@ -1670,9 +1669,9 @@
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
i::FixedArray serialized_objects = i_isolate->heap()->serialized_objects();
int int_index = static_cast<int>(index);
- if (int_index < serialized_objects->length()) {
- i::Object info = serialized_objects->get(int_index);
- if (info->IsObjectTemplateInfo()) {
+ if (int_index < serialized_objects.length()) {
+ i::Object info = serialized_objects.get(int_index);
+ if (info.IsObjectTemplateInfo()) {
return Utils::ToLocal(i::Handle<i::ObjectTemplateInfo>(
i::ObjectTemplateInfo::cast(info), i_isolate));
}
@@ -1685,7 +1684,7 @@
static i::Handle<i::FunctionTemplateInfo> EnsureConstructor(
i::Isolate* isolate, ObjectTemplate* object_template) {
i::Object obj = Utils::OpenHandle(object_template)->constructor();
- if (!obj->IsUndefined(isolate)) {
+ if (!obj.IsUndefined(isolate)) {
i::FunctionTemplateInfo info = i::FunctionTemplateInfo::cast(obj);
return i::Handle<i::FunctionTemplateInfo>(info, isolate);
}
@@ -2072,7 +2071,7 @@
i::Handle<i::SharedFunctionInfo>::cast(Utils::OpenHandle(this));
i::Isolate* isolate = obj->GetIsolate();
LOG_API(isolate, UnboundScript, GetLineNumber);
- if (obj->script()->IsScript()) {
+ if (obj->script().IsScript()) {
i::Handle<i::Script> script(i::Script::cast(obj->script()), isolate);
return i::Script::GetLineNumber(script, code_pos);
} else {
@@ -2085,8 +2084,8 @@
i::Handle<i::SharedFunctionInfo>::cast(Utils::OpenHandle(this));
i::Isolate* isolate = obj->GetIsolate();
LOG_API(isolate, UnboundScript, GetName);
- if (obj->script()->IsScript()) {
- i::Object name = i::Script::cast(obj->script())->name();
+ if (obj->script().IsScript()) {
+ i::Object name = i::Script::cast(obj->script()).name();
return Utils::ToLocal(i::Handle<i::Object>(name, isolate));
} else {
return Local<String>();
@@ -2098,8 +2097,8 @@
i::Handle<i::SharedFunctionInfo>::cast(Utils::OpenHandle(this));
i::Isolate* isolate = obj->GetIsolate();
LOG_API(isolate, UnboundScript, GetSourceURL);
- if (obj->script()->IsScript()) {
- i::Object url = i::Script::cast(obj->script())->source_url();
+ if (obj->script().IsScript()) {
+ i::Object url = i::Script::cast(obj->script()).source_url();
return Utils::ToLocal(i::Handle<i::Object>(url, isolate));
} else {
return Local<String>();
@@ -2111,8 +2110,8 @@
i::Handle<i::SharedFunctionInfo>::cast(Utils::OpenHandle(this));
i::Isolate* isolate = obj->GetIsolate();
LOG_API(isolate, UnboundScript, GetSourceMappingURL);
- if (obj->script()->IsScript()) {
- i::Object url = i::Script::cast(obj->script())->source_mapping_url();
+ if (obj->script().IsScript()) {
+ i::Object url = i::Script::cast(obj->script()).source_mapping_url();
return Utils::ToLocal(i::Handle<i::Object>(url, isolate));
} else {
return Local<String>();
@@ -2156,8 +2155,8 @@
Local<UnboundScript> Script::GetUnboundScript() {
i::Handle<i::Object> obj = Utils::OpenHandle(this);
- i::SharedFunctionInfo sfi = i::JSFunction::cast(*obj)->shared();
- i::Isolate* isolate = sfi->GetIsolate();
+ i::SharedFunctionInfo sfi = i::JSFunction::cast(*obj).shared();
+ i::Isolate* isolate = sfi.GetIsolate();
return ToApiHandle<UnboundScript>(i::handle(sfi, isolate));
}
@@ -2231,14 +2230,14 @@
int Module::GetModuleRequestsLength() const {
i::Handle<i::Module> self = Utils::OpenHandle(this);
- return self->info()->module_requests()->length();
+ return self->info().module_requests().length();
}
Local<String> Module::GetModuleRequest(int i) const {
CHECK_GE(i, 0);
i::Handle<i::Module> self = Utils::OpenHandle(this);
i::Isolate* isolate = self->GetIsolate();
- i::Handle<i::FixedArray> module_requests(self->info()->module_requests(),
+ i::Handle<i::FixedArray> module_requests(self->info().module_requests(),
isolate);
CHECK_LT(i, module_requests->length());
return ToApiHandle<String>(i::handle(module_requests->get(i), isolate));
@@ -2250,7 +2249,7 @@
i::HandleScope scope(isolate);
i::Handle<i::Module> self = Utils::OpenHandle(this);
i::Handle<i::FixedArray> module_request_positions(
- self->info()->module_request_positions(), isolate);
+ self->info().module_request_positions(), isolate);
CHECK_LT(i, module_request_positions->length());
int position = i::Smi::ToInt(module_request_positions->get(i));
i::Handle<i::Script> script(self->script(), isolate);
@@ -2463,7 +2462,7 @@
DCHECK(context->IsNativeContext());
i::Handle<i::SharedFunctionInfo> outer_info(
- context->empty_function()->shared(), isolate);
+ context->empty_function().shared(), isolate);
i::Handle<i::JSFunction> fun;
i::Handle<i::FixedArray> arguments_list =
@@ -2665,7 +2664,7 @@
bool v8::TryCatch::HasCaught() const {
return !i::Object(reinterpret_cast<i::Address>(exception_))
- ->IsTheHole(isolate_);
+ .IsTheHole(isolate_);
}
bool v8::TryCatch::CanContinue() const { return can_continue_; }
@@ -2691,7 +2690,7 @@
MaybeLocal<Value> v8::TryCatch::StackTrace(Local<Context> context) const {
if (!HasCaught()) return v8::Local<Value>();
i::Object raw_obj(reinterpret_cast<i::Address>(exception_));
- if (!raw_obj->IsJSObject()) return v8::Local<Value>();
+ if (!raw_obj.IsJSObject()) return v8::Local<Value>();
PREPARE_FOR_EXECUTION(context, TryCatch, StackTrace, Value);
i::Handle<i::JSObject> obj(i::JSObject::cast(raw_obj), isolate_);
i::Handle<i::String> name = isolate->factory()->stack_string();
@@ -2708,8 +2707,8 @@
v8::Local<v8::Message> v8::TryCatch::Message() const {
i::Object message(reinterpret_cast<i::Address>(message_obj_));
- DCHECK(message->IsJSMessageObject() || message->IsTheHole(isolate_));
- if (HasCaught() && !message->IsTheHole(isolate_)) {
+ DCHECK(message.IsJSMessageObject() || message.IsTheHole(isolate_));
+ if (HasCaught() && !message.IsTheHole(isolate_)) {
return v8::Utils::MessageToLocal(i::Handle<i::Object>(message, isolate_));
} else {
return v8::Local<v8::Message>();
@@ -2728,8 +2727,8 @@
void v8::TryCatch::ResetInternal() {
i::Object the_hole = i::ReadOnlyRoots(isolate_).the_hole_value();
- exception_ = reinterpret_cast<void*>(the_hole->ptr());
- message_obj_ = reinterpret_cast<void*>(the_hole->ptr());
+ exception_ = reinterpret_cast<void*>(the_hole.ptr());
+ message_obj_ = reinterpret_cast<void*>(the_hole.ptr());
}
void v8::TryCatch::SetVerbose(bool value) { is_verbose_ = value; }
@@ -2845,14 +2844,14 @@
ENTER_V8_NO_SCRIPT_NO_EXCEPTION(isolate);
return Utils::OpenHandle(this)
->script()
- ->origin_options()
+ .origin_options()
.IsSharedCrossOrigin();
}
bool Message::IsOpaque() const {
i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
ENTER_V8_NO_SCRIPT_NO_EXCEPTION(isolate);
- return Utils::OpenHandle(this)->script()->origin_options().IsOpaque();
+ return Utils::OpenHandle(this)->script().origin_options().IsOpaque();
}
MaybeLocal<String> Message::GetSourceLine(Local<Context> context) const {
@@ -3280,7 +3279,7 @@
bool Value::IsArrayBuffer() const {
i::Handle<i::Object> obj = Utils::OpenHandle(this);
- return obj->IsJSArrayBuffer() && !i::JSArrayBuffer::cast(*obj)->is_shared();
+ return obj->IsJSArrayBuffer() && !i::JSArrayBuffer::cast(*obj).is_shared();
}
bool Value::IsArrayBufferView() const {
@@ -3291,11 +3290,11 @@
return Utils::OpenHandle(this)->IsJSTypedArray();
}
-#define VALUE_IS_TYPED_ARRAY(Type, typeName, TYPE, ctype) \
- bool Value::Is##Type##Array() const { \
- i::Handle<i::Object> obj = Utils::OpenHandle(this); \
- return obj->IsJSTypedArray() && \
- i::JSTypedArray::cast(*obj)->type() == i::kExternal##Type##Array; \
+#define VALUE_IS_TYPED_ARRAY(Type, typeName, TYPE, ctype) \
+ bool Value::Is##Type##Array() const { \
+ i::Handle<i::Object> obj = Utils::OpenHandle(this); \
+ return obj->IsJSTypedArray() && \
+ i::JSTypedArray::cast(*obj).type() == i::kExternal##Type##Array; \
}
TYPED_ARRAYS(VALUE_IS_TYPED_ARRAY)
@@ -3308,7 +3307,7 @@
bool Value::IsSharedArrayBuffer() const {
i::Handle<i::Object> obj = Utils::OpenHandle(this);
- return obj->IsJSArrayBuffer() && i::JSArrayBuffer::cast(*obj)->is_shared();
+ return obj->IsJSArrayBuffer() && i::JSArrayBuffer::cast(*obj).is_shared();
}
bool Value::IsObject() const { return Utils::OpenHandle(this)->IsJSReceiver(); }
@@ -3349,8 +3348,8 @@
// Check the instance type is JS_OBJECT (instance type of Externals) before
// attempting to get the Isolate since that guarantees the object is writable
// and GetIsolate will work.
- if (heap_obj->map()->instance_type() != i::JS_OBJECT_TYPE) return false;
- i::Isolate* isolate = i::JSObject::cast(*heap_obj)->GetIsolate();
+ if (heap_obj->map().instance_type() != i::JS_OBJECT_TYPE) return false;
+ i::Isolate* isolate = i::JSObject::cast(*heap_obj).GetIsolate();
return heap_obj->IsExternal(isolate);
}
@@ -3387,14 +3386,14 @@
i::Handle<i::Object> obj = Utils::OpenHandle(this);
if (!obj->IsJSFunction()) return false;
i::Handle<i::JSFunction> func = i::Handle<i::JSFunction>::cast(obj);
- return i::IsAsyncFunction(func->shared()->kind());
+ return i::IsAsyncFunction(func->shared().kind());
}
bool Value::IsGeneratorFunction() const {
i::Handle<i::Object> obj = Utils::OpenHandle(this);
if (!obj->IsJSFunction()) return false;
i::Handle<i::JSFunction> func = i::Handle<i::JSFunction>::cast(obj);
- return i::IsGeneratorFunction(func->shared()->kind());
+ return i::IsGeneratorFunction(func->shared().kind());
}
bool Value::IsGeneratorObject() const {
@@ -3644,7 +3643,7 @@
void v8::ArrayBuffer::CheckCast(Value* that) {
i::Handle<i::Object> obj = Utils::OpenHandle(that);
Utils::ApiCheck(
- obj->IsJSArrayBuffer() && !i::JSArrayBuffer::cast(*obj)->is_shared(),
+ obj->IsJSArrayBuffer() && !i::JSArrayBuffer::cast(*obj).is_shared(),
"v8::ArrayBuffer::Cast()", "Could not convert to ArrayBuffer");
}
@@ -3665,7 +3664,7 @@
i::Handle<i::Object> obj = Utils::OpenHandle(that); \
Utils::ApiCheck( \
obj->IsJSTypedArray() && \
- i::JSTypedArray::cast(*obj)->type() == i::kExternal##Type##Array, \
+ i::JSTypedArray::cast(*obj).type() == i::kExternal##Type##Array, \
"v8::" #Type "Array::Cast()", "Could not convert to " #Type "Array"); \
}
@@ -3682,7 +3681,7 @@
void v8::SharedArrayBuffer::CheckCast(Value* that) {
i::Handle<i::Object> obj = Utils::OpenHandle(that);
Utils::ApiCheck(
- obj->IsJSArrayBuffer() && i::JSArrayBuffer::cast(*obj)->is_shared(),
+ obj->IsJSArrayBuffer() && i::JSArrayBuffer::cast(*obj).is_shared(),
"v8::SharedArrayBuffer::Cast()",
"Could not convert to SharedArrayBuffer");
}
@@ -4196,10 +4195,10 @@
auto isolate = self->GetIsolate();
i::PrototypeIterator iter(isolate, *self, i::kStartAtReceiver);
auto tmpl_info = *Utils::OpenHandle(*tmpl);
- while (!tmpl_info->IsTemplateFor(iter.GetCurrent<i::JSObject>())) {
+ while (!tmpl_info.IsTemplateFor(iter.GetCurrent<i::JSObject>())) {
iter.Advance();
if (iter.IsAtEnd()) return Local<Object>();
- if (!iter.GetCurrent()->IsJSObject()) return Local<Object>();
+ if (!iter.GetCurrent().IsJSObject()) return Local<Object>();
}
// IsTemplateFor() ensures that iter.GetCurrent() can't be a Proxy here.
return Utils::ToLocal(i::handle(iter.GetCurrent<i::JSObject>(), isolate));
@@ -4227,9 +4226,9 @@
RETURN_ON_FAILED_EXECUTION(Array);
value =
accumulator.GetKeys(static_cast<i::GetKeysConversion>(key_conversion));
- DCHECK(self->map()->EnumLength() == i::kInvalidEnumCacheSentinel ||
- self->map()->EnumLength() == 0 ||
- self->map()->instance_descriptors()->enum_cache()->keys() != *value);
+ DCHECK(self->map().EnumLength() == i::kInvalidEnumCacheSentinel ||
+ self->map().EnumLength() == 0 ||
+ self->map().instance_descriptors().enum_cache().keys() != *value);
auto result = isolate->factory()->NewJSArrayWithElements(value);
RETURN_ESCAPED(Utils::ToLocal(result));
}
@@ -4636,7 +4635,7 @@
auto isolate = Utils::OpenHandle(this)->GetIsolate();
i::HandleScope scope(isolate);
auto self = Utils::OpenHandle(this);
- return self->GetOrCreateIdentityHash(isolate)->value();
+ return self->GetOrCreateIdentityHash(isolate).value();
}
bool v8::Object::IsCallable() {
@@ -4720,13 +4719,13 @@
isolate->debug_execution_mode() == i::DebugInfo::kSideEffects;
if (should_set_has_no_side_effect) {
CHECK(self->IsJSFunction() &&
- i::JSFunction::cast(*self)->shared()->IsApiFunction());
+ i::JSFunction::cast(*self).shared().IsApiFunction());
i::Object obj =
- i::JSFunction::cast(*self)->shared()->get_api_func_data()->call_code();
- if (obj->IsCallHandlerInfo()) {
+ i::JSFunction::cast(*self).shared().get_api_func_data().call_code();
+ if (obj.IsCallHandlerInfo()) {
i::CallHandlerInfo handler_info = i::CallHandlerInfo::cast(obj);
- if (!handler_info->IsSideEffectFreeCallHandlerInfo()) {
- handler_info->SetNextCallHasNoSideEffect();
+ if (!handler_info.IsSideEffectFreeCallHandlerInfo()) {
+ handler_info.SetNextCallHasNoSideEffect();
}
}
}
@@ -4736,15 +4735,15 @@
i::Execution::New(isolate, self, self, argc, args), &result);
if (should_set_has_no_side_effect) {
i::Object obj =
- i::JSFunction::cast(*self)->shared()->get_api_func_data()->call_code();
- if (obj->IsCallHandlerInfo()) {
+ i::JSFunction::cast(*self).shared().get_api_func_data().call_code();
+ if (obj.IsCallHandlerInfo()) {
i::CallHandlerInfo handler_info = i::CallHandlerInfo::cast(obj);
if (has_pending_exception) {
// Restore the map if an exception prevented restoration.
- handler_info->NextCallHasNoSideEffect();
+ handler_info.NextCallHasNoSideEffect();
} else {
- DCHECK(handler_info->IsSideEffectCallHandlerInfo() ||
- handler_info->IsSideEffectFreeCallHandlerInfo());
+ DCHECK(handler_info.IsSideEffectCallHandlerInfo() ||
+ handler_info.IsSideEffectFreeCallHandlerInfo());
}
}
}
@@ -4777,7 +4776,7 @@
auto self = Utils::OpenHandle(this);
if (!self->IsJSFunction()) return;
auto func = i::Handle<i::JSFunction>::cast(self);
- func->shared()->SetName(*Utils::OpenHandle(*name));
+ func->shared().SetName(*Utils::OpenHandle(*name));
}
Local<Value> Function::GetName() const {
@@ -4793,7 +4792,7 @@
}
if (self->IsJSFunction()) {
auto func = i::Handle<i::JSFunction>::cast(self);
- return Utils::ToLocal(handle(func->shared()->Name(), isolate));
+ return Utils::ToLocal(handle(func->shared().Name(), isolate));
}
return ToApiHandle<Primitive>(isolate->factory()->undefined_value());
}
@@ -4805,8 +4804,8 @@
self->GetIsolate()->factory()->undefined_value());
}
auto func = i::Handle<i::JSFunction>::cast(self);
- return Utils::ToLocal(i::Handle<i::Object>(func->shared()->inferred_name(),
- func->GetIsolate()));
+ return Utils::ToLocal(
+ i::Handle<i::Object>(func->shared().inferred_name(), func->GetIsolate()));
}
Local<Value> Function::GetDebugName() const {
@@ -4845,8 +4844,8 @@
return v8::ScriptOrigin(Local<Value>());
}
auto func = i::Handle<i::JSFunction>::cast(self);
- if (func->shared()->script()->IsScript()) {
- i::Handle<i::Script> script(i::Script::cast(func->shared()->script()),
+ if (func->shared().script().IsScript()) {
+ i::Handle<i::Script> script(i::Script::cast(func->shared().script()),
func->GetIsolate());
return GetScriptOriginForScript(func->GetIsolate(), script);
}
@@ -4861,10 +4860,10 @@
return kLineOffsetNotFound;
}
auto func = i::Handle<i::JSFunction>::cast(self);
- if (func->shared()->script()->IsScript()) {
- i::Handle<i::Script> script(i::Script::cast(func->shared()->script()),
+ if (func->shared().script().IsScript()) {
+ i::Handle<i::Script> script(i::Script::cast(func->shared().script()),
func->GetIsolate());
- return i::Script::GetLineNumber(script, func->shared()->StartPosition());
+ return i::Script::GetLineNumber(script, func->shared().StartPosition());
}
return kLineOffsetNotFound;
}
@@ -4875,10 +4874,10 @@
return kLineOffsetNotFound;
}
auto func = i::Handle<i::JSFunction>::cast(self);
- if (func->shared()->script()->IsScript()) {
- i::Handle<i::Script> script(i::Script::cast(func->shared()->script()),
+ if (func->shared().script().IsScript()) {
+ i::Handle<i::Script> script(i::Script::cast(func->shared().script()),
func->GetIsolate());
- return i::Script::GetColumnNumber(script, func->shared()->StartPosition());
+ return i::Script::GetColumnNumber(script, func->shared().StartPosition());
}
return kLineOffsetNotFound;
}
@@ -4889,10 +4888,10 @@
return v8::UnboundScript::kNoScriptId;
}
auto func = i::Handle<i::JSFunction>::cast(self);
- if (!func->shared()->script()->IsScript()) {
+ if (!func->shared().script().IsScript()) {
return v8::UnboundScript::kNoScriptId;
}
- i::Handle<i::Script> script(i::Script::cast(func->shared()->script()),
+ i::Handle<i::Script> script(i::Script::cast(func->shared().script()),
func->GetIsolate());
return script->id();
}
@@ -4992,16 +4991,16 @@
bool CheckCons(i::ConsString cons_string) {
while (true) {
// Check left side if flat.
- i::String left = cons_string->first();
+ i::String left = cons_string.first();
i::ConsString left_as_cons = i::String::VisitFlat(this, left, 0);
if (!is_one_byte_) return false;
// Check right side if flat.
- i::String right = cons_string->second();
+ i::String right = cons_string.second();
i::ConsString right_as_cons = i::String::VisitFlat(this, right, 0);
if (!is_one_byte_) return false;
// Standard recurse/iterate trick.
if (!left_as_cons.is_null() && !right_as_cons.is_null()) {
- if (left->length() < right->length()) {
+ if (left.length() < right.length()) {
CheckCons(left_as_cons);
cons_string = right_as_cons;
} else {
@@ -5256,12 +5255,12 @@
i::String str = *Utils::OpenHandle(this);
const v8::String::ExternalStringResource* expected;
- if (str->IsThinString()) {
- str = i::ThinString::cast(str)->actual();
+ if (str.IsThinString()) {
+ str = i::ThinString::cast(str).actual();
}
if (i::StringShape(str).IsExternalTwoByte()) {
- const void* resource = i::ExternalTwoByteString::cast(str)->resource();
+ const void* resource = i::ExternalTwoByteString::cast(str).resource();
expected = reinterpret_cast<const ExternalStringResource*>(resource);
} else {
expected = nullptr;
@@ -5276,22 +5275,22 @@
const v8::String::ExternalStringResourceBase* expected;
Encoding expectedEncoding;
- if (str->IsThinString()) {
- str = i::ThinString::cast(str)->actual();
+ if (str.IsThinString()) {
+ str = i::ThinString::cast(str).actual();
}
if (i::StringShape(str).IsExternalOneByte()) {
- const void* resource = i::ExternalOneByteString::cast(str)->resource();
+ const void* resource = i::ExternalOneByteString::cast(str).resource();
expected = reinterpret_cast<const ExternalStringResourceBase*>(resource);
expectedEncoding = ONE_BYTE_ENCODING;
} else if (i::StringShape(str).IsExternalTwoByte()) {
- const void* resource = i::ExternalTwoByteString::cast(str)->resource();
+ const void* resource = i::ExternalTwoByteString::cast(str).resource();
expected = reinterpret_cast<const ExternalStringResourceBase*>(resource);
expectedEncoding = TWO_BYTE_ENCODING;
} else {
expected = nullptr;
expectedEncoding =
- str->IsOneByteRepresentation() ? ONE_BYTE_ENCODING : TWO_BYTE_ENCODING;
+ str.IsOneByteRepresentation() ? ONE_BYTE_ENCODING : TWO_BYTE_ENCODING;
}
CHECK_EQ(expected, value);
CHECK_EQ(expectedEncoding, encoding);
@@ -5302,8 +5301,8 @@
typedef internal::Internals I;
i::String str = *Utils::OpenHandle(this);
- if (str->IsThinString()) {
- str = i::ThinString::cast(str)->actual();
+ if (str.IsThinString()) {
+ str = i::ThinString::cast(str).actual();
}
if (i::StringShape(str).IsExternalTwoByte()) {
@@ -5320,8 +5319,8 @@
ExternalStringResourceBase* resource = nullptr;
i::String str = *Utils::OpenHandle(this);
- if (str->IsThinString()) {
- str = i::ThinString::cast(str)->actual();
+ if (str.IsThinString()) {
+ str = i::ThinString::cast(str).actual();
}
internal::Address string = str.ptr();
@@ -5340,11 +5339,11 @@
i::DisallowHeapAllocation no_allocation;
i::String str = *Utils::OpenHandle(this);
if (i::StringShape(str).IsExternalOneByte()) {
- return i::ExternalOneByteString::cast(str)->resource();
- } else if (str->IsThinString()) {
- str = i::ThinString::cast(str)->actual();
+ return i::ExternalOneByteString::cast(str).resource();
+ } else if (str.IsThinString()) {
+ str = i::ThinString::cast(str).actual();
if (i::StringShape(str).IsExternalOneByte()) {
- return i::ExternalOneByteString::cast(str)->resource();
+ return i::ExternalOneByteString::cast(str).resource();
}
}
return nullptr;
@@ -5434,7 +5433,7 @@
i::Handle<i::JSReceiver> obj = Utils::OpenHandle(this);
const char* location = "v8::Object::GetInternalField()";
if (!InternalFieldOK(obj, index, location)) return Local<Value>();
- i::Handle<i::Object> value(i::JSObject::cast(*obj)->GetEmbedderField(index),
+ i::Handle<i::Object> value(i::JSObject::cast(*obj).GetEmbedderField(index),
obj->GetIsolate());
return Utils::ToLocal(value);
}
@@ -5474,7 +5473,7 @@
const char* location = "v8::Object::SetAlignedPointerInInternalFields()";
i::DisallowHeapAllocation no_gc;
i::JSObject js_obj = i::JSObject::cast(*obj);
- int nof_embedder_fields = js_obj->GetEmbedderFieldCount();
+ int nof_embedder_fields = js_obj.GetEmbedderFieldCount();
for (int i = 0; i < argc; i++) {
int index = indices[i];
if (!Utils::ApiCheck(index < nof_embedder_fields, location,
@@ -5491,11 +5490,11 @@
static void* ExternalValue(i::Object obj) {
// Obscure semantics for undefined, but somehow checked in our unit tests...
- if (obj->IsUndefined()) {
+ if (obj.IsUndefined()) {
return nullptr;
}
- i::Object foreign = i::JSObject::cast(obj)->GetEmbedderField(0);
- return reinterpret_cast<void*>(i::Foreign::cast(foreign)->foreign_address());
+ i::Object foreign = i::JSObject::cast(obj).GetEmbedderField(0);
+ return reinterpret_cast<void*>(i::Foreign::cast(foreign).foreign_address());
}
// --- E n v i r o n m e n t ---
@@ -5697,7 +5696,7 @@
// Migrate security handlers from global_template to
// proxy_template. Temporarily removing access check
// information from the global template.
- if (!global_constructor->GetAccessCheckInfo()->IsUndefined(isolate)) {
+ if (!global_constructor->GetAccessCheckInfo().IsUndefined(isolate)) {
i::FunctionTemplateInfo::SetAccessCheckInfo(
isolate, proxy_constructor,
i::handle(global_constructor->GetAccessCheckInfo(), isolate));
@@ -5713,15 +5712,14 @@
// interceptors, we need to replace them temporarily with noop
// interceptors, so the map is correctly marked as having interceptors,
// but we don't invoke any.
- if (!global_constructor->GetNamedPropertyHandler()->IsUndefined(
- isolate)) {
+ if (!global_constructor->GetNamedPropertyHandler().IsUndefined(isolate)) {
named_interceptor =
handle(global_constructor->GetNamedPropertyHandler(), isolate);
i::FunctionTemplateInfo::SetNamedPropertyHandler(
isolate, global_constructor,
i::ReadOnlyRoots(isolate).noop_interceptor_info_handle());
}
- if (!global_constructor->GetIndexedPropertyHandler()->IsUndefined(
+ if (!global_constructor->GetIndexedPropertyHandler().IsUndefined(
isolate)) {
indexed_interceptor =
handle(global_constructor->GetIndexedPropertyHandler(), isolate);
@@ -5772,7 +5770,7 @@
// TODO(jkummerow): This is for crbug.com/713699. Remove it if it doesn't
// fail.
// Sanity-check that the isolate is initialized and usable.
- CHECK(isolate->builtins()->builtin(i::Builtins::kIllegal)->IsCode());
+ CHECK(isolate->builtins()->builtin(i::Builtins::kIllegal).IsCode());
TRACE_EVENT_CALL_STATS_SCOPED(isolate, "v8", "V8.NewContext");
LOG_API(isolate, Context, New);
@@ -5905,8 +5903,7 @@
bool Context::IsCodeGenerationFromStringsAllowed() {
i::Handle<i::Context> context = Utils::OpenHandle(this);
- return !context->allow_code_gen_from_strings()->IsFalse(
- context->GetIsolate());
+ return !context->allow_code_gen_from_strings().IsFalse(context->GetIsolate());
}
void Context::SetErrorMessageForCodeGenerationFromStrings(Local<String> error) {
@@ -5918,17 +5915,17 @@
namespace {
i::Address* GetSerializedDataFromFixedArray(i::Isolate* isolate,
i::FixedArray list, size_t index) {
- if (index < static_cast<size_t>(list->length())) {
+ if (index < static_cast<size_t>(list.length())) {
int int_index = static_cast<int>(index);
- i::Object object = list->get(int_index);
- if (!object->IsTheHole(isolate)) {
- list->set_the_hole(isolate, int_index);
+ i::Object object = list.get(int_index);
+ if (!object.IsTheHole(isolate)) {
+ list.set_the_hole(isolate, int_index);
// Shrink the list so that the last element is not the hole (unless it's
// the first element, because we don't want to end up with a non-canonical
// empty FixedArray).
- int last = list->length() - 1;
- while (last >= 0 && list->is_the_hole(isolate, last)) last--;
- if (last != -1) list->Shrink(isolate, last + 1);
+ int last = list.length() - 1;
+ while (last >= 0 && list.is_the_hole(isolate, last)) last--;
+ if (last != -1) list.Shrink(isolate, last + 1);
return i::Handle<i::Object>(object, isolate).location();
}
}
@@ -6024,7 +6021,7 @@
// If it's a global proxy, then test with the global object. Note that the
// inner global object may not necessarily be a JSGlobalObject.
i::PrototypeIterator iter(self->GetIsolate(),
- i::JSObject::cast(*obj)->map());
+ i::JSObject::cast(*obj).map());
// The global proxy should always have a prototype, as it is a bug to call
// this on a detached JSGlobalProxy.
DCHECK(!iter.IsAtEnd());
@@ -6206,11 +6203,11 @@
i::String obj = *Utils::OpenHandle(this);
- if (obj->IsThinString()) {
- obj = i::ThinString::cast(obj)->actual();
+ if (obj.IsThinString()) {
+ obj = i::ThinString::cast(obj).actual();
}
- if (!obj->SupportsExternalization()) {
+ if (!obj.SupportsExternalization()) {
return false;
}
@@ -6222,9 +6219,9 @@
CHECK(resource && resource->data());
- bool result = obj->MakeExternal(resource);
+ bool result = obj.MakeExternal(resource);
DCHECK(result);
- DCHECK(obj->IsExternalString());
+ DCHECK(obj.IsExternalString());
return result;
}
@@ -6234,11 +6231,11 @@
i::String obj = *Utils::OpenHandle(this);
- if (obj->IsThinString()) {
- obj = i::ThinString::cast(obj)->actual();
+ if (obj.IsThinString()) {
+ obj = i::ThinString::cast(obj).actual();
}
- if (!obj->SupportsExternalization()) {
+ if (!obj.SupportsExternalization()) {
return false;
}
@@ -6250,8 +6247,8 @@
CHECK(resource && resource->data());
- bool result = obj->MakeExternal(resource);
- DCHECK_IMPLIES(result, obj->IsExternalString());
+ bool result = obj.MakeExternal(resource);
+ DCHECK_IMPLIES(result, obj.IsExternalString());
return result;
}
@@ -6259,11 +6256,11 @@
i::DisallowHeapAllocation no_allocation;
i::String obj = *Utils::OpenHandle(this);
- if (obj->IsThinString()) {
- obj = i::ThinString::cast(obj)->actual();
+ if (obj.IsThinString()) {
+ obj = i::ThinString::cast(obj).actual();
}
- if (!obj->SupportsExternalization()) {
+ if (!obj.SupportsExternalization()) {
return false;
}
@@ -6365,7 +6362,7 @@
i::Handle<i::JSValue> jsvalue = i::Handle<i::JSValue>::cast(obj);
i::Isolate* isolate = jsvalue->GetIsolate();
LOG_API(isolate, NumberObject, NumberValue);
- return jsvalue->value()->Number();
+ return jsvalue->value().Number();
}
Local<v8::Value> v8::BigIntObject::New(Isolate* isolate, int64_t value) {
@@ -6405,7 +6402,7 @@
i::Handle<i::JSValue> jsvalue = i::Handle<i::JSValue>::cast(obj);
i::Isolate* isolate = jsvalue->GetIsolate();
LOG_API(isolate, BooleanObject, BooleanValue);
- return jsvalue->value()->IsTrue(isolate);
+ return jsvalue->value().IsTrue(isolate);
}
Local<v8::Value> v8::StringObject::New(Isolate* v8_isolate,
@@ -6466,7 +6463,7 @@
i::Handle<i::JSDate> jsdate = i::Handle<i::JSDate>::cast(obj);
i::Isolate* isolate = jsdate->GetIsolate();
LOG_API(isolate, Date, NumberValue);
- return jsdate->value()->Number();
+ return jsdate->value().Number();
}
// Assert that the static TimeZoneDetection cast in
@@ -6547,10 +6544,10 @@
uint32_t v8::Array::Length() const {
i::Handle<i::JSArray> obj = Utils::OpenHandle(this);
i::Object length = obj->length();
- if (length->IsSmi()) {
+ if (length.IsSmi()) {
return i::Smi::ToInt(length);
} else {
- return static_cast<uint32_t>(length->Number());
+ return static_cast<uint32_t>(length.Number());
}
}
@@ -6564,7 +6561,7 @@
size_t v8::Map::Size() const {
i::Handle<i::JSMap> obj = Utils::OpenHandle(this);
- return i::OrderedHashMap::cast(obj->table())->NumberOfElements();
+ return i::OrderedHashMap::cast(obj->table()).NumberOfElements();
}
void Map::Clear() {
@@ -6693,7 +6690,7 @@
size_t v8::Set::Size() const {
i::Handle<i::JSSet> obj = Utils::OpenHandle(this);
- return i::OrderedHashSet::cast(obj->table())->NumberOfElements();
+ return i::OrderedHashSet::cast(obj->table()).NumberOfElements();
}
void Set::Clear() {
@@ -7224,12 +7221,12 @@
if (obj->IsJSDataView()) {
i::Handle<i::JSDataView> data_view(i::JSDataView::cast(*obj),
obj->GetIsolate());
- DCHECK(data_view->buffer()->IsJSArrayBuffer());
+ DCHECK(data_view->buffer().IsJSArrayBuffer());
buffer = i::handle(i::JSArrayBuffer::cast(data_view->buffer()),
data_view->GetIsolate());
} else {
DCHECK(obj->IsJSTypedArray());
- buffer = i::JSTypedArray::cast(*obj)->GetBuffer();
+ buffer = i::JSTypedArray::cast(*obj).GetBuffer();
}
return Utils::ToLocal(buffer);
}
@@ -7640,7 +7637,7 @@
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
i::Context context = isolate->context();
if (context.is_null()) return Local<Context>();
- i::Context native_context = context->native_context();
+ i::Context native_context = context.native_context();
if (native_context.is_null()) return Local<Context>();
return Utils::ToLocal(i::Handle<i::Context>(native_context, isolate));
}
@@ -8285,8 +8282,8 @@
i::HeapIterator iterator(isolate->heap());
for (i::HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) {
- if (obj->IsAbstractCode()) {
- i::AbstractCode::cast(obj)->DropStackFrameCache();
+ if (obj.IsAbstractCode()) {
+ i::AbstractCode::cast(obj).DropStackFrameCache();
}
}
}
@@ -8383,8 +8380,8 @@
i::Code js_entry = isolate->heap()->builtin(i::Builtins::kJSEntry);
unwind_state.js_entry_stub.code.start =
- reinterpret_cast<const void*>(js_entry->InstructionStart());
- unwind_state.js_entry_stub.code.length_in_bytes = js_entry->InstructionSize();
+ reinterpret_cast<const void*>(js_entry.InstructionStart());
+ unwind_state.js_entry_stub.code.length_in_bytes = js_entry.InstructionSize();
return unwind_state;
}
@@ -8464,12 +8461,12 @@
i::HandleScope scope(isolate);
i::DisallowHeapAllocation no_gc;
i::TemplateList listeners = isolate->heap()->message_listeners();
- for (int i = 0; i < listeners->length(); i++) {
- if (listeners->get(i)->IsUndefined(isolate)) continue; // skip deleted ones
- i::FixedArray listener = i::FixedArray::cast(listeners->get(i));
- i::Foreign callback_obj = i::Foreign::cast(listener->get(0));
- if (callback_obj->foreign_address() == FUNCTION_ADDR(that)) {
- listeners->set(i, i::ReadOnlyRoots(isolate).undefined_value());
+ for (int i = 0; i < listeners.length(); i++) {
+ if (listeners.get(i).IsUndefined(isolate)) continue; // skip deleted ones
+ i::FixedArray listener = i::FixedArray::cast(listeners.get(i));
+ i::Foreign callback_obj = i::Foreign::cast(listener.get(0));
+ if (callback_obj.foreign_address() == FUNCTION_ADDR(that)) {
+ listeners.set(i, i::ReadOnlyRoots(isolate).undefined_value());
}
}
}
@@ -8682,7 +8679,7 @@
int debug::GetContextId(Local<Context> context) {
i::Object value = Utils::OpenHandle(*context)->debug_context_id();
- return (value->IsSmi()) ? i::Smi::ToInt(value) : 0;
+ return (value.IsSmi()) ? i::Smi::ToInt(value) : 0;
}
void debug::SetInspector(Isolate* isolate,
@@ -8817,13 +8814,13 @@
i::Isolate* isolate = script->GetIsolate();
i::HandleScope scope(isolate);
i::Script::InitLineEnds(script);
- CHECK(script->line_ends()->IsFixedArray());
+ CHECK(script->line_ends().IsFixedArray());
i::Handle<i::FixedArray> line_ends(i::FixedArray::cast(script->line_ends()),
isolate);
std::vector<int> result(line_ends->length());
for (int i = 0; i < line_ends->length(); ++i) {
i::Smi line_end = i::Smi::cast(line_ends->get(i));
- result[i] = line_end->value();
+ result[i] = line_end.value();
}
return result;
}
@@ -8863,7 +8860,7 @@
i::HandleScope handle_scope(isolate);
i::Handle<i::Script> script = Utils::OpenHandle(this);
i::Object value = script->context_data();
- if (value->IsSmi()) return Just(i::Smi::ToInt(value));
+ if (value.IsSmi()) return Just(i::Smi::ToInt(value));
return Nothing<int>();
}
@@ -8906,11 +8903,11 @@
this->SourceMappingURL().IsEmpty()) {
i::WasmModuleObject module_object =
i::WasmModuleObject::cast(script->wasm_module_object());
- return module_object->GetPossibleBreakpoints(start, end, locations);
+ return module_object.GetPossibleBreakpoints(start, end, locations);
}
i::Script::InitLineEnds(script);
- CHECK(script->line_ends()->IsFixedArray());
+ CHECK(script->line_ends().IsFixedArray());
i::Isolate* isolate = script->GetIsolate();
i::Handle<i::FixedArray> line_ends =
i::Handle<i::FixedArray>::cast(i::handle(script->line_ends(), isolate));
@@ -8956,7 +8953,7 @@
if (script->type() == i::Script::TYPE_WASM) {
if (this->SourceMappingURL().IsEmpty()) {
return i::WasmModuleObject::cast(script->wasm_module_object())
- ->GetFunctionOffset(location.GetLineNumber()) +
+ .GetFunctionOffset(location.GetLineNumber()) +
location.GetColumnNumber();
}
DCHECK_EQ(0, location.GetLineNumber());
@@ -8970,7 +8967,7 @@
}
i::Script::InitLineEnds(script);
- CHECK(script->line_ends()->IsFixedArray());
+ CHECK(script->line_ends().IsFixedArray());
i::Handle<i::FixedArray> line_ends = i::Handle<i::FixedArray>::cast(
i::handle(script->line_ends(), script->GetIsolate()));
CHECK(line_ends->length());
@@ -9017,7 +9014,7 @@
i::Isolate* isolate = script->GetIsolate();
i::SharedFunctionInfo::ScriptIterator it(isolate, *script);
for (i::SharedFunctionInfo sfi = it.Next(); !sfi.is_null(); sfi = it.Next()) {
- if (sfi->is_toplevel()) {
+ if (sfi.is_toplevel()) {
return isolate->debug()->SetBreakpointForFunction(
handle(sfi, isolate), isolate->factory()->empty_string(), id);
}
@@ -9046,7 +9043,7 @@
DCHECK_EQ(i::Script::TYPE_WASM, script->type());
i::WasmModuleObject module_object =
i::WasmModuleObject::cast(script->wasm_module_object());
- const i::wasm::WasmModule* module = module_object->module();
+ const i::wasm::WasmModule* module = module_object.module();
DCHECK_GE(i::kMaxInt, module->functions.size());
return static_cast<int>(module->functions.size());
}
@@ -9057,7 +9054,7 @@
DCHECK_EQ(i::Script::TYPE_WASM, script->type());
i::WasmModuleObject module_object =
i::WasmModuleObject::cast(script->wasm_module_object());
- const i::wasm::WasmModule* module = module_object->module();
+ const i::wasm::WasmModule* module = module_object.module();
DCHECK_GE(i::kMaxInt, module->num_imported_functions);
return static_cast<int>(module->num_imported_functions);
}
@@ -9069,7 +9066,7 @@
DCHECK_EQ(i::Script::TYPE_WASM, script->type());
i::WasmModuleObject module_object =
i::WasmModuleObject::cast(script->wasm_module_object());
- const i::wasm::WasmModule* module = module_object->module();
+ const i::wasm::WasmModule* module = module_object.module();
DCHECK_LE(0, function_index);
DCHECK_GT(module->functions.size(), function_index);
const i::wasm::WasmFunction& func = module->functions[function_index];
@@ -9085,12 +9082,12 @@
DCHECK_EQ(i::Script::TYPE_WASM, script->type());
i::WasmModuleObject module_object =
i::WasmModuleObject::cast(script->wasm_module_object());
- const i::wasm::WasmModule* module = module_object->module();
+ const i::wasm::WasmModule* module = module_object.module();
DCHECK_LE(0, function_index);
DCHECK_GT(module->functions.size(), function_index);
const i::wasm::WasmFunction& func = module->functions[function_index];
i::wasm::ModuleWireBytes wire_bytes(
- module_object->native_module()->wire_bytes());
+ module_object.native_module()->wire_bytes());
i::Vector<const i::byte> function_bytes = wire_bytes.GetFunctionBytes(&func);
// TODO(herhut): Maybe also take module, name and signature into account.
return i::StringHasher::HashSequentialString(function_bytes.begin(),
@@ -9104,7 +9101,7 @@
DCHECK_EQ(i::Script::TYPE_WASM, script->type());
i::WasmModuleObject module_object =
i::WasmModuleObject::cast(script->wasm_module_object());
- return module_object->DisassembleFunction(function_index);
+ return module_object.DisassembleFunction(function_index);
}
debug::Location::Location(int line_number, int column_number)
@@ -9138,8 +9135,8 @@
i::Script::Iterator iterator(isolate);
for (i::Script script = iterator.Next(); !script.is_null();
script = iterator.Next()) {
- if (!script->IsUserJavaScript()) continue;
- if (script->HasValidSource()) {
+ if (!script.IsUserJavaScript()) continue;
+ if (script.HasValidSource()) {
i::HandleScope handle_scope(isolate);
i::Handle<i::Script> script_handle(script, isolate);
scripts.Append(ToApiHandle<Script>(script_handle));
@@ -9190,8 +9187,8 @@
*Utils::OpenHandle(*script));
for (i::SharedFunctionInfo info = iter.Next(); !info.is_null();
info = iter.Next()) {
- if (info->HasDebugInfo()) {
- info->GetDebugInfo()->set_computed_debug_is_blackboxed(false);
+ if (info.HasDebugInfo()) {
+ info.GetDebugInfo().set_computed_debug_is_blackboxed(false);
}
}
}
@@ -9227,7 +9224,7 @@
if (object->IsJSMapIterator()) {
i::Handle<i::JSMapIterator> it = i::Handle<i::JSMapIterator>::cast(object);
MapAsArrayKind const kind =
- static_cast<MapAsArrayKind>(it->map()->instance_type());
+ static_cast<MapAsArrayKind>(it->map().instance_type());
*is_key_value = kind == MapAsArrayKind::kEntries;
if (!it->HasMore()) return v8::Array::New(v8_isolate);
return Utils::ToLocal(
@@ -9236,7 +9233,7 @@
if (object->IsJSSetIterator()) {
i::Handle<i::JSSetIterator> it = i::Handle<i::JSSetIterator>::cast(object);
SetAsArrayKind const kind =
- static_cast<SetAsArrayKind>(it->map()->instance_type());
+ static_cast<SetAsArrayKind>(it->map().instance_type());
*is_key_value = kind == SetAsArrayKind::kEntries;
if (!it->HasMore()) return v8::Array::New(v8_isolate);
return Utils::ToLocal(
@@ -9263,8 +9260,8 @@
name, builtin_id, i::LanguageMode::kStrict);
i::Handle<i::JSFunction> fun = isolate->factory()->NewFunction(args);
- fun->shared()->set_internal_formal_parameter_count(0);
- fun->shared()->set_length(0);
+ fun->shared().set_internal_formal_parameter_count(0);
+ fun->shared().set_length(0);
return Utils::ToLocal(handle_scope.CloseAndEscape(fun));
}
@@ -9305,8 +9302,8 @@
MaybeLocal<debug::Script> debug::GeneratorObject::Script() {
i::Handle<i::JSGeneratorObject> obj = Utils::OpenHandle(this);
- i::Object maybe_script = obj->function()->shared()->script();
- if (!maybe_script->IsScript()) return MaybeLocal<debug::Script>();
+ i::Object maybe_script = obj->function().shared().script();
+ if (!maybe_script.IsScript()) return MaybeLocal<debug::Script>();
i::Handle<i::Script> script(i::Script::cast(maybe_script), obj->GetIsolate());
return ToApiHandle<debug::Script>(script);
}
@@ -9319,13 +9316,13 @@
debug::Location debug::GeneratorObject::SuspendedLocation() {
i::Handle<i::JSGeneratorObject> obj = Utils::OpenHandle(this);
CHECK(obj->is_suspended());
- i::Object maybe_script = obj->function()->shared()->script();
- if (!maybe_script->IsScript()) return debug::Location();
+ i::Object maybe_script = obj->function().shared().script();
+ if (!maybe_script.IsScript()) return debug::Location();
i::Isolate* isolate = obj->GetIsolate();
i::Handle<i::Script> script(i::Script::cast(maybe_script), isolate);
i::Script::PositionInfo info;
i::SharedFunctionInfo::EnsureSourcePositionsAvailable(
- isolate, i::handle(obj->function()->shared(), isolate));
+ isolate, i::handle(obj->function().shared(), isolate));
i::Script::GetPositionInfo(script, obj->source_position(), &info,
i::Script::WITH_OFFSET);
return debug::Location(info.line, info.column);
@@ -9370,7 +9367,7 @@
i::Handle<i::Context> context = Utils::OpenHandle(*v8_context);
i::Isolate* isolate = context->GetIsolate();
i::Handle<i::ScriptContextTable> table(
- context->global_object()->native_context()->script_context_table(),
+ context->global_object().native_context().script_context_table(),
isolate);
for (int i = 0; i < table->used(); i++) {
i::Handle<i::Context> context =
diff --git a/src/arm/assembler-arm-inl.h b/src/arm/assembler-arm-inl.h
index 62de378..ce83cbe 100644
--- a/src/arm/assembler-arm-inl.h
+++ b/src/arm/assembler-arm-inl.h
@@ -122,7 +122,7 @@
WriteBarrierMode write_barrier_mode,
ICacheFlushMode icache_flush_mode) {
DCHECK(IsCodeTarget(rmode_) || rmode_ == FULL_EMBEDDED_OBJECT);
- Assembler::set_target_address_at(pc_, constant_pool_, target->ptr(),
+ Assembler::set_target_address_at(pc_, constant_pool_, target.ptr(),
icache_flush_mode);
if (write_barrier_mode == UPDATE_WRITE_BARRIER && !host().is_null()) {
WriteBarrierForCode(host(), this, target);
diff --git a/src/arm/simulator-arm.cc b/src/arm/simulator-arm.cc
index 24d10f7..bf7749e 100644
--- a/src/arm/simulator-arm.cc
+++ b/src/arm/simulator-arm.cc
@@ -290,7 +290,7 @@
Object obj(value);
os << arg1 << ": \n";
#ifdef DEBUG
- obj->Print(os);
+ obj.Print(os);
os << "\n";
#else
os << Brief(obj) << "\n";
@@ -339,7 +339,7 @@
if (obj.IsSmi()) {
PrintF("smi %d", Smi::ToInt(obj));
} else {
- obj->ShortPrint();
+ obj.ShortPrint();
}
PrintF(")");
}
diff --git a/src/arm64/assembler-arm64-inl.h b/src/arm64/assembler-arm64-inl.h
index 023ba63..50d6197 100644
--- a/src/arm64/assembler-arm64-inl.h
+++ b/src/arm64/assembler-arm64-inl.h
@@ -702,7 +702,7 @@
WriteBarrierMode write_barrier_mode,
ICacheFlushMode icache_flush_mode) {
DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_));
- Assembler::set_target_address_at(pc_, constant_pool_, target->ptr(),
+ Assembler::set_target_address_at(pc_, constant_pool_, target.ptr(),
icache_flush_mode);
if (write_barrier_mode == UPDATE_WRITE_BARRIER && !host().is_null()) {
WriteBarrierForCode(host(), this, target);
diff --git a/src/arm64/simulator-arm64.cc b/src/arm64/simulator-arm64.cc
index cd69536..fd0f0387 100644
--- a/src/arm64/simulator-arm64.cc
+++ b/src/arm64/simulator-arm64.cc
@@ -3296,7 +3296,7 @@
if (obj.IsSmi()) {
PrintF("smi %" PRId32, Smi::ToInt(obj));
} else {
- obj->ShortPrint();
+ obj.ShortPrint();
}
PrintF(")");
}
diff --git a/src/asmjs/asm-js.cc b/src/asmjs/asm-js.cc
index 05e0a3c..7fc283d 100644
--- a/src/asmjs/asm-js.cc
+++ b/src/asmjs/asm-js.cc
@@ -73,11 +73,11 @@
Handle<Object> value = StdlibMathMember(isolate, stdlib, name); \
if (!value->IsJSFunction()) return false; \
SharedFunctionInfo shared = Handle<JSFunction>::cast(value)->shared(); \
- if (!shared->HasBuiltinId() || \
- shared->builtin_id() != Builtins::kMath##FName) { \
+ if (!shared.HasBuiltinId() || \
+ shared.builtin_id() != Builtins::kMath##FName) { \
return false; \
} \
- DCHECK_EQ(shared->GetCode(), \
+ DCHECK_EQ(shared.GetCode(), \
isolate->builtins()->builtin(Builtins::kMath##FName)); \
}
STDLIB_MATH_FUNCTION_LIST(STDLIB_MATH_FUNC)
diff --git a/src/ast/scopes.cc b/src/ast/scopes.cc
index c78d9c8..c8192f7a 100644
--- a/src/ast/scopes.cc
+++ b/src/ast/scopes.cc
@@ -310,8 +310,8 @@
Scope* innermost_scope = nullptr;
Scope* outer_scope = nullptr;
while (!scope_info.is_null()) {
- if (scope_info->scope_type() == WITH_SCOPE) {
- if (scope_info->IsDebugEvaluateScope()) {
+ if (scope_info.scope_type() == WITH_SCOPE) {
+ if (scope_info.IsDebugEvaluateScope()) {
outer_scope = new (zone)
DeclarationScope(zone, FUNCTION_SCOPE, handle(scope_info, isolate));
outer_scope->set_is_debug_evaluate_scope();
@@ -321,46 +321,46 @@
new (zone) Scope(zone, WITH_SCOPE, handle(scope_info, isolate));
}
- } else if (scope_info->scope_type() == SCRIPT_SCOPE) {
+ } else if (scope_info.scope_type() == SCRIPT_SCOPE) {
// If we reach a script scope, it's the outermost scope. Install the
// scope info of this script context onto the existing script scope to
// avoid nesting script scopes.
if (deserialization_mode == DeserializationMode::kIncludingVariables) {
script_scope->SetScriptScopeInfo(handle(scope_info, isolate));
}
- DCHECK(!scope_info->HasOuterScopeInfo());
+ DCHECK(!scope_info.HasOuterScopeInfo());
break;
- } else if (scope_info->scope_type() == FUNCTION_SCOPE) {
+ } else if (scope_info.scope_type() == FUNCTION_SCOPE) {
outer_scope = new (zone)
DeclarationScope(zone, FUNCTION_SCOPE, handle(scope_info, isolate));
- if (scope_info->IsAsmModule()) {
+ if (scope_info.IsAsmModule()) {
outer_scope->AsDeclarationScope()->set_is_asm_module();
}
- } else if (scope_info->scope_type() == EVAL_SCOPE) {
+ } else if (scope_info.scope_type() == EVAL_SCOPE) {
outer_scope = new (zone)
DeclarationScope(zone, EVAL_SCOPE, handle(scope_info, isolate));
- } else if (scope_info->scope_type() == CLASS_SCOPE) {
+ } else if (scope_info.scope_type() == CLASS_SCOPE) {
outer_scope = new (zone)
ClassScope(zone, ast_value_factory, handle(scope_info, isolate));
- } else if (scope_info->scope_type() == BLOCK_SCOPE) {
- if (scope_info->is_declaration_scope()) {
+ } else if (scope_info.scope_type() == BLOCK_SCOPE) {
+ if (scope_info.is_declaration_scope()) {
outer_scope = new (zone)
DeclarationScope(zone, BLOCK_SCOPE, handle(scope_info, isolate));
} else {
outer_scope =
new (zone) Scope(zone, BLOCK_SCOPE, handle(scope_info, isolate));
}
- } else if (scope_info->scope_type() == MODULE_SCOPE) {
+ } else if (scope_info.scope_type() == MODULE_SCOPE) {
outer_scope = new (zone)
ModuleScope(isolate, handle(scope_info, isolate), ast_value_factory);
} else {
- DCHECK_EQ(scope_info->scope_type(), CATCH_SCOPE);
- DCHECK_EQ(scope_info->ContextLocalCount(), 1);
- DCHECK_EQ(scope_info->ContextLocalMode(0), VariableMode::kVar);
- DCHECK_EQ(scope_info->ContextLocalInitFlag(0), kCreatedInitialized);
- String name = scope_info->ContextLocalName(0);
+ DCHECK_EQ(scope_info.scope_type(), CATCH_SCOPE);
+ DCHECK_EQ(scope_info.ContextLocalCount(), 1);
+ DCHECK_EQ(scope_info.ContextLocalMode(0), VariableMode::kVar);
+ DCHECK_EQ(scope_info.ContextLocalInitFlag(0), kCreatedInitialized);
+ String name = scope_info.ContextLocalName(0);
MaybeAssignedFlag maybe_assigned =
- scope_info->ContextLocalMaybeAssignedFlag(0);
+ scope_info.ContextLocalMaybeAssignedFlag(0);
outer_scope = new (zone)
Scope(zone, ast_value_factory->GetString(handle(name, isolate)),
maybe_assigned, handle(scope_info, isolate));
@@ -373,8 +373,8 @@
}
current_scope = outer_scope;
if (innermost_scope == nullptr) innermost_scope = current_scope;
- scope_info = scope_info->HasOuterScopeInfo() ? scope_info->OuterScopeInfo()
- : ScopeInfo();
+ scope_info = scope_info.HasOuterScopeInfo() ? scope_info.OuterScopeInfo()
+ : ScopeInfo();
}
if (deserialization_mode == DeserializationMode::kIncludingVariables &&
diff --git a/src/builtins/accessors.cc b/src/builtins/accessors.cc
index e8a20b4..ffa86df 100644
--- a/src/builtins/accessors.cc
+++ b/src/builtins/accessors.cc
@@ -152,7 +152,7 @@
DisallowHeapAllocation no_allocation;
HandleScope scope(isolate);
JSArray holder = JSArray::cast(*Utils::OpenHandle(*info.Holder()));
- Object result = holder->length();
+ Object result = holder.length();
info.GetReturnValue().Set(Utils::ToLocal(Handle<Object>(result, isolate)));
}
@@ -180,7 +180,7 @@
}
if (!was_readonly && V8_UNLIKELY(JSArray::HasReadOnlyLength(array)) &&
- length != array->length()->Number()) {
+ length != array->length().Number()) {
// AnythingToArrayLength() may have called setter re-entrantly and modified
// its property descriptor. Don't perform this check if "length" was
// previously readonly, as this may have been called during
@@ -200,7 +200,7 @@
JSArray::SetLength(array, length);
uint32_t actual_new_len = 0;
- CHECK(array->length()->ToArrayLength(&actual_new_len));
+ CHECK(array->length().ToArrayLength(&actual_new_len));
// Fail if there were non-deletable elements.
if (actual_new_len != length) {
if (info.ShouldThrowOnError()) {
@@ -233,8 +233,7 @@
JSModuleNamespace holder =
JSModuleNamespace::cast(*Utils::OpenHandle(*info.Holder()));
Handle<Object> result;
- if (!holder
- ->GetExport(isolate, Handle<String>::cast(Utils::OpenHandle(*name)))
+ if (!holder.GetExport(isolate, Handle<String>::cast(Utils::OpenHandle(*name)))
.ToHandle(&result)) {
isolate->OptionalRescheduleException(false);
} else {
@@ -285,12 +284,12 @@
// in the hierarchy, in this case for String values.
Object value = *Utils::OpenHandle(*v8::Local<v8::Value>(info.This()));
- if (!value->IsString()) {
+ if (!value.IsString()) {
// Not a string value. That means that we either got a String wrapper or
// a Value with a String wrapper in its prototype chain.
- value = JSValue::cast(*Utils::OpenHandle(*info.Holder()))->value();
+ value = JSValue::cast(*Utils::OpenHandle(*info.Holder())).value();
}
- Object result = Smi::FromInt(String::cast(value)->length());
+ Object result = Smi::FromInt(String::cast(value).length());
info.GetReturnValue().Set(Utils::ToLocal(Handle<Object>(result, isolate)));
}
@@ -479,10 +478,10 @@
DCHECK(array->length() == length);
for (int i = 0; i < length; i++) {
Object value = frame->GetParameter(i);
- if (value->IsTheHole(isolate)) {
+ if (value.IsTheHole(isolate)) {
// Generators currently use holes as dummy arguments when resuming. We
// must not leak those.
- DCHECK(IsResumableFunction(function->shared()->kind()));
+ DCHECK(IsResumableFunction(function->shared().kind()));
value = ReadOnlyRoots(isolate).undefined_value();
}
array->set(i, value);
@@ -516,7 +515,7 @@
Handle<JSFunction> function =
Handle<JSFunction>::cast(Utils::OpenHandle(*info.Holder()));
Handle<Object> result = isolate->factory()->null_value();
- if (!function->shared()->native()) {
+ if (!function->shared().native()) {
// Find the top invocation of the function by traversing frames.
for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
JavaScriptFrame* frame = it.frame();
@@ -541,7 +540,7 @@
static inline bool AllowAccessToFunction(Context current_context,
JSFunction function) {
- return current_context->HasSameSecurityTokenAs(function->context());
+ return current_context.HasSameSecurityTokenAs(function.context());
}
class FrameFunctionIterator {
@@ -565,7 +564,7 @@
bool FindNextNonTopLevel() {
do {
if (!next().ToHandle(&function_)) return false;
- } while (function_->shared()->is_toplevel());
+ } while (function_->shared().is_toplevel());
return true;
}
@@ -574,8 +573,8 @@
// unless directly exposed, in which case the native flag is set on them.
// Returns true if one is found, and false if the iterator ends before.
bool FindFirstNativeOrUserJavaScript() {
- while (!function_->shared()->native() &&
- !function_->shared()->IsUserJavaScript()) {
+ while (!function_->shared().native() &&
+ !function_->shared().IsUserJavaScript()) {
if (!next().ToHandle(&function_)) return false;
}
return true;
@@ -646,7 +645,7 @@
MaybeHandle<JSFunction> FindCaller(Isolate* isolate,
Handle<JSFunction> function) {
FrameFunctionIterator it(isolate);
- if (function->shared()->native()) {
+ if (function->shared().native()) {
return MaybeHandle<JSFunction>();
}
// Find the function from the frames. Return null in case no frame
@@ -673,7 +672,7 @@
// Censor if the caller is not a sloppy mode function.
// Change from ES5, which used to throw, see:
// https://bugs.ecmascript.org/show_bug.cgi?id=310
- if (is_strict(caller->shared()->language_mode())) {
+ if (is_strict(caller->shared().language_mode())) {
return MaybeHandle<JSFunction>();
}
// Don't return caller from another security context.
diff --git a/src/builtins/arm/builtins-arm.cc b/src/builtins/arm/builtins-arm.cc
index d3a91a6..a0392de 100644
--- a/src/builtins/arm/builtins-arm.cc
+++ b/src/builtins/arm/builtins-arm.cc
@@ -1343,7 +1343,7 @@
__ ldr(r2, MemOperand(r2));
__ bind(&trampoline_loaded);
- __ add(lr, r2, Operand(interpreter_entry_return_pc_offset->value()));
+ __ add(lr, r2, Operand(interpreter_entry_return_pc_offset.value()));
// Initialize the dispatch table register.
__ Move(
diff --git a/src/builtins/arm64/builtins-arm64.cc b/src/builtins/arm64/builtins-arm64.cc
index 22f58bd..2a658dd 100644
--- a/src/builtins/arm64/builtins-arm64.cc
+++ b/src/builtins/arm64/builtins-arm64.cc
@@ -1482,7 +1482,7 @@
__ Ldr(x1, MemOperand(x1));
__ Bind(&trampoline_loaded);
- __ Add(lr, x1, Operand(interpreter_entry_return_pc_offset->value()));
+ __ Add(lr, x1, Operand(interpreter_entry_return_pc_offset.value()));
// Initialize the dispatch table register.
__ Mov(
diff --git a/src/builtins/builtins-api.cc b/src/builtins/builtins-api.cc
index d5c6fd2..5fac499 100644
--- a/src/builtins/builtins-api.cc
+++ b/src/builtins/builtins-api.cc
@@ -23,23 +23,23 @@
// TODO(dcarney): CallOptimization duplicates this logic, merge.
JSReceiver GetCompatibleReceiver(Isolate* isolate, FunctionTemplateInfo info,
JSReceiver receiver) {
- Object recv_type = info->signature();
+ Object recv_type = info.signature();
// No signature, return holder.
- if (!recv_type->IsFunctionTemplateInfo()) return receiver;
+ if (!recv_type.IsFunctionTemplateInfo()) return receiver;
// A Proxy cannot have been created from the signature template.
- if (!receiver->IsJSObject()) return JSReceiver();
+ if (!receiver.IsJSObject()) return JSReceiver();
JSObject js_obj_receiver = JSObject::cast(receiver);
FunctionTemplateInfo signature = FunctionTemplateInfo::cast(recv_type);
// Check the receiver. Fast path for receivers with no hidden prototypes.
- if (signature->IsTemplateFor(js_obj_receiver)) return receiver;
- if (!js_obj_receiver->map()->has_hidden_prototype()) return JSReceiver();
+ if (signature.IsTemplateFor(js_obj_receiver)) return receiver;
+ if (!js_obj_receiver.map().has_hidden_prototype()) return JSReceiver();
for (PrototypeIterator iter(isolate, js_obj_receiver, kStartAtPrototype,
PrototypeIterator::END_AT_NON_HIDDEN);
!iter.IsAtEnd(); iter.Advance()) {
JSObject current = iter.GetCurrent<JSObject>();
- if (signature->IsTemplateFor(current)) return current;
+ if (signature.IsTemplateFor(current)) return current;
}
return JSReceiver();
}
@@ -53,7 +53,7 @@
JSReceiver raw_holder;
if (is_construct) {
DCHECK(args.receiver()->IsTheHole(isolate));
- if (fun_data->GetInstanceTemplate()->IsUndefined(isolate)) {
+ if (fun_data->GetInstanceTemplate().IsUndefined(isolate)) {
v8::Local<ObjectTemplate> templ =
ObjectTemplate::New(reinterpret_cast<v8::Isolate*>(isolate),
ToApiHandle<v8::FunctionTemplate>(fun_data));
@@ -98,10 +98,10 @@
}
Object raw_call_data = fun_data->call_code();
- if (!raw_call_data->IsUndefined(isolate)) {
- DCHECK(raw_call_data->IsCallHandlerInfo());
+ if (!raw_call_data.IsUndefined(isolate)) {
+ DCHECK(raw_call_data.IsCallHandlerInfo());
CallHandlerInfo call_data = CallHandlerInfo::cast(raw_call_data);
- Object data_obj = call_data->data();
+ Object data_obj = call_data.data();
FunctionCallbackArguments custom(isolate, data_obj, *function, raw_holder,
*new_target, args.address_of_arg_at(1),
@@ -129,7 +129,7 @@
Handle<JSFunction> function = args.target();
Handle<Object> receiver = args.receiver();
Handle<HeapObject> new_target = args.new_target();
- Handle<FunctionTemplateInfo> fun_data(function->shared()->get_api_func_data(),
+ Handle<FunctionTemplateInfo> fun_data(function->shared().get_api_func_data(),
isolate);
if (new_target->IsJSReceiver()) {
RETURN_RESULT_OR_FAILURE(
@@ -171,12 +171,12 @@
RuntimeCallCounterId::kInvokeApiFunction);
DCHECK(function->IsFunctionTemplateInfo() ||
(function->IsJSFunction() &&
- JSFunction::cast(*function)->shared()->IsApiFunction()));
+ JSFunction::cast(*function).shared().IsApiFunction()));
// Do proper receiver conversion for non-strict mode api functions.
if (!is_construct && !receiver->IsJSReceiver()) {
if (function->IsFunctionTemplateInfo() ||
- is_sloppy(JSFunction::cast(*function)->shared()->language_mode())) {
+ is_sloppy(JSFunction::cast(*function).shared().language_mode())) {
ASSIGN_RETURN_ON_EXCEPTION(isolate, receiver,
Object::ConvertReceiver(isolate, receiver),
Object);
@@ -191,7 +191,7 @@
Handle<FunctionTemplateInfo> fun_data =
function->IsFunctionTemplateInfo()
? Handle<FunctionTemplateInfo>::cast(function)
- : handle(JSFunction::cast(*function)->shared()->get_api_func_data(),
+ : handle(JSFunction::cast(*function).shared().get_api_func_data(),
isolate);
// Construct BuiltinArguments object:
// new target, function, arguments reversed, receiver.
@@ -211,8 +211,8 @@
}
DCHECK_EQ(cursor, BuiltinArguments::kPaddingOffset);
argv[BuiltinArguments::kPaddingOffset] =
- ReadOnlyRoots(isolate).the_hole_value()->ptr();
- argv[BuiltinArguments::kArgcOffset] = Smi::FromInt(frame_argc)->ptr();
+ ReadOnlyRoots(isolate).the_hole_value().ptr();
+ argv[BuiltinArguments::kArgcOffset] = Smi::FromInt(frame_argc).ptr();
argv[BuiltinArguments::kTargetOffset] = function->ptr();
argv[BuiltinArguments::kNewTargetOffset] = new_target->ptr();
MaybeHandle<Object> result;
@@ -254,12 +254,12 @@
// Get the invocation callback from the function descriptor that was
// used to create the called object.
- DCHECK(obj->map()->is_callable());
- JSFunction constructor = JSFunction::cast(obj->map()->GetConstructor());
- DCHECK(constructor->shared()->IsApiFunction());
+ DCHECK(obj.map().is_callable());
+ JSFunction constructor = JSFunction::cast(obj.map().GetConstructor());
+ DCHECK(constructor.shared().IsApiFunction());
Object handler =
- constructor->shared()->get_api_func_data()->GetInstanceCallHandler();
- DCHECK(!handler->IsUndefined(isolate));
+ constructor.shared().get_api_func_data().GetInstanceCallHandler();
+ DCHECK(!handler.IsUndefined(isolate));
CallHandlerInfo call_data = CallHandlerInfo::cast(handler);
// Get the data for the call and perform the callback.
@@ -267,7 +267,7 @@
{
HandleScope scope(isolate);
LOG(isolate, ApiObjectAccess("call non-function", obj));
- FunctionCallbackArguments custom(isolate, call_data->data(), constructor,
+ FunctionCallbackArguments custom(isolate, call_data.data(), constructor,
obj, new_target, args.address_of_arg_at(1),
args.length() - 1);
Handle<Object> result_handle = custom.Call(call_data);
diff --git a/src/builtins/builtins-array.cc b/src/builtins/builtins-array.cc
index 63daad5..1bfc9f7 100644
--- a/src/builtins/builtins-array.cc
+++ b/src/builtins/builtins-array.cc
@@ -29,8 +29,8 @@
}
inline bool HasSimpleElements(JSObject current) {
- return !current->map()->IsCustomElementsReceiverMap() &&
- !current->GetElementsAccessor()->HasAccessors(current);
+ return !current.map().IsCustomElementsReceiverMap() &&
+ !current.GetElementsAccessor()->HasAccessors(current);
}
inline bool HasOnlySimpleReceiverElements(Isolate* isolate, JSObject receiver) {
@@ -43,7 +43,7 @@
DisallowHeapAllocation no_gc;
PrototypeIterator iter(isolate, receiver, kStartAtReceiver);
for (; !iter.IsAtEnd(); iter.Advance()) {
- if (iter.GetCurrent()->IsJSProxy()) return false;
+ if (iter.GetCurrent().IsJSProxy()) return false;
JSObject current = iter.GetCurrent<JSObject>();
if (!HasSimpleElements(current)) return false;
}
@@ -70,8 +70,8 @@
int last_arg_index = std::min(first_arg_index + num_arguments, args_length);
for (int i = first_arg_index; i < last_arg_index; i++) {
Object arg = (*args)[i];
- if (arg->IsHeapObject()) {
- if (arg->IsHeapNumber()) {
+ if (arg.IsHeapObject()) {
+ if (arg.IsHeapNumber()) {
target_kind = PACKED_DOUBLE_ELEMENTS;
} else {
target_kind = PACKED_ELEMENTS;
@@ -101,7 +101,7 @@
Handle<JSArray> array = Handle<JSArray>::cast(receiver);
ElementsKind origin_kind = array->GetElementsKind();
if (IsDictionaryElementsKind(origin_kind)) return false;
- if (!array->map()->is_extensible()) return false;
+ if (!array->map().is_extensible()) return false;
if (args == nullptr) return true;
// If there may be elements accessors in the prototype chain, the fast path
@@ -148,7 +148,7 @@
Isolate* isolate, Handle<JSReceiver> receiver) {
if (receiver->IsJSArray()) {
Handle<JSArray> array = Handle<JSArray>::cast(receiver);
- double length = array->length()->Number();
+ double length = array->length().Number();
DCHECK(0 <= length && length <= kMaxSafeInteger);
return Just(length);
@@ -373,7 +373,7 @@
// Fast Elements Path
int to_add = args.length() - 1;
Handle<JSArray> array = Handle<JSArray>::cast(receiver);
- uint32_t len = static_cast<uint32_t>(array->length()->Number());
+ uint32_t len = static_cast<uint32_t>(array->length().Number());
if (to_add == 0) return *isolate->factory()->NewNumberFromUint(len);
// Currently fixed arrays cannot grow too big, so we should never hit this.
@@ -457,7 +457,7 @@
}
Handle<JSArray> array = Handle<JSArray>::cast(receiver);
- uint32_t len = static_cast<uint32_t>(array->length()->Number());
+ uint32_t len = static_cast<uint32_t>(array->length().Number());
if (len == 0) return ReadOnlyRoots(isolate).undefined_value();
if (JSArray::HasReadOnlyLength(array)) {
@@ -597,7 +597,7 @@
Handle<JSArray> array = Handle<JSArray>::cast(args.receiver());
// These are checked in the Torque builtin.
- DCHECK(array->map()->is_extensible());
+ DCHECK(array->map().is_extensible());
DCHECK(!IsDictionaryElementsKind(array->GetElementsKind()));
DCHECK(IsJSArrayFastElementMovingAllowed(isolate, *array));
DCHECK(!isolate->IsAnyInitialArrayPrototype(array));
@@ -644,7 +644,7 @@
IsFixedArrayField::encode(storage->IsFixedArray()) |
HasSimpleElementsField::encode(
storage->IsFixedArray() ||
- !storage->map()->IsCustomElementsReceiverMap())) {
+ !storage->map().IsCustomElementsReceiverMap())) {
DCHECK(!(this->fast_elements() && !is_fixed_array()));
}
@@ -708,7 +708,7 @@
// provided-for index range, go to dictionary mode now.
if (fast_elements() &&
index_offset_ >
- static_cast<uint32_t>(FixedArrayBase::cast(*storage_)->length())) {
+ static_cast<uint32_t>(FixedArrayBase::cast(*storage_).length())) {
SetDictionaryMode();
}
}
@@ -811,7 +811,7 @@
uint32_t EstimateElementCount(Isolate* isolate, Handle<JSArray> array) {
DisallowHeapAllocation no_gc;
- uint32_t length = static_cast<uint32_t>(array->length()->Number());
+ uint32_t length = static_cast<uint32_t>(array->length().Number());
int element_count = 0;
switch (array->GetElementsKind()) {
case PACKED_SMI_ELEMENTS:
@@ -828,7 +828,7 @@
int fast_length = static_cast<int>(length);
FixedArray elements = FixedArray::cast(array->elements());
for (int i = 0; i < fast_length; i++) {
- if (!elements->get(i)->IsTheHole(isolate)) element_count++;
+ if (!elements.get(i).IsTheHole(isolate)) element_count++;
}
break;
}
@@ -838,23 +838,23 @@
// a 32-bit signed integer.
DCHECK_GE(static_cast<int32_t>(FixedDoubleArray::kMaxLength), 0);
int fast_length = static_cast<int>(length);
- if (array->elements()->IsFixedArray()) {
- DCHECK_EQ(FixedArray::cast(array->elements())->length(), 0);
+ if (array->elements().IsFixedArray()) {
+ DCHECK_EQ(FixedArray::cast(array->elements()).length(), 0);
break;
}
FixedDoubleArray elements = FixedDoubleArray::cast(array->elements());
for (int i = 0; i < fast_length; i++) {
- if (!elements->is_the_hole(i)) element_count++;
+ if (!elements.is_the_hole(i)) element_count++;
}
break;
}
case DICTIONARY_ELEMENTS: {
NumberDictionary dictionary = NumberDictionary::cast(array->elements());
- int capacity = dictionary->Capacity();
+ int capacity = dictionary.Capacity();
ReadOnlyRoots roots(isolate);
for (int i = 0; i < capacity; i++) {
- Object key = dictionary->KeyAt(i);
- if (dictionary->IsKey(roots, key)) {
+ Object key = dictionary.KeyAt(i);
+ if (dictionary.IsKey(roots, key)) {
element_count++;
}
}
@@ -893,10 +893,10 @@
case HOLEY_ELEMENTS: {
DisallowHeapAllocation no_gc;
FixedArray elements = FixedArray::cast(object->elements());
- uint32_t length = static_cast<uint32_t>(elements->length());
+ uint32_t length = static_cast<uint32_t>(elements.length());
if (range < length) length = range;
for (uint32_t i = 0; i < length; i++) {
- if (!elements->get(i)->IsTheHole(isolate)) {
+ if (!elements.get(i).IsTheHole(isolate)) {
indices->push_back(i);
}
}
@@ -904,8 +904,8 @@
}
case HOLEY_DOUBLE_ELEMENTS:
case PACKED_DOUBLE_ELEMENTS: {
- if (object->elements()->IsFixedArray()) {
- DCHECK_EQ(object->elements()->length(), 0);
+ if (object->elements().IsFixedArray()) {
+ DCHECK_EQ(object->elements().length(), 0);
break;
}
Handle<FixedDoubleArray> elements(
@@ -922,13 +922,13 @@
case DICTIONARY_ELEMENTS: {
DisallowHeapAllocation no_gc;
NumberDictionary dict = NumberDictionary::cast(object->elements());
- uint32_t capacity = dict->Capacity();
+ uint32_t capacity = dict.Capacity();
ReadOnlyRoots roots(isolate);
FOR_WITH_HANDLE_SCOPE(isolate, uint32_t, j = 0, j, j < capacity, j++, {
- Object k = dict->KeyAt(j);
- if (!dict->IsKey(roots, k)) continue;
- DCHECK(k->IsNumber());
- uint32_t index = static_cast<uint32_t>(k->Number());
+ Object k = dict.KeyAt(j);
+ if (!dict.IsKey(roots, k)) continue;
+ DCHECK(k.IsNumber());
+ uint32_t index = static_cast<uint32_t>(k.Number());
if (index < range) {
indices->push_back(index);
}
@@ -972,7 +972,7 @@
case SLOW_STRING_WRAPPER_ELEMENTS: {
DCHECK(object->IsJSValue());
Handle<JSValue> js_value = Handle<JSValue>::cast(object);
- DCHECK(js_value->value()->IsString());
+ DCHECK(js_value->value().IsString());
Handle<String> string(String::cast(js_value->value()), isolate);
uint32_t length = static_cast<uint32_t>(string->length());
uint32_t i = 0;
@@ -1033,7 +1033,7 @@
if (receiver->IsJSArray()) {
Handle<JSArray> array = Handle<JSArray>::cast(receiver);
- length = static_cast<uint32_t>(array->length()->Number());
+ length = static_cast<uint32_t>(array->length().Number());
} else {
Handle<Object> val;
ASSIGN_RETURN_ON_EXCEPTION_VALUE(
@@ -1096,8 +1096,8 @@
if (length == 0) break;
// Run through the elements FixedArray and use HasElement and GetElement
// to check the prototype for missing elements.
- if (array->elements()->IsFixedArray()) {
- DCHECK_EQ(array->elements()->length(), 0);
+ if (array->elements().IsFixedArray()) {
+ DCHECK_EQ(array->elements().length(), 0);
break;
}
Handle<FixedDoubleArray> elements(
@@ -1197,7 +1197,7 @@
Isolate* isolate) {
int argument_count = args->length();
- bool is_array_species = *species == isolate->context()->array_function();
+ bool is_array_species = *species == isolate->context().array_function();
// Pass 1: estimate the length and number of elements of the result.
// The actual length can be larger if any of the arguments have getters
@@ -1214,7 +1214,7 @@
uint32_t element_estimate;
if (obj->IsJSArray()) {
Handle<JSArray> array(Handle<JSArray>::cast(obj));
- length_estimate = static_cast<uint32_t>(array->length()->Number());
+ length_estimate = static_cast<uint32_t>(array->length().Number());
if (length_estimate != 0) {
ElementsKind array_kind =
GetPackedElementsKind(array->GetElementsKind());
@@ -1271,16 +1271,16 @@
} else {
DisallowHeapAllocation no_gc;
JSArray array = JSArray::cast(*obj);
- uint32_t length = static_cast<uint32_t>(array->length()->Number());
- switch (array->GetElementsKind()) {
+ uint32_t length = static_cast<uint32_t>(array.length().Number());
+ switch (array.GetElementsKind()) {
case HOLEY_DOUBLE_ELEMENTS:
case PACKED_DOUBLE_ELEMENTS: {
// Empty array is FixedArray but not FixedDoubleArray.
if (length == 0) break;
FixedDoubleArray elements =
- FixedDoubleArray::cast(array->elements());
+ FixedDoubleArray::cast(array.elements());
for (uint32_t i = 0; i < length; i++) {
- if (elements->is_the_hole(i)) {
+ if (elements.is_the_hole(i)) {
// TODO(jkummerow/verwaest): We could be a bit more clever
// here: Check if there are no elements/getters on the
// prototype chain, and if so, allow creation of a holey
@@ -1289,7 +1289,7 @@
failure = true;
break;
}
- double double_value = elements->get_scalar(i);
+ double double_value = elements.get_scalar(i);
double_storage->set(j, double_value);
j++;
}
@@ -1298,9 +1298,9 @@
case HOLEY_SMI_ELEMENTS:
case PACKED_SMI_ELEMENTS: {
Object the_hole = ReadOnlyRoots(isolate).the_hole_value();
- FixedArray elements(FixedArray::cast(array->elements()));
+ FixedArray elements(FixedArray::cast(array.elements()));
for (uint32_t i = 0; i < length; i++) {
- Object element = elements->get(i);
+ Object element = elements.get(i);
if (element == the_hole) {
failure = true;
break;
@@ -1385,9 +1385,8 @@
DisallowHeapAllocation no_gc;
Map map = obj->map();
// If there is only the 'length' property we are fine.
- if (map->prototype() ==
- isolate->native_context()->initial_array_prototype() &&
- map->NumberOfOwnDescriptors() == 1) {
+ if (map.prototype() == isolate->native_context()->initial_array_prototype() &&
+ map.NumberOfOwnDescriptors() == 1) {
return true;
}
// TODO(cbruni): slower lookup for array subclasses and support slow
@@ -1414,12 +1413,12 @@
// and calculating total length.
for (int i = 0; i < n_arguments; i++) {
Object arg = (*args)[i];
- if (!arg->IsJSArray()) return MaybeHandle<JSArray>();
+ if (!arg.IsJSArray()) return MaybeHandle<JSArray>();
if (!HasOnlySimpleReceiverElements(isolate, JSObject::cast(arg))) {
return MaybeHandle<JSArray>();
}
// TODO(cbruni): support fast concatenation of DICTIONARY_ELEMENTS.
- if (!JSObject::cast(arg)->HasFastElements()) {
+ if (!JSObject::cast(arg).HasFastElements()) {
return MaybeHandle<JSArray>();
}
Handle<JSArray> array(JSArray::cast(arg), isolate);
diff --git a/src/builtins/builtins-arraybuffer.cc b/src/builtins/builtins-arraybuffer.cc
index e893991..541ad36 100644
--- a/src/builtins/builtins-arraybuffer.cc
+++ b/src/builtins/builtins-arraybuffer.cc
@@ -43,7 +43,7 @@
isolate, NewRangeError(MessageTemplate::kInvalidArrayBufferLength));
}
SharedFlag shared_flag =
- (*target == target->native_context()->array_buffer_fun())
+ (*target == target->native_context().array_buffer_fun())
? SharedFlag::kNotShared
: SharedFlag::kShared;
if (!JSArrayBuffer::SetupAllocatingData(Handle<JSArrayBuffer>::cast(result),
@@ -61,12 +61,12 @@
BUILTIN(ArrayBufferConstructor) {
HandleScope scope(isolate);
Handle<JSFunction> target = args.target();
- DCHECK(*target == target->native_context()->array_buffer_fun() ||
- *target == target->native_context()->shared_array_buffer_fun());
+ DCHECK(*target == target->native_context().array_buffer_fun() ||
+ *target == target->native_context().shared_array_buffer_fun());
if (args.new_target()->IsUndefined(isolate)) { // [[Call]]
THROW_NEW_ERROR_RETURN_FAILURE(
isolate, NewTypeError(MessageTemplate::kConstructorNotFunction,
- handle(target->shared()->Name(), isolate)));
+ handle(target->shared().Name(), isolate)));
}
// [[Construct]]
Handle<JSReceiver> new_target = Handle<JSReceiver>::cast(args.new_target());
@@ -120,7 +120,7 @@
SealHandleScope shs(isolate);
DCHECK_EQ(2, args.length());
Object arg = args[1];
- return isolate->heap()->ToBoolean(arg->IsJSArrayBufferView());
+ return isolate->heap()->ToBoolean(arg.IsJSArrayBufferView());
}
static Object SliceHelper(BuiltinArguments args, Isolate* isolate,
diff --git a/src/builtins/builtins-bigint.cc b/src/builtins/builtins-bigint.cc
index 70007d5..a1f17f2 100644
--- a/src/builtins/builtins-bigint.cc
+++ b/src/builtins/builtins-bigint.cc
@@ -83,8 +83,8 @@
if (value->IsJSValue()) {
// 2a. Assert: value.[[BigIntData]] is a BigInt value.
// 2b. Return value.[[BigIntData]].
- Object data = JSValue::cast(*value)->value();
- if (data->IsBigInt()) return handle(BigInt::cast(data), isolate);
+ Object data = JSValue::cast(*value).value();
+ if (data.IsBigInt()) return handle(BigInt::cast(data), isolate);
}
// 3. Throw a TypeError exception.
THROW_NEW_ERROR(
diff --git a/src/builtins/builtins-collections-gen.cc b/src/builtins/builtins-collections-gen.cc
index d1b7710..d0d8d89 100644
--- a/src/builtins/builtins-collections-gen.cc
+++ b/src/builtins/builtins-collections-gen.cc
@@ -832,7 +832,7 @@
void CollectionsBuiltinsAssembler::BranchIfMapIteratorProtectorValid(
Label* if_true, Label* if_false) {
Node* protector_cell = LoadRoot(RootIndex::kMapIteratorProtector);
- DCHECK(isolate()->heap()->map_iterator_protector()->IsPropertyCell());
+ DCHECK(isolate()->heap()->map_iterator_protector().IsPropertyCell());
Branch(WordEqual(LoadObjectField(protector_cell, PropertyCell::kValueOffset),
SmiConstant(Isolate::kProtectorValid)),
if_true, if_false);
@@ -889,7 +889,7 @@
void CollectionsBuiltinsAssembler::BranchIfSetIteratorProtectorValid(
Label* if_true, Label* if_false) {
Node* const protector_cell = LoadRoot(RootIndex::kSetIteratorProtector);
- DCHECK(isolate()->heap()->set_iterator_protector()->IsPropertyCell());
+ DCHECK(isolate()->heap()->set_iterator_protector().IsPropertyCell());
Branch(WordEqual(LoadObjectField(protector_cell, PropertyCell::kValueOffset),
SmiConstant(Isolate::kProtectorValid)),
if_true, if_false);
diff --git a/src/builtins/builtins-console.cc b/src/builtins/builtins-console.cc
index e994e9b..4e1dd75 100644
--- a/src/builtins/builtins-console.cc
+++ b/src/builtins/builtins-console.cc
@@ -67,7 +67,7 @@
HandleScope scope(isolate);
std::unique_ptr<char[]> name;
const char* raw_name = "default";
- if (args.length() > 1 && args[1]->IsString()) {
+ if (args.length() > 1 && args[1].IsString()) {
// Try converting the first argument to a string.
name = args.at<String>(1)->ToCString();
raw_name = name.get();
@@ -119,9 +119,9 @@
name_string, builtin_id, i::LanguageMode::kSloppy);
Handle<JSFunction> fun = factory->NewFunction(args);
- fun->shared()->set_native(true);
- fun->shared()->DontAdaptArguments();
- fun->shared()->set_length(1);
+ fun->shared().set_native(true);
+ fun->shared().DontAdaptArguments();
+ fun->shared().set_length(1);
JSObject::AddProperty(isolate, fun, factory->console_context_id_symbol(),
handle(Smi::FromInt(context_id), isolate), NONE);
diff --git a/src/builtins/builtins-date.cc b/src/builtins/builtins-date.cc
index 429f488..5fa44c5 100644
--- a/src/builtins/builtins-date.cc
+++ b/src/builtins/builtins-date.cc
@@ -123,12 +123,12 @@
result = DateParser::Parse(isolate, str_content.ToUC16Vector(), *tmp);
}
if (!result) return std::numeric_limits<double>::quiet_NaN();
- double const day = MakeDay(tmp->get(0)->Number(), tmp->get(1)->Number(),
- tmp->get(2)->Number());
- double const time = MakeTime(tmp->get(3)->Number(), tmp->get(4)->Number(),
- tmp->get(5)->Number(), tmp->get(6)->Number());
+ double const day =
+ MakeDay(tmp->get(0).Number(), tmp->get(1).Number(), tmp->get(2).Number());
+ double const time = MakeTime(tmp->get(3).Number(), tmp->get(4).Number(),
+ tmp->get(5).Number(), tmp->get(6).Number());
double date = MakeDate(day, time);
- if (tmp->get(7)->IsNull(isolate)) {
+ if (tmp->get(7).IsNull(isolate)) {
if (date >= -DateCache::kMaxTimeBeforeUTCInMs &&
date <= DateCache::kMaxTimeBeforeUTCInMs) {
date = isolate->date_cache()->ToUTC(static_cast<int64_t>(date));
@@ -136,7 +136,7 @@
return std::numeric_limits<double>::quiet_NaN();
}
} else {
- date -= tmp->get(7)->Number() * 1000.0;
+ date -= tmp->get(7).Number() * 1000.0;
}
return DateCache::TimeClip(date);
}
@@ -222,7 +222,7 @@
} else if (argc == 1) {
Handle<Object> value = args.at(1);
if (value->IsJSDate()) {
- time_val = Handle<JSDate>::cast(value)->value()->Number();
+ time_val = Handle<JSDate>::cast(value)->value().Number();
} else {
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, value,
Object::ToPrimitive(value));
@@ -374,7 +374,7 @@
Handle<Object> value = args.atOrUndefined(isolate, 1);
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, value,
Object::ToNumber(isolate, value));
- double time_val = date->value()->Number();
+ double time_val = date->value().Number();
if (!std::isnan(time_val)) {
int64_t const time_ms = static_cast<int64_t>(time_val);
int64_t local_time_ms = isolate->date_cache()->ToLocal(time_ms);
@@ -397,8 +397,8 @@
Object::ToNumber(isolate, year));
double y = year->Number(), m = 0.0, dt = 1.0;
int time_within_day = 0;
- if (!std::isnan(date->value()->Number())) {
- int64_t const time_ms = static_cast<int64_t>(date->value()->Number());
+ if (!std::isnan(date->value().Number())) {
+ int64_t const time_ms = static_cast<int64_t>(date->value().Number());
int64_t local_time_ms = isolate->date_cache()->ToLocal(time_ms);
int const days = isolate->date_cache()->DaysFromTime(local_time_ms);
time_within_day = isolate->date_cache()->TimeInDay(local_time_ms, days);
@@ -432,7 +432,7 @@
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, hour,
Object::ToNumber(isolate, hour));
double h = hour->Number();
- double time_val = date->value()->Number();
+ double time_val = date->value().Number();
if (!std::isnan(time_val)) {
int64_t const time_ms = static_cast<int64_t>(time_val);
int64_t local_time_ms = isolate->date_cache()->ToLocal(time_ms);
@@ -471,7 +471,7 @@
Handle<Object> ms = args.atOrUndefined(isolate, 1);
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, ms,
Object::ToNumber(isolate, ms));
- double time_val = date->value()->Number();
+ double time_val = date->value().Number();
if (!std::isnan(time_val)) {
int64_t const time_ms = static_cast<int64_t>(time_val);
int64_t local_time_ms = isolate->date_cache()->ToLocal(time_ms);
@@ -493,7 +493,7 @@
Handle<Object> min = args.atOrUndefined(isolate, 1);
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, min,
Object::ToNumber(isolate, min));
- double time_val = date->value()->Number();
+ double time_val = date->value().Number();
if (!std::isnan(time_val)) {
int64_t const time_ms = static_cast<int64_t>(time_val);
int64_t local_time_ms = isolate->date_cache()->ToLocal(time_ms);
@@ -528,7 +528,7 @@
Handle<Object> month = args.atOrUndefined(isolate, 1);
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, month,
Object::ToNumber(isolate, month));
- double time_val = date->value()->Number();
+ double time_val = date->value().Number();
if (!std::isnan(time_val)) {
int64_t const time_ms = static_cast<int64_t>(time_val);
int64_t local_time_ms = isolate->date_cache()->ToLocal(time_ms);
@@ -557,7 +557,7 @@
Handle<Object> sec = args.atOrUndefined(isolate, 1);
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, sec,
Object::ToNumber(isolate, sec));
- double time_val = date->value()->Number();
+ double time_val = date->value().Number();
if (!std::isnan(time_val)) {
int64_t const time_ms = static_cast<int64_t>(time_val);
int64_t local_time_ms = isolate->date_cache()->ToLocal(time_ms);
@@ -595,8 +595,8 @@
Handle<Object> value = args.atOrUndefined(isolate, 1);
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, value,
Object::ToNumber(isolate, value));
- if (std::isnan(date->value()->Number())) return date->value();
- int64_t const time_ms = static_cast<int64_t>(date->value()->Number());
+ if (std::isnan(date->value().Number())) return date->value();
+ int64_t const time_ms = static_cast<int64_t>(date->value().Number());
int const days = isolate->date_cache()->DaysFromTime(time_ms);
int const time_within_day = isolate->date_cache()->TimeInDay(time_ms, days);
int year, month, day;
@@ -616,8 +616,8 @@
Object::ToNumber(isolate, year));
double y = year->Number(), m = 0.0, dt = 1.0;
int time_within_day = 0;
- if (!std::isnan(date->value()->Number())) {
- int64_t const time_ms = static_cast<int64_t>(date->value()->Number());
+ if (!std::isnan(date->value().Number())) {
+ int64_t const time_ms = static_cast<int64_t>(date->value().Number());
int const days = isolate->date_cache()->DaysFromTime(time_ms);
time_within_day = isolate->date_cache()->TimeInDay(time_ms, days);
int year, month, day;
@@ -650,7 +650,7 @@
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, hour,
Object::ToNumber(isolate, hour));
double h = hour->Number();
- double time_val = date->value()->Number();
+ double time_val = date->value().Number();
if (!std::isnan(time_val)) {
int64_t const time_ms = static_cast<int64_t>(time_val);
int day = isolate->date_cache()->DaysFromTime(time_ms);
@@ -688,7 +688,7 @@
Handle<Object> ms = args.atOrUndefined(isolate, 1);
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, ms,
Object::ToNumber(isolate, ms));
- double time_val = date->value()->Number();
+ double time_val = date->value().Number();
if (!std::isnan(time_val)) {
int64_t const time_ms = static_cast<int64_t>(time_val);
int day = isolate->date_cache()->DaysFromTime(time_ms);
@@ -709,7 +709,7 @@
Handle<Object> min = args.atOrUndefined(isolate, 1);
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, min,
Object::ToNumber(isolate, min));
- double time_val = date->value()->Number();
+ double time_val = date->value().Number();
if (!std::isnan(time_val)) {
int64_t const time_ms = static_cast<int64_t>(time_val);
int day = isolate->date_cache()->DaysFromTime(time_ms);
@@ -743,7 +743,7 @@
Handle<Object> month = args.atOrUndefined(isolate, 1);
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, month,
Object::ToNumber(isolate, month));
- double time_val = date->value()->Number();
+ double time_val = date->value().Number();
if (!std::isnan(time_val)) {
int64_t const time_ms = static_cast<int64_t>(time_val);
int days = isolate->date_cache()->DaysFromTime(time_ms);
@@ -771,7 +771,7 @@
Handle<Object> sec = args.atOrUndefined(isolate, 1);
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, sec,
Object::ToNumber(isolate, sec));
- double time_val = date->value()->Number();
+ double time_val = date->value().Number();
if (!std::isnan(time_val)) {
int64_t const time_ms = static_cast<int64_t>(time_val);
int day = isolate->date_cache()->DaysFromTime(time_ms);
@@ -796,7 +796,7 @@
HandleScope scope(isolate);
CHECK_RECEIVER(JSDate, date, "Date.prototype.toDateString");
DateBuffer buffer =
- ToDateString(date->value()->Number(), isolate->date_cache(), kDateOnly);
+ ToDateString(date->value().Number(), isolate->date_cache(), kDateOnly);
RETURN_RESULT_OR_FAILURE(
isolate, isolate->factory()->NewStringFromUtf8(VectorOf(buffer)));
}
@@ -805,7 +805,7 @@
BUILTIN(DatePrototypeToISOString) {
HandleScope scope(isolate);
CHECK_RECEIVER(JSDate, date, "Date.prototype.toISOString");
- double const time_val = date->value()->Number();
+ double const time_val = date->value().Number();
if (std::isnan(time_val)) {
THROW_NEW_ERROR_RETURN_FAILURE(
isolate, NewRangeError(MessageTemplate::kInvalidTimeValue));
@@ -833,7 +833,7 @@
HandleScope scope(isolate);
CHECK_RECEIVER(JSDate, date, "Date.prototype.toString");
DateBuffer buffer =
- ToDateString(date->value()->Number(), isolate->date_cache());
+ ToDateString(date->value().Number(), isolate->date_cache());
RETURN_RESULT_OR_FAILURE(
isolate, isolate->factory()->NewStringFromUtf8(VectorOf(buffer)));
}
@@ -843,7 +843,7 @@
HandleScope scope(isolate);
CHECK_RECEIVER(JSDate, date, "Date.prototype.toTimeString");
DateBuffer buffer =
- ToDateString(date->value()->Number(), isolate->date_cache(), kTimeOnly);
+ ToDateString(date->value().Number(), isolate->date_cache(), kTimeOnly);
RETURN_RESULT_OR_FAILURE(
isolate, isolate->factory()->NewStringFromUtf8(VectorOf(buffer)));
}
@@ -908,7 +908,7 @@
BUILTIN(DatePrototypeToUTCString) {
HandleScope scope(isolate);
CHECK_RECEIVER(JSDate, date, "Date.prototype.toUTCString");
- double const time_val = date->value()->Number();
+ double const time_val = date->value().Number();
if (std::isnan(time_val)) {
return *isolate->factory()->NewStringFromAsciiChecked("Invalid Date");
}
@@ -929,7 +929,7 @@
BUILTIN(DatePrototypeGetYear) {
HandleScope scope(isolate);
CHECK_RECEIVER(JSDate, date, "Date.prototype.getYear");
- double time_val = date->value()->Number();
+ double time_val = date->value().Number();
if (std::isnan(time_val)) return date->value();
int64_t time_ms = static_cast<int64_t>(time_val);
int64_t local_time_ms = isolate->date_cache()->ToLocal(time_ms);
@@ -954,8 +954,8 @@
}
}
int time_within_day = 0;
- if (!std::isnan(date->value()->Number())) {
- int64_t const time_ms = static_cast<int64_t>(date->value()->Number());
+ if (!std::isnan(date->value().Number())) {
+ int64_t const time_ms = static_cast<int64_t>(date->value().Number());
int64_t local_time_ms = isolate->date_cache()->ToLocal(time_ms);
int const days = isolate->date_cache()->DaysFromTime(local_time_ms);
time_within_day = isolate->date_cache()->TimeInDay(local_time_ms, days);
diff --git a/src/builtins/builtins-extras-utils.cc b/src/builtins/builtins-extras-utils.cc
index 6b191a3..9812b9f 100644
--- a/src/builtins/builtins-extras-utils.cc
+++ b/src/builtins/builtins-extras-utils.cc
@@ -39,7 +39,7 @@
DCHECK_EQ(2, args.length());
Handle<JSFunction> function = args.at<JSFunction>(1);
- Handle<NativeContext> native_context(isolate->context()->native_context(),
+ Handle<NativeContext> native_context(isolate->context().native_context(),
isolate);
Handle<Context> context = isolate->factory()->NewBuiltinContext(
native_context,
@@ -64,7 +64,7 @@
BUILTIN(ExtrasUtilsCallReflectApply) {
HandleScope scope(isolate);
Handle<Context> context(isolate->context(), isolate);
- Handle<NativeContext> native_context(isolate->context()->native_context(),
+ Handle<NativeContext> native_context(isolate->context().native_context(),
isolate);
Handle<JSFunction> function(
JSFunction::cast(context->get(
diff --git a/src/builtins/builtins-function.cc b/src/builtins/builtins-function.cc
index 465cb92..ffa273b 100644
--- a/src/builtins/builtins-function.cc
+++ b/src/builtins/builtins-function.cc
@@ -90,7 +90,7 @@
Execution::Call(isolate, function, target_global_proxy, 0, nullptr),
Object);
function = Handle<JSFunction>::cast(result);
- function->shared()->set_name_should_print_as_anonymous(true);
+ function->shared().set_name_should_print_as_anonymous(true);
}
// If new.target is equal to target then the function created
@@ -149,7 +149,7 @@
// determined after the function is resumed.
Handle<JSFunction> func = Handle<JSFunction>::cast(maybe_func);
Handle<Script> script =
- handle(Script::cast(func->shared()->script()), isolate);
+ handle(Script::cast(func->shared().script()), isolate);
int position = Script::GetEvalPosition(isolate, script);
USE(position);
@@ -168,7 +168,7 @@
// determined after the function is resumed.
Handle<JSFunction> func = Handle<JSFunction>::cast(maybe_func);
Handle<Script> script =
- handle(Script::cast(func->shared()->script()), isolate);
+ handle(Script::cast(func->shared().script()), isolate);
int position = Script::GetEvalPosition(isolate, script);
USE(position);
@@ -279,7 +279,7 @@
// With the revised toString behavior, all callable objects are valid
// receivers for this method.
if (receiver->IsJSReceiver() &&
- JSReceiver::cast(*receiver)->map()->is_callable()) {
+ JSReceiver::cast(*receiver).map().is_callable()) {
return ReadOnlyRoots(isolate).function_native_code_string();
}
THROW_NEW_ERROR_RETURN_FAILURE(
diff --git a/src/builtins/builtins-intl.cc b/src/builtins/builtins-intl.cc
index 3da0c8e..adbf17f 100644
--- a/src/builtins/builtins-intl.cc
+++ b/src/builtins/builtins-intl.cc
@@ -227,7 +227,7 @@
Handle<JSFunction> CreateBoundFunction(Isolate* isolate,
Handle<JSObject> object,
Builtins::Name builtin_id, int len) {
- Handle<NativeContext> native_context(isolate->context()->native_context(),
+ Handle<NativeContext> native_context(isolate->context().native_context(),
isolate);
Handle<Context> context = isolate->factory()->NewBuiltinContext(
native_context,
@@ -483,7 +483,7 @@
}
icu::number::LocalizedNumberFormatter* icu_localized_number_formatter =
- number_format->icu_number_formatter()->raw();
+ number_format->icu_number_formatter().raw();
CHECK_NOT_NULL(icu_localized_number_formatter);
// Return FormatNumber(nf, x).
@@ -970,7 +970,7 @@
Object::ToString(isolate, y));
// 7. Return CompareStrings(collator, X, Y).
- icu::Collator* icu_collator = collator->icu_collator()->raw();
+ icu::Collator* icu_collator = collator->icu_collator().raw();
CHECK_NOT_NULL(icu_collator);
return *Intl::CompareStrings(isolate, *icu_collator, string_x, string_y);
}
@@ -1072,7 +1072,7 @@
RETURN_RESULT_OR_FAILURE(
isolate,
JSSegmentIterator::Create(
- isolate, segmenter_holder->icu_break_iterator()->raw()->clone(),
+ isolate, segmenter_holder->icu_break_iterator().raw()->clone(),
segmenter_holder->granularity(), text));
}
diff --git a/src/builtins/builtins-sharedarraybuffer.cc b/src/builtins/builtins-sharedarraybuffer.cc
index d0e9cd6..b8c7aee 100644
--- a/src/builtins/builtins-sharedarraybuffer.cc
+++ b/src/builtins/builtins-sharedarraybuffer.cc
@@ -163,13 +163,13 @@
double timeout_number;
if (timeout->IsUndefined(isolate)) {
- timeout_number = ReadOnlyRoots(isolate).infinity_value()->Number();
+ timeout_number = ReadOnlyRoots(isolate).infinity_value().Number();
} else {
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, timeout,
Object::ToNumber(isolate, timeout));
timeout_number = timeout->Number();
if (std::isnan(timeout_number))
- timeout_number = ReadOnlyRoots(isolate).infinity_value()->Number();
+ timeout_number = ReadOnlyRoots(isolate).infinity_value().Number();
else if (timeout_number < 0)
timeout_number = 0;
}
diff --git a/src/builtins/builtins-string-gen.cc b/src/builtins/builtins-string-gen.cc
index 736dd71..b7cf551 100644
--- a/src/builtins/builtins-string-gen.cc
+++ b/src/builtins/builtins-string-gen.cc
@@ -2023,7 +2023,7 @@
// Check that the String iterator hasn't been modified in a way that would
// affect iteration.
Node* protector_cell = LoadRoot(RootIndex::kStringIteratorProtector);
- DCHECK(isolate()->heap()->string_iterator_protector()->IsPropertyCell());
+ DCHECK(isolate()->heap()->string_iterator_protector().IsPropertyCell());
Branch(WordEqual(LoadObjectField(protector_cell, PropertyCell::kValueOffset),
SmiConstant(Isolate::kProtectorValid)),
if_true, if_false);
diff --git a/src/builtins/builtins-symbol.cc b/src/builtins/builtins-symbol.cc
index 4ff872a..c984e44 100644
--- a/src/builtins/builtins-symbol.cc
+++ b/src/builtins/builtins-symbol.cc
@@ -56,11 +56,11 @@
Object result;
if (symbol->is_public()) {
result = symbol->name();
- DCHECK(result->IsString());
+ DCHECK(result.IsString());
} else {
result = ReadOnlyRoots(isolate).undefined_value();
}
- DCHECK_EQ(isolate->heap()->public_symbol_table()->SlowReverseLookup(*symbol),
+ DCHECK_EQ(isolate->heap()->public_symbol_table().SlowReverseLookup(*symbol),
result);
return result;
}
diff --git a/src/builtins/builtins-typed-array.cc b/src/builtins/builtins-typed-array.cc
index 27d17d0..f46a6c6 100644
--- a/src/builtins/builtins-typed-array.cc
+++ b/src/builtins/builtins-typed-array.cc
@@ -33,7 +33,7 @@
: std::min<int64_t>(relative, maximum);
} else {
DCHECK(num->IsHeapNumber());
- double relative = HeapNumber::cast(*num)->value();
+ double relative = HeapNumber::cast(*num).value();
DCHECK(!std::isnan(relative));
return static_cast<int64_t>(
relative < 0 ? std::max<double>(relative + maximum, minimum)
diff --git a/src/builtins/builtins-utils.h b/src/builtins/builtins-utils.h
index 7979b53..822f9df 100644
--- a/src/builtins/builtins-utils.h
+++ b/src/builtins/builtins-utils.h
@@ -66,31 +66,31 @@
// through the BuiltinArguments object args.
// TODO(cbruni): add global flag to check whether any tracing events have been
// enabled.
-#define BUILTIN(name) \
- V8_WARN_UNUSED_RESULT static Object Builtin_Impl_##name( \
- BuiltinArguments args, Isolate* isolate); \
- \
- V8_NOINLINE static Address Builtin_Impl_Stats_##name( \
- int args_length, Address* args_object, Isolate* isolate) { \
- BuiltinArguments args(args_length, args_object); \
- RuntimeCallTimerScope timer(isolate, \
- RuntimeCallCounterId::kBuiltin_##name); \
- TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.runtime"), \
- "V8.Builtin_" #name); \
- return Builtin_Impl_##name(args, isolate)->ptr(); \
- } \
- \
- V8_WARN_UNUSED_RESULT Address Builtin_##name( \
- int args_length, Address* args_object, Isolate* isolate) { \
- DCHECK(isolate->context().is_null() || isolate->context()->IsContext()); \
- if (V8_UNLIKELY(TracingFlags::is_runtime_stats_enabled())) { \
- return Builtin_Impl_Stats_##name(args_length, args_object, isolate); \
- } \
- BuiltinArguments args(args_length, args_object); \
- return Builtin_Impl_##name(args, isolate)->ptr(); \
- } \
- \
- V8_WARN_UNUSED_RESULT static Object Builtin_Impl_##name( \
+#define BUILTIN(name) \
+ V8_WARN_UNUSED_RESULT static Object Builtin_Impl_##name( \
+ BuiltinArguments args, Isolate* isolate); \
+ \
+ V8_NOINLINE static Address Builtin_Impl_Stats_##name( \
+ int args_length, Address* args_object, Isolate* isolate) { \
+ BuiltinArguments args(args_length, args_object); \
+ RuntimeCallTimerScope timer(isolate, \
+ RuntimeCallCounterId::kBuiltin_##name); \
+ TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.runtime"), \
+ "V8.Builtin_" #name); \
+ return Builtin_Impl_##name(args, isolate).ptr(); \
+ } \
+ \
+ V8_WARN_UNUSED_RESULT Address Builtin_##name( \
+ int args_length, Address* args_object, Isolate* isolate) { \
+ DCHECK(isolate->context().is_null() || isolate->context().IsContext()); \
+ if (V8_UNLIKELY(TracingFlags::is_runtime_stats_enabled())) { \
+ return Builtin_Impl_Stats_##name(args_length, args_object, isolate); \
+ } \
+ BuiltinArguments args(args_length, args_object); \
+ return Builtin_Impl_##name(args, isolate).ptr(); \
+ } \
+ \
+ V8_WARN_UNUSED_RESULT static Object Builtin_Impl_##name( \
BuiltinArguments args, Isolate* isolate)
// ----------------------------------------------------------------------------
diff --git a/src/builtins/builtins-weak-refs.cc b/src/builtins/builtins-weak-refs.cc
index eecce26..78f37c0 100644
--- a/src/builtins/builtins-weak-refs.cc
+++ b/src/builtins/builtins-weak-refs.cc
@@ -15,7 +15,7 @@
if (args.new_target()->IsUndefined(isolate)) { // [[Call]]
THROW_NEW_ERROR_RETURN_FAILURE(
isolate, NewTypeError(MessageTemplate::kConstructorNotFunction,
- handle(target->shared()->Name(), isolate)));
+ handle(target->shared().Name(), isolate)));
}
// [[Construct]]
Handle<JSReceiver> new_target = Handle<JSReceiver>::cast(args.new_target());
@@ -38,9 +38,9 @@
finalization_group->set_flags(
JSFinalizationGroup::ScheduledForCleanupField::encode(false));
- DCHECK(finalization_group->active_cells()->IsUndefined(isolate));
- DCHECK(finalization_group->cleared_cells()->IsUndefined(isolate));
- DCHECK(finalization_group->key_map()->IsUndefined(isolate));
+ DCHECK(finalization_group->active_cells().IsUndefined(isolate));
+ DCHECK(finalization_group->cleared_cells().IsUndefined(isolate));
+ DCHECK(finalization_group->key_map().IsUndefined(isolate));
return *finalization_group;
}
@@ -125,7 +125,7 @@
if (args.new_target()->IsUndefined(isolate)) { // [[Call]]
THROW_NEW_ERROR_RETURN_FAILURE(
isolate, NewTypeError(MessageTemplate::kConstructorNotFunction,
- handle(target->shared()->Name(), isolate)));
+ handle(target->shared().Name(), isolate)));
}
// [[Construct]]
Handle<JSReceiver> new_target = Handle<JSReceiver>::cast(args.new_target());
@@ -155,14 +155,14 @@
BUILTIN(WeakRefDeref) {
HandleScope scope(isolate);
CHECK_RECEIVER(JSWeakRef, weak_ref, "WeakRef.prototype.deref");
- if (weak_ref->target()->IsJSReceiver()) {
+ if (weak_ref->target().IsJSReceiver()) {
Handle<JSReceiver> target =
handle(JSReceiver::cast(weak_ref->target()), isolate);
// AddKeepDuringJobTarget might allocate and cause a GC, but it won't clear
// weak_ref since we hold a Handle to its target.
isolate->heap()->AddKeepDuringJobTarget(target);
} else {
- DCHECK(weak_ref->target()->IsUndefined(isolate));
+ DCHECK(weak_ref->target().IsUndefined(isolate));
}
return weak_ref->target();
}
diff --git a/src/builtins/builtins.cc b/src/builtins/builtins.cc
index 75b9197..d9fd81b 100644
--- a/src/builtins/builtins.cc
+++ b/src/builtins/builtins.cc
@@ -76,13 +76,13 @@
// Off-heap pc's can be looked up through binary search.
if (FLAG_embedded_builtins) {
Code maybe_builtin = InstructionStream::TryLookupCode(isolate_, pc);
- if (!maybe_builtin.is_null()) return name(maybe_builtin->builtin_index());
+ if (!maybe_builtin.is_null()) return name(maybe_builtin.builtin_index());
}
// May be called during initialization (disassembler).
if (initialized_) {
for (int i = 0; i < builtin_count; i++) {
- if (isolate_->heap()->builtin(i)->contains(pc)) return name(i);
+ if (isolate_->heap()->builtin(i).contains(pc)) return name(i);
}
}
return nullptr;
@@ -185,7 +185,7 @@
const char* kind = KindNameOf(i);
Code code = builtin(i);
PrintF(stdout, "%s Builtin, %s, %d\n", kind, builtin_name,
- code->InstructionSize());
+ code.InstructionSize());
}
}
@@ -197,7 +197,7 @@
// static
bool Builtins::IsBuiltin(const Code code) {
- return Builtins::IsBuiltinId(code->builtin_index());
+ return Builtins::IsBuiltinId(code.builtin_index());
}
bool Builtins::IsBuiltinHandle(Handle<HeapObject> maybe_code,
@@ -216,7 +216,7 @@
// static
bool Builtins::IsIsolateIndependentBuiltin(const Code code) {
if (FLAG_embedded_builtins) {
- const int builtin_index = code->builtin_index();
+ const int builtin_index = code.builtin_index();
return Builtins::IsBuiltinId(builtin_index) &&
Builtins::IsIsolateIndependent(builtin_index);
} else {
@@ -245,7 +245,7 @@
Heap* heap = isolate->heap();
Address* builtin_entry_table = isolate->builtin_entry_table();
for (int i = 0; i < builtin_count; i++) {
- builtin_entry_table[i] = heap->builtin(i)->InstructionStart();
+ builtin_entry_table[i] = heap->builtin(i).InstructionStart();
}
}
diff --git a/src/builtins/constants-table-builder.cc b/src/builtins/constants-table-builder.cc
index 4d000be..f512650 100644
--- a/src/builtins/constants-table-builder.cc
+++ b/src/builtins/constants-table-builder.cc
@@ -72,7 +72,7 @@
DCHECK(isolate_->IsGeneratingEmbeddedBuiltins());
DCHECK(self_reference->IsOddball());
- DCHECK(Oddball::cast(*self_reference)->kind() ==
+ DCHECK(Oddball::cast(*self_reference).kind() ==
Oddball::kSelfReferenceMarker);
#endif
@@ -101,20 +101,20 @@
for (auto it = it_scope.begin(); it != it_scope.end(); ++it) {
uint32_t index = *it.entry();
Object value = it.key();
- if (value->IsCode() && Code::cast(value)->kind() == Code::BUILTIN) {
+ if (value.IsCode() && Code::cast(value).kind() == Code::BUILTIN) {
// Replace placeholder code objects with the real builtin.
// See also: SetupIsolateDelegate::PopulateWithPlaceholders.
// TODO(jgruber): Deduplicate placeholders and their corresponding
// builtin.
- value = builtins->builtin(Code::cast(value)->builtin_index());
+ value = builtins->builtin(Code::cast(value).builtin_index());
}
- DCHECK(value->IsHeapObject());
+ DCHECK(value.IsHeapObject());
table->set(index, value);
}
#ifdef DEBUG
for (int i = 0; i < map_.size(); i++) {
- DCHECK(table->get(i)->IsHeapObject());
+ DCHECK(table->get(i).IsHeapObject());
DCHECK_NE(ReadOnlyRoots(isolate_).undefined_value(), table->get(i));
DCHECK_NE(ReadOnlyRoots(isolate_).self_reference_marker(), table->get(i));
}
diff --git a/src/builtins/ia32/builtins-ia32.cc b/src/builtins/ia32/builtins-ia32.cc
index a038b43..5c4adaa 100644
--- a/src/builtins/ia32/builtins-ia32.cc
+++ b/src/builtins/ia32/builtins-ia32.cc
@@ -1382,7 +1382,7 @@
__ bind(&trampoline_loaded);
__ Pop(eax);
- __ add(scratch, Immediate(interpreter_entry_return_pc_offset->value()));
+ __ add(scratch, Immediate(interpreter_entry_return_pc_offset.value()));
__ push(scratch);
// Initialize the dispatch table register.
diff --git a/src/builtins/mips/builtins-mips.cc b/src/builtins/mips/builtins-mips.cc
index f65df4b..d02305c 100644
--- a/src/builtins/mips/builtins-mips.cc
+++ b/src/builtins/mips/builtins-mips.cc
@@ -1304,7 +1304,7 @@
__ lw(t0, MemOperand(t0));
__ bind(&trampoline_loaded);
- __ Addu(ra, t0, Operand(interpreter_entry_return_pc_offset->value()));
+ __ Addu(ra, t0, Operand(interpreter_entry_return_pc_offset.value()));
// Initialize the dispatch table register.
__ li(kInterpreterDispatchTableRegister,
diff --git a/src/builtins/mips64/builtins-mips64.cc b/src/builtins/mips64/builtins-mips64.cc
index 3c7a876..5e8bd35 100644
--- a/src/builtins/mips64/builtins-mips64.cc
+++ b/src/builtins/mips64/builtins-mips64.cc
@@ -1321,7 +1321,7 @@
__ Ld(t0, MemOperand(t0));
__ bind(&trampoline_loaded);
- __ Daddu(ra, t0, Operand(interpreter_entry_return_pc_offset->value()));
+ __ Daddu(ra, t0, Operand(interpreter_entry_return_pc_offset.value()));
// Initialize the dispatch table register.
__ li(kInterpreterDispatchTableRegister,
diff --git a/src/builtins/ppc/builtins-ppc.cc b/src/builtins/ppc/builtins-ppc.cc
index a0b41db..836900b 100644
--- a/src/builtins/ppc/builtins-ppc.cc
+++ b/src/builtins/ppc/builtins-ppc.cc
@@ -1365,7 +1365,7 @@
__ LoadP(r5, MemOperand(r5));
__ bind(&trampoline_loaded);
- __ addi(r0, r5, Operand(interpreter_entry_return_pc_offset->value()));
+ __ addi(r0, r5, Operand(interpreter_entry_return_pc_offset.value()));
__ mtlr(r0);
// Initialize the dispatch table register.
diff --git a/src/builtins/setup-builtins-internal.cc b/src/builtins/setup-builtins-internal.cc
index 668ac0a..f50e091 100644
--- a/src/builtins/setup-builtins-internal.cc
+++ b/src/builtins/setup-builtins-internal.cc
@@ -214,7 +214,7 @@
// static
void SetupIsolateDelegate::AddBuiltin(Builtins* builtins, int index,
Code code) {
- DCHECK_EQ(index, code->builtin_index());
+ DCHECK_EQ(index, code.builtin_index());
builtins->set_builtin(index, code);
}
@@ -245,7 +245,7 @@
HeapIterator iterator(isolate->heap());
for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) {
- if (!obj->IsCode()) continue;
+ if (!obj.IsCode()) continue;
Code code = Code::cast(obj);
bool flush_icache = false;
for (RelocIterator it(code, kRelocMask); !it.done(); it.next()) {
@@ -253,26 +253,26 @@
if (RelocInfo::IsCodeTargetMode(rinfo->rmode())) {
Code target = Code::GetCodeFromTargetAddress(rinfo->target_address());
DCHECK_IMPLIES(RelocInfo::IsRelativeCodeTarget(rinfo->rmode()),
- Builtins::IsIsolateIndependent(target->builtin_index()));
- if (!target->is_builtin()) continue;
- Code new_target = builtins->builtin(target->builtin_index());
- rinfo->set_target_address(new_target->raw_instruction_start(),
+ Builtins::IsIsolateIndependent(target.builtin_index()));
+ if (!target.is_builtin()) continue;
+ Code new_target = builtins->builtin(target.builtin_index());
+ rinfo->set_target_address(new_target.raw_instruction_start(),
UPDATE_WRITE_BARRIER, SKIP_ICACHE_FLUSH);
} else {
DCHECK(RelocInfo::IsEmbeddedObjectMode(rinfo->rmode()));
Object object = rinfo->target_object();
- if (!object->IsCode()) continue;
+ if (!object.IsCode()) continue;
Code target = Code::cast(object);
- if (!target->is_builtin()) continue;
- Code new_target = builtins->builtin(target->builtin_index());
+ if (!target.is_builtin()) continue;
+ Code new_target = builtins->builtin(target.builtin_index());
rinfo->set_target_object(isolate->heap(), new_target,
UPDATE_WRITE_BARRIER, SKIP_ICACHE_FLUSH);
}
flush_icache = true;
}
if (flush_icache) {
- FlushInstructionCache(code->raw_instruction_start(),
- code->raw_instruction_size());
+ FlushInstructionCache(code.raw_instruction_start(),
+ code.raw_instruction_size());
}
}
}
@@ -359,13 +359,13 @@
ReplacePlaceholders(isolate);
#define SET_PROMISE_REJECTION_PREDICTION(Name) \
- builtins->builtin(Builtins::k##Name)->set_is_promise_rejection(true);
+ builtins->builtin(Builtins::k##Name).set_is_promise_rejection(true);
BUILTIN_PROMISE_REJECTION_PREDICTION_LIST(SET_PROMISE_REJECTION_PREDICTION)
#undef SET_PROMISE_REJECTION_PREDICTION
#define SET_EXCEPTION_CAUGHT_PREDICTION(Name) \
- builtins->builtin(Builtins::k##Name)->set_is_exception_caught(true);
+ builtins->builtin(Builtins::k##Name).set_is_exception_caught(true);
BUILTIN_EXCEPTION_CAUGHT_PREDICTION_LIST(SET_EXCEPTION_CAUGHT_PREDICTION)
#undef SET_EXCEPTION_CAUGHT_PREDICTION
diff --git a/src/builtins/x64/builtins-x64.cc b/src/builtins/x64/builtins-x64.cc
index 865d868..9ff5b06 100644
--- a/src/builtins/x64/builtins-x64.cc
+++ b/src/builtins/x64/builtins-x64.cc
@@ -1390,7 +1390,7 @@
kScratchRegister));
__ bind(&trampoline_loaded);
- __ addq(rbx, Immediate(interpreter_entry_return_pc_offset->value()));
+ __ addq(rbx, Immediate(interpreter_entry_return_pc_offset.value()));
__ Push(rbx);
// Initialize dispatch table register.
diff --git a/src/codegen/code-stub-assembler.cc b/src/codegen/code-stub-assembler.cc
index f924b38..006ade8 100644
--- a/src/codegen/code-stub-assembler.cc
+++ b/src/codegen/code-stub-assembler.cc
@@ -294,7 +294,7 @@
}
} else {
DCHECK_EQ(mode, SMI_PARAMETERS);
- if (ToSmiConstant(test, &smi_test) && smi_test->value() == 0) {
+ if (ToSmiConstant(test, &smi_test) && smi_test.value() == 0) {
return true;
}
}
@@ -10007,7 +10007,7 @@
element_size_shift -= kSmiShiftBits;
Smi smi_index;
constant_index = ToSmiConstant(index_node, &smi_index);
- if (constant_index) index = smi_index->value();
+ if (constant_index) index = smi_index.value();
index_node = BitcastTaggedToWord(index_node);
} else {
DCHECK(mode == INTPTR_PARAMETERS);
diff --git a/src/codegen/code-stub-assembler.h b/src/codegen/code-stub-assembler.h
index 31847c6..8e80bea 100644
--- a/src/codegen/code-stub-assembler.h
+++ b/src/codegen/code-stub-assembler.h
@@ -278,7 +278,7 @@
if (mode == ParameterMode::SMI_PARAMETERS) {
Smi constant;
if (ToSmiConstant(node, &constant)) {
- *out = static_cast<intptr_t>(constant->value());
+ *out = static_cast<intptr_t>(constant.value());
return true;
}
} else {
diff --git a/src/codegen/compilation-cache.cc b/src/codegen/compilation-cache.cc
index 866caf3..2d8ce46 100644
--- a/src/codegen/compilation-cache.cc
+++ b/src/codegen/compilation-cache.cc
@@ -39,7 +39,7 @@
Handle<CompilationCacheTable> CompilationSubCache::GetTable(int generation) {
DCHECK(generation < generations_);
Handle<CompilationCacheTable> result;
- if (tables_[generation]->IsUndefined(isolate())) {
+ if (tables_[generation].IsUndefined(isolate())) {
result = CompilationCacheTable::New(isolate(), kInitialCacheSize);
tables_[generation] = *result;
} else {
@@ -53,8 +53,8 @@
void CompilationSubCache::Age() {
// Don't directly age single-generation caches.
if (generations_ == 1) {
- if (!tables_[0]->IsUndefined(isolate())) {
- CompilationCacheTable::cast(tables_[0])->Age();
+ if (!tables_[0].IsUndefined(isolate())) {
+ CompilationCacheTable::cast(tables_[0]).Age();
}
return;
}
@@ -76,8 +76,7 @@
void CompilationSubCache::Clear() {
MemsetPointer(reinterpret_cast<Address*>(tables_),
- ReadOnlyRoots(isolate()).undefined_value()->ptr(),
- generations_);
+ ReadOnlyRoots(isolate()).undefined_value().ptr(), generations_);
}
void CompilationSubCache::Remove(Handle<SharedFunctionInfo> function_info) {
@@ -108,13 +107,13 @@
// an undefined name to have the same origin.
Handle<Object> name;
if (!maybe_name.ToHandle(&name)) {
- return script->name()->IsUndefined(isolate());
+ return script->name().IsUndefined(isolate());
}
// Do the fast bailout checks first.
if (line_offset != script->line_offset()) return false;
if (column_offset != script->column_offset()) return false;
// Check that both names are strings. If not, no match.
- if (!name->IsString() || !script->name()->IsString()) return false;
+ if (!name->IsString() || !script->name().IsString()) return false;
// Are the origin_options same?
if (resource_options.Flags() != script->origin_options().Flags())
return false;
diff --git a/src/codegen/compiler.cc b/src/codegen/compiler.cc
index 6c7e55b..999ebb1 100644
--- a/src/codegen/compiler.cc
+++ b/src/codegen/compiler.cc
@@ -94,7 +94,7 @@
int line_num = Script::GetLineNumber(script, shared->StartPosition()) + 1;
int column_num = Script::GetColumnNumber(script, shared->StartPosition()) + 1;
- String script_name = script->name()->IsString()
+ String script_name = script->name().IsString()
? String::cast(script->name())
: ReadOnlyRoots(isolate).empty_string();
CodeEventListener::LogEventsAndTags log_tag =
@@ -127,9 +127,9 @@
}
ScriptOriginOptions OriginOptionsForEval(Object script) {
- if (!script->IsScript()) return ScriptOriginOptions();
+ if (!script.IsScript()) return ScriptOriginOptions();
- const auto outer_origin_options = Script::cast(script)->origin_options();
+ const auto outer_origin_options = Script::cast(script).origin_options();
return ScriptOriginOptions(outer_origin_options.IsSharedCrossOrigin(),
outer_origin_options.IsOpaque());
}
@@ -270,7 +270,7 @@
compilation_time += (ms_creategraph + ms_optimize + ms_codegen);
compiled_functions++;
- code_size += function->shared()->SourceSize();
+ code_size += function->shared().SourceSize();
PrintF("Compiled: %d functions with %d byte source size in %fms.\n",
compiled_functions, code_size, compilation_time);
}
@@ -383,7 +383,7 @@
Script::GetLineNumber(script, shared_info->StartPosition()) + 1;
int column_num =
Script::GetColumnNumber(script, shared_info->StartPosition()) + 1;
- String script_name = script->name()->IsString()
+ String script_name = script->name().IsString()
? String::cast(script->name())
: ReadOnlyRoots(isolate).empty_string();
CodeEventListener::LogEventsAndTags log_tag = Logger::ToNativeByScript(
@@ -433,8 +433,8 @@
Isolate* isolate) {
DCHECK(parse_info->is_toplevel());
DCHECK(!parse_info->script().is_null());
- if (parse_info->script()->shared_function_infos()->length() > 0) {
- DCHECK_EQ(parse_info->script()->shared_function_infos()->length(),
+ if (parse_info->script()->shared_function_infos().length() > 0) {
+ DCHECK_EQ(parse_info->script()->shared_function_infos().length(),
parse_info->max_function_literal_id() + 1);
return;
}
@@ -659,15 +659,15 @@
if (osr_offset.IsNone()) {
if (function->has_feedback_vector()) {
FeedbackVector feedback_vector = function->feedback_vector();
- feedback_vector->EvictOptimizedCodeMarkedForDeoptimization(
+ feedback_vector.EvictOptimizedCodeMarkedForDeoptimization(
function->shared(), "GetCodeFromOptimizedCodeCache");
- Code code = feedback_vector->optimized_code();
+ Code code = feedback_vector.optimized_code();
if (!code.is_null()) {
// Caching of optimized code enabled and optimized code found.
- DCHECK(!code->marked_for_deoptimization());
- DCHECK(function->shared()->is_compiled());
- return Handle<Code>(code, feedback_vector->GetIsolate());
+ DCHECK(!code.marked_for_deoptimization());
+ DCHECK(function->shared().is_compiled());
+ return Handle<Code>(code, feedback_vector.GetIsolate());
}
}
}
@@ -700,7 +700,7 @@
// Cache optimized context-specific code.
Handle<JSFunction> function = compilation_info->closure();
Handle<SharedFunctionInfo> shared(function->shared(), function->GetIsolate());
- Handle<Context> native_context(function->context()->native_context(),
+ Handle<Context> native_context(function->context().native_context(),
function->GetIsolate());
if (compilation_info->osr_offset().IsNone()) {
Handle<FeedbackVector> vector =
@@ -800,7 +800,7 @@
// If code was pending optimization for testing, delete remove the strong root
// that was preventing the bytecode from being flushed between marking and
// optimization.
- if (!isolate->heap()->pending_optimize_for_test_bytecode()->IsUndefined()) {
+ if (!isolate->heap()->pending_optimize_for_test_bytecode().IsUndefined()) {
Handle<ObjectHashTable> table =
handle(ObjectHashTable::cast(
isolate->heap()->pending_optimize_for_test_bytecode()),
@@ -827,7 +827,7 @@
// Reset profiler ticks, function is no longer considered hot.
DCHECK(shared->is_compiled());
- function->feedback_vector()->set_profiler_ticks(0);
+ function->feedback_vector().set_profiler_ticks(0);
VMState<COMPILER> state(isolate);
TimerEventScope<TimerEventOptimizeCode> optimize_code_timer(isolate);
@@ -837,7 +837,7 @@
DCHECK(!isolate->has_pending_exception());
PostponeInterruptsScope postpone(isolate);
- bool has_script = shared->script()->IsScript();
+ bool has_script = shared->script().IsScript();
// BUG(5946): This DCHECK is necessary to make certain that we won't
// tolerate the lack of a script without bytecode.
DCHECK_IMPLIES(!has_script, shared->HasBytecodeArray());
@@ -881,8 +881,8 @@
// Set the optimization marker and return a code object which checks it.
function->SetOptimizationMarker(OptimizationMarker::kInOptimizationQueue);
DCHECK(function->IsInterpreted() ||
- (!function->is_compiled() && function->shared()->IsInterpreted()));
- DCHECK(function->shared()->HasBytecodeArray());
+ (!function->is_compiled() && function->shared().IsInterpreted()));
+ DCHECK(function->shared().HasBytecodeArray());
return BUILTIN_CODE(isolate, InterpreterEntryTrampoline);
}
} else {
@@ -1178,7 +1178,7 @@
Handle<SharedFunctionInfo> shared_info) {
DCHECK(shared_info->is_compiled());
DCHECK(shared_info->HasBytecodeArray());
- DCHECK(!shared_info->GetBytecodeArray()->HasSourcePositionTable());
+ DCHECK(!shared_info->GetBytecodeArray().HasSourcePositionTable());
// Collecting source positions requires allocating a new source position
// table.
@@ -1266,8 +1266,8 @@
// If debugging, make sure that instrumented bytecode has the source position
// table set on it as well.
if (shared_info->HasDebugInfo() &&
- shared_info->GetDebugInfo()->HasInstrumentedBytecodeArray()) {
- shared_info->GetDebugBytecodeArray()->set_source_position_table(
+ shared_info->GetDebugInfo().HasInstrumentedBytecodeArray()) {
+ shared_info->GetDebugBytecodeArray().set_source_position_table(
source_position_table);
}
@@ -1315,7 +1315,7 @@
parse_info.set_consumed_preparse_data(ConsumedPreparseData::For(
isolate,
handle(
- shared_info->uncompiled_data_with_preparse_data()->preparse_data(),
+ shared_info->uncompiled_data_with_preparse_data().preparse_data(),
isolate)));
}
@@ -1377,7 +1377,7 @@
JSFunction::InitializeFeedbackCell(function);
// Optimize now if --always-opt is enabled.
- if (FLAG_always_opt && !function->shared()->HasAsmWasmData()) {
+ if (FLAG_always_opt && !function->shared().HasAsmWasmData()) {
if (FLAG_trace_opt) {
PrintF("[optimizing ");
function->ShortPrint();
@@ -1395,7 +1395,7 @@
// Check postconditions on success.
DCHECK(!isolate->has_pending_exception());
- DCHECK(function->shared()->is_compiled());
+ DCHECK(function->shared().is_compiled());
DCHECK(function->is_compiled());
return true;
}
@@ -1449,8 +1449,8 @@
// Optimization failed, get unoptimized code. Unoptimized code must exist
// already if we are optimizing.
DCHECK(!isolate->has_pending_exception());
- DCHECK(function->shared()->is_compiled());
- DCHECK(function->shared()->IsInterpreted());
+ DCHECK(function->shared().is_compiled());
+ DCHECK(function->shared().IsInterpreted());
code = BUILTIN_CODE(isolate, InterpreterEntryTrampoline);
}
@@ -1459,7 +1459,7 @@
// Check postconditions on success.
DCHECK(!isolate->has_pending_exception());
- DCHECK(function->shared()->is_compiled());
+ DCHECK(function->shared().is_compiled());
DCHECK(function->is_compiled());
DCHECK_IMPLIES(function->HasOptimizationMarker(),
function->IsInOptimizationQueue());
@@ -1602,7 +1602,7 @@
bool Compiler::CodeGenerationFromStringsAllowed(Isolate* isolate,
Handle<Context> context,
Handle<String> source) {
- DCHECK(context->allow_code_gen_from_strings()->IsFalse(isolate));
+ DCHECK(context->allow_code_gen_from_strings().IsFalse(isolate));
// Check with callback if set.
AllowCodeGenerationFromStringsCallback callback =
isolate->allow_code_gen_callback();
@@ -1624,7 +1624,7 @@
// Check if native context allows code generation from
// strings. Throw an exception if it doesn't.
- if (native_context->allow_code_gen_from_strings()->IsFalse(isolate) &&
+ if (native_context->allow_code_gen_from_strings().IsFalse(isolate) &&
!CodeGenerationFromStringsAllowed(isolate, native_context, source)) {
Handle<Object> error_message =
native_context->ErrorMessageForCodeGenerationFromStrings();
@@ -1638,7 +1638,7 @@
int eval_scope_position = 0;
int eval_position = kNoSourcePosition;
Handle<SharedFunctionInfo> outer_info(
- native_context->empty_function()->shared(), isolate);
+ native_context->empty_function().shared(), isolate);
return Compiler::GetFunctionFromEval(
source, outer_info, native_context, LanguageMode::kSloppy, restriction,
parameters_end_pos, eval_scope_position, eval_position);
@@ -2037,7 +2037,7 @@
SharedFunctionInfo::ScriptIterator infos(isolate, *script);
for (SharedFunctionInfo info = infos.Next(); !info.is_null();
info = infos.Next()) {
- if (info->is_wrapped()) {
+ if (info.is_wrapped()) {
wrapped = Handle<SharedFunctionInfo>(info, isolate);
break;
}
@@ -2162,7 +2162,7 @@
Handle<SharedFunctionInfo> shared = compilation_info->shared_info();
// Reset profiler ticks, function is no longer considered hot.
- compilation_info->closure()->feedback_vector()->set_profiler_ticks(0);
+ compilation_info->closure()->feedback_vector().set_profiler_ticks(0);
DCHECK(!shared->HasBreakInfo());
@@ -2217,12 +2217,12 @@
JSFunction::InitializeFeedbackCell(function);
Code code = function->has_feedback_vector()
- ? function->feedback_vector()->optimized_code()
+ ? function->feedback_vector().optimized_code()
: Code();
if (!code.is_null()) {
// Caching of optimized code enabled and optimized code found.
- DCHECK(!code->marked_for_deoptimization());
- DCHECK(function->shared()->is_compiled());
+ DCHECK(!code.marked_for_deoptimization());
+ DCHECK(function->shared().is_compiled());
function->set_code(code);
}
diff --git a/src/codegen/handler-table.cc b/src/codegen/handler-table.cc
index 482bcff..8493712 100644
--- a/src/codegen/handler-table.cc
+++ b/src/codegen/handler-table.cc
@@ -14,21 +14,21 @@
namespace internal {
HandlerTable::HandlerTable(Code code)
- : HandlerTable(code->InstructionStart() + code->handler_table_offset(),
- code->handler_table_size()) {}
+ : HandlerTable(code.InstructionStart() + code.handler_table_offset(),
+ code.handler_table_size()) {}
HandlerTable::HandlerTable(BytecodeArray bytecode_array)
- : HandlerTable(bytecode_array->handler_table()) {}
+ : HandlerTable(bytecode_array.handler_table()) {}
HandlerTable::HandlerTable(ByteArray byte_array)
- : number_of_entries_(byte_array->length() / kRangeEntrySize /
+ : number_of_entries_(byte_array.length() / kRangeEntrySize /
sizeof(int32_t)),
#ifdef DEBUG
mode_(kRangeBasedEncoding),
#endif
raw_encoded_data_(
- reinterpret_cast<Address>(byte_array->GetDataStartAddress())) {
- DCHECK_EQ(0, byte_array->length() % (kRangeEntrySize * sizeof(int32_t)));
+ reinterpret_cast<Address>(byte_array.GetDataStartAddress())) {
+ DCHECK_EQ(0, byte_array.length() % (kRangeEntrySize * sizeof(int32_t)));
}
HandlerTable::HandlerTable(Address handler_table, int handler_table_size)
diff --git a/src/codegen/optimized-compilation-info.cc b/src/codegen/optimized-compilation-info.cc
index b4c8cc6..5f75345 100644
--- a/src/codegen/optimized-compilation-info.cc
+++ b/src/codegen/optimized-compilation-info.cc
@@ -151,7 +151,7 @@
std::unique_ptr<char[]> OptimizedCompilationInfo::GetDebugName() const {
if (!shared_info().is_null()) {
- return shared_info()->DebugName()->ToCString();
+ return shared_info()->DebugName().ToCString();
}
Vector<const char> name_vec = debug_name_;
if (name_vec.empty()) name_vec = ArrayVector("unknown");
@@ -216,7 +216,7 @@
JSGlobalObject OptimizedCompilationInfo::global_object() const {
DCHECK(has_global_object());
- return native_context()->global_object();
+ return native_context().global_object();
}
int OptimizedCompilationInfo::AddInlinedFunction(
diff --git a/src/codegen/reloc-info.cc b/src/codegen/reloc-info.cc
index d15787c..a889a8b 100644
--- a/src/codegen/reloc-info.cc
+++ b/src/codegen/reloc-info.cc
@@ -272,13 +272,13 @@
}
RelocIterator::RelocIterator(Code code, int mode_mask)
- : RelocIterator(code, code->unchecked_relocation_info(), mode_mask) {}
+ : RelocIterator(code, code.unchecked_relocation_info(), mode_mask) {}
RelocIterator::RelocIterator(Code code, ByteArray relocation_info,
int mode_mask)
- : RelocIterator(code, code->raw_instruction_start(), code->constant_pool(),
- relocation_info->GetDataEndAddress(),
- relocation_info->GetDataStartAddress(), mode_mask) {}
+ : RelocIterator(code, code.raw_instruction_start(), code.constant_pool(),
+ relocation_info.GetDataEndAddress(),
+ relocation_info.GetDataStartAddress(), mode_mask) {}
RelocIterator::RelocIterator(const CodeReference code_reference, int mode_mask)
: RelocIterator(Code(), code_reference.instruction_start(),
@@ -289,10 +289,10 @@
RelocIterator::RelocIterator(EmbeddedData* embedded_data, Code code,
int mode_mask)
: RelocIterator(
- code, embedded_data->InstructionStartOfBuiltin(code->builtin_index()),
- code->constant_pool(),
- code->relocation_start() + code->relocation_size(),
- code->relocation_start(), mode_mask) {}
+ code, embedded_data->InstructionStartOfBuiltin(code.builtin_index()),
+ code.constant_pool(),
+ code.relocation_start() + code.relocation_size(),
+ code.relocation_start(), mode_mask) {}
RelocIterator::RelocIterator(const CodeDesc& desc, int mode_mask)
: RelocIterator(Code(), reinterpret_cast<Address>(desc.buffer), 0,
@@ -469,10 +469,10 @@
} else if (IsCodeTargetMode(rmode_)) {
const Address code_target = target_address();
Code code = Code::GetCodeFromTargetAddress(code_target);
- DCHECK(code->IsCode());
- os << " (" << Code::Kind2String(code->kind());
+ DCHECK(code.IsCode());
+ os << " (" << Code::Kind2String(code.kind());
if (Builtins::IsBuiltin(code)) {
- os << " " << Builtins::name(code->builtin_index());
+ os << " " << Builtins::name(code.builtin_index());
}
os << ") (" << reinterpret_cast<const void*>(target_address()) << ")";
} else if (IsRuntimeEntry(rmode_) && isolate->deoptimizer_data() != nullptr) {
@@ -505,8 +505,8 @@
// Check that we can find the right code object.
Code code = Code::GetCodeFromTargetAddress(addr);
Object found = isolate->FindCodeObject(addr);
- CHECK(found->IsCode());
- CHECK(code->address() == HeapObject::cast(found)->address());
+ CHECK(found.IsCode());
+ CHECK(code.address() == HeapObject::cast(found).address());
break;
}
case INTERNAL_REFERENCE:
@@ -514,8 +514,8 @@
Address target = target_internal_reference();
Address pc = target_internal_reference_address();
Code code = Code::cast(isolate->FindCodeObject(pc));
- CHECK(target >= code->InstructionStart());
- CHECK(target <= code->InstructionEnd());
+ CHECK(target >= code.InstructionStart());
+ CHECK(target <= code.InstructionEnd());
break;
}
case OFF_HEAP_TARGET: {
diff --git a/src/codegen/safepoint-table.cc b/src/codegen/safepoint-table.cc
index 97e5736..423fa1c 100644
--- a/src/codegen/safepoint-table.cc
+++ b/src/codegen/safepoint-table.cc
@@ -49,8 +49,8 @@
}
SafepointTable::SafepointTable(Code code)
- : SafepointTable(code->InstructionStart(), code->safepoint_table_offset(),
- code->stack_slots(), true) {}
+ : SafepointTable(code.InstructionStart(), code.safepoint_table_offset(),
+ code.stack_slots(), true) {}
unsigned SafepointTable::find_return_pc(unsigned pc_offset) {
for (unsigned i = 0; i < length(); i++) {
diff --git a/src/codegen/source-position-table.cc b/src/codegen/source-position-table.cc
index 35b9d26..f1f8083 100644
--- a/src/codegen/source-position-table.cc
+++ b/src/codegen/source-position-table.cc
@@ -108,8 +108,8 @@
}
Vector<const byte> VectorFromByteArray(ByteArray byte_array) {
- return Vector<const byte>(byte_array->GetDataStartAddress(),
- byte_array->length());
+ return Vector<const byte>(byte_array.GetDataStartAddress(),
+ byte_array.length());
}
#ifdef ENABLE_SLOW_DCHECKS
diff --git a/src/codegen/source-position.cc b/src/codegen/source-position.cc
index aa7170f..41f5e5d 100644
--- a/src/codegen/source-position.cc
+++ b/src/codegen/source-position.cc
@@ -11,8 +11,8 @@
std::ostream& operator<<(std::ostream& out, const SourcePositionInfo& pos) {
out << "<";
- if (!pos.script.is_null() && pos.script->name()->IsString()) {
- out << String::cast(pos.script->name())->ToCString(DISALLOW_NULLS).get();
+ if (!pos.script.is_null() && pos.script->name().IsString()) {
+ out << String::cast(pos.script->name()).ToCString(DISALLOW_NULLS).get();
} else {
out << "unknown";
}
@@ -68,7 +68,7 @@
std::vector<SourcePositionInfo> stack;
while (pos.isInlined()) {
InliningPosition inl =
- deopt_data->InliningPositions()->get(pos.InliningId());
+ deopt_data->InliningPositions().get(pos.InliningId());
Handle<SharedFunctionInfo> function(
deopt_data->GetInlinedFunction(inl.inlined_function_id), isolate);
stack.push_back(SourcePositionInfo(pos, function));
@@ -84,15 +84,15 @@
SharedFunctionInfo function) const {
Script::PositionInfo pos;
Object source_name;
- if (function->script()->IsScript()) {
- Script script = Script::cast(function->script());
- source_name = script->name();
- script->GetPositionInfo(ScriptOffset(), &pos, Script::WITH_OFFSET);
+ if (function.script().IsScript()) {
+ Script script = Script::cast(function.script());
+ source_name = script.name();
+ script.GetPositionInfo(ScriptOffset(), &pos, Script::WITH_OFFSET);
}
out << "<";
- if (source_name->IsString()) {
+ if (source_name.IsString()) {
out << String::cast(source_name)
- ->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL)
+ .ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL)
.get();
} else {
out << "unknown";
@@ -113,18 +113,18 @@
void SourcePosition::Print(std::ostream& out, Code code) const {
DeoptimizationData deopt_data =
- DeoptimizationData::cast(code->deoptimization_data());
+ DeoptimizationData::cast(code.deoptimization_data());
if (!isInlined()) {
SharedFunctionInfo function(
- SharedFunctionInfo::cast(deopt_data->SharedFunctionInfo()));
+ SharedFunctionInfo::cast(deopt_data.SharedFunctionInfo()));
Print(out, function);
} else {
- InliningPosition inl = deopt_data->InliningPositions()->get(InliningId());
+ InliningPosition inl = deopt_data.InliningPositions().get(InliningId());
if (inl.inlined_function_id == -1) {
out << *this;
} else {
SharedFunctionInfo function =
- deopt_data->GetInlinedFunction(inl.inlined_function_id);
+ deopt_data.GetInlinedFunction(inl.inlined_function_id);
Print(out, function);
}
out << " inlined at ";
@@ -136,7 +136,7 @@
Handle<SharedFunctionInfo> f)
: position(pos),
shared(f),
- script(f.is_null() || !f->script()->IsScript()
+ script(f.is_null() || !f->script().IsScript()
? Handle<Script>::null()
: handle(Script::cast(f->script()), f->GetIsolate())) {
if (!script.is_null()) {
diff --git a/src/compiler-dispatcher/compiler-dispatcher.cc b/src/compiler-dispatcher/compiler-dispatcher.cc
index 221bed3..dd0c9a1 100644
--- a/src/compiler-dispatcher/compiler-dispatcher.cc
+++ b/src/compiler-dispatcher/compiler-dispatcher.cc
@@ -104,7 +104,7 @@
if (trace_compiler_dispatcher_) {
PrintF("CompilerDispatcher: registering ");
- function->ShortPrint();
+ function.ShortPrint();
PrintF(" with job id %zu\n", job_id);
}
diff --git a/src/compiler-dispatcher/optimizing-compile-dispatcher.cc b/src/compiler-dispatcher/optimizing-compile-dispatcher.cc
index 333fdbd..4b24fca 100644
--- a/src/compiler-dispatcher/optimizing-compile-dispatcher.cc
+++ b/src/compiler-dispatcher/optimizing-compile-dispatcher.cc
@@ -25,7 +25,7 @@
bool restore_function_code) {
if (restore_function_code) {
Handle<JSFunction> function = job->compilation_info()->closure();
- function->set_code(function->shared()->GetCode());
+ function->set_code(function->shared().GetCode());
if (function->IsInOptimizationQueue()) {
function->ClearOptimizationMarker();
}
diff --git a/src/compiler/access-info.cc b/src/compiler/access-info.cc
index eae0b8d..b44c9c2 100644
--- a/src/compiler/access-info.cc
+++ b/src/compiler/access-info.cc
@@ -407,10 +407,10 @@
Handle<JSModuleNamespace> module_namespace(
JSModuleNamespace::cast(proto_info->module_namespace()), isolate());
Handle<Cell> cell(
- Cell::cast(module_namespace->module()->exports()->Lookup(
+ Cell::cast(module_namespace->module().exports().Lookup(
ReadOnlyRoots(isolate()), name, Smi::ToInt(name->GetHash()))),
isolate());
- if (cell->value()->IsTheHole(isolate())) {
+ if (cell->value().IsTheHole(isolate())) {
// This module has not been fully initialized yet.
return PropertyAccessInfo::Invalid(zone());
}
@@ -542,15 +542,15 @@
}
// Walk up the prototype chain.
- if (!map->prototype()->IsJSObject()) {
+ if (!map->prototype().IsJSObject()) {
// Perform the implicit ToObject for primitives here.
// Implemented according to ES6 section 7.3.2 GetV (V, P).
Handle<JSFunction> constructor;
if (Map::GetConstructorFunction(map, broker()->native_context().object())
.ToHandle(&constructor)) {
map = handle(constructor->initial_map(), isolate());
- DCHECK(map->prototype()->IsJSObject());
- } else if (map->prototype()->IsNull(isolate())) {
+ DCHECK(map->prototype().IsJSObject());
+ } else if (map->prototype().IsNull(isolate())) {
// Store to property not found on the receiver or any prototype, we need
// to transition to a new data property.
// Implemented according to ES6 section 9.1.9 [[Set]] (P, V, Receiver)
@@ -566,7 +566,7 @@
}
}
Handle<JSObject> map_prototype(JSObject::cast(map->prototype()), isolate());
- if (map_prototype->map()->is_deprecated()) {
+ if (map_prototype->map().is_deprecated()) {
// Try to migrate the prototype object so we don't embed the deprecated
// map into the optimized code.
JSObject::TryMigrateInstance(map_prototype);
@@ -744,7 +744,7 @@
Handle<Map> transition_map(transition, isolate());
int const number = transition_map->LastAdded();
PropertyDetails const details =
- transition_map->instance_descriptors()->GetDetails(number);
+ transition_map->instance_descriptors().GetDetails(number);
// Don't bother optimizing stores to read-only properties.
if (details.IsReadOnly()) {
return PropertyAccessInfo::Invalid(zone());
@@ -774,8 +774,7 @@
// Extract the field type from the property details (make sure its
// representation is TaggedPointer to reflect the heap object case).
Handle<FieldType> descriptors_field_type(
- transition_map->instance_descriptors()->GetFieldType(number),
- isolate());
+ transition_map->instance_descriptors().GetFieldType(number), isolate());
if (descriptors_field_type->IsNone()) {
// Store is not safe if the field type was cleared.
return PropertyAccessInfo::Invalid(zone());
diff --git a/src/compiler/backend/code-generator.cc b/src/compiler/backend/code-generator.cc
index 8d0122f..ee6c07d 100644
--- a/src/compiler/backend/code-generator.cc
+++ b/src/compiler/backend/code-generator.cc
@@ -1161,8 +1161,8 @@
// Smis.
DCHECK_EQ(4, kSystemPointerSize);
Smi smi(static_cast<Address>(constant.ToInt32()));
- DCHECK(smi->IsSmi());
- literal = DeoptimizationLiteral(smi->value());
+ DCHECK(smi.IsSmi());
+ literal = DeoptimizationLiteral(smi.value());
} else if (type.representation() == MachineRepresentation::kBit) {
if (constant.ToInt32() == 0) {
literal =
@@ -1196,8 +1196,8 @@
// Smis.
DCHECK_EQ(MachineRepresentation::kTagged, type.representation());
Smi smi(static_cast<Address>(constant.ToInt64()));
- DCHECK(smi->IsSmi());
- literal = DeoptimizationLiteral(smi->value());
+ DCHECK(smi.IsSmi());
+ literal = DeoptimizationLiteral(smi.value());
}
break;
case Constant::kFloat32:
diff --git a/src/compiler/bytecode-graph-builder.cc b/src/compiler/bytecode-graph-builder.cc
index 4461431..fa83f25 100644
--- a/src/compiler/bytecode-graph-builder.cc
+++ b/src/compiler/bytecode-graph-builder.cc
@@ -2028,7 +2028,7 @@
// TODO(mstarzinger): Thread through number of elements. The below number is
// only an estimate and does not match {ArrayLiteral::values::length}.
int number_of_elements =
- array_boilerplate_description->constant_elements()->length();
+ array_boilerplate_description->constant_elements().length();
Node* literal = NewNode(javascript()->CreateLiteralArray(
array_boilerplate_description, pair, literal_flags, number_of_elements));
environment()->BindAccumulator(literal, Environment::kAttachFrameState);
@@ -2098,7 +2098,7 @@
// the JSArray constant here.
cached_value = TemplateObjectDescription::GetTemplateObject(
isolate(), native_context(), description, shared_info(), slot.ToInt());
- nexus.vector()->Set(slot, *cached_value);
+ nexus.vector().Set(slot, *cached_value);
} else {
cached_value =
handle(JSArray::cast(nexus.GetFeedback()->GetHeapObjectAssumeStrong()),
diff --git a/src/compiler/code-assembler.cc b/src/compiler/code-assembler.cc
index 545fa71..e4896cf 100644
--- a/src/compiler/code-assembler.cc
+++ b/src/compiler/code-assembler.cc
@@ -1923,16 +1923,16 @@
Smi type(raw_type);
String location = String::cast(Object(raw_location));
const char* expected;
- switch (static_cast<ObjectType>(type->value())) {
-#define TYPE_CASE(Name) \
- case ObjectType::k##Name: \
- if (value->Is##Name()) return Smi::FromInt(0).ptr(); \
- expected = #Name; \
+ switch (static_cast<ObjectType>(type.value())) {
+#define TYPE_CASE(Name) \
+ case ObjectType::k##Name: \
+ if (value.Is##Name()) return Smi::FromInt(0).ptr(); \
+ expected = #Name; \
break;
-#define TYPE_STRUCT_CASE(NAME, Name, name) \
- case ObjectType::k##Name: \
- if (value->Is##Name()) return Smi::FromInt(0).ptr(); \
- expected = #Name; \
+#define TYPE_STRUCT_CASE(NAME, Name, name) \
+ case ObjectType::k##Name: \
+ if (value.Is##Name()) return Smi::FromInt(0).ptr(); \
+ expected = #Name; \
break;
TYPE_CASE(Object)
@@ -1945,11 +1945,11 @@
#undef TYPE_STRUCT_CASE
}
std::stringstream value_description;
- value->Print(value_description);
+ value.Print(value_description);
FATAL(
"Type cast failed in %s\n"
" Expected %s but found %s",
- location->ToAsciiArray(), expected, value_description.str().c_str());
+ location.ToAsciiArray(), expected, value_description.str().c_str());
#else
UNREACHABLE();
#endif
diff --git a/src/compiler/compilation-dependencies.cc b/src/compiler/compilation-dependencies.cc
index c79caf3..1a579c0 100644
--- a/src/compiler/compilation-dependencies.cc
+++ b/src/compiler/compilation-dependencies.cc
@@ -181,9 +181,8 @@
bool IsValid() const override {
DisallowHeapAllocation no_heap_allocation;
Handle<Map> owner = owner_.object();
- return representation_.Equals(owner->instance_descriptors()
- ->GetDetails(descriptor_)
- .representation());
+ return representation_.Equals(
+ owner->instance_descriptors().GetDetails(descriptor_).representation());
}
void Install(const MaybeObjectHandle& code) const override {
@@ -213,7 +212,7 @@
DisallowHeapAllocation no_heap_allocation;
Handle<Map> owner = owner_.object();
Handle<Object> type = type_.object();
- return *type == owner->instance_descriptors()->GetFieldType(descriptor_);
+ return *type == owner->instance_descriptors().GetFieldType(descriptor_);
}
void Install(const MaybeObjectHandle& code) const override {
@@ -242,7 +241,7 @@
DisallowHeapAllocation no_heap_allocation;
Handle<Map> owner = owner_.object();
return PropertyConstness::kConst ==
- owner->instance_descriptors()->GetDetails(descriptor_).constness();
+ owner->instance_descriptors().GetDetails(descriptor_).constness();
}
void Install(const MaybeObjectHandle& code) const override {
@@ -332,7 +331,7 @@
bool IsValid() const override {
Handle<AllocationSite> site = site_.object();
ElementsKind kind = site->PointsToLiteral()
- ? site->boilerplate()->GetElementsKind()
+ ? site->boilerplate().GetElementsKind()
: site->GetElementsKind();
return kind_ == kind;
}
@@ -372,9 +371,8 @@
void Install(const MaybeObjectHandle& code) const override {
SLOW_DCHECK(IsValid());
- DCHECK(!function_.object()
- ->initial_map()
- ->IsInobjectSlackTrackingInProgress());
+ DCHECK(
+ !function_.object()->initial_map().IsInobjectSlackTrackingInProgress());
}
private:
diff --git a/src/compiler/graph-visualizer.cc b/src/compiler/graph-visualizer.cc
index 6f89512..5de420a 100644
--- a/src/compiler/graph-visualizer.cc
+++ b/src/compiler/graph-visualizer.cc
@@ -102,9 +102,9 @@
if (!script.is_null() && !script->IsUndefined(isolate) && !shared.is_null()) {
Object source_name = script->name();
os << ", \"sourceName\": \"";
- if (source_name->IsString()) {
+ if (source_name.IsString()) {
std::ostringstream escaped_name;
- escaped_name << String::cast(source_name)->ToCString().get();
+ escaped_name << String::cast(source_name).ToCString().get();
os << JSONEscaped(escaped_name);
}
os << "\"";
@@ -173,7 +173,7 @@
JsonPrintFunctionSource(os, -1,
info->shared_info().is_null()
? std::unique_ptr<char[]>(new char[1]{0})
- : info->shared_info()->DebugName()->ToCString(),
+ : info->shared_info()->DebugName().ToCString(),
script, isolate, info->shared_info(), true);
const auto& inlined = info->inlined_functions();
SourceIdAssigner id_assigner(info->inlined_functions().size());
@@ -181,7 +181,7 @@
os << ", ";
Handle<SharedFunctionInfo> shared = inlined[id].shared_info;
const int source_id = id_assigner.GetIdFor(shared);
- JsonPrintFunctionSource(os, source_id, shared->DebugName()->ToCString(),
+ JsonPrintFunctionSource(os, source_id, shared->DebugName().ToCString(),
handle(Script::cast(shared->script()), isolate),
isolate, shared, true);
}
@@ -216,12 +216,12 @@
EmbeddedVector<char, 256> source_file(0);
bool source_available = false;
if (FLAG_trace_file_names && info->has_shared_info() &&
- info->shared_info()->script()->IsScript()) {
- Object source_name = Script::cast(info->shared_info()->script())->name();
- if (source_name->IsString()) {
+ info->shared_info()->script().IsScript()) {
+ Object source_name = Script::cast(info->shared_info()->script()).name();
+ if (source_name.IsString()) {
String str = String::cast(source_name);
- if (str->length() > 0) {
- SNPrintF(source_file, "%s", str->ToCString().get());
+ if (str.length() > 0) {
+ SNPrintF(source_file, "%s", str.ToCString().get());
std::replace(source_file.begin(),
source_file.begin() + source_file.length(), '/', '_');
source_available = true;
diff --git a/src/compiler/js-call-reducer.cc b/src/compiler/js-call-reducer.cc
index c6b2030..78eccdc 100644
--- a/src/compiler/js-call-reducer.cc
+++ b/src/compiler/js-call-reducer.cc
@@ -452,7 +452,7 @@
return inference.NoChange();
}
if (!descriptors->GetStrongValue(JSFunction::kLengthDescriptorIndex)
- ->IsAccessorInfo()) {
+ .IsAccessorInfo()) {
return inference.NoChange();
}
if (descriptors->GetKey(JSFunction::kNameDescriptorIndex) !=
@@ -460,7 +460,7 @@
return inference.NoChange();
}
if (!descriptors->GetStrongValue(JSFunction::kNameDescriptorIndex)
- ->IsAccessorInfo()) {
+ .IsAccessorInfo()) {
return inference.NoChange();
}
}
@@ -2820,7 +2820,7 @@
? jsgraph()->HeapConstant(api_holder)
: receiver;
} else if (function_template_info->accept_any_receiver() &&
- function_template_info->signature()->IsUndefined(isolate())) {
+ function_template_info->signature().IsUndefined(isolate())) {
// We haven't found any {receiver_maps}, but we might still be able to
// optimize the API call depending on the {function_template_info}.
// If the API function accepts any kind of {receiver}, we only need to
@@ -2846,7 +2846,7 @@
// faster than the generic call sequence.
Builtins::Name builtin_name =
!function_template_info->accept_any_receiver()
- ? (function_template_info->signature()->IsUndefined(isolate())
+ ? (function_template_info->signature().IsUndefined(isolate())
? Builtins::kCallFunctionTemplate_CheckAccess
: Builtins::
kCallFunctionTemplate_CheckAccessAndCompatibleReceiver)
diff --git a/src/compiler/js-heap-broker.cc b/src/compiler/js-heap-broker.cc
index 3a3f18e..2d9f4b9 100644
--- a/src/compiler/js-heap-broker.cc
+++ b/src/compiler/js-heap-broker.cc
@@ -282,8 +282,8 @@
TraceScope tracer(broker, this, "JSObjectData::SerializeObjectCreateMap");
Handle<JSObject> jsobject = Handle<JSObject>::cast(object());
- if (jsobject->map()->is_prototype_map()) {
- Handle<Object> maybe_proto_info(jsobject->map()->prototype_info(),
+ if (jsobject->map().is_prototype_map()) {
+ Handle<Object> maybe_proto_info(jsobject->map().prototype_info(),
broker->isolate());
if (maybe_proto_info->IsPrototypeInfo()) {
auto proto_info = Handle<PrototypeInfo>::cast(maybe_proto_info);
@@ -358,7 +358,7 @@
is_on_heap_(object->is_on_heap()),
length_(object->length()),
elements_external_pointer_(
- FixedTypedArrayBase::cast(object->elements())->external_pointer()) {}
+ FixedTypedArrayBase::cast(object->elements()).external_pointer()) {}
void JSTypedArrayData::Serialize(JSHeapBroker* broker) {
if (serialized_) return;
@@ -705,14 +705,14 @@
// TODO(turbofan): Do we want to support out-of-object properties?
if (!(boilerplate->HasFastProperties() &&
- boilerplate->property_array()->length() == 0)) {
+ boilerplate->property_array().length() == 0)) {
return false;
}
// Check the in-object properties.
- Handle<DescriptorArray> descriptors(
- boilerplate->map()->instance_descriptors(), isolate);
- int limit = boilerplate->map()->NumberOfOwnDescriptors();
+ Handle<DescriptorArray> descriptors(boilerplate->map().instance_descriptors(),
+ isolate);
+ int limit = boilerplate->map().NumberOfOwnDescriptors();
for (int i = 0; i < limit; i++) {
PropertyDetails details = descriptors->GetDetails(i);
if (details.location() != kField) continue;
@@ -954,15 +954,15 @@
DCHECK(!jsarray_map->is_dictionary_map());
Handle<Name> length_string = isolate->factory()->length_string();
DescriptorArray descriptors = jsarray_map->instance_descriptors();
- int number = descriptors->Search(*length_string, *jsarray_map);
+ int number = descriptors.Search(*length_string, *jsarray_map);
DCHECK_NE(DescriptorArray::kNotFound, number);
- return descriptors->GetDetails(number).IsReadOnly();
+ return descriptors.GetDetails(number).IsReadOnly();
}
bool SupportsFastArrayIteration(Isolate* isolate, Handle<Map> map) {
return map->instance_type() == JS_ARRAY_TYPE &&
IsFastElementsKind(map->elements_kind()) &&
- map->prototype()->IsJSArray() &&
+ map->prototype().IsJSArray() &&
isolate->IsAnyInitialArrayPrototype(
handle(JSArray::cast(map->prototype()), isolate)) &&
isolate->IsNoElementsProtectorIntact();
@@ -1724,7 +1724,7 @@
// We only serialize boilerplates that pass the IsInlinableFastLiteral
// check, so we only do a sanity check on the depth here.
CHECK_GT(depth, 0);
- CHECK(!boilerplate->map()->is_deprecated());
+ CHECK(!boilerplate->map().is_deprecated());
// Serialize the elements.
Isolate* const isolate = broker->isolate();
@@ -1775,13 +1775,13 @@
// TODO(turbofan): Do we want to support out-of-object properties?
CHECK(boilerplate->HasFastProperties() &&
- boilerplate->property_array()->length() == 0);
+ boilerplate->property_array().length() == 0);
CHECK_EQ(inobject_fields_.size(), 0u);
// Check the in-object properties.
- Handle<DescriptorArray> descriptors(
- boilerplate->map()->instance_descriptors(), isolate);
- int const limit = boilerplate->map()->NumberOfOwnDescriptors();
+ Handle<DescriptorArray> descriptors(boilerplate->map().instance_descriptors(),
+ isolate);
+ int const limit = boilerplate->map().NumberOfOwnDescriptors();
for (int i = 0; i < limit; i++) {
PropertyDetails details = descriptors->GetDetails(i);
if (details.location() != kField) continue;
@@ -1978,14 +1978,14 @@
CHECK(array_and_object_prototypes_.empty());
Object maybe_context = isolate()->heap()->native_contexts_list();
- while (!maybe_context->IsUndefined(isolate())) {
+ while (!maybe_context.IsUndefined(isolate())) {
Context context = Context::cast(maybe_context);
- Object array_prot = context->get(Context::INITIAL_ARRAY_PROTOTYPE_INDEX);
- Object object_prot = context->get(Context::INITIAL_OBJECT_PROTOTYPE_INDEX);
+ Object array_prot = context.get(Context::INITIAL_ARRAY_PROTOTYPE_INDEX);
+ Object object_prot = context.get(Context::INITIAL_OBJECT_PROTOTYPE_INDEX);
array_and_object_prototypes_.emplace(JSObject::cast(array_prot), isolate());
array_and_object_prototypes_.emplace(JSObject::cast(object_prot),
isolate());
- maybe_context = context->next_context_link();
+ maybe_context = context.next_context_link();
}
CHECK(!array_and_object_prototypes_.empty());
@@ -2427,7 +2427,7 @@
PropertyDetails MapRef::GetPropertyDetails(int descriptor_index) const {
if (broker()->mode() == JSHeapBroker::kDisabled) {
AllowHandleDereference allow_handle_dereference;
- return object()->instance_descriptors()->GetDetails(descriptor_index);
+ return object()->instance_descriptors().GetDetails(descriptor_index);
}
DescriptorArrayData* descriptors = data()->AsMap()->instance_descriptors();
return descriptors->contents().at(descriptor_index).details;
@@ -2439,7 +2439,7 @@
AllowHandleDereference allow_handle_dereference;
return NameRef(
broker(),
- handle(object()->instance_descriptors()->GetKey(descriptor_index),
+ handle(object()->instance_descriptors().GetKey(descriptor_index),
broker()->isolate()));
}
DescriptorArrayData* descriptors = data()->AsMap()->instance_descriptors();
@@ -2475,7 +2475,7 @@
AllowHandleAllocation handle_allocation;
AllowHandleDereference allow_handle_dereference;
Handle<FieldType> field_type(
- object()->instance_descriptors()->GetFieldType(descriptor_index),
+ object()->instance_descriptors().GetFieldType(descriptor_index),
broker()->isolate());
return ObjectRef(broker(), field_type);
}
@@ -2667,7 +2667,7 @@
void* JSTypedArrayRef::elements_external_pointer() const {
if (broker()->mode() == JSHeapBroker::kDisabled) {
AllowHandleDereference allow_handle_dereference;
- return FixedTypedArrayBase::cast(object()->elements())->external_pointer();
+ return FixedTypedArrayBase::cast(object()->elements()).external_pointer();
}
return data()->AsJSTypedArray()->elements_external_pointer();
}
@@ -2842,7 +2842,7 @@
base::Optional<ObjectRef> JSArrayRef::GetOwnCowElement(uint32_t index,
bool serialize) const {
if (broker()->mode() == JSHeapBroker::kDisabled) {
- if (!object()->elements()->IsCowArray()) return base::nullopt;
+ if (!object()->elements().IsCowArray()) return base::nullopt;
return GetOwnElementFromHeap(broker(), object(), index, false);
}
@@ -2910,7 +2910,7 @@
namespace {
OddballType GetOddballType(Isolate* isolate, Map map) {
- if (map->instance_type() != ODDBALL_TYPE) {
+ if (map.instance_type() != ODDBALL_TYPE) {
return OddballType::kNone;
}
ReadOnlyRoots roots(isolate);
@@ -2941,9 +2941,9 @@
AllowHandleDereference handle_dereference;
Map map = Handle<HeapObject>::cast(object())->map();
HeapObjectType::Flags flags(0);
- if (map->is_undetectable()) flags |= HeapObjectType::kUndetectable;
- if (map->is_callable()) flags |= HeapObjectType::kCallable;
- return HeapObjectType(map->instance_type(), flags,
+ if (map.is_undetectable()) flags |= HeapObjectType::kUndetectable;
+ if (map.is_callable()) flags |= HeapObjectType::kCallable;
+ return HeapObjectType(map.instance_type(), flags,
GetOddballType(broker()->isolate(), map));
}
HeapObjectType::Flags flags(0);
diff --git a/src/compiler/js-inlining-heuristic.cc b/src/compiler/js-inlining-heuristic.cc
index 5a5e56d..721f065 100644
--- a/src/compiler/js-inlining-heuristic.cc
+++ b/src/compiler/js-inlining-heuristic.cc
@@ -744,7 +744,7 @@
? candidate.functions[i].value().shared()
: candidate.shared_info.value();
PrintF(" - size:%d, name: %s\n", candidate.bytecode[i].value().length(),
- shared.object()->DebugName()->ToCString().get());
+ shared.object()->DebugName().ToCString().get());
}
}
}
diff --git a/src/compiler/js-native-context-specialization.cc b/src/compiler/js-native-context-specialization.cc
index ff91d1f..c20a7c8 100644
--- a/src/compiler/js-native-context-specialization.cc
+++ b/src/compiler/js-native-context-specialization.cc
@@ -246,7 +246,7 @@
FrameStateInfoOf(frame_state->op()).shared_info().ToHandleChecked();
DCHECK(shared->is_compiled());
int register_count = shared->internal_formal_parameter_count() +
- shared->GetBytecodeArray()->register_count();
+ shared->GetBytecodeArray().register_count();
Node* value = effect =
graph()->NewNode(javascript()->CreateAsyncFunctionObject(register_count),
closure, receiver, promise, context, effect, control);
@@ -542,8 +542,8 @@
none = false;
break;
}
- if (!current->map()->is_stable() ||
- current->map()->instance_type() <= LAST_SPECIAL_RECEIVER_TYPE) {
+ if (!current->map().is_stable() ||
+ current->map().instance_type() <= LAST_SPECIAL_RECEIVER_TYPE) {
return kMayBeInPrototypeChain;
}
}
@@ -560,7 +560,7 @@
// might be a different object each time, so it's much simpler to include
// {prototype}. That does, however, mean that we must check {prototype}'s
// map stability.
- if (!prototype->map()->is_stable()) return kMayBeInPrototypeChain;
+ if (!prototype->map().is_stable()) return kMayBeInPrototypeChain;
last_prototype.emplace(broker(), Handle<JSObject>::cast(prototype));
}
WhereToStart start = result == NodeProperties::kUnreliableReceiverMaps
@@ -3305,7 +3305,7 @@
Node* receiver) {
HeapObjectMatcher m(receiver);
if (m.HasValue()) {
- return handle(m.Value()->map()->FindRootMap(isolate()), isolate());
+ return handle(m.Value()->map().FindRootMap(isolate()), isolate());
} else if (m.IsJSCreate()) {
base::Optional<MapRef> initial_map =
NodeProperties::GetJSCreateMap(broker(), receiver);
diff --git a/src/compiler/linkage.cc b/src/compiler/linkage.cc
index e1de305..b49e136 100644
--- a/src/compiler/linkage.cc
+++ b/src/compiler/linkage.cc
@@ -161,7 +161,7 @@
// plus the receiver.
SharedFunctionInfo shared = info->closure()->shared();
return GetJSCallDescriptor(zone, info->is_osr(),
- 1 + shared->internal_formal_parameter_count(),
+ 1 + shared.internal_formal_parameter_count(),
CallDescriptor::kCanUseRoots);
}
return nullptr; // TODO(titzer): ?
diff --git a/src/compiler/pipeline-statistics.cc b/src/compiler/pipeline-statistics.cc
index dfa2c0f..9cfd1cf 100644
--- a/src/compiler/pipeline-statistics.cc
+++ b/src/compiler/pipeline-statistics.cc
@@ -66,8 +66,7 @@
phase_name_(nullptr) {
if (info->has_shared_info()) {
source_size_ = static_cast<size_t>(info->shared_info()->SourceSize());
- std::unique_ptr<char[]> name =
- info->shared_info()->DebugName()->ToCString();
+ std::unique_ptr<char[]> name = info->shared_info()->DebugName().ToCString();
function_name_ = name.get();
}
total_stats_.Begin(this);
diff --git a/src/compiler/pipeline.cc b/src/compiler/pipeline.cc
index 9b86ede..feccd5f 100644
--- a/src/compiler/pipeline.cc
+++ b/src/compiler/pipeline.cc
@@ -567,18 +567,18 @@
void PrintFunctionSource(OptimizedCompilationInfo* info, Isolate* isolate,
int source_id, Handle<SharedFunctionInfo> shared) {
- if (!shared->script()->IsUndefined(isolate)) {
+ if (!shared->script().IsUndefined(isolate)) {
Handle<Script> script(Script::cast(shared->script()), isolate);
- if (!script->source()->IsUndefined(isolate)) {
+ if (!script->source().IsUndefined(isolate)) {
CodeTracer::Scope tracing_scope(isolate->GetCodeTracer());
Object source_name = script->name();
OFStream os(tracing_scope.file());
os << "--- FUNCTION SOURCE (";
- if (source_name->IsString()) {
- os << String::cast(source_name)->ToCString().get() << ":";
+ if (source_name.IsString()) {
+ os << String::cast(source_name).ToCString().get() << ":";
}
- os << shared->DebugName()->ToCString().get() << ") id{";
+ os << shared->DebugName().ToCString().get() << ") id{";
os << info->optimization_id() << "," << source_id << "} start{";
os << shared->StartPosition() << "} ---\n";
{
@@ -604,7 +604,7 @@
int inlining_id, const OptimizedCompilationInfo::InlinedFunctionHolder& h) {
CodeTracer::Scope tracing_scope(isolate->GetCodeTracer());
OFStream os(tracing_scope.file());
- os << "INLINE (" << h.shared_info->DebugName()->ToCString().get() << ") id{"
+ os << "INLINE (" << h.shared_info->DebugName().ToCString().get() << ") id{"
<< info->optimization_id() << "," << source_id << "} AS " << inlining_id
<< " AT ";
const SourcePosition position = h.position.position;
@@ -654,11 +654,11 @@
bool print_source = code->kind() == Code::OPTIMIZED_FUNCTION;
if (print_source) {
Handle<SharedFunctionInfo> shared = info->shared_info();
- if (shared->script()->IsScript() &&
- !Script::cast(shared->script())->source()->IsUndefined(isolate)) {
+ if (shared->script().IsScript() &&
+ !Script::cast(shared->script()).source().IsUndefined(isolate)) {
os << "--- Raw source ---\n";
StringCharacterStream stream(
- String::cast(Script::cast(shared->script())->source()),
+ String::cast(Script::cast(shared->script()).source()),
shared->StartPosition());
// fun->end_position() points to the last character in the stream. We
// need to compensate by adding one to calculate the length.
@@ -956,7 +956,7 @@
compilation_info()->MarkAsAllocationFoldingEnabled();
}
- if (compilation_info()->closure()->raw_feedback_cell()->map() ==
+ if (compilation_info()->closure()->raw_feedback_cell().map() ==
ReadOnlyRoots(isolate).one_closure_cell_map()) {
compilation_info()->MarkAsFunctionContextSpecializing();
}
@@ -1016,7 +1016,7 @@
}
compilation_info()->SetCode(code);
- compilation_info()->native_context()->AddOptimizedCode(*code);
+ compilation_info()->native_context().AddOptimizedCode(*code);
RegisterWeakObjectsInOptimizedCode(code, isolate);
return SUCCEEDED;
}
@@ -1088,12 +1088,12 @@
Maybe<OuterContext> GetModuleContext(Handle<JSFunction> closure) {
Context current = closure->context();
size_t distance = 0;
- while (!current->IsNativeContext()) {
- if (current->IsModuleContext()) {
+ while (!current.IsNativeContext()) {
+ if (current.IsModuleContext()) {
return Just(
- OuterContext(handle(current, current->GetIsolate()), distance));
+ OuterContext(handle(current, current.GetIsolate()), distance));
}
- current = current->previous();
+ current = current.previous();
distance++;
}
return Nothing<OuterContext>();
diff --git a/src/compiler/serializer-for-background-compilation.cc b/src/compiler/serializer-for-background-compilation.cc
index fb80584..112068e 100644
--- a/src/compiler/serializer-for-background-compilation.cc
+++ b/src/compiler/serializer-for-background-compilation.cc
@@ -153,8 +153,8 @@
Zone* zone, CompilationSubject function)
: zone_(zone),
function_(function.blueprint()),
- parameter_count_(function_.shared->GetBytecodeArray()->parameter_count()),
- register_count_(function_.shared->GetBytecodeArray()->register_count()),
+ parameter_count_(function_.shared->GetBytecodeArray().parameter_count()),
+ register_count_(function_.shared->GetBytecodeArray().register_count()),
environment_hints_(environment_hints_size(), Hints(zone), zone),
return_value_hints_(zone) {
Handle<JSFunction> closure;
@@ -186,7 +186,7 @@
interpreter::Register new_target_reg =
function_.shared->GetBytecodeArray()
- ->incoming_new_target_or_generator_register();
+ .incoming_new_target_or_generator_register();
if (new_target_reg.is_valid()) {
DCHECK(register_hints(new_target_reg).IsEmpty());
if (new_target.has_value()) {
@@ -379,7 +379,7 @@
// For JSNativeContextSpecialization::ReduceJSGetSuperConstructor.
if (!constant->IsJSFunction()) continue;
MapRef map(broker(),
- handle(HeapObject::cast(*constant)->map(), broker()->isolate()));
+ handle(HeapObject::cast(*constant).map(), broker()->isolate()));
map.SerializePrototype();
ObjectRef proto = map.prototype();
if (proto.IsHeapObject() && proto.AsHeapObject().map().is_constructor()) {
@@ -608,7 +608,7 @@
padded.pop_back(); // Remove the spread element.
// Fill the rest with empty hints.
padded.resize(
- function.blueprint().shared->GetBytecodeArray()->parameter_count(),
+ function.blueprint().shared->GetBytecodeArray().parameter_count(),
Hints(zone()));
return RunChildSerializer(function, new_target, padded, false);
}
@@ -657,7 +657,7 @@
if (!hint->IsJSFunction()) continue;
Handle<JSFunction> function = Handle<JSFunction>::cast(hint);
- if (!function->shared()->IsInlineable() || !function->has_feedback_vector())
+ if (!function->shared().IsInlineable() || !function->has_feedback_vector())
continue;
environment()->accumulator_hints().Add(RunChildSerializer(
diff --git a/src/compiler/wasm-compiler.cc b/src/compiler/wasm-compiler.cc
index 52e3902..c100bd3 100644
--- a/src/compiler/wasm-compiler.cc
+++ b/src/compiler/wasm-compiler.cc
@@ -5963,8 +5963,8 @@
bool has_bigint_feature) {
if (WasmExportedFunction::IsWasmExportedFunction(*target)) {
auto imported_function = WasmExportedFunction::cast(*target);
- auto func_index = imported_function->function_index();
- auto module = imported_function->instance()->module();
+ auto func_index = imported_function.function_index();
+ auto module = imported_function.instance().module();
wasm::FunctionSig* imported_sig = module->functions[func_index].sig;
if (*imported_sig != *expected_sig) {
return WasmImportCallKind::kLinkError;
@@ -5978,7 +5978,7 @@
}
if (WasmCapiFunction::IsWasmCapiFunction(*target)) {
WasmCapiFunction capi_function = WasmCapiFunction::cast(*target);
- if (!capi_function->IsSignatureEqual(expected_sig)) {
+ if (!capi_function.IsSignatureEqual(expected_sig)) {
return WasmImportCallKind::kLinkError;
}
return WasmImportCallKind::kWasmToCapi;
@@ -6011,8 +6011,8 @@
COMPARE_SIG_FOR_BUILTIN(F32##name); \
break;
- if (FLAG_wasm_math_intrinsics && shared->HasBuiltinId()) {
- switch (shared->builtin_id()) {
+ if (FLAG_wasm_math_intrinsics && shared.HasBuiltinId()) {
+ switch (shared.builtin_id()) {
COMPARE_SIG_FOR_BUILTIN_F64(Acos);
COMPARE_SIG_FOR_BUILTIN_F64(Asin);
COMPARE_SIG_FOR_BUILTIN_F64(Atan);
@@ -6044,12 +6044,12 @@
#undef COMPARE_SIG_FOR_BUILTIN_F64
#undef COMPARE_SIG_FOR_BUILTIN_F32_F64
- if (IsClassConstructor(shared->kind())) {
+ if (IsClassConstructor(shared.kind())) {
// Class constructor will throw anyway.
return WasmImportCallKind::kUseCallBuiltin;
}
- bool sloppy = is_sloppy(shared->language_mode()) && !shared->native();
- if (shared->internal_formal_parameter_count() ==
+ bool sloppy = is_sloppy(shared.language_mode()) && !shared.native();
+ if (shared.internal_formal_parameter_count() ==
expected_sig->parameter_count()) {
return sloppy ? WasmImportCallKind::kJSFunctionArityMatchSloppy
: WasmImportCallKind::kJSFunctionArityMatch;
diff --git a/src/contexts-inl.h b/src/contexts-inl.h
index 6777f65..6903c0c 100644
--- a/src/contexts-inl.h
+++ b/src/contexts-inl.h
@@ -88,7 +88,7 @@
Object Context::next_context_link() { return get(Context::NEXT_CONTEXT_LINK); }
-bool Context::has_extension() { return !extension()->IsTheHole(); }
+bool Context::has_extension() { return !extension().IsTheHole(); }
HeapObject Context::extension() {
return HeapObject::cast(get(EXTENSION_INDEX));
}
@@ -105,44 +105,44 @@
}
bool Context::IsFunctionContext() const {
- return map()->instance_type() == FUNCTION_CONTEXT_TYPE;
+ return map().instance_type() == FUNCTION_CONTEXT_TYPE;
}
bool Context::IsCatchContext() const {
- return map()->instance_type() == CATCH_CONTEXT_TYPE;
+ return map().instance_type() == CATCH_CONTEXT_TYPE;
}
bool Context::IsWithContext() const {
- return map()->instance_type() == WITH_CONTEXT_TYPE;
+ return map().instance_type() == WITH_CONTEXT_TYPE;
}
bool Context::IsDebugEvaluateContext() const {
- return map()->instance_type() == DEBUG_EVALUATE_CONTEXT_TYPE;
+ return map().instance_type() == DEBUG_EVALUATE_CONTEXT_TYPE;
}
bool Context::IsAwaitContext() const {
- return map()->instance_type() == AWAIT_CONTEXT_TYPE;
+ return map().instance_type() == AWAIT_CONTEXT_TYPE;
}
bool Context::IsBlockContext() const {
- return map()->instance_type() == BLOCK_CONTEXT_TYPE;
+ return map().instance_type() == BLOCK_CONTEXT_TYPE;
}
bool Context::IsModuleContext() const {
- return map()->instance_type() == MODULE_CONTEXT_TYPE;
+ return map().instance_type() == MODULE_CONTEXT_TYPE;
}
bool Context::IsEvalContext() const {
- return map()->instance_type() == EVAL_CONTEXT_TYPE;
+ return map().instance_type() == EVAL_CONTEXT_TYPE;
}
bool Context::IsScriptContext() const {
- return map()->instance_type() == SCRIPT_CONTEXT_TYPE;
+ return map().instance_type() == SCRIPT_CONTEXT_TYPE;
}
bool Context::HasSameSecurityTokenAs(Context that) const {
- return this->native_context()->security_token() ==
- that->native_context()->security_token();
+ return this->native_context().security_token() ==
+ that.native_context().security_token();
}
#define NATIVE_CONTEXT_FIELD_ACCESSORS(index, type, name) \
@@ -232,7 +232,7 @@
if (!IsFastElementsKind(kind)) return Map();
DisallowHeapAllocation no_gc;
Object const initial_js_array_map = get(Context::ArrayMapIndex(kind));
- DCHECK(!initial_js_array_map->IsUndefined());
+ DCHECK(!initial_js_array_map.IsUndefined());
return Map::cast(initial_js_array_map);
}
diff --git a/src/contexts.cc b/src/contexts.cc
index c2f5e4e..d8d6cf9 100644
--- a/src/contexts.cc
+++ b/src/contexts.cc
@@ -40,11 +40,11 @@
bool ScriptContextTable::Lookup(Isolate* isolate, ScriptContextTable table,
String name, LookupResult* result) {
DisallowHeapAllocation no_gc;
- for (int i = 0; i < table->used(); i++) {
- Context context = table->get_context(i);
- DCHECK(context->IsScriptContext());
+ for (int i = 0; i < table.used(); i++) {
+ Context context = table.get_context(i);
+ DCHECK(context.IsScriptContext());
int slot_index = ScopeInfo::ContextSlotIndex(
- context->scope_info(), name, &result->mode, &result->init_flag,
+ context.scope_info(), name, &result->mode, &result->init_flag,
&result->maybe_assigned_flag);
if (slot_index >= 0) {
@@ -62,26 +62,26 @@
return true;
}
if (IsEvalContext()) {
- return scope_info()->language_mode() == LanguageMode::kStrict;
+ return scope_info().language_mode() == LanguageMode::kStrict;
}
if (!IsBlockContext()) return false;
- return scope_info()->is_declaration_scope();
+ return scope_info().is_declaration_scope();
}
Context Context::declaration_context() {
Context current = *this;
- while (!current->is_declaration_context()) {
- current = current->previous();
+ while (!current.is_declaration_context()) {
+ current = current.previous();
}
return current;
}
Context Context::closure_context() {
Context current = *this;
- while (!current->IsFunctionContext() && !current->IsScriptContext() &&
- !current->IsModuleContext() && !current->IsNativeContext() &&
- !current->IsEvalContext()) {
- current = current->previous();
+ while (!current.IsFunctionContext() && !current.IsScriptContext() &&
+ !current.IsModuleContext() && !current.IsNativeContext() &&
+ !current.IsEvalContext()) {
+ current = current.previous();
}
return current;
}
@@ -90,9 +90,9 @@
DCHECK(IsNativeContext() || IsFunctionContext() || IsBlockContext() ||
IsEvalContext() || IsCatchContext());
HeapObject object = extension();
- if (object->IsTheHole()) return JSObject();
- DCHECK(object->IsJSContextExtensionObject() ||
- (IsNativeContext() && object->IsJSGlobalObject()));
+ if (object.IsTheHole()) return JSObject();
+ DCHECK(object.IsJSContextExtensionObject() ||
+ (IsNativeContext() && object.IsJSGlobalObject()));
return JSObject::cast(object);
}
@@ -108,30 +108,30 @@
Module Context::module() {
Context current = *this;
- while (!current->IsModuleContext()) {
- current = current->previous();
+ while (!current.IsModuleContext()) {
+ current = current.previous();
}
- return Module::cast(current->extension());
+ return Module::cast(current.extension());
}
JSGlobalObject Context::global_object() {
- return JSGlobalObject::cast(native_context()->extension());
+ return JSGlobalObject::cast(native_context().extension());
}
Context Context::script_context() {
Context current = *this;
- while (!current->IsScriptContext()) {
- current = current->previous();
+ while (!current.IsScriptContext()) {
+ current = current.previous();
}
return current;
}
JSGlobalProxy Context::global_proxy() {
- return native_context()->global_proxy_object();
+ return native_context().global_proxy_object();
}
void Context::set_global_proxy(JSGlobalProxy object) {
- native_context()->set_global_proxy_object(object);
+ native_context().set_global_proxy_object(object);
}
/**
@@ -202,7 +202,7 @@
// 1. Check global objects, subjects of with, and extension objects.
DCHECK_IMPLIES(context->IsEvalContext(),
- context->extension()->IsTheHole(isolate));
+ context->extension().IsTheHole(isolate));
if ((context->IsNativeContext() || context->IsWithContext() ||
context->IsFunctionContext() || context->IsBlockContext()) &&
!context->extension_receiver().is_null()) {
@@ -215,13 +215,13 @@
}
// Try other script contexts.
ScriptContextTable script_contexts =
- context->global_object()->native_context()->script_context_table();
+ context->global_object().native_context().script_context_table();
ScriptContextTable::LookupResult r;
if (ScriptContextTable::Lookup(isolate, script_contexts, *name, &r)) {
- Context context = script_contexts->get_context(r.context_index);
+ Context context = script_contexts.get_context(r.context_index);
if (FLAG_trace_contexts) {
PrintF("=> found property in script context %d: %p\n",
- r.context_index, reinterpret_cast<void*>(context->ptr()));
+ r.context_index, reinterpret_cast<void*>(context.ptr()));
}
*index = r.slot_index;
*variable_mode = r.mode;
@@ -307,7 +307,7 @@
// only the function name variable. It's conceptually (and spec-wise)
// in an outer scope of the function's declaration scope.
if (follow_context_chain && context->IsFunctionContext()) {
- int function_index = scope_info->FunctionContextSlotIndex(*name);
+ int function_index = scope_info.FunctionContextSlotIndex(*name);
if (function_index >= 0) {
if (FLAG_trace_contexts) {
PrintF("=> found intermediate function in context slot %d\n",
@@ -318,7 +318,7 @@
*init_flag = kCreatedInitialized;
*variable_mode = VariableMode::kConst;
if (is_sloppy_function_name != nullptr &&
- is_sloppy(scope_info->language_mode())) {
+ is_sloppy(scope_info.language_mode())) {
*is_sloppy_function_name = true;
}
return context;
@@ -331,7 +331,7 @@
InitializationFlag flag;
MaybeAssignedFlag maybe_assigned_flag;
int cell_index =
- scope_info->ModuleIndex(*name, &mode, &flag, &maybe_assigned_flag);
+ scope_info.ModuleIndex(*name, &mode, &flag, &maybe_assigned_flag);
if (cell_index != 0) {
if (FLAG_trace_contexts) {
PrintF("=> found in module imports or exports\n");
@@ -349,7 +349,7 @@
} else if (context->IsDebugEvaluateContext()) {
// Check materialized locals.
Object ext = context->get(EXTENSION_INDEX);
- if (ext->IsJSReceiver()) {
+ if (ext.IsJSReceiver()) {
Handle<JSReceiver> extension(JSReceiver::cast(ext), isolate);
LookupIterator it(extension, name, extension);
Maybe<bool> found = JSReceiver::HasProperty(&it);
@@ -360,7 +360,7 @@
}
// Check the original context, but do not follow its context chain.
Object obj = context->get(WRAPPED_CONTEXT_INDEX);
- if (obj->IsContext()) {
+ if (obj.IsContext()) {
Handle<Context> context(Context::cast(obj), isolate);
Handle<Object> result =
Context::Lookup(context, name, DONT_FOLLOW_CHAINS, index,
@@ -370,9 +370,9 @@
// Check whitelist. Names that do not pass whitelist shall only resolve
// to with, script or native contexts up the context chain.
obj = context->get(WHITE_LIST_INDEX);
- if (obj->IsStringSet()) {
+ if (obj.IsStringSet()) {
failed_whitelist =
- failed_whitelist || !StringSet::cast(obj)->Has(isolate, name);
+ failed_whitelist || !StringSet::cast(obj).Has(isolate, name);
}
}
@@ -397,9 +397,9 @@
void Context::AddOptimizedCode(Code code) {
DCHECK(IsNativeContext());
- DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
- DCHECK(code->next_code_link()->IsUndefined());
- code->set_next_code_link(get(OPTIMIZED_CODE_LIST));
+ DCHECK(code.kind() == Code::OPTIMIZED_FUNCTION);
+ DCHECK(code.next_code_link().IsUndefined());
+ code.set_next_code_link(get(OPTIMIZED_CODE_LIST));
set(OPTIMIZED_CODE_LIST, code, UPDATE_WEAK_WRITE_BARRIER);
}
@@ -459,18 +459,18 @@
// During bootstrapping we allow all objects to pass as global
// objects. This is necessary to fix circular dependencies.
return isolate->heap()->gc_state() != Heap::NOT_IN_GC ||
- isolate->bootstrapper()->IsActive() || object->IsNativeContext();
+ isolate->bootstrapper()->IsActive() || object.IsNativeContext();
}
bool Context::IsBootstrappingOrValidParentContext(Object object,
Context child) {
// During bootstrapping we allow all objects to pass as
// contexts. This is necessary to fix circular dependencies.
- if (child->GetIsolate()->bootstrapper()->IsActive()) return true;
- if (!object->IsContext()) return false;
+ if (child.GetIsolate()->bootstrapper()->IsActive()) return true;
+ if (!object.IsContext()) return false;
Context context = Context::cast(object);
- return context->IsNativeContext() || context->IsScriptContext() ||
- context->IsModuleContext() || !child->IsModuleContext();
+ return context.IsNativeContext() || context.IsScriptContext() ||
+ context.IsModuleContext() || !child.IsModuleContext();
}
#endif
@@ -483,11 +483,11 @@
void Context::IncrementErrorsThrown() {
DCHECK(IsNativeContext());
- int previous_value = errors_thrown()->value();
+ int previous_value = errors_thrown().value();
set_errors_thrown(Smi::FromInt(previous_value + 1));
}
-int Context::GetErrorsThrown() { return errors_thrown()->value(); }
+int Context::GetErrorsThrown() { return errors_thrown().value(); }
STATIC_ASSERT(Context::MIN_CONTEXT_SLOTS == 4);
STATIC_ASSERT(NativeContext::kScopeInfoOffset ==
diff --git a/src/date/date.cc b/src/date/date.cc
index 66a6523..05e2ec0 100644
--- a/src/date/date.cc
+++ b/src/date/date.cc
@@ -38,10 +38,10 @@
void DateCache::ResetDateCache(
base::TimezoneCache::TimeZoneDetection time_zone_detection) {
- if (stamp_->value() >= Smi::kMaxValue) {
+ if (stamp_.value() >= Smi::kMaxValue) {
stamp_ = Smi::zero();
} else {
- stamp_ = Smi::FromInt(stamp_->value() + 1);
+ stamp_ = Smi::FromInt(stamp_.value() + 1);
}
DCHECK(stamp_ != Smi::FromInt(kInvalidStamp));
for (int i = 0; i < kDSTSize; ++i) {
diff --git a/src/date/dateparser-inl.h b/src/date/dateparser-inl.h
index 81e6e0d..b2099ca 100644
--- a/src/date/dateparser-inl.h
+++ b/src/date/dateparser-inl.h
@@ -14,7 +14,7 @@
template <typename Char>
bool DateParser::Parse(Isolate* isolate, Vector<Char> str, FixedArray out) {
- DCHECK(out->length() >= OUTPUT_SIZE);
+ DCHECK(out.length() >= OUTPUT_SIZE);
InputReader<Char> in(str);
DateStringTokenizer<Char> scanner(&in);
TimeZoneComposer tz;
diff --git a/src/date/dateparser.cc b/src/date/dateparser.cc
index 8a1dcf1..5d18640 100644
--- a/src/date/dateparser.cc
+++ b/src/date/dateparser.cc
@@ -58,9 +58,9 @@
if (!Smi::IsValid(year) || !IsMonth(month) || !IsDay(day)) return false;
- output->set(YEAR, Smi::FromInt(year));
- output->set(MONTH, Smi::FromInt(month - 1)); // 0-based
- output->set(DAY, Smi::FromInt(day));
+ output.set(YEAR, Smi::FromInt(year));
+ output.set(MONTH, Smi::FromInt(month - 1)); // 0-based
+ output.set(DAY, Smi::FromInt(day));
return true;
}
@@ -89,10 +89,10 @@
}
}
- output->set(HOUR, Smi::FromInt(hour));
- output->set(MINUTE, Smi::FromInt(minute));
- output->set(SECOND, Smi::FromInt(second));
- output->set(MILLISECOND, Smi::FromInt(millisecond));
+ output.set(HOUR, Smi::FromInt(hour));
+ output.set(MINUTE, Smi::FromInt(minute));
+ output.set(SECOND, Smi::FromInt(second));
+ output.set(MILLISECOND, Smi::FromInt(millisecond));
return true;
}
@@ -109,9 +109,9 @@
total_seconds = -total_seconds;
}
DCHECK(Smi::IsValid(total_seconds));
- output->set(UTC_OFFSET, Smi::FromInt(total_seconds));
+ output.set(UTC_OFFSET, Smi::FromInt(total_seconds));
} else {
- output->set_null(UTC_OFFSET);
+ output.set_null(UTC_OFFSET);
}
return true;
}
diff --git a/src/debug/debug-coverage.cc b/src/debug/debug-coverage.cc
index b96a4a8..7d85659 100644
--- a/src/debug/debug-coverage.cc
+++ b/src/debug/debug-coverage.cc
@@ -49,15 +49,15 @@
namespace {
int StartPosition(SharedFunctionInfo info) {
- int start = info->function_token_position();
- if (start == kNoSourcePosition) start = info->StartPosition();
+ int start = info.function_token_position();
+ if (start == kNoSourcePosition) start = info.StartPosition();
return start;
}
bool CompareSharedFunctionInfo(SharedFunctionInfo a, SharedFunctionInfo b) {
int a_start = StartPosition(a);
int b_start = StartPosition(b);
- if (a_start == b_start) return a->EndPosition() > b->EndPosition();
+ if (a_start == b_start) return a.EndPosition() > b.EndPosition();
return a_start < b_start;
}
@@ -74,18 +74,18 @@
}
std::vector<CoverageBlock> GetSortedBlockData(SharedFunctionInfo shared) {
- DCHECK(shared->HasCoverageInfo());
+ DCHECK(shared.HasCoverageInfo());
CoverageInfo coverage_info =
- CoverageInfo::cast(shared->GetDebugInfo()->coverage_info());
+ CoverageInfo::cast(shared.GetDebugInfo().coverage_info());
std::vector<CoverageBlock> result;
- if (coverage_info->SlotCount() == 0) return result;
+ if (coverage_info.SlotCount() == 0) return result;
- for (int i = 0; i < coverage_info->SlotCount(); i++) {
- const int start_pos = coverage_info->StartSourcePosition(i);
- const int until_pos = coverage_info->EndSourcePosition(i);
- const int count = coverage_info->BlockCount(i);
+ for (int i = 0; i < coverage_info.SlotCount(); i++) {
+ const int start_pos = coverage_info.StartSourcePosition(i);
+ const int until_pos = coverage_info.EndSourcePosition(i);
+ const int count = coverage_info.BlockCount(i);
DCHECK_NE(kNoSourcePosition, start_pos);
result.emplace_back(start_pos, until_pos, count);
@@ -386,13 +386,13 @@
}
void ResetAllBlockCounts(SharedFunctionInfo shared) {
- DCHECK(shared->HasCoverageInfo());
+ DCHECK(shared.HasCoverageInfo());
CoverageInfo coverage_info =
- CoverageInfo::cast(shared->GetDebugInfo()->coverage_info());
+ CoverageInfo::cast(shared.GetDebugInfo().coverage_info());
- for (int i = 0; i < coverage_info->SlotCount(); i++) {
- coverage_info->ResetBlockCount(i);
+ for (int i = 0; i < coverage_info.SlotCount(); i++) {
+ coverage_info.ResetBlockCount(i);
}
}
@@ -521,10 +521,10 @@
isolate->factory()->feedback_vectors_for_profiling_tools());
for (int i = 0; i < list->Length(); i++) {
FeedbackVector vector = FeedbackVector::cast(list->Get(i));
- SharedFunctionInfo shared = vector->shared_function_info();
- DCHECK(shared->IsSubjectToDebugging());
- uint32_t count = static_cast<uint32_t>(vector->invocation_count());
- if (reset_count) vector->clear_invocation_count();
+ SharedFunctionInfo shared = vector.shared_function_info();
+ DCHECK(shared.IsSubjectToDebugging());
+ uint32_t count = static_cast<uint32_t>(vector.invocation_count());
+ if (reset_count) vector.clear_invocation_count();
counter_map.Add(shared, count);
}
break;
@@ -537,18 +537,18 @@
HeapIterator heap_iterator(isolate->heap());
for (HeapObject current_obj = heap_iterator.next();
!current_obj.is_null(); current_obj = heap_iterator.next()) {
- if (!current_obj->IsJSFunction()) continue;
+ if (!current_obj.IsJSFunction()) continue;
JSFunction func = JSFunction::cast(current_obj);
- SharedFunctionInfo shared = func->shared();
- if (!shared->IsSubjectToDebugging()) continue;
- if (!(func->has_feedback_vector() ||
- func->has_closure_feedback_cell_array()))
+ SharedFunctionInfo shared = func.shared();
+ if (!shared.IsSubjectToDebugging()) continue;
+ if (!(func.has_feedback_vector() ||
+ func.has_closure_feedback_cell_array()))
continue;
uint32_t count = 0;
- if (func->has_feedback_vector()) {
- count = static_cast<uint32_t>(
- func->feedback_vector()->invocation_count());
- } else if (func->raw_feedback_cell()->interrupt_budget() <
+ if (func.has_feedback_vector()) {
+ count =
+ static_cast<uint32_t>(func.feedback_vector().invocation_count());
+ } else if (func.raw_feedback_cell().interrupt_budget() <
FLAG_budget_for_feedback_vector_allocation) {
// We haven't allocated feedback vector, but executed the function
// atleast once. We don't have precise invocation count here.
@@ -562,7 +562,7 @@
// vector wasn't allocated yet and the function's interrupt budget wasn't
// updated (i.e. it didn't execute return / jump).
for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
- SharedFunctionInfo shared = it.frame()->function()->shared();
+ SharedFunctionInfo shared = it.frame()->function().shared();
if (counter_map.Get(shared) != 0) continue;
counter_map.Add(shared, 1);
}
@@ -576,7 +576,7 @@
Script::Iterator scripts(isolate);
for (Script script = scripts.Next(); !script.is_null();
script = scripts.Next()) {
- if (!script->IsUserJavaScript()) continue;
+ if (!script.IsUserJavaScript()) continue;
// Create and add new script data.
Handle<Script> script_handle(script, isolate);
@@ -601,7 +601,7 @@
// Use sorted list to reconstruct function nesting.
for (SharedFunctionInfo info : sorted) {
int start = StartPosition(info);
- int end = info->EndPosition();
+ int end = info.EndPosition();
uint32_t count = counter_map.Get(info);
// Find the correct outer function based on start position.
while (!nesting.empty() && functions->at(nesting.back()).end <= start) {
@@ -614,8 +614,8 @@
break;
case v8::debug::CoverageMode::kBlockBinary:
case v8::debug::CoverageMode::kPreciseBinary:
- count = info->has_reported_binary_coverage() ? 0 : 1;
- info->set_has_reported_binary_coverage(true);
+ count = info.has_reported_binary_coverage() ? 0 : 1;
+ info.set_has_reported_binary_coverage(true);
break;
case v8::debug::CoverageMode::kBestEffort:
count = 1;
@@ -623,10 +623,10 @@
}
}
- Handle<String> name(info->DebugName(), isolate);
+ Handle<String> name(info.DebugName(), isolate);
CoverageFunction function(start, end, count, name);
- if (IsBlockMode(collectionMode) && info->HasCoverageInfo()) {
+ if (IsBlockMode(collectionMode) && info.HasCoverageInfo()) {
CollectBlockCoverage(&function, info, collectionMode);
}
@@ -681,21 +681,21 @@
HeapIterator heap_iterator(isolate->heap());
for (HeapObject o = heap_iterator.next(); !o.is_null();
o = heap_iterator.next()) {
- if (o->IsJSFunction()) {
+ if (o.IsJSFunction()) {
JSFunction func = JSFunction::cast(o);
- if (func->has_closure_feedback_cell_array()) {
+ if (func.has_closure_feedback_cell_array()) {
funcs_needing_feedback_vector.push_back(
Handle<JSFunction>(func, isolate));
}
- } else if (IsBinaryMode(mode) && o->IsSharedFunctionInfo()) {
+ } else if (IsBinaryMode(mode) && o.IsSharedFunctionInfo()) {
// If collecting binary coverage, reset
// SFI::has_reported_binary_coverage to avoid optimizing / inlining
// functions before they have reported coverage.
SharedFunctionInfo shared = SharedFunctionInfo::cast(o);
- shared->set_has_reported_binary_coverage(false);
- } else if (o->IsFeedbackVector()) {
+ shared.set_has_reported_binary_coverage(false);
+ } else if (o.IsFeedbackVector()) {
// In any case, clear any collected invocation counts.
- FeedbackVector::cast(o)->clear_invocation_count();
+ FeedbackVector::cast(o).clear_invocation_count();
}
}
}
diff --git a/src/debug/debug-evaluate.cc b/src/debug/debug-evaluate.cc
index 7cdc85a..8a4e35a 100644
--- a/src/debug/debug-evaluate.cc
+++ b/src/debug/debug-evaluate.cc
@@ -90,7 +90,7 @@
// Get context and receiver.
Handle<Context> native_context(
- Context::cast(it.frame()->context())->native_context(), isolate);
+ Context::cast(it.frame()->context()).native_context(), isolate);
// Materialize arguments as property on an extension object.
Handle<JSObject> materialized = factory->NewJSObjectWithNullProto();
@@ -115,7 +115,7 @@
factory->NewDebugEvaluateContext(native_context, scope_info, materialized,
Handle<Context>(), Handle<StringSet>());
Handle<SharedFunctionInfo> outer_info(
- native_context->empty_function()->shared(), isolate);
+ native_context->empty_function().shared(), isolate);
Handle<JSObject> receiver(native_context->global_proxy(), isolate);
const bool throw_on_side_effect = false;
MaybeHandle<Object> maybe_result =
@@ -226,7 +226,7 @@
.ToHandleChecked();
for (int i = 0; i < keys->length(); i++) {
- DCHECK(keys->get(i)->IsString());
+ DCHECK(keys->get(i).IsString());
Handle<String> key(String::cast(keys->get(i)), isolate_);
Handle<Object> value =
JSReceiver::GetDataProperty(element.materialized_object, key);
@@ -839,7 +839,7 @@
Isolate* isolate, Handle<SharedFunctionInfo> info) {
if (FLAG_trace_side_effect_free_debug_evaluate) {
PrintF("[debug-evaluate] Checking function %s for side effect.\n",
- info->DebugName()->ToCString().get());
+ info->DebugName().ToCString().get());
}
DCHECK(info->is_compiled());
@@ -880,8 +880,8 @@
return requires_runtime_checks ? DebugInfo::kRequiresRuntimeChecks
: DebugInfo::kHasNoSideEffect;
} else if (info->IsApiFunction()) {
- if (info->GetCode()->is_builtin()) {
- return info->GetCode()->builtin_index() == Builtins::kHandleApiCall
+ if (info->GetCode().is_builtin()) {
+ return info->GetCode().builtin_index() == Builtins::kHandleApiCall
? DebugInfo::kHasNoSideEffect
: DebugInfo::kHasSideEffects;
}
@@ -1024,9 +1024,9 @@
DCHECK(RelocInfo::IsCodeTargetMode(rinfo->rmode()));
Code callee_code = isolate->heap()->GcSafeFindCodeForInnerPointer(
rinfo->target_address());
- if (!callee_code->is_builtin()) continue;
+ if (!callee_code.is_builtin()) continue;
Builtins::Name callee =
- static_cast<Builtins::Name>(callee_code->builtin_index());
+ static_cast<Builtins::Name>(callee_code.builtin_index());
if (BuiltinGetSideEffectState(callee) == DebugInfo::kHasNoSideEffect) {
continue;
}
diff --git a/src/debug/debug-frames.cc b/src/debug/debug-frames.cc
index 5666361..a6ee317 100644
--- a/src/debug/debug-frames.cc
+++ b/src/debug/debug-frames.cc
@@ -47,7 +47,7 @@
wasm_interpreted_frame_ =
WasmInterpreterEntryFrame::cast(frame_)
->debug_info()
- ->GetInterpretedFrame(frame_->fp(), inlined_frame_index);
+ .GetInterpretedFrame(frame_->fp(), inlined_frame_index);
DCHECK(wasm_interpreted_frame_);
}
}
@@ -97,9 +97,9 @@
RedirectActiveFunctions::RedirectActiveFunctions(SharedFunctionInfo shared,
Mode mode)
: shared_(shared), mode_(mode) {
- DCHECK(shared->HasBytecodeArray());
+ DCHECK(shared.HasBytecodeArray());
if (mode == Mode::kUseDebugBytecode) {
- DCHECK(shared->HasDebugInfo());
+ DCHECK(shared.HasDebugInfo());
}
}
@@ -109,12 +109,12 @@
JavaScriptFrame* frame = it.frame();
JSFunction function = frame->function();
if (!frame->is_interpreted()) continue;
- if (function->shared() != shared_) continue;
+ if (function.shared() != shared_) continue;
InterpretedFrame* interpreted_frame =
reinterpret_cast<InterpretedFrame*>(frame);
BytecodeArray bytecode = mode_ == Mode::kUseDebugBytecode
- ? shared_->GetDebugInfo()->DebugBytecodeArray()
- : shared_->GetBytecodeArray();
+ ? shared_.GetDebugInfo().DebugBytecodeArray()
+ : shared_.GetBytecodeArray();
interpreted_frame->PatchBytecodeArray(bytecode);
}
}
diff --git a/src/debug/debug-scopes.cc b/src/debug/debug-scopes.cc
index aa5f655..966dc1a 100644
--- a/src/debug/debug-scopes.cc
+++ b/src/debug/debug-scopes.cc
@@ -47,8 +47,8 @@
if (!context_->IsNativeContext()) {
DisallowHeapAllocation no_gc;
- ScopeInfo closure_info = context_->closure_context()->scope_info();
- Handle<String> debug_name(closure_info->FunctionDebugName(), isolate_);
+ ScopeInfo closure_info = context_->closure_context().scope_info();
+ Handle<String> debug_name(closure_info.FunctionDebugName(), isolate_);
if (debug_name->length() > 0) return debug_name;
}
return isolate_->factory()->undefined_value();
@@ -56,11 +56,11 @@
ScopeIterator::ScopeIterator(Isolate* isolate, Handle<JSFunction> function)
: isolate_(isolate), context_(function->context(), isolate) {
- if (!function->shared()->IsSubjectToDebugging()) {
+ if (!function->shared().IsSubjectToDebugging()) {
context_ = Handle<Context>();
return;
}
- script_ = handle(Script::cast(function->shared()->script()), isolate);
+ script_ = handle(Script::cast(function->shared().script()), isolate);
UnwrapEvaluationContext();
}
@@ -70,8 +70,8 @@
generator_(generator),
function_(generator->function(), isolate),
context_(generator->context(), isolate),
- script_(Script::cast(function_->shared()->script()), isolate) {
- CHECK(function_->shared()->IsSubjectToDebugging());
+ script_(Script::cast(function_->shared().script()), isolate) {
+ CHECK(function_->shared().IsSubjectToDebugging());
TryParseAndRetrieveScopes(DEFAULT);
}
@@ -88,7 +88,7 @@
// Catch the case when the debugger stops in an internal function.
Handle<SharedFunctionInfo> shared_info(function_->shared(), isolate_);
Handle<ScopeInfo> scope_info(shared_info->scope_info(), isolate_);
- if (shared_info->script()->IsUndefined(isolate_)) {
+ if (shared_info->script().IsUndefined(isolate_)) {
current_scope_ = closure_scope_ = nullptr;
context_ = handle(function_->context(), isolate_);
function_ = Handle<JSFunction>();
@@ -191,14 +191,14 @@
if (!context_->IsDebugEvaluateContext()) return;
Context current = *context_;
do {
- Object wrapped = current->get(Context::WRAPPED_CONTEXT_INDEX);
- if (wrapped->IsContext()) {
+ Object wrapped = current.get(Context::WRAPPED_CONTEXT_INDEX);
+ if (wrapped.IsContext()) {
current = Context::cast(wrapped);
} else {
- DCHECK(!current->previous().is_null());
- current = current->previous();
+ DCHECK(!current.previous().is_null());
+ current = current.previous();
}
- } while (current->IsDebugEvaluateContext());
+ } while (current.IsDebugEvaluateContext());
context_ = handle(current, isolate_);
}
@@ -232,13 +232,13 @@
int ScopeIterator::start_position() {
if (InInnerScope()) return current_scope_->start_position();
if (context_->IsNativeContext()) return 0;
- return context_->closure_context()->scope_info()->StartPosition();
+ return context_->closure_context().scope_info().StartPosition();
}
int ScopeIterator::end_position() {
if (InInnerScope()) return current_scope_->end_position();
if (context_->IsNativeContext()) return 0;
- return context_->closure_context()->scope_info()->EndPosition();
+ return context_->closure_context().scope_info().EndPosition();
}
bool ScopeIterator::DeclaresLocals(Mode mode) const {
@@ -341,7 +341,7 @@
UNREACHABLE();
}
if (context_->IsNativeContext()) {
- DCHECK(context_->global_object()->IsJSGlobalObject());
+ DCHECK(context_->global_object().IsJSGlobalObject());
// If we are at the native context and have not yet seen script scope,
// fake it.
return seen_script_scope_ ? ScopeTypeGlobal : ScopeTypeScript;
@@ -481,13 +481,13 @@
case ScopeIterator::ScopeTypeWith:
os << "With:\n";
- context_->extension()->Print(os);
+ context_->extension().Print(os);
break;
case ScopeIterator::ScopeTypeCatch:
os << "Catch:\n";
- context_->extension()->Print(os);
- context_->get(Context::THROWN_OBJECT_INDEX)->Print(os);
+ context_->extension().Print(os);
+ context_->get(Context::THROWN_OBJECT_INDEX).Print(os);
break;
case ScopeIterator::ScopeTypeClosure:
@@ -502,10 +502,8 @@
case ScopeIterator::ScopeTypeScript:
os << "Script:\n";
- context_->global_object()
- ->native_context()
- ->script_context_table()
- ->Print(os);
+ context_->global_object().native_context().script_context_table().Print(
+ os);
break;
default:
@@ -521,7 +519,7 @@
} else {
DCHECK(!generator_.is_null());
SharedFunctionInfo::EnsureSourcePositionsAvailable(
- isolate_, handle(generator_->function()->shared(), isolate_));
+ isolate_, handle(generator_->function().shared(), isolate_));
return generator_->source_position();
}
}
@@ -557,7 +555,7 @@
void ScopeIterator::VisitScriptScope(const Visitor& visitor) const {
Handle<JSGlobalObject> global(context_->global_object(), isolate_);
Handle<ScriptContextTable> script_contexts(
- global->native_context()->script_context_table(), isolate_);
+ global->native_context().script_context_table(), isolate_);
// Skip the first script since that just declares 'this'.
for (int context_index = 1; context_index < script_contexts->used();
@@ -576,7 +574,7 @@
if (VisitContextLocals(visitor, scope_info, context_)) return;
int count_index = scope_info->ModuleVariableCountIndex();
- int module_variable_count = Smi::cast(scope_info->get(count_index))->value();
+ int module_variable_count = Smi::cast(scope_info->get(count_index)).value();
Handle<Module> module(context_->module(), isolate_);
@@ -645,8 +643,8 @@
DCHECK(!generator_.is_null());
FixedArray parameters_and_registers =
generator_->parameters_and_registers();
- DCHECK_LT(index, parameters_and_registers->length());
- value = handle(parameters_and_registers->get(index), isolate_);
+ DCHECK_LT(index, parameters_and_registers.length());
+ value = handle(parameters_and_registers.get(index), isolate_);
} else {
value = frame_inspector_->GetParameter(index);
@@ -664,10 +662,10 @@
FixedArray parameters_and_registers =
generator_->parameters_and_registers();
int parameter_count =
- function_->shared()->scope_info()->ParameterCount();
+ function_->shared().scope_info().ParameterCount();
index += parameter_count;
- DCHECK_LT(index, parameters_and_registers->length());
- value = handle(parameters_and_registers->get(index), isolate_);
+ DCHECK_LT(index, parameters_and_registers.length());
+ value = handle(parameters_and_registers.get(index), isolate_);
if (value->IsTheHole(isolate_)) {
value = isolate_->factory()->undefined_value();
}
@@ -715,7 +713,7 @@
// a proxy, return an empty object.
Handle<JSObject> ScopeIterator::WithContextExtension() {
DCHECK(context_->IsWithContext());
- if (context_->extension_receiver()->IsJSProxy()) {
+ if (context_->extension_receiver().IsJSProxy()) {
return isolate_->factory()->NewJSObjectWithNullProto();
}
return handle(JSObject::cast(context_->extension_receiver()), isolate_);
@@ -761,7 +759,7 @@
DCHECK(!context_->IsScriptContext());
DCHECK(!context_->IsNativeContext());
DCHECK(!context_->IsWithContext());
- if (!context_->scope_info()->CallsSloppyEval()) return;
+ if (!context_->scope_info().CallsSloppyEval()) return;
if (context_->extension_object().is_null()) return;
Handle<JSObject> extension(context_->extension_object(), isolate_);
Handle<FixedArray> keys =
@@ -771,7 +769,7 @@
for (int i = 0; i < keys->length(); i++) {
// Names of variables introduced by eval are strings.
- DCHECK(keys->get(i)->IsString());
+ DCHECK(keys->get(i).IsString());
Handle<String> key(String::cast(keys->get(i)), isolate_);
Handle<Object> value = JSReceiver::GetDataProperty(extension, key);
if (visitor(key, value)) return;
@@ -817,7 +815,7 @@
// Set the variable in the suspended generator.
DCHECK(!generator_.is_null());
int parameter_count =
- function_->shared()->scope_info()->ParameterCount();
+ function_->shared().scope_info().ParameterCount();
index += parameter_count;
Handle<FixedArray> parameters_and_registers(
generator_->parameters_and_registers(), isolate_);
@@ -854,7 +852,7 @@
Handle<Object> new_value) {
if (!context_->has_extension()) return false;
- DCHECK(context_->extension_object()->IsJSContextExtensionObject());
+ DCHECK(context_->extension_object().IsJSContextExtensionObject());
Handle<JSObject> ext(context_->extension_object(), isolate_);
LookupIterator it(isolate_, ext, variable_name, LookupIterator::OWN);
Maybe<bool> maybe = JSReceiver::HasOwnProperty(ext, variable_name);
@@ -887,7 +885,7 @@
VariableMode mode;
InitializationFlag init_flag;
MaybeAssignedFlag maybe_assigned_flag;
- cell_index = context_->scope_info()->ModuleIndex(
+ cell_index = context_->scope_info().ModuleIndex(
*variable_name, &mode, &init_flag, &maybe_assigned_flag);
// Setting imports is currently not supported.
@@ -904,7 +902,7 @@
bool ScopeIterator::SetScriptVariableValue(Handle<String> variable_name,
Handle<Object> new_value) {
Handle<ScriptContextTable> script_contexts(
- context_->global_object()->native_context()->script_context_table(),
+ context_->global_object().native_context().script_context_table(),
isolate_);
ScriptContextTable::LookupResult lookup_result;
if (ScriptContextTable::Lookup(isolate_, *script_contexts, *variable_name,
diff --git a/src/debug/debug-stack-trace-iterator.cc b/src/debug/debug-stack-trace-iterator.cc
index 9f8322f..2c2c438 100644
--- a/src/debug/debug-stack-trace-iterator.cc
+++ b/src/debug/debug-stack-trace-iterator.cc
@@ -69,9 +69,8 @@
DCHECK(!Done());
Handle<Object> context = frame_inspector_->GetContext();
if (context->IsContext()) {
- Object value =
- Context::cast(*context)->native_context()->debug_context_id();
- if (value->IsSmi()) return Smi::ToInt(value);
+ Object value = Context::cast(*context).native_context().debug_context_id();
+ if (value.IsSmi()) return Smi::ToInt(value);
}
return 0;
}
@@ -79,7 +78,7 @@
v8::MaybeLocal<v8::Value> DebugStackTraceIterator::GetReceiver() const {
DCHECK(!Done());
if (frame_inspector_->IsJavaScript() &&
- frame_inspector_->GetFunction()->shared()->kind() == kArrowFunction) {
+ frame_inspector_->GetFunction()->shared().kind() == kArrowFunction) {
// FrameInspector is not able to get receiver for arrow function.
// So let's try to fetch it using same logic as is used to retrieve 'this'
// during DebugEvaluate::Local.
diff --git a/src/debug/debug-type-profile.cc b/src/debug/debug-type-profile.cc
index efc42e6..a1692ac 100644
--- a/src/debug/debug-type-profile.cc
+++ b/src/debug/debug-type-profile.cc
@@ -26,7 +26,7 @@
for (Script script = scripts.Next(); !script.is_null();
script = scripts.Next()) {
- if (!script->IsUserJavaScript()) {
+ if (!script.IsUserJavaScript()) {
continue;
}
@@ -39,21 +39,20 @@
// the list multiple times.
for (int i = 0; i < list->Length(); i++) {
FeedbackVector vector = FeedbackVector::cast(list->Get(i));
- SharedFunctionInfo info = vector->shared_function_info();
- DCHECK(info->IsSubjectToDebugging());
+ SharedFunctionInfo info = vector.shared_function_info();
+ DCHECK(info.IsSubjectToDebugging());
// Match vectors with script.
- if (script != info->script()) {
+ if (script != info.script()) {
continue;
}
- if (!info->HasFeedbackMetadata() ||
- info->feedback_metadata()->is_empty() ||
- !info->feedback_metadata()->HasTypeProfileSlot()) {
+ if (!info.HasFeedbackMetadata() || info.feedback_metadata().is_empty() ||
+ !info.feedback_metadata().HasTypeProfileSlot()) {
continue;
}
- FeedbackSlot slot = vector->GetTypeProfileSlot();
+ FeedbackSlot slot = vector.GetTypeProfileSlot();
FeedbackNexus nexus(vector, slot);
- Handle<String> name(info->DebugName(), isolate);
+ Handle<String> name(info.DebugName(), isolate);
std::vector<int> source_positions = nexus.GetSourcePositions();
for (int position : source_positions) {
DCHECK_GE(position, 0);
@@ -89,10 +88,10 @@
for (int i = 0; i < list->Length(); i++) {
FeedbackVector vector = FeedbackVector::cast(list->Get(i));
- SharedFunctionInfo info = vector->shared_function_info();
- DCHECK(info->IsSubjectToDebugging());
- if (info->feedback_metadata()->HasTypeProfileSlot()) {
- FeedbackSlot slot = vector->GetTypeProfileSlot();
+ SharedFunctionInfo info = vector.shared_function_info();
+ DCHECK(info.IsSubjectToDebugging());
+ if (info.feedback_metadata().HasTypeProfileSlot()) {
+ FeedbackSlot slot = vector.GetTypeProfileSlot();
FeedbackNexus nexus(vector, slot);
nexus.ResetTypeProfile();
}
diff --git a/src/debug/debug.cc b/src/debug/debug.cc
index 302f4ae..d784b10 100644
--- a/src/debug/debug.cc
+++ b/src/debug/debug.cc
@@ -202,8 +202,8 @@
: debug_info_(debug_info),
break_index_(-1),
source_position_iterator_(
- debug_info->DebugBytecodeArray()->SourcePositionTable()) {
- position_ = debug_info->shared()->StartPosition();
+ debug_info->DebugBytecodeArray().SourcePositionTable()) {
+ position_ = debug_info->shared().StartPosition();
statement_position_ = position_;
// There is at least one break location.
DCHECK(!Done());
@@ -251,12 +251,12 @@
DebugBreakType BreakIterator::GetDebugBreakType() {
BytecodeArray bytecode_array = debug_info_->OriginalBytecodeArray();
interpreter::Bytecode bytecode =
- interpreter::Bytecodes::FromByte(bytecode_array->get(code_offset()));
+ interpreter::Bytecodes::FromByte(bytecode_array.get(code_offset()));
// Make sure we read the actual bytecode, not a prefix scaling bytecode.
if (interpreter::Bytecodes::IsPrefixScalingBytecode(bytecode)) {
- bytecode = interpreter::Bytecodes::FromByte(
- bytecode_array->get(code_offset() + 1));
+ bytecode =
+ interpreter::Bytecodes::FromByte(bytecode_array.get(code_offset() + 1));
}
if (bytecode == interpreter::Bytecode::kDebugger) {
@@ -296,7 +296,7 @@
DCHECK(debug_break_type >= DEBUG_BREAK_SLOT);
BytecodeArray bytecode_array = debug_info_->DebugBytecodeArray();
BytecodeArray original = debug_info_->OriginalBytecodeArray();
- bytecode_array->set(code_offset(), original->get(code_offset()));
+ bytecode_array.set(code_offset(), original.get(code_offset()));
}
BreakLocation BreakIterator::GetBreakLocation() {
@@ -541,8 +541,8 @@
FrameSummary summary = FrameSummary::GetTop(frame);
DCHECK(!summary.IsWasm());
Handle<JSFunction> function = summary.AsJavaScript().function();
- if (!function->shared()->HasBreakInfo()) return false;
- Handle<DebugInfo> debug_info(function->shared()->GetDebugInfo(), isolate_);
+ if (!function->shared().HasBreakInfo()) return false;
+ Handle<DebugInfo> debug_info(function->shared().GetDebugInfo(), isolate_);
// Enter the debugger.
DebugScope debug_scope(this);
std::vector<BreakLocation> break_locations;
@@ -563,7 +563,7 @@
bool is_break_at_entry) {
HandleScope scope(isolate_);
- if (!break_point->condition()->length()) return true;
+ if (!break_point->condition().length()) return true;
Handle<String> condition(break_point->condition(), isolate_);
MaybeHandle<Object> maybe_result;
Handle<Object> result;
@@ -685,13 +685,13 @@
} else {
if (!debug_info->HasInstrumentedBytecodeArray()) return;
FixedArray break_points = debug_info->break_points();
- for (int i = 0; i < break_points->length(); i++) {
- if (break_points->get(i)->IsUndefined(isolate_)) continue;
- BreakPointInfo info = BreakPointInfo::cast(break_points->get(i));
- if (info->GetBreakPointCount(isolate_) == 0) continue;
+ for (int i = 0; i < break_points.length(); i++) {
+ if (break_points.get(i).IsUndefined(isolate_)) continue;
+ BreakPointInfo info = BreakPointInfo::cast(break_points.get(i));
+ if (info.GetBreakPointCount(isolate_) == 0) continue;
DCHECK(debug_info->HasInstrumentedBytecodeArray());
BreakIterator it(debug_info);
- it.SkipToPosition(info->source_position());
+ it.SkipToPosition(info.source_position());
it.SetDebugBreak();
}
}
@@ -873,7 +873,7 @@
thread_local_.last_step_action_ = StepIn;
UpdateHookOnFunctionCall();
Handle<JSFunction> function(
- JSGeneratorObject::cast(thread_local_.suspended_generator_)->function(),
+ JSGeneratorObject::cast(thread_local_.suspended_generator_).function(),
isolate_);
FloodWithOneShot(Handle<SharedFunctionInfo>(function->shared(), isolate_));
clear_suspended_generator();
@@ -977,12 +977,12 @@
if (frame->is_wasm_compiled()) return;
WasmInterpreterEntryFrame* wasm_frame =
WasmInterpreterEntryFrame::cast(frame);
- wasm_frame->debug_info()->PrepareStep(step_action);
+ wasm_frame->debug_info().PrepareStep(step_action);
return;
}
JavaScriptFrame* js_frame = JavaScriptFrame::cast(frame);
- DCHECK(js_frame->function()->IsJSFunction());
+ DCHECK(js_frame->function().IsJSFunction());
// Get the debug info (create it if it does not exist).
auto summary = FrameSummary::GetTop(frame).AsJavaScript();
@@ -1089,15 +1089,15 @@
Handle<FixedArray> locations = isolate->factory()->NewFixedArray(
debug_info->GetBreakPointCount(isolate));
int count = 0;
- for (int i = 0; i < debug_info->break_points()->length(); ++i) {
- if (!debug_info->break_points()->get(i)->IsUndefined(isolate)) {
+ for (int i = 0; i < debug_info->break_points().length(); ++i) {
+ if (!debug_info->break_points().get(i).IsUndefined(isolate)) {
BreakPointInfo break_point_info =
- BreakPointInfo::cast(debug_info->break_points()->get(i));
- int break_points = break_point_info->GetBreakPointCount(isolate);
+ BreakPointInfo::cast(debug_info->break_points().get(i));
+ int break_points = break_point_info.GetBreakPointCount(isolate);
if (break_points == 0) continue;
for (int j = 0; j < break_points; ++j) {
locations->set(count++,
- Smi::FromInt(break_point_info->source_position()));
+ Smi::FromInt(break_point_info.source_position()));
}
}
}
@@ -1148,8 +1148,8 @@
do {
Code code = iterator.Next();
if (code.is_null()) break;
- if (code->Inlines(*shared)) {
- code->set_marked_for_deoptimization(true);
+ if (code.Inlines(*shared)) {
+ code.set_marked_for_deoptimization(true);
found_something = true;
}
} while (true);
@@ -1213,7 +1213,7 @@
current = current->next()) {
if (current->debug_info()->CanBreakAtEntry()) {
needs_to_use_trampoline = true;
- if (current->debug_info()->shared()->IsApiFunction()) {
+ if (current->debug_info()->shared().IsApiFunction()) {
needs_to_clear_ic = true;
break;
}
@@ -1229,23 +1229,23 @@
HeapIterator iterator(isolate_->heap());
for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) {
- if (needs_to_clear_ic && obj->IsFeedbackVector()) {
- FeedbackVector::cast(obj)->ClearSlots(isolate_);
+ if (needs_to_clear_ic && obj.IsFeedbackVector()) {
+ FeedbackVector::cast(obj).ClearSlots(isolate_);
continue;
- } else if (obj->IsJSFunction()) {
+ } else if (obj.IsJSFunction()) {
JSFunction fun = JSFunction::cast(obj);
- SharedFunctionInfo shared = fun->shared();
- if (!shared->HasDebugInfo()) continue;
- if (!shared->GetDebugInfo()->CanBreakAtEntry()) continue;
- if (!fun->is_compiled()) {
+ SharedFunctionInfo shared = fun.shared();
+ if (!shared.HasDebugInfo()) continue;
+ if (!shared.GetDebugInfo().CanBreakAtEntry()) continue;
+ if (!fun.is_compiled()) {
needs_compile.push_back(handle(fun, isolate_));
} else {
- fun->set_code(*trampoline);
+ fun.set_code(*trampoline);
}
- } else if (obj->IsAccessorPair()) {
+ } else if (obj.IsAccessorPair()) {
AccessorPair accessor_pair = AccessorPair::cast(obj);
- if (accessor_pair->getter()->IsFunctionTemplateInfo() ||
- accessor_pair->setter()->IsFunctionTemplateInfo()) {
+ if (accessor_pair.getter().IsFunctionTemplateInfo() ||
+ accessor_pair.setter().IsFunctionTemplateInfo()) {
needs_instantiate.push_back(handle(accessor_pair, isolate_));
}
}
@@ -1255,7 +1255,7 @@
// Forcibly instantiate all lazy accessor pairs to make sure that they
// properly hit the debug break trampoline.
for (Handle<AccessorPair> accessor_pair : needs_instantiate) {
- if (accessor_pair->getter()->IsFunctionTemplateInfo()) {
+ if (accessor_pair->getter().IsFunctionTemplateInfo()) {
Handle<JSFunction> fun =
ApiNatives::InstantiateFunction(
handle(FunctionTemplateInfo::cast(accessor_pair->getter()),
@@ -1263,7 +1263,7 @@
.ToHandleChecked();
accessor_pair->set_getter(*fun);
}
- if (accessor_pair->setter()->IsFunctionTemplateInfo()) {
+ if (accessor_pair->setter().IsFunctionTemplateInfo()) {
Handle<JSFunction> fun =
ApiNatives::InstantiateFunction(
handle(FunctionTemplateInfo::cast(accessor_pair->setter()),
@@ -1330,12 +1330,12 @@
SharedFunctionInfo::ScriptIterator iterator(isolate_, *script);
for (SharedFunctionInfo info = iterator.Next(); !info.is_null();
info = iterator.Next()) {
- if (info->EndPosition() < start_position ||
- info->StartPosition() >= end_position) {
+ if (info.EndPosition() < start_position ||
+ info.StartPosition() >= end_position) {
continue;
}
- if (!info->IsSubjectToDebugging()) continue;
- if (!info->is_compiled() && !info->allows_lazy_compilation()) continue;
+ if (!info.IsSubjectToDebugging()) continue;
+ if (!info.is_compiled() && !info.allows_lazy_compilation()) continue;
candidates.push_back(i::handle(info, isolate_));
}
@@ -1394,26 +1394,26 @@
void NewCandidate(SharedFunctionInfo shared,
JSFunction closure = JSFunction()) {
- if (!shared->IsSubjectToDebugging()) return;
- int start_position = shared->function_token_position();
+ if (!shared.IsSubjectToDebugging()) return;
+ int start_position = shared.function_token_position();
if (start_position == kNoSourcePosition) {
- start_position = shared->StartPosition();
+ start_position = shared.StartPosition();
}
if (start_position > target_position_) return;
- if (target_position_ > shared->EndPosition()) return;
+ if (target_position_ > shared.EndPosition()) return;
if (!current_candidate_.is_null()) {
if (current_start_position_ == start_position &&
- shared->EndPosition() == current_candidate_->EndPosition()) {
+ shared.EndPosition() == current_candidate_.EndPosition()) {
// If we already have a matching closure, do not throw it away.
if (!current_candidate_closure_.is_null() && closure.is_null()) return;
// If a top-level function contains only one function
// declaration the source for the top-level and the function
// is the same. In that case prefer the non top-level function.
- if (!current_candidate_->is_toplevel() && shared->is_toplevel()) return;
+ if (!current_candidate_.is_toplevel() && shared.is_toplevel()) return;
} else if (start_position < current_start_position_ ||
- current_candidate_->EndPosition() < shared->EndPosition()) {
+ current_candidate_.EndPosition() < shared.EndPosition()) {
return;
}
}
@@ -1462,7 +1462,7 @@
shared = finder.Result();
if (shared.is_null()) break;
// We found it if it's already compiled.
- is_compiled_scope = shared->is_compiled_scope();
+ is_compiled_scope = shared.is_compiled_scope();
if (is_compiled_scope.is_compiled()) {
Handle<SharedFunctionInfo> shared_handle(shared, isolate_);
// If the iteration count is larger than 1, we had to compile the outer
@@ -1479,7 +1479,7 @@
// If not, compile to reveal inner functions.
HandleScope scope(isolate_);
// Code that cannot be compiled lazily are internal and not debuggable.
- DCHECK(shared->allows_lazy_compilation());
+ DCHECK(shared.allows_lazy_compilation());
if (!Compiler::Compile(handle(shared, isolate_), Compiler::CLEAR_EXCEPTION,
&is_compiled_scope)) {
break;
@@ -1617,7 +1617,7 @@
// Pack script back into the
// SFI::script_or_debug_info field.
Handle<DebugInfo> debug_info(node->debug_info());
- debug_info->shared()->set_script_or_debug_info(debug_info->script());
+ debug_info->shared().set_script_or_debug_info(debug_info->script());
delete node;
}
@@ -1626,7 +1626,7 @@
HandleScope scope(isolate_);
// Get the executing function in which the debug break occurred.
- Handle<SharedFunctionInfo> shared(frame->function()->shared(), isolate_);
+ Handle<SharedFunctionInfo> shared(frame->function().shared(), isolate_);
// With no debug info there are no break points, so we can't be at a return.
if (!shared->HasBreakInfo()) return false;
@@ -1673,7 +1673,7 @@
Script::Iterator iterator(isolate_);
for (Script script = iterator.Next(); !script.is_null();
script = iterator.Next()) {
- if (script->HasValidSource()) results->set(length++, script);
+ if (script.HasValidSource()) results->set(length++, script);
}
}
return FixedArray::ShrinkOrEmpty(isolate_, results, length);
@@ -1806,7 +1806,7 @@
// This array contains breakpoints installed using JS debug API.
for (int i = 0; i < break_points_hit->length(); ++i) {
BreakPoint break_point = BreakPoint::cast(break_points_hit->get(i));
- inspector_break_points_hit.push_back(break_point->id());
+ inspector_break_points_hit.push_back(break_point.id());
++inspector_break_points_count;
}
@@ -1834,13 +1834,13 @@
Handle<DebugInfo> debug_info = GetOrCreateDebugInfo(shared);
if (!debug_info->computed_debug_is_blackboxed()) {
bool is_blackboxed =
- !shared->IsSubjectToDebugging() || !shared->script()->IsScript();
+ !shared->IsSubjectToDebugging() || !shared->script().IsScript();
if (!is_blackboxed) {
SuppressDebug while_processing(this);
HandleScope handle_scope(isolate_);
PostponeInterruptsScope no_interrupts(isolate_);
DisableBreak no_recursive_break(this);
- DCHECK(shared->script()->IsScript());
+ DCHECK(shared->script().IsScript());
Handle<Script> script(Script::cast(shared->script()), isolate_);
DCHECK(script->IsUserJavaScript());
debug::Location start = GetDebugLocation(script, shared->StartPosition());
@@ -1978,7 +1978,7 @@
{ JavaScriptFrameIterator it(isolate_);
DCHECK(!it.done());
Object fun = it.frame()->function();
- if (fun->IsJSFunction()) {
+ if (fun.IsJSFunction()) {
HandleScope scope(isolate_);
Handle<JSFunction> function(JSFunction::cast(fun), isolate_);
// Don't stop in builtin and blackboxed functions.
@@ -2091,7 +2091,7 @@
Handle<DebugInfo> debug_info = current->debug_info();
if (debug_info->HasInstrumentedBytecodeArray() &&
debug_info->DebugExecutionMode() != isolate_->debug_execution_mode()) {
- DCHECK(debug_info->shared()->HasBytecodeArray());
+ DCHECK(debug_info->shared().HasBytecodeArray());
if (isolate_->debug_execution_mode() == DebugInfo::kBreakpoints) {
ClearSideEffectChecks(debug_info);
ApplyBreakPoints(debug_info);
@@ -2173,7 +2173,7 @@
Handle<Object> receiver) {
DCHECK_EQ(isolate_->debug_execution_mode(), DebugInfo::kSideEffects);
DisallowJavascriptExecution no_js(isolate_);
- IsCompiledScope is_compiled_scope(function->shared()->is_compiled_scope());
+ IsCompiledScope is_compiled_scope(function->shared().is_compiled_scope());
if (!function->is_compiled() &&
!Compiler::Compile(function, Compiler::KEEP_EXCEPTION,
&is_compiled_scope)) {
@@ -2188,7 +2188,7 @@
case DebugInfo::kHasSideEffects:
if (FLAG_trace_side_effect_free_debug_evaluate) {
PrintF("[debug-evaluate] Function %s failed side effect check.\n",
- function->shared()->DebugName()->ToCString().get());
+ function->shared().DebugName().ToCString().get());
}
side_effect_check_failed_ = true;
// Throw an uncatchable termination exception.
@@ -2225,7 +2225,7 @@
DCHECK_EQ(!receiver.is_null(), callback_info->IsAccessorInfo());
DCHECK_EQ(isolate_->debug_execution_mode(), DebugInfo::kSideEffects);
if (!callback_info.is_null() && callback_info->IsCallHandlerInfo() &&
- i::CallHandlerInfo::cast(*callback_info)->NextCallHasNoSideEffect()) {
+ i::CallHandlerInfo::cast(*callback_info).NextCallHasNoSideEffect()) {
return true;
}
// TODO(7515): always pass a valid callback info object.
@@ -2234,8 +2234,8 @@
// List of whitelisted internal accessors can be found in accessors.h.
AccessorInfo info = AccessorInfo::cast(*callback_info);
DCHECK_NE(kNotAccessor, accessor_kind);
- switch (accessor_kind == kSetter ? info->setter_side_effect_type()
- : info->getter_side_effect_type()) {
+ switch (accessor_kind == kSetter ? info.setter_side_effect_type()
+ : info.getter_side_effect_type()) {
case SideEffectType::kHasNoSideEffect:
// We do not support setter accessors with no side effects, since
// calling set accessors go through a store bytecode. Store bytecodes
@@ -2252,18 +2252,18 @@
}
if (FLAG_trace_side_effect_free_debug_evaluate) {
PrintF("[debug-evaluate] API Callback '");
- info->name()->ShortPrint();
+ info.name().ShortPrint();
PrintF("' may cause side effect.\n");
}
} else if (callback_info->IsInterceptorInfo()) {
InterceptorInfo info = InterceptorInfo::cast(*callback_info);
- if (info->has_no_side_effect()) return true;
+ if (info.has_no_side_effect()) return true;
if (FLAG_trace_side_effect_free_debug_evaluate) {
PrintF("[debug-evaluate] API Interceptor may cause side effect.\n");
}
} else if (callback_info->IsCallHandlerInfo()) {
CallHandlerInfo info = CallHandlerInfo::cast(*callback_info);
- if (info->IsSideEffectFreeCallHandlerInfo()) return true;
+ if (info.IsSideEffectFreeCallHandlerInfo()) return true;
if (FLAG_trace_side_effect_free_debug_evaluate) {
PrintF("[debug-evaluate] API CallHandlerInfo may cause side effect.\n");
}
@@ -2280,8 +2280,8 @@
using interpreter::Bytecode;
DCHECK_EQ(isolate_->debug_execution_mode(), DebugInfo::kSideEffects);
- SharedFunctionInfo shared = frame->function()->shared();
- BytecodeArray bytecode_array = shared->GetBytecodeArray();
+ SharedFunctionInfo shared = frame->function().shared();
+ BytecodeArray bytecode_array = shared.GetBytecodeArray();
int offset = frame->GetBytecodeOffset();
interpreter::BytecodeArrayAccessor bytecode_accessor(
handle(bytecode_array, isolate_), offset);
diff --git a/src/debug/liveedit.cc b/src/debug/liveedit.cc
index 2f8eb55..a8e4606 100644
--- a/src/debug/liveedit.cc
+++ b/src/debug/liveedit.cc
@@ -811,12 +811,12 @@
}
bool Lookup(SharedFunctionInfo sfi, FunctionData** data) {
- int start_position = sfi->StartPosition();
- if (!sfi->script()->IsScript() || start_position == -1) {
+ int start_position = sfi.StartPosition();
+ if (!sfi.script().IsScript() || start_position == -1) {
return false;
}
- Script script = Script::cast(sfi->script());
- return Lookup(GetFuncId(script->id(), sfi), data);
+ Script script = Script::cast(sfi.script());
+ return Lookup(GetFuncId(script.id(), sfi), data);
}
bool Lookup(Handle<Script> script, FunctionLiteral* literal,
@@ -829,21 +829,21 @@
HeapIterator iterator(isolate->heap(), HeapIterator::kFilterUnreachable);
for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) {
- if (obj->IsSharedFunctionInfo()) {
+ if (obj.IsSharedFunctionInfo()) {
SharedFunctionInfo sfi = SharedFunctionInfo::cast(obj);
FunctionData* data = nullptr;
if (!Lookup(sfi, &data)) continue;
data->shared = handle(sfi, isolate);
- } else if (obj->IsJSFunction()) {
+ } else if (obj.IsJSFunction()) {
JSFunction js_function = JSFunction::cast(obj);
- SharedFunctionInfo sfi = js_function->shared();
+ SharedFunctionInfo sfi = js_function.shared();
FunctionData* data = nullptr;
if (!Lookup(sfi, &data)) continue;
data->js_functions.emplace_back(js_function, isolate);
- } else if (obj->IsJSGeneratorObject()) {
+ } else if (obj.IsJSGeneratorObject()) {
JSGeneratorObject gen = JSGeneratorObject::cast(obj);
- if (gen->is_closed()) continue;
- SharedFunctionInfo sfi = gen->function()->shared();
+ if (gen.is_closed()) continue;
+ SharedFunctionInfo sfi = gen.function().shared();
FunctionData* data = nullptr;
if (!Lookup(sfi, &data)) continue;
data->running_generators.emplace_back(gen, isolate);
@@ -903,10 +903,10 @@
}
FuncId GetFuncId(int script_id, SharedFunctionInfo sfi) {
- DCHECK_EQ(script_id, Script::cast(sfi->script())->id());
- int start_position = sfi->StartPosition();
+ DCHECK_EQ(script_id, Script::cast(sfi.script()).id());
+ int start_position = sfi.StartPosition();
DCHECK_NE(start_position, -1);
- if (sfi->is_toplevel()) {
+ if (sfi.is_toplevel()) {
// This is the top-level function, so special case its start position
DCHECK_EQ(start_position, 0);
start_position = -1;
@@ -1119,10 +1119,10 @@
sfi->set_script(*new_script);
if (sfi->HasUncompiledData()) {
- sfi->uncompiled_data()->set_function_literal_id(
+ sfi->uncompiled_data().set_function_literal_id(
mapping.second->function_literal_id());
}
- new_script->shared_function_infos()->Set(
+ new_script->shared_function_infos().Set(
mapping.second->function_literal_id(), HeapObjectReference::Weak(*sfi));
DCHECK_EQ(sfi->FunctionLiteralId(isolate),
mapping.second->function_literal_id());
@@ -1144,11 +1144,11 @@
}
if (!sfi->HasBytecodeArray()) continue;
- FixedArray constants = sfi->GetBytecodeArray()->constant_pool();
- for (int i = 0; i < constants->length(); ++i) {
- if (!constants->get(i)->IsSharedFunctionInfo()) continue;
+ FixedArray constants = sfi->GetBytecodeArray().constant_pool();
+ for (int i = 0; i < constants.length(); ++i) {
+ if (!constants.get(i).IsSharedFunctionInfo()) continue;
FunctionData* data = nullptr;
- if (!function_data_map.Lookup(SharedFunctionInfo::cast(constants->get(i)),
+ if (!function_data_map.Lookup(SharedFunctionInfo::cast(constants.get(i)),
&data)) {
continue;
}
@@ -1159,7 +1159,7 @@
}
Handle<SharedFunctionInfo> new_sfi;
if (!data->shared.ToHandle(&new_sfi)) continue;
- constants->set(i, *new_sfi);
+ constants.set(i, *new_sfi);
}
}
for (const auto& mapping : changed) {
@@ -1176,7 +1176,7 @@
isolate->compilation_cache()->Remove(sfi);
for (auto& js_function : data->js_functions) {
js_function->set_shared(*new_sfi);
- js_function->set_code(js_function->shared()->GetCode());
+ js_function->set_code(js_function->shared().GetCode());
js_function->set_raw_feedback_cell(
*isolate->factory()->many_closures_cell());
@@ -1186,30 +1186,29 @@
}
SharedFunctionInfo::ScriptIterator it(isolate, *new_script);
for (SharedFunctionInfo sfi = it.Next(); !sfi.is_null(); sfi = it.Next()) {
- if (!sfi->HasBytecodeArray()) continue;
- FixedArray constants = sfi->GetBytecodeArray()->constant_pool();
- for (int i = 0; i < constants->length(); ++i) {
- if (!constants->get(i)->IsSharedFunctionInfo()) continue;
- SharedFunctionInfo inner_sfi =
- SharedFunctionInfo::cast(constants->get(i));
+ if (!sfi.HasBytecodeArray()) continue;
+ FixedArray constants = sfi.GetBytecodeArray().constant_pool();
+ for (int i = 0; i < constants.length(); ++i) {
+ if (!constants.get(i).IsSharedFunctionInfo()) continue;
+ SharedFunctionInfo inner_sfi = SharedFunctionInfo::cast(constants.get(i));
// See if there is a mapping from this function's start position to a
// unchanged function's id.
auto unchanged_it =
- start_position_to_unchanged_id.find(inner_sfi->StartPosition());
+ start_position_to_unchanged_id.find(inner_sfi.StartPosition());
if (unchanged_it == start_position_to_unchanged_id.end()) continue;
// Grab that function id from the new script's SFI list, which should have
// already been updated in in the unchanged pass.
SharedFunctionInfo old_unchanged_inner_sfi =
SharedFunctionInfo::cast(new_script->shared_function_infos()
- ->Get(unchanged_it->second)
+ .Get(unchanged_it->second)
->GetHeapObject());
if (old_unchanged_inner_sfi == inner_sfi) continue;
DCHECK_NE(old_unchanged_inner_sfi, inner_sfi);
// Now some sanity checks. Make sure that the unchanged SFI has already
// been processed and patched to be on the new script ...
- DCHECK_EQ(old_unchanged_inner_sfi->script(), *new_script);
- constants->set(i, old_unchanged_inner_sfi);
+ DCHECK_EQ(old_unchanged_inner_sfi.script(), *new_script);
+ constants.set(i, old_unchanged_inner_sfi);
}
}
#ifdef DEBUG
@@ -1222,28 +1221,28 @@
SharedFunctionInfo::ScriptIterator it(isolate, *new_script);
std::set<int> start_positions;
for (SharedFunctionInfo sfi = it.Next(); !sfi.is_null(); sfi = it.Next()) {
- DCHECK_EQ(sfi->script(), *new_script);
- DCHECK_EQ(sfi->FunctionLiteralId(isolate), it.CurrentIndex());
+ DCHECK_EQ(sfi.script(), *new_script);
+ DCHECK_EQ(sfi.FunctionLiteralId(isolate), it.CurrentIndex());
// Don't check the start position of the top-level function, as it can
// overlap with a function in the script.
- if (sfi->is_toplevel()) {
- DCHECK_EQ(start_positions.find(sfi->StartPosition()),
+ if (sfi.is_toplevel()) {
+ DCHECK_EQ(start_positions.find(sfi.StartPosition()),
start_positions.end());
- start_positions.insert(sfi->StartPosition());
+ start_positions.insert(sfi.StartPosition());
}
- if (!sfi->HasBytecodeArray()) continue;
+ if (!sfi.HasBytecodeArray()) continue;
// Check that all the functions in this function's constant pool are also
// on the new script, and that their id matches their index in the new
// scripts function list.
- FixedArray constants = sfi->GetBytecodeArray()->constant_pool();
- for (int i = 0; i < constants->length(); ++i) {
- if (!constants->get(i)->IsSharedFunctionInfo()) continue;
+ FixedArray constants = sfi.GetBytecodeArray().constant_pool();
+ for (int i = 0; i < constants.length(); ++i) {
+ if (!constants.get(i).IsSharedFunctionInfo()) continue;
SharedFunctionInfo inner_sfi =
- SharedFunctionInfo::cast(constants->get(i));
- DCHECK_EQ(inner_sfi->script(), *new_script);
+ SharedFunctionInfo::cast(constants.get(i));
+ DCHECK_EQ(inner_sfi.script(), *new_script);
DCHECK_EQ(inner_sfi, new_script->shared_function_infos()
- ->Get(inner_sfi->FunctionLiteralId(isolate))
+ .Get(inner_sfi.FunctionLiteralId(isolate))
->GetHeapObject());
}
}
diff --git a/src/deoptimizer/deoptimizer.cc b/src/deoptimizer/deoptimizer.cc
index e9fabd3..f738f23 100644
--- a/src/deoptimizer/deoptimizer.cc
+++ b/src/deoptimizer/deoptimizer.cc
@@ -54,7 +54,7 @@
}
void PushRawObject(Object obj, const char* debug_hint) {
- intptr_t value = obj->ptr();
+ intptr_t value = obj.ptr();
PushValue(value);
if (trace_scope_ != nullptr) {
DebugPrintOutputObject(obj, top_offset_, debug_hint);
@@ -121,10 +121,10 @@
if (trace_scope_ != nullptr) {
PrintF(trace_scope_->file(), " " V8PRIxPTR_FMT ": [top + %3d] <- ",
output_address(output_offset), output_offset);
- if (obj->IsSmi()) {
- PrintF(V8PRIxPTR_FMT " <Smi %d>", obj->ptr(), Smi::cast(obj)->value());
+ if (obj.IsSmi()) {
+ PrintF(V8PRIxPTR_FMT " <Smi %d>", obj.ptr(), Smi::cast(obj).value());
} else {
- obj->ShortPrint(trace_scope_->file());
+ obj.ShortPrint(trace_scope_->file());
}
PrintF(trace_scope_->file(), " ; %s", debug_hint);
}
@@ -156,16 +156,16 @@
}
Code Deoptimizer::FindDeoptimizingCode(Address addr) {
- if (function_->IsHeapObject()) {
+ if (function_.IsHeapObject()) {
// Search all deoptimizing code in the native context of the function.
Isolate* isolate = isolate_;
- Context native_context = function_->context()->native_context();
- Object element = native_context->DeoptimizedCodeListHead();
- while (!element->IsUndefined(isolate)) {
+ Context native_context = function_.context().native_context();
+ Object element = native_context.DeoptimizedCodeListHead();
+ while (!element.IsUndefined(isolate)) {
Code code = Code::cast(element);
- CHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
- if (code->contains(addr)) return code;
- element = code->next_code_link();
+ CHECK(code.kind() == Code::OPTIMIZED_FUNCTION);
+ if (code.contains(addr)) return code;
+ element = code.next_code_link();
}
}
return Code();
@@ -244,15 +244,15 @@
for (StackFrameIterator it(isolate, top); !it.done(); it.Advance()) {
if (it.frame()->type() == StackFrame::OPTIMIZED) {
Code code = it.frame()->LookupCode();
- if (code->kind() == Code::OPTIMIZED_FUNCTION &&
- code->marked_for_deoptimization()) {
+ if (code.kind() == Code::OPTIMIZED_FUNCTION &&
+ code.marked_for_deoptimization()) {
codes_->erase(code);
// Obtain the trampoline to the deoptimizer call.
- SafepointEntry safepoint = code->GetSafepointEntry(it.frame()->pc());
+ SafepointEntry safepoint = code.GetSafepointEntry(it.frame()->pc());
int trampoline_pc = safepoint.trampoline_pc();
DCHECK_IMPLIES(code == topmost_, safe_to_deopt_);
// Replace the current pc on the stack with the trampoline.
- it.frame()->set_pc(code->raw_instruction_start() + trampoline_pc);
+ it.frame()->set_pc(code.raw_instruction_start() + trampoline_pc);
}
}
}
@@ -273,7 +273,7 @@
void Deoptimizer::DeoptimizeMarkedCodeForContext(Context context) {
DisallowHeapAllocation no_allocation;
- Isolate* isolate = context->GetIsolate();
+ Isolate* isolate = context.GetIsolate();
Code topmost_optimized_code;
bool safe_to_deopt_topmost_optimized_code = false;
#ifdef DEBUG
@@ -290,14 +290,14 @@
if (FLAG_trace_deopt) {
CodeTracer::Scope scope(isolate->GetCodeTracer());
PrintF(scope.file(), "[deoptimizer found activation of function: ");
- function->PrintName(scope.file());
+ function.PrintName(scope.file());
PrintF(scope.file(), " / %" V8PRIxPTR "]\n", function.ptr());
}
- SafepointEntry safepoint = code->GetSafepointEntry(it.frame()->pc());
+ SafepointEntry safepoint = code.GetSafepointEntry(it.frame()->pc());
// Turbofan deopt is checked when we are patching addresses on stack.
bool safe_if_deopt_triggered = safepoint.has_deoptimization_index();
- bool is_builtin_code = code->kind() == Code::BUILTIN;
+ bool is_builtin_code = code.kind() == Code::BUILTIN;
DCHECK(topmost_optimized_code.is_null() || safe_if_deopt_triggered ||
is_builtin_code);
if (topmost_optimized_code.is_null()) {
@@ -315,26 +315,26 @@
// Move marked code from the optimized code list to the deoptimized code list.
// Walk over all optimized code objects in this native context.
Code prev;
- Object element = context->OptimizedCodeListHead();
- while (!element->IsUndefined(isolate)) {
+ Object element = context.OptimizedCodeListHead();
+ while (!element.IsUndefined(isolate)) {
Code code = Code::cast(element);
- CHECK_EQ(code->kind(), Code::OPTIMIZED_FUNCTION);
- Object next = code->next_code_link();
+ CHECK_EQ(code.kind(), Code::OPTIMIZED_FUNCTION);
+ Object next = code.next_code_link();
- if (code->marked_for_deoptimization()) {
+ if (code.marked_for_deoptimization()) {
codes.insert(code);
if (!prev.is_null()) {
// Skip this code in the optimized code list.
- prev->set_next_code_link(next);
+ prev.set_next_code_link(next);
} else {
// There was no previous node, the next node is the new head.
- context->SetOptimizedCodeListHead(next);
+ context.SetOptimizedCodeListHead(next);
}
// Move the code to the _deoptimized_ code list.
- code->set_next_code_link(context->DeoptimizedCodeListHead());
- context->SetDeoptimizedCodeListHead(code);
+ code.set_next_code_link(context.DeoptimizedCodeListHead());
+ context.SetDeoptimizedCodeListHead(code);
} else {
// Not marked; preserve this element.
prev = code;
@@ -372,11 +372,11 @@
DisallowHeapAllocation no_allocation;
// For all contexts, mark all code, then deoptimize.
Object context = isolate->heap()->native_contexts_list();
- while (!context->IsUndefined(isolate)) {
+ while (!context.IsUndefined(isolate)) {
Context native_context = Context::cast(context);
MarkAllCodeForContext(native_context);
DeoptimizeMarkedCodeForContext(native_context);
- context = native_context->next_context_link();
+ context = native_context.next_context_link();
}
}
@@ -392,47 +392,47 @@
DisallowHeapAllocation no_allocation;
// For all contexts, deoptimize code already marked.
Object context = isolate->heap()->native_contexts_list();
- while (!context->IsUndefined(isolate)) {
+ while (!context.IsUndefined(isolate)) {
Context native_context = Context::cast(context);
DeoptimizeMarkedCodeForContext(native_context);
- context = native_context->next_context_link();
+ context = native_context.next_context_link();
}
}
void Deoptimizer::MarkAllCodeForContext(Context context) {
- Object element = context->OptimizedCodeListHead();
- Isolate* isolate = context->GetIsolate();
- while (!element->IsUndefined(isolate)) {
+ Object element = context.OptimizedCodeListHead();
+ Isolate* isolate = context.GetIsolate();
+ while (!element.IsUndefined(isolate)) {
Code code = Code::cast(element);
- CHECK_EQ(code->kind(), Code::OPTIMIZED_FUNCTION);
- code->set_marked_for_deoptimization(true);
- element = code->next_code_link();
+ CHECK_EQ(code.kind(), Code::OPTIMIZED_FUNCTION);
+ code.set_marked_for_deoptimization(true);
+ element = code.next_code_link();
}
}
void Deoptimizer::DeoptimizeFunction(JSFunction function, Code code) {
- Isolate* isolate = function->GetIsolate();
+ Isolate* isolate = function.GetIsolate();
RuntimeCallTimerScope runtimeTimer(isolate,
RuntimeCallCounterId::kDeoptimizeCode);
TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
TRACE_EVENT0("v8", "V8.DeoptimizeCode");
- function->ResetIfBytecodeFlushed();
- if (code.is_null()) code = function->code();
+ function.ResetIfBytecodeFlushed();
+ if (code.is_null()) code = function.code();
- if (code->kind() == Code::OPTIMIZED_FUNCTION) {
+ if (code.kind() == Code::OPTIMIZED_FUNCTION) {
// Mark the code for deoptimization and unlink any functions that also
// refer to that code. The code cannot be shared across native contexts,
// so we only need to search one.
- code->set_marked_for_deoptimization(true);
+ code.set_marked_for_deoptimization(true);
// The code in the function's optimized code feedback vector slot might
// be different from the code on the function - evict it if necessary.
- function->feedback_vector()->EvictOptimizedCodeMarkedForDeoptimization(
- function->shared(), "unlinking code marked for deopt");
- if (!code->deopt_already_counted()) {
- function->feedback_vector()->increment_deopt_count();
- code->set_deopt_already_counted(true);
+ function.feedback_vector().EvictOptimizedCodeMarkedForDeoptimization(
+ function.shared(), "unlinking code marked for deopt");
+ if (!code.deopt_already_counted()) {
+ function.feedback_vector().increment_deopt_count();
+ code.set_deopt_already_counted(true);
}
- DeoptimizeMarkedCodeForContext(function->context()->native_context());
+ DeoptimizeMarkedCodeForContext(function.context().native_context());
}
}
@@ -485,7 +485,7 @@
compiled_code_ = FindOptimizedCode();
DCHECK(!compiled_code_.is_null());
- DCHECK(function->IsJSFunction());
+ DCHECK(function.IsJSFunction());
trace_scope_ = FLAG_trace_deopt
? new CodeTracer::Scope(isolate->GetCodeTracer())
: nullptr;
@@ -493,8 +493,8 @@
DCHECK(AllowHeapAllocation::IsAllowed());
disallow_heap_allocation_ = new DisallowHeapAllocation();
#endif // DEBUG
- if (compiled_code_->kind() != Code::OPTIMIZED_FUNCTION ||
- !compiled_code_->deopt_already_counted()) {
+ if (compiled_code_.kind() != Code::OPTIMIZED_FUNCTION ||
+ !compiled_code_.deopt_already_counted()) {
// If the function is optimized, and we haven't counted that deopt yet, then
// increment the function's deopt count so that we can avoid optimising
// functions that deopt too often.
@@ -504,17 +504,16 @@
// that can eventually lead to disabling optimization for a function.
isolate->counters()->soft_deopts_executed()->Increment();
} else if (!function.is_null()) {
- function->feedback_vector()->increment_deopt_count();
+ function.feedback_vector().increment_deopt_count();
}
}
- if (compiled_code_->kind() == Code::OPTIMIZED_FUNCTION) {
- compiled_code_->set_deopt_already_counted(true);
+ if (compiled_code_.kind() == Code::OPTIMIZED_FUNCTION) {
+ compiled_code_.set_deopt_already_counted(true);
PROFILE(isolate_,
CodeDeoptEvent(compiled_code_, kind, from_, fp_to_sp_delta_));
}
unsigned size = ComputeInputFrameSize();
- int parameter_count =
- function->shared()->internal_formal_parameter_count() + 1;
+ int parameter_count = function.shared().internal_formal_parameter_count() + 1;
input_ = new (size) FrameDescription(size, parameter_count);
}
@@ -525,11 +524,11 @@
}
void Deoptimizer::PrintFunctionName() {
- if (function_->IsHeapObject() && function_->IsJSFunction()) {
- function_->ShortPrint(trace_scope_->file());
+ if (function_.IsHeapObject() && function_.IsJSFunction()) {
+ function_.ShortPrint(trace_scope_->file());
} else {
PrintF(trace_scope_->file(), "%s",
- Code::Kind2String(compiled_code_->kind()));
+ Code::Kind2String(compiled_code_.kind()));
}
}
@@ -567,7 +566,7 @@
DeoptimizerData* data = isolate->deoptimizer_data();
CHECK_LE(kind, DeoptimizerData::kLastDeoptimizeKind);
CHECK(!data->deopt_entry_code(kind).is_null());
- return data->deopt_entry_code(kind)->raw_instruction_start();
+ return data->deopt_entry_code(kind).raw_instruction_start();
}
bool Deoptimizer::IsDeoptimizationEntry(Isolate* isolate, Address addr,
@@ -576,7 +575,7 @@
CHECK_LE(type, DeoptimizerData::kLastDeoptimizeKind);
Code code = data->deopt_entry_code(type);
if (code.is_null()) return false;
- return addr == code->raw_instruction_start();
+ return addr == code.raw_instruction_start();
}
bool Deoptimizer::IsDeoptimizationEntry(Isolate* isolate, Address addr,
@@ -600,18 +599,18 @@
int length = 0;
// Count all entries in the deoptimizing code list of every context.
Object context = isolate->heap()->native_contexts_list();
- while (!context->IsUndefined(isolate)) {
+ while (!context.IsUndefined(isolate)) {
Context native_context = Context::cast(context);
- Object element = native_context->DeoptimizedCodeListHead();
- while (!element->IsUndefined(isolate)) {
+ Object element = native_context.DeoptimizedCodeListHead();
+ while (!element.IsUndefined(isolate)) {
Code code = Code::cast(element);
- DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
- if (!code->marked_for_deoptimization()) {
+ DCHECK(code.kind() == Code::OPTIMIZED_FUNCTION);
+ if (!code.marked_for_deoptimization()) {
length++;
}
- element = code->next_code_link();
+ element = code.next_code_link();
}
- context = Context::cast(context)->next_context_link();
+ context = Context::cast(context).next_context_link();
}
return length;
}
@@ -623,7 +622,7 @@
case TranslatedFrame::kInterpretedFunction: {
int bytecode_offset = translated_frame->node_id().ToInt();
HandlerTable table(
- translated_frame->raw_shared_info()->GetBytecodeArray());
+ translated_frame->raw_shared_info().GetBytecodeArray());
return table.LookupRange(bytecode_offset, data_out, nullptr);
}
case TranslatedFrame::kJavaScriptBuiltinContinuationWithCatch: {
@@ -649,7 +648,7 @@
// Determine basic deoptimization information. The optimized frame is
// described by the input data.
DeoptimizationData input_data =
- DeoptimizationData::cast(compiled_code_->deoptimization_data());
+ DeoptimizationData::cast(compiled_code_.deoptimization_data());
{
// Read caller's PC, caller's FP and caller's constant pool values
@@ -681,27 +680,26 @@
PrintF(trace_scope_->file(),
" (opt #%d) @%d, FP to SP delta: %d, caller sp: " V8PRIxPTR_FMT
"]\n",
- input_data->OptimizationId()->value(), bailout_id_, fp_to_sp_delta_,
+ input_data.OptimizationId().value(), bailout_id_, fp_to_sp_delta_,
caller_frame_top_);
if (deopt_kind_ == DeoptimizeKind::kEager ||
deopt_kind_ == DeoptimizeKind::kSoft) {
- compiled_code_->PrintDeoptLocation(
+ compiled_code_.PrintDeoptLocation(
trace_scope_->file(), " ;;; deoptimize at ", from_);
}
}
- BailoutId node_id = input_data->BytecodeOffset(bailout_id_);
- ByteArray translations = input_data->TranslationByteArray();
- unsigned translation_index =
- input_data->TranslationIndex(bailout_id_)->value();
+ BailoutId node_id = input_data.BytecodeOffset(bailout_id_);
+ ByteArray translations = input_data.TranslationByteArray();
+ unsigned translation_index = input_data.TranslationIndex(bailout_id_).value();
TranslationIterator state_iterator(translations, translation_index);
translated_state_.Init(
isolate_, input_->GetFramePointerAddress(), &state_iterator,
- input_data->LiteralArray(), input_->GetRegisterValues(),
+ input_data.LiteralArray(), input_->GetRegisterValues(),
trace_scope_ == nullptr ? nullptr : trace_scope_->file(),
- function_->IsHeapObject()
- ? function_->shared()->internal_formal_parameter_count()
+ function_.IsHeapObject()
+ ? function_.shared().internal_formal_parameter_count()
: 0);
// Do the input frame to output frame(s) translation.
@@ -812,7 +810,7 @@
TranslatedFrame::iterator function_iterator = value_iterator++;
if (trace_scope_ != nullptr) {
PrintF(trace_scope_->file(), " translating interpreted frame ");
- std::unique_ptr<char[]> name = shared->DebugName()->ToCString();
+ std::unique_ptr<char[]> name = shared.DebugName().ToCString();
PrintF(trace_scope_->file(), "%s", name.get());
PrintF(trace_scope_->file(), " => bytecode_offset=%d, height=%d%s\n",
bytecode_offset, height_in_bytes,
@@ -829,7 +827,7 @@
unsigned output_frame_size = height_in_bytes + fixed_frame_size;
// Allocate and store the output frame description.
- int parameter_count = shared->internal_formal_parameter_count() + 1;
+ int parameter_count = shared.internal_formal_parameter_count() + 1;
FrameDescription* output_frame = new (output_frame_size)
FrameDescription(output_frame_size, parameter_count);
FrameWriter frame_writer(this, output_frame, trace_scope_);
@@ -919,16 +917,16 @@
}
// Read the context from the translations.
Object context = context_pos->GetRawValue();
- output_frame->SetContext(static_cast<intptr_t>(context->ptr()));
+ output_frame->SetContext(static_cast<intptr_t>(context.ptr()));
frame_writer.PushTranslatedValue(context_pos, "context");
// The function was mentioned explicitly in the BEGIN_FRAME.
frame_writer.PushTranslatedValue(function_iterator, "function");
// Set the bytecode array pointer.
- Object bytecode_array = shared->HasBreakInfo()
- ? shared->GetDebugInfo()->DebugBytecodeArray()
- : shared->GetBytecodeArray();
+ Object bytecode_array = shared.HasBreakInfo()
+ ? shared.GetDebugInfo().DebugBytecodeArray()
+ : shared.GetBytecodeArray();
frame_writer.PushRawObject(bytecode_array, "bytecode array\n");
// The bytecode offset was mentioned explicitly in the BEGIN_FRAME.
@@ -1029,12 +1027,12 @@
? builtins->builtin(Builtins::kInterpreterEnterBytecodeAdvance)
: builtins->builtin(Builtins::kInterpreterEnterBytecodeDispatch);
output_frame->SetPc(
- static_cast<intptr_t>(dispatch_builtin->InstructionStart()));
+ static_cast<intptr_t>(dispatch_builtin.InstructionStart()));
// Update constant pool.
if (FLAG_enable_embedded_constant_pool) {
intptr_t constant_pool_value =
- static_cast<intptr_t>(dispatch_builtin->constant_pool());
+ static_cast<intptr_t>(dispatch_builtin.constant_pool());
output_frame->SetConstantPool(constant_pool_value);
if (is_topmost) {
Register constant_pool_reg =
@@ -1053,7 +1051,7 @@
// Set the continuation for the topmost frame.
Code continuation = builtins->builtin(Builtins::kNotifyDeoptimized);
output_frame->SetContinuation(
- static_cast<intptr_t>(continuation->InstructionStart()));
+ static_cast<intptr_t>(continuation.InstructionStart()));
}
}
@@ -1150,12 +1148,12 @@
Code adaptor_trampoline =
builtins->builtin(Builtins::kArgumentsAdaptorTrampoline);
intptr_t pc_value = static_cast<intptr_t>(
- adaptor_trampoline->InstructionStart() +
- isolate_->heap()->arguments_adaptor_deopt_pc_offset()->value());
+ adaptor_trampoline.InstructionStart() +
+ isolate_->heap()->arguments_adaptor_deopt_pc_offset().value());
output_frame->SetPc(pc_value);
if (FLAG_enable_embedded_constant_pool) {
intptr_t constant_pool_value =
- static_cast<intptr_t>(adaptor_trampoline->constant_pool());
+ static_cast<intptr_t>(adaptor_trampoline.constant_pool());
output_frame->SetConstantPool(constant_pool_value);
}
}
@@ -1297,18 +1295,18 @@
// Compute this frame's PC.
DCHECK(bailout_id.IsValidForConstructStub());
- Address start = construct_stub->InstructionStart();
+ Address start = construct_stub.InstructionStart();
int pc_offset =
bailout_id == BailoutId::ConstructStubCreate()
- ? isolate_->heap()->construct_stub_create_deopt_pc_offset()->value()
- : isolate_->heap()->construct_stub_invoke_deopt_pc_offset()->value();
+ ? isolate_->heap()->construct_stub_create_deopt_pc_offset().value()
+ : isolate_->heap()->construct_stub_invoke_deopt_pc_offset().value();
intptr_t pc_value = static_cast<intptr_t>(start + pc_offset);
output_frame->SetPc(pc_value);
// Update constant pool.
if (FLAG_enable_embedded_constant_pool) {
intptr_t constant_pool_value =
- static_cast<intptr_t>(construct_stub->constant_pool());
+ static_cast<intptr_t>(construct_stub.constant_pool());
output_frame->SetConstantPool(constant_pool_value);
if (is_topmost) {
Register constant_pool_reg =
@@ -1332,7 +1330,7 @@
DCHECK_EQ(DeoptimizeKind::kLazy, deopt_kind_);
Code continuation = builtins->builtin(Builtins::kNotifyDeoptimized);
output_frame->SetContinuation(
- static_cast<intptr_t>(continuation->InstructionStart()));
+ static_cast<intptr_t>(continuation.InstructionStart()));
}
}
@@ -1557,7 +1555,7 @@
// Get the possible JSFunction for the case that this is a
// JavaScriptBuiltinContinuationFrame, which needs the JSFunction pointer
// like a normal JavaScriptFrame.
- const intptr_t maybe_function = value_iterator->GetRawValue()->ptr();
+ const intptr_t maybe_function = value_iterator->GetRawValue().ptr();
++value_iterator;
ReadOnlyRoots roots(isolate());
@@ -1609,7 +1607,7 @@
// set (it was automatically added at the end of the FrameState by the
// instruction selector).
Object context = value_iterator->GetRawValue();
- const intptr_t value = context->ptr();
+ const intptr_t value = context.ptr();
TranslatedFrame::iterator context_register_value = value_iterator++;
register_values[kContextRegister.code()] = context_register_value;
output_frame->SetContext(value);
@@ -1723,12 +1721,12 @@
Code continue_to_builtin = isolate()->builtins()->builtin(
TrampolineForBuiltinContinuation(mode, must_handle_result));
output_frame->SetPc(
- static_cast<intptr_t>(continue_to_builtin->InstructionStart()));
+ static_cast<intptr_t>(continue_to_builtin.InstructionStart()));
Code continuation =
isolate()->builtins()->builtin(Builtins::kNotifyDeoptimized);
output_frame->SetContinuation(
- static_cast<intptr_t>(continuation->InstructionStart()));
+ static_cast<intptr_t>(continuation.InstructionStart()));
}
void Deoptimizer::MaterializeHeapObjects() {
@@ -1759,8 +1757,8 @@
bool feedback_updated = translated_state_.DoUpdateFeedback();
if (trace_scope_ != nullptr && feedback_updated) {
PrintF(trace_scope_->file(), "Feedback updated");
- compiled_code_->PrintDeoptLocation(trace_scope_->file(),
- " from deoptimization at ", from_);
+ compiled_code_.PrintDeoptLocation(trace_scope_->file(),
+ " from deoptimization at ", from_);
}
isolate_->materialized_object_store()->Remove(
@@ -1779,8 +1777,8 @@
unsigned fixed_size = CommonFrameConstants::kFixedFrameSizeAboveFp;
// TODO(jkummerow): If {function_->IsSmi()} can indeed be true, then
// {function_} should not have type {JSFunction}.
- if (!function_->IsSmi()) {
- fixed_size += ComputeIncomingArgumentSize(function_->shared());
+ if (!function_.IsSmi()) {
+ fixed_size += ComputeIncomingArgumentSize(function_.shared());
}
return fixed_size;
}
@@ -1790,8 +1788,8 @@
// function into account so we have to avoid double counting them.
unsigned fixed_size_above_fp = ComputeInputFrameAboveFpFixedSize();
unsigned result = fixed_size_above_fp + fp_to_sp_delta_;
- if (compiled_code_->kind() == Code::OPTIMIZED_FUNCTION) {
- unsigned stack_slots = compiled_code_->stack_slots();
+ if (compiled_code_.kind() == Code::OPTIMIZED_FUNCTION) {
+ unsigned stack_slots = compiled_code_.stack_slots();
unsigned outgoing_size = 0;
// ComputeOutgoingArgumentSize(compiled_code_, bailout_id_);
CHECK_EQ(fixed_size_above_fp + (stack_slots * kSystemPointerSize) -
@@ -1811,7 +1809,7 @@
// static
unsigned Deoptimizer::ComputeIncomingArgumentSize(SharedFunctionInfo shared) {
- int parameter_slots = shared->internal_formal_parameter_count() + 1;
+ int parameter_slots = shared.internal_formal_parameter_count() + 1;
if (kPadArguments) parameter_slots = RoundUp(parameter_slots, 2);
return parameter_slots * kSystemPointerSize;
}
@@ -1895,7 +1893,7 @@
TranslationIterator::TranslationIterator(ByteArray buffer, int index)
: buffer_(buffer), index_(index) {
- DCHECK(index >= 0 && index < buffer->length());
+ DCHECK(index >= 0 && index < buffer.length());
}
int32_t TranslationIterator::Next() {
@@ -1904,7 +1902,7 @@
uint32_t bits = 0;
for (int i = 0; true; i += 7) {
DCHECK(HasNext());
- uint8_t next = buffer_->get(index_++);
+ uint8_t next = buffer_.get(index_++);
bits |= (next >> 1) << i;
if ((next & 1) == 0) break;
}
@@ -1914,7 +1912,7 @@
return is_negative ? -result : result;
}
-bool TranslationIterator::HasNext() const { return index_ < buffer_->length(); }
+bool TranslationIterator::HasNext() const { return index_ < buffer_.length(); }
Handle<ByteArray> TranslationBuffer::CreateByteArray(Factory* factory) {
Handle<ByteArray> result =
@@ -2163,12 +2161,12 @@
frame_fps_.erase(it);
FixedArray array = isolate()->heap()->materialized_objects();
- CHECK_LT(index, array->length());
+ CHECK_LT(index, array.length());
int fps_size = static_cast<int>(frame_fps_.size());
for (int i = index; i < fps_size; i++) {
- array->set(i, array->get(i + 1));
+ array.set(i, array.get(i + 1));
}
- array->set(fps_size, ReadOnlyRoots(isolate()).undefined_value());
+ array.set(fps_size, ReadOnlyRoots(isolate()).undefined_value());
return true;
}
@@ -2241,7 +2239,7 @@
*frame_it->shared_info(), frame_it->node_id());
DCHECK_EQ(parameter_count,
- function_->shared()->internal_formal_parameter_count());
+ function_->shared().internal_formal_parameter_count());
parameters_.resize(static_cast<size_t>(parameter_count));
for (int i = 0; i < parameter_count; i++) {
@@ -2276,7 +2274,7 @@
}
Deoptimizer::DeoptInfo Deoptimizer::GetDeoptInfo(Code code, Address pc) {
- CHECK(code->InstructionStart() <= pc && pc <= code->InstructionEnd());
+ CHECK(code.InstructionStart() <= pc && pc <= code.InstructionEnd());
SourcePosition last_position = SourcePosition::Unknown();
DeoptimizeReason last_reason = DeoptimizeReason::kUnknown;
int last_deopt_id = kNoDeoptimizationId;
@@ -2305,9 +2303,9 @@
// static
int Deoptimizer::ComputeSourcePositionFromBytecodeArray(
SharedFunctionInfo shared, BailoutId node_id) {
- DCHECK(shared->HasBytecodeArray());
- return AbstractCode::cast(shared->GetBytecodeArray())
- ->SourcePosition(node_id.ToInt());
+ DCHECK(shared.HasBytecodeArray());
+ return AbstractCode::cast(shared.GetBytecodeArray())
+ .SourcePosition(node_id.ToInt());
}
// static
@@ -2692,7 +2690,7 @@
switch (kind()) {
case kInterpretedFunction: {
int parameter_count =
- raw_shared_info_->internal_formal_parameter_count() + 1;
+ raw_shared_info_.internal_formal_parameter_count() + 1;
// + 2 for function and context.
return height_ + parameter_count + 2;
}
@@ -2713,7 +2711,7 @@
void TranslatedFrame::Handlify() {
if (!raw_shared_info_.is_null()) {
shared_info_ = Handle<SharedFunctionInfo>(raw_shared_info_,
- raw_shared_info_->GetIsolate());
+ raw_shared_info_.GetIsolate());
raw_shared_info_ = SharedFunctionInfo();
}
for (auto& value : values_) {
@@ -2730,14 +2728,14 @@
case Translation::INTERPRETED_FRAME: {
BailoutId bytecode_offset = BailoutId(iterator->Next());
SharedFunctionInfo shared_info =
- SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
+ SharedFunctionInfo::cast(literal_array.get(iterator->Next()));
int height = iterator->Next();
int return_value_offset = iterator->Next();
int return_value_count = iterator->Next();
if (trace_file != nullptr) {
- std::unique_ptr<char[]> name = shared_info->DebugName()->ToCString();
+ std::unique_ptr<char[]> name = shared_info.DebugName().ToCString();
PrintF(trace_file, " reading input frame %s", name.get());
- int arg_count = shared_info->internal_formal_parameter_count() + 1;
+ int arg_count = shared_info.internal_formal_parameter_count() + 1;
PrintF(trace_file,
" => bytecode_offset=%d, args=%d, height=%d, retval=%i(#%i); "
"inputs:\n",
@@ -2751,10 +2749,10 @@
case Translation::ARGUMENTS_ADAPTOR_FRAME: {
SharedFunctionInfo shared_info =
- SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
+ SharedFunctionInfo::cast(literal_array.get(iterator->Next()));
int height = iterator->Next();
if (trace_file != nullptr) {
- std::unique_ptr<char[]> name = shared_info->DebugName()->ToCString();
+ std::unique_ptr<char[]> name = shared_info.DebugName().ToCString();
PrintF(trace_file, " reading arguments adaptor frame %s", name.get());
PrintF(trace_file, " => height=%d; inputs:\n", height);
}
@@ -2764,10 +2762,10 @@
case Translation::CONSTRUCT_STUB_FRAME: {
BailoutId bailout_id = BailoutId(iterator->Next());
SharedFunctionInfo shared_info =
- SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
+ SharedFunctionInfo::cast(literal_array.get(iterator->Next()));
int height = iterator->Next();
if (trace_file != nullptr) {
- std::unique_ptr<char[]> name = shared_info->DebugName()->ToCString();
+ std::unique_ptr<char[]> name = shared_info.DebugName().ToCString();
PrintF(trace_file, " reading construct stub frame %s", name.get());
PrintF(trace_file, " => bailout_id=%d, height=%d; inputs:\n",
bailout_id.ToInt(), height);
@@ -2779,10 +2777,10 @@
case Translation::BUILTIN_CONTINUATION_FRAME: {
BailoutId bailout_id = BailoutId(iterator->Next());
SharedFunctionInfo shared_info =
- SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
+ SharedFunctionInfo::cast(literal_array.get(iterator->Next()));
int height = iterator->Next();
if (trace_file != nullptr) {
- std::unique_ptr<char[]> name = shared_info->DebugName()->ToCString();
+ std::unique_ptr<char[]> name = shared_info.DebugName().ToCString();
PrintF(trace_file, " reading builtin continuation frame %s",
name.get());
PrintF(trace_file, " => bailout_id=%d, height=%d; inputs:\n",
@@ -2798,10 +2796,10 @@
case Translation::JAVA_SCRIPT_BUILTIN_CONTINUATION_FRAME: {
BailoutId bailout_id = BailoutId(iterator->Next());
SharedFunctionInfo shared_info =
- SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
+ SharedFunctionInfo::cast(literal_array.get(iterator->Next()));
int height = iterator->Next();
if (trace_file != nullptr) {
- std::unique_ptr<char[]> name = shared_info->DebugName()->ToCString();
+ std::unique_ptr<char[]> name = shared_info.DebugName().ToCString();
PrintF(trace_file, " reading JavaScript builtin continuation frame %s",
name.get());
PrintF(trace_file, " => bailout_id=%d, height=%d; inputs:\n",
@@ -2816,10 +2814,10 @@
case Translation::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH_FRAME: {
BailoutId bailout_id = BailoutId(iterator->Next());
SharedFunctionInfo shared_info =
- SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
+ SharedFunctionInfo::cast(literal_array.get(iterator->Next()));
int height = iterator->Next();
if (trace_file != nullptr) {
- std::unique_ptr<char[]> name = shared_info->DebugName()->ToCString();
+ std::unique_ptr<char[]> name = shared_info.DebugName().ToCString();
PrintF(trace_file,
" reading JavaScript builtin continuation frame with catch %s",
name.get());
@@ -2888,7 +2886,7 @@
*length = Smi::cast(*FullObjectSlot(
parent_frame_pointer +
ArgumentsAdaptorFrameConstants::kLengthOffset))
- ->value();
+ .value();
arguments_frame = parent_frame_pointer;
} else {
if (length) *length = formal_parameter_count_;
@@ -3044,7 +3042,7 @@
if (trace_file != nullptr) {
PrintF(trace_file, V8PRIxPTR_FMT " ; %s ", uncompressed_value,
converter.NameOfCPURegister(input_reg));
- Object(uncompressed_value)->ShortPrint(trace_file);
+ Object(uncompressed_value).ShortPrint(trace_file);
}
TranslatedValue translated_value =
TranslatedValue::NewTagged(this, Object(uncompressed_value));
@@ -3173,7 +3171,7 @@
PrintF(trace_file, V8PRIxPTR_FMT " ; [fp %c %3d] ",
uncompressed_value, slot_offset < 0 ? '-' : '+',
std::abs(slot_offset));
- Object(uncompressed_value)->ShortPrint(trace_file);
+ Object(uncompressed_value).ShortPrint(trace_file);
}
TranslatedValue translated_value =
TranslatedValue::NewTagged(this, Object(uncompressed_value));
@@ -3265,11 +3263,11 @@
case Translation::LITERAL: {
int literal_index = iterator->Next();
- Object value = literal_array->get(literal_index);
+ Object value = literal_array.get(literal_index);
if (trace_file != nullptr) {
- PrintF(trace_file, V8PRIxPTR_FMT " ; (literal %2d) ", value->ptr(),
+ PrintF(trace_file, V8PRIxPTR_FMT " ; (literal %2d) ", value.ptr(),
literal_index);
- value->ShortPrint(trace_file);
+ value.ShortPrint(trace_file);
}
TranslatedValue translated_value =
@@ -3288,11 +3286,11 @@
static_cast<const OptimizedFrame*>(frame)->GetDeoptimizationData(
&deopt_index);
DCHECK(!data.is_null() && deopt_index != Safepoint::kNoDeoptimizationIndex);
- TranslationIterator it(data->TranslationByteArray(),
- data->TranslationIndex(deopt_index)->value());
- Init(frame->isolate(), frame->fp(), &it, data->LiteralArray(),
+ TranslationIterator it(data.TranslationByteArray(),
+ data.TranslationIndex(deopt_index).value());
+ Init(frame->isolate(), frame->fp(), &it, data.LiteralArray(),
nullptr /* registers */, nullptr /* trace file */,
- frame->function()->shared()->internal_formal_parameter_count());
+ frame->function().shared().internal_formal_parameter_count());
}
void TranslatedState::Init(Isolate* isolate, Address input_frame_pointer,
@@ -3510,7 +3508,7 @@
int* value_index,
TranslatedValue* slot,
Handle<Map> map) {
- int length = Smi::cast(frame->values_[*value_index].GetRawValue())->value();
+ int length = Smi::cast(frame->values_[*value_index].GetRawValue()).value();
(*value_index)++;
Handle<FixedDoubleArray> array = Handle<FixedDoubleArray>::cast(
isolate()->factory()->NewFixedDoubleArray(length));
@@ -3621,13 +3619,13 @@
case STRING_TABLE_TYPE: {
// Check we have the right size.
int array_length =
- Smi::cast(frame->values_[value_index].GetRawValue())->value();
+ Smi::cast(frame->values_[value_index].GetRawValue()).value();
int instance_size = FixedArray::SizeFor(array_length);
CHECK_EQ(instance_size, slot->GetChildrenCount() * kTaggedSize);
// Canonicalize empty fixed array.
- if (*map == ReadOnlyRoots(isolate()).empty_fixed_array()->map() &&
+ if (*map == ReadOnlyRoots(isolate()).empty_fixed_array().map() &&
array_length == 0) {
slot->set_storage(isolate()->factory()->empty_fixed_array());
} else {
@@ -3642,7 +3640,7 @@
case PROPERTY_ARRAY_TYPE: {
// Check we have the right size.
int length_or_hash =
- Smi::cast(frame->values_[value_index].GetRawValue())->value();
+ Smi::cast(frame->values_[value_index].GetRawValue()).value();
int array_length = PropertyArray::LengthField::decode(length_or_hash);
int instance_size = PropertyArray::SizeFor(array_length);
CHECK_EQ(instance_size, slot->GetChildrenCount() * kTaggedSize);
@@ -3808,10 +3806,10 @@
if (marker == kStoreUnboxedDouble) {
double double_field_value;
if (field_value->IsSmi()) {
- double_field_value = Smi::cast(*field_value)->value();
+ double_field_value = Smi::cast(*field_value).value();
} else {
CHECK(field_value->IsHeapNumber());
- double_field_value = HeapNumber::cast(*field_value)->value();
+ double_field_value = HeapNumber::cast(*field_value).value();
}
object_storage->WriteField<double>(offset, double_field_value);
} else if (marker == kStoreMutableHeapNumber) {
@@ -4030,7 +4028,7 @@
FixedArray literal_array,
FILE* trace_file) {
CHECK_EQ(Translation::UPDATE_FEEDBACK, iterator->Next());
- feedback_vector_ = FeedbackVector::cast(literal_array->get(iterator->Next()));
+ feedback_vector_ = FeedbackVector::cast(literal_array.get(iterator->Next()));
feedback_slot_ = FeedbackSlot(iterator->Next());
if (trace_file != nullptr) {
PrintF(trace_file, " reading FeedbackVector (slot %d)\n",
diff --git a/src/diagnostics/disassembler.cc b/src/diagnostics/disassembler.cc
index 95fe618..86d001b 100644
--- a/src/diagnostics/disassembler.cc
+++ b/src/diagnostics/disassembler.cc
@@ -221,7 +221,7 @@
} else if (RelocInfo::IsEmbeddedObjectMode(rmode)) {
HeapStringAllocator allocator;
StringStream accumulator(&allocator);
- relocinfo->target_object()->ShortPrint(&accumulator);
+ relocinfo->target_object().ShortPrint(&accumulator);
std::unique_ptr<char[]> obj_name = accumulator.ToCString();
const bool is_compressed = RelocInfo::IsCompressedEmbeddedObject(rmode);
out->AddFormatted(" ;; %sobject: %s",
@@ -236,9 +236,9 @@
out->AddFormatted(" ;; code:");
Code code = isolate->heap()->GcSafeFindCodeForInnerPointer(
relocinfo->target_address());
- Code::Kind kind = code->kind();
- if (code->is_builtin()) {
- out->AddFormatted(" Builtin::%s", Builtins::name(code->builtin_index()));
+ Code::Kind kind = code.kind();
+ if (code.is_builtin()) {
+ out->AddFormatted(" Builtin::%s", Builtins::name(code.builtin_index()));
} else {
out->AddFormatted(" %s", Code::Kind2String(kind));
}
diff --git a/src/diagnostics/gdb-jit.cc b/src/diagnostics/gdb-jit.cc
index a6a5976..6323c40 100644
--- a/src/diagnostics/gdb-jit.cc
+++ b/src/diagnostics/gdb-jit.cc
@@ -902,7 +902,7 @@
LineInfo* lineinfo() const { return lineinfo_; }
bool is_function() const {
- Code::Kind kind = code_->kind();
+ Code::Kind kind = code_.kind();
return kind == Code::OPTIMIZED_FUNCTION;
}
@@ -910,24 +910,24 @@
ScopeInfo scope_info() const {
DCHECK(has_scope_info());
- return shared_info_->scope_info();
+ return shared_info_.scope_info();
}
uintptr_t CodeStart() const {
- return static_cast<uintptr_t>(code_->InstructionStart());
+ return static_cast<uintptr_t>(code_.InstructionStart());
}
uintptr_t CodeEnd() const {
- return static_cast<uintptr_t>(code_->InstructionEnd());
+ return static_cast<uintptr_t>(code_.InstructionEnd());
}
uintptr_t CodeSize() const { return CodeEnd() - CodeStart(); }
bool has_script() {
- return !shared_info_.is_null() && shared_info_->script()->IsScript();
+ return !shared_info_.is_null() && shared_info_.script().IsScript();
}
- Script script() { return Script::cast(shared_info_->script()); }
+ Script script() { return Script::cast(shared_info_.script()); }
bool IsLineInfoAvailable() { return lineinfo_ != nullptr; }
@@ -945,7 +945,7 @@
std::unique_ptr<char[]> GetFilename() {
if (!shared_info_.is_null()) {
- return String::cast(script()->name())->ToCString();
+ return String::cast(script().name()).ToCString();
} else {
std::unique_ptr<char[]> result(new char[1]);
result[0] = 0;
@@ -955,7 +955,7 @@
int GetScriptLineNumber(int pos) {
if (!shared_info_.is_null()) {
- return script()->GetLineNumber(pos) + 1;
+ return script().GetLineNumber(pos) + 1;
} else {
return 0;
}
@@ -1087,8 +1087,8 @@
#endif
fb_block_size.set(static_cast<uint32_t>(w->position() - fb_block_start));
- int params = scope->ParameterCount();
- int context_slots = scope->ContextLocalCount();
+ int params = scope.ParameterCount();
+ int context_slots = scope.ContextLocalCount();
// The real slot ID is internal_slots + context_slot_id.
int internal_slots = Context::MIN_CONTEXT_SLOTS;
int current_abbreviation = 4;
@@ -1254,8 +1254,8 @@
if (extra_info) {
ScopeInfo scope = desc_->scope_info();
- int params = scope->ParameterCount();
- int context_slots = scope->ContextLocalCount();
+ int params = scope.ParameterCount();
+ int context_slots = scope.ContextLocalCount();
// The real slot ID is internal_slots + context_slot_id.
int internal_slots = Context::MIN_CONTEXT_SLOTS;
// Total children is params + context_slots + internal_slots + 2
@@ -1737,7 +1737,7 @@
#ifdef OBJECT_PRINT
void __gdb_print_v8_object(Object object) {
StdoutStream os;
- object->Print(os);
+ object.Print(os);
os << std::flush;
}
#endif
@@ -1969,8 +1969,8 @@
CodeMap* code_map = GetCodeMap();
AddressRange range;
- range.start = code->address();
- range.end = code->address() + code->CodeSize();
+ range.start = code.address();
+ range.end = code.address() + code.CodeSize();
RemoveJITCodeEntries(code_map, range);
CodeDescription code_desc(name, code, shared, lineinfo);
@@ -1981,7 +1981,7 @@
}
AddUnwindInfo(&code_desc);
- Isolate* isolate = code->GetIsolate();
+ Isolate* isolate = code.GetIsolate();
JITCodeEntry* entry = CreateELFObject(&code_desc, isolate);
delete lineinfo;
diff --git a/src/diagnostics/objects-debug.cc b/src/diagnostics/objects-debug.cc
index 44f0555..6ce7570 100644
--- a/src/diagnostics/objects-debug.cc
+++ b/src/diagnostics/objects-debug.cc
@@ -103,18 +103,18 @@
void Object::ObjectVerify(Isolate* isolate) {
RuntimeCallTimerScope timer(isolate, RuntimeCallCounterId::kObjectVerify);
if (IsSmi()) {
- Smi::cast(*this)->SmiVerify(isolate);
+ Smi::cast(*this).SmiVerify(isolate);
} else {
- HeapObject::cast(*this)->HeapObjectVerify(isolate);
+ HeapObject::cast(*this).HeapObjectVerify(isolate);
}
CHECK(!IsConstructor() || IsCallable());
}
void Object::VerifyPointer(Isolate* isolate, Object p) {
- if (p->IsHeapObject()) {
+ if (p.IsHeapObject()) {
HeapObject::VerifyHeapPointer(isolate, p);
} else {
- CHECK(p->IsSmi());
+ CHECK(p.IsSmi());
}
}
@@ -129,8 +129,8 @@
namespace {
void VerifyForeignPointer(Isolate* isolate, HeapObject host, Object foreign) {
- host->VerifyPointer(isolate, foreign);
- CHECK(foreign->IsUndefined(isolate) || Foreign::IsNormalized(foreign));
+ host.VerifyPointer(isolate, foreign);
+ CHECK(foreign.IsUndefined(isolate) || Foreign::IsNormalized(foreign));
}
} // namespace
@@ -143,25 +143,25 @@
void HeapObject::HeapObjectVerify(Isolate* isolate) {
TorqueGeneratedClassVerifiers::HeapObjectVerify(*this, isolate);
- switch (map()->instance_type()) {
+ switch (map().instance_type()) {
#define STRING_TYPE_CASE(TYPE, size, name, CamelName) case TYPE:
STRING_TYPE_LIST(STRING_TYPE_CASE)
#undef STRING_TYPE_CASE
if (IsConsString()) {
- ConsString::cast(*this)->ConsStringVerify(isolate);
+ ConsString::cast(*this).ConsStringVerify(isolate);
} else if (IsSlicedString()) {
- SlicedString::cast(*this)->SlicedStringVerify(isolate);
+ SlicedString::cast(*this).SlicedStringVerify(isolate);
} else if (IsThinString()) {
- ThinString::cast(*this)->ThinStringVerify(isolate);
+ ThinString::cast(*this).ThinStringVerify(isolate);
} else {
- String::cast(*this)->StringVerify(isolate);
+ String::cast(*this).StringVerify(isolate);
}
break;
case SYMBOL_TYPE:
- Symbol::cast(*this)->SymbolVerify(isolate);
+ Symbol::cast(*this).SymbolVerify(isolate);
break;
case MAP_TYPE:
- Map::cast(*this)->MapVerify(isolate);
+ Map::cast(*this).MapVerify(isolate);
break;
case HEAP_NUMBER_TYPE:
CHECK(IsHeapNumber());
@@ -170,17 +170,17 @@
CHECK(IsMutableHeapNumber());
break;
case BIGINT_TYPE:
- BigInt::cast(*this)->BigIntVerify(isolate);
+ BigInt::cast(*this).BigIntVerify(isolate);
break;
case CALL_HANDLER_INFO_TYPE:
- CallHandlerInfo::cast(*this)->CallHandlerInfoVerify(isolate);
+ CallHandlerInfo::cast(*this).CallHandlerInfoVerify(isolate);
break;
case OBJECT_BOILERPLATE_DESCRIPTION_TYPE:
ObjectBoilerplateDescription::cast(*this)
- ->ObjectBoilerplateDescriptionVerify(isolate);
+ .ObjectBoilerplateDescriptionVerify(isolate);
break;
case EMBEDDER_DATA_ARRAY_TYPE:
- EmbedderDataArray::cast(*this)->EmbedderDataArrayVerify(isolate);
+ EmbedderDataArray::cast(*this).EmbedderDataArrayVerify(isolate);
break;
// FixedArray types
case CLOSURE_FEEDBACK_CELL_ARRAY_TYPE:
@@ -197,7 +197,7 @@
case FIXED_ARRAY_TYPE:
case SCOPE_INFO_TYPE:
case SCRIPT_CONTEXT_TABLE_TYPE:
- FixedArray::cast(*this)->FixedArrayVerify(isolate);
+ FixedArray::cast(*this).FixedArrayVerify(isolate);
break;
case AWAIT_CONTEXT_TYPE:
case BLOCK_CONTEXT_TYPE:
@@ -208,288 +208,286 @@
case MODULE_CONTEXT_TYPE:
case SCRIPT_CONTEXT_TYPE:
case WITH_CONTEXT_TYPE:
- Context::cast(*this)->ContextVerify(isolate);
+ Context::cast(*this).ContextVerify(isolate);
break;
case NATIVE_CONTEXT_TYPE:
- NativeContext::cast(*this)->NativeContextVerify(isolate);
+ NativeContext::cast(*this).NativeContextVerify(isolate);
break;
case WEAK_FIXED_ARRAY_TYPE:
- WeakFixedArray::cast(*this)->WeakFixedArrayVerify(isolate);
+ WeakFixedArray::cast(*this).WeakFixedArrayVerify(isolate);
break;
case WEAK_ARRAY_LIST_TYPE:
- WeakArrayList::cast(*this)->WeakArrayListVerify(isolate);
+ WeakArrayList::cast(*this).WeakArrayListVerify(isolate);
break;
case FIXED_DOUBLE_ARRAY_TYPE:
- FixedDoubleArray::cast(*this)->FixedDoubleArrayVerify(isolate);
+ FixedDoubleArray::cast(*this).FixedDoubleArrayVerify(isolate);
break;
case FEEDBACK_METADATA_TYPE:
- FeedbackMetadata::cast(*this)->FeedbackMetadataVerify(isolate);
+ FeedbackMetadata::cast(*this).FeedbackMetadataVerify(isolate);
break;
case BYTE_ARRAY_TYPE:
- ByteArray::cast(*this)->ByteArrayVerify(isolate);
+ ByteArray::cast(*this).ByteArrayVerify(isolate);
break;
case BYTECODE_ARRAY_TYPE:
- BytecodeArray::cast(*this)->BytecodeArrayVerify(isolate);
+ BytecodeArray::cast(*this).BytecodeArrayVerify(isolate);
break;
case DESCRIPTOR_ARRAY_TYPE:
- DescriptorArray::cast(*this)->DescriptorArrayVerify(isolate);
+ DescriptorArray::cast(*this).DescriptorArrayVerify(isolate);
break;
case TRANSITION_ARRAY_TYPE:
- TransitionArray::cast(*this)->TransitionArrayVerify(isolate);
+ TransitionArray::cast(*this).TransitionArrayVerify(isolate);
break;
case PROPERTY_ARRAY_TYPE:
- PropertyArray::cast(*this)->PropertyArrayVerify(isolate);
+ PropertyArray::cast(*this).PropertyArrayVerify(isolate);
break;
case FREE_SPACE_TYPE:
- FreeSpace::cast(*this)->FreeSpaceVerify(isolate);
+ FreeSpace::cast(*this).FreeSpaceVerify(isolate);
break;
case FEEDBACK_CELL_TYPE:
- FeedbackCell::cast(*this)->FeedbackCellVerify(isolate);
+ FeedbackCell::cast(*this).FeedbackCellVerify(isolate);
break;
case FEEDBACK_VECTOR_TYPE:
- FeedbackVector::cast(*this)->FeedbackVectorVerify(isolate);
+ FeedbackVector::cast(*this).FeedbackVectorVerify(isolate);
break;
-#define VERIFY_TYPED_ARRAY(Type, type, TYPE, ctype) \
- case FIXED_##TYPE##_ARRAY_TYPE: \
- Fixed##Type##Array::cast(*this)->FixedTypedArrayVerify(isolate); \
+#define VERIFY_TYPED_ARRAY(Type, type, TYPE, ctype) \
+ case FIXED_##TYPE##_ARRAY_TYPE: \
+ Fixed##Type##Array::cast(*this).FixedTypedArrayVerify(isolate); \
break;
TYPED_ARRAYS(VERIFY_TYPED_ARRAY)
#undef VERIFY_TYPED_ARRAY
case CODE_TYPE:
- Code::cast(*this)->CodeVerify(isolate);
+ Code::cast(*this).CodeVerify(isolate);
break;
case ODDBALL_TYPE:
- Oddball::cast(*this)->OddballVerify(isolate);
+ Oddball::cast(*this).OddballVerify(isolate);
break;
case JS_OBJECT_TYPE:
case JS_ERROR_TYPE:
case JS_API_OBJECT_TYPE:
case JS_SPECIAL_API_OBJECT_TYPE:
case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
- JSObject::cast(*this)->JSObjectVerify(isolate);
+ JSObject::cast(*this).JSObjectVerify(isolate);
break;
case WASM_MODULE_TYPE:
- WasmModuleObject::cast(*this)->WasmModuleObjectVerify(isolate);
+ WasmModuleObject::cast(*this).WasmModuleObjectVerify(isolate);
break;
case WASM_TABLE_TYPE:
- WasmTableObject::cast(*this)->WasmTableObjectVerify(isolate);
+ WasmTableObject::cast(*this).WasmTableObjectVerify(isolate);
break;
case WASM_MEMORY_TYPE:
- WasmMemoryObject::cast(*this)->WasmMemoryObjectVerify(isolate);
+ WasmMemoryObject::cast(*this).WasmMemoryObjectVerify(isolate);
break;
case WASM_GLOBAL_TYPE:
- WasmGlobalObject::cast(*this)->WasmGlobalObjectVerify(isolate);
+ WasmGlobalObject::cast(*this).WasmGlobalObjectVerify(isolate);
break;
case WASM_EXCEPTION_TYPE:
- WasmExceptionObject::cast(*this)->WasmExceptionObjectVerify(isolate);
+ WasmExceptionObject::cast(*this).WasmExceptionObjectVerify(isolate);
break;
case WASM_INSTANCE_TYPE:
- WasmInstanceObject::cast(*this)->WasmInstanceObjectVerify(isolate);
+ WasmInstanceObject::cast(*this).WasmInstanceObjectVerify(isolate);
break;
case JS_ARGUMENTS_TYPE:
- JSArgumentsObject::cast(*this)->JSArgumentsObjectVerify(isolate);
+ JSArgumentsObject::cast(*this).JSArgumentsObjectVerify(isolate);
break;
case JS_GENERATOR_OBJECT_TYPE:
- JSGeneratorObject::cast(*this)->JSGeneratorObjectVerify(isolate);
+ JSGeneratorObject::cast(*this).JSGeneratorObjectVerify(isolate);
break;
case JS_ASYNC_FUNCTION_OBJECT_TYPE:
- JSAsyncFunctionObject::cast(*this)->JSAsyncFunctionObjectVerify(isolate);
+ JSAsyncFunctionObject::cast(*this).JSAsyncFunctionObjectVerify(isolate);
break;
case JS_ASYNC_GENERATOR_OBJECT_TYPE:
- JSAsyncGeneratorObject::cast(*this)->JSAsyncGeneratorObjectVerify(
- isolate);
+ JSAsyncGeneratorObject::cast(*this).JSAsyncGeneratorObjectVerify(isolate);
break;
case JS_VALUE_TYPE:
- JSValue::cast(*this)->JSValueVerify(isolate);
+ JSValue::cast(*this).JSValueVerify(isolate);
break;
case JS_DATE_TYPE:
- JSDate::cast(*this)->JSDateVerify(isolate);
+ JSDate::cast(*this).JSDateVerify(isolate);
break;
case JS_BOUND_FUNCTION_TYPE:
- JSBoundFunction::cast(*this)->JSBoundFunctionVerify(isolate);
+ JSBoundFunction::cast(*this).JSBoundFunctionVerify(isolate);
break;
case JS_FUNCTION_TYPE:
- JSFunction::cast(*this)->JSFunctionVerify(isolate);
+ JSFunction::cast(*this).JSFunctionVerify(isolate);
break;
case JS_GLOBAL_PROXY_TYPE:
- JSGlobalProxy::cast(*this)->JSGlobalProxyVerify(isolate);
+ JSGlobalProxy::cast(*this).JSGlobalProxyVerify(isolate);
break;
case JS_GLOBAL_OBJECT_TYPE:
- JSGlobalObject::cast(*this)->JSGlobalObjectVerify(isolate);
+ JSGlobalObject::cast(*this).JSGlobalObjectVerify(isolate);
break;
case CELL_TYPE:
- Cell::cast(*this)->CellVerify(isolate);
+ Cell::cast(*this).CellVerify(isolate);
break;
case PROPERTY_CELL_TYPE:
- PropertyCell::cast(*this)->PropertyCellVerify(isolate);
+ PropertyCell::cast(*this).PropertyCellVerify(isolate);
break;
case JS_ARRAY_TYPE:
- JSArray::cast(*this)->JSArrayVerify(isolate);
+ JSArray::cast(*this).JSArrayVerify(isolate);
break;
case JS_MODULE_NAMESPACE_TYPE:
- JSModuleNamespace::cast(*this)->JSModuleNamespaceVerify(isolate);
+ JSModuleNamespace::cast(*this).JSModuleNamespaceVerify(isolate);
break;
case JS_SET_TYPE:
- JSSet::cast(*this)->JSSetVerify(isolate);
+ JSSet::cast(*this).JSSetVerify(isolate);
break;
case JS_MAP_TYPE:
- JSMap::cast(*this)->JSMapVerify(isolate);
+ JSMap::cast(*this).JSMapVerify(isolate);
break;
case JS_SET_KEY_VALUE_ITERATOR_TYPE:
case JS_SET_VALUE_ITERATOR_TYPE:
- JSSetIterator::cast(*this)->JSSetIteratorVerify(isolate);
+ JSSetIterator::cast(*this).JSSetIteratorVerify(isolate);
break;
case JS_MAP_KEY_ITERATOR_TYPE:
case JS_MAP_KEY_VALUE_ITERATOR_TYPE:
case JS_MAP_VALUE_ITERATOR_TYPE:
- JSMapIterator::cast(*this)->JSMapIteratorVerify(isolate);
+ JSMapIterator::cast(*this).JSMapIteratorVerify(isolate);
break;
case JS_ARRAY_ITERATOR_TYPE:
- JSArrayIterator::cast(*this)->JSArrayIteratorVerify(isolate);
+ JSArrayIterator::cast(*this).JSArrayIteratorVerify(isolate);
break;
case JS_STRING_ITERATOR_TYPE:
- JSStringIterator::cast(*this)->JSStringIteratorVerify(isolate);
+ JSStringIterator::cast(*this).JSStringIteratorVerify(isolate);
break;
case JS_ASYNC_FROM_SYNC_ITERATOR_TYPE:
- JSAsyncFromSyncIterator::cast(*this)->JSAsyncFromSyncIteratorVerify(
+ JSAsyncFromSyncIterator::cast(*this).JSAsyncFromSyncIteratorVerify(
isolate);
break;
case WEAK_CELL_TYPE:
- WeakCell::cast(*this)->WeakCellVerify(isolate);
+ WeakCell::cast(*this).WeakCellVerify(isolate);
break;
case JS_WEAK_REF_TYPE:
- JSWeakRef::cast(*this)->JSWeakRefVerify(isolate);
+ JSWeakRef::cast(*this).JSWeakRefVerify(isolate);
break;
case JS_FINALIZATION_GROUP_TYPE:
- JSFinalizationGroup::cast(*this)->JSFinalizationGroupVerify(isolate);
+ JSFinalizationGroup::cast(*this).JSFinalizationGroupVerify(isolate);
break;
case JS_FINALIZATION_GROUP_CLEANUP_ITERATOR_TYPE:
JSFinalizationGroupCleanupIterator::cast(*this)
- ->JSFinalizationGroupCleanupIteratorVerify(isolate);
+ .JSFinalizationGroupCleanupIteratorVerify(isolate);
break;
case JS_WEAK_MAP_TYPE:
- JSWeakMap::cast(*this)->JSWeakMapVerify(isolate);
+ JSWeakMap::cast(*this).JSWeakMapVerify(isolate);
break;
case JS_WEAK_SET_TYPE:
- JSWeakSet::cast(*this)->JSWeakSetVerify(isolate);
+ JSWeakSet::cast(*this).JSWeakSetVerify(isolate);
break;
case JS_PROMISE_TYPE:
- JSPromise::cast(*this)->JSPromiseVerify(isolate);
+ JSPromise::cast(*this).JSPromiseVerify(isolate);
break;
case JS_REGEXP_TYPE:
- JSRegExp::cast(*this)->JSRegExpVerify(isolate);
+ JSRegExp::cast(*this).JSRegExpVerify(isolate);
break;
case JS_REGEXP_STRING_ITERATOR_TYPE:
- JSRegExpStringIterator::cast(*this)->JSRegExpStringIteratorVerify(
- isolate);
+ JSRegExpStringIterator::cast(*this).JSRegExpStringIteratorVerify(isolate);
break;
case FILLER_TYPE:
break;
case JS_PROXY_TYPE:
- JSProxy::cast(*this)->JSProxyVerify(isolate);
+ JSProxy::cast(*this).JSProxyVerify(isolate);
break;
case FOREIGN_TYPE:
- Foreign::cast(*this)->ForeignVerify(isolate);
+ Foreign::cast(*this).ForeignVerify(isolate);
break;
case PREPARSE_DATA_TYPE:
- PreparseData::cast(*this)->PreparseDataVerify(isolate);
+ PreparseData::cast(*this).PreparseDataVerify(isolate);
break;
case UNCOMPILED_DATA_WITHOUT_PREPARSE_DATA_TYPE:
UncompiledDataWithoutPreparseData::cast(*this)
- ->UncompiledDataWithoutPreparseDataVerify(isolate);
+ .UncompiledDataWithoutPreparseDataVerify(isolate);
break;
case UNCOMPILED_DATA_WITH_PREPARSE_DATA_TYPE:
UncompiledDataWithPreparseData::cast(*this)
- ->UncompiledDataWithPreparseDataVerify(isolate);
+ .UncompiledDataWithPreparseDataVerify(isolate);
break;
case SHARED_FUNCTION_INFO_TYPE:
- SharedFunctionInfo::cast(*this)->SharedFunctionInfoVerify(isolate);
+ SharedFunctionInfo::cast(*this).SharedFunctionInfoVerify(isolate);
break;
case JS_MESSAGE_OBJECT_TYPE:
- JSMessageObject::cast(*this)->JSMessageObjectVerify(isolate);
+ JSMessageObject::cast(*this).JSMessageObjectVerify(isolate);
break;
case JS_ARRAY_BUFFER_TYPE:
- JSArrayBuffer::cast(*this)->JSArrayBufferVerify(isolate);
+ JSArrayBuffer::cast(*this).JSArrayBufferVerify(isolate);
break;
case JS_TYPED_ARRAY_TYPE:
- JSTypedArray::cast(*this)->JSTypedArrayVerify(isolate);
+ JSTypedArray::cast(*this).JSTypedArrayVerify(isolate);
break;
case JS_DATA_VIEW_TYPE:
- JSDataView::cast(*this)->JSDataViewVerify(isolate);
+ JSDataView::cast(*this).JSDataViewVerify(isolate);
break;
case SMALL_ORDERED_HASH_SET_TYPE:
- SmallOrderedHashSet::cast(*this)->SmallOrderedHashSetVerify(isolate);
+ SmallOrderedHashSet::cast(*this).SmallOrderedHashSetVerify(isolate);
break;
case SMALL_ORDERED_HASH_MAP_TYPE:
- SmallOrderedHashMap::cast(*this)->SmallOrderedHashMapVerify(isolate);
+ SmallOrderedHashMap::cast(*this).SmallOrderedHashMapVerify(isolate);
break;
case SMALL_ORDERED_NAME_DICTIONARY_TYPE:
- SmallOrderedNameDictionary::cast(*this)->SmallOrderedNameDictionaryVerify(
+ SmallOrderedNameDictionary::cast(*this).SmallOrderedNameDictionaryVerify(
isolate);
break;
case CODE_DATA_CONTAINER_TYPE:
- CodeDataContainer::cast(*this)->CodeDataContainerVerify(isolate);
+ CodeDataContainer::cast(*this).CodeDataContainerVerify(isolate);
break;
#ifdef V8_INTL_SUPPORT
case JS_INTL_V8_BREAK_ITERATOR_TYPE:
- JSV8BreakIterator::cast(*this)->JSV8BreakIteratorVerify(isolate);
+ JSV8BreakIterator::cast(*this).JSV8BreakIteratorVerify(isolate);
break;
case JS_INTL_COLLATOR_TYPE:
- JSCollator::cast(*this)->JSCollatorVerify(isolate);
+ JSCollator::cast(*this).JSCollatorVerify(isolate);
break;
case JS_INTL_DATE_TIME_FORMAT_TYPE:
- JSDateTimeFormat::cast(*this)->JSDateTimeFormatVerify(isolate);
+ JSDateTimeFormat::cast(*this).JSDateTimeFormatVerify(isolate);
break;
case JS_INTL_LIST_FORMAT_TYPE:
- JSListFormat::cast(*this)->JSListFormatVerify(isolate);
+ JSListFormat::cast(*this).JSListFormatVerify(isolate);
break;
case JS_INTL_LOCALE_TYPE:
- JSLocale::cast(*this)->JSLocaleVerify(isolate);
+ JSLocale::cast(*this).JSLocaleVerify(isolate);
break;
case JS_INTL_NUMBER_FORMAT_TYPE:
- JSNumberFormat::cast(*this)->JSNumberFormatVerify(isolate);
+ JSNumberFormat::cast(*this).JSNumberFormatVerify(isolate);
break;
case JS_INTL_PLURAL_RULES_TYPE:
- JSPluralRules::cast(*this)->JSPluralRulesVerify(isolate);
+ JSPluralRules::cast(*this).JSPluralRulesVerify(isolate);
break;
case JS_INTL_RELATIVE_TIME_FORMAT_TYPE:
- JSRelativeTimeFormat::cast(*this)->JSRelativeTimeFormatVerify(isolate);
+ JSRelativeTimeFormat::cast(*this).JSRelativeTimeFormatVerify(isolate);
break;
case JS_INTL_SEGMENT_ITERATOR_TYPE:
- JSSegmentIterator::cast(*this)->JSSegmentIteratorVerify(isolate);
+ JSSegmentIterator::cast(*this).JSSegmentIteratorVerify(isolate);
break;
case JS_INTL_SEGMENTER_TYPE:
- JSSegmenter::cast(*this)->JSSegmenterVerify(isolate);
+ JSSegmenter::cast(*this).JSSegmenterVerify(isolate);
break;
#endif // V8_INTL_SUPPORT
-#define MAKE_STRUCT_CASE(TYPE, Name, name) \
- case TYPE: \
- Name::cast(*this)->Name##Verify(isolate); \
+#define MAKE_STRUCT_CASE(TYPE, Name, name) \
+ case TYPE: \
+ Name::cast(*this).Name##Verify(isolate); \
break;
STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
case ALLOCATION_SITE_TYPE:
- AllocationSite::cast(*this)->AllocationSiteVerify(isolate);
+ AllocationSite::cast(*this).AllocationSiteVerify(isolate);
break;
case LOAD_HANDLER_TYPE:
- LoadHandler::cast(*this)->LoadHandlerVerify(isolate);
+ LoadHandler::cast(*this).LoadHandlerVerify(isolate);
break;
case STORE_HANDLER_TYPE:
- StoreHandler::cast(*this)->StoreHandlerVerify(isolate);
+ StoreHandler::cast(*this).StoreHandlerVerify(isolate);
break;
}
}
// static
void HeapObject::VerifyHeapPointer(Isolate* isolate, Object p) {
- CHECK(p->IsHeapObject());
+ CHECK(p.IsHeapObject());
CHECK(IsValidHeapObject(isolate->heap(), HeapObject::cast(p)));
}
@@ -497,7 +495,7 @@
TorqueGeneratedClassVerifiers::SymbolVerify(*this, isolate);
CHECK(HasHashCode());
CHECK_GT(Hash(), 0);
- CHECK(name()->IsUndefined(isolate) || name()->IsString());
+ CHECK(name().IsUndefined(isolate) || name().IsString());
CHECK_IMPLIES(IsPrivateName(), IsPrivate());
}
@@ -510,7 +508,7 @@
// - No Illegal bytecodes.
// - No consecutive sequences of prefix Wide / ExtraWide.
CHECK(IsBytecodeArray());
- CHECK(constant_pool()->IsFixedArray());
+ CHECK(constant_pool().IsFixedArray());
VerifyHeapPointer(isolate, constant_pool());
}
@@ -527,8 +525,8 @@
template <class Traits>
void FixedTypedArray<Traits>::FixedTypedArrayVerify(Isolate* isolate) {
- CHECK(IsHeapObject() && map()->instance_type() == Traits::kInstanceType);
- if (base_pointer()->ptr() == ptr()) {
+ CHECK(IsHeapObject() && map().instance_type() == Traits::kInstanceType);
+ if (base_pointer().ptr() == ptr()) {
CHECK_EQ(reinterpret_cast<Address>(external_pointer()),
FixedTypedArrayBase::kDataOffset - kHeapObjectTag);
} else {
@@ -546,34 +544,34 @@
namespace {
void VerifyJSObjectElements(Isolate* isolate, JSObject object) {
// Only TypedArrays can have these specialized elements.
- if (object->IsJSTypedArray()) {
+ if (object.IsJSTypedArray()) {
// TODO(cbruni): Fix CreateTypedArray to either not instantiate the object
// or propertly initialize it on errors during construction.
/* CHECK(object->HasFixedTypedArrayElements()); */
/* CHECK(object->elements()->IsFixedTypedArrayBase()); */
return;
}
- CHECK(!object->HasFixedTypedArrayElements());
- CHECK(!object->elements()->IsFixedTypedArrayBase());
+ CHECK(!object.HasFixedTypedArrayElements());
+ CHECK(!object.elements().IsFixedTypedArrayBase());
- if (object->HasDoubleElements()) {
- if (object->elements()->length() > 0) {
- CHECK(object->elements()->IsFixedDoubleArray());
+ if (object.HasDoubleElements()) {
+ if (object.elements().length() > 0) {
+ CHECK(object.elements().IsFixedDoubleArray());
}
return;
}
- FixedArray elements = FixedArray::cast(object->elements());
- if (object->HasSmiElements()) {
+ FixedArray elements = FixedArray::cast(object.elements());
+ if (object.HasSmiElements()) {
// We might have a partially initialized backing store, in which case we
// allow the hole + smi values.
- for (int i = 0; i < elements->length(); i++) {
- Object value = elements->get(i);
- CHECK(value->IsSmi() || value->IsTheHole(isolate));
+ for (int i = 0; i < elements.length(); i++) {
+ Object value = elements.get(i);
+ CHECK(value.IsSmi() || value.IsTheHole(isolate));
}
- } else if (object->HasObjectElements()) {
- for (int i = 0; i < elements->length(); i++) {
- Object element = elements->get(i);
+ } else if (object.HasObjectElements()) {
+ for (int i = 0; i < elements.length(); i++) {
+ Object element = elements.get(i);
CHECK(!HasWeakHeapObjectTag(element));
}
}
@@ -586,26 +584,26 @@
CHECK_IMPLIES(HasSloppyArgumentsElements(), IsJSArgumentsObject());
if (HasFastProperties()) {
- int actual_unused_property_fields = map()->GetInObjectProperties() +
- property_array()->length() -
- map()->NextFreePropertyIndex();
- if (map()->UnusedPropertyFields() != actual_unused_property_fields) {
+ int actual_unused_property_fields = map().GetInObjectProperties() +
+ property_array().length() -
+ map().NextFreePropertyIndex();
+ if (map().UnusedPropertyFields() != actual_unused_property_fields) {
// There are two reasons why this can happen:
// - in the middle of StoreTransitionStub when the new extended backing
// store is already set into the object and the allocation of the
// MutableHeapNumber triggers GC while the map isn't updated yet.
// - deletion of the last property can leave additional backing store
// capacity behind.
- CHECK_GT(actual_unused_property_fields, map()->UnusedPropertyFields());
- int delta = actual_unused_property_fields - map()->UnusedPropertyFields();
+ CHECK_GT(actual_unused_property_fields, map().UnusedPropertyFields());
+ int delta = actual_unused_property_fields - map().UnusedPropertyFields();
CHECK_EQ(0, delta % JSObject::kFieldsAdded);
}
- DescriptorArray descriptors = map()->instance_descriptors();
+ DescriptorArray descriptors = map().instance_descriptors();
bool is_transitionable_fast_elements_kind =
- IsTransitionableFastElementsKind(map()->elements_kind());
+ IsTransitionableFastElementsKind(map().elements_kind());
- for (int i = 0; i < map()->NumberOfOwnDescriptors(); i++) {
- PropertyDetails details = descriptors->GetDetails(i);
+ for (int i = 0; i < map().NumberOfOwnDescriptors(); i++) {
+ PropertyDetails details = descriptors.GetDetails(i);
if (details.location() == kField) {
DCHECK_EQ(kData, details.kind());
Representation r = details.representation();
@@ -618,43 +616,43 @@
VerifyObjectField(isolate, index.offset());
}
Object value = RawFastPropertyAt(index);
- if (r.IsDouble()) DCHECK(value->IsMutableHeapNumber());
- if (value->IsUninitialized(isolate)) continue;
- if (r.IsSmi()) DCHECK(value->IsSmi());
- if (r.IsHeapObject()) DCHECK(value->IsHeapObject());
- FieldType field_type = descriptors->GetFieldType(i);
- bool type_is_none = field_type->IsNone();
- bool type_is_any = field_type->IsAny();
+ if (r.IsDouble()) DCHECK(value.IsMutableHeapNumber());
+ if (value.IsUninitialized(isolate)) continue;
+ if (r.IsSmi()) DCHECK(value.IsSmi());
+ if (r.IsHeapObject()) DCHECK(value.IsHeapObject());
+ FieldType field_type = descriptors.GetFieldType(i);
+ bool type_is_none = field_type.IsNone();
+ bool type_is_any = field_type.IsAny();
if (r.IsNone()) {
CHECK(type_is_none);
} else if (!type_is_any && !(type_is_none && r.IsHeapObject())) {
- CHECK(!field_type->NowStable() || field_type->NowContains(value));
+ CHECK(!field_type.NowStable() || field_type.NowContains(value));
}
CHECK_IMPLIES(is_transitionable_fast_elements_kind,
Map::IsMostGeneralFieldType(r, field_type));
}
}
- if (map()->EnumLength() != kInvalidEnumCacheSentinel) {
- EnumCache enum_cache = descriptors->enum_cache();
- FixedArray keys = enum_cache->keys();
- FixedArray indices = enum_cache->indices();
- CHECK_LE(map()->EnumLength(), keys->length());
+ if (map().EnumLength() != kInvalidEnumCacheSentinel) {
+ EnumCache enum_cache = descriptors.enum_cache();
+ FixedArray keys = enum_cache.keys();
+ FixedArray indices = enum_cache.indices();
+ CHECK_LE(map().EnumLength(), keys.length());
CHECK_IMPLIES(indices != ReadOnlyRoots(isolate).empty_fixed_array(),
- keys->length() == indices->length());
+ keys.length() == indices.length());
}
}
// If a GC was caused while constructing this object, the elements
// pointer may point to a one pointer filler map.
if (ElementsAreSafeToExamine()) {
- CHECK_EQ((map()->has_fast_smi_or_object_elements() ||
- map()->has_frozen_or_sealed_elements() ||
+ CHECK_EQ((map().has_fast_smi_or_object_elements() ||
+ map().has_frozen_or_sealed_elements() ||
(elements() == GetReadOnlyRoots().empty_fixed_array()) ||
HasFastStringWrapperElements()),
- (elements()->map() == GetReadOnlyRoots().fixed_array_map() ||
- elements()->map() == GetReadOnlyRoots().fixed_cow_array_map()));
- CHECK_EQ(map()->has_fast_object_elements(), HasObjectElements());
+ (elements().map() == GetReadOnlyRoots().fixed_array_map() ||
+ elements().map() == GetReadOnlyRoots().fixed_cow_array_map()));
+ CHECK_EQ(map().has_fast_object_elements(), HasObjectElements());
VerifyJSObjectElements(isolate, *this);
}
}
@@ -667,23 +665,23 @@
CHECK(instance_size() == kVariableSizeSentinel ||
(kTaggedSize <= instance_size() &&
static_cast<size_t>(instance_size()) < heap->Capacity()));
- CHECK(GetBackPointer()->IsUndefined(isolate) ||
- !Map::cast(GetBackPointer())->is_stable());
- SLOW_DCHECK(instance_descriptors()->IsSortedNoDuplicates());
+ CHECK(GetBackPointer().IsUndefined(isolate) ||
+ !Map::cast(GetBackPointer()).is_stable());
+ SLOW_DCHECK(instance_descriptors().IsSortedNoDuplicates());
DisallowHeapAllocation no_gc;
SLOW_DCHECK(
TransitionsAccessor(isolate, *this, &no_gc).IsSortedNoDuplicates());
SLOW_DCHECK(TransitionsAccessor(isolate, *this, &no_gc)
.IsConsistentWithBackPointers());
SLOW_DCHECK(!FLAG_unbox_double_fields ||
- layout_descriptor()->IsConsistentWithMap(*this));
+ layout_descriptor().IsConsistentWithMap(*this));
if (!may_have_interesting_symbols()) {
CHECK(!has_named_interceptor());
CHECK(!is_dictionary_map());
CHECK(!is_access_check_needed());
DescriptorArray const descriptors = instance_descriptors();
for (int i = 0; i < NumberOfOwnDescriptors(); ++i) {
- CHECK(!descriptors->GetKey(i)->IsInterestingSymbol());
+ CHECK(!descriptors.GetKey(i).IsInterestingSymbol());
}
}
CHECK_IMPLIES(has_named_interceptor(), may_have_interesting_symbols());
@@ -696,7 +694,7 @@
CHECK_IMPLIES(is_deprecated(), !is_stable());
if (is_prototype_map()) {
DCHECK(prototype_info() == Smi::kZero ||
- prototype_info()->IsPrototypeInfo());
+ prototype_info().IsPrototypeInfo());
}
}
@@ -777,7 +775,7 @@
void NativeContext::NativeContextVerify(Isolate* isolate) {
ContextVerify(isolate);
CHECK_EQ(length(), NativeContext::NATIVE_CONTEXT_SLOTS);
- CHECK_EQ(kSize, map()->instance_size());
+ CHECK_EQ(kSize, map().instance_size());
}
void FeedbackMetadata::FeedbackMetadataVerify(Isolate* isolate) {
@@ -817,9 +815,9 @@
Object key = get(ToKeyIndex(descriptor))->cast<Object>();
// number_of_descriptors() may be out of sync with the actual descriptors
// written during descriptor array construction.
- if (key->IsUndefined(isolate)) continue;
+ if (key.IsUndefined(isolate)) continue;
PropertyDetails details = GetDetails(descriptor);
- if (Name::cast(key)->IsPrivate()) {
+ if (Name::cast(key).IsPrivate()) {
CHECK_NE(details.attributes() & DONT_ENUM, 0);
}
MaybeObject value = get(ToValueIndex(descriptor));
@@ -829,10 +827,10 @@
value == MaybeObject::FromObject(FieldType::None()) ||
value == MaybeObject::FromObject(FieldType::Any()) ||
value->IsCleared() ||
- (value->GetHeapObjectIfWeak(&heap_object) && heap_object->IsMap()));
+ (value->GetHeapObjectIfWeak(&heap_object) && heap_object.IsMap()));
} else {
CHECK(!value->IsWeakOrCleared());
- CHECK(!value->cast<Object>()->IsMap());
+ CHECK(!value->cast<Object>().IsMap());
}
}
}
@@ -847,7 +845,7 @@
TorqueGeneratedClassVerifiers::JSArgumentsObjectVerify(*this, isolate);
if (IsSloppyArgumentsElementsKind(GetElementsKind())) {
SloppyArgumentsElements::cast(elements())
- ->SloppyArgumentsElementsVerify(isolate, *this);
+ .SloppyArgumentsElementsVerify(isolate, *this);
}
if (isolate->IsInAnyContext(map(), Context::SLOPPY_ARGUMENTS_MAP_INDEX) ||
isolate->IsInAnyContext(map(),
@@ -867,16 +865,16 @@
FixedArrayVerify(isolate);
// Abort verification if only partially initialized (can't use arguments()
// getter because it does FixedArray::cast()).
- if (get(kArgumentsIndex)->IsUndefined(isolate)) return;
+ if (get(kArgumentsIndex).IsUndefined(isolate)) return;
- ElementsKind kind = holder->GetElementsKind();
+ ElementsKind kind = holder.GetElementsKind();
bool is_fast = kind == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
CHECK(IsFixedArray());
CHECK_GE(length(), 2);
CHECK_EQ(map(), ReadOnlyRoots(isolate).sloppy_arguments_elements_map());
Context context_object = context();
FixedArray arg_elements = FixedArray::cast(arguments());
- if (arg_elements->length() == 0) {
+ if (arg_elements.length() == 0) {
CHECK(arg_elements == ReadOnlyRoots(isolate).empty_fixed_array());
return;
}
@@ -892,7 +890,7 @@
// Verify that each context-mapped argument is either the hole or a valid
// Smi within context length range.
Object mapped = get_mapped_entry(i);
- if (mapped->IsTheHole(isolate)) {
+ if (mapped.IsTheHole(isolate)) {
// Slow sloppy arguments can be holey.
if (!is_fast) continue;
// Fast sloppy arguments elements are never holey. Either the element is
@@ -904,28 +902,28 @@
nofMappedParameters++;
CHECK_LE(maxMappedIndex, mappedIndex);
maxMappedIndex = mappedIndex;
- Object value = context_object->get(mappedIndex);
- CHECK(value->IsObject());
+ Object value = context_object.get(mappedIndex);
+ CHECK(value.IsObject());
// None of the context-mapped entries should exist in the arguments
// elements.
CHECK(!accessor->HasElement(holder, i, arg_elements));
}
- CHECK_LE(nofMappedParameters, context_object->length());
- CHECK_LE(nofMappedParameters, arg_elements->length());
- CHECK_LE(maxMappedIndex, context_object->length());
- CHECK_LE(maxMappedIndex, arg_elements->length());
+ CHECK_LE(nofMappedParameters, context_object.length());
+ CHECK_LE(nofMappedParameters, arg_elements.length());
+ CHECK_LE(maxMappedIndex, context_object.length());
+ CHECK_LE(maxMappedIndex, arg_elements.length());
}
USE_TORQUE_VERIFIER(JSGeneratorObject)
void JSAsyncFunctionObject::JSAsyncFunctionObjectVerify(Isolate* isolate) {
TorqueGeneratedClassVerifiers::JSAsyncFunctionObjectVerify(*this, isolate);
- promise()->HeapObjectVerify(isolate);
+ promise().HeapObjectVerify(isolate);
}
void JSAsyncGeneratorObject::JSAsyncGeneratorObjectVerify(Isolate* isolate) {
TorqueGeneratedClassVerifiers::JSAsyncGeneratorObjectVerify(*this, isolate);
- queue()->HeapObjectVerify(isolate);
+ queue().HeapObjectVerify(isolate);
}
USE_TORQUE_VERIFIER(JSValue)
@@ -933,31 +931,31 @@
void JSDate::JSDateVerify(Isolate* isolate) {
TorqueGeneratedClassVerifiers::JSDateVerify(*this, isolate);
- if (month()->IsSmi()) {
+ if (month().IsSmi()) {
int month = Smi::ToInt(this->month());
CHECK(0 <= month && month <= 11);
}
- if (day()->IsSmi()) {
+ if (day().IsSmi()) {
int day = Smi::ToInt(this->day());
CHECK(1 <= day && day <= 31);
}
- if (hour()->IsSmi()) {
+ if (hour().IsSmi()) {
int hour = Smi::ToInt(this->hour());
CHECK(0 <= hour && hour <= 23);
}
- if (min()->IsSmi()) {
+ if (min().IsSmi()) {
int min = Smi::ToInt(this->min());
CHECK(0 <= min && min <= 59);
}
- if (sec()->IsSmi()) {
+ if (sec().IsSmi()) {
int sec = Smi::ToInt(this->sec());
CHECK(0 <= sec && sec <= 59);
}
- if (weekday()->IsSmi()) {
+ if (weekday().IsSmi()) {
int weekday = Smi::ToInt(this->weekday());
CHECK(0 <= weekday && weekday <= 6);
}
- if (cache_stamp()->IsSmi()) {
+ if (cache_stamp().IsSmi()) {
CHECK(Smi::ToInt(cache_stamp()) <=
Smi::ToInt(isolate->date_cache()->stamp()));
}
@@ -983,25 +981,25 @@
void ConsString::ConsStringVerify(Isolate* isolate) {
TorqueGeneratedClassVerifiers::ConsStringVerify(*this, isolate);
CHECK_GE(this->length(), ConsString::kMinLength);
- CHECK(this->length() == this->first()->length() + this->second()->length());
+ CHECK(this->length() == this->first().length() + this->second().length());
if (this->IsFlat()) {
// A flat cons can only be created by String::SlowFlatten.
// Afterwards, the first part may be externalized or internalized.
- CHECK(this->first()->IsSeqString() || this->first()->IsExternalString() ||
- this->first()->IsThinString());
+ CHECK(this->first().IsSeqString() || this->first().IsExternalString() ||
+ this->first().IsThinString());
}
}
void ThinString::ThinStringVerify(Isolate* isolate) {
TorqueGeneratedClassVerifiers::ThinStringVerify(*this, isolate);
- CHECK(this->actual()->IsInternalizedString());
- CHECK(this->actual()->IsSeqString() || this->actual()->IsExternalString());
+ CHECK(this->actual().IsInternalizedString());
+ CHECK(this->actual().IsSeqString() || this->actual().IsExternalString());
}
void SlicedString::SlicedStringVerify(Isolate* isolate) {
TorqueGeneratedClassVerifiers::SlicedStringVerify(*this, isolate);
- CHECK(!this->parent()->IsConsString());
- CHECK(!this->parent()->IsSlicedString());
+ CHECK(!this->parent().IsConsString());
+ CHECK(!this->parent().IsSlicedString());
CHECK_GE(this->length(), SlicedString::kMinLength);
}
@@ -1009,17 +1007,17 @@
TorqueGeneratedClassVerifiers::JSBoundFunctionVerify(*this, isolate);
CHECK(IsCallable());
- if (!raw_bound_target_function()->IsUndefined(isolate)) {
- CHECK(bound_target_function()->IsCallable());
- CHECK_EQ(IsConstructor(), bound_target_function()->IsConstructor());
+ if (!raw_bound_target_function().IsUndefined(isolate)) {
+ CHECK(bound_target_function().IsCallable());
+ CHECK_EQ(IsConstructor(), bound_target_function().IsConstructor());
}
}
void JSFunction::JSFunctionVerify(Isolate* isolate) {
TorqueGeneratedClassVerifiers::JSFunctionVerify(*this, isolate);
- CHECK(raw_feedback_cell()->IsFeedbackCell());
- CHECK(code()->IsCode());
- CHECK(map()->is_callable());
+ CHECK(raw_feedback_cell().IsFeedbackCell());
+ CHECK(code().IsCode());
+ CHECK(map().is_callable());
Handle<JSFunction> function(*this, isolate);
LookupIterator it(isolate, function, isolate->factory()->prototype_string(),
LookupIterator::OWN_SKIP_INTERCEPTOR);
@@ -1041,8 +1039,8 @@
TorqueGeneratedClassVerifiers::SharedFunctionInfoVerify(*this, isolate);
Object value = name_or_scope_info();
- if (value->IsScopeInfo()) {
- CHECK_LT(0, ScopeInfo::cast(value)->length());
+ if (value.IsScopeInfo()) {
+ CHECK_LT(0, ScopeInfo::cast(value).length());
CHECK_NE(value, ReadOnlyRoots(isolate).empty_scope_info());
}
@@ -1051,25 +1049,25 @@
HasUncompiledDataWithPreparseData() ||
HasUncompiledDataWithoutPreparseData() || HasWasmCapiFunctionData());
- CHECK(script_or_debug_info()->IsUndefined(isolate) ||
- script_or_debug_info()->IsScript() || HasDebugInfo());
+ CHECK(script_or_debug_info().IsUndefined(isolate) ||
+ script_or_debug_info().IsScript() || HasDebugInfo());
if (!is_compiled()) {
CHECK(!HasFeedbackMetadata());
- CHECK(outer_scope_info()->IsScopeInfo() ||
- outer_scope_info()->IsTheHole(isolate));
+ CHECK(outer_scope_info().IsScopeInfo() ||
+ outer_scope_info().IsTheHole(isolate));
} else if (HasBytecodeArray() && HasFeedbackMetadata()) {
- CHECK(feedback_metadata()->IsFeedbackMetadata());
+ CHECK(feedback_metadata().IsFeedbackMetadata());
}
int expected_map_index = Context::FunctionMapIndex(
language_mode(), kind(), HasSharedName(), needs_home_object());
CHECK_EQ(expected_map_index, function_map_index());
- if (scope_info()->length() > 0) {
+ if (scope_info().length() > 0) {
ScopeInfo info = scope_info();
- CHECK(kind() == info->function_kind());
- CHECK_EQ(kind() == kModule, info->scope_type() == MODULE_SCOPE);
+ CHECK(kind() == info.function_kind());
+ CHECK_EQ(kind() == kModule, info.scope_type() == MODULE_SCOPE);
}
if (IsApiFunction()) {
@@ -1093,16 +1091,15 @@
void JSGlobalProxy::JSGlobalProxyVerify(Isolate* isolate) {
TorqueGeneratedClassVerifiers::JSGlobalProxyVerify(*this, isolate);
- CHECK(map()->is_access_check_needed());
+ CHECK(map().is_access_check_needed());
// Make sure that this object has no properties, elements.
- CHECK_EQ(0, FixedArray::cast(elements())->length());
+ CHECK_EQ(0, FixedArray::cast(elements()).length());
}
void JSGlobalObject::JSGlobalObjectVerify(Isolate* isolate) {
CHECK(IsJSGlobalObject());
// Do not check the dummy global object for the builtins.
- if (global_dictionary()->NumberOfElements() == 0 &&
- elements()->length() == 0) {
+ if (global_dictionary().NumberOfElements() == 0 && elements().length() == 0) {
return;
}
JSObjectVerify(isolate);
@@ -1112,11 +1109,11 @@
TorqueGeneratedClassVerifiers::OddballVerify(*this, isolate);
Heap* heap = isolate->heap();
Object number = to_number();
- if (number->IsHeapObject()) {
+ if (number.IsHeapObject()) {
CHECK(number == ReadOnlyRoots(heap).nan_value() ||
number == ReadOnlyRoots(heap).hole_nan_value());
} else {
- CHECK(number->IsSmi());
+ CHECK(number.IsSmi());
int value = Smi::ToInt(number);
// Hidden oddballs have negative smis.
const int kLeastHiddenOddballNumber = -7;
@@ -1160,7 +1157,7 @@
void CodeDataContainer::CodeDataContainerVerify(Isolate* isolate) {
CHECK(IsCodeDataContainer());
VerifyObjectField(isolate, kNextCodeLinkOffset);
- CHECK(next_code_link()->IsCode() || next_code_link()->IsUndefined(isolate));
+ CHECK(next_code_link().IsCode() || next_code_link().IsUndefined(isolate));
}
void Code::CodeVerify(Isolate* isolate) {
@@ -1172,7 +1169,7 @@
CHECK_LE(constant_pool_offset(), code_comments_offset());
CHECK_LE(code_comments_offset(), InstructionSize());
CHECK(IsAligned(raw_instruction_start(), kCodeAlignment));
- relocation_info()->ObjectVerify(isolate);
+ relocation_info().ObjectVerify(isolate);
CHECK(Code::SizeFor(body_size()) <= kMaxRegularHeapObjectSize ||
isolate->heap()->InSpace(*this, CODE_LO_SPACE));
Address last_gc_pc = kNullAddress;
@@ -1192,37 +1189,37 @@
// If a GC was caused while constructing this array, the elements
// pointer may point to a one pointer filler map.
if (!ElementsAreSafeToExamine()) return;
- if (elements()->IsUndefined(isolate)) return;
- CHECK(elements()->IsFixedArray() || elements()->IsFixedDoubleArray());
- if (elements()->length() == 0) {
+ if (elements().IsUndefined(isolate)) return;
+ CHECK(elements().IsFixedArray() || elements().IsFixedDoubleArray());
+ if (elements().length() == 0) {
CHECK_EQ(elements(), ReadOnlyRoots(isolate).empty_fixed_array());
}
- if (!length()->IsNumber()) return;
+ if (!length().IsNumber()) return;
// Verify that the length and the elements backing store are in sync.
- if (length()->IsSmi() && (HasFastElements() || HasFrozenOrSealedElements())) {
- if (elements()->length() > 0) {
- CHECK_IMPLIES(HasDoubleElements(), elements()->IsFixedDoubleArray());
+ if (length().IsSmi() && (HasFastElements() || HasFrozenOrSealedElements())) {
+ if (elements().length() > 0) {
+ CHECK_IMPLIES(HasDoubleElements(), elements().IsFixedDoubleArray());
CHECK_IMPLIES(HasSmiOrObjectElements() || HasFrozenOrSealedElements(),
- elements()->IsFixedArray());
+ elements().IsFixedArray());
}
int size = Smi::ToInt(length());
// Holey / Packed backing stores might have slack or might have not been
// properly initialized yet.
- CHECK(size <= elements()->length() ||
+ CHECK(size <= elements().length() ||
elements() == ReadOnlyRoots(isolate).empty_fixed_array());
} else {
CHECK(HasDictionaryElements());
uint32_t array_length;
- CHECK(length()->ToArrayLength(&array_length));
+ CHECK(length().ToArrayLength(&array_length));
if (array_length == 0xFFFFFFFF) {
- CHECK(length()->ToArrayLength(&array_length));
+ CHECK(length().ToArrayLength(&array_length));
}
if (array_length != 0) {
NumberDictionary dict = NumberDictionary::cast(elements());
// The dictionary can never have more elements than the array length + 1.
// If the backing store grows the verification might be triggered with
// the old length in place.
- uint32_t nof_elements = static_cast<uint32_t>(dict->NumberOfElements());
+ uint32_t nof_elements = static_cast<uint32_t>(dict.NumberOfElements());
if (nof_elements != 0) nof_elements--;
CHECK_LE(nof_elements, array_length);
}
@@ -1232,14 +1229,14 @@
void JSSet::JSSetVerify(Isolate* isolate) {
TorqueGeneratedClassVerifiers::JSSetVerify(*this, isolate);
VerifyHeapPointer(isolate, table());
- CHECK(table()->IsOrderedHashSet() || table()->IsUndefined(isolate));
+ CHECK(table().IsOrderedHashSet() || table().IsUndefined(isolate));
// TODO(arv): Verify OrderedHashTable too.
}
void JSMap::JSMapVerify(Isolate* isolate) {
TorqueGeneratedClassVerifiers::JSMapVerify(*this, isolate);
VerifyHeapPointer(isolate, table());
- CHECK(table()->IsOrderedHashMap() || table()->IsUndefined(isolate));
+ CHECK(table().IsOrderedHashMap() || table().IsUndefined(isolate));
// TODO(arv): Verify OrderedHashTable too.
}
@@ -1247,69 +1244,69 @@
CHECK(IsJSSetIterator());
JSObjectVerify(isolate);
VerifyHeapPointer(isolate, table());
- CHECK(table()->IsOrderedHashSet());
- CHECK(index()->IsSmi());
+ CHECK(table().IsOrderedHashSet());
+ CHECK(index().IsSmi());
}
void JSMapIterator::JSMapIteratorVerify(Isolate* isolate) {
CHECK(IsJSMapIterator());
JSObjectVerify(isolate);
VerifyHeapPointer(isolate, table());
- CHECK(table()->IsOrderedHashMap());
- CHECK(index()->IsSmi());
+ CHECK(table().IsOrderedHashMap());
+ CHECK(index().IsSmi());
}
void WeakCell::WeakCellVerify(Isolate* isolate) {
CHECK(IsWeakCell());
- CHECK(target()->IsJSReceiver() || target()->IsUndefined(isolate));
+ CHECK(target().IsJSReceiver() || target().IsUndefined(isolate));
- CHECK(prev()->IsWeakCell() || prev()->IsUndefined(isolate));
- if (prev()->IsWeakCell()) {
- CHECK_EQ(WeakCell::cast(prev())->next(), *this);
+ CHECK(prev().IsWeakCell() || prev().IsUndefined(isolate));
+ if (prev().IsWeakCell()) {
+ CHECK_EQ(WeakCell::cast(prev()).next(), *this);
}
- CHECK(next()->IsWeakCell() || next()->IsUndefined(isolate));
- if (next()->IsWeakCell()) {
- CHECK_EQ(WeakCell::cast(next())->prev(), *this);
+ CHECK(next().IsWeakCell() || next().IsUndefined(isolate));
+ if (next().IsWeakCell()) {
+ CHECK_EQ(WeakCell::cast(next()).prev(), *this);
}
- CHECK_IMPLIES(key()->IsUndefined(isolate),
- key_list_prev()->IsUndefined(isolate));
- CHECK_IMPLIES(key()->IsUndefined(isolate),
- key_list_next()->IsUndefined(isolate));
+ CHECK_IMPLIES(key().IsUndefined(isolate),
+ key_list_prev().IsUndefined(isolate));
+ CHECK_IMPLIES(key().IsUndefined(isolate),
+ key_list_next().IsUndefined(isolate));
- CHECK(key_list_prev()->IsWeakCell() || key_list_prev()->IsUndefined(isolate));
- if (key_list_prev()->IsWeakCell()) {
- CHECK_EQ(WeakCell::cast(key_list_prev())->key_list_next(), *this);
+ CHECK(key_list_prev().IsWeakCell() || key_list_prev().IsUndefined(isolate));
+ if (key_list_prev().IsWeakCell()) {
+ CHECK_EQ(WeakCell::cast(key_list_prev()).key_list_next(), *this);
}
- CHECK(key_list_next()->IsWeakCell() || key_list_next()->IsUndefined(isolate));
- if (key_list_next()->IsWeakCell()) {
- CHECK_EQ(WeakCell::cast(key_list_next())->key_list_prev(), *this);
+ CHECK(key_list_next().IsWeakCell() || key_list_next().IsUndefined(isolate));
+ if (key_list_next().IsWeakCell()) {
+ CHECK_EQ(WeakCell::cast(key_list_next()).key_list_prev(), *this);
}
- CHECK(finalization_group()->IsUndefined(isolate) ||
- finalization_group()->IsJSFinalizationGroup());
+ CHECK(finalization_group().IsUndefined(isolate) ||
+ finalization_group().IsJSFinalizationGroup());
}
void JSWeakRef::JSWeakRefVerify(Isolate* isolate) {
CHECK(IsJSWeakRef());
JSObjectVerify(isolate);
- CHECK(target()->IsUndefined(isolate) || target()->IsJSReceiver());
+ CHECK(target().IsUndefined(isolate) || target().IsJSReceiver());
}
void JSFinalizationGroup::JSFinalizationGroupVerify(Isolate* isolate) {
CHECK(IsJSFinalizationGroup());
JSObjectVerify(isolate);
VerifyHeapPointer(isolate, cleanup());
- CHECK(active_cells()->IsUndefined(isolate) || active_cells()->IsWeakCell());
- if (active_cells()->IsWeakCell()) {
- CHECK(WeakCell::cast(active_cells())->prev()->IsUndefined(isolate));
+ CHECK(active_cells().IsUndefined(isolate) || active_cells().IsWeakCell());
+ if (active_cells().IsWeakCell()) {
+ CHECK(WeakCell::cast(active_cells()).prev().IsUndefined(isolate));
}
- CHECK(cleared_cells()->IsUndefined(isolate) || cleared_cells()->IsWeakCell());
- if (cleared_cells()->IsWeakCell()) {
- CHECK(WeakCell::cast(cleared_cells())->prev()->IsUndefined(isolate));
+ CHECK(cleared_cells().IsUndefined(isolate) || cleared_cells().IsWeakCell());
+ if (cleared_cells().IsWeakCell()) {
+ CHECK(WeakCell::cast(cleared_cells()).prev().IsUndefined(isolate));
}
}
@@ -1323,35 +1320,35 @@
void FinalizationGroupCleanupJobTask::FinalizationGroupCleanupJobTaskVerify(
Isolate* isolate) {
CHECK(IsFinalizationGroupCleanupJobTask());
- CHECK(finalization_group()->IsJSFinalizationGroup());
+ CHECK(finalization_group().IsJSFinalizationGroup());
}
void JSWeakMap::JSWeakMapVerify(Isolate* isolate) {
TorqueGeneratedClassVerifiers::JSWeakMapVerify(*this, isolate);
VerifyHeapPointer(isolate, table());
- CHECK(table()->IsEphemeronHashTable() || table()->IsUndefined(isolate));
+ CHECK(table().IsEphemeronHashTable() || table().IsUndefined(isolate));
}
void JSArrayIterator::JSArrayIteratorVerify(Isolate* isolate) {
TorqueGeneratedClassVerifiers::JSArrayIteratorVerify(*this, isolate);
- CHECK(iterated_object()->IsJSReceiver());
+ CHECK(iterated_object().IsJSReceiver());
- CHECK_GE(next_index()->Number(), 0);
- CHECK_LE(next_index()->Number(), kMaxSafeInteger);
+ CHECK_GE(next_index().Number(), 0);
+ CHECK_LE(next_index().Number(), kMaxSafeInteger);
- if (iterated_object()->IsJSTypedArray()) {
+ if (iterated_object().IsJSTypedArray()) {
// JSTypedArray::length is limited to Smi range.
- CHECK(next_index()->IsSmi());
- CHECK_LE(next_index()->Number(), Smi::kMaxValue);
- } else if (iterated_object()->IsJSArray()) {
+ CHECK(next_index().IsSmi());
+ CHECK_LE(next_index().Number(), Smi::kMaxValue);
+ } else if (iterated_object().IsJSArray()) {
// JSArray::length is limited to Uint32 range.
- CHECK_LE(next_index()->Number(), kMaxUInt32);
+ CHECK_LE(next_index().Number(), kMaxUInt32);
}
}
void JSStringIterator::JSStringIteratorVerify(Isolate* isolate) {
TorqueGeneratedClassVerifiers::JSStringIteratorVerify(*this, isolate);
- CHECK(string()->IsString());
+ CHECK(string().IsString());
CHECK_GE(index(), 0);
CHECK_LE(index(), String::kMaxLength);
@@ -1362,14 +1359,14 @@
void JSWeakSet::JSWeakSetVerify(Isolate* isolate) {
TorqueGeneratedClassVerifiers::JSWeakSetVerify(*this, isolate);
VerifyHeapPointer(isolate, table());
- CHECK(table()->IsEphemeronHashTable() || table()->IsUndefined(isolate));
+ CHECK(table().IsEphemeronHashTable() || table().IsUndefined(isolate));
}
USE_TORQUE_VERIFIER(Microtask)
void CallableTask::CallableTaskVerify(Isolate* isolate) {
TorqueGeneratedClassVerifiers::CallableTaskVerify(*this, isolate);
- CHECK(callable()->IsCallable());
+ CHECK(callable().IsCallable());
}
USE_TORQUE_VERIFIER(CallbackTask)
@@ -1377,7 +1374,7 @@
void PromiseReactionJobTask::PromiseReactionJobTaskVerify(Isolate* isolate) {
TorqueGeneratedClassVerifiers::PromiseReactionJobTaskVerify(*this, isolate);
VerifyHeapPointer(isolate, handler());
- CHECK(handler()->IsUndefined(isolate) || handler()->IsCallable());
+ CHECK(handler().IsUndefined(isolate) || handler().IsCallable());
}
USE_TORQUE_VERIFIER(PromiseFulfillReactionJobTask)
@@ -1394,7 +1391,7 @@
TorqueGeneratedClassVerifiers::JSPromiseVerify(*this, isolate);
VerifySmiField(kFlagsOffset);
if (status() == Promise::kPending) {
- CHECK(reactions()->IsSmi() || reactions()->IsPromiseReaction());
+ CHECK(reactions().IsSmi() || reactions().IsPromiseReaction());
}
}
@@ -1432,7 +1429,7 @@
entry < Capacity(); entry++) {
for (int offset = 0; offset < Derived::kEntrySize; offset++) {
Object val = GetDataEntry(entry, offset);
- CHECK(val->IsTheHole(isolate));
+ CHECK(val.IsTheHole(isolate));
}
}
}
@@ -1443,7 +1440,7 @@
entry++) {
for (int offset = 0; offset < kEntrySize; offset++) {
Object val = GetDataEntry(entry, offset);
- CHECK(val->IsTheHole(isolate));
+ CHECK(val.IsTheHole(isolate));
}
}
}
@@ -1455,7 +1452,7 @@
entry++) {
for (int offset = 0; offset < kEntrySize; offset++) {
Object val = GetDataEntry(entry, offset);
- CHECK(val->IsTheHole(isolate));
+ CHECK(val.IsTheHole(isolate));
}
}
}
@@ -1468,7 +1465,7 @@
entry++) {
for (int offset = 0; offset < kEntrySize; offset++) {
Object val = GetDataEntry(entry, offset);
- CHECK(val->IsTheHole(isolate) ||
+ CHECK(val.IsTheHole(isolate) ||
(PropertyDetails::Empty().AsSmi() == Smi::cast(val)));
}
}
@@ -1479,52 +1476,51 @@
switch (TypeTag()) {
case JSRegExp::ATOM: {
FixedArray arr = FixedArray::cast(data());
- CHECK(arr->get(JSRegExp::kAtomPatternIndex)->IsString());
+ CHECK(arr.get(JSRegExp::kAtomPatternIndex).IsString());
break;
}
case JSRegExp::IRREGEXP: {
bool is_native = RegExpImpl::UsesNativeRegExp();
FixedArray arr = FixedArray::cast(data());
- Object one_byte_data = arr->get(JSRegExp::kIrregexpLatin1CodeIndex);
+ Object one_byte_data = arr.get(JSRegExp::kIrregexpLatin1CodeIndex);
// Smi : Not compiled yet (-1).
// Code/ByteArray: Compiled code.
- CHECK(
- (one_byte_data->IsSmi() &&
- Smi::ToInt(one_byte_data) == JSRegExp::kUninitializedValue) ||
- (is_native ? one_byte_data->IsCode() : one_byte_data->IsByteArray()));
- Object uc16_data = arr->get(JSRegExp::kIrregexpUC16CodeIndex);
- CHECK((uc16_data->IsSmi() &&
+ CHECK((one_byte_data.IsSmi() &&
+ Smi::ToInt(one_byte_data) == JSRegExp::kUninitializedValue) ||
+ (is_native ? one_byte_data.IsCode() : one_byte_data.IsByteArray()));
+ Object uc16_data = arr.get(JSRegExp::kIrregexpUC16CodeIndex);
+ CHECK((uc16_data.IsSmi() &&
Smi::ToInt(uc16_data) == JSRegExp::kUninitializedValue) ||
- (is_native ? uc16_data->IsCode() : uc16_data->IsByteArray()));
+ (is_native ? uc16_data.IsCode() : uc16_data.IsByteArray()));
- CHECK(arr->get(JSRegExp::kIrregexpCaptureCountIndex)->IsSmi());
- CHECK(arr->get(JSRegExp::kIrregexpMaxRegisterCountIndex)->IsSmi());
+ CHECK(arr.get(JSRegExp::kIrregexpCaptureCountIndex).IsSmi());
+ CHECK(arr.get(JSRegExp::kIrregexpMaxRegisterCountIndex).IsSmi());
break;
}
default:
CHECK_EQ(JSRegExp::NOT_COMPILED, TypeTag());
- CHECK(data()->IsUndefined(isolate));
+ CHECK(data().IsUndefined(isolate));
break;
}
}
void JSRegExpStringIterator::JSRegExpStringIteratorVerify(Isolate* isolate) {
TorqueGeneratedClassVerifiers::JSRegExpStringIteratorVerify(*this, isolate);
- CHECK(iterating_string()->IsString());
+ CHECK(iterating_string().IsString());
VerifySmiField(kFlagsOffset);
}
void JSProxy::JSProxyVerify(Isolate* isolate) {
TorqueGeneratedClassVerifiers::JSProxyVerify(*this, isolate);
- CHECK(map()->GetConstructor()->IsJSFunction());
+ CHECK(map().GetConstructor().IsJSFunction());
if (!IsRevoked()) {
- CHECK_EQ(target()->IsCallable(), map()->is_callable());
- CHECK_EQ(target()->IsConstructor(), map()->is_constructor());
+ CHECK_EQ(target().IsCallable(), map().is_callable());
+ CHECK_EQ(target().IsConstructor(), map().is_constructor());
}
- CHECK(map()->prototype()->IsNull(isolate));
+ CHECK(map().prototype().IsNull(isolate));
// There should be no properties on a Proxy.
- CHECK_EQ(0, map()->NumberOfOwnDescriptors());
+ CHECK_EQ(0, map().NumberOfOwnDescriptors());
}
void JSArrayBuffer::JSArrayBufferVerify(Isolate* isolate) {
@@ -1563,7 +1559,7 @@
TorqueGeneratedClassVerifiers::AsyncGeneratorRequestVerify(*this, isolate);
CHECK_GE(resume_mode(), JSGeneratorObject::kNext);
CHECK_LE(resume_mode(), JSGeneratorObject::kThrow);
- next()->ObjectVerify(isolate);
+ next().ObjectVerify(isolate);
}
void BigInt::BigIntVerify(Isolate* isolate) {
@@ -1579,9 +1575,9 @@
void ModuleInfoEntry::ModuleInfoEntryVerify(Isolate* isolate) {
TorqueGeneratedClassVerifiers::ModuleInfoEntryVerify(*this, isolate);
- CHECK_IMPLIES(import_name()->IsString(), module_request() >= 0);
- CHECK_IMPLIES(export_name()->IsString() && import_name()->IsString(),
- local_name()->IsUndefined(isolate));
+ CHECK_IMPLIES(import_name().IsString(), module_request() >= 0);
+ CHECK_IMPLIES(export_name().IsString() && import_name().IsString(),
+ local_name().IsUndefined(isolate));
}
void Module::ModuleVerify(Isolate* isolate) {
@@ -1597,38 +1593,38 @@
VerifySmiField(kHashOffset);
VerifySmiField(kStatusOffset);
- CHECK((status() >= kEvaluating && code()->IsModuleInfo()) ||
- (status() == kInstantiated && code()->IsJSGeneratorObject()) ||
- (status() == kInstantiating && code()->IsJSFunction()) ||
- (code()->IsSharedFunctionInfo()));
+ CHECK((status() >= kEvaluating && code().IsModuleInfo()) ||
+ (status() == kInstantiated && code().IsJSGeneratorObject()) ||
+ (status() == kInstantiating && code().IsJSFunction()) ||
+ (code().IsSharedFunctionInfo()));
- CHECK_EQ(status() == kErrored, !exception()->IsTheHole(isolate));
+ CHECK_EQ(status() == kErrored, !exception().IsTheHole(isolate));
- CHECK(module_namespace()->IsUndefined(isolate) ||
- module_namespace()->IsJSModuleNamespace());
- if (module_namespace()->IsJSModuleNamespace()) {
+ CHECK(module_namespace().IsUndefined(isolate) ||
+ module_namespace().IsJSModuleNamespace());
+ if (module_namespace().IsJSModuleNamespace()) {
CHECK_LE(kInstantiating, status());
- CHECK_EQ(JSModuleNamespace::cast(module_namespace())->module(), *this);
+ CHECK_EQ(JSModuleNamespace::cast(module_namespace()).module(), *this);
}
- CHECK_EQ(requested_modules()->length(), info()->module_requests()->length());
+ CHECK_EQ(requested_modules().length(), info().module_requests().length());
- CHECK(import_meta()->IsTheHole(isolate) || import_meta()->IsJSObject());
+ CHECK(import_meta().IsTheHole(isolate) || import_meta().IsJSObject());
CHECK_NE(hash(), 0);
}
void PrototypeInfo::PrototypeInfoVerify(Isolate* isolate) {
TorqueGeneratedClassVerifiers::PrototypeInfoVerify(*this, isolate);
- if (prototype_users()->IsWeakArrayList()) {
+ if (prototype_users().IsWeakArrayList()) {
PrototypeUsers::Verify(WeakArrayList::cast(prototype_users()));
} else {
- CHECK(prototype_users()->IsSmi());
+ CHECK(prototype_users().IsSmi());
}
}
void PrototypeUsers::Verify(WeakArrayList array) {
- if (array->length() == 0) {
+ if (array.length() == 0) {
// Allow empty & uninitialized lists.
return;
}
@@ -1637,18 +1633,18 @@
int empty_slots_count = 0;
while (empty_slot != kNoEmptySlotsMarker) {
CHECK_GT(empty_slot, 0);
- CHECK_LT(empty_slot, array->length());
- empty_slot = array->Get(empty_slot).ToSmi().value();
+ CHECK_LT(empty_slot, array.length());
+ empty_slot = array.Get(empty_slot).ToSmi().value();
++empty_slots_count;
}
// Verify that all elements are either weak pointers or SMIs marking empty
// slots.
int weak_maps_count = 0;
- for (int i = kFirstIndex; i < array->length(); ++i) {
+ for (int i = kFirstIndex; i < array.length(); ++i) {
HeapObject heap_object;
- MaybeObject object = array->Get(i);
- if ((object->GetHeapObjectIfWeak(&heap_object) && heap_object->IsMap()) ||
+ MaybeObject object = array.Get(i);
+ if ((object->GetHeapObjectIfWeak(&heap_object) && heap_object.IsMap()) ||
object->IsCleared()) {
++weak_maps_count;
} else {
@@ -1656,7 +1652,7 @@
}
}
- CHECK_EQ(weak_maps_count + empty_slots_count + 1, array->length());
+ CHECK_EQ(weak_maps_count + empty_slots_count + 1, array.length());
}
USE_TORQUE_VERIFIER(TemplateObjectDescription)
@@ -1706,26 +1702,26 @@
void WasmExportedFunctionData::WasmExportedFunctionDataVerify(
Isolate* isolate) {
TorqueGeneratedClassVerifiers::WasmExportedFunctionDataVerify(*this, isolate);
- CHECK(wrapper_code()->kind() == Code::JS_TO_WASM_FUNCTION ||
- wrapper_code()->kind() == Code::C_WASM_ENTRY);
+ CHECK(wrapper_code().kind() == Code::JS_TO_WASM_FUNCTION ||
+ wrapper_code().kind() == Code::C_WASM_ENTRY);
}
void WasmModuleObject::WasmModuleObjectVerify(Isolate* isolate) {
TorqueGeneratedClassVerifiers::WasmModuleObjectVerify(*this, isolate);
- CHECK(managed_native_module()->IsForeign());
- CHECK(export_wrappers()->IsFixedArray());
- CHECK(script()->IsScript());
+ CHECK(managed_native_module().IsForeign());
+ CHECK(export_wrappers().IsFixedArray());
+ CHECK(script().IsScript());
}
void WasmTableObject::WasmTableObjectVerify(Isolate* isolate) {
TorqueGeneratedClassVerifiers::WasmTableObjectVerify(*this, isolate);
- CHECK(elements()->IsFixedArray());
+ CHECK(elements().IsFixedArray());
VerifySmiField(kRawTypeOffset);
}
void WasmMemoryObject::WasmMemoryObjectVerify(Isolate* isolate) {
TorqueGeneratedClassVerifiers::WasmMemoryObjectVerify(*this, isolate);
- CHECK(array_buffer()->IsJSArrayBuffer());
+ CHECK(array_buffer().IsJSArrayBuffer());
VerifySmiField(kMaximumPagesOffset);
}
@@ -1733,13 +1729,13 @@
void WasmExceptionObject::WasmExceptionObjectVerify(Isolate* isolate) {
TorqueGeneratedClassVerifiers::WasmExceptionObjectVerify(*this, isolate);
- CHECK(serialized_signature()->IsByteArray());
+ CHECK(serialized_signature().IsByteArray());
}
void DataHandler::DataHandlerVerify(Isolate* isolate) {
TorqueGeneratedClassVerifiers::DataHandlerVerify(*this, isolate);
- CHECK_IMPLIES(!smi_handler()->IsSmi(),
- smi_handler()->IsCode() && IsStoreHandler());
+ CHECK_IMPLIES(!smi_handler().IsSmi(),
+ smi_handler().IsCode() && IsStoreHandler());
int data_count = data_field_count();
if (data_count >= 1) {
VerifyMaybeObjectField(isolate, kData1Offset);
@@ -1808,40 +1804,40 @@
void AllocationSite::AllocationSiteVerify(Isolate* isolate) {
CHECK(IsAllocationSite());
- CHECK(dependent_code()->IsDependentCode());
- CHECK(transition_info_or_boilerplate()->IsSmi() ||
- transition_info_or_boilerplate()->IsJSObject());
- CHECK(nested_site()->IsAllocationSite() || nested_site() == Smi::kZero);
+ CHECK(dependent_code().IsDependentCode());
+ CHECK(transition_info_or_boilerplate().IsSmi() ||
+ transition_info_or_boilerplate().IsJSObject());
+ CHECK(nested_site().IsAllocationSite() || nested_site() == Smi::kZero);
}
void AllocationMemento::AllocationMementoVerify(Isolate* isolate) {
TorqueGeneratedClassVerifiers::AllocationMementoVerify(*this, isolate);
VerifyHeapPointer(isolate, allocation_site());
- CHECK(!IsValid() || GetAllocationSite()->IsAllocationSite());
+ CHECK(!IsValid() || GetAllocationSite().IsAllocationSite());
}
void Script::ScriptVerify(Isolate* isolate) {
TorqueGeneratedClassVerifiers::ScriptVerify(*this, isolate);
- for (int i = 0; i < shared_function_infos()->length(); ++i) {
- MaybeObject maybe_object = shared_function_infos()->Get(i);
+ for (int i = 0; i < shared_function_infos().length(); ++i) {
+ MaybeObject maybe_object = shared_function_infos().Get(i);
HeapObject heap_object;
CHECK(maybe_object->IsWeak() || maybe_object->IsCleared() ||
(maybe_object->GetHeapObjectIfStrong(&heap_object) &&
- heap_object->IsUndefined(isolate)));
+ heap_object.IsUndefined(isolate)));
}
}
void NormalizedMapCache::NormalizedMapCacheVerify(Isolate* isolate) {
- WeakFixedArray::cast(*this)->WeakFixedArrayVerify(isolate);
+ WeakFixedArray::cast(*this).WeakFixedArrayVerify(isolate);
if (FLAG_enable_slow_asserts) {
for (int i = 0; i < length(); i++) {
MaybeObject e = WeakFixedArray::Get(i);
HeapObject heap_object;
if (e->GetHeapObjectIfWeak(&heap_object)) {
- Map::cast(heap_object)->DictionaryMapVerify(isolate);
+ Map::cast(heap_object).DictionaryMapVerify(isolate);
} else {
CHECK(e->IsCleared() || (e->GetHeapObjectIfStrong(&heap_object) &&
- heap_object->IsUndefined(isolate)));
+ heap_object.IsUndefined(isolate)));
}
}
}
@@ -1860,7 +1856,7 @@
for (int i = 0; i < children_length(); ++i) {
Object child = get_child_raw(i);
- CHECK(child->IsNull() || child->IsPreparseData());
+ CHECK(child.IsNull() || child.IsPreparseData());
VerifyPointer(isolate, child);
}
}
@@ -1950,18 +1946,18 @@
// Named properties
if (HasFastProperties()) {
info->number_of_objects_with_fast_properties_++;
- info->number_of_fast_used_fields_ += map()->NextFreePropertyIndex();
- info->number_of_fast_unused_fields_ += map()->UnusedPropertyFields();
+ info->number_of_fast_used_fields_ += map().NextFreePropertyIndex();
+ info->number_of_fast_unused_fields_ += map().UnusedPropertyFields();
} else if (IsJSGlobalObject()) {
- GlobalDictionary dict = JSGlobalObject::cast(*this)->global_dictionary();
- info->number_of_slow_used_properties_ += dict->NumberOfElements();
+ GlobalDictionary dict = JSGlobalObject::cast(*this).global_dictionary();
+ info->number_of_slow_used_properties_ += dict.NumberOfElements();
info->number_of_slow_unused_properties_ +=
- dict->Capacity() - dict->NumberOfElements();
+ dict.Capacity() - dict.NumberOfElements();
} else {
NameDictionary dict = property_dictionary();
- info->number_of_slow_used_properties_ += dict->NumberOfElements();
+ info->number_of_slow_used_properties_ += dict.NumberOfElements();
info->number_of_slow_unused_properties_ +=
- dict->Capacity() - dict->NumberOfElements();
+ dict.Capacity() - dict.NumberOfElements();
}
// Indexed properties
switch (GetElementsKind()) {
@@ -1979,9 +1975,9 @@
info->number_of_objects_with_fast_elements_++;
int holes = 0;
FixedArray e = FixedArray::cast(elements());
- int len = e->length();
+ int len = e.length();
for (int i = 0; i < len; i++) {
- if (e->get(i)->IsTheHole(isolate)) holes++;
+ if (e.get(i).IsTheHole(isolate)) holes++;
}
info->number_of_fast_used_elements_ += len - holes;
info->number_of_fast_unused_elements_ += holes;
@@ -1995,15 +1991,15 @@
{
info->number_of_objects_with_fast_elements_++;
FixedArrayBase e = FixedArrayBase::cast(elements());
- info->number_of_fast_used_elements_ += e->length();
+ info->number_of_fast_used_elements_ += e.length();
break;
}
case DICTIONARY_ELEMENTS:
case SLOW_STRING_WRAPPER_ELEMENTS: {
NumberDictionary dict = element_dictionary();
- info->number_of_slow_used_elements_ += dict->NumberOfElements();
+ info->number_of_slow_used_elements_ += dict.NumberOfElements();
info->number_of_slow_unused_elements_ +=
- dict->Capacity() - dict->NumberOfElements();
+ dict.Capacity() - dict.NumberOfElements();
break;
}
case FAST_SLOPPY_ARGUMENTS_ELEMENTS:
@@ -2060,7 +2056,7 @@
return false;
}
current_key = key;
- uint32_t hash = GetSortedKey(i)->Hash();
+ uint32_t hash = GetSortedKey(i).Hash();
if (hash < current) {
Print();
return false;
@@ -2079,10 +2075,10 @@
for (int i = 0; i < number_of_transitions(); i++) {
Name key = GetSortedKey(i);
- uint32_t hash = key->Hash();
+ uint32_t hash = key.Hash();
PropertyKind kind = kData;
PropertyAttributes attributes = NONE;
- if (!TransitionsAccessor::IsSpecialTransition(key->GetReadOnlyRoots(),
+ if (!TransitionsAccessor::IsSpecialTransition(key.GetReadOnlyRoots(),
key)) {
Map target = GetTarget(i);
PropertyDetails details =
@@ -2111,11 +2107,11 @@
bool TransitionsAccessor::IsSortedNoDuplicates() {
// Simple and non-existent transitions are always sorted.
if (encoding() != kFullTransitionArray) return true;
- return transitions()->IsSortedNoDuplicates();
+ return transitions().IsSortedNoDuplicates();
}
static bool CheckOneBackPointer(Map current_map, Object target) {
- return !target->IsMap() || Map::cast(target)->GetBackPointer() == current_map;
+ return !target.IsMap() || Map::cast(target).GetBackPointer() == current_map;
}
bool TransitionsAccessor::IsConsistentWithBackPointers() {
diff --git a/src/diagnostics/objects-printer.cc b/src/diagnostics/objects-printer.cc
index d80dd30..0f4d390 100644
--- a/src/diagnostics/objects-printer.cc
+++ b/src/diagnostics/objects-printer.cc
@@ -87,7 +87,7 @@
os << "Smi: " << std::hex << "0x" << Smi::ToInt(*this);
os << std::dec << " (" << Smi::ToInt(*this) << ")\n";
} else {
- HeapObject::cast(*this)->HeapObjectPrint(os);
+ HeapObject::cast(*this).HeapObjectPrint(os);
}
}
@@ -96,7 +96,7 @@
if (id != nullptr) {
os << id;
} else {
- os << map()->instance_type();
+ os << map().instance_type();
}
os << "]";
if (ReadOnlyHeap::Contains(*this)) {
@@ -108,42 +108,42 @@
}
void HeapObject::HeapObjectPrint(std::ostream& os) { // NOLINT
- InstanceType instance_type = map()->instance_type();
+ InstanceType instance_type = map().instance_type();
if (instance_type < FIRST_NONSTRING_TYPE) {
- String::cast(*this)->StringPrint(os);
+ String::cast(*this).StringPrint(os);
os << "\n";
return;
}
switch (instance_type) {
case SYMBOL_TYPE:
- Symbol::cast(*this)->SymbolPrint(os);
+ Symbol::cast(*this).SymbolPrint(os);
break;
case MAP_TYPE:
- Map::cast(*this)->MapPrint(os);
+ Map::cast(*this).MapPrint(os);
break;
case HEAP_NUMBER_TYPE:
- HeapNumber::cast(*this)->HeapNumberPrint(os);
+ HeapNumber::cast(*this).HeapNumberPrint(os);
os << "\n";
break;
case MUTABLE_HEAP_NUMBER_TYPE:
os << "<mutable ";
- MutableHeapNumber::cast(*this)->MutableHeapNumberPrint(os);
+ MutableHeapNumber::cast(*this).MutableHeapNumberPrint(os);
os << ">\n";
break;
case BIGINT_TYPE:
- BigInt::cast(*this)->BigIntPrint(os);
+ BigInt::cast(*this).BigIntPrint(os);
os << "\n";
break;
case EMBEDDER_DATA_ARRAY_TYPE:
- EmbedderDataArray::cast(*this)->EmbedderDataArrayPrint(os);
+ EmbedderDataArray::cast(*this).EmbedderDataArrayPrint(os);
break;
case FIXED_DOUBLE_ARRAY_TYPE:
- FixedDoubleArray::cast(*this)->FixedDoubleArrayPrint(os);
+ FixedDoubleArray::cast(*this).FixedDoubleArrayPrint(os);
break;
case FIXED_ARRAY_TYPE:
- FixedArray::cast(*this)->FixedArrayPrint(os);
+ FixedArray::cast(*this).FixedArrayPrint(os);
break;
case AWAIT_CONTEXT_TYPE:
case BLOCK_CONTEXT_TYPE:
@@ -155,10 +155,10 @@
case SCRIPT_CONTEXT_TYPE:
case WITH_CONTEXT_TYPE:
case SCRIPT_CONTEXT_TABLE_TYPE:
- Context::cast(*this)->ContextPrint(os);
+ Context::cast(*this).ContextPrint(os);
break;
case NATIVE_CONTEXT_TYPE:
- NativeContext::cast(*this)->NativeContextPrint(os);
+ NativeContext::cast(*this).NativeContextPrint(os);
break;
case HASH_TABLE_TYPE:
case ORDERED_HASH_MAP_TYPE:
@@ -167,52 +167,52 @@
case NAME_DICTIONARY_TYPE:
case GLOBAL_DICTIONARY_TYPE:
case SIMPLE_NUMBER_DICTIONARY_TYPE:
- FixedArray::cast(*this)->FixedArrayPrint(os);
+ FixedArray::cast(*this).FixedArrayPrint(os);
break;
case STRING_TABLE_TYPE:
- ObjectHashTable::cast(*this)->ObjectHashTablePrint(os);
+ ObjectHashTable::cast(*this).ObjectHashTablePrint(os);
break;
case NUMBER_DICTIONARY_TYPE:
- NumberDictionary::cast(*this)->NumberDictionaryPrint(os);
+ NumberDictionary::cast(*this).NumberDictionaryPrint(os);
break;
case EPHEMERON_HASH_TABLE_TYPE:
- EphemeronHashTable::cast(*this)->EphemeronHashTablePrint(os);
+ EphemeronHashTable::cast(*this).EphemeronHashTablePrint(os);
break;
case OBJECT_BOILERPLATE_DESCRIPTION_TYPE:
ObjectBoilerplateDescription::cast(*this)
- ->ObjectBoilerplateDescriptionPrint(os);
+ .ObjectBoilerplateDescriptionPrint(os);
break;
case PROPERTY_ARRAY_TYPE:
- PropertyArray::cast(*this)->PropertyArrayPrint(os);
+ PropertyArray::cast(*this).PropertyArrayPrint(os);
break;
case BYTE_ARRAY_TYPE:
- ByteArray::cast(*this)->ByteArrayPrint(os);
+ ByteArray::cast(*this).ByteArrayPrint(os);
break;
case BYTECODE_ARRAY_TYPE:
- BytecodeArray::cast(*this)->BytecodeArrayPrint(os);
+ BytecodeArray::cast(*this).BytecodeArrayPrint(os);
break;
case DESCRIPTOR_ARRAY_TYPE:
- DescriptorArray::cast(*this)->DescriptorArrayPrint(os);
+ DescriptorArray::cast(*this).DescriptorArrayPrint(os);
break;
case TRANSITION_ARRAY_TYPE:
- TransitionArray::cast(*this)->TransitionArrayPrint(os);
+ TransitionArray::cast(*this).TransitionArrayPrint(os);
break;
case FEEDBACK_CELL_TYPE:
- FeedbackCell::cast(*this)->FeedbackCellPrint(os);
+ FeedbackCell::cast(*this).FeedbackCellPrint(os);
break;
case CLOSURE_FEEDBACK_CELL_ARRAY_TYPE:
- ClosureFeedbackCellArray::cast(*this)->ClosureFeedbackCellArrayPrint(os);
+ ClosureFeedbackCellArray::cast(*this).ClosureFeedbackCellArrayPrint(os);
break;
case FEEDBACK_VECTOR_TYPE:
- FeedbackVector::cast(*this)->FeedbackVectorPrint(os);
+ FeedbackVector::cast(*this).FeedbackVectorPrint(os);
break;
case FREE_SPACE_TYPE:
- FreeSpace::cast(*this)->FreeSpacePrint(os);
+ FreeSpace::cast(*this).FreeSpacePrint(os);
break;
-#define PRINT_FIXED_TYPED_ARRAY(Type, type, TYPE, ctype) \
- case Fixed##Type##Array::kInstanceType: \
- Fixed##Type##Array::cast(*this)->FixedTypedArrayPrint(os); \
+#define PRINT_FIXED_TYPED_ARRAY(Type, type, TYPE, ctype) \
+ case Fixed##Type##Array::kInstanceType: \
+ Fixed##Type##Array::cast(*this).FixedTypedArrayPrint(os); \
break;
TYPED_ARRAYS(PRINT_FIXED_TYPED_ARRAY)
@@ -231,205 +231,205 @@
case JS_ERROR_TYPE:
// TODO(titzer): debug printing for more wasm objects
case WASM_EXCEPTION_TYPE:
- JSObject::cast(*this)->JSObjectPrint(os);
+ JSObject::cast(*this).JSObjectPrint(os);
break;
case WASM_MODULE_TYPE:
- WasmModuleObject::cast(*this)->WasmModuleObjectPrint(os);
+ WasmModuleObject::cast(*this).WasmModuleObjectPrint(os);
break;
case WASM_MEMORY_TYPE:
- WasmMemoryObject::cast(*this)->WasmMemoryObjectPrint(os);
+ WasmMemoryObject::cast(*this).WasmMemoryObjectPrint(os);
break;
case WASM_TABLE_TYPE:
- WasmTableObject::cast(*this)->WasmTableObjectPrint(os);
+ WasmTableObject::cast(*this).WasmTableObjectPrint(os);
break;
case WASM_GLOBAL_TYPE:
- WasmGlobalObject::cast(*this)->WasmGlobalObjectPrint(os);
+ WasmGlobalObject::cast(*this).WasmGlobalObjectPrint(os);
break;
case WASM_INSTANCE_TYPE:
- WasmInstanceObject::cast(*this)->WasmInstanceObjectPrint(os);
+ WasmInstanceObject::cast(*this).WasmInstanceObjectPrint(os);
break;
case JS_GENERATOR_OBJECT_TYPE:
- JSGeneratorObject::cast(*this)->JSGeneratorObjectPrint(os);
+ JSGeneratorObject::cast(*this).JSGeneratorObjectPrint(os);
break;
case JS_PROMISE_TYPE:
- JSPromise::cast(*this)->JSPromisePrint(os);
+ JSPromise::cast(*this).JSPromisePrint(os);
break;
case JS_ARRAY_TYPE:
- JSArray::cast(*this)->JSArrayPrint(os);
+ JSArray::cast(*this).JSArrayPrint(os);
break;
case JS_REGEXP_TYPE:
- JSRegExp::cast(*this)->JSRegExpPrint(os);
+ JSRegExp::cast(*this).JSRegExpPrint(os);
break;
case JS_REGEXP_STRING_ITERATOR_TYPE:
- JSRegExpStringIterator::cast(*this)->JSRegExpStringIteratorPrint(os);
+ JSRegExpStringIterator::cast(*this).JSRegExpStringIteratorPrint(os);
break;
case ODDBALL_TYPE:
- Oddball::cast(*this)->to_string()->Print(os);
+ Oddball::cast(*this).to_string().Print(os);
break;
case JS_BOUND_FUNCTION_TYPE:
- JSBoundFunction::cast(*this)->JSBoundFunctionPrint(os);
+ JSBoundFunction::cast(*this).JSBoundFunctionPrint(os);
break;
case JS_FUNCTION_TYPE:
- JSFunction::cast(*this)->JSFunctionPrint(os);
+ JSFunction::cast(*this).JSFunctionPrint(os);
break;
case JS_GLOBAL_PROXY_TYPE:
- JSGlobalProxy::cast(*this)->JSGlobalProxyPrint(os);
+ JSGlobalProxy::cast(*this).JSGlobalProxyPrint(os);
break;
case JS_GLOBAL_OBJECT_TYPE:
- JSGlobalObject::cast(*this)->JSGlobalObjectPrint(os);
+ JSGlobalObject::cast(*this).JSGlobalObjectPrint(os);
break;
case JS_VALUE_TYPE:
- JSValue::cast(*this)->JSValuePrint(os);
+ JSValue::cast(*this).JSValuePrint(os);
break;
case JS_DATE_TYPE:
- JSDate::cast(*this)->JSDatePrint(os);
+ JSDate::cast(*this).JSDatePrint(os);
break;
case CODE_TYPE:
- Code::cast(*this)->CodePrint(os);
+ Code::cast(*this).CodePrint(os);
break;
case CODE_DATA_CONTAINER_TYPE:
- CodeDataContainer::cast(*this)->CodeDataContainerPrint(os);
+ CodeDataContainer::cast(*this).CodeDataContainerPrint(os);
break;
case JS_PROXY_TYPE:
- JSProxy::cast(*this)->JSProxyPrint(os);
+ JSProxy::cast(*this).JSProxyPrint(os);
break;
case JS_SET_TYPE:
- JSSet::cast(*this)->JSSetPrint(os);
+ JSSet::cast(*this).JSSetPrint(os);
break;
case JS_MAP_TYPE:
- JSMap::cast(*this)->JSMapPrint(os);
+ JSMap::cast(*this).JSMapPrint(os);
break;
case JS_SET_KEY_VALUE_ITERATOR_TYPE:
case JS_SET_VALUE_ITERATOR_TYPE:
- JSSetIterator::cast(*this)->JSSetIteratorPrint(os);
+ JSSetIterator::cast(*this).JSSetIteratorPrint(os);
break;
case JS_MAP_KEY_ITERATOR_TYPE:
case JS_MAP_KEY_VALUE_ITERATOR_TYPE:
case JS_MAP_VALUE_ITERATOR_TYPE:
- JSMapIterator::cast(*this)->JSMapIteratorPrint(os);
+ JSMapIterator::cast(*this).JSMapIteratorPrint(os);
break;
case WEAK_CELL_TYPE:
- WeakCell::cast(*this)->WeakCellPrint(os);
+ WeakCell::cast(*this).WeakCellPrint(os);
break;
case JS_WEAK_REF_TYPE:
- JSWeakRef::cast(*this)->JSWeakRefPrint(os);
+ JSWeakRef::cast(*this).JSWeakRefPrint(os);
break;
case JS_FINALIZATION_GROUP_TYPE:
- JSFinalizationGroup::cast(*this)->JSFinalizationGroupPrint(os);
+ JSFinalizationGroup::cast(*this).JSFinalizationGroupPrint(os);
break;
case JS_FINALIZATION_GROUP_CLEANUP_ITERATOR_TYPE:
JSFinalizationGroupCleanupIterator::cast(*this)
- ->JSFinalizationGroupCleanupIteratorPrint(os);
+ .JSFinalizationGroupCleanupIteratorPrint(os);
break;
case JS_WEAK_MAP_TYPE:
- JSWeakMap::cast(*this)->JSWeakMapPrint(os);
+ JSWeakMap::cast(*this).JSWeakMapPrint(os);
break;
case JS_WEAK_SET_TYPE:
- JSWeakSet::cast(*this)->JSWeakSetPrint(os);
+ JSWeakSet::cast(*this).JSWeakSetPrint(os);
break;
case JS_MODULE_NAMESPACE_TYPE:
- JSModuleNamespace::cast(*this)->JSModuleNamespacePrint(os);
+ JSModuleNamespace::cast(*this).JSModuleNamespacePrint(os);
break;
case FOREIGN_TYPE:
- Foreign::cast(*this)->ForeignPrint(os);
+ Foreign::cast(*this).ForeignPrint(os);
break;
case CALL_HANDLER_INFO_TYPE:
- CallHandlerInfo::cast(*this)->CallHandlerInfoPrint(os);
+ CallHandlerInfo::cast(*this).CallHandlerInfoPrint(os);
break;
case PREPARSE_DATA_TYPE:
- PreparseData::cast(*this)->PreparseDataPrint(os);
+ PreparseData::cast(*this).PreparseDataPrint(os);
break;
case UNCOMPILED_DATA_WITHOUT_PREPARSE_DATA_TYPE:
UncompiledDataWithoutPreparseData::cast(*this)
- ->UncompiledDataWithoutPreparseDataPrint(os);
+ .UncompiledDataWithoutPreparseDataPrint(os);
break;
case UNCOMPILED_DATA_WITH_PREPARSE_DATA_TYPE:
UncompiledDataWithPreparseData::cast(*this)
- ->UncompiledDataWithPreparseDataPrint(os);
+ .UncompiledDataWithPreparseDataPrint(os);
break;
case SHARED_FUNCTION_INFO_TYPE:
- SharedFunctionInfo::cast(*this)->SharedFunctionInfoPrint(os);
+ SharedFunctionInfo::cast(*this).SharedFunctionInfoPrint(os);
break;
case JS_MESSAGE_OBJECT_TYPE:
- JSMessageObject::cast(*this)->JSMessageObjectPrint(os);
+ JSMessageObject::cast(*this).JSMessageObjectPrint(os);
break;
case CELL_TYPE:
- Cell::cast(*this)->CellPrint(os);
+ Cell::cast(*this).CellPrint(os);
break;
case PROPERTY_CELL_TYPE:
- PropertyCell::cast(*this)->PropertyCellPrint(os);
+ PropertyCell::cast(*this).PropertyCellPrint(os);
break;
case JS_ARRAY_BUFFER_TYPE:
- JSArrayBuffer::cast(*this)->JSArrayBufferPrint(os);
+ JSArrayBuffer::cast(*this).JSArrayBufferPrint(os);
break;
case JS_ARRAY_ITERATOR_TYPE:
- JSArrayIterator::cast(*this)->JSArrayIteratorPrint(os);
+ JSArrayIterator::cast(*this).JSArrayIteratorPrint(os);
break;
case JS_TYPED_ARRAY_TYPE:
- JSTypedArray::cast(*this)->JSTypedArrayPrint(os);
+ JSTypedArray::cast(*this).JSTypedArrayPrint(os);
break;
case JS_DATA_VIEW_TYPE:
- JSDataView::cast(*this)->JSDataViewPrint(os);
+ JSDataView::cast(*this).JSDataViewPrint(os);
break;
#ifdef V8_INTL_SUPPORT
case JS_INTL_V8_BREAK_ITERATOR_TYPE:
- JSV8BreakIterator::cast(*this)->JSV8BreakIteratorPrint(os);
+ JSV8BreakIterator::cast(*this).JSV8BreakIteratorPrint(os);
break;
case JS_INTL_COLLATOR_TYPE:
- JSCollator::cast(*this)->JSCollatorPrint(os);
+ JSCollator::cast(*this).JSCollatorPrint(os);
break;
case JS_INTL_DATE_TIME_FORMAT_TYPE:
- JSDateTimeFormat::cast(*this)->JSDateTimeFormatPrint(os);
+ JSDateTimeFormat::cast(*this).JSDateTimeFormatPrint(os);
break;
case JS_INTL_LIST_FORMAT_TYPE:
- JSListFormat::cast(*this)->JSListFormatPrint(os);
+ JSListFormat::cast(*this).JSListFormatPrint(os);
break;
case JS_INTL_LOCALE_TYPE:
- JSLocale::cast(*this)->JSLocalePrint(os);
+ JSLocale::cast(*this).JSLocalePrint(os);
break;
case JS_INTL_NUMBER_FORMAT_TYPE:
- JSNumberFormat::cast(*this)->JSNumberFormatPrint(os);
+ JSNumberFormat::cast(*this).JSNumberFormatPrint(os);
break;
case JS_INTL_PLURAL_RULES_TYPE:
- JSPluralRules::cast(*this)->JSPluralRulesPrint(os);
+ JSPluralRules::cast(*this).JSPluralRulesPrint(os);
break;
case JS_INTL_RELATIVE_TIME_FORMAT_TYPE:
- JSRelativeTimeFormat::cast(*this)->JSRelativeTimeFormatPrint(os);
+ JSRelativeTimeFormat::cast(*this).JSRelativeTimeFormatPrint(os);
break;
case JS_INTL_SEGMENT_ITERATOR_TYPE:
- JSSegmentIterator::cast(*this)->JSSegmentIteratorPrint(os);
+ JSSegmentIterator::cast(*this).JSSegmentIteratorPrint(os);
break;
case JS_INTL_SEGMENTER_TYPE:
- JSSegmenter::cast(*this)->JSSegmenterPrint(os);
+ JSSegmenter::cast(*this).JSSegmenterPrint(os);
break;
#endif // V8_INTL_SUPPORT
#define MAKE_STRUCT_CASE(TYPE, Name, name) \
case TYPE: \
- Name::cast(*this)->Name##Print(os); \
+ Name::cast(*this).Name##Print(os); \
break;
STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
case ALLOCATION_SITE_TYPE:
- AllocationSite::cast(*this)->AllocationSitePrint(os);
+ AllocationSite::cast(*this).AllocationSitePrint(os);
break;
case LOAD_HANDLER_TYPE:
- LoadHandler::cast(*this)->LoadHandlerPrint(os);
+ LoadHandler::cast(*this).LoadHandlerPrint(os);
break;
case STORE_HANDLER_TYPE:
- StoreHandler::cast(*this)->StoreHandlerPrint(os);
+ StoreHandler::cast(*this).StoreHandlerPrint(os);
break;
case SCOPE_INFO_TYPE:
- ScopeInfo::cast(*this)->ScopeInfoPrint(os);
+ ScopeInfo::cast(*this).ScopeInfoPrint(os);
break;
case FEEDBACK_METADATA_TYPE:
- FeedbackMetadata::cast(*this)->FeedbackMetadataPrint(os);
+ FeedbackMetadata::cast(*this).FeedbackMetadataPrint(os);
break;
case WEAK_FIXED_ARRAY_TYPE:
- WeakFixedArray::cast(*this)->WeakFixedArrayPrint(os);
+ WeakFixedArray::cast(*this).WeakFixedArrayPrint(os);
break;
case WEAK_ARRAY_LIST_TYPE:
- WeakArrayList::cast(*this)->WeakArrayListPrint(os);
+ WeakArrayList::cast(*this).WeakArrayListPrint(os);
break;
case INTERNALIZED_STRING_TYPE:
case EXTERNAL_INTERNALIZED_STRING_TYPE:
@@ -455,7 +455,7 @@
case JS_ASYNC_FROM_SYNC_ITERATOR_TYPE:
case JS_STRING_ITERATOR_TYPE:
// TODO(all): Handle these types too.
- os << "UNKNOWN TYPE " << map()->instance_type();
+ os << "UNKNOWN TYPE " << map().instance_type();
UNREACHABLE();
}
}
@@ -493,14 +493,14 @@
bool JSObject::PrintProperties(std::ostream& os) { // NOLINT
if (HasFastProperties()) {
- DescriptorArray descs = map()->instance_descriptors();
- int nof_inobject_properties = map()->GetInObjectProperties();
+ DescriptorArray descs = map().instance_descriptors();
+ int nof_inobject_properties = map().GetInObjectProperties();
int i = 0;
- for (; i < map()->NumberOfOwnDescriptors(); i++) {
+ for (; i < map().NumberOfOwnDescriptors(); i++) {
os << "\n ";
- descs->GetKey(i)->NamePrint(os);
+ descs.GetKey(i).NamePrint(os);
os << ": ";
- PropertyDetails details = descs->GetDetails(i);
+ PropertyDetails details = descs.GetDetails(i);
switch (details.location()) {
case kField: {
FieldIndex field_index = FieldIndex::ForDescriptor(map(), i);
@@ -512,7 +512,7 @@
break;
}
case kDescriptor:
- os << Brief(descs->GetStrongValue(i));
+ os << Brief(descs.GetStrongValue(i));
break;
}
os << " ";
@@ -526,9 +526,9 @@
}
return i > 0;
} else if (IsJSGlobalObject()) {
- JSGlobalObject::cast(*this)->global_dictionary()->Print(os);
+ JSGlobalObject::cast(*this).global_dictionary().Print(os);
} else {
- property_dictionary()->Print(os);
+ property_dictionary().Print(os);
}
return true;
}
@@ -542,7 +542,7 @@
template <>
bool IsTheHoleAt(FixedDoubleArray array, int index) {
- return array->is_the_hole(index);
+ return array.is_the_hole(index);
}
template <class T>
@@ -550,7 +550,7 @@
if (IsTheHoleAt(array, index)) {
return std::numeric_limits<double>::quiet_NaN();
}
- return array->get_scalar(index);
+ return array.get_scalar(index);
}
template <class T>
@@ -589,13 +589,13 @@
template <typename T>
void PrintFixedArrayElements(std::ostream& os, T array) {
// Print in array notation for non-sparse arrays.
- Object previous_value = array->length() > 0 ? array->get(0) : Object();
+ Object previous_value = array.length() > 0 ? array.get(0) : Object();
Object value;
int previous_index = 0;
int i;
- for (i = 1; i <= array->length(); i++) {
- if (i < array->length()) value = array->get(i);
- if (previous_value == value && i != array->length()) {
+ for (i = 1; i <= array.length(); i++) {
+ if (i < array.length()) value = array.get(i);
+ if (previous_value == value && i != array.length()) {
continue;
}
os << "\n";
@@ -613,35 +613,35 @@
void PrintDictionaryElements(std::ostream& os, FixedArrayBase elements) {
// Print some internal fields
NumberDictionary dict = NumberDictionary::cast(elements);
- if (dict->requires_slow_elements()) {
+ if (dict.requires_slow_elements()) {
os << "\n - requires_slow_elements";
} else {
- os << "\n - max_number_key: " << dict->max_number_key();
+ os << "\n - max_number_key: " << dict.max_number_key();
}
- dict->Print(os);
+ dict.Print(os);
}
void PrintSloppyArgumentElements(std::ostream& os, ElementsKind kind,
SloppyArgumentsElements elements) {
- FixedArray arguments_store = elements->arguments();
- os << "\n 0: context: " << Brief(elements->context())
+ FixedArray arguments_store = elements.arguments();
+ os << "\n 0: context: " << Brief(elements.context())
<< "\n 1: arguments_store: " << Brief(arguments_store)
<< "\n parameter to context slot map:";
- for (uint32_t i = 0; i < elements->parameter_map_length(); i++) {
+ for (uint32_t i = 0; i < elements.parameter_map_length(); i++) {
uint32_t raw_index = i + SloppyArgumentsElements::kParameterMapStart;
- Object mapped_entry = elements->get_mapped_entry(i);
+ Object mapped_entry = elements.get_mapped_entry(i);
os << "\n " << raw_index << ": param(" << i
<< "): " << Brief(mapped_entry);
- if (mapped_entry->IsTheHole()) {
+ if (mapped_entry.IsTheHole()) {
os << " in the arguments_store[" << i << "]";
} else {
os << " in the context";
}
}
- if (arguments_store->length() == 0) return;
+ if (arguments_store.length() == 0) return;
os << "\n }"
<< "\n - arguments_store: " << Brief(arguments_store) << " "
- << ElementsKindToString(arguments_store->map()->elements_kind()) << " {";
+ << ElementsKindToString(arguments_store.map().elements_kind()) << " {";
if (kind == FAST_SLOPPY_ARGUMENTS_ELEMENTS) {
PrintFixedArrayElements(os, arguments_store);
} else {
@@ -666,7 +666,7 @@
// Don't call GetElementsKind, its validation code can cause the printer to
// fail when debugging.
os << " - elements: " << Brief(elements()) << " {";
- switch (map()->elements_kind()) {
+ switch (map().elements_kind()) {
case HOLEY_SMI_ELEMENTS:
case PACKED_SMI_ELEMENTS:
case HOLEY_ELEMENTS:
@@ -681,16 +681,16 @@
}
case HOLEY_DOUBLE_ELEMENTS:
case PACKED_DOUBLE_ELEMENTS: {
- DoPrintElements<FixedDoubleArray>(os, elements(), elements()->length());
+ DoPrintElements<FixedDoubleArray>(os, elements(), elements().length());
break;
}
// TODO(bmeurer, v8:4153): Change this to size_t later.
-#define PRINT_ELEMENTS(Type, type, TYPE, elementType) \
- case TYPE##_ELEMENTS: { \
- int length = static_cast<int>(JSTypedArray::cast(*this)->length()); \
- DoPrintElements<Fixed##Type##Array>(os, elements(), length); \
- break; \
+#define PRINT_ELEMENTS(Type, type, TYPE, elementType) \
+ case TYPE##_ELEMENTS: { \
+ int length = static_cast<int>(JSTypedArray::cast(*this).length()); \
+ DoPrintElements<Fixed##Type##Array>(os, elements(), length); \
+ break; \
}
TYPED_ARRAYS(PRINT_ELEMENTS)
#undef PRINT_ELEMENTS
@@ -701,7 +701,7 @@
break;
case FAST_SLOPPY_ARGUMENTS_ELEMENTS:
case SLOW_SLOPPY_ARGUMENTS_ELEMENTS:
- PrintSloppyArgumentElements(os, map()->elements_kind(),
+ PrintSloppyArgumentElements(os, map().elements_kind(),
SloppyArgumentsElements::cast(elements()));
break;
case NO_ELEMENTS:
@@ -712,28 +712,28 @@
static void JSObjectPrintHeader(std::ostream& os, JSObject obj,
const char* id) { // NOLINT
- Isolate* isolate = obj->GetIsolate();
- obj->PrintHeader(os, id);
+ Isolate* isolate = obj.GetIsolate();
+ obj.PrintHeader(os, id);
// Don't call GetElementsKind, its validation code can cause the printer to
// fail when debugging.
os << " [";
- if (obj->HasFastProperties()) {
+ if (obj.HasFastProperties()) {
os << "FastProperties";
} else {
os << "DictionaryProperties";
}
PrototypeIterator iter(isolate, obj);
os << "]\n - prototype: " << Brief(iter.GetCurrent());
- os << "\n - elements: " << Brief(obj->elements()) << " ["
- << ElementsKindToString(obj->map()->elements_kind());
- if (obj->elements()->IsCowArray()) os << " (COW)";
+ os << "\n - elements: " << Brief(obj.elements()) << " ["
+ << ElementsKindToString(obj.map().elements_kind());
+ if (obj.elements().IsCowArray()) os << " (COW)";
os << "]";
- Object hash = obj->GetHash();
- if (hash->IsSmi()) {
+ Object hash = obj.GetHash();
+ if (hash.IsSmi()) {
os << "\n - hash: " << Brief(hash);
}
- if (obj->GetEmbedderFieldCount() > 0) {
- os << "\n - embedder fields: " << obj->GetEmbedderFieldCount();
+ if (obj.GetEmbedderFieldCount() > 0) {
+ os << "\n - embedder fields: " << obj.GetEmbedderFieldCount();
}
}
@@ -741,21 +741,21 @@
JSObject obj, // NOLINT
bool print_elements = true) {
os << "\n - properties: ";
- Object properties_or_hash = obj->raw_properties_or_hash();
- if (!properties_or_hash->IsSmi()) {
+ Object properties_or_hash = obj.raw_properties_or_hash();
+ if (!properties_or_hash.IsSmi()) {
os << Brief(properties_or_hash);
}
os << " {";
- if (obj->PrintProperties(os)) os << "\n ";
+ if (obj.PrintProperties(os)) os << "\n ";
os << "}\n";
if (print_elements) {
- int length = obj->HasFixedTypedArrayElements()
- ? FixedTypedArrayBase::cast(obj->elements())
- ->number_of_elements_onheap_only()
- : obj->elements()->length();
- if (length > 0) obj->PrintElements(os);
+ int length = obj.HasFixedTypedArrayElements()
+ ? FixedTypedArrayBase::cast(obj.elements())
+ .number_of_elements_onheap_only()
+ : obj.elements().length();
+ if (length > 0) obj.PrintElements(os);
}
- int embedder_fields = obj->GetEmbedderFieldCount();
+ int embedder_fields = obj.GetEmbedderFieldCount();
if (embedder_fields > 0) {
os << " - embedder fields = {";
for (int i = 0; i < embedder_fields; i++) {
@@ -801,23 +801,23 @@
if (is_suspended()) os << " (suspended)";
if (is_suspended()) {
DisallowHeapAllocation no_gc;
- SharedFunctionInfo fun_info = function()->shared();
- if (fun_info->HasSourceCode()) {
- Script script = Script::cast(fun_info->script());
- String script_name = script->name()->IsString()
- ? String::cast(script->name())
+ SharedFunctionInfo fun_info = function().shared();
+ if (fun_info.HasSourceCode()) {
+ Script script = Script::cast(fun_info.script());
+ String script_name = script.name().IsString()
+ ? String::cast(script.name())
: GetReadOnlyRoots().empty_string();
os << "\n - source position: ";
// Can't collect source positions here if not available as that would
// allocate memory.
- if (fun_info->HasBytecodeArray() &&
- fun_info->GetBytecodeArray()->HasSourcePositionTable()) {
+ if (fun_info.HasBytecodeArray() &&
+ fun_info.GetBytecodeArray().HasSourcePositionTable()) {
os << source_position();
os << " (";
- script_name->PrintUC16(os);
- int lin = script->GetLineNumber(source_position()) + 1;
- int col = script->GetColumnNumber(source_position()) + 1;
+ script_name.PrintUC16(os);
+ int lin = script.GetLineNumber(source_position()) + 1;
+ int col = script.GetColumnNumber(source_position()) + 1;
os << ", lin " << lin;
os << ", col " << col;
} else {
@@ -870,7 +870,7 @@
PrintHeader(os, "Symbol");
os << "\n - hash: " << Hash();
os << "\n - name: " << Brief(name());
- if (name()->IsUndefined()) {
+ if (name().IsUndefined()) {
os << " (" << PrivateSymbolToName() << ")";
}
os << "\n - private: " << is_private();
@@ -879,12 +879,12 @@
void DescriptorArray::DescriptorArrayPrint(std::ostream& os) {
PrintHeader(os, "DescriptorArray");
os << "\n - enum_cache: ";
- if (enum_cache()->keys()->length() == 0) {
+ if (enum_cache().keys().length() == 0) {
os << "empty";
} else {
- os << enum_cache()->keys()->length();
- os << "\n - keys: " << Brief(enum_cache()->keys());
- os << "\n - indices: " << Brief(enum_cache()->indices());
+ os << enum_cache().keys().length();
+ os << "\n - keys: " << Brief(enum_cache().keys());
+ os << "\n - indices: " << Brief(enum_cache().indices());
}
os << "\n - nof slack descriptors: " << number_of_slack_descriptors();
os << "\n - nof descriptors: " << number_of_descriptors();
@@ -904,25 +904,25 @@
namespace {
void PrintFixedArrayWithHeader(std::ostream& os, FixedArray array,
const char* type) {
- array->PrintHeader(os, type);
- os << "\n - length: " << array->length();
+ array.PrintHeader(os, type);
+ os << "\n - length: " << array.length();
PrintFixedArrayElements(os, array);
os << "\n";
}
template <typename T>
void PrintHashTableWithHeader(std::ostream& os, T table, const char* type) {
- table->PrintHeader(os, type);
- os << "\n - length: " << table->length();
- os << "\n - elements: " << table->NumberOfElements();
- os << "\n - deleted: " << table->NumberOfDeletedElements();
- os << "\n - capacity: " << table->Capacity();
+ table.PrintHeader(os, type);
+ os << "\n - length: " << table.length();
+ os << "\n - elements: " << table.NumberOfElements();
+ os << "\n - deleted: " << table.NumberOfDeletedElements();
+ os << "\n - capacity: " << table.Capacity();
os << "\n - elements: {";
- for (int i = 0; i < table->Capacity(); i++) {
+ for (int i = 0; i < table.Capacity(); i++) {
os << '\n'
- << std::setw(12) << i << ": " << Brief(table->KeyAt(i)) << " -> "
- << Brief(table->ValueAt(i));
+ << std::setw(12) << i << ": " << Brief(table.KeyAt(i)) << " -> "
+ << Brief(table.ValueAt(i));
}
os << "\n }\n";
}
@@ -973,12 +973,12 @@
namespace {
void PrintContextWithHeader(std::ostream& os, Context context,
const char* type) {
- context->PrintHeader(os, type);
- os << "\n - length: " << context->length();
- os << "\n - scope_info: " << Brief(context->scope_info());
- os << "\n - previous: " << Brief(context->unchecked_previous());
- os << "\n - extension: " << Brief(context->extension());
- os << "\n - native_context: " << Brief(context->native_context());
+ context.PrintHeader(os, type);
+ os << "\n - length: " << context.length();
+ os << "\n - scope_info: " << Brief(context.scope_info());
+ os << "\n - previous: " << Brief(context.unchecked_previous());
+ os << "\n - extension: " << Brief(context.extension());
+ os << "\n - native_context: " << Brief(context.native_context());
PrintFixedArrayElements(os, context);
os << "\n";
}
@@ -1240,7 +1240,7 @@
void Name::NamePrint(std::ostream& os) { // NOLINT
if (IsString()) {
- String::cast(*this)->StringPrint(os);
+ String::cast(*this).StringPrint(os);
} else {
os << Brief(*this);
}
@@ -1252,19 +1252,19 @@
void JSDate::JSDatePrint(std::ostream& os) { // NOLINT
JSObjectPrintHeader(os, *this, "JSDate");
os << "\n - value: " << Brief(value());
- if (!year()->IsSmi()) {
+ if (!year().IsSmi()) {
os << "\n - time = NaN\n";
} else {
// TODO(svenpanne) Add some basic formatting to our streams.
ScopedVector<char> buf(100);
SNPrintF(buf, "\n - time = %s %04d/%02d/%02d %02d:%02d:%02d\n",
- weekdays[weekday()->IsSmi() ? Smi::ToInt(weekday()) + 1 : 0],
- year()->IsSmi() ? Smi::ToInt(year()) : -1,
- month()->IsSmi() ? Smi::ToInt(month()) : -1,
- day()->IsSmi() ? Smi::ToInt(day()) : -1,
- hour()->IsSmi() ? Smi::ToInt(hour()) : -1,
- min()->IsSmi() ? Smi::ToInt(min()) : -1,
- sec()->IsSmi() ? Smi::ToInt(sec()) : -1);
+ weekdays[weekday().IsSmi() ? Smi::ToInt(weekday()) + 1 : 0],
+ year().IsSmi() ? Smi::ToInt(year()) : -1,
+ month().IsSmi() ? Smi::ToInt(month()) : -1,
+ day().IsSmi() ? Smi::ToInt(day()) : -1,
+ hour().IsSmi() ? Smi::ToInt(hour()) : -1,
+ min().IsSmi() ? Smi::ToInt(min()) : -1,
+ sec().IsSmi() ? Smi::ToInt(sec()) : -1);
os << buf.begin();
}
JSObjectPrintBody(os, *this);
@@ -1273,9 +1273,9 @@
void JSProxy::JSProxyPrint(std::ostream& os) { // NOLINT
PrintHeader(os, "JSProxy");
os << "\n - target: ";
- target()->ShortPrint(os);
+ target().ShortPrint(os);
os << "\n - handler: ";
- handler()->ShortPrint(os);
+ handler().ShortPrint(os);
os << "\n";
}
@@ -1378,7 +1378,7 @@
os << "\n - byte_offset: " << byte_offset();
os << "\n - byte_length: " << byte_length();
os << "\n - length: " << length();
- if (!buffer()->IsJSArrayBuffer()) {
+ if (!buffer().IsJSArrayBuffer()) {
os << "\n <invalid buffer>\n";
return;
}
@@ -1399,7 +1399,7 @@
os << "\n - buffer =" << Brief(buffer());
os << "\n - byte_offset: " << byte_offset();
os << "\n - byte_length: " << byte_length();
- if (!buffer()->IsJSArrayBuffer()) {
+ if (!buffer().IsJSArrayBuffer()) {
os << "\n <invalid buffer>";
return;
}
@@ -1422,7 +1422,7 @@
if (has_prototype_slot()) {
if (has_prototype()) {
os << Brief(prototype());
- if (map()->has_non_instance_prototype()) {
+ if (map().has_non_instance_prototype()) {
os << " (non-instance prototype)";
}
}
@@ -1432,41 +1432,41 @@
os << "<no-prototype-slot>";
}
os << "\n - shared_info: " << Brief(shared());
- os << "\n - name: " << Brief(shared()->Name());
+ os << "\n - name: " << Brief(shared().Name());
// Print Builtin name for builtin functions
- int builtin_index = code()->builtin_index();
+ int builtin_index = code().builtin_index();
if (Builtins::IsBuiltinId(builtin_index) && !IsInterpreted()) {
os << "\n - builtin: " << isolate->builtins()->name(builtin_index);
}
os << "\n - formal_parameter_count: "
- << shared()->internal_formal_parameter_count();
- if (shared()->is_safe_to_skip_arguments_adaptor()) {
+ << shared().internal_formal_parameter_count();
+ if (shared().is_safe_to_skip_arguments_adaptor()) {
os << "\n - safe_to_skip_arguments_adaptor";
}
- os << "\n - kind: " << shared()->kind();
+ os << "\n - kind: " << shared().kind();
os << "\n - context: " << Brief(context());
os << "\n - code: " << Brief(code());
if (IsInterpreted()) {
os << "\n - interpreted";
- if (shared()->HasBytecodeArray()) {
- os << "\n - bytecode: " << shared()->GetBytecodeArray();
+ if (shared().HasBytecodeArray()) {
+ os << "\n - bytecode: " << shared().GetBytecodeArray();
}
}
if (WasmExportedFunction::IsWasmExportedFunction(*this)) {
WasmExportedFunction function = WasmExportedFunction::cast(*this);
os << "\n - WASM instance "
- << reinterpret_cast<void*>(function->instance()->ptr());
- os << "\n - WASM function index " << function->function_index();
+ << reinterpret_cast<void*>(function.instance().ptr());
+ os << "\n - WASM function index " << function.function_index();
}
- shared()->PrintSourceCode(os);
+ shared().PrintSourceCode(os);
JSObjectPrintBody(os, *this);
os << "\n - feedback vector: ";
- if (!shared()->HasFeedbackMetadata()) {
+ if (!shared().HasFeedbackMetadata()) {
os << "feedback metadata is not available in SFI\n";
} else if (has_feedback_vector()) {
- feedback_vector()->FeedbackVectorPrint(os);
+ feedback_vector().FeedbackVectorPrint(os);
} else {
os << "not available\n";
}
@@ -1475,10 +1475,10 @@
void SharedFunctionInfo::PrintSourceCode(std::ostream& os) {
if (HasSourceCode()) {
os << "\n - source code: ";
- String source = String::cast(Script::cast(script())->source());
+ String source = String::cast(Script::cast(script()).source());
int start = StartPosition();
int length = EndPosition() - start;
- std::unique_ptr<char[]> source_string = source->ToCString(
+ std::unique_ptr<char[]> source_string = source.ToCString(
DISALLOW_NULLS, FAST_STRING_TRAVERSAL, start, length, nullptr);
os << source_string.get();
}
@@ -1534,7 +1534,7 @@
os << "\n - length: " << length();
os << "\n - feedback_metadata: ";
if (HasFeedbackMetadata()) {
- feedback_metadata()->FeedbackMetadataPrint(os);
+ feedback_metadata().FeedbackMetadataPrint(os);
} else {
os << "<none>";
}
@@ -1567,13 +1567,13 @@
void PropertyCell::PropertyCellPrint(std::ostream& os) { // NOLINT
PrintHeader(os, "PropertyCell");
os << "\n - name: ";
- name()->NamePrint(os);
+ name().NamePrint(os);
os << "\n - value: " << Brief(value());
os << "\n - details: ";
property_details().PrintAsSlowTo(os);
PropertyCellType cell_type = property_details().cell_type();
os << "\n - cell_type: ";
- if (value()->IsTheHole()) {
+ if (value().IsTheHole()) {
switch (cell_type) {
case PropertyCellType::kUninitialized:
os << "Uninitialized";
@@ -1743,7 +1743,7 @@
void Module::ModulePrint(std::ostream& os) { // NOLINT
PrintHeader(os, "Module");
- os << "\n - origin: " << Brief(script()->GetNameOrSourceURL());
+ os << "\n - origin: " << Brief(script().GetNameOrSourceURL());
os << "\n - code: " << Brief(code());
os << "\n - exports: " << Brief(exports());
os << "\n - requested_modules: " << Brief(requested_modules());
@@ -1790,7 +1790,7 @@
os << "\n - native module: " << Brief(managed_native_module());
os << "\n - export_wrappers: " << Brief(export_wrappers());
os << "\n - offset table: " << Brief(asm_js_offset_table());
- os << "\n - uses bitset: " << uses_bitset()->value();
+ os << "\n - uses bitset: " << uses_bitset().value();
os << "\n";
}
@@ -1827,8 +1827,8 @@
if (has_debug_info()) {
os << "\n - debug_info: " << Brief(debug_info());
}
- for (int i = 0; i < tables()->length(); i++) {
- os << "\n - table " << i << ": " << Brief(tables()->get(i));
+ for (int i = 0; i < tables().length(); i++) {
+ os << "\n - table " << i << ": " << Brief(tables().get(i));
}
os << "\n - imported_function_refs: " << Brief(imported_function_refs());
if (has_indirect_function_table_refs()) {
@@ -2061,7 +2061,7 @@
if (!PointsToLiteral()) {
ElementsKind kind = GetElementsKind();
os << "Array allocation with ElementsKind " << ElementsKindToString(kind);
- } else if (boilerplate()->IsJSArray()) {
+ } else if (boilerplate().IsJSArray()) {
os << "Array literal with boilerplate " << Brief(boilerplate());
} else {
os << "Object literal with boilerplate " << Brief(boilerplate());
@@ -2073,7 +2073,7 @@
PrintHeader(os, "AllocationMemento");
os << "\n - allocation site: ";
if (IsValid()) {
- GetAllocationSite()->AllocationSitePrint(os);
+ GetAllocationSite().AllocationSitePrint(os);
} else {
os << "<invalid>\n";
}
@@ -2205,7 +2205,7 @@
os << " {\n";
for (int i = nof_internal_slots; start < end; ++i, ++start) {
os << " - " << i << ": ";
- String::cast(scope_info->get(start))->ShortPrint(os);
+ String::cast(scope_info.get(start)).ShortPrint(os);
os << "\n";
}
os << " }";
@@ -2235,7 +2235,7 @@
if (HasFunctionName()) {
os << "\n - function name(" << FunctionVariableField::decode(flags)
<< "): ";
- FunctionName()->ShortPrint(os);
+ FunctionName().ShortPrint(os);
}
if (IsAsmModule()) os << "\n - asm module";
if (HasSimpleParameters()) os << "\n - simple parameters";
@@ -2272,7 +2272,7 @@
os << "\n - original bytecode array: " << Brief(original_bytecode_array());
os << "\n - debug bytecode array: " << Brief(debug_bytecode_array());
os << "\n - break_points: ";
- break_points()->FixedArrayPrint(os);
+ break_points().FixedArrayPrint(os);
os << "\n - coverage_info: " << Brief(coverage_info());
}
@@ -2392,14 +2392,14 @@
Smi smi;
HeapObject heap_object;
if (ToSmi(&smi)) {
- smi->SmiPrint(os);
+ smi.SmiPrint(os);
} else if (IsCleared()) {
os << "[cleared]";
} else if (GetHeapObjectIfWeak(&heap_object)) {
os << "[weak] ";
- heap_object->HeapObjectPrint(os);
+ heap_object.HeapObjectPrint(os);
} else if (GetHeapObjectIfStrong(&heap_object)) {
- heap_object->HeapObjectPrint(os);
+ heap_object.HeapObjectPrint(os);
} else {
UNREACHABLE();
}
@@ -2416,14 +2416,14 @@
// TODO(cbruni): remove once the new maptracer is in place.
void Name::NameShortPrint() {
if (this->IsString()) {
- PrintF("%s", String::cast(*this)->ToCString().get());
+ PrintF("%s", String::cast(*this).ToCString().get());
} else {
DCHECK(this->IsSymbol());
Symbol s = Symbol::cast(*this);
- if (s->name()->IsUndefined()) {
- PrintF("#<%s>", s->PrivateSymbolToName());
+ if (s.name().IsUndefined()) {
+ PrintF("#<%s>", s.PrivateSymbolToName());
} else {
- PrintF("<%s>", String::cast(s->name())->ToCString().get());
+ PrintF("<%s>", String::cast(s.name()).ToCString().get());
}
}
}
@@ -2431,14 +2431,14 @@
// TODO(cbruni): remove once the new maptracer is in place.
int Name::NameShortPrint(Vector<char> str) {
if (this->IsString()) {
- return SNPrintF(str, "%s", String::cast(*this)->ToCString().get());
+ return SNPrintF(str, "%s", String::cast(*this).ToCString().get());
} else {
DCHECK(this->IsSymbol());
Symbol s = Symbol::cast(*this);
- if (s->name()->IsUndefined()) {
- return SNPrintF(str, "#<%s>", s->PrivateSymbolToName());
+ if (s.name().IsUndefined()) {
+ return SNPrintF(str, "#<%s>", s.PrivateSymbolToName());
} else {
- return SNPrintF(str, "<%s>", String::cast(s->name())->ToCString().get());
+ return SNPrintF(str, "<%s>", String::cast(s.name()).ToCString().get());
}
}
}
@@ -2446,7 +2446,7 @@
void Map::PrintMapDetails(std::ostream& os) {
DisallowHeapAllocation no_gc;
this->MapPrint(os);
- instance_descriptors()->PrintDescriptors(os);
+ instance_descriptors().PrintDescriptors(os);
}
void Map::MapPrint(std::ostream& os) { // NOLINT
@@ -2502,7 +2502,7 @@
<< Brief(instance_descriptors());
if (FLAG_unbox_double_fields) {
os << "\n - layout descriptor: ";
- layout_descriptor()->ShortPrint(os);
+ layout_descriptor().ShortPrint(os);
}
Isolate* isolate;
@@ -2538,9 +2538,9 @@
Name key = GetKey(i);
os << "\n [" << i << "]: ";
#ifdef OBJECT_PRINT
- key->NamePrint(os);
+ key.NamePrint(os);
#else
- key->ShortPrint(os);
+ key.ShortPrint(os);
#endif
os << " ";
PrintDescriptorDetails(os, i, PropertyDetails::kPrintFull);
@@ -2556,16 +2556,16 @@
switch (details.location()) {
case kField: {
FieldType field_type = GetFieldType(descriptor);
- field_type->PrintTo(os);
+ field_type.PrintTo(os);
break;
}
case kDescriptor:
Object value = GetStrongValue(descriptor);
os << Brief(value);
- if (value->IsAccessorPair()) {
+ if (value.IsAccessorPair()) {
AccessorPair pair = AccessorPair::cast(value);
- os << "(get: " << Brief(pair->getter())
- << ", set: " << Brief(pair->setter()) << ")";
+ os << "(get: " << Brief(pair.getter())
+ << ", set: " << Brief(pair.setter()) << ")";
}
break;
}
@@ -2591,12 +2591,12 @@
Map target) {
os << "\n ";
#ifdef OBJECT_PRINT
- key->NamePrint(os);
+ key.NamePrint(os);
#else
- key->ShortPrint(os);
+ key.ShortPrint(os);
#endif
os << ": ";
- ReadOnlyRoots roots = key->GetReadOnlyRoots();
+ ReadOnlyRoots roots = key.GetReadOnlyRoots();
if (key == roots.nonextensible_symbol()) {
os << "(transition to non-extensible)";
} else if (key == roots.sealed_symbol()) {
@@ -2604,17 +2604,17 @@
} else if (key == roots.frozen_symbol()) {
os << "(transition to frozen)";
} else if (key == roots.elements_transition_symbol()) {
- os << "(transition to " << ElementsKindToString(target->elements_kind())
+ os << "(transition to " << ElementsKindToString(target.elements_kind())
<< ")";
} else if (key == roots.strict_function_transition_symbol()) {
os << " (transition to strict function)";
} else {
DCHECK(!IsSpecialTransition(roots, key));
os << "(transition to ";
- int descriptor = target->LastAdded();
- DescriptorArray descriptors = target->instance_descriptors();
- descriptors->PrintDescriptorDetails(os, descriptor,
- PropertyDetails::kForTransitions);
+ int descriptor = target.LastAdded();
+ DescriptorArray descriptors = target.instance_descriptors();
+ descriptors.PrintDescriptorDetails(os, descriptor,
+ PropertyDetails::kForTransitions);
os << ")";
}
os << " -> " << Brief(target);
@@ -2644,7 +2644,7 @@
break;
}
case kFullTransitionArray:
- return transitions()->PrintInternal(os);
+ return transitions().PrintInternal(os);
}
}
@@ -2677,22 +2677,22 @@
} else if (key == roots.frozen_symbol()) {
os << "to frozen";
} else if (key == roots.elements_transition_symbol()) {
- os << "to " << ElementsKindToString(target->elements_kind());
+ os << "to " << ElementsKindToString(target.elements_kind());
} else if (key == roots.strict_function_transition_symbol()) {
os << "to strict function";
} else {
#ifdef OBJECT_PRINT
- key->NamePrint(os);
+ key.NamePrint(os);
#else
- key->ShortPrint(os);
+ key.ShortPrint(os);
#endif
os << " ";
DCHECK(!IsSpecialTransition(ReadOnlyRoots(isolate_), key));
os << "to ";
- int descriptor = target->LastAdded();
- DescriptorArray descriptors = target->instance_descriptors();
- descriptors->PrintDescriptorDetails(os, descriptor,
- PropertyDetails::kForTransitions);
+ int descriptor = target.LastAdded();
+ DescriptorArray descriptors = target.instance_descriptors();
+ descriptors.PrintDescriptorDetails(os, descriptor,
+ PropertyDetails::kForTransitions);
}
TransitionsAccessor transitions(isolate_, target, no_gc);
transitions.PrintTransitionTree(os, level + 1, no_gc);
@@ -2737,7 +2737,7 @@
}
V8_EXPORT_PRIVATE extern void _v8_internal_Print_Object(void* object) {
- GetObjectFromRaw(object)->Print();
+ GetObjectFromRaw(object).Print();
}
V8_EXPORT_PRIVATE extern void _v8_internal_Print_Code(void* object) {
@@ -2766,25 +2766,25 @@
}
i::Code code = isolate->FindCodeObject(address);
- if (!code->IsCode()) {
+ if (!code.IsCode()) {
i::PrintF("No code object found containing %p\n", object);
return;
}
#ifdef ENABLE_DISASSEMBLER
i::StdoutStream os;
- code->Disassemble(nullptr, os, address);
+ code.Disassemble(nullptr, os, address);
#else // ENABLE_DISASSEMBLER
- code->Print();
+ code.Print();
#endif // ENABLE_DISASSEMBLER
}
V8_EXPORT_PRIVATE extern void _v8_internal_Print_LayoutDescriptor(
void* object) {
i::Object o(GetObjectFromRaw(object));
- if (!o->IsLayoutDescriptor()) {
+ if (!o.IsLayoutDescriptor()) {
printf("Please provide a layout descriptor\n");
} else {
- i::LayoutDescriptor::cast(o)->Print();
+ i::LayoutDescriptor::cast(o).Print();
}
}
@@ -2795,7 +2795,7 @@
V8_EXPORT_PRIVATE extern void _v8_internal_Print_TransitionTree(void* object) {
i::Object o(GetObjectFromRaw(object));
- if (!o->IsMap()) {
+ if (!o.IsMap()) {
printf("Please provide a valid Map\n");
} else {
#if defined(DEBUG) || defined(OBJECT_PRINT)
diff --git a/src/diagnostics/perf-jit.cc b/src/diagnostics/perf-jit.cc
index 9e08e6f..1eb23e1 100644
--- a/src/diagnostics/perf-jit.cc
+++ b/src/diagnostics/perf-jit.cc
@@ -201,8 +201,8 @@
SharedFunctionInfo shared,
const char* name, int length) {
if (FLAG_perf_basic_prof_only_functions &&
- (abstract_code->kind() != AbstractCode::INTERPRETED_FUNCTION &&
- abstract_code->kind() != AbstractCode::OPTIMIZED_FUNCTION)) {
+ (abstract_code.kind() != AbstractCode::INTERPRETED_FUNCTION &&
+ abstract_code.kind() != AbstractCode::OPTIMIZED_FUNCTION)) {
return;
}
@@ -211,25 +211,25 @@
if (perf_output_handle_ == nullptr) return;
// We only support non-interpreted functions.
- if (!abstract_code->IsCode()) return;
- Code code = abstract_code->GetCode();
- DCHECK(code->raw_instruction_start() == code->address() + Code::kHeaderSize);
+ if (!abstract_code.IsCode()) return;
+ Code code = abstract_code.GetCode();
+ DCHECK(code.raw_instruction_start() == code.address() + Code::kHeaderSize);
// Debug info has to be emitted first.
if (FLAG_perf_prof && !shared.is_null()) {
// TODO(herhut): This currently breaks for js2wasm/wasm2js functions.
- if (code->kind() != Code::JS_TO_WASM_FUNCTION &&
- code->kind() != Code::WASM_TO_JS_FUNCTION) {
+ if (code.kind() != Code::JS_TO_WASM_FUNCTION &&
+ code.kind() != Code::WASM_TO_JS_FUNCTION) {
LogWriteDebugInfo(code, shared);
}
}
const char* code_name = name;
- uint8_t* code_pointer = reinterpret_cast<uint8_t*>(code->InstructionStart());
+ uint8_t* code_pointer = reinterpret_cast<uint8_t*>(code.InstructionStart());
// Code generated by Turbofan will have the safepoint table directly after
// instructions. There is no need to record the safepoint table itself.
- uint32_t code_size = code->ExecutableInstructionSize();
+ uint32_t code_size = code.ExecutableInstructionSize();
// Unwinding info comes right after debug info.
if (FLAG_perf_prof_unwinding_info) LogWriteUnwindingInfo(code);
@@ -281,11 +281,11 @@
size_t GetScriptNameLength(const SourcePositionInfo& info) {
if (!info.script.is_null()) {
Object name_or_url = info.script->GetNameOrSourceURL();
- if (name_or_url->IsString()) {
+ if (name_or_url.IsString()) {
String str = String::cast(name_or_url);
- if (str->IsOneByteRepresentation()) return str->length();
+ if (str.IsOneByteRepresentation()) return str.length();
int length;
- str->ToCString(DISALLOW_NULLS, FAST_STRING_TRAVERSAL, &length);
+ str.ToCString(DISALLOW_NULLS, FAST_STRING_TRAVERSAL, &length);
return static_cast<size_t>(length);
}
}
@@ -297,15 +297,14 @@
const DisallowHeapAllocation& no_gc) {
if (!info.script.is_null()) {
Object name_or_url = info.script->GetNameOrSourceURL();
- if (name_or_url->IsSeqOneByteString()) {
+ if (name_or_url.IsSeqOneByteString()) {
SeqOneByteString str = SeqOneByteString::cast(name_or_url);
- return {reinterpret_cast<char*>(str->GetChars(no_gc)),
- static_cast<size_t>(str->length())};
- } else if (name_or_url->IsString()) {
+ return {reinterpret_cast<char*>(str.GetChars(no_gc)),
+ static_cast<size_t>(str.length())};
+ } else if (name_or_url.IsString()) {
int length;
- *storage =
- String::cast(name_or_url)
- ->ToCString(DISALLOW_NULLS, FAST_STRING_TRAVERSAL, &length);
+ *storage = String::cast(name_or_url)
+ .ToCString(DISALLOW_NULLS, FAST_STRING_TRAVERSAL, &length);
return {storage->get(), static_cast<size_t>(length)};
}
}
@@ -328,21 +327,21 @@
void PerfJitLogger::LogWriteDebugInfo(Code code, SharedFunctionInfo shared) {
// Compute the entry count and get the name of the script.
uint32_t entry_count = 0;
- for (SourcePositionTableIterator iterator(code->SourcePositionTable());
+ for (SourcePositionTableIterator iterator(code.SourcePositionTable());
!iterator.done(); iterator.Advance()) {
entry_count++;
}
if (entry_count == 0) return;
// The WasmToJS wrapper stubs have source position entries.
- if (!shared->HasSourceCode()) return;
- Isolate* isolate = shared->GetIsolate();
- Handle<Script> script(Script::cast(shared->script()), isolate);
+ if (!shared.HasSourceCode()) return;
+ Isolate* isolate = shared.GetIsolate();
+ Handle<Script> script(Script::cast(shared.script()), isolate);
PerfJitCodeDebugInfo debug_info;
debug_info.event_ = PerfJitCodeLoad::kDebugInfo;
debug_info.time_stamp_ = GetTimestamp();
- debug_info.address_ = code->InstructionStart();
+ debug_info.address_ = code.InstructionStart();
debug_info.entry_count_ = entry_count;
uint32_t size = sizeof(debug_info);
@@ -352,7 +351,7 @@
Handle<Code> code_handle(code, isolate);
Handle<SharedFunctionInfo> function_handle(shared, isolate);
- for (SourcePositionTableIterator iterator(code->SourcePositionTable());
+ for (SourcePositionTableIterator iterator(code.SourcePositionTable());
!iterator.done(); iterator.Advance()) {
SourcePositionInfo info(GetSourcePositionInfo(code_handle, function_handle,
iterator.source_position()));
@@ -363,9 +362,9 @@
debug_info.size_ = size + padding;
LogWriteBytes(reinterpret_cast<const char*>(&debug_info), sizeof(debug_info));
- Address code_start = code->InstructionStart();
+ Address code_start = code.InstructionStart();
- for (SourcePositionTableIterator iterator(code->SourcePositionTable());
+ for (SourcePositionTableIterator iterator(code.SourcePositionTable());
!iterator.done(); iterator.Advance()) {
SourcePositionInfo info(GetSourcePositionInfo(code_handle, function_handle,
iterator.source_position()));
@@ -394,8 +393,8 @@
unwinding_info_header.time_stamp_ = GetTimestamp();
unwinding_info_header.eh_frame_hdr_size_ = EhFrameConstants::kEhFrameHdrSize;
- if (code->has_unwinding_info()) {
- unwinding_info_header.unwinding_size_ = code->unwinding_info_size();
+ if (code.has_unwinding_info()) {
+ unwinding_info_header.unwinding_size_ = code.unwinding_info_size();
unwinding_info_header.mapped_size_ = unwinding_info_header.unwinding_size_;
} else {
unwinding_info_header.unwinding_size_ = EhFrameConstants::kEhFrameHdrSize;
@@ -410,9 +409,9 @@
LogWriteBytes(reinterpret_cast<const char*>(&unwinding_info_header),
sizeof(unwinding_info_header));
- if (code->has_unwinding_info()) {
- LogWriteBytes(reinterpret_cast<const char*>(code->unwinding_info_start()),
- code->unwinding_info_size());
+ if (code.has_unwinding_info()) {
+ LogWriteBytes(reinterpret_cast<const char*>(code.unwinding_info_start()),
+ code.unwinding_info_size());
} else {
OFStream perf_output_stream(perf_output_handle_);
EhFrameWriter::WriteEmptyEhFrame(perf_output_stream);
@@ -426,7 +425,7 @@
void PerfJitLogger::CodeMoveEvent(AbstractCode from, AbstractCode to) {
// We may receive a CodeMove event if a BytecodeArray object moves. Otherwise
// code relocation is not supported.
- CHECK(from->IsBytecodeArray());
+ CHECK(from.IsBytecodeArray());
}
void PerfJitLogger::LogWriteBytes(const char* bytes, int size) {
diff --git a/src/execution/arguments-inl.h b/src/execution/arguments-inl.h
index 291a918..18978a4 100644
--- a/src/execution/arguments-inl.h
+++ b/src/execution/arguments-inl.h
@@ -22,7 +22,7 @@
return Smi::ToInt(Object(*address_of_arg_at(index)));
}
-double Arguments::number_at(int index) { return (*this)[index]->Number(); }
+double Arguments::number_at(int index) { return (*this)[index].Number(); }
} // namespace internal
} // namespace v8
diff --git a/src/execution/arguments.h b/src/execution/arguments.h
index 3dc8c07..6023b8e 100644
--- a/src/execution/arguments.h
+++ b/src/execution/arguments.h
@@ -47,7 +47,7 @@
inline double number_at(int index);
inline void set_at(int index, Object value) {
- *address_of_arg_at(index) = value->ptr();
+ *address_of_arg_at(index) = value.ptr();
}
inline FullObjectSlot slot_at(int index) {
@@ -101,7 +101,7 @@
} \
\
Type Name(int args_length, Address* args_object, Isolate* isolate) { \
- DCHECK(isolate->context().is_null() || isolate->context()->IsContext()); \
+ DCHECK(isolate->context().is_null() || isolate->context().IsContext()); \
CLOBBER_DOUBLE_REGISTERS(); \
if (V8_UNLIKELY(TracingFlags::is_runtime_stats_enabled())) { \
return Stats_##Name(args_length, args_object, isolate); \
@@ -112,7 +112,7 @@
\
static InternalType __RT_impl_##Name(Arguments args, Isolate* isolate)
-#define CONVERT_OBJECT(x) (x)->ptr()
+#define CONVERT_OBJECT(x) (x).ptr()
#define CONVERT_OBJECTPAIR(x) (x)
#define RUNTIME_FUNCTION(Name) \
diff --git a/src/execution/execution.cc b/src/execution/execution.cc
index 740a8c8..1ea977c 100644
--- a/src/execution/execution.cc
+++ b/src/execution/execution.cc
@@ -195,10 +195,10 @@
if (params.target->IsJSFunction()) {
Handle<JSFunction> function = Handle<JSFunction>::cast(params.target);
if ((!params.is_construct || function->IsConstructor()) &&
- function->shared()->IsApiFunction() &&
- !function->shared()->BreakAtEntry()) {
+ function->shared().IsApiFunction() &&
+ !function->shared().BreakAtEntry()) {
SaveAndSwitchContext save(isolate, function->context());
- DCHECK(function->context()->global_object()->IsJSGlobalObject());
+ DCHECK(function->context().global_object().IsJSGlobalObject());
Handle<Object> receiver = params.is_construct
? isolate->factory()->the_hole_value()
@@ -286,12 +286,12 @@
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
- value->ObjectVerify(isolate);
+ value.ObjectVerify(isolate);
}
#endif
// Update the pending exception flag and return the value.
- bool has_exception = value->IsException(isolate);
+ bool has_exception = value.IsException(isolate);
DCHECK(has_exception == isolate->has_pending_exception());
if (has_exception) {
if (params.message_handling == Execution::MessageHandling::kReport) {
diff --git a/src/execution/frames-inl.h b/src/execution/frames-inl.h
index c148d698..94433d4 100644
--- a/src/execution/frames-inl.h
+++ b/src/execution/frames-inl.h
@@ -107,7 +107,7 @@
// fp[4]: argc.
// fp[2 + argc - 1]: receiver.
Object argc_slot = argc_slot_object();
- DCHECK(argc_slot->IsSmi());
+ DCHECK(argc_slot.IsSmi());
int argc = Smi::ToInt(argc_slot);
const int receiverOffset = BuiltinExitFrameConstants::kNewTargetOffset +
@@ -138,7 +138,7 @@
}
inline void StandardFrame::SetExpression(int index, Object value) {
- Memory<Address>(GetExpressionAddress(index)) = value->ptr();
+ Memory<Address>(GetExpressionAddress(index)) = value.ptr();
}
inline Address StandardFrame::caller_fp() const {
@@ -187,7 +187,7 @@
}
inline void JavaScriptFrame::set_receiver(Object value) {
- Memory<Address>(GetParameterSlot(-1)) = value->ptr();
+ Memory<Address>(GetParameterSlot(-1)) = value.ptr();
}
inline bool JavaScriptFrame::has_adapted_arguments() const {
diff --git a/src/execution/frames.cc b/src/execution/frames.cc
index 519838a..4eef4aa 100644
--- a/src/execution/frames.cc
+++ b/src/execution/frames.cc
@@ -160,8 +160,8 @@
bool StackTraceFrameIterator::IsValidFrame(StackFrame* frame) const {
if (frame->is_java_script()) {
JavaScriptFrame* jsFrame = static_cast<JavaScriptFrame*>(frame);
- if (!jsFrame->function()->IsJSFunction()) return false;
- return jsFrame->function()->shared()->IsSubjectToDebugging();
+ if (!jsFrame->function().IsJSFunction()) return false;
+ return jsFrame->function().shared().IsSubjectToDebugging();
}
// apart from javascript, only wasm is valid
return frame->is_wasm();
@@ -180,9 +180,9 @@
Code interpreter_bytecode_dispatch =
isolate->builtins()->builtin(Builtins::kInterpreterEnterBytecodeDispatch);
- if (interpreter_entry_trampoline->contains(pc) ||
- interpreter_bytecode_advance->contains(pc) ||
- interpreter_bytecode_dispatch->contains(pc)) {
+ if (interpreter_entry_trampoline.contains(pc) ||
+ interpreter_bytecode_advance.contains(pc) ||
+ interpreter_bytecode_dispatch.contains(pc)) {
return true;
} else if (FLAG_interpreted_frames_native_stack) {
intptr_t marker = Memory<intptr_t>(
@@ -194,14 +194,14 @@
Memory<Address>(state->fp + StandardFrameConstants::kFunctionOffset));
// There's no need to run a full ContainsSlow if we know the frame can't be
// an InterpretedFrame, so we do these fast checks first
- if (StackFrame::IsTypeMarker(marker) || maybe_function->IsSmi()) {
+ if (StackFrame::IsTypeMarker(marker) || maybe_function.IsSmi()) {
return false;
} else if (!isolate->heap()->InSpaceSlow(pc, CODE_SPACE)) {
return false;
}
interpreter_entry_trampoline =
isolate->heap()->GcSafeFindCodeForInnerPointer(pc);
- return interpreter_entry_trampoline->is_interpreter_trampoline_builtin();
+ return interpreter_entry_trampoline.is_interpreter_trampoline_builtin();
} else {
return false;
}
@@ -384,7 +384,7 @@
// that it really an Smi.
Object number_of_args =
reinterpret_cast<ArgumentsAdaptorFrame*>(frame)->GetExpression(0);
- if (!number_of_args->IsSmi()) {
+ if (!number_of_args.IsSmi()) {
return false;
}
}
@@ -444,24 +444,24 @@
Code StackFrame::LookupCode() const {
Code result = GetContainingCode(isolate(), pc());
- DCHECK_GE(pc(), result->InstructionStart());
- DCHECK_LT(pc(), result->InstructionEnd());
+ DCHECK_GE(pc(), result.InstructionStart());
+ DCHECK_LT(pc(), result.InstructionEnd());
return result;
}
void StackFrame::IteratePc(RootVisitor* v, Address* pc_address,
Address* constant_pool_address, Code holder) {
Address pc = *pc_address;
- DCHECK(holder->GetHeap()->GcSafeCodeContains(holder, pc));
- unsigned pc_offset = static_cast<unsigned>(pc - holder->InstructionStart());
+ DCHECK(holder.GetHeap()->GcSafeCodeContains(holder, pc));
+ unsigned pc_offset = static_cast<unsigned>(pc - holder.InstructionStart());
Object code = holder;
v->VisitRootPointer(Root::kTop, nullptr, FullObjectSlot(&code));
if (code == holder) return;
holder = Code::unchecked_cast(code);
- pc = holder->InstructionStart() + pc_offset;
+ pc = holder.InstructionStart() + pc_offset;
*pc_address = pc;
if (FLAG_enable_embedded_constant_pool && constant_pool_address) {
- *constant_pool_address = holder->constant_pool();
+ *constant_pool_address = holder.constant_pool();
}
}
@@ -492,7 +492,7 @@
Object maybe_function = Object(
Memory<Address>(state->fp + StandardFrameConstants::kFunctionOffset));
if (!StackFrame::IsTypeMarker(marker)) {
- if (maybe_function->IsSmi()) {
+ if (maybe_function.IsSmi()) {
return NATIVE;
} else if (IsInterpreterFramePc(iterator->isolate(), *(state->pc_address),
state)) {
@@ -532,13 +532,13 @@
// Look up the code object to figure out the type of the stack frame.
Code code_obj = GetContainingCode(iterator->isolate(), pc);
if (!code_obj.is_null()) {
- switch (code_obj->kind()) {
+ switch (code_obj.kind()) {
case Code::BUILTIN:
if (StackFrame::IsTypeMarker(marker)) break;
- if (code_obj->is_interpreter_trampoline_builtin()) {
+ if (code_obj.is_interpreter_trampoline_builtin()) {
return INTERPRETED;
}
- if (code_obj->is_turbofanned()) {
+ if (code_obj.is_turbofanned()) {
// TODO(bmeurer): We treat frames for BUILTIN Code objects as
// OptimizedFrame for now (all the builtins with JavaScript
// linkage are actually generated with TurboFan currently, so
@@ -679,7 +679,7 @@
const int offset = ExitFrameConstants::kFrameTypeOffset;
Object marker(Memory<Address>(fp + offset));
- if (!marker->IsSmi()) {
+ if (!marker.IsSmi()) {
return EXIT;
}
@@ -719,7 +719,7 @@
Object BuiltinExitFrame::receiver() const { return receiver_slot_object(); }
bool BuiltinExitFrame::IsConstructor() const {
- return !new_target_slot_object()->IsUndefined(isolate());
+ return !new_target_slot_object().IsUndefined(isolate());
}
Object BuiltinExitFrame::GetParameter(int i) const {
@@ -731,7 +731,7 @@
int BuiltinExitFrame::ComputeParametersCount() const {
Object argc_slot = argc_slot_object();
- DCHECK(argc_slot->IsSmi());
+ DCHECK(argc_slot.IsSmi());
// Argc also counts the receiver, target, new target, and argc itself as args,
// therefore the real argument count is argc - 4.
int argc = Smi::ToInt(argc_slot) - 4;
@@ -816,8 +816,8 @@
int StandardFrame::position() const {
AbstractCode code = AbstractCode::cast(LookupCode());
- int code_offset = static_cast<int>(pc() - code->InstructionStart());
- return code->SourcePosition(code_offset);
+ int code_offset = static_cast<int>(pc() - code.InstructionStart());
+ return code.SourcePosition(code_offset);
}
int StandardFrame::ComputeExpressionsCount() const {
@@ -879,17 +879,17 @@
InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* entry =
isolate()->inner_pointer_to_code_cache()->GetCacheEntry(inner_pointer);
if (!entry->safepoint_entry.is_valid()) {
- entry->safepoint_entry = entry->code->GetSafepointEntry(inner_pointer);
+ entry->safepoint_entry = entry->code.GetSafepointEntry(inner_pointer);
DCHECK(entry->safepoint_entry.is_valid());
} else {
DCHECK(entry->safepoint_entry.Equals(
- entry->code->GetSafepointEntry(inner_pointer)));
+ entry->code.GetSafepointEntry(inner_pointer)));
}
code = entry->code;
safepoint_entry = entry->safepoint_entry;
- stack_slots = code->stack_slots();
- has_tagged_params = code->has_tagged_params();
+ stack_slots = code.stack_slots();
+ has_tagged_params = code.has_tagged_params();
}
uint32_t slot_space = stack_slots * kSystemPointerSize;
@@ -1044,18 +1044,18 @@
int StubFrame::LookupExceptionHandlerInTable(int* stack_slots) {
Code code = LookupCode();
- DCHECK(code->is_turbofanned());
- DCHECK_EQ(code->kind(), Code::BUILTIN);
+ DCHECK(code.is_turbofanned());
+ DCHECK_EQ(code.kind(), Code::BUILTIN);
HandlerTable table(code);
- int pc_offset = static_cast<int>(pc() - code->InstructionStart());
- *stack_slots = code->stack_slots();
+ int pc_offset = static_cast<int>(pc() - code.InstructionStart());
+ *stack_slots = code.stack_slots();
return table.LookupReturn(pc_offset);
}
void OptimizedFrame::Iterate(RootVisitor* v) const { IterateCompiledFrame(v); }
void JavaScriptFrame::SetParameterValue(int index, Object value) const {
- Memory<Address>(GetParameterSlot(index)) = value->ptr();
+ Memory<Address>(GetParameterSlot(index)) = value.ptr();
}
bool JavaScriptFrame::IsConstructor() const {
@@ -1073,11 +1073,11 @@
return functions.size() > 1;
}
-Code JavaScriptFrame::unchecked_code() const { return function()->code(); }
+Code JavaScriptFrame::unchecked_code() const { return function().code(); }
int OptimizedFrame::ComputeParametersCount() const {
Code code = LookupCode();
- if (code->kind() == Code::BUILTIN) {
+ if (code.kind() == Code::BUILTIN) {
return static_cast<int>(
Memory<intptr_t>(fp() + OptimizedBuiltinFrameConstants::kArgCOffset));
} else {
@@ -1092,7 +1092,7 @@
void JavaScriptFrame::GetFunctions(
std::vector<SharedFunctionInfo>* functions) const {
DCHECK(functions->empty());
- functions->push_back(function()->shared());
+ functions->push_back(function().shared());
}
void JavaScriptFrame::GetFunctions(
@@ -1102,14 +1102,14 @@
GetFunctions(&raw_functions);
for (const auto& raw_function : raw_functions) {
functions->push_back(
- Handle<SharedFunctionInfo>(raw_function, function()->GetIsolate()));
+ Handle<SharedFunctionInfo>(raw_function, function().GetIsolate()));
}
}
void JavaScriptFrame::Summarize(std::vector<FrameSummary>* functions) const {
DCHECK(functions->empty());
Code code = LookupCode();
- int offset = static_cast<int>(pc() - code->InstructionStart());
+ int offset = static_cast<int>(pc() - code.InstructionStart());
AbstractCode abstract_code = AbstractCode::cast(code);
Handle<FixedArray> params = GetParameters();
FrameSummary::JavaScriptFrameSummary summary(
@@ -1126,7 +1126,7 @@
// During deoptimization of an optimized function, we may have yet to
// materialize some closures on the stack. The arguments marker object
// marks this case.
- DCHECK(function_slot_object()->IsJSFunction() ||
+ DCHECK(function_slot_object().IsJSFunction() ||
ReadOnlyRoots(isolate()).arguments_marker() == function_slot_object());
return function_slot_object();
}
@@ -1136,18 +1136,18 @@
Object JavaScriptFrame::context() const {
const int offset = StandardFrameConstants::kContextOffset;
Object maybe_result(Memory<Address>(fp() + offset));
- DCHECK(!maybe_result->IsSmi());
+ DCHECK(!maybe_result.IsSmi());
return maybe_result;
}
Script JavaScriptFrame::script() const {
- return Script::cast(function()->shared()->script());
+ return Script::cast(function().shared().script());
}
int JavaScriptFrame::LookupExceptionHandlerInTable(
int* stack_depth, HandlerTable::CatchPrediction* prediction) {
- DCHECK(!LookupCode()->has_handler_table());
- DCHECK(!LookupCode()->is_optimized_code());
+ DCHECK(!LookupCode().has_handler_table());
+ DCHECK(!LookupCode().is_optimized_code());
return -1;
}
@@ -1155,21 +1155,21 @@
AbstractCode code, int code_offset,
FILE* file,
bool print_line_number) {
- PrintF(file, "%s", function->IsOptimized() ? "*" : "~");
- function->PrintName(file);
+ PrintF(file, "%s", function.IsOptimized() ? "*" : "~");
+ function.PrintName(file);
PrintF(file, "+%d", code_offset);
if (print_line_number) {
- SharedFunctionInfo shared = function->shared();
- int source_pos = code->SourcePosition(code_offset);
- Object maybe_script = shared->script();
- if (maybe_script->IsScript()) {
+ SharedFunctionInfo shared = function.shared();
+ int source_pos = code.SourcePosition(code_offset);
+ Object maybe_script = shared.script();
+ if (maybe_script.IsScript()) {
Script script = Script::cast(maybe_script);
- int line = script->GetLineNumber(source_pos) + 1;
- Object script_name_raw = script->name();
- if (script_name_raw->IsString()) {
- String script_name = String::cast(script->name());
+ int line = script.GetLineNumber(source_pos) + 1;
+ Object script_name_raw = script.name();
+ if (script_name_raw.IsString()) {
+ String script_name = String::cast(script.name());
std::unique_ptr<char[]> c_script_name =
- script_name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
+ script_name.ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
PrintF(file, " at %s:%d", c_script_name.get(), line);
} else {
PrintF(file, " at <unknown>:%d", line);
@@ -1196,20 +1196,20 @@
code_offset = iframe->GetBytecodeOffset();
} else {
Code code = frame->unchecked_code();
- code_offset = static_cast<int>(frame->pc() - code->InstructionStart());
+ code_offset = static_cast<int>(frame->pc() - code.InstructionStart());
}
- PrintFunctionAndOffset(function, function->abstract_code(), code_offset,
+ PrintFunctionAndOffset(function, function.abstract_code(), code_offset,
file, print_line_number);
if (print_args) {
// function arguments
// (we are intentionally only printing the actually
// supplied parameters, not all parameters required)
PrintF(file, "(this=");
- frame->receiver()->ShortPrint(file);
+ frame->receiver().ShortPrint(file);
const int length = frame->ComputeParametersCount();
for (int i = 0; i < length; i++) {
PrintF(file, ", ");
- frame->GetParameter(i)->ShortPrint(file);
+ frame->GetParameter(i).ShortPrint(file);
}
PrintF(file, ")");
}
@@ -1224,16 +1224,16 @@
int code_offset) {
auto ic_stats = ICStats::instance();
ICInfo& ic_info = ic_stats->Current();
- SharedFunctionInfo shared = function->shared();
+ SharedFunctionInfo shared = function.shared();
ic_info.function_name = ic_stats->GetOrCacheFunctionName(function);
ic_info.script_offset = code_offset;
- int source_pos = code->SourcePosition(code_offset);
- Object maybe_script = shared->script();
- if (maybe_script->IsScript()) {
+ int source_pos = code.SourcePosition(code_offset);
+ Object maybe_script = shared.script();
+ if (maybe_script.IsScript()) {
Script script = Script::cast(maybe_script);
- ic_info.line_num = script->GetLineNumber(source_pos) + 1;
+ ic_info.line_num = script.GetLineNumber(source_pos) + 1;
ic_info.script_name = ic_stats->GetOrCacheScriptName(script);
}
}
@@ -1245,7 +1245,7 @@
int JavaScriptFrame::ComputeParametersCount() const {
DCHECK(can_access_heap_objects() &&
isolate()->heap()->gc_state() == Heap::NOT_IN_GC);
- return function()->shared()->internal_formal_parameter_count();
+ return function().shared().internal_formal_parameter_count();
}
Handle<FixedArray> JavaScriptFrame::GetParameters() const {
@@ -1293,7 +1293,7 @@
// Only allow setting exception if previous value was the hole.
CHECK_EQ(ReadOnlyRoots(isolate()).the_hole_value(),
Object(Memory<Address>(exception_argument_slot)));
- Memory<Address>(exception_argument_slot) = exception->ptr();
+ Memory<Address>(exception_argument_slot) = exception.ptr();
}
FrameSummary::JavaScriptFrameSummary::JavaScriptFrameSummary(
@@ -1307,8 +1307,8 @@
code_offset_(code_offset),
is_constructor_(is_constructor),
parameters_(parameters, isolate) {
- DCHECK(abstract_code->IsBytecodeArray() ||
- Code::cast(abstract_code)->kind() != Code::OPTIMIZED_FUNCTION);
+ DCHECK(abstract_code.IsBytecodeArray() ||
+ Code::cast(abstract_code).kind() != Code::OPTIMIZED_FUNCTION);
}
void FrameSummary::EnsureSourcePositionsAvailable() {
@@ -1331,11 +1331,11 @@
bool FrameSummary::JavaScriptFrameSummary::AreSourcePositionsAvailable() const {
return !FLAG_enable_lazy_source_positions ||
- function()->shared()->GetBytecodeArray()->HasSourcePositionTable();
+ function()->shared().GetBytecodeArray().HasSourcePositionTable();
}
bool FrameSummary::JavaScriptFrameSummary::is_subject_to_debugging() const {
- return function()->shared()->IsSubjectToDebugging();
+ return function()->shared().IsSubjectToDebugging();
}
int FrameSummary::JavaScriptFrameSummary::SourcePosition() const {
@@ -1347,7 +1347,7 @@
}
Handle<Object> FrameSummary::JavaScriptFrameSummary::script() const {
- return handle(function_->shared()->script(), isolate());
+ return handle(function_->shared().script(), isolate());
}
Handle<String> FrameSummary::JavaScriptFrameSummary::FunctionName() const {
@@ -1355,7 +1355,7 @@
}
Handle<Context> FrameSummary::JavaScriptFrameSummary::native_context() const {
- return handle(function_->context()->native_context(), isolate());
+ return handle(function_->context().native_context(), isolate());
}
FrameSummary::WasmFrameSummary::WasmFrameSummary(
@@ -1392,7 +1392,7 @@
}
Handle<Script> FrameSummary::WasmFrameSummary::script() const {
- return handle(wasm_instance()->module_object()->script(),
+ return handle(wasm_instance()->module_object().script(),
wasm_instance()->GetIsolate());
}
@@ -1516,7 +1516,7 @@
// Delegate to JS frame in absence of turbofan deoptimization.
// TODO(turbofan): Revisit once we support deoptimization across the board.
Code code = LookupCode();
- if (code->kind() == Code::BUILTIN) {
+ if (code.kind() == Code::BUILTIN) {
return JavaScriptFrame::Summarize(frames);
}
@@ -1597,14 +1597,14 @@
DCHECK_NULL(prediction);
Code code = LookupCode();
HandlerTable table(code);
- int pc_offset = static_cast<int>(pc() - code->InstructionStart());
- if (stack_slots) *stack_slots = code->stack_slots();
+ int pc_offset = static_cast<int>(pc() - code.InstructionStart());
+ if (stack_slots) *stack_slots = code.stack_slots();
// When the return pc has been replaced by a trampoline there won't be
// a handler for this trampoline. Thus we need to use the return pc that
// _used to be_ on the stack to get the right ExceptionHandler.
- if (code->kind() == Code::OPTIMIZED_FUNCTION &&
- code->marked_for_deoptimization()) {
+ if (code.kind() == Code::OPTIMIZED_FUNCTION &&
+ code.marked_for_deoptimization()) {
SafepointTable safepoints(code);
pc_offset = safepoints.find_return_pc(pc_offset);
}
@@ -1616,21 +1616,21 @@
DCHECK(is_optimized());
JSFunction opt_function = function();
- Code code = opt_function->code();
+ Code code = opt_function.code();
// The code object may have been replaced by lazy deoptimization. Fall
// back to a slow search in this case to find the original optimized
// code object.
- if (!code->contains(pc())) {
+ if (!code.contains(pc())) {
code = isolate()->heap()->GcSafeFindCodeForInnerPointer(pc());
}
DCHECK(!code.is_null());
- DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
+ DCHECK(code.kind() == Code::OPTIMIZED_FUNCTION);
- SafepointEntry safepoint_entry = code->GetSafepointEntry(pc());
+ SafepointEntry safepoint_entry = code.GetSafepointEntry(pc());
if (safepoint_entry.has_deoptimization_index()) {
*deopt_index = safepoint_entry.deoptimization_index();
- return DeoptimizationData::cast(code->deoptimization_data());
+ return DeoptimizationData::cast(code.deoptimization_data());
}
*deopt_index = Safepoint::kNoDeoptimizationIndex;
return DeoptimizationData();
@@ -1638,7 +1638,7 @@
Object OptimizedFrame::receiver() const {
Code code = LookupCode();
- if (code->kind() == Code::BUILTIN) {
+ if (code.kind() == Code::BUILTIN) {
Address argc_ptr = fp() + OptimizedBuiltinFrameConstants::kArgCOffset;
intptr_t argc = *reinterpret_cast<intptr_t*>(argc_ptr);
intptr_t args_size =
@@ -1659,7 +1659,7 @@
// Delegate to JS frame in absence of turbofan deoptimization.
// TODO(turbofan): Revisit once we support deoptimization across the board.
Code code = LookupCode();
- if (code->kind() == Code::BUILTIN) {
+ if (code.kind() == Code::BUILTIN) {
return JavaScriptFrame::GetFunctions(functions);
}
@@ -1668,10 +1668,10 @@
DeoptimizationData const data = GetDeoptimizationData(&deopt_index);
DCHECK(!data.is_null());
DCHECK_NE(Safepoint::kNoDeoptimizationIndex, deopt_index);
- FixedArray const literal_array = data->LiteralArray();
+ FixedArray const literal_array = data.LiteralArray();
- TranslationIterator it(data->TranslationByteArray(),
- data->TranslationIndex(deopt_index)->value());
+ TranslationIterator it(data.TranslationByteArray(),
+ data.TranslationIndex(deopt_index).value());
Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
DCHECK_EQ(Translation::BEGIN, opcode);
it.Next(); // Skip frame count.
@@ -1690,7 +1690,7 @@
jsframe_count--;
// The second operand of the frame points to the function.
- Object shared = literal_array->get(it.Next());
+ Object shared = literal_array.get(it.Next());
functions->push_back(SharedFunctionInfo::cast(shared));
// Skip over remaining operands to advance to the next opcode.
@@ -1714,7 +1714,7 @@
int InterpretedFrame::position() const {
AbstractCode code = AbstractCode::cast(GetBytecodeArray());
int code_offset = GetBytecodeOffset();
- return code->SourcePosition(code_offset);
+ return code.SourcePosition(code_offset);
}
int InterpretedFrame::LookupExceptionHandlerInTable(
@@ -1823,21 +1823,21 @@
int index) const {
PrintIndex(accumulator, mode, index);
accumulator->Add("WASM [");
- accumulator->PrintName(script()->name());
+ accumulator->PrintName(script().name());
Address instruction_start = isolate()
->wasm_engine()
->code_manager()
->LookupCode(pc())
->instruction_start();
Vector<const uint8_t> raw_func_name =
- module_object()->GetRawFunctionName(function_index());
+ module_object().GetRawFunctionName(function_index());
const int kMaxPrintedFunctionName = 64;
char func_name[kMaxPrintedFunctionName + 1];
int func_name_len = std::min(kMaxPrintedFunctionName, raw_func_name.length());
memcpy(func_name, raw_func_name.begin(), func_name_len);
func_name[func_name_len] = '\0';
int pos = position();
- const wasm::WasmModule* module = wasm_instance()->module_object()->module();
+ const wasm::WasmModule* module = wasm_instance().module_object().module();
int func_index = function_index();
int func_code_offset = module->functions[func_index].code.offset();
accumulator->Add("], function #%u ('%s'), pc=%p (+0x%x), pos=%d (+%d)\n",
@@ -1870,14 +1870,14 @@
}
WasmModuleObject WasmCompiledFrame::module_object() const {
- return wasm_instance()->module_object();
+ return wasm_instance().module_object();
}
uint32_t WasmCompiledFrame::function_index() const {
return FrameSummary::GetSingle(this).AsWasmCompiled().function_index();
}
-Script WasmCompiledFrame::script() const { return module_object()->script(); }
+Script WasmCompiledFrame::script() const { return module_object().script(); }
int WasmCompiledFrame::position() const {
return FrameSummary::GetSingle(this).SourcePosition();
@@ -1934,7 +1934,7 @@
PrintIndex(accumulator, mode, index);
accumulator->Add("WASM INTERPRETER ENTRY [");
Script script = this->script();
- accumulator->PrintName(script->name());
+ accumulator->PrintName(script.name());
accumulator->Add("]");
if (mode != OVERVIEW) accumulator->Add("\n");
}
@@ -1943,7 +1943,7 @@
std::vector<FrameSummary>* functions) const {
Handle<WasmInstanceObject> instance(wasm_instance(), isolate());
std::vector<std::pair<uint32_t, int>> interpreted_stack =
- instance->debug_info()->GetInterpretedStack(fp());
+ instance->debug_info().GetInterpretedStack(fp());
for (auto& e : interpreted_stack) {
FrameSummary::WasmInterpretedFrameSummary summary(isolate(), instance,
@@ -1961,15 +1961,15 @@
}
WasmDebugInfo WasmInterpreterEntryFrame::debug_info() const {
- return wasm_instance()->debug_info();
+ return wasm_instance().debug_info();
}
WasmModuleObject WasmInterpreterEntryFrame::module_object() const {
- return wasm_instance()->module_object();
+ return wasm_instance().module_object();
}
Script WasmInterpreterEntryFrame::script() const {
- return module_object()->script();
+ return module_object().script();
}
int WasmInterpreterEntryFrame::position() const {
@@ -1977,7 +1977,7 @@
}
Object WasmInterpreterEntryFrame::context() const {
- return wasm_instance()->native_context();
+ return wasm_instance().native_context();
}
Address WasmInterpreterEntryFrame::GetCallerStackPointer() const {
@@ -2024,7 +2024,7 @@
void JavaScriptFrame::Print(StringStream* accumulator, PrintMode mode,
int index) const {
- Handle<SharedFunctionInfo> shared = handle(function()->shared(), isolate());
+ Handle<SharedFunctionInfo> shared = handle(function().shared(), isolate());
SharedFunctionInfo::EnsureSourcePositionsAvailable(isolate(), shared);
DisallowHeapAllocation no_gc;
@@ -2045,23 +2045,23 @@
// or context slots.
ScopeInfo scope_info = shared->scope_info();
Object script_obj = shared->script();
- if (script_obj->IsScript()) {
+ if (script_obj.IsScript()) {
Script script = Script::cast(script_obj);
accumulator->Add(" [");
- accumulator->PrintName(script->name());
+ accumulator->PrintName(script.name());
if (is_interpreted()) {
const InterpretedFrame* iframe =
reinterpret_cast<const InterpretedFrame*>(this);
BytecodeArray bytecodes = iframe->GetBytecodeArray();
int offset = iframe->GetBytecodeOffset();
- int source_pos = AbstractCode::cast(bytecodes)->SourcePosition(offset);
- int line = script->GetLineNumber(source_pos) + 1;
+ int source_pos = AbstractCode::cast(bytecodes).SourcePosition(offset);
+ int line = script.GetLineNumber(source_pos) + 1;
accumulator->Add(":%d] [bytecode=%p offset=%d]", line,
reinterpret_cast<void*>(bytecodes.ptr()), offset);
} else {
int function_start_pos = shared->StartPosition();
- int line = script->GetLineNumber(function_start_pos) + 1;
+ int line = script.GetLineNumber(function_start_pos) + 1;
accumulator->Add(":~%d] [pc=%p]", line, reinterpret_cast<void*>(pc()));
}
}
@@ -2089,15 +2089,15 @@
accumulator->Add(" {\n");
// Compute the number of locals and expression stack elements.
- int heap_locals_count = scope_info->ContextLocalCount();
+ int heap_locals_count = scope_info.ContextLocalCount();
int expressions_count = ComputeExpressionsCount();
// Try to get hold of the context of this frame.
Context context;
- if (this->context()->IsContext()) {
+ if (this->context().IsContext()) {
context = Context::cast(this->context());
- while (context->IsWithContext()) {
- context = context->previous();
+ while (context.IsWithContext()) {
+ context = context.previous();
DCHECK(!context.is_null());
}
}
@@ -2108,12 +2108,12 @@
}
for (int i = 0; i < heap_locals_count; i++) {
accumulator->Add(" var ");
- accumulator->PrintName(scope_info->ContextLocalName(i));
+ accumulator->PrintName(scope_info.ContextLocalName(i));
accumulator->Add(" = ");
if (!context.is_null()) {
int index = Context::MIN_CONTEXT_SLOTS + i;
- if (index < context->length()) {
- accumulator->Add("%o", context->get(index));
+ if (index < context.length()) {
+ accumulator->Add("%o", context.get(index));
} else {
accumulator->Add(
"// warning: missing context slot - inconsistent frame?");
@@ -2142,7 +2142,7 @@
int actual = ComputeParametersCount();
int expected = -1;
JSFunction function = this->function();
- expected = function->shared()->internal_formal_parameter_count();
+ expected = function.shared().internal_formal_parameter_count();
PrintIndex(accumulator, mode, index);
accumulator->Add("arguments adaptor frame: %d->%d", actual, expected);
@@ -2190,7 +2190,7 @@
// the full stack frame contains only tagged pointers or only raw values.
// This is used for the WasmCompileLazy builtin, where we actually pass
// untagged arguments and also store untagged values on the stack.
- if (code->has_tagged_params()) IterateExpressions(v);
+ if (code.has_tagged_params()) IterateExpressions(v);
}
// -------------------------------------------------------------------------
diff --git a/src/execution/futex-emulation.cc b/src/execution/futex-emulation.cc
index 82b16ca..a019eee 100644
--- a/src/execution/futex-emulation.cc
+++ b/src/execution/futex-emulation.cc
@@ -84,7 +84,7 @@
Handle<JSArrayBuffer> array_buffer, size_t addr,
int32_t value, double rel_timeout_ms) {
Object res = Wait32(isolate, array_buffer, addr, value, rel_timeout_ms);
- if (res->IsSmi()) {
+ if (res.IsSmi()) {
int val = Smi::ToInt(res);
switch (val) {
case WaitReturnValue::kOk:
@@ -201,7 +201,7 @@
// be false, so we'll loop and then check interrupts.
if (interrupted) {
Object interrupt_object = isolate->stack_guard()->HandleInterrupts();
- if (interrupt_object->IsException(isolate)) {
+ if (interrupt_object.IsException(isolate)) {
result = interrupt_object;
callback_result = AtomicsWaitEvent::kTerminatedExecution;
mutex_.Pointer()->Lock();
diff --git a/src/execution/isolate-inl.h b/src/execution/isolate-inl.h
index 3b8094d..8e5bfeb 100644
--- a/src/execution/isolate-inl.h
+++ b/src/execution/isolate-inl.h
@@ -21,37 +21,37 @@
}
void Isolate::set_context(Context context) {
- DCHECK(context.is_null() || context->IsContext());
+ DCHECK(context.is_null() || context.IsContext());
thread_local_top()->context_ = context;
}
Handle<NativeContext> Isolate::native_context() {
- return handle(context()->native_context(), this);
+ return handle(context().native_context(), this);
}
NativeContext Isolate::raw_native_context() {
- return context()->native_context();
+ return context().native_context();
}
Object Isolate::pending_exception() {
DCHECK(has_pending_exception());
- DCHECK(!thread_local_top()->pending_exception_->IsException(this));
+ DCHECK(!thread_local_top()->pending_exception_.IsException(this));
return thread_local_top()->pending_exception_;
}
void Isolate::set_pending_exception(Object exception_obj) {
- DCHECK(!exception_obj->IsException(this));
+ DCHECK(!exception_obj.IsException(this));
thread_local_top()->pending_exception_ = exception_obj;
}
void Isolate::clear_pending_exception() {
- DCHECK(!thread_local_top()->pending_exception_->IsException(this));
+ DCHECK(!thread_local_top()->pending_exception_.IsException(this));
thread_local_top()->pending_exception_ = ReadOnlyRoots(this).the_hole_value();
}
bool Isolate::has_pending_exception() {
- DCHECK(!thread_local_top()->pending_exception_->IsException(this));
- return !thread_local_top()->pending_exception_->IsTheHole(this);
+ DCHECK(!thread_local_top()->pending_exception_.IsException(this));
+ return !thread_local_top()->pending_exception_.IsTheHole(this);
}
void Isolate::clear_pending_message() {
@@ -61,18 +61,18 @@
Object Isolate::scheduled_exception() {
DCHECK(has_scheduled_exception());
- DCHECK(!thread_local_top()->scheduled_exception_->IsException(this));
+ DCHECK(!thread_local_top()->scheduled_exception_.IsException(this));
return thread_local_top()->scheduled_exception_;
}
bool Isolate::has_scheduled_exception() {
- DCHECK(!thread_local_top()->scheduled_exception_->IsException(this));
+ DCHECK(!thread_local_top()->scheduled_exception_.IsException(this));
return thread_local_top()->scheduled_exception_ !=
ReadOnlyRoots(this).the_hole_value();
}
void Isolate::clear_scheduled_exception() {
- DCHECK(!thread_local_top()->scheduled_exception_->IsException(this));
+ DCHECK(!thread_local_top()->scheduled_exception_.IsException(this));
thread_local_top()->scheduled_exception_ =
ReadOnlyRoots(this).the_hole_value();
}
@@ -88,11 +88,11 @@
}
Handle<JSGlobalObject> Isolate::global_object() {
- return handle(context()->global_object(), this);
+ return handle(context().global_object(), this);
}
Handle<JSGlobalProxy> Isolate::global_proxy() {
- return handle(context()->global_proxy(), this);
+ return handle(context().global_proxy(), this);
}
Isolate::ExceptionScope::ExceptionScope(Isolate* isolate)
@@ -103,12 +103,12 @@
isolate_->set_pending_exception(*pending_exception_);
}
-#define NATIVE_CONTEXT_FIELD_ACCESSOR(index, type, name) \
- Handle<type> Isolate::name() { \
- return Handle<type>(raw_native_context()->name(), this); \
- } \
- bool Isolate::is_##name(type value) { \
- return raw_native_context()->is_##name(value); \
+#define NATIVE_CONTEXT_FIELD_ACCESSOR(index, type, name) \
+ Handle<type> Isolate::name() { \
+ return Handle<type>(raw_native_context().name(), this); \
+ } \
+ bool Isolate::is_##name(type value) { \
+ return raw_native_context().is_##name(value); \
}
NATIVE_CONTEXT_FIELDS(NATIVE_CONTEXT_FIELD_ACCESSOR)
#undef NATIVE_CONTEXT_FIELD_ACCESSOR
@@ -116,7 +116,7 @@
bool Isolate::IsArrayConstructorIntact() {
Cell array_constructor_cell =
Cell::cast(root(RootIndex::kArrayConstructorProtector));
- return array_constructor_cell->value() == Smi::FromInt(kProtectorValid);
+ return array_constructor_cell.value() == Smi::FromInt(kProtectorValid);
}
bool Isolate::IsArraySpeciesLookupChainIntact() {
@@ -134,64 +134,64 @@
PropertyCell species_cell =
PropertyCell::cast(root(RootIndex::kArraySpeciesProtector));
- return species_cell->value()->IsSmi() &&
- Smi::ToInt(species_cell->value()) == kProtectorValid;
+ return species_cell.value().IsSmi() &&
+ Smi::ToInt(species_cell.value()) == kProtectorValid;
}
bool Isolate::IsTypedArraySpeciesLookupChainIntact() {
PropertyCell species_cell =
PropertyCell::cast(root(RootIndex::kTypedArraySpeciesProtector));
- return species_cell->value()->IsSmi() &&
- Smi::ToInt(species_cell->value()) == kProtectorValid;
+ return species_cell.value().IsSmi() &&
+ Smi::ToInt(species_cell.value()) == kProtectorValid;
}
bool Isolate::IsRegExpSpeciesLookupChainIntact() {
PropertyCell species_cell =
PropertyCell::cast(root(RootIndex::kRegExpSpeciesProtector));
- return species_cell->value()->IsSmi() &&
- Smi::ToInt(species_cell->value()) == kProtectorValid;
+ return species_cell.value().IsSmi() &&
+ Smi::ToInt(species_cell.value()) == kProtectorValid;
}
bool Isolate::IsPromiseSpeciesLookupChainIntact() {
PropertyCell species_cell =
PropertyCell::cast(root(RootIndex::kPromiseSpeciesProtector));
- return species_cell->value()->IsSmi() &&
- Smi::ToInt(species_cell->value()) == kProtectorValid;
+ return species_cell.value().IsSmi() &&
+ Smi::ToInt(species_cell.value()) == kProtectorValid;
}
bool Isolate::IsStringLengthOverflowIntact() {
Cell string_length_cell = Cell::cast(root(RootIndex::kStringLengthProtector));
- return string_length_cell->value() == Smi::FromInt(kProtectorValid);
+ return string_length_cell.value() == Smi::FromInt(kProtectorValid);
}
bool Isolate::IsArrayBufferDetachingIntact() {
PropertyCell buffer_detaching =
PropertyCell::cast(root(RootIndex::kArrayBufferDetachingProtector));
- return buffer_detaching->value() == Smi::FromInt(kProtectorValid);
+ return buffer_detaching.value() == Smi::FromInt(kProtectorValid);
}
bool Isolate::IsArrayIteratorLookupChainIntact() {
PropertyCell array_iterator_cell =
PropertyCell::cast(root(RootIndex::kArrayIteratorProtector));
- return array_iterator_cell->value() == Smi::FromInt(kProtectorValid);
+ return array_iterator_cell.value() == Smi::FromInt(kProtectorValid);
}
bool Isolate::IsMapIteratorLookupChainIntact() {
PropertyCell map_iterator_cell =
PropertyCell::cast(root(RootIndex::kMapIteratorProtector));
- return map_iterator_cell->value() == Smi::FromInt(kProtectorValid);
+ return map_iterator_cell.value() == Smi::FromInt(kProtectorValid);
}
bool Isolate::IsSetIteratorLookupChainIntact() {
PropertyCell set_iterator_cell =
PropertyCell::cast(root(RootIndex::kSetIteratorProtector));
- return set_iterator_cell->value() == Smi::FromInt(kProtectorValid);
+ return set_iterator_cell.value() == Smi::FromInt(kProtectorValid);
}
bool Isolate::IsStringIteratorLookupChainIntact() {
PropertyCell string_iterator_cell =
PropertyCell::cast(root(RootIndex::kStringIteratorProtector));
- return string_iterator_cell->value() == Smi::FromInt(kProtectorValid);
+ return string_iterator_cell.value() == Smi::FromInt(kProtectorValid);
}
} // namespace internal
diff --git a/src/execution/isolate.cc b/src/execution/isolate.cc
index 1a7722b..72322b6 100644
--- a/src/execution/isolate.cc
+++ b/src/execution/isolate.cc
@@ -295,7 +295,7 @@
// The builtins constants table is also tightly tied to embedded builtins.
hash = base::hash_combine(
- hash, static_cast<size_t>(heap_.builtins_constants_table()->length()));
+ hash, static_cast<size_t>(heap_.builtins_constants_table().length()));
return hash;
}
@@ -602,7 +602,7 @@
Handle<Object> receiver(generator_object->receiver(), isolate_);
Handle<AbstractCode> code(
- AbstractCode::cast(function->shared()->GetBytecodeArray()), isolate_);
+ AbstractCode::cast(function->shared().GetBytecodeArray()), isolate_);
int offset = Smi::ToInt(generator_object->input_or_debug_pos());
// The stored bytecode offset is relative to a different base than what
// is used in the source position table, hence the subtraction.
@@ -610,11 +610,10 @@
Handle<FixedArray> parameters = isolate_->factory()->empty_fixed_array();
if (V8_UNLIKELY(FLAG_detailed_error_stack_trace)) {
- int param_count = function->shared()->internal_formal_parameter_count();
+ int param_count = function->shared().internal_formal_parameter_count();
parameters = isolate_->factory()->NewFixedArray(param_count);
for (int i = 0; i < param_count; i++) {
- parameters->set(i,
- generator_object->parameters_and_registers()->get(i));
+ parameters->set(i, generator_object->parameters_and_registers().get(i));
}
}
@@ -669,7 +668,7 @@
if (summary.code()->kind() != wasm::WasmCode::kFunction) return;
Handle<WasmInstanceObject> instance = summary.wasm_instance();
int flags = 0;
- if (instance->module_object()->is_asm_js()) {
+ if (instance->module_object().is_asm_js()) {
flags |= FrameArray::kIsAsmJsWasmFrame;
if (summary.at_to_number_conversion()) {
flags |= FrameArray::kAsmJsAtNumberConversion;
@@ -687,7 +686,7 @@
FrameSummary::WasmInterpretedFrameSummary const& summary) {
Handle<WasmInstanceObject> instance = summary.wasm_instance();
int flags = FrameArray::kIsWasmInterpretedFrame;
- DCHECK(!instance->module_object()->is_asm_js());
+ DCHECK(!instance->module_object().is_asm_js());
elements_ = FrameArray::AppendWasmFrame(elements_, instance,
summary.function_index(), {},
summary.byte_offset(), flags);
@@ -702,7 +701,7 @@
// TODO(szuend): Remove this check once the flag is enabled
// by default.
if (!FLAG_experimental_stack_trace_frames &&
- function->shared()->IsApiFunction()) {
+ function->shared().IsApiFunction()) {
return;
}
@@ -778,7 +777,7 @@
bool IsStrictFrame(Handle<JSFunction> function) {
if (!encountered_strict_function_) {
encountered_strict_function_ =
- is_strict(function->shared()->language_mode());
+ is_strict(function->shared().language_mode());
}
return encountered_strict_function_;
}
@@ -818,16 +817,15 @@
// The --builtins-in-stack-traces command line flag allows including
// internal call sites in the stack trace for debugging purposes.
if (!FLAG_builtins_in_stack_traces &&
- !function->shared()->IsUserJavaScript()) {
- return function->shared()->native() ||
- function->shared()->IsApiFunction();
+ !function->shared().IsUserJavaScript()) {
+ return function->shared().native() || function->shared().IsApiFunction();
}
return true;
}
bool IsInSameSecurityContext(Handle<JSFunction> function) {
if (!check_security_context_) return true;
- return isolate_->context()->HasSameSecurityTokenAs(function->context());
+ return isolate_->context().HasSameSecurityTokenAs(function->context());
}
// TODO(jgruber): Fix all cases in which frames give us a hole value (e.g. the
@@ -869,9 +867,9 @@
bool IsBuiltinFunction(Isolate* isolate, HeapObject object,
Builtins::Name builtin_index) {
- if (!object->IsJSFunction()) return false;
+ if (!object.IsJSFunction()) return false;
JSFunction const function = JSFunction::cast(object);
- return function->code() == isolate->builtins()->builtin(builtin_index);
+ return function.code() == isolate->builtins()->builtin(builtin_index);
}
void CaptureAsyncStackTrace(Isolate* isolate, Handle<JSPromise> promise,
@@ -881,10 +879,10 @@
if (promise->status() != Promise::kPending) return;
// Check that we have exactly one PromiseReaction on the {promise}.
- if (!promise->reactions()->IsPromiseReaction()) return;
+ if (!promise->reactions().IsPromiseReaction()) return;
Handle<PromiseReaction> reaction(
PromiseReaction::cast(promise->reactions()), isolate);
- if (!reaction->next()->IsSmi()) return;
+ if (!reaction->next().IsSmi()) return;
// Check if the {reaction} has one of the known async function or
// async generator continuations as its fulfill handler.
@@ -897,7 +895,7 @@
// Now peak into the handlers' AwaitContext to get to
// the JSGeneratorObject for the async function.
Handle<Context> context(
- JSFunction::cast(reaction->fulfill_handler())->context(), isolate);
+ JSFunction::cast(reaction->fulfill_handler()).context(), isolate);
Handle<JSGeneratorObject> generator_object(
JSGeneratorObject::cast(context->extension()), isolate);
CHECK(generator_object->is_suspended());
@@ -913,7 +911,7 @@
} else {
Handle<JSAsyncGeneratorObject> async_generator_object =
Handle<JSAsyncGeneratorObject>::cast(generator_object);
- if (async_generator_object->queue()->IsUndefined(isolate)) return;
+ if (async_generator_object->queue().IsUndefined(isolate)) return;
Handle<AsyncGeneratorRequest> async_generator_request(
AsyncGeneratorRequest::cast(async_generator_object->queue()),
isolate);
@@ -938,7 +936,7 @@
PromiseBuiltins::kPromiseAllResolveElementCapabilitySlot;
Handle<PromiseCapability> capability(
PromiseCapability::cast(context->get(index)), isolate);
- if (!capability->promise()->IsJSPromise()) return;
+ if (!capability->promise().IsJSPromise()) return;
promise = handle(JSPromise::cast(capability->promise()), isolate);
} else {
// We have some generic promise chain here, so try to
@@ -951,7 +949,7 @@
} else if (promise_or_capability->IsPromiseCapability()) {
Handle<PromiseCapability> capability =
Handle<PromiseCapability>::cast(promise_or_capability);
- if (!capability->promise()->IsJSPromise()) return;
+ if (!capability->promise().IsJSPromise()) return;
promise = handle(JSPromise::cast(capability->promise()), isolate);
} else {
// Otherwise the {promise_or_capability} must be undefined here.
@@ -1068,7 +1066,7 @@
// Now peak into the handlers' AwaitContext to get to
// the JSGeneratorObject for the async function.
Handle<Context> context(
- JSFunction::cast(promise_reaction_job_task->handler())->context(),
+ JSFunction::cast(promise_reaction_job_task->handler()).context(),
isolate);
Handle<JSGeneratorObject> generator_object(
JSGeneratorObject::cast(context->extension()), isolate);
@@ -1186,12 +1184,12 @@
JavaScriptFrame* frame = it.frame();
DCHECK(!frame->is_builtin());
- Handle<SharedFunctionInfo> shared = handle(frame->function()->shared(), this);
+ Handle<SharedFunctionInfo> shared = handle(frame->function().shared(), this);
SharedFunctionInfo::EnsureSourcePositionsAvailable(this, shared);
int position = frame->position();
- Object maybe_script = frame->function()->shared()->script();
- if (maybe_script->IsScript()) {
+ Object maybe_script = frame->function().shared().script();
+ if (maybe_script.IsScript()) {
Handle<Script> script(Script::cast(maybe_script), this);
Script::PositionInfo info;
Script::GetPositionInfo(script, position, &info, Script::WITH_OFFSET);
@@ -1205,7 +1203,7 @@
if (frame->is_interpreted()) {
InterpretedFrame* iframe = static_cast<InterpretedFrame*>(frame);
Address bytecode_start =
- iframe->GetBytecodeArray()->GetFirstBytecodeAddress();
+ iframe->GetBytecodeArray().GetFirstBytecodeAddress();
return bytecode_start + iframe->GetBytecodeOffset();
}
@@ -1305,7 +1303,7 @@
return ScheduleThrow(
*factory()->NewTypeError(MessageTemplate::kNoAccess));
}
- data = handle(access_check_info->data(), this);
+ data = handle(access_check_info.data(), this);
}
// Leaving JavaScript.
@@ -1327,18 +1325,17 @@
DisallowHeapAllocation no_gc;
if (receiver->IsJSGlobalProxy()) {
- Object receiver_context =
- JSGlobalProxy::cast(*receiver)->native_context();
- if (!receiver_context->IsContext()) return false;
+ Object receiver_context = JSGlobalProxy::cast(*receiver).native_context();
+ if (!receiver_context.IsContext()) return false;
// Get the native context of current top context.
// avoid using Isolate::native_context() because it uses Handle.
Context native_context =
- accessing_context->global_object()->native_context();
+ accessing_context->global_object().native_context();
if (receiver_context == native_context) return true;
- if (Context::cast(receiver_context)->security_token() ==
- native_context->security_token())
+ if (Context::cast(receiver_context).security_token() ==
+ native_context.security_token())
return true;
}
}
@@ -1350,9 +1347,9 @@
DisallowHeapAllocation no_gc;
AccessCheckInfo access_check_info = AccessCheckInfo::Get(this, receiver);
if (access_check_info.is_null()) return false;
- Object fun_obj = access_check_info->callback();
+ Object fun_obj = access_check_info.callback();
callback = v8::ToCData<v8::AccessCheckCallback>(fun_obj);
- data = handle(access_check_info->data(), this);
+ data = handle(access_check_info.data(), this);
}
LOG(this, ApiSecurityCheck());
@@ -1449,27 +1446,27 @@
// to the console for easier debugging.
int line_number =
location->script()->GetLineNumber(location->start_pos()) + 1;
- if (exception->IsString() && location->script()->name()->IsString()) {
+ if (exception->IsString() && location->script()->name().IsString()) {
base::OS::PrintError(
"Extension or internal compilation error: %s in %s at line %d.\n",
- String::cast(*exception)->ToCString().get(),
- String::cast(location->script()->name())->ToCString().get(),
+ String::cast(*exception).ToCString().get(),
+ String::cast(location->script()->name()).ToCString().get(),
line_number);
- } else if (location->script()->name()->IsString()) {
+ } else if (location->script()->name().IsString()) {
base::OS::PrintError(
"Extension or internal compilation error in %s at line %d.\n",
- String::cast(location->script()->name())->ToCString().get(),
+ String::cast(location->script()->name()).ToCString().get(),
line_number);
} else if (exception->IsString()) {
base::OS::PrintError("Extension or internal compilation error: %s.\n",
- String::cast(*exception)->ToCString().get());
+ String::cast(*exception).ToCString().get());
} else {
base::OS::PrintError("Extension or internal compilation error.\n");
}
#ifdef OBJECT_PRINT
// Since comments and empty lines have been stripped from the source of
// builtins, print the actual source here so that line numbers match.
- if (location->script()->source()->IsString()) {
+ if (location->script()->source().IsString()) {
Handle<String> src(String::cast(location->script()->source()),
location->script()->GetIsolate());
PrintF("Failing script:");
@@ -1506,8 +1503,8 @@
Handle<Script> script = location->script();
Handle<Object> name(script->GetNameOrSourceURL(), this);
printf("at ");
- if (name->IsString() && String::cast(*name)->length() > 0)
- String::cast(*name)->PrintOn(stdout);
+ if (name->IsString() && String::cast(*name).length() > 0)
+ String::cast(*name).PrintOn(stdout);
else
printf("<anonymous>");
// Script::GetLineNumber and Script::GetColumnNumber can allocate on the heap to
@@ -1528,7 +1525,7 @@
printf(", line %d\n", script->GetLineNumber(location->start_pos()) + 1);
}
}
- raw_exception->Print();
+ raw_exception.Print();
printf("Stack Trace:\n");
PrintStack(stdout);
printf("=========================================================\n");
@@ -1653,8 +1650,8 @@
// Gather information from the handler.
Code code = frame->LookupCode();
HandlerTable table(code);
- return FoundHandler(Context(), code->InstructionStart(),
- table.LookupReturn(0), code->constant_pool(),
+ return FoundHandler(Context(), code.InstructionStart(),
+ table.LookupReturn(0), code.constant_pool(),
handler->address() + StackHandlerConstants::kSize,
0);
}
@@ -1720,17 +1717,17 @@
// TODO(bmeurer): Turbofanned BUILTIN frames appear as OPTIMIZED,
// but do not have a code kind of OPTIMIZED_FUNCTION.
- if (code->kind() == Code::OPTIMIZED_FUNCTION &&
- code->marked_for_deoptimization()) {
+ if (code.kind() == Code::OPTIMIZED_FUNCTION &&
+ code.marked_for_deoptimization()) {
// If the target code is lazy deoptimized, we jump to the original
// return address, but we make a note that we are throwing, so
// that the deoptimizer can do the right thing.
- offset = static_cast<int>(frame->pc() - code->entry());
+ offset = static_cast<int>(frame->pc() - code.entry());
set_deoptimizer_lazy_throw(true);
}
- return FoundHandler(Context(), code->InstructionStart(), offset,
- code->constant_pool(), return_sp, frame->fp());
+ return FoundHandler(Context(), code.InstructionStart(), offset,
+ code.constant_pool(), return_sp, frame->fp());
}
case StackFrame::STUB: {
@@ -1748,8 +1745,8 @@
break;
}
Code code = stub_frame->LookupCode();
- if (!code->IsCode() || code->kind() != Code::BUILTIN ||
- !code->has_handler_table() || !code->is_turbofanned()) {
+ if (!code.IsCode() || code.kind() != Code::BUILTIN ||
+ !code.has_handler_table() || !code.is_turbofanned()) {
break;
}
@@ -1763,8 +1760,8 @@
StandardFrameConstants::kFixedFrameSizeAboveFp -
stack_slots * kSystemPointerSize;
- return FoundHandler(Context(), code->InstructionStart(), offset,
- code->constant_pool(), return_sp, frame->fp());
+ return FoundHandler(Context(), code.InstructionStart(), offset,
+ code.constant_pool(), return_sp, frame->fp());
}
case StackFrame::INTERPRETED: {
@@ -1772,7 +1769,7 @@
if (!catchable_by_js) break;
InterpretedFrame* js_frame = static_cast<InterpretedFrame*>(frame);
int register_slots = InterpreterFrameConstants::RegisterStackSlotCount(
- js_frame->GetBytecodeArray()->register_count());
+ js_frame->GetBytecodeArray().register_count());
int context_reg = 0; // Will contain register index holding context.
int offset =
js_frame->LookupExceptionHandlerInTable(&context_reg, nullptr);
@@ -1796,8 +1793,8 @@
Code code =
builtins()->builtin(Builtins::kInterpreterEnterBytecodeDispatch);
- return FoundHandler(context, code->InstructionStart(), 0,
- code->constant_pool(), return_sp, frame->fp());
+ return FoundHandler(context, code.InstructionStart(), 0,
+ code.constant_pool(), return_sp, frame->fp());
}
case StackFrame::BUILTIN:
@@ -1825,8 +1822,8 @@
// Reconstruct the stack pointer from the frame pointer.
Address return_sp = js_frame->fp() - js_frame->GetSPToFPDelta();
Code code = js_frame->LookupCode();
- return FoundHandler(Context(), code->InstructionStart(), 0,
- code->constant_pool(), return_sp, frame->fp());
+ return FoundHandler(Context(), code.InstructionStart(), 0,
+ code.constant_pool(), return_sp, frame->fp());
} break;
default:
@@ -1840,7 +1837,7 @@
USE(removed);
// If there were any materialized objects, the code should be
// marked for deopt.
- DCHECK_IMPLIES(removed, frame->LookupCode()->marked_for_deoptimization());
+ DCHECK_IMPLIES(removed, frame->LookupCode().marked_for_deoptimization());
}
}
@@ -1861,7 +1858,7 @@
const FrameSummary& summary = summaries[i - 1];
Handle<AbstractCode> code = summary.AsJavaScript().abstract_code();
if (code->IsCode() && code->kind() == AbstractCode::BUILTIN) {
- prediction = code->GetCode()->GetBuiltinCatchPrediction();
+ prediction = code->GetCode().GetBuiltinCatchPrediction();
if (prediction == HandlerTable::UNCAUGHT) continue;
return prediction;
}
@@ -1983,7 +1980,7 @@
DCHECK(handler->rethrow_);
DCHECK(handler->capture_message_);
Object message(reinterpret_cast<Address>(handler->message_obj_));
- DCHECK(message->IsJSMessageObject() || message->IsTheHole(this));
+ DCHECK(message.IsJSMessageObject() || message.IsTheHole(this));
thread_local_top()->pending_message_obj_ = message;
}
@@ -2064,7 +2061,7 @@
Handle<SharedFunctionInfo> shared;
Handle<Object> script = summary.script();
if (!script->IsScript() ||
- (Script::cast(*script)->source()->IsUndefined(this))) {
+ (Script::cast(*script).source().IsUndefined(this))) {
return false;
}
@@ -2123,15 +2120,15 @@
if (elements->IsWasmFrame(i) || elements->IsAsmJsWasmFrame(i)) {
Handle<WasmInstanceObject> instance(elements->WasmInstance(i), this);
uint32_t func_index =
- static_cast<uint32_t>(elements->WasmFunctionIndex(i)->value());
- int code_offset = elements->Offset(i)->value();
+ static_cast<uint32_t>(elements->WasmFunctionIndex(i).value());
+ int code_offset = elements->Offset(i).value();
bool is_at_number_conversion =
elements->IsAsmJsWasmFrame(i) &&
- elements->Flags(i)->value() & FrameArray::kAsmJsAtNumberConversion;
+ elements->Flags(i).value() & FrameArray::kAsmJsAtNumberConversion;
// WasmCode* held alive by the {GlobalWasmCodeRef}.
wasm::WasmCode* code =
Managed<wasm::GlobalWasmCodeRef>::cast(elements->WasmCodeObject(i))
- ->get()
+ .get()
->code();
int byte_offset =
FrameSummary::WasmCompiledFrameSummary::GetWasmSourcePosition(
@@ -2139,26 +2136,26 @@
int pos = WasmModuleObject::GetSourcePosition(
handle(instance->module_object(), this), func_index, byte_offset,
is_at_number_conversion);
- Handle<Script> script(instance->module_object()->script(), this);
+ Handle<Script> script(instance->module_object().script(), this);
*target = MessageLocation(script, pos, pos + 1);
return true;
}
Handle<JSFunction> fun = handle(elements->Function(i), this);
- if (!fun->shared()->IsSubjectToDebugging()) continue;
+ if (!fun->shared().IsSubjectToDebugging()) continue;
- Object script = fun->shared()->script();
- if (script->IsScript() &&
- !(Script::cast(script)->source()->IsUndefined(this))) {
+ Object script = fun->shared().script();
+ if (script.IsScript() &&
+ !(Script::cast(script).source().IsUndefined(this))) {
Handle<SharedFunctionInfo> shared = handle(fun->shared(), this);
AbstractCode abstract_code = elements->Code(i);
- const int code_offset = elements->Offset(i)->value();
+ const int code_offset = elements->Offset(i).value();
Handle<Script> casted_script(Script::cast(script), this);
if (shared->HasBytecodeArray() &&
- shared->GetBytecodeArray()->HasSourcePositionTable()) {
- int pos = abstract_code->SourcePosition(code_offset);
+ shared->GetBytecodeArray().HasSourcePositionTable()) {
+ int pos = abstract_code.SourcePosition(code_offset);
*target = MessageLocation(casted_script, pos, pos + 1, shared);
} else {
*target = MessageLocation(casted_script, shared, code_offset);
@@ -2274,7 +2271,7 @@
}
// Actually report the pending message to all message handlers.
- if (!message_obj->IsTheHole(this) && should_report_exception) {
+ if (!message_obj.IsTheHole(this) && should_report_exception) {
HandleScope scope(this);
Handle<JSMessageObject> message(JSMessageObject::cast(message_obj), this);
Handle<Script> script(message->script(), this);
@@ -2350,13 +2347,13 @@
thread_local_top()->external_caught_exception_ = true;
v8::TryCatch* handler = try_catch_handler();
- DCHECK(thread_local_top()->pending_message_obj_->IsJSMessageObject() ||
- thread_local_top()->pending_message_obj_->IsTheHole(this));
+ DCHECK(thread_local_top()->pending_message_obj_.IsJSMessageObject() ||
+ thread_local_top()->pending_message_obj_.IsTheHole(this));
handler->can_continue_ = true;
handler->has_terminated_ = false;
handler->exception_ = reinterpret_cast<void*>(pending_exception().ptr());
// Propagate to the external try-catch only if we got an actual message.
- if (thread_local_top()->pending_message_obj_->IsTheHole(this)) return true;
+ if (thread_local_top()->pending_message_obj_.IsTheHole(this)) return true;
handler->message_obj_ =
reinterpret_cast<void*>(thread_local_top()->pending_message_obj_.ptr());
@@ -2484,7 +2481,7 @@
: handle(Handle<PromiseCapability>::cast(promise_or_capability)
->promise(),
isolate));
- if (reaction->reject_handler()->IsUndefined(isolate)) {
+ if (reaction->reject_handler().IsUndefined(isolate)) {
if (InternalPromiseHasUserDefinedRejectHandler(isolate, promise)) {
return true;
}
@@ -2529,11 +2526,11 @@
catch_prediction = PredictException(JavaScriptFrame::cast(frame));
} else if (frame->type() == StackFrame::STUB) {
Code code = frame->LookupCode();
- if (!code->IsCode() || code->kind() != Code::BUILTIN ||
- !code->has_handler_table() || !code->is_turbofanned()) {
+ if (!code.IsCode() || code.kind() != Code::BUILTIN ||
+ !code.has_handler_table() || !code.is_turbofanned()) {
continue;
}
- catch_prediction = code->GetBuiltinCatchPrediction();
+ catch_prediction = code.GetBuiltinCatchPrediction();
} else {
continue;
}
@@ -2611,7 +2608,7 @@
if (!it.done() &&
(!top_backup_incumbent || it.frame()->sp() < top_backup_incumbent)) {
Context context = Context::cast(it.frame()->context());
- return Handle<Context>(context->native_context(), this);
+ return Handle<Context>(context.native_context(), this);
}
// 2nd candidate: the last Context::Scope's incumbent context if any.
@@ -2647,7 +2644,7 @@
#ifdef USE_SIMULATOR
thread_local_top()->simulator_ = Simulator::current(this);
#endif
- DCHECK(context().is_null() || context()->IsContext());
+ DCHECK(context().is_null() || context().IsContext());
return from + sizeof(ThreadLocalTop);
}
@@ -3144,13 +3141,13 @@
SetTerminationOnExternalTryCatch();
} else {
v8::TryCatch* handler = try_catch_handler();
- DCHECK(thread_local_top()->pending_message_obj_->IsJSMessageObject() ||
- thread_local_top()->pending_message_obj_->IsTheHole(this));
+ DCHECK(thread_local_top()->pending_message_obj_.IsJSMessageObject() ||
+ thread_local_top()->pending_message_obj_.IsTheHole(this));
handler->can_continue_ = true;
handler->has_terminated_ = false;
handler->exception_ = reinterpret_cast<void*>(pending_exception().ptr());
// Propagate to the external try-catch only if we got an actual message.
- if (thread_local_top()->pending_message_obj_->IsTheHole(this)) return true;
+ if (thread_local_top()->pending_message_obj_.IsTheHole(this)) return true;
handler->message_obj_ =
reinterpret_cast<void*>(thread_local_top()->pending_message_obj_.ptr());
@@ -3478,7 +3475,7 @@
#ifndef V8_TARGET_ARCH_ARM
// The IET for profiling should always be a full on-heap Code object.
DCHECK(!Code::cast(heap_.interpreter_entry_trampoline_for_profiling())
- ->is_off_heap_trampoline());
+ .is_off_heap_trampoline());
#endif // V8_TARGET_ARCH_ARM
if (FLAG_print_builtin_code) builtins()->PrintBuiltinCode();
@@ -3686,14 +3683,14 @@
}
void Isolate::SetFeedbackVectorsForProfilingTools(Object value) {
- DCHECK(value->IsUndefined(this) || value->IsArrayList());
+ DCHECK(value.IsUndefined(this) || value.IsArrayList());
heap()->set_feedback_vectors_for_profiling_tools(value);
}
void Isolate::MaybeInitializeVectorListFromHeap() {
- if (!heap()->feedback_vectors_for_profiling_tools()->IsUndefined(this)) {
+ if (!heap()->feedback_vectors_for_profiling_tools().IsUndefined(this)) {
// Already initialized, return early.
- DCHECK(heap()->feedback_vectors_for_profiling_tools()->IsArrayList());
+ DCHECK(heap()->feedback_vectors_for_profiling_tools().IsArrayList());
return;
}
@@ -3704,13 +3701,13 @@
HeapIterator heap_iterator(heap());
for (HeapObject current_obj = heap_iterator.next(); !current_obj.is_null();
current_obj = heap_iterator.next()) {
- if (!current_obj->IsFeedbackVector()) continue;
+ if (!current_obj.IsFeedbackVector()) continue;
FeedbackVector vector = FeedbackVector::cast(current_obj);
- SharedFunctionInfo shared = vector->shared_function_info();
+ SharedFunctionInfo shared = vector.shared_function_info();
// No need to preserve the feedback vector for non-user-visible functions.
- if (!shared->IsSubjectToDebugging()) continue;
+ if (!shared.IsSubjectToDebugging()) continue;
vectors.emplace_back(vector, this);
}
@@ -3732,14 +3729,14 @@
bool Isolate::IsArrayOrObjectOrStringPrototype(Object object) {
Object context = heap()->native_contexts_list();
- while (!context->IsUndefined(this)) {
+ while (!context.IsUndefined(this)) {
Context current_context = Context::cast(context);
- if (current_context->initial_object_prototype() == object ||
- current_context->initial_array_prototype() == object ||
- current_context->initial_string_prototype() == object) {
+ if (current_context.initial_object_prototype() == object ||
+ current_context.initial_array_prototype() == object ||
+ current_context.initial_string_prototype() == object) {
return true;
}
- context = current_context->next_context_link();
+ context = current_context.next_context_link();
}
return false;
}
@@ -3747,12 +3744,12 @@
bool Isolate::IsInAnyContext(Object object, uint32_t index) {
DisallowHeapAllocation no_gc;
Object context = heap()->native_contexts_list();
- while (!context->IsUndefined(this)) {
+ while (!context.IsUndefined(this)) {
Context current_context = Context::cast(context);
- if (current_context->get(index) == object) {
+ if (current_context.get(index) == object) {
return true;
}
- context = current_context->next_context_link();
+ context = current_context.next_context_link();
}
return false;
}
@@ -3760,20 +3757,20 @@
bool Isolate::IsNoElementsProtectorIntact(Context context) {
PropertyCell no_elements_cell = heap()->no_elements_protector();
bool cell_reports_intact =
- no_elements_cell->value()->IsSmi() &&
- Smi::ToInt(no_elements_cell->value()) == kProtectorValid;
+ no_elements_cell.value().IsSmi() &&
+ Smi::ToInt(no_elements_cell.value()) == kProtectorValid;
#ifdef DEBUG
- Context native_context = context->native_context();
+ Context native_context = context.native_context();
Map root_array_map =
- native_context->GetInitialJSArrayMap(GetInitialFastElementsKind());
+ native_context.GetInitialJSArrayMap(GetInitialFastElementsKind());
JSObject initial_array_proto = JSObject::cast(
- native_context->get(Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
+ native_context.get(Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
JSObject initial_object_proto = JSObject::cast(
- native_context->get(Context::INITIAL_OBJECT_PROTOTYPE_INDEX));
+ native_context.get(Context::INITIAL_OBJECT_PROTOTYPE_INDEX));
JSObject initial_string_proto = JSObject::cast(
- native_context->get(Context::INITIAL_STRING_PROTOTYPE_INDEX));
+ native_context.get(Context::INITIAL_STRING_PROTOTYPE_INDEX));
if (root_array_map.is_null() || initial_array_proto == initial_object_proto) {
// We are in the bootstrapping process, and the entire check sequence
@@ -3782,12 +3779,12 @@
}
// Check that the array prototype hasn't been altered WRT empty elements.
- if (root_array_map->prototype() != initial_array_proto) {
+ if (root_array_map.prototype() != initial_array_proto) {
DCHECK_EQ(false, cell_reports_intact);
return cell_reports_intact;
}
- FixedArrayBase elements = initial_array_proto->elements();
+ FixedArrayBase elements = initial_array_proto.elements();
ReadOnlyRoots roots(heap());
if (elements != roots.empty_fixed_array() &&
elements != roots.empty_slow_element_dictionary()) {
@@ -3796,7 +3793,7 @@
}
// Check that the Object.prototype hasn't been altered WRT empty elements.
- elements = initial_object_proto->elements();
+ elements = initial_object_proto.elements();
if (elements != roots.empty_fixed_array() &&
elements != roots.empty_slow_element_dictionary()) {
DCHECK_EQ(false, cell_reports_intact);
@@ -3820,7 +3817,7 @@
DCHECK(!has_pending_exception());
// Check that the String.prototype hasn't been altered WRT empty elements.
- elements = initial_string_proto->elements();
+ elements = initial_string_proto.elements();
if (elements != roots.empty_fixed_array() &&
elements != roots.empty_slow_element_dictionary()) {
DCHECK_EQ(false, cell_reports_intact);
@@ -3829,7 +3826,7 @@
// Check that the String.prototype has the Object.prototype
// as its [[Prototype]] still.
- if (initial_string_proto->map()->prototype() != initial_object_proto) {
+ if (initial_string_proto.map().prototype() != initial_object_proto) {
DCHECK_EQ(false, cell_reports_intact);
return cell_reports_intact;
}
@@ -3845,10 +3842,10 @@
bool Isolate::IsIsConcatSpreadableLookupChainIntact() {
Cell is_concat_spreadable_cell = heap()->is_concat_spreadable_protector();
bool is_is_concat_spreadable_set =
- Smi::ToInt(is_concat_spreadable_cell->value()) == kProtectorInvalid;
+ Smi::ToInt(is_concat_spreadable_cell.value()) == kProtectorInvalid;
#ifdef DEBUG
Map root_array_map =
- raw_native_context()->GetInitialJSArrayMap(GetInitialFastElementsKind());
+ raw_native_context().GetInitialJSArrayMap(GetInitialFastElementsKind());
if (root_array_map.is_null()) {
// Ignore the value of is_concat_spreadable during bootstrap.
return !is_is_concat_spreadable_set;
@@ -3871,13 +3868,13 @@
bool Isolate::IsIsConcatSpreadableLookupChainIntact(JSReceiver receiver) {
if (!IsIsConcatSpreadableLookupChainIntact()) return false;
- return !receiver->HasProxyInPrototype(this);
+ return !receiver.HasProxyInPrototype(this);
}
bool Isolate::IsPromiseHookProtectorIntact() {
PropertyCell promise_hook_cell = heap()->promise_hook_protector();
bool is_promise_hook_protector_intact =
- Smi::ToInt(promise_hook_cell->value()) == kProtectorValid;
+ Smi::ToInt(promise_hook_cell.value()) == kProtectorValid;
DCHECK_IMPLIES(is_promise_hook_protector_intact,
!promise_hook_or_async_event_delegate_);
DCHECK_IMPLIES(is_promise_hook_protector_intact,
@@ -3888,21 +3885,21 @@
bool Isolate::IsPromiseResolveLookupChainIntact() {
Cell promise_resolve_cell = heap()->promise_resolve_protector();
bool is_promise_resolve_protector_intact =
- Smi::ToInt(promise_resolve_cell->value()) == kProtectorValid;
+ Smi::ToInt(promise_resolve_cell.value()) == kProtectorValid;
return is_promise_resolve_protector_intact;
}
bool Isolate::IsPromiseThenLookupChainIntact() {
PropertyCell promise_then_cell = heap()->promise_then_protector();
bool is_promise_then_protector_intact =
- Smi::ToInt(promise_then_cell->value()) == kProtectorValid;
+ Smi::ToInt(promise_then_cell.value()) == kProtectorValid;
return is_promise_then_protector_intact;
}
bool Isolate::IsPromiseThenLookupChainIntact(Handle<JSReceiver> receiver) {
DisallowHeapAllocation no_gc;
if (!receiver->IsJSPromise()) return false;
- if (!IsInAnyContext(receiver->map()->prototype(),
+ if (!IsInAnyContext(receiver->map().prototype(),
Context::PROMISE_PROTOTYPE_INDEX)) {
return false;
}
@@ -3911,7 +3908,7 @@
void Isolate::UpdateNoElementsProtectorOnSetElement(Handle<JSObject> object) {
DisallowHeapAllocation no_gc;
- if (!object->map()->is_prototype_map()) return;
+ if (!object->map().is_prototype_map()) return;
if (!IsNoElementsProtectorIntact()) return;
if (!IsArrayOrObjectOrStringPrototype(*object)) return;
PropertyCell::SetValueWithInvalidation(
@@ -3920,7 +3917,7 @@
}
void Isolate::InvalidateIsConcatSpreadableProtector() {
- DCHECK(factory()->is_concat_spreadable_protector()->value()->IsSmi());
+ DCHECK(factory()->is_concat_spreadable_protector()->value().IsSmi());
DCHECK(IsIsConcatSpreadableLookupChainIntact());
factory()->is_concat_spreadable_protector()->set_value(
Smi::FromInt(kProtectorInvalid));
@@ -3928,7 +3925,7 @@
}
void Isolate::InvalidateArrayConstructorProtector() {
- DCHECK(factory()->array_constructor_protector()->value()->IsSmi());
+ DCHECK(factory()->array_constructor_protector()->value().IsSmi());
DCHECK(IsArrayConstructorIntact());
factory()->array_constructor_protector()->set_value(
Smi::FromInt(kProtectorInvalid));
@@ -3936,7 +3933,7 @@
}
void Isolate::InvalidateArraySpeciesProtector() {
- DCHECK(factory()->array_species_protector()->value()->IsSmi());
+ DCHECK(factory()->array_species_protector()->value().IsSmi());
DCHECK(IsArraySpeciesLookupChainIntact());
PropertyCell::SetValueWithInvalidation(
this, factory()->array_species_protector(),
@@ -3945,7 +3942,7 @@
}
void Isolate::InvalidateTypedArraySpeciesProtector() {
- DCHECK(factory()->typed_array_species_protector()->value()->IsSmi());
+ DCHECK(factory()->typed_array_species_protector()->value().IsSmi());
DCHECK(IsTypedArraySpeciesLookupChainIntact());
PropertyCell::SetValueWithInvalidation(
this, factory()->typed_array_species_protector(),
@@ -3954,7 +3951,7 @@
}
void Isolate::InvalidateRegExpSpeciesProtector() {
- DCHECK(factory()->regexp_species_protector()->value()->IsSmi());
+ DCHECK(factory()->regexp_species_protector()->value().IsSmi());
DCHECK(IsRegExpSpeciesLookupChainIntact());
PropertyCell::SetValueWithInvalidation(
this, factory()->regexp_species_protector(),
@@ -3963,7 +3960,7 @@
}
void Isolate::InvalidatePromiseSpeciesProtector() {
- DCHECK(factory()->promise_species_protector()->value()->IsSmi());
+ DCHECK(factory()->promise_species_protector()->value().IsSmi());
DCHECK(IsPromiseSpeciesLookupChainIntact());
PropertyCell::SetValueWithInvalidation(
this, factory()->promise_species_protector(),
@@ -3972,7 +3969,7 @@
}
void Isolate::InvalidateStringLengthOverflowProtector() {
- DCHECK(factory()->string_length_protector()->value()->IsSmi());
+ DCHECK(factory()->string_length_protector()->value().IsSmi());
DCHECK(IsStringLengthOverflowIntact());
factory()->string_length_protector()->set_value(
Smi::FromInt(kProtectorInvalid));
@@ -3980,7 +3977,7 @@
}
void Isolate::InvalidateArrayIteratorProtector() {
- DCHECK(factory()->array_iterator_protector()->value()->IsSmi());
+ DCHECK(factory()->array_iterator_protector()->value().IsSmi());
DCHECK(IsArrayIteratorLookupChainIntact());
PropertyCell::SetValueWithInvalidation(
this, factory()->array_iterator_protector(),
@@ -3989,7 +3986,7 @@
}
void Isolate::InvalidateMapIteratorProtector() {
- DCHECK(factory()->map_iterator_protector()->value()->IsSmi());
+ DCHECK(factory()->map_iterator_protector()->value().IsSmi());
DCHECK(IsMapIteratorLookupChainIntact());
PropertyCell::SetValueWithInvalidation(
this, factory()->map_iterator_protector(),
@@ -3998,7 +3995,7 @@
}
void Isolate::InvalidateSetIteratorProtector() {
- DCHECK(factory()->set_iterator_protector()->value()->IsSmi());
+ DCHECK(factory()->set_iterator_protector()->value().IsSmi());
DCHECK(IsSetIteratorLookupChainIntact());
PropertyCell::SetValueWithInvalidation(
this, factory()->set_iterator_protector(),
@@ -4007,7 +4004,7 @@
}
void Isolate::InvalidateStringIteratorProtector() {
- DCHECK(factory()->string_iterator_protector()->value()->IsSmi());
+ DCHECK(factory()->string_iterator_protector()->value().IsSmi());
DCHECK(IsStringIteratorLookupChainIntact());
PropertyCell::SetValueWithInvalidation(
this, factory()->string_iterator_protector(),
@@ -4016,7 +4013,7 @@
}
void Isolate::InvalidateArrayBufferDetachingProtector() {
- DCHECK(factory()->array_buffer_detaching_protector()->value()->IsSmi());
+ DCHECK(factory()->array_buffer_detaching_protector()->value().IsSmi());
DCHECK(IsArrayBufferDetachingIntact());
PropertyCell::SetValueWithInvalidation(
this, factory()->array_buffer_detaching_protector(),
@@ -4025,7 +4022,7 @@
}
void Isolate::InvalidatePromiseHookProtector() {
- DCHECK(factory()->promise_hook_protector()->value()->IsSmi());
+ DCHECK(factory()->promise_hook_protector()->value().IsSmi());
DCHECK(IsPromiseHookProtectorIntact());
PropertyCell::SetValueWithInvalidation(
this, factory()->promise_hook_protector(),
@@ -4034,7 +4031,7 @@
}
void Isolate::InvalidatePromiseResolveProtector() {
- DCHECK(factory()->promise_resolve_protector()->value()->IsSmi());
+ DCHECK(factory()->promise_resolve_protector()->value().IsSmi());
DCHECK(IsPromiseResolveLookupChainIntact());
factory()->promise_resolve_protector()->set_value(
Smi::FromInt(kProtectorInvalid));
@@ -4042,7 +4039,7 @@
}
void Isolate::InvalidatePromiseThenProtector() {
- DCHECK(factory()->promise_then_protector()->value()->IsSmi());
+ DCHECK(factory()->promise_then_protector()->value().IsSmi());
DCHECK(IsPromiseThenLookupChainIntact());
PropertyCell::SetValueWithInvalidation(
this, factory()->promise_then_protector(),
diff --git a/src/execution/messages.cc b/src/execution/messages.cc
index 2c12948..4461242 100644
--- a/src/execution/messages.cc
+++ b/src/execution/messages.cc
@@ -118,7 +118,7 @@
isolate->set_external_caught_exception(false);
// Turn the exception on the message into a string if it is an object.
- if (message->argument()->IsJSObject()) {
+ if (message->argument().IsJSObject()) {
HandleScope scope(isolate);
Handle<Object> argument(message->argument(), isolate);
@@ -169,17 +169,17 @@
} else {
for (int i = 0; i < global_length; i++) {
HandleScope scope(isolate);
- if (global_listeners->get(i)->IsUndefined(isolate)) continue;
+ if (global_listeners->get(i).IsUndefined(isolate)) continue;
FixedArray listener = FixedArray::cast(global_listeners->get(i));
- Foreign callback_obj = Foreign::cast(listener->get(0));
+ Foreign callback_obj = Foreign::cast(listener.get(0));
int32_t message_levels =
- static_cast<int32_t>(Smi::ToInt(listener->get(2)));
+ static_cast<int32_t>(Smi::ToInt(listener.get(2)));
if (!(message_levels & error_level)) {
continue;
}
v8::MessageCallback callback =
- FUNCTION_CAST<v8::MessageCallback>(callback_obj->foreign_address());
- Handle<Object> callback_data(listener->get(1), isolate);
+ FUNCTION_CAST<v8::MessageCallback>(callback_obj.foreign_address());
+ Handle<Object> callback_data(listener.get(1), isolate);
{
RuntimeCallTimerScope timer(
isolate, RuntimeCallCounterId::kMessageListenerCallback);
@@ -218,7 +218,7 @@
Handle<SharedFunctionInfo> shared(script->eval_from_shared(), isolate);
// Find the name of the function calling eval.
- if (shared->Name()->BooleanValue(isolate)) {
+ if (shared->Name().BooleanValue(isolate)) {
return shared->Name();
}
@@ -250,7 +250,7 @@
if (script->has_eval_from_shared()) {
Handle<SharedFunctionInfo> eval_from_shared(script->eval_from_shared(),
isolate);
- if (eval_from_shared->script()->IsScript()) {
+ if (eval_from_shared->script().IsScript()) {
Handle<Script> eval_from_script =
handle(Script::cast(eval_from_shared->script()), isolate);
builder.AppendCString(" (");
@@ -266,7 +266,7 @@
Script::COMPILATION_TYPE_EVAL);
// eval script originated from "real" source.
Handle<Object> name_obj = handle(eval_from_script->name(), isolate);
- if (eval_from_script->name()->IsString()) {
+ if (eval_from_script->name().IsString()) {
builder.AppendString(Handle<String>::cast(name_obj));
Script::PositionInfo info;
@@ -287,7 +287,7 @@
builder.AppendString(str);
}
} else {
- DCHECK(!eval_from_script->name()->IsString());
+ DCHECK(!eval_from_script->name().IsString());
builder.AppendCString("unknown source");
}
}
@@ -330,9 +330,9 @@
receiver_ = handle(array->Receiver(frame_ix), isolate);
function_ = handle(array->Function(frame_ix), isolate);
code_ = handle(array->Code(frame_ix), isolate);
- offset_ = array->Offset(frame_ix)->value();
+ offset_ = array->Offset(frame_ix).value();
- const int flags = array->Flags(frame_ix)->value();
+ const int flags = array->Flags(frame_ix).value();
is_constructor_ = (flags & FrameArray::kIsConstructor) != 0;
is_strict_ = (flags & FrameArray::kIsStrict) != 0;
is_async_ = (flags & FrameArray::kIsAsync) != 0;
@@ -392,7 +392,7 @@
Handle<Object> ScriptNameOrSourceUrl(Handle<Script> script, Isolate* isolate) {
Object name_or_url = script->source_url();
- if (!name_or_url->IsString()) name_or_url = script->name();
+ if (!name_or_url.IsString()) name_or_url = script->name();
return handle(name_or_url, isolate);
}
@@ -416,7 +416,7 @@
return isolate_->factory()->null_value();
}
- Handle<String> name(function_->shared()->Name(), isolate_);
+ Handle<String> name(function_->shared().Name(), isolate_);
name = String::Flatten(isolate_, name);
// The static initializer function is not a method, so don't add a
@@ -448,7 +448,7 @@
KeyAccumulator::GetOwnEnumPropertyKeys(isolate_, current_obj);
for (int i = 0; i < keys->length(); i++) {
HandleScope inner_scope(isolate_);
- if (!keys->get(i)->IsName()) continue;
+ if (!keys->get(i).IsName()) continue;
Handle<Name> name_key(Name::cast(keys->get(i)), isolate_);
if (!CheckMethodName(isolate_, current_obj, name_key, function_,
LookupIterator::OWN_SKIP_INTERCEPTOR))
@@ -513,7 +513,7 @@
namespace {
bool IsNonEmptyString(Handle<Object> object) {
- return (object->IsString() && String::cast(*object)->length() > 0);
+ return (object->IsString() && String::cast(*object).length() > 0);
}
void AppendFileLocation(Isolate* isolate, StackFrameBase* call_site,
@@ -688,11 +688,11 @@
}
bool JSStackFrame::HasScript() const {
- return function_->shared()->script()->IsScript();
+ return function_->shared().script().IsScript();
}
Handle<Script> JSStackFrame::GetScript() const {
- return handle(Script::cast(function_->shared()->script()), isolate_);
+ return handle(Script::cast(function_->shared().script()), isolate_);
}
void WasmStackFrame::FromFrameArray(Isolate* isolate, Handle<FrameArray> array,
@@ -704,16 +704,16 @@
array->IsAsmJsWasmFrame(frame_ix));
isolate_ = isolate;
wasm_instance_ = handle(array->WasmInstance(frame_ix), isolate);
- wasm_func_index_ = array->WasmFunctionIndex(frame_ix)->value();
+ wasm_func_index_ = array->WasmFunctionIndex(frame_ix).value();
if (array->IsWasmInterpretedFrame(frame_ix)) {
code_ = nullptr;
} else {
// The {WasmCode*} is held alive by the {GlobalWasmCodeRef}.
auto global_wasm_code_ref =
Managed<wasm::GlobalWasmCodeRef>::cast(array->WasmCodeObject(frame_ix));
- code_ = global_wasm_code_ref->get()->code();
+ code_ = global_wasm_code_ref.get()->code();
}
- offset_ = array->Offset(frame_ix)->value();
+ offset_ = array->Offset(frame_ix).value();
}
Handle<Object> WasmStackFrame::GetReceiver() const { return wasm_instance_; }
@@ -783,7 +783,7 @@
bool WasmStackFrame::HasScript() const { return true; }
Handle<Script> WasmStackFrame::GetScript() const {
- return handle(wasm_instance_->module_object()->script(), isolate_);
+ return handle(wasm_instance_->module_object().script(), isolate_);
}
void AsmJsWasmStackFrame::FromFrameArray(Isolate* isolate,
@@ -792,7 +792,7 @@
DCHECK(array->IsAsmJsWasmFrame(frame_ix));
WasmStackFrame::FromFrameArray(isolate, array, frame_ix);
is_at_number_conversion_ =
- array->Flags(frame_ix)->value() & FrameArray::kAsmJsAtNumberConversion;
+ array->Flags(frame_ix).value() & FrameArray::kAsmJsAtNumberConversion;
}
Handle<Object> AsmJsWasmStackFrame::GetReceiver() const {
@@ -805,13 +805,13 @@
}
Handle<Object> AsmJsWasmStackFrame::GetFileName() {
- Handle<Script> script(wasm_instance_->module_object()->script(), isolate_);
+ Handle<Script> script(wasm_instance_->module_object().script(), isolate_);
DCHECK(script->IsUserJavaScript());
return handle(script->name(), isolate_);
}
Handle<Object> AsmJsWasmStackFrame::GetScriptNameOrSourceUrl() {
- Handle<Script> script(wasm_instance_->module_object()->script(), isolate_);
+ Handle<Script> script(wasm_instance_->module_object().script(), isolate_);
DCHECK_EQ(Script::TYPE_NORMAL, script->type());
return ScriptNameOrSourceUrl(script, isolate_);
}
@@ -831,14 +831,14 @@
int AsmJsWasmStackFrame::GetLineNumber() {
DCHECK_LE(0, GetPosition());
- Handle<Script> script(wasm_instance_->module_object()->script(), isolate_);
+ Handle<Script> script(wasm_instance_->module_object().script(), isolate_);
DCHECK(script->IsUserJavaScript());
return Script::GetLineNumber(script, GetPosition()) + 1;
}
int AsmJsWasmStackFrame::GetColumnNumber() {
DCHECK_LE(0, GetPosition());
- Handle<Script> script(wasm_instance_->module_object()->script(), isolate_);
+ Handle<Script> script(wasm_instance_->module_object().script(), isolate_);
DCHECK(script->IsUserJavaScript());
return Script::GetColumnNumber(script, GetPosition()) + 1;
}
@@ -873,7 +873,7 @@
StackFrameBase* FrameArrayIterator::Frame() {
DCHECK(HasFrame());
- const int flags = array_->Flags(frame_ix_)->value();
+ const int flags = array_->Flags(frame_ix_).value();
int flag_mask = FrameArray::kIsWasmFrame |
FrameArray::kIsWasmInterpretedFrame |
FrameArray::kIsAsmJsWasmFrame;
diff --git a/src/execution/runtime-profiler.cc b/src/execution/runtime-profiler.cc
index bdcd7e2..0ed36cb 100644
--- a/src/execution/runtime-profiler.cc
+++ b/src/execution/runtime-profiler.cc
@@ -71,7 +71,7 @@
const char* type) {
if (FLAG_trace_opt) {
PrintF("[marking ");
- function->ShortPrint();
+ function.ShortPrint();
PrintF(" for %s recompilation, reason: %s", type, reason);
PrintF("]\n");
}
@@ -80,41 +80,41 @@
void RuntimeProfiler::Optimize(JSFunction function, OptimizationReason reason) {
DCHECK_NE(reason, OptimizationReason::kDoNotOptimize);
TraceRecompile(function, OptimizationReasonToString(reason), "optimized");
- function->MarkForOptimization(ConcurrencyMode::kConcurrent);
+ function.MarkForOptimization(ConcurrencyMode::kConcurrent);
}
void RuntimeProfiler::AttemptOnStackReplacement(InterpretedFrame* frame,
int loop_nesting_levels) {
JSFunction function = frame->function();
- SharedFunctionInfo shared = function->shared();
- if (!FLAG_use_osr || !shared->IsUserJavaScript()) {
+ SharedFunctionInfo shared = function.shared();
+ if (!FLAG_use_osr || !shared.IsUserJavaScript()) {
return;
}
// If the code is not optimizable, don't try OSR.
- if (shared->optimization_disabled()) return;
+ if (shared.optimization_disabled()) return;
// We're using on-stack replacement: Store new loop nesting level in
// BytecodeArray header so that certain back edges in any interpreter frame
// for this bytecode will trigger on-stack replacement for that frame.
if (FLAG_trace_osr) {
PrintF("[OSR - arming back edges in ");
- function->PrintName();
+ function.PrintName();
PrintF("]\n");
}
DCHECK_EQ(StackFrame::INTERPRETED, frame->type());
- int level = frame->GetBytecodeArray()->osr_loop_nesting_level();
- frame->GetBytecodeArray()->set_osr_loop_nesting_level(
+ int level = frame->GetBytecodeArray().osr_loop_nesting_level();
+ frame->GetBytecodeArray().set_osr_loop_nesting_level(
Min(level + loop_nesting_levels, AbstractCode::kMaxLoopNestingMarker));
}
void RuntimeProfiler::MaybeOptimize(JSFunction function,
InterpretedFrame* frame) {
- if (function->IsInOptimizationQueue()) {
+ if (function.IsInOptimizationQueue()) {
if (FLAG_trace_opt_verbose) {
PrintF("[function ");
- function->PrintName();
+ function.PrintName();
PrintF(" is already in optimization queue]\n");
}
return;
@@ -127,10 +127,10 @@
return;
}
- if (function->shared()->optimization_disabled()) return;
+ if (function.shared().optimization_disabled()) return;
OptimizationReason reason =
- ShouldOptimize(function, function->shared()->GetBytecodeArray());
+ ShouldOptimize(function, function.shared().GetBytecodeArray());
if (reason != OptimizationReason::kDoNotOptimize) {
Optimize(function, reason);
@@ -138,19 +138,19 @@
}
bool RuntimeProfiler::MaybeOSR(JSFunction function, InterpretedFrame* frame) {
- int ticks = function->feedback_vector()->profiler_ticks();
+ int ticks = function.feedback_vector().profiler_ticks();
// TODO(rmcilroy): Also ensure we only OSR top-level code if it is smaller
// than kMaxToplevelSourceSize.
- if (function->IsMarkedForOptimization() ||
- function->IsMarkedForConcurrentOptimization() ||
- function->HasOptimizedCode()) {
+ if (function.IsMarkedForOptimization() ||
+ function.IsMarkedForConcurrentOptimization() ||
+ function.HasOptimizedCode()) {
// Attempt OSR if we are still running interpreted code even though the
// the function has long been marked or even already been optimized.
int64_t allowance =
kOSRBytecodeSizeAllowanceBase +
static_cast<int64_t>(ticks) * kOSRBytecodeSizeAllowancePerTick;
- if (function->shared()->GetBytecodeArray()->length() <= allowance) {
+ if (function.shared().GetBytecodeArray().length() <= allowance) {
AttemptOnStackReplacement(frame);
}
return true;
@@ -160,27 +160,27 @@
OptimizationReason RuntimeProfiler::ShouldOptimize(JSFunction function,
BytecodeArray bytecode) {
- int ticks = function->feedback_vector()->profiler_ticks();
+ int ticks = function.feedback_vector().profiler_ticks();
int ticks_for_optimization =
kProfilerTicksBeforeOptimization +
- (bytecode->length() / kBytecodeSizeAllowancePerTick);
+ (bytecode.length() / kBytecodeSizeAllowancePerTick);
if (ticks >= ticks_for_optimization) {
return OptimizationReason::kHotAndStable;
} else if (!any_ic_changed_ &&
- bytecode->length() < kMaxBytecodeSizeForEarlyOpt) {
+ bytecode.length() < kMaxBytecodeSizeForEarlyOpt) {
// If no IC was patched since the last tick and this function is very
// small, optimistically optimize it now.
return OptimizationReason::kSmallFunction;
} else if (FLAG_trace_opt_verbose) {
PrintF("[not yet optimizing ");
- function->PrintName();
+ function.PrintName();
PrintF(", not enough ticks: %d/%d and ", ticks,
kProfilerTicksBeforeOptimization);
if (any_ic_changed_) {
PrintF("ICs changed]\n");
} else {
PrintF(" too large for small function optimization: %d/%d]\n",
- bytecode->length(), kMaxBytecodeSizeForEarlyOpt);
+ bytecode.length(), kMaxBytecodeSizeForEarlyOpt);
}
}
return OptimizationReason::kDoNotOptimize;
@@ -206,18 +206,18 @@
if (!frame->is_interpreted()) continue;
JSFunction function = frame->function();
- DCHECK(function->shared()->is_compiled());
- if (!function->shared()->IsInterpreted()) continue;
+ DCHECK(function.shared().is_compiled());
+ if (!function.shared().IsInterpreted()) continue;
- if (!function->has_feedback_vector()) continue;
+ if (!function.has_feedback_vector()) continue;
MaybeOptimize(function, InterpretedFrame::cast(frame));
// TODO(leszeks): Move this increment to before the maybe optimize checks,
// and update the tests to assume the increment has already happened.
- int ticks = function->feedback_vector()->profiler_ticks();
+ int ticks = function.feedback_vector().profiler_ticks();
if (ticks < Smi::kMaxValue) {
- function->feedback_vector()->set_profiler_ticks(ticks + 1);
+ function.feedback_vector().set_profiler_ticks(ticks + 1);
}
}
any_ic_changed_ = false;
diff --git a/src/execution/simulator.h b/src/execution/simulator.h
index 6ea5b6f..74f8e91 100644
--- a/src/execution/simulator.h
+++ b/src/execution/simulator.h
@@ -109,7 +109,7 @@
}
static GeneratedCode FromCode(Code code) {
- return FromAddress(code->GetIsolate(), code->entry());
+ return FromAddress(code.GetIsolate(), code.entry());
}
#ifdef USE_SIMULATOR
diff --git a/src/extensions/statistics-extension.cc b/src/extensions/statistics-extension.cc
index 554ab07..458aec3 100644
--- a/src/extensions/statistics-extension.cc
+++ b/src/extensions/statistics-extension.cc
@@ -129,16 +129,16 @@
int source_position_table_total = 0;
for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) {
- if (obj->IsCode()) {
+ if (obj.IsCode()) {
Code code = Code::cast(obj);
- reloc_info_total += code->relocation_info()->Size();
- ByteArray source_position_table = code->SourcePositionTable();
- if (source_position_table->length() > 0) {
- source_position_table_total += code->SourcePositionTable()->Size();
+ reloc_info_total += code.relocation_info().Size();
+ ByteArray source_position_table = code.SourcePositionTable();
+ if (source_position_table.length() > 0) {
+ source_position_table_total += code.SourcePositionTable().Size();
}
- } else if (obj->IsBytecodeArray()) {
+ } else if (obj.IsBytecodeArray()) {
source_position_table_total +=
- BytecodeArray::cast(obj)->SourcePositionTable()->Size();
+ BytecodeArray::cast(obj).SourcePositionTable().Size();
}
}
diff --git a/src/external-reference.cc b/src/external-reference.cc
index 9641198..ac4dd6b 100644
--- a/src/external-reference.cc
+++ b/src/external-reference.cc
@@ -639,7 +639,7 @@
Address GetOrCreateHash(Isolate* isolate, Address raw_key) {
DisallowHeapAllocation no_gc;
- return Object(raw_key)->GetOrCreateHash(isolate).ptr();
+ return Object(raw_key).GetOrCreateHash(isolate).ptr();
}
FUNCTION_REFERENCE(get_or_create_hash_raw, GetOrCreateHash)
diff --git a/src/feedback-vector-inl.h b/src/feedback-vector-inl.h
index a261509..928e498 100644
--- a/src/feedback-vector-inl.h
+++ b/src/feedback-vector-inl.h
@@ -113,7 +113,7 @@
bool FeedbackVector::is_empty() const { return length() == 0; }
FeedbackMetadata FeedbackVector::metadata() const {
- return shared_function_info()->feedback_metadata();
+ return shared_function_info().feedback_metadata();
}
void FeedbackVector::clear_invocation_count() { set_invocation_count(0); }
@@ -136,7 +136,7 @@
MaybeObject slot = optimized_code_weak_or_smi();
Smi value;
if (!slot->ToSmi(&value)) return OptimizationMarker::kNone;
- return static_cast<OptimizationMarker>(value->value());
+ return static_cast<OptimizationMarker>(value.value());
}
bool FeedbackVector::has_optimized_code() const {
@@ -170,7 +170,7 @@
DCHECK_GE(index, 0);
ClosureFeedbackCellArray cell_array =
ClosureFeedbackCellArray::cast(closure_feedback_cell_array());
- return cell_array->GetFeedbackCell(index);
+ return cell_array.GetFeedbackCell(index);
}
void FeedbackVector::Set(FeedbackSlot slot, MaybeObject value,
@@ -287,13 +287,13 @@
}
bool FeedbackMetadataIterator::HasNext() const {
- return next_slot_.ToInt() < metadata()->slot_count();
+ return next_slot_.ToInt() < metadata().slot_count();
}
FeedbackSlot FeedbackMetadataIterator::Next() {
DCHECK(HasNext());
cur_slot_ = next_slot_;
- slot_kind_ = metadata()->GetKind(cur_slot_);
+ slot_kind_ = metadata().GetKind(cur_slot_);
next_slot_ = FeedbackSlot(next_slot_.ToInt() + entry_size());
return cur_slot_;
}
@@ -303,18 +303,18 @@
}
MaybeObject FeedbackNexus::GetFeedback() const {
- MaybeObject feedback = vector()->Get(slot());
+ MaybeObject feedback = vector().Get(slot());
FeedbackVector::AssertNoLegacyTypes(feedback);
return feedback;
}
MaybeObject FeedbackNexus::GetFeedbackExtra() const {
#ifdef DEBUG
- FeedbackSlotKind kind = vector()->GetKind(slot());
+ FeedbackSlotKind kind = vector().GetKind(slot());
DCHECK_LT(1, FeedbackMetadata::GetSlotSize(kind));
#endif
- int extra_index = vector()->GetIndex(slot()) + 1;
- return vector()->get(extra_index);
+ int extra_index = vector().GetIndex(slot()) + 1;
+ return vector().get(extra_index);
}
void FeedbackNexus::SetFeedback(Object feedback, WriteBarrierMode mode) {
@@ -323,18 +323,18 @@
void FeedbackNexus::SetFeedback(MaybeObject feedback, WriteBarrierMode mode) {
FeedbackVector::AssertNoLegacyTypes(feedback);
- vector()->Set(slot(), feedback, mode);
+ vector().Set(slot(), feedback, mode);
}
void FeedbackNexus::SetFeedbackExtra(Object feedback_extra,
WriteBarrierMode mode) {
#ifdef DEBUG
- FeedbackSlotKind kind = vector()->GetKind(slot());
+ FeedbackSlotKind kind = vector().GetKind(slot());
DCHECK_LT(1, FeedbackMetadata::GetSlotSize(kind));
FeedbackVector::AssertNoLegacyTypes(MaybeObject::FromObject(feedback_extra));
#endif
- int index = vector()->GetIndex(slot()) + 1;
- vector()->set(index, MaybeObject::FromObject(feedback_extra), mode);
+ int index = vector().GetIndex(slot()) + 1;
+ vector().set(index, MaybeObject::FromObject(feedback_extra), mode);
}
void FeedbackNexus::SetFeedbackExtra(MaybeObject feedback_extra,
@@ -342,11 +342,11 @@
#ifdef DEBUG
FeedbackVector::AssertNoLegacyTypes(feedback_extra);
#endif
- int index = vector()->GetIndex(slot()) + 1;
- vector()->set(index, feedback_extra, mode);
+ int index = vector().GetIndex(slot()) + 1;
+ vector().set(index, feedback_extra, mode);
}
-Isolate* FeedbackNexus::GetIsolate() const { return vector()->GetIsolate(); }
+Isolate* FeedbackNexus::GetIsolate() const { return vector().GetIsolate(); }
} // namespace internal
} // namespace v8
diff --git a/src/feedback-vector.cc b/src/feedback-vector.cc
index 09910b6..b77950f 100644
--- a/src/feedback-vector.cc
+++ b/src/feedback-vector.cc
@@ -44,13 +44,13 @@
static bool IsPropertyNameFeedback(MaybeObject feedback) {
HeapObject heap_object;
if (!feedback->GetHeapObjectIfStrong(&heap_object)) return false;
- if (heap_object->IsString()) {
- DCHECK(heap_object->IsInternalizedString());
+ if (heap_object.IsString()) {
+ DCHECK(heap_object.IsInternalizedString());
return true;
}
- if (!heap_object->IsSymbol()) return false;
+ if (!heap_object.IsSymbol()) return false;
Symbol symbol = Symbol::cast(heap_object);
- ReadOnlyRoots roots = symbol->GetReadOnlyRoots();
+ ReadOnlyRoots roots = symbol.GetReadOnlyRoots();
return symbol != roots.uninitialized_symbol() &&
symbol != roots.premonomorphic_symbol() &&
symbol != roots.megamorphic_symbol();
@@ -195,11 +195,11 @@
FeedbackSlotKind FeedbackVector::GetKind(FeedbackSlot slot) const {
DCHECK(!is_empty());
- return metadata()->GetKind(slot);
+ return metadata().GetKind(slot);
}
FeedbackSlot FeedbackVector::GetTypeProfileSlot() const {
- DCHECK(metadata()->HasTypeProfileSlot());
+ DCHECK(metadata().HasTypeProfileSlot());
FeedbackSlot slot =
FeedbackVector::ToSlot(FeedbackVectorSpec::kTypeProfileSlotIndex);
DCHECK_EQ(FeedbackSlotKind::kTypeProfile, GetKind(slot));
@@ -212,7 +212,7 @@
Factory* factory = isolate->factory();
int num_feedback_cells =
- shared->feedback_metadata()->closure_feedback_cell_count();
+ shared->feedback_metadata().closure_feedback_cell_count();
Handle<ClosureFeedbackCellArray> feedback_cell_array =
factory->NewClosureFeedbackCellArray(num_feedback_cells);
@@ -231,7 +231,7 @@
Handle<ClosureFeedbackCellArray> closure_feedback_cell_array) {
Factory* factory = isolate->factory();
- const int slot_count = shared->feedback_metadata()->slot_count();
+ const int slot_count = shared->feedback_metadata().slot_count();
Handle<FeedbackVector> vector = factory->NewFeedbackVector(
shared, closure_feedback_cell_array, AllocationType::kOld);
@@ -254,7 +254,7 @@
*uninitialized_sentinel);
for (int i = 0; i < slot_count;) {
FeedbackSlot slot(i);
- FeedbackSlotKind kind = shared->feedback_metadata()->GetKind(slot);
+ FeedbackSlotKind kind = shared->feedback_metadata().GetKind(slot);
int index = FeedbackVector::GetIndex(slot);
int entry_size = FeedbackMetadata::GetSlotSize(kind);
@@ -319,7 +319,7 @@
Isolate* isolate, Handle<FeedbackVector> vector) {
DCHECK(!isolate->is_best_effort_code_coverage() ||
isolate->is_collecting_type_profile());
- if (!vector->shared_function_info()->IsSubjectToDebugging()) return;
+ if (!vector->shared_function_info().IsSubjectToDebugging()) return;
Handle<ArrayList> list = Handle<ArrayList>::cast(
isolate->factory()->feedback_vectors_for_profiling_tools());
list = ArrayList::Add(isolate, list, vector);
@@ -360,16 +360,16 @@
}
Code code = Code::cast(slot->GetHeapObject());
- if (code->marked_for_deoptimization()) {
+ if (code.marked_for_deoptimization()) {
if (FLAG_trace_deopt) {
PrintF("[evicting optimizing code marked for deoptimization (%s) for ",
reason);
- shared->ShortPrint();
+ shared.ShortPrint();
PrintF("]\n");
}
- if (!code->deopt_already_counted()) {
+ if (!code.deopt_already_counted()) {
increment_deopt_count();
- code->set_deopt_already_counted(true);
+ code.set_deopt_already_counted(true);
}
ClearOptimizedCode();
}
@@ -399,7 +399,7 @@
if (object->GetHeapObject(&heap_object)) {
// Instead of FixedArray, the Feedback and the Extra should contain
// WeakFixedArrays. The only allowed FixedArray subtype is HashTable.
- DCHECK_IMPLIES(heap_object->IsFixedArray(), heap_object->IsHashTable());
+ DCHECK_IMPLIES(heap_object.IsFixedArray(), heap_object.IsHashTable());
}
#endif
}
@@ -408,8 +408,8 @@
Isolate* isolate = GetIsolate();
HeapObject heap_object;
if (GetFeedback()->GetHeapObjectIfStrong(&heap_object) &&
- heap_object->IsWeakFixedArray() &&
- WeakFixedArray::cast(heap_object)->length() == length) {
+ heap_object.IsWeakFixedArray() &&
+ WeakFixedArray::cast(heap_object).length() == length) {
return handle(WeakFixedArray::cast(heap_object), isolate);
}
Handle<WeakFixedArray> array = isolate->factory()->NewWeakFixedArray(length);
@@ -421,8 +421,8 @@
Isolate* isolate = GetIsolate();
HeapObject heap_object;
if (GetFeedbackExtra()->GetHeapObjectIfStrong(&heap_object) &&
- heap_object->IsWeakFixedArray() &&
- WeakFixedArray::cast(heap_object)->length() == length) {
+ heap_object.IsWeakFixedArray() &&
+ WeakFixedArray::cast(heap_object).length() == length) {
return handle(WeakFixedArray::cast(heap_object), isolate);
}
Handle<WeakFixedArray> array = isolate->factory()->NewWeakFixedArray(length);
@@ -630,17 +630,17 @@
}
HeapObject heap_object;
if (feedback->GetHeapObjectIfStrong(&heap_object)) {
- if (heap_object->IsWeakFixedArray()) {
+ if (heap_object.IsWeakFixedArray()) {
// Determine state purely by our structure, don't check if the maps
// are cleared.
return POLYMORPHIC;
}
- if (heap_object->IsName()) {
+ if (heap_object.IsName()) {
DCHECK(IsKeyedLoadICKind(kind()) || IsKeyedStoreICKind(kind()) ||
IsKeyedHasICKind(kind()));
Object extra = GetFeedbackExtra()->GetHeapObjectAssumeStrong();
WeakFixedArray extra_array = WeakFixedArray::cast(extra);
- return extra_array->length() > 2 ? POLYMORPHIC : MONOMORPHIC;
+ return extra_array.length() > 2 ? POLYMORPHIC : MONOMORPHIC;
}
}
UNREACHABLE();
@@ -652,7 +652,7 @@
return GENERIC;
} else if (feedback->IsWeakOrCleared() ||
(feedback->GetHeapObjectIfStrong(&heap_object) &&
- heap_object->IsAllocationSite())) {
+ heap_object.IsAllocationSite())) {
return MONOMORPHIC;
}
@@ -732,7 +732,7 @@
return MONOMORPHIC;
}
- DCHECK(feedback->GetHeapObjectAssumeStrong()->IsWeakFixedArray());
+ DCHECK(feedback->GetHeapObjectAssumeStrong().IsWeakFixedArray());
return POLYMORPHIC;
}
@@ -796,7 +796,7 @@
break;
case MONOMORPHIC:
if (maybe_feedback->IsCleared() || feedback.is_identical_to(source_map) ||
- Map::cast(*feedback)->is_deprecated()) {
+ Map::cast(*feedback).is_deprecated()) {
// Remain in MONOMORPHIC state if previous feedback has been collected.
SetFeedback(HeapObjectReference::Weak(*source_map));
SetFeedbackExtra(*result_map);
@@ -858,7 +858,7 @@
DCHECK(IsCallICKind(kind()));
Object call_count = GetFeedbackExtra()->cast<Object>();
- CHECK(call_count->IsSmi());
+ CHECK(call_count.IsSmi());
uint32_t value = static_cast<uint32_t>(Smi::ToInt(call_count));
return CallCountField::decode(value);
}
@@ -867,7 +867,7 @@
DCHECK(IsCallICKind(kind()));
Object call_count = GetFeedbackExtra()->cast<Object>();
- CHECK(call_count->IsSmi());
+ CHECK(call_count.IsSmi());
uint32_t count = static_cast<uint32_t>(Smi::ToInt(call_count));
uint32_t value = CallCountField::encode(CallCountField::decode(count));
int result = static_cast<int>(value | SpeculationModeField::encode(mode));
@@ -878,7 +878,7 @@
DCHECK(IsCallICKind(kind()));
Object call_count = GetFeedbackExtra()->cast<Object>();
- CHECK(call_count->IsSmi());
+ CHECK(call_count.IsSmi());
uint32_t value = static_cast<uint32_t>(Smi::ToInt(call_count));
return SpeculationModeField::decode(value);
}
@@ -886,7 +886,7 @@
float FeedbackNexus::ComputeCallFrequency() {
DCHECK(IsCallICKind(kind()));
- double const invocation_count = vector()->invocation_count();
+ double const invocation_count = vector().invocation_count();
double const call_count = GetCallCount();
if (invocation_count == 0) {
// Prevent division by 0.
@@ -950,7 +950,7 @@
bool is_named_feedback = IsPropertyNameFeedback(feedback);
HeapObject heap_object;
if ((feedback->GetHeapObjectIfStrong(&heap_object) &&
- heap_object->IsWeakFixedArray()) ||
+ heap_object.IsWeakFixedArray()) ||
is_named_feedback) {
int found = 0;
WeakFixedArray array;
@@ -962,9 +962,9 @@
}
const int increment = 2;
HeapObject heap_object;
- for (int i = 0; i < array->length(); i += increment) {
- DCHECK(array->Get(i)->IsWeakOrCleared());
- if (array->Get(i)->GetHeapObjectIfWeak(&heap_object)) {
+ for (int i = 0; i < array.length(); i += increment) {
+ DCHECK(array.Get(i)->IsWeakOrCleared());
+ if (array.Get(i)->GetHeapObjectIfWeak(&heap_object)) {
Map map = Map::cast(heap_object);
maps->push_back(handle(map, isolate));
found++;
@@ -977,7 +977,7 @@
return 1;
} else if (feedback->GetHeapObjectIfStrong(&heap_object) &&
heap_object ==
- heap_object->GetReadOnlyRoots().premonomorphic_symbol()) {
+ heap_object.GetReadOnlyRoots().premonomorphic_symbol()) {
if (GetFeedbackExtra()->GetHeapObjectIfWeak(&heap_object)) {
Map map = Map::cast(heap_object);
maps->push_back(handle(map, isolate));
@@ -999,7 +999,7 @@
bool is_named_feedback = IsPropertyNameFeedback(feedback);
HeapObject heap_object;
if ((feedback->GetHeapObjectIfStrong(&heap_object) &&
- heap_object->IsWeakFixedArray()) ||
+ heap_object.IsWeakFixedArray()) ||
is_named_feedback) {
WeakFixedArray array;
if (is_named_feedback) {
@@ -1010,12 +1010,12 @@
}
const int increment = 2;
HeapObject heap_object;
- for (int i = 0; i < array->length(); i += increment) {
- DCHECK(array->Get(i)->IsWeakOrCleared());
- if (array->Get(i)->GetHeapObjectIfWeak(&heap_object)) {
+ for (int i = 0; i < array.length(); i += increment) {
+ DCHECK(array.Get(i)->IsWeakOrCleared());
+ if (array.Get(i)->GetHeapObjectIfWeak(&heap_object)) {
Map array_map = Map::cast(heap_object);
- if (array_map == *map && !array->Get(i + increment - 1)->IsCleared()) {
- MaybeObject handler = array->Get(i + increment - 1);
+ if (array_map == *map && !array.Get(i + increment - 1)->IsCleared()) {
+ MaybeObject handler = array.Get(i + increment - 1);
DCHECK(IC::IsHandler(handler));
return handle(handler, isolate);
}
@@ -1046,7 +1046,7 @@
bool is_named_feedback = IsPropertyNameFeedback(feedback);
HeapObject heap_object;
if ((feedback->GetHeapObjectIfStrong(&heap_object) &&
- heap_object->IsWeakFixedArray()) ||
+ heap_object.IsWeakFixedArray()) ||
is_named_feedback) {
WeakFixedArray array;
if (is_named_feedback) {
@@ -1057,12 +1057,12 @@
}
const int increment = 2;
HeapObject heap_object;
- for (int i = 0; i < array->length(); i += increment) {
+ for (int i = 0; i < array.length(); i += increment) {
// Be sure to skip handlers whose maps have been cleared.
- DCHECK(array->Get(i)->IsWeakOrCleared());
- if (array->Get(i)->GetHeapObjectIfWeak(&heap_object) &&
- !array->Get(i + increment - 1)->IsCleared()) {
- MaybeObject handler = array->Get(i + increment - 1);
+ DCHECK(array.Get(i)->IsWeakOrCleared());
+ if (array.Get(i)->GetHeapObjectIfWeak(&heap_object) &&
+ !array.Get(i + increment - 1)->IsCleared()) {
+ MaybeObject handler = array.Get(i + increment - 1);
DCHECK(IC::IsHandler(handler));
code_list->push_back(handle(handler, isolate));
count++;
@@ -1190,7 +1190,7 @@
Handle<StoreHandler> data_handler =
Handle<StoreHandler>::cast(maybe_code_handler.object());
handler = handle(Code::cast(data_handler->smi_handler()),
- vector()->GetIsolate());
+ vector().GetIsolate());
} else if (maybe_code_handler.object()->IsSmi()) {
// Skip proxy handlers.
DCHECK_EQ(*(maybe_code_handler.object()),
@@ -1259,7 +1259,7 @@
bool InList(Handle<ArrayList> types, Handle<String> type) {
for (int i = 0; i < types->Length(); i++) {
Object obj = types->Get(i);
- if (String::cast(obj)->Equals(*type)) {
+ if (String::cast(obj).Equals(*type)) {
return true;
}
}
@@ -1295,7 +1295,7 @@
isolate, types, position,
ArrayList::Add(isolate, position_specific_types, type));
} else {
- DCHECK(types->ValueAt(entry)->IsArrayList());
+ DCHECK(types->ValueAt(entry).IsArrayList());
position_specific_types =
handle(ArrayList::cast(types->ValueAt(entry)), isolate);
if (!InList(position_specific_types, type)) { // Add type
@@ -1327,8 +1327,8 @@
index < types->length(); index += SimpleNumberDictionary::kEntrySize) {
int key_index = index + SimpleNumberDictionary::kEntryKeyIndex;
Object key = types->get(key_index);
- if (key->IsSmi()) {
- int position = Smi::cast(key)->value();
+ if (key.IsSmi()) {
+ int position = Smi::cast(key).value();
source_positions.push_back(position);
}
}
@@ -1355,7 +1355,7 @@
if (entry == SimpleNumberDictionary::kNotFound) {
return types_for_position;
}
- DCHECK(types->ValueAt(entry)->IsArrayList());
+ DCHECK(types->ValueAt(entry).IsArrayList());
Handle<ArrayList> position_specific_types =
Handle<ArrayList>(ArrayList::cast(types->ValueAt(entry)), isolate);
for (int i = 0; i < position_specific_types->Length(); i++) {
@@ -1378,7 +1378,7 @@
index += SimpleNumberDictionary::kEntrySize) {
int key_index = index + SimpleNumberDictionary::kEntryKeyIndex;
Object key = feedback->get(key_index);
- if (key->IsSmi()) {
+ if (key.IsSmi()) {
int value_index = index + SimpleNumberDictionary::kEntryValueIndex;
Handle<ArrayList> position_specific_types(
diff --git a/src/feedback-vector.h b/src/feedback-vector.h
index 42536b2..3653228 100644
--- a/src/feedback-vector.h
+++ b/src/feedback-vector.h
@@ -618,7 +618,7 @@
FeedbackNexus(FeedbackVector vector, FeedbackSlot slot)
: vector_(vector), slot_(slot) {
kind_ =
- (vector.is_null()) ? FeedbackSlotKind::kInvalid : vector->GetKind(slot);
+ (vector.is_null()) ? FeedbackSlotKind::kInvalid : vector.GetKind(slot);
}
Handle<FeedbackVector> vector_handle() const {
@@ -632,7 +632,7 @@
FeedbackSlotKind kind() const { return kind_; }
inline LanguageMode GetLanguageMode() const {
- return vector()->GetLanguageMode(slot());
+ return vector().GetLanguageMode(slot());
}
InlineCacheState ic_state() const;
diff --git a/src/handles/global-handles.cc b/src/handles/global-handles.cc
index 98b015b..66ef6dc 100644
--- a/src/handles/global-handles.cc
+++ b/src/handles/global-handles.cc
@@ -350,7 +350,7 @@
namespace {
void ExtractInternalFields(JSObject jsobject, void** embedder_fields, int len) {
- int field_count = jsobject->GetEmbedderFieldCount();
+ int field_count = jsobject.GetEmbedderFieldCount();
for (int i = 0; i < len; ++i) {
if (field_count == i) break;
void* pointer;
@@ -517,7 +517,7 @@
void* embedder_fields[v8::kEmbedderFieldsInWeakCallback] = {nullptr,
nullptr};
- if (weakness_type() != PHANTOM_WEAK && object()->IsJSObject()) {
+ if (weakness_type() != PHANTOM_WEAK && object().IsJSObject()) {
ExtractInternalFields(JSObject::cast(object()), embedder_fields,
v8::kEmbedderFieldsInWeakCallback);
}
@@ -548,10 +548,10 @@
set_state(NEAR_DEATH);
// Check that we are not passing a finalized external string to
// the callback.
- DCHECK(!object()->IsExternalOneByteString() ||
- ExternalOneByteString::cast(object())->resource() != nullptr);
- DCHECK(!object()->IsExternalTwoByteString() ||
- ExternalTwoByteString::cast(object())->resource() != nullptr);
+ DCHECK(!object().IsExternalOneByteString() ||
+ ExternalOneByteString::cast(object()).resource() != nullptr);
+ DCHECK(!object().IsExternalTwoByteString() ||
+ ExternalTwoByteString::cast(object()).resource() != nullptr);
// Leaving V8.
VMState<EXTERNAL> vmstate(isolate);
HandleScope handle_scope(isolate);
@@ -711,7 +711,7 @@
Node::FromLocation(location)->global_handles();
#ifdef VERIFY_HEAP
if (i::FLAG_verify_heap) {
- Object(*location)->ObjectVerify(global_handles->isolate());
+ Object(*location).ObjectVerify(global_handles->isolate());
}
#endif // VERIFY_HEAP
return global_handles->Create(*location);
@@ -1283,7 +1283,7 @@
PrintF("Global handles:\n");
for (Node* node : *regular_nodes_) {
PrintF(" handle %p to %p%s\n", node->location().ToVoidPtr(),
- reinterpret_cast<void*>(node->object()->ptr()),
+ reinterpret_cast<void*>(node->object().ptr()),
node->IsWeak() ? " (weak)" : "");
}
}
@@ -1336,8 +1336,8 @@
MemsetPointer(FullObjectSlot(next_block), the_hole, kSize);
blocks_.push_back(next_block);
}
- DCHECK_EQ(the_hole->ptr(), blocks_[block][offset]);
- blocks_[block][offset] = object->ptr();
+ DCHECK_EQ(the_hole.ptr(), blocks_[block][offset]);
+ blocks_[block][offset] = object.ptr();
if (ObjectInYoungGeneration(object)) {
young_node_indices_.push_back(size_);
}
diff --git a/src/handles/handles.cc b/src/handles/handles.cc
index de04d4b..7e04598 100644
--- a/src/handles/handles.cc
+++ b/src/handles/handles.cc
@@ -31,7 +31,7 @@
bool HandleBase::IsDereferenceAllowed(DereferenceCheckMode mode) const {
DCHECK_NOT_NULL(location_);
Object object(*location_);
- if (object->IsSmi()) return true;
+ if (object.IsSmi()) return true;
HeapObject heap_object = HeapObject::cast(object);
Isolate* isolate;
if (!GetIsolateFromWritableObject(heap_object, &isolate)) return true;
@@ -44,9 +44,9 @@
if (mode == INCLUDE_DEFERRED_CHECK &&
!AllowDeferredHandleDereference::IsAllowed()) {
// Accessing cells, maps and internalized strings is safe.
- if (heap_object->IsCell()) return true;
- if (heap_object->IsMap()) return true;
- if (heap_object->IsInternalizedString()) return true;
+ if (heap_object.IsCell()) return true;
+ if (heap_object.IsMap()) return true;
+ if (heap_object.IsInternalizedString()) return true;
return !isolate->IsDeferredHandle(location_);
}
return true;
diff --git a/src/hash-seed-inl.h b/src/hash-seed-inl.h
index 575da0c..62f8946 100644
--- a/src/hash-seed-inl.h
+++ b/src/hash-seed-inl.h
@@ -39,7 +39,7 @@
inline uint64_t HashSeed(ReadOnlyRoots roots) {
uint64_t seed;
- roots.hash_seed()->copy_out(0, reinterpret_cast<byte*>(&seed), kInt64Size);
+ roots.hash_seed().copy_out(0, reinterpret_cast<byte*>(&seed), kInt64Size);
DCHECK(FLAG_randomize_hashes || seed == 0);
return seed;
}
diff --git a/src/heap/array-buffer-tracker-inl.h b/src/heap/array-buffer-tracker-inl.h
index 220df08..647a7b0 100644
--- a/src/heap/array-buffer-tracker-inl.h
+++ b/src/heap/array-buffer-tracker-inl.h
@@ -16,12 +16,12 @@
namespace internal {
void ArrayBufferTracker::RegisterNew(Heap* heap, JSArrayBuffer buffer) {
- if (buffer->backing_store() == nullptr) return;
+ if (buffer.backing_store() == nullptr) return;
// ArrayBuffer tracking works only for small objects.
DCHECK(!heap->IsLargeObject(buffer));
- const size_t length = buffer->byte_length();
+ const size_t length = buffer.byte_length();
Page* page = Page::FromHeapObject(buffer);
{
base::MutexGuard guard(page->mutex());
@@ -42,10 +42,10 @@
}
void ArrayBufferTracker::Unregister(Heap* heap, JSArrayBuffer buffer) {
- if (buffer->backing_store() == nullptr) return;
+ if (buffer.backing_store() == nullptr) return;
Page* page = Page::FromHeapObject(buffer);
- const size_t length = buffer->byte_length();
+ const size_t length = buffer.byte_length();
{
base::MutexGuard guard(page->mutex());
LocalArrayBufferTracker* tracker = page->local_tracker();
@@ -110,8 +110,8 @@
void LocalArrayBufferTracker::AddInternal(JSArrayBuffer buffer, size_t length) {
auto ret = array_buffers_.insert(
{buffer,
- {buffer->backing_store(), length, buffer->backing_store(),
- buffer->is_wasm_memory()}});
+ {buffer.backing_store(), length, buffer.backing_store(),
+ buffer.is_wasm_memory()}});
USE(ret);
// Check that we indeed inserted a new value and did not overwrite an existing
// one (which would be a bug).
diff --git a/src/heap/array-buffer-tracker.cc b/src/heap/array-buffer-tracker.cc
index 58cd4f9..0c04d7b 100644
--- a/src/heap/array-buffer-tracker.cc
+++ b/src/heap/array-buffer-tracker.cc
@@ -47,7 +47,7 @@
const size_t length = it->second.length;
// We should decrement before adding to avoid potential overflows in
// the external memory counters.
- DCHECK_EQ(it->first->is_wasm_memory(), it->second.is_wasm_memory);
+ DCHECK_EQ(it->first.is_wasm_memory(), it->second.is_wasm_memory);
tracker->AddInternal(new_buffer, length);
MemoryChunk::MoveExternalBackingStoreBytes(
ExternalBackingStoreType::kArrayBuffer,
@@ -104,7 +104,7 @@
DCHECK(page->SweepingDone());
tracker->Process([mode](JSArrayBuffer old_buffer, JSArrayBuffer* new_buffer) {
- MapWord map_word = old_buffer->map_word();
+ MapWord map_word = old_buffer.map_word();
if (map_word.IsForwardingAddress()) {
*new_buffer = JSArrayBuffer::cast(map_word.ToForwardingAddress());
return LocalArrayBufferTracker::kUpdateEntry;
diff --git a/src/heap/code-stats.cc b/src/heap/code-stats.cc
index d17577a..f660d6a 100644
--- a/src/heap/code-stats.cc
+++ b/src/heap/code-stats.cc
@@ -15,21 +15,21 @@
// Record code statisitcs.
void CodeStatistics::RecordCodeAndMetadataStatistics(HeapObject object,
Isolate* isolate) {
- if (object->IsScript()) {
+ if (object.IsScript()) {
Script script = Script::cast(object);
// Log the size of external source code.
- Object source = script->source();
- if (source->IsExternalString()) {
+ Object source = script.source();
+ if (source.IsExternalString()) {
ExternalString external_source_string = ExternalString::cast(source);
int size = isolate->external_script_source_size();
- size += external_source_string->ExternalPayloadSize();
+ size += external_source_string.ExternalPayloadSize();
isolate->set_external_script_source_size(size);
}
- } else if (object->IsAbstractCode()) {
+ } else if (object.IsAbstractCode()) {
// Record code+metadata statisitcs.
AbstractCode abstract_code = AbstractCode::cast(object);
- int size = abstract_code->SizeIncludingMetadata();
- if (abstract_code->IsCode()) {
+ int size = abstract_code.SizeIncludingMetadata();
+ if (abstract_code.IsCode()) {
size += isolate->code_and_metadata_size();
isolate->set_code_and_metadata_size(size);
} else {
@@ -39,8 +39,8 @@
#ifdef DEBUG
// Record code kind and code comment statistics.
- isolate->code_kind_statistics()[abstract_code->kind()] +=
- abstract_code->Size();
+ isolate->code_kind_statistics()[abstract_code.kind()] +=
+ abstract_code.Size();
CodeStatistics::CollectCodeCommentStatistics(object, isolate);
#endif
}
@@ -197,12 +197,12 @@
Isolate* isolate) {
// Bytecode objects do not contain RelocInfo. Only process code objects
// for code comment statistics.
- if (!obj->IsCode()) {
+ if (!obj.IsCode()) {
return;
}
Code code = Code::cast(obj);
- CodeCommentsIterator cit(code->code_comments(), code->code_comments_size());
+ CodeCommentsIterator cit(code.code_comments(), code.code_comments_size());
int delta = 0;
int prev_pc_offset = 0;
while (cit.HasCurrent()) {
@@ -212,8 +212,8 @@
cit.Next();
}
- DCHECK(0 <= prev_pc_offset && prev_pc_offset <= code->raw_instruction_size());
- delta += static_cast<int>(code->raw_instruction_size() - prev_pc_offset);
+ DCHECK(0 <= prev_pc_offset && prev_pc_offset <= code.raw_instruction_size());
+ delta += static_cast<int>(code.raw_instruction_size() - prev_pc_offset);
EnterComment(isolate, "NoComment", delta);
}
#endif
diff --git a/src/heap/concurrent-marking.cc b/src/heap/concurrent-marking.cc
index 58d6877..3c25e31 100644
--- a/src/heap/concurrent-marking.cc
+++ b/src/heap/concurrent-marking.cc
@@ -178,7 +178,7 @@
HeapObject object = rinfo->target_object();
RecordRelocSlot(host, rinfo, object);
if (!marking_state_.IsBlackOrGrey(object)) {
- if (host->IsWeakObject(object)) {
+ if (host.IsWeakObject(object)) {
weak_objects_->weak_objects_in_code.Push(task_id_,
std::make_pair(object, host));
} else {
@@ -199,7 +199,7 @@
ObjectSlot slot = snapshot.slot(i);
Object object = snapshot.value(i);
DCHECK(!HasWeakHeapObjectTag(object));
- if (!object->IsHeapObject()) continue;
+ if (!object.IsHeapObject()) continue;
HeapObject heap_object = HeapObject::cast(object);
MarkObject(heap_object);
MarkCompactCollector::RecordSlot(host, slot, heap_object);
@@ -227,8 +227,8 @@
if (size == 0) {
return 0;
}
- if (weak_ref->target()->IsHeapObject()) {
- HeapObject target = HeapObject::cast(weak_ref->target());
+ if (weak_ref.target().IsHeapObject()) {
+ HeapObject target = HeapObject::cast(weak_ref.target());
if (marking_state_.IsBlackOrGrey(target)) {
// Record the slot inside the JSWeakRef, since the
// VisitJSObjectSubclass above didn't visit it.
@@ -247,10 +247,10 @@
if (!ShouldVisit(weak_cell)) return 0;
int size = WeakCell::BodyDescriptor::SizeOf(map, weak_cell);
- VisitMapPointer(weak_cell, weak_cell->map_slot());
+ VisitMapPointer(weak_cell, weak_cell.map_slot());
WeakCell::BodyDescriptor::IterateBody(map, weak_cell, size, this);
- if (weak_cell->target()->IsHeapObject()) {
- HeapObject target = HeapObject::cast(weak_cell->target());
+ if (weak_cell.target().IsHeapObject()) {
+ HeapObject target = HeapObject::cast(weak_cell.target());
if (marking_state_.IsBlackOrGrey(target)) {
// Record the slot inside the WeakCell, since the IterateBody above
// didn't visit it.
@@ -306,14 +306,14 @@
int VisitSeqOneByteString(Map map, SeqOneByteString object) {
if (!ShouldVisit(object)) return 0;
- VisitMapPointer(object, object->map_slot());
- return SeqOneByteString::SizeFor(object->synchronized_length());
+ VisitMapPointer(object, object.map_slot());
+ return SeqOneByteString::SizeFor(object.synchronized_length());
}
int VisitSeqTwoByteString(Map map, SeqTwoByteString object) {
if (!ShouldVisit(object)) return 0;
- VisitMapPointer(object, object->map_slot());
- return SeqTwoByteString::SizeFor(object->synchronized_length());
+ VisitMapPointer(object, object.map_slot());
+ return SeqTwoByteString::SizeFor(object.synchronized_length());
}
// ===========================================================================
@@ -367,16 +367,16 @@
if (!ShouldVisit(shared_info)) return 0;
int size = SharedFunctionInfo::BodyDescriptor::SizeOf(map, shared_info);
- VisitMapPointer(shared_info, shared_info->map_slot());
+ VisitMapPointer(shared_info, shared_info.map_slot());
SharedFunctionInfo::BodyDescriptor::IterateBody(map, shared_info, size,
this);
// If the SharedFunctionInfo has old bytecode, mark it as flushable,
// otherwise visit the function data field strongly.
- if (shared_info->ShouldFlushBytecode(bytecode_flush_mode_)) {
+ if (shared_info.ShouldFlushBytecode(bytecode_flush_mode_)) {
weak_objects_->bytecode_flushing_candidates.Push(task_id_, shared_info);
} else {
- VisitPointer(shared_info, shared_info->RawField(
+ VisitPointer(shared_info, shared_info.RawField(
SharedFunctionInfo::kFunctionDataOffset));
}
return size;
@@ -385,10 +385,10 @@
int VisitBytecodeArray(Map map, BytecodeArray object) {
if (!ShouldVisit(object)) return 0;
int size = BytecodeArray::BodyDescriptor::SizeOf(map, object);
- VisitMapPointer(object, object->map_slot());
+ VisitMapPointer(object, object.map_slot());
BytecodeArray::BodyDescriptor::IterateBody(map, object, size, this);
if (!is_forced_gc_) {
- object->MakeOlder();
+ object.MakeOlder();
}
return size;
}
@@ -398,7 +398,7 @@
// Check if the JSFunction needs reset due to bytecode being flushed.
if (bytecode_flush_mode_ != BytecodeFlushMode::kDoNotFlushBytecode &&
- object->NeedsResetDueToFlushedBytecode()) {
+ object.NeedsResetDueToFlushedBytecode()) {
weak_objects_->flushed_js_functions.Push(task_id_, object);
}
@@ -408,7 +408,7 @@
int VisitMap(Map meta_map, Map map) {
if (!ShouldVisit(map)) return 0;
int size = Map::BodyDescriptor::SizeOf(meta_map, map);
- if (map->CanTransition()) {
+ if (map.CanTransition()) {
// Maps that can transition share their descriptor arrays and require
// special visiting logic to avoid memory leaks.
// Since descriptor arrays are potentially shared, ensure that only the
@@ -416,9 +416,9 @@
// non-empty descriptor array is marked, its header is also visited. The
// slot holding the descriptor array will be implicitly recorded when the
// pointer fields of this map are visited.
- DescriptorArray descriptors = map->synchronized_instance_descriptors();
+ DescriptorArray descriptors = map.synchronized_instance_descriptors();
MarkDescriptorArrayBlack(descriptors);
- int number_of_own_descriptors = map->NumberOfOwnDescriptors();
+ int number_of_own_descriptors = map.NumberOfOwnDescriptors();
if (number_of_own_descriptors) {
// It is possible that the concurrent marker observes the
// number_of_own_descriptors out of sync with the descriptors. In that
@@ -428,7 +428,7 @@
// std::min<int>() below.
VisitDescriptors(descriptors,
std::min<int>(number_of_own_descriptors,
- descriptors->number_of_descriptors()));
+ descriptors.number_of_descriptors()));
}
// Mark the pointer fields of the Map. Since the transitions array has
// been marked already, it is fine that one of these fields contains a
@@ -441,29 +441,29 @@
void VisitDescriptors(DescriptorArray descriptor_array,
int number_of_own_descriptors) {
int16_t new_marked = static_cast<int16_t>(number_of_own_descriptors);
- int16_t old_marked = descriptor_array->UpdateNumberOfMarkedDescriptors(
+ int16_t old_marked = descriptor_array.UpdateNumberOfMarkedDescriptors(
mark_compact_epoch_, new_marked);
if (old_marked < new_marked) {
VisitPointers(
descriptor_array,
- MaybeObjectSlot(descriptor_array->GetDescriptorSlot(old_marked)),
- MaybeObjectSlot(descriptor_array->GetDescriptorSlot(new_marked)));
+ MaybeObjectSlot(descriptor_array.GetDescriptorSlot(old_marked)),
+ MaybeObjectSlot(descriptor_array.GetDescriptorSlot(new_marked)));
}
}
int VisitDescriptorArray(Map map, DescriptorArray array) {
if (!ShouldVisit(array)) return 0;
- VisitMapPointer(array, array->map_slot());
+ VisitMapPointer(array, array.map_slot());
int size = DescriptorArray::BodyDescriptor::SizeOf(map, array);
- VisitPointers(array, array->GetFirstPointerSlot(),
- array->GetDescriptorSlot(0));
- VisitDescriptors(array, array->number_of_descriptors());
+ VisitPointers(array, array.GetFirstPointerSlot(),
+ array.GetDescriptorSlot(0));
+ VisitDescriptors(array, array.number_of_descriptors());
return size;
}
int VisitTransitionArray(Map map, TransitionArray array) {
if (!ShouldVisit(array)) return 0;
- VisitMapPointer(array, array->map_slot());
+ VisitMapPointer(array, array.map_slot());
int size = TransitionArray::BodyDescriptor::SizeOf(map, array);
TransitionArray::BodyDescriptor::IterateBody(map, array, size, this);
weak_objects_->transition_arrays.Push(task_id_, array);
@@ -478,22 +478,22 @@
if (!ShouldVisit(table)) return 0;
weak_objects_->ephemeron_hash_tables.Push(task_id_, table);
- for (int i = 0; i < table->Capacity(); i++) {
+ for (int i = 0; i < table.Capacity(); i++) {
ObjectSlot key_slot =
- table->RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(i));
- HeapObject key = HeapObject::cast(table->KeyAt(i));
+ table.RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(i));
+ HeapObject key = HeapObject::cast(table.KeyAt(i));
MarkCompactCollector::RecordSlot(table, key_slot, key);
ObjectSlot value_slot =
- table->RawFieldOfElementAt(EphemeronHashTable::EntryToValueIndex(i));
+ table.RawFieldOfElementAt(EphemeronHashTable::EntryToValueIndex(i));
if (marking_state_.IsBlackOrGrey(key)) {
VisitPointer(table, value_slot);
} else {
- Object value_obj = table->ValueAt(i);
+ Object value_obj = table.ValueAt(i);
- if (value_obj->IsHeapObject()) {
+ if (value_obj.IsHeapObject()) {
HeapObject value = HeapObject::cast(value_obj);
MarkCompactCollector::RecordSlot(table, value_slot, value);
@@ -507,7 +507,7 @@
}
}
- return table->SizeFromMap(map);
+ return table.SizeFromMap(map);
}
// Implements ephemeron semantics: Marks value if key is already reachable.
@@ -542,8 +542,8 @@
void MarkDescriptorArrayBlack(DescriptorArray descriptors) {
marking_state_.WhiteToGrey(descriptors);
if (marking_state_.GreyToBlack(descriptors)) {
- VisitPointers(descriptors, descriptors->GetFirstPointerSlot(),
- descriptors->GetDescriptorSlot(0));
+ VisitPointers(descriptors, descriptors.GetFirstPointerSlot(),
+ descriptors.GetDescriptorSlot(0));
}
}
@@ -585,7 +585,7 @@
void VisitCustomWeakPointers(HeapObject host, ObjectSlot start,
ObjectSlot end) override {
- DCHECK(host->IsWeakCell() || host->IsJSWeakRef());
+ DCHECK(host.IsWeakCell() || host.IsJSWeakRef());
}
private:
@@ -594,7 +594,7 @@
template <typename T>
int VisitJSObjectSubclassFast(Map map, T object) {
- DCHECK_IMPLIES(FLAG_unbox_double_fields, map->HasFastPointerLayout());
+ DCHECK_IMPLIES(FLAG_unbox_double_fields, map.HasFastPointerLayout());
using TBodyDescriptor = typename T::FastBodyDescriptor;
return VisitJSObjectSubclass<T, TBodyDescriptor>(map, object);
}
@@ -602,7 +602,7 @@
template <typename T, typename TBodyDescriptor = typename T::BodyDescriptor>
int VisitJSObjectSubclass(Map map, T object) {
int size = TBodyDescriptor::SizeOf(map, object);
- int used_size = map->UsedInstanceSize();
+ int used_size = map.UsedInstanceSize();
DCHECK_LE(used_size, size);
DCHECK_GE(used_size, T::kHeaderSize);
return VisitPartiallyWithSnapshot<T, TBodyDescriptor>(map, object,
@@ -611,7 +611,7 @@
template <typename T>
int VisitEmbedderTracingSubclass(Map map, T object) {
- DCHECK(object->IsApiWrapper());
+ DCHECK(object.IsApiWrapper());
int size = VisitJSObjectSubclass(map, object);
if (size && embedder_tracing_enabled_) {
// Success: The object needs to be processed for embedder references on
@@ -625,13 +625,13 @@
int VisitLeftTrimmableArray(Map map, T object) {
// The synchronized_length() function checks that the length is a Smi.
// This is not necessarily the case if the array is being left-trimmed.
- Object length = object->unchecked_synchronized_length();
+ Object length = object.unchecked_synchronized_length();
if (!ShouldVisit(object)) return 0;
// The cached length must be the actual length as the array is not black.
// Left trimming marks the array black before over-writing the length.
- DCHECK(length->IsSmi());
+ DCHECK(length.IsSmi());
int size = T::SizeFor(Smi::ToInt(length));
- VisitMapPointer(object, object->map_slot());
+ VisitMapPointer(object, object.map_slot());
T::BodyDescriptor::IterateBody(map, object, size, this);
return size;
}
@@ -656,7 +656,7 @@
template <typename T, typename TBodyDescriptor>
const SlotSnapshot& MakeSlotSnapshot(Map map, T object, int size) {
SlotSnapshottingVisitor visitor(&slot_snapshot_);
- visitor.VisitPointer(object, ObjectSlot(object->map_slot().address()));
+ visitor.VisitPointer(object, ObjectSlot(object.map_slot().address()));
TBodyDescriptor::IterateBody(map, object, size, &visitor);
return slot_snapshot_;
}
@@ -803,12 +803,12 @@
Address new_space_top = heap_->new_space()->original_top_acquire();
Address new_space_limit = heap_->new_space()->original_limit_relaxed();
Address new_large_object = heap_->new_lo_space()->pending_object();
- Address addr = object->address();
+ Address addr = object.address();
if ((new_space_top <= addr && addr < new_space_limit) ||
addr == new_large_object) {
on_hold_->Push(task_id, object);
} else {
- Map map = object->synchronized_map();
+ Map map = object.synchronized_map();
current_marked_bytes += visitor.Visit(map, object);
}
}
diff --git a/src/heap/embedder-tracing.cc b/src/heap/embedder-tracing.cc
index 058f33f..dc534b8 100644
--- a/src/heap/embedder-tracing.cc
+++ b/src/heap/embedder-tracing.cc
@@ -85,8 +85,8 @@
void LocalEmbedderHeapTracer::ProcessingScope::TracePossibleWrapper(
JSObject js_object) {
- DCHECK(js_object->IsApiWrapper());
- if (js_object->GetEmbedderFieldCount() < 2) return;
+ DCHECK(js_object.IsApiWrapper());
+ if (js_object.GetEmbedderFieldCount() < 2) return;
void* pointer0;
void* pointer1;
diff --git a/src/heap/factory.cc b/src/heap/factory.cc
index e759ce6f..bc65211 100644
--- a/src/heap/factory.cc
+++ b/src/heap/factory.cc
@@ -134,7 +134,7 @@
// fact that no allocation will happen from this point on.
DisallowHeapAllocation no_gc;
- result->set_map_after_allocation(*factory->code_map(), SKIP_WRITE_BARRIER);
+ result.set_map_after_allocation(*factory->code_map(), SKIP_WRITE_BARRIER);
code = handle(Code::cast(result), isolate_);
DCHECK(IsAligned(code->address(), kCodeAlignment));
DCHECK_IMPLIES(
@@ -162,7 +162,7 @@
Handle<Object> self_reference;
if (self_reference_.ToHandle(&self_reference)) {
DCHECK(self_reference->IsOddball());
- DCHECK(Oddball::cast(*self_reference)->kind() ==
+ DCHECK(Oddball::cast(*self_reference).kind() ==
Oddball::kSelfReferenceMarker);
if (FLAG_embedded_builtins) {
auto builder = isolate_->builtins_constants_table_builder();
@@ -211,7 +211,7 @@
AllocationAlignment alignment) {
HeapObject result = isolate()->heap()->AllocateRawWithRetryOrFail(
size, allocation, alignment);
- result->set_map_after_allocation(map, SKIP_WRITE_BARRIER);
+ result.set_map_after_allocation(map, SKIP_WRITE_BARRIER);
return result;
}
@@ -226,10 +226,10 @@
WriteBarrierMode write_barrier_mode = allocation == AllocationType::kYoung
? SKIP_WRITE_BARRIER
: UPDATE_WRITE_BARRIER;
- result->set_map_after_allocation(*map, write_barrier_mode);
+ result.set_map_after_allocation(*map, write_barrier_mode);
if (!allocation_site.is_null()) {
AllocationMemento alloc_memento = AllocationMemento::unchecked_cast(
- Object(result->ptr() + map->instance_size()));
+ Object(result.ptr() + map->instance_size()));
InitializeAllocationMemento(alloc_memento, *allocation_site);
}
return result;
@@ -237,11 +237,11 @@
void Factory::InitializeAllocationMemento(AllocationMemento memento,
AllocationSite allocation_site) {
- memento->set_map_after_allocation(*allocation_memento_map(),
- SKIP_WRITE_BARRIER);
- memento->set_allocation_site(allocation_site, SKIP_WRITE_BARRIER);
+ memento.set_map_after_allocation(*allocation_memento_map(),
+ SKIP_WRITE_BARRIER);
+ memento.set_allocation_site(allocation_site, SKIP_WRITE_BARRIER);
if (FLAG_allocation_site_pretenuring) {
- allocation_site->IncrementMementoCreateCount();
+ allocation_site.IncrementMementoCreateCount();
}
}
@@ -280,7 +280,7 @@
WriteBarrierMode write_barrier_mode = allocation == AllocationType::kYoung
? SKIP_WRITE_BARRIER
: UPDATE_WRITE_BARRIER;
- result->set_map_after_allocation(*map, write_barrier_mode);
+ result.set_map_after_allocation(*map, write_barrier_mode);
return result;
}
@@ -290,7 +290,7 @@
Heap* heap = isolate()->heap();
HeapObject result =
heap->AllocateRawWithRetryOrFail(size, allocation, alignment);
- heap->CreateFillerObjectAt(result->address(), size, ClearRecordedSlots::kNo);
+ heap->CreateFillerObjectAt(result.address(), size, ClearRecordedSlots::kNo);
return Handle<HeapObject>(result, isolate());
}
@@ -375,7 +375,7 @@
DCHECK_LE(0, length);
if (length == 0) return empty_property_array();
HeapObject result = AllocateRawFixedArray(length, allocation);
- result->set_map_after_allocation(*property_array_map(), SKIP_WRITE_BARRIER);
+ result.set_map_after_allocation(*property_array_map(), SKIP_WRITE_BARRIER);
Handle<PropertyArray> array(PropertyArray::cast(result), isolate());
array->initialize_length(length);
MemsetTagged(array->data_start(), *undefined_value(), length);
@@ -388,7 +388,7 @@
HeapObject result = AllocateRawFixedArray(length, allocation);
DCHECK(RootsTable::IsImmortalImmovable(map_root_index));
Map map = Map::cast(isolate()->root(map_root_index));
- result->set_map_after_allocation(map, SKIP_WRITE_BARRIER);
+ result.set_map_after_allocation(map, SKIP_WRITE_BARRIER);
Handle<FixedArray> array(FixedArray::cast(result), isolate());
array->set_length(length);
MemsetTagged(array->data_start(), filler, length);
@@ -420,7 +420,7 @@
HeapObject result =
AllocateRawArray(WeakFixedArray::SizeFor(length), allocation);
Map map = Map::cast(isolate()->root(map_root_index));
- result->set_map_after_allocation(map, SKIP_WRITE_BARRIER);
+ result.set_map_after_allocation(map, SKIP_WRITE_BARRIER);
Handle<WeakFixedArray> array(WeakFixedArray::cast(result), isolate());
array->set_length(length);
@@ -447,7 +447,7 @@
HeapObject result =
AllocateRawArray(WeakFixedArray::SizeFor(length), allocation);
DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kWeakFixedArrayMap));
- result->set_map_after_allocation(*weak_fixed_array_map(), SKIP_WRITE_BARRIER);
+ result.set_map_after_allocation(*weak_fixed_array_map(), SKIP_WRITE_BARRIER);
Handle<WeakFixedArray> array(WeakFixedArray::cast(result), isolate());
array->set_length(length);
MemsetTagged(ObjectSlot(array->data_start()), *undefined_value(), length);
@@ -468,7 +468,7 @@
MemoryChunk* chunk = MemoryChunk::FromHeapObject(result);
chunk->SetFlag<AccessMode::ATOMIC>(MemoryChunk::HAS_PROGRESS_BAR);
}
- result->set_map_after_allocation(*fixed_array_map(), SKIP_WRITE_BARRIER);
+ result.set_map_after_allocation(*fixed_array_map(), SKIP_WRITE_BARRIER);
Handle<FixedArray> array(FixedArray::cast(result), isolate());
array->set_length(length);
MemsetTagged(array->data_start(), ReadOnlyRoots(heap).undefined_value(),
@@ -511,7 +511,7 @@
Handle<SharedFunctionInfo> shared,
Handle<ClosureFeedbackCellArray> closure_feedback_cell_array,
AllocationType allocation) {
- int length = shared->feedback_metadata()->slot_count();
+ int length = shared->feedback_metadata().slot_count();
DCHECK_LE(0, length);
int size = FeedbackVector::SizeFor(length);
@@ -767,7 +767,7 @@
DisallowHeapAllocation no_gc;
// Copy the characters into the new object.
- CopyChars(SeqOneByteString::cast(*result)->GetChars(no_gc), string.begin(),
+ CopyChars(SeqOneByteString::cast(*result).GetChars(no_gc), string.begin(),
length);
return result;
}
@@ -988,11 +988,9 @@
DisallowHeapAllocation no_gc;
if (is_one_byte) {
- WriteOneByteData(t, SeqOneByteString::cast(*answer)->GetChars(no_gc),
- chars);
+ WriteOneByteData(t, SeqOneByteString::cast(*answer).GetChars(no_gc), chars);
} else {
- WriteTwoByteData(t, SeqTwoByteString::cast(*answer)->GetChars(no_gc),
- chars);
+ WriteTwoByteData(t, SeqTwoByteString::cast(*answer).GetChars(no_gc), chars);
}
return answer;
}
@@ -1023,7 +1021,7 @@
namespace {
MaybeHandle<Map> GetInternalizedStringMap(Factory* f, Handle<String> string) {
- switch (string->map()->instance_type()) {
+ switch (string->map().instance_type()) {
case STRING_TYPE:
return f->internalized_string_map();
case ONE_BYTE_STRING_TYPE:
@@ -1622,7 +1620,7 @@
default:
UNREACHABLE();
}
- int size = map->instance_size();
+ int size = map.instance_size();
HeapObject result = AllocateRawWithImmortalMap(size, allocation, map);
Handle<Struct> str(Struct::cast(result), isolate());
str->InitializeBody(size);
@@ -1762,7 +1760,7 @@
STATIC_ASSERT(Foreign::kSize <= kMaxRegularHeapObjectSize);
Map map = *foreign_map();
HeapObject result =
- AllocateRawWithImmortalMap(map->instance_size(), allocation, map);
+ AllocateRawWithImmortalMap(map.instance_size(), allocation, map);
Handle<Foreign> foreign(Foreign::cast(result), isolate());
foreign->set_foreign_address(addr);
return foreign;
@@ -1921,10 +1919,10 @@
int size = DescriptorArray::SizeFor(number_of_all_descriptors);
HeapObject obj =
isolate()->heap()->AllocateRawWithRetryOrFail(size, allocation);
- obj->set_map_after_allocation(*descriptor_array_map(), SKIP_WRITE_BARRIER);
+ obj.set_map_after_allocation(*descriptor_array_map(), SKIP_WRITE_BARRIER);
DescriptorArray array = DescriptorArray::cast(obj);
- array->Initialize(*empty_enum_cache(), *undefined_value(),
- number_of_descriptors, slack);
+ array.Initialize(*empty_enum_cache(), *undefined_value(),
+ number_of_descriptors, slack);
return Handle<DescriptorArray>(array, isolate());
}
@@ -1973,7 +1971,7 @@
IsTerminalElementsKind(elements_kind));
HeapObject result = isolate()->heap()->AllocateRawWithRetryOrFail(
Map::kSize, AllocationType::kMap);
- result->set_map_after_allocation(*meta_map(), SKIP_WRITE_BARRIER);
+ result.set_map_after_allocation(*meta_map(), SKIP_WRITE_BARRIER);
return handle(InitializeMap(Map::cast(result), type, instance_size,
elements_kind, inobject_properties),
isolate());
@@ -1982,42 +1980,42 @@
Map Factory::InitializeMap(Map map, InstanceType type, int instance_size,
ElementsKind elements_kind,
int inobject_properties) {
- map->set_instance_type(type);
- map->set_prototype(*null_value(), SKIP_WRITE_BARRIER);
- map->set_constructor_or_backpointer(*null_value(), SKIP_WRITE_BARRIER);
- map->set_instance_size(instance_size);
- if (map->IsJSObjectMap()) {
+ map.set_instance_type(type);
+ map.set_prototype(*null_value(), SKIP_WRITE_BARRIER);
+ map.set_constructor_or_backpointer(*null_value(), SKIP_WRITE_BARRIER);
+ map.set_instance_size(instance_size);
+ if (map.IsJSObjectMap()) {
DCHECK(!ReadOnlyHeap::Contains(map));
- map->SetInObjectPropertiesStartInWords(instance_size / kTaggedSize -
- inobject_properties);
- DCHECK_EQ(map->GetInObjectProperties(), inobject_properties);
- map->set_prototype_validity_cell(*invalid_prototype_validity_cell());
+ map.SetInObjectPropertiesStartInWords(instance_size / kTaggedSize -
+ inobject_properties);
+ DCHECK_EQ(map.GetInObjectProperties(), inobject_properties);
+ map.set_prototype_validity_cell(*invalid_prototype_validity_cell());
} else {
DCHECK_EQ(inobject_properties, 0);
- map->set_inobject_properties_start_or_constructor_function_index(0);
- map->set_prototype_validity_cell(Smi::FromInt(Map::kPrototypeChainValid));
+ map.set_inobject_properties_start_or_constructor_function_index(0);
+ map.set_prototype_validity_cell(Smi::FromInt(Map::kPrototypeChainValid));
}
- map->set_dependent_code(DependentCode::cast(*empty_weak_fixed_array()),
- SKIP_WRITE_BARRIER);
- map->set_raw_transitions(MaybeObject::FromSmi(Smi::zero()));
- map->SetInObjectUnusedPropertyFields(inobject_properties);
- map->SetInstanceDescriptors(isolate(), *empty_descriptor_array(), 0);
+ map.set_dependent_code(DependentCode::cast(*empty_weak_fixed_array()),
+ SKIP_WRITE_BARRIER);
+ map.set_raw_transitions(MaybeObject::FromSmi(Smi::zero()));
+ map.SetInObjectUnusedPropertyFields(inobject_properties);
+ map.SetInstanceDescriptors(isolate(), *empty_descriptor_array(), 0);
if (FLAG_unbox_double_fields) {
- map->set_layout_descriptor(LayoutDescriptor::FastPointerLayout());
+ map.set_layout_descriptor(LayoutDescriptor::FastPointerLayout());
}
// Must be called only after |instance_type|, |instance_size| and
// |layout_descriptor| are set.
- map->set_visitor_id(Map::GetVisitorId(map));
- map->set_bit_field(0);
- map->set_bit_field2(Map::IsExtensibleBit::kMask);
+ map.set_visitor_id(Map::GetVisitorId(map));
+ map.set_bit_field(0);
+ map.set_bit_field2(Map::IsExtensibleBit::kMask);
int bit_field3 = Map::EnumLengthBits::encode(kInvalidEnumCacheSentinel) |
Map::OwnsDescriptorsBit::encode(true) |
Map::ConstructionCounterBits::encode(Map::kNoSlackTracking);
- map->set_bit_field3(bit_field3);
- DCHECK(!map->is_in_retained_map_list());
- map->clear_padding();
- map->set_elements_kind(elements_kind);
- map->set_new_target_is_base(true);
+ map.set_bit_field3(bit_field3);
+ DCHECK(!map.is_in_retained_map_list());
+ map.clear_padding();
+ map.set_elements_kind(elements_kind);
+ map.set_new_target_is_base(true);
isolate()->counters()->maps_created()->Increment();
if (FLAG_trace_maps) LOG(isolate(), MapCreate(map));
return map;
@@ -2055,21 +2053,21 @@
DCHECK(Heap::InYoungGeneration(raw_clone));
// Since we know the clone is allocated in new space, we can copy
// the contents without worrying about updating the write barrier.
- Heap::CopyBlock(raw_clone->address(), source->address(), object_size);
+ Heap::CopyBlock(raw_clone.address(), source->address(), object_size);
Handle<JSObject> clone(JSObject::cast(raw_clone), isolate());
if (!site.is_null()) {
AllocationMemento alloc_memento = AllocationMemento::unchecked_cast(
- Object(raw_clone->ptr() + object_size));
+ Object(raw_clone.ptr() + object_size));
InitializeAllocationMemento(alloc_memento, *site);
}
SLOW_DCHECK(clone->GetElementsKind() == source->GetElementsKind());
FixedArrayBase elements = source->elements();
// Update elements if necessary.
- if (elements->length() > 0) {
+ if (elements.length() > 0) {
FixedArrayBase elem;
- if (elements->map() == *fixed_cow_array_map()) {
+ if (elements.map() == *fixed_cow_array_map()) {
elem = elements;
} else if (source->HasDoubleElements()) {
elem = *CopyFixedDoubleArray(
@@ -2083,10 +2081,10 @@
// Update properties if necessary.
if (source->HasFastProperties()) {
PropertyArray properties = source->property_array();
- if (properties->length() > 0) {
+ if (properties.length() > 0) {
// TODO(gsathya): Do not copy hash code.
Handle<PropertyArray> prop = CopyArrayWithMap(
- handle(properties, isolate()), handle(properties->map(), isolate()));
+ handle(properties, isolate()), handle(properties.map(), isolate()));
clone->set_raw_properties_or_hash(*prop);
}
} else {
@@ -2115,7 +2113,7 @@
Handle<T> Factory::CopyArrayWithMap(Handle<T> src, Handle<Map> map) {
int len = src->length();
HeapObject obj = AllocateRawFixedArray(len, AllocationType::kYoung);
- obj->set_map_after_allocation(*map, SKIP_WRITE_BARRIER);
+ obj.set_map_after_allocation(*map, SKIP_WRITE_BARRIER);
Handle<T> result(T::cast(obj), isolate());
initialize_length(result, len);
@@ -2134,14 +2132,14 @@
int old_len = src->length();
int new_len = old_len + grow_by;
HeapObject obj = AllocateRawFixedArray(new_len, allocation);
- obj->set_map_after_allocation(src->map(), SKIP_WRITE_BARRIER);
+ obj.set_map_after_allocation(src->map(), SKIP_WRITE_BARRIER);
Handle<T> result(T::cast(obj), isolate());
initialize_length(result, new_len);
// Copy the content.
DisallowHeapAllocation no_gc;
- WriteBarrierMode mode = obj->GetWriteBarrierMode(no_gc);
+ WriteBarrierMode mode = obj.GetWriteBarrierMode(no_gc);
result->CopyElements(isolate(), 0, *src, 0, old_len, mode);
MemsetTagged(ObjectSlot(result->data_start() + old_len),
ReadOnlyRoots(isolate()).undefined_value(), grow_by);
@@ -2171,18 +2169,18 @@
int new_capacity = old_capacity + grow_by;
DCHECK_GE(new_capacity, old_capacity);
HeapObject obj = AllocateRawWeakArrayList(new_capacity, allocation);
- obj->set_map_after_allocation(src->map(), SKIP_WRITE_BARRIER);
+ obj.set_map_after_allocation(src->map(), SKIP_WRITE_BARRIER);
WeakArrayList result = WeakArrayList::cast(obj);
int old_len = src->length();
- result->set_length(old_len);
- result->set_capacity(new_capacity);
+ result.set_length(old_len);
+ result.set_capacity(new_capacity);
// Copy the content.
DisallowHeapAllocation no_gc;
- WriteBarrierMode mode = obj->GetWriteBarrierMode(no_gc);
- result->CopyElements(isolate(), 0, *src, 0, old_len, mode);
- MemsetTagged(ObjectSlot(result->data_start() + old_len),
+ WriteBarrierMode mode = obj.GetWriteBarrierMode(no_gc);
+ result.CopyElements(isolate(), 0, *src, 0, old_len, mode);
+ MemsetTagged(ObjectSlot(result.data_start() + old_len),
ReadOnlyRoots(isolate()).undefined_value(),
new_capacity - old_len);
return Handle<WeakArrayList>(result, isolate());
@@ -2201,7 +2199,7 @@
if (new_len == 0) return empty_fixed_array();
HeapObject obj = AllocateRawFixedArray(new_len, allocation);
- obj->set_map_after_allocation(*fixed_array_map(), SKIP_WRITE_BARRIER);
+ obj.set_map_after_allocation(*fixed_array_map(), SKIP_WRITE_BARRIER);
Handle<FixedArray> result(FixedArray::cast(obj), isolate());
result->set_length(new_len);
@@ -2293,7 +2291,7 @@
HeapObject result = AllocateRawWithImmortalMap(BigInt::SizeFor(length),
allocation, *bigint_map());
FreshlyAllocatedBigInt bigint = FreshlyAllocatedBigInt::cast(result);
- bigint->clear_padding();
+ bigint.clear_padding();
return handle(bigint, isolate());
}
@@ -2401,7 +2399,7 @@
NewFunctionArgs args = NewFunctionArgs::ForFunctionWithoutCode(
name, isolate()->sloppy_function_map(), LanguageMode::kSloppy);
Handle<JSFunction> result = NewFunction(args);
- DCHECK(is_sloppy(result->shared()->language_mode()));
+ DCHECK(is_sloppy(result->shared().language_mode()));
return result;
}
@@ -2453,7 +2451,7 @@
}
if (args.should_set_language_mode_) {
- result->shared()->set_language_mode(args.language_mode_);
+ result->shared().set_language_mode(args.language_mode_);
}
if (args.should_create_and_set_initial_map_) {
@@ -2471,12 +2469,12 @@
}
Handle<Map> initial_map = NewMap(args.type_, args.instance_size_,
elements_kind, args.inobject_properties_);
- result->shared()->set_expected_nof_properties(args.inobject_properties_);
+ result->shared().set_expected_nof_properties(args.inobject_properties_);
// TODO(littledan): Why do we have this is_generator test when
// NewFunctionPrototype already handles finding an appropriately
// shared prototype?
Handle<HeapObject> prototype = args.maybe_prototype_.ToHandleChecked();
- if (!IsResumableFunction(result->shared()->kind())) {
+ if (!IsResumableFunction(result->shared().kind())) {
if (prototype->IsTheHole(isolate())) {
prototype = NewFunctionPrototype(result);
}
@@ -2490,13 +2488,13 @@
Handle<JSObject> Factory::NewFunctionPrototype(Handle<JSFunction> function) {
// Make sure to use globals from the function's context, since the function
// can be from a different context.
- Handle<NativeContext> native_context(function->context()->native_context(),
+ Handle<NativeContext> native_context(function->context().native_context(),
isolate());
Handle<Map> new_map;
- if (V8_UNLIKELY(IsAsyncGeneratorFunction(function->shared()->kind()))) {
+ if (V8_UNLIKELY(IsAsyncGeneratorFunction(function->shared().kind()))) {
new_map = handle(native_context->async_generator_object_prototype_map(),
isolate());
- } else if (IsResumableFunction(function->shared()->kind())) {
+ } else if (IsResumableFunction(function->shared().kind())) {
// Generator and async function prototypes can share maps since they
// don't have "constructor" properties.
new_map =
@@ -2513,7 +2511,7 @@
DCHECK(!new_map->is_prototype_map());
Handle<JSObject> prototype = NewJSObjectFromMap(new_map);
- if (!IsResumableFunction(function->shared()->kind())) {
+ if (!IsResumableFunction(function->shared().kind())) {
JSObject::AddProperty(isolate(), prototype, constructor_string(), function,
DONT_ENUM);
}
@@ -2536,7 +2534,7 @@
Handle<SharedFunctionInfo> info, Handle<Context> context,
AllocationType allocation) {
Handle<Map> initial_map(
- Map::cast(context->native_context()->get(info->function_map_index())),
+ Map::cast(context->native_context().get(info->function_map_index())),
isolate());
return NewFunctionFromSharedFunctionInfo(initial_map, info, context,
allocation);
@@ -2546,7 +2544,7 @@
Handle<SharedFunctionInfo> info, Handle<Context> context,
Handle<FeedbackCell> feedback_cell, AllocationType allocation) {
Handle<Map> initial_map(
- Map::cast(context->native_context()->get(info->function_map_index())),
+ Map::cast(context->native_context().get(info->function_map_index())),
isolate());
return NewFunctionFromSharedFunctionInfo(initial_map, info, context,
feedback_cell, allocation);
@@ -2584,9 +2582,9 @@
// Check that the optimized code in the feedback cell wasn't marked for
// deoptimization while not pointed to by any live JSFunction.
- if (feedback_cell->value()->IsFeedbackVector()) {
+ if (feedback_cell->value().IsFeedbackVector()) {
FeedbackVector::cast(feedback_cell->value())
- ->EvictOptimizedCodeMarkedForDeoptimization(
+ .EvictOptimizedCodeMarkedForDeoptimization(
*info, "new function from shared function info");
}
result->set_raw_feedback_cell(*feedback_cell);
@@ -2680,7 +2678,7 @@
Handle<Code> result = Builtins::GenerateOffHeapTrampolineFor(
isolate(), off_heap_entry,
- code->code_data_container()->kind_specific_flags());
+ code->code_data_container().kind_specific_flags());
// The CodeDataContainer should not be modified beyond this point since it's
// now possibly canonicalized.
@@ -2710,9 +2708,9 @@
#ifdef DEBUG
// Verify that the contents are the same.
ByteArray reloc_info = result->relocation_info();
- DCHECK_EQ(reloc_info->length(), canonical_reloc_info->length());
- for (int i = 0; i < reloc_info->length(); ++i) {
- DCHECK_EQ(reloc_info->get(i), canonical_reloc_info->get(i));
+ DCHECK_EQ(reloc_info.length(), canonical_reloc_info.length());
+ for (int i = 0; i < reloc_info.length(); ++i) {
+ DCHECK_EQ(reloc_info.get(i), canonical_reloc_info.get(i));
}
#endif
result->set_relocation_info(canonical_reloc_info);
@@ -2723,7 +2721,7 @@
Handle<Code> Factory::CopyCode(Handle<Code> code) {
Handle<CodeDataContainer> data_container = NewCodeDataContainer(
- code->code_data_container()->kind_specific_flags(), AllocationType::kOld);
+ code->code_data_container().kind_specific_flags(), AllocationType::kOld);
Heap* heap = isolate()->heap();
Handle<Code> new_code;
@@ -2735,7 +2733,7 @@
// Copy code object.
Address old_addr = code->address();
- Address new_addr = result->address();
+ Address new_addr = result.address();
Heap::CopyBlock(new_addr, old_addr, obj_size);
new_code = handle(Code::cast(result), isolate());
@@ -2898,7 +2896,7 @@
}
obj->InitializeBody(*map, start_offset, *undefined_value(), filler);
if (in_progress) {
- map->FindRootMap(isolate())->InobjectSlackTrackingStep(isolate());
+ map->FindRootMap(isolate()).InobjectSlackTrackingStep(isolate());
}
}
@@ -2958,10 +2956,10 @@
Handle<JSArray> Factory::NewJSArray(ElementsKind elements_kind,
AllocationType allocation) {
NativeContext native_context = isolate()->raw_native_context();
- Map map = native_context->GetInitialJSArrayMap(elements_kind);
+ Map map = native_context.GetInitialJSArrayMap(elements_kind);
if (map.is_null()) {
- JSFunction array_function = native_context->array_function();
- map = array_function->initial_map();
+ JSFunction array_function = native_context.array_function();
+ map = array_function.initial_map();
}
return Handle<JSArray>::cast(
NewJSObjectFromMap(handle(map, isolate()), allocation));
@@ -3025,7 +3023,7 @@
Handle<JSWeakMap> Factory::NewJSWeakMap() {
NativeContext native_context = isolate()->raw_native_context();
- Handle<Map> map(native_context->js_weak_map_fun()->initial_map(), isolate());
+ Handle<Map> map(native_context.js_weak_map_fun().initial_map(), isolate());
Handle<JSWeakMap> weakmap(JSWeakMap::cast(*NewJSObjectFromMap(map)),
isolate());
{
@@ -3049,7 +3047,7 @@
Handle<JSGeneratorObject> Factory::NewJSGeneratorObject(
Handle<JSFunction> function) {
- DCHECK(IsResumableFunction(function->shared()->kind()));
+ DCHECK(IsResumableFunction(function->shared().kind()));
JSFunction::EnsureHasInitialMap(function);
Handle<Map> map(function->initial_map(), isolate());
@@ -3060,15 +3058,15 @@
}
Handle<Module> Factory::NewModule(Handle<SharedFunctionInfo> code) {
- Handle<ModuleInfo> module_info(code->scope_info()->ModuleDescriptorInfo(),
+ Handle<ModuleInfo> module_info(code->scope_info().ModuleDescriptorInfo(),
isolate());
Handle<ObjectHashTable> exports =
ObjectHashTable::New(isolate(), module_info->RegularExportCount());
Handle<FixedArray> regular_exports =
NewFixedArray(module_info->RegularExportCount());
Handle<FixedArray> regular_imports =
- NewFixedArray(module_info->regular_imports()->length());
- int requested_modules_length = module_info->module_requests()->length();
+ NewFixedArray(module_info->regular_imports().length());
+ int requested_modules_length = module_info->module_requests().length();
Handle<FixedArray> requested_modules =
requested_modules_length > 0 ? NewFixedArray(requested_modules_length)
: empty_fixed_array();
@@ -3176,11 +3174,11 @@
}
JSFunction GetTypedArrayFun(ExternalArrayType type, Isolate* isolate) {
- NativeContext native_context = isolate->context()->native_context();
+ NativeContext native_context = isolate->context().native_context();
switch (type) {
#define TYPED_ARRAY_FUN(Type, type, TYPE, ctype) \
case kExternal##Type##Array: \
- return native_context->type##_array_fun();
+ return native_context.type##_array_fun();
TYPED_ARRAYS(TYPED_ARRAY_FUN)
#undef TYPED_ARRAY_FUN
@@ -3189,11 +3187,11 @@
}
JSFunction GetTypedArrayFun(ElementsKind elements_kind, Isolate* isolate) {
- NativeContext native_context = isolate->context()->native_context();
+ NativeContext native_context = isolate->context().native_context();
switch (elements_kind) {
#define TYPED_ARRAY_FUN(Type, type, TYPE, ctype) \
case TYPE##_ELEMENTS: \
- return native_context->type##_array_fun();
+ return native_context.type##_array_fun();
TYPED_ARRAYS(TYPED_ARRAY_FUN)
#undef TYPED_ARRAY_FUN
@@ -3302,7 +3300,7 @@
Handle<JSDataView> Factory::NewJSDataView(Handle<JSArrayBuffer> buffer,
size_t byte_offset,
size_t byte_length) {
- Handle<Map> map(isolate()->native_context()->data_view_fun()->initial_map(),
+ Handle<Map> map(isolate()->native_context()->data_view_fun().initial_map(),
isolate());
Handle<JSDataView> obj = Handle<JSDataView>::cast(NewJSObjectFromMap(map));
SetupArrayBufferView(isolate(), obj, buffer, byte_offset, byte_length);
@@ -3373,7 +3371,7 @@
} else {
map = Handle<Map>(isolate()->proxy_map());
}
- DCHECK(map->prototype()->IsNull(isolate()));
+ DCHECK(map->prototype().IsNull(isolate()));
Handle<JSProxy> result(JSProxy::cast(New(map, AllocationType::kYoung)),
isolate());
result->initialize_properties();
@@ -3531,7 +3529,7 @@
// the function_data should not be code with a builtin.
DCHECK(!Builtins::IsBuiltinId(maybe_builtin_index));
DCHECK_IMPLIES(function_data->IsCode(),
- !Code::cast(*function_data)->is_builtin());
+ !Code::cast(*function_data).is_builtin());
share->set_function_data(*function_data);
} else if (Builtins::IsBuiltinId(maybe_builtin_index)) {
share->set_builtin_id(maybe_builtin_index);
@@ -3580,7 +3578,7 @@
namespace {
inline int NumberToStringCacheHash(Handle<FixedArray> cache, Smi number) {
int mask = (cache->length() >> 1) - 1;
- return number->value() & mask;
+ return number.value() & mask;
}
inline int NumberToStringCacheHash(Handle<FixedArray> cache, double number) {
int mask = (cache->length() >> 1) - 1;
@@ -3598,7 +3596,7 @@
string, check_cache ? AllocationType::kOld : AllocationType::kYoung);
if (!check_cache) return js_string;
- if (!number_string_cache()->get(hash * 2)->IsUndefined(isolate())) {
+ if (!number_string_cache()->get(hash * 2).IsUndefined(isolate())) {
int full_size = isolate()->heap()->MaxNumberToStringCacheSize();
if (number_string_cache()->length() != full_size) {
Handle<FixedArray> new_cache =
@@ -3615,8 +3613,8 @@
Handle<Object> Factory::NumberToStringCacheGet(Object number, int hash) {
DisallowHeapAllocation no_gc;
Object key = number_string_cache()->get(hash * 2);
- if (key == number || (key->IsHeapNumber() && number->IsHeapNumber() &&
- key->Number() == number->Number())) {
+ if (key == number || (key.IsHeapNumber() && number.IsHeapNumber() &&
+ key.Number() == number.Number())) {
return Handle<String>(
String::cast(number_string_cache()->get(hash * 2 + 1)), isolate());
}
@@ -3658,7 +3656,7 @@
char arr[100];
Vector<char> buffer(arr, arraysize(arr));
- const char* string = IntToCString(number->value(), buffer);
+ const char* string = IntToCString(number.value(), buffer);
return NumberToStringCacheSet(handle(number, isolate()), hash, string,
check_cache);
@@ -3790,7 +3788,7 @@
Handle<JSFunction> fun = Handle<JSFunction>::cast(function);
function_name = JSFunction::GetDebugName(fun);
- const bool is_user_java_script = fun->shared()->IsUserJavaScript();
+ const bool is_user_java_script = fun->shared().IsUserJavaScript();
info->set_is_user_java_script(is_user_java_script);
}
}
@@ -3819,8 +3817,8 @@
Handle<JSObject> Factory::NewArgumentsObject(Handle<JSFunction> callee,
int length) {
- bool strict_mode_callee = is_strict(callee->shared()->language_mode()) ||
- !callee->shared()->has_simple_parameters();
+ bool strict_mode_callee = is_strict(callee->shared().language_mode()) ||
+ !callee->shared().has_simple_parameters();
Handle<Map> map = strict_mode_callee ? isolate()->strict_arguments_map()
: isolate()->sloppy_arguments_map();
AllocationSiteUsageContext context(isolate(), Handle<AllocationSite>(),
@@ -3846,7 +3844,7 @@
if (number_of_properties == 0) {
// Reuse the initial map of the Object function if the literal has no
// predeclared properties.
- return handle(context->object_function()->initial_map(), isolate());
+ return handle(context->object_function().initial_map(), isolate());
}
// Use initial slow object proto map for too many properties.
@@ -3868,7 +3866,7 @@
HeapObject heap_object;
if (result->GetHeapObjectIfWeak(&heap_object)) {
Map map = Map::cast(heap_object);
- DCHECK(!map->is_dictionary_map());
+ DCHECK(!map.is_dictionary_map());
return handle(map, isolate());
}
}
diff --git a/src/heap/heap-inl.h b/src/heap/heap-inl.h
index fe71f71..d4aece1 100644
--- a/src/heap/heap-inl.h
+++ b/src/heap/heap-inl.h
@@ -94,34 +94,34 @@
!RootsTable::IsImmortalImmovable(RootIndex::k##CamelName)); \
DCHECK_IMPLIES(RootsTable::IsImmortalImmovable(RootIndex::k##CamelName), \
IsImmovable(HeapObject::cast(value))); \
- roots_table()[RootIndex::k##CamelName] = value->ptr(); \
+ roots_table()[RootIndex::k##CamelName] = value.ptr(); \
}
ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR
void Heap::SetRootMaterializedObjects(FixedArray objects) {
- roots_table()[RootIndex::kMaterializedObjects] = objects->ptr();
+ roots_table()[RootIndex::kMaterializedObjects] = objects.ptr();
}
void Heap::SetRootScriptList(Object value) {
- roots_table()[RootIndex::kScriptList] = value->ptr();
+ roots_table()[RootIndex::kScriptList] = value.ptr();
}
void Heap::SetRootStringTable(StringTable value) {
- roots_table()[RootIndex::kStringTable] = value->ptr();
+ roots_table()[RootIndex::kStringTable] = value.ptr();
}
void Heap::SetRootNoScriptSharedFunctionInfos(Object value) {
- roots_table()[RootIndex::kNoScriptSharedFunctionInfos] = value->ptr();
+ roots_table()[RootIndex::kNoScriptSharedFunctionInfos] = value.ptr();
}
void Heap::SetMessageListeners(TemplateList value) {
- roots_table()[RootIndex::kMessageListeners] = value->ptr();
+ roots_table()[RootIndex::kMessageListeners] = value.ptr();
}
void Heap::SetPendingOptimizeForTestBytecode(Object hash_table) {
- DCHECK(hash_table->IsObjectHashTable() || hash_table->IsUndefined(isolate()));
- roots_table()[RootIndex::kPendingOptimizeForTestBytecode] = hash_table->ptr();
+ DCHECK(hash_table.IsObjectHashTable() || hash_table.IsUndefined(isolate()));
+ roots_table()[RootIndex::kPendingOptimizeForTestBytecode] = hash_table.ptr();
}
PagedSpace* Heap::paged_space(int idx) {
@@ -223,11 +223,11 @@
// Unprotect the memory chunk of the object if it was not unprotected
// already.
UnprotectAndRegisterMemoryChunk(object);
- ZapCodeObject(object->address(), size_in_bytes);
+ ZapCodeObject(object.address(), size_in_bytes);
if (!large_object) {
MemoryChunk::FromHeapObject(object)
->GetCodeObjectRegistry()
- ->RegisterNewlyAllocatedCodeObject(object->address());
+ ->RegisterNewlyAllocatedCodeObject(object.address());
}
}
OnAllocationEvent(object, size_in_bytes);
@@ -238,7 +238,7 @@
void Heap::OnAllocationEvent(HeapObject object, int size_in_bytes) {
for (auto& tracker : allocation_trackers_) {
- tracker->AllocationEvent(object->address(), size_in_bytes);
+ tracker->AllocationEvent(object.address(), size_in_bytes);
}
if (FLAG_verify_predictable) {
@@ -269,7 +269,7 @@
}
void Heap::UpdateAllocationsHash(HeapObject object) {
- Address object_address = object->address();
+ Address object_address = object.address();
MemoryChunk* memory_chunk = MemoryChunk::FromAddress(object_address);
AllocationSpace allocation_space = memory_chunk->owner()->identity();
@@ -291,28 +291,28 @@
}
void Heap::RegisterExternalString(String string) {
- DCHECK(string->IsExternalString());
- DCHECK(!string->IsThinString());
+ DCHECK(string.IsExternalString());
+ DCHECK(!string.IsThinString());
external_string_table_.AddString(string);
}
void Heap::FinalizeExternalString(String string) {
- DCHECK(string->IsExternalString());
+ DCHECK(string.IsExternalString());
Page* page = Page::FromHeapObject(string);
ExternalString ext_string = ExternalString::cast(string);
page->DecrementExternalBackingStoreBytes(
ExternalBackingStoreType::kExternalString,
- ext_string->ExternalPayloadSize());
+ ext_string.ExternalPayloadSize());
- ext_string->DisposeResource();
+ ext_string.DisposeResource();
}
Address Heap::NewSpaceTop() { return new_space_->top(); }
bool Heap::InYoungGeneration(Object object) {
DCHECK(!HasWeakHeapObjectTag(object));
- return object->IsHeapObject() && InYoungGeneration(HeapObject::cast(object));
+ return object.IsHeapObject() && InYoungGeneration(HeapObject::cast(object));
}
// static
@@ -340,7 +340,7 @@
// static
bool Heap::InFromPage(Object object) {
DCHECK(!HasWeakHeapObjectTag(object));
- return object->IsHeapObject() && InFromPage(HeapObject::cast(object));
+ return object.IsHeapObject() && InFromPage(HeapObject::cast(object));
}
// static
@@ -357,7 +357,7 @@
// static
bool Heap::InToPage(Object object) {
DCHECK(!HasWeakHeapObjectTag(object));
- return object->IsHeapObject() && InToPage(HeapObject::cast(object));
+ return object.IsHeapObject() && InToPage(HeapObject::cast(object));
}
// static
@@ -400,15 +400,15 @@
template <Heap::FindMementoMode mode>
AllocationMemento Heap::FindAllocationMemento(Map map, HeapObject object) {
- Address object_address = object->address();
- Address memento_address = object_address + object->SizeFromMap(map);
+ Address object_address = object.address();
+ Address memento_address = object_address + object.SizeFromMap(map);
Address last_memento_word_address = memento_address + kTaggedSize;
// If the memento would be on another page, bail out immediately.
if (!Page::OnSamePage(object_address, last_memento_word_address)) {
return AllocationMemento();
}
HeapObject candidate = HeapObject::FromAddress(memento_address);
- MapWordSlot candidate_map_slot = candidate->map_slot();
+ MapWordSlot candidate_map_slot = candidate.map_slot();
// This fast check may peek at an uninitialized word. However, the slow check
// below (memento_address == top) ensures that this is safe. Mark the word as
// initialized to silence MemorySanitizer warnings.
@@ -450,7 +450,7 @@
DCHECK(memento_address == top ||
memento_address + HeapObject::kHeaderSize <= top ||
!Page::OnSamePage(memento_address, top - 1));
- if ((memento_address != top) && memento_candidate->IsValid()) {
+ if ((memento_address != top) && memento_candidate.IsValid()) {
return memento_candidate;
}
return AllocationMemento();
@@ -471,7 +471,7 @@
chunk->IsFlagSet(MemoryChunk::PAGE_NEW_OLD_PROMOTION));
#endif
if (!FLAG_allocation_site_pretenuring ||
- !AllocationSite::CanTrack(map->instance_type())) {
+ !AllocationSite::CanTrack(map.instance_type())) {
return;
}
AllocationMemento memento_candidate =
@@ -481,12 +481,12 @@
// Entering cached feedback is used in the parallel case. We are not allowed
// to dereference the allocation site and rather have to postpone all checks
// till actually merging the data.
- Address key = memento_candidate->GetAllocationSiteUnchecked();
+ Address key = memento_candidate.GetAllocationSiteUnchecked();
(*pretenuring_feedback)[AllocationSite::unchecked_cast(Object(key))]++;
}
void Heap::ExternalStringTable::AddString(String string) {
- DCHECK(string->IsExternalString());
+ DCHECK(string.IsExternalString());
DCHECK(!Contains(string));
if (InYoungGeneration(string)) {
@@ -502,7 +502,7 @@
}
int Heap::NextScriptId() {
- int last_id = last_script_id()->value();
+ int last_id = last_script_id().value();
if (last_id == Smi::kMaxValue) last_id = v8::UnboundScript::kNoScriptId;
last_id++;
set_last_script_id(Smi::FromInt(last_id));
@@ -510,7 +510,7 @@
}
int Heap::NextDebuggingId() {
- int last_id = last_debugging_id()->value();
+ int last_id = last_debugging_id().value();
if (last_id == DebugInfo::DebuggingIdBits::kMax) {
last_id = DebugInfo::kNoDebuggingId;
}
@@ -520,7 +520,7 @@
}
int Heap::GetNextTemplateSerialNumber() {
- int next_serial_number = next_template_serial_number()->value() + 1;
+ int next_serial_number = next_template_serial_number().value() + 1;
set_next_template_serial_number(Smi::FromInt(next_serial_number));
return next_serial_number;
}
diff --git a/src/heap/heap-write-barrier-inl.h b/src/heap/heap-write-barrier-inl.h
index 24eb351..1468a28 100644
--- a/src/heap/heap-write-barrier-inl.h
+++ b/src/heap/heap-write-barrier-inl.h
@@ -66,7 +66,7 @@
V8_INLINE static heap_internals::MemoryChunk* FromHeapObject(
HeapObject object) {
- return reinterpret_cast<MemoryChunk*>(object->ptr() & ~kPageAlignmentMask);
+ return reinterpret_cast<MemoryChunk*>(object.ptr() & ~kPageAlignmentMask);
}
V8_INLINE bool IsMarking() const { return GetFlags() & kMarkingBit; }
@@ -141,7 +141,7 @@
inline void WriteBarrierForCode(Code host, RelocInfo* rinfo, Object value) {
DCHECK(!HasWeakHeapObjectTag(value));
- if (!value->IsHeapObject()) return;
+ if (!value.IsHeapObject()) return;
HeapObject object = HeapObject::cast(value);
GenerationalBarrierForCode(host, rinfo, object);
MarkingBarrierForCode(host, rinfo, object);
@@ -155,7 +155,7 @@
Object value) {
DCHECK(!HasWeakHeapObjectTag(*slot));
DCHECK(!HasWeakHeapObjectTag(value));
- if (!value->IsHeapObject()) return;
+ if (!value.IsHeapObject()) return;
heap_internals::GenerationalBarrierInternal(object, slot.address(),
HeapObject::cast(value));
}
@@ -164,7 +164,7 @@
ObjectSlot slot, Object value) {
DCHECK(!HasWeakHeapObjectTag(*slot));
DCHECK(!HasWeakHeapObjectTag(value));
- DCHECK(value->IsHeapObject());
+ DCHECK(value.IsHeapObject());
heap_internals::GenerationalEphemeronKeyBarrierInternal(
table, slot.address(), HeapObject::cast(value));
}
@@ -188,7 +188,7 @@
inline void MarkingBarrier(HeapObject object, ObjectSlot slot, Object value) {
DCHECK_IMPLIES(slot.address() != kNullAddress, !HasWeakHeapObjectTag(*slot));
DCHECK(!HasWeakHeapObjectTag(value));
- if (!value->IsHeapObject()) return;
+ if (!value.IsHeapObject()) return;
heap_internals::MarkingBarrierInternal(object, slot.address(),
HeapObject::cast(value));
}
diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index 6826707..6ff808c 100644
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -544,10 +544,10 @@
bool Heap::IsRetainingPathTarget(HeapObject object,
RetainingPathOption* option) {
WeakArrayList targets = retaining_path_targets();
- int length = targets->length();
+ int length = targets.length();
MaybeObject object_to_check = HeapObjectReference::Weak(object);
for (int i = 0; i < length; i++) {
- MaybeObject target = targets->Get(i);
+ MaybeObject target = targets.Get(i);
DCHECK(target->IsWeakOrCleared());
if (target == object_to_check) {
DCHECK(retaining_path_target_option_.count(i));
@@ -561,7 +561,7 @@
void Heap::PrintRetainingPath(HeapObject target, RetainingPathOption option) {
PrintF("\n\n\n");
PrintF("#################################################\n");
- PrintF("Retaining path for %p:\n", reinterpret_cast<void*>(target->ptr()));
+ PrintF("Retaining path for %p:\n", reinterpret_cast<void*>(target.ptr()));
HeapObject object = target;
std::vector<std::pair<HeapObject, bool>> retaining_path;
Root root = Root::kUnknown;
@@ -590,10 +590,10 @@
PrintF("^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n");
PrintF("Distance from root %d%s: ", distance,
ephemeron ? " (ephemeron)" : "");
- object->ShortPrint();
+ object.ShortPrint();
PrintF("\n");
#ifdef OBJECT_PRINT
- object->Print();
+ object.Print();
PrintF("\n");
#endif
--distance;
@@ -727,7 +727,7 @@
AllocationSite site;
for (auto& site_and_count : local_pretenuring_feedback) {
site = site_and_count.first;
- MapWord map_word = site_and_count.first->map_word();
+ MapWord map_word = site_and_count.first.map_word();
if (map_word.IsForwardingAddress()) {
site = AllocationSite::cast(map_word.ToForwardingAddress());
}
@@ -735,11 +735,11 @@
// We have not validated the allocation site yet, since we have not
// dereferenced the site during collecting information.
// This is an inlined check of AllocationMemento::IsValid.
- if (!site->IsAllocationSite() || site->IsZombie()) continue;
+ if (!site.IsAllocationSite() || site.IsZombie()) continue;
const int value = static_cast<int>(site_and_count.second);
DCHECK_LT(0, value);
- if (site->IncrementMementoFoundCount(value)) {
+ if (site.IncrementMementoFoundCount(value)) {
// For sites in the global map the count is accessed through the site.
global_pretenuring_feedback_.insert(std::make_pair(site, 0));
}
@@ -803,15 +803,15 @@
// We just transition into tenure state when the semi-space was at
// maximum capacity.
if (maximum_size_scavenge) {
- site->set_deopt_dependent_code(true);
- site->set_pretenure_decision(AllocationSite::kTenure);
+ site.set_deopt_dependent_code(true);
+ site.set_pretenure_decision(AllocationSite::kTenure);
// Currently we just need to deopt when we make a state transition to
// tenure.
return true;
}
- site->set_pretenure_decision(AllocationSite::kMaybeTenure);
+ site.set_pretenure_decision(AllocationSite::kMaybeTenure);
} else {
- site->set_pretenure_decision(AllocationSite::kDontTenure);
+ site.set_pretenure_decision(AllocationSite::kDontTenure);
}
}
return false;
@@ -820,15 +820,15 @@
inline bool DigestPretenuringFeedback(Isolate* isolate, AllocationSite site,
bool maximum_size_scavenge) {
bool deopt = false;
- int create_count = site->memento_create_count();
- int found_count = site->memento_found_count();
+ int create_count = site.memento_create_count();
+ int found_count = site.memento_found_count();
bool minimum_mementos_created =
create_count >= AllocationSite::kPretenureMinimumCreated;
double ratio = minimum_mementos_created || FLAG_trace_pretenuring_statistics
? static_cast<double>(found_count) / create_count
: 0.0;
AllocationSite::PretenureDecision current_decision =
- site->pretenure_decision();
+ site.pretenure_decision();
if (minimum_mementos_created) {
deopt = MakePretenureDecision(site, current_decision, ratio,
@@ -840,13 +840,13 @@
"pretenuring: AllocationSite(%p): (created, found, ratio) "
"(%d, %d, %f) %s => %s\n",
reinterpret_cast<void*>(site.ptr()), create_count, found_count,
- ratio, site->PretenureDecisionName(current_decision),
- site->PretenureDecisionName(site->pretenure_decision()));
+ ratio, site.PretenureDecisionName(current_decision),
+ site.PretenureDecisionName(site.pretenure_decision()));
}
// Clear feedback calculation fields until the next gc.
- site->set_memento_found_count(0);
- site->set_memento_create_count(0);
+ site.set_memento_found_count(0);
+ site.set_memento_create_count(0);
return deopt;
}
} // namespace
@@ -877,18 +877,18 @@
site = site_and_count.first;
// Count is always access through the site.
DCHECK_EQ(0, site_and_count.second);
- int found_count = site->memento_found_count();
+ int found_count = site.memento_found_count();
// An entry in the storage does not imply that the count is > 0 because
// allocation sites might have been reset due to too many objects dying
// in old space.
if (found_count > 0) {
- DCHECK(site->IsAllocationSite());
+ DCHECK(site.IsAllocationSite());
active_allocation_sites++;
allocation_mementos_found += found_count;
if (DigestPretenuringFeedback(isolate_, site, maximum_size_scavenge)) {
trigger_deoptimization = true;
}
- if (site->GetAllocationType() == AllocationType::kOld) {
+ if (site.GetAllocationType() == AllocationType::kOld) {
tenure_decisions++;
} else {
dont_tenure_decisions++;
@@ -902,10 +902,10 @@
ForeachAllocationSite(
allocation_sites_list(),
[&allocation_sites, &trigger_deoptimization](AllocationSite site) {
- DCHECK(site->IsAllocationSite());
+ DCHECK(site.IsAllocationSite());
allocation_sites++;
- if (site->IsMaybeTenure()) {
- site->set_deopt_dependent_code(true);
+ if (site.IsMaybeTenure()) {
+ site.set_deopt_dependent_code(true);
trigger_deoptimization = true;
}
});
@@ -935,7 +935,7 @@
void Heap::InvalidateCodeDeoptimizationData(Code code) {
MemoryChunk* chunk = MemoryChunk::FromHeapObject(code);
CodePageMemoryModificationScope modification_scope(chunk);
- code->set_deoptimization_data(ReadOnlyRoots(this).empty_fixed_array());
+ code.set_deoptimization_data(ReadOnlyRoots(this).empty_fixed_array());
}
void Heap::DeoptMarkedAllocationSites() {
@@ -943,10 +943,10 @@
// performance issue, use a cache data structure in heap instead.
ForeachAllocationSite(allocation_sites_list(), [this](AllocationSite site) {
- if (site->deopt_dependent_code()) {
- site->dependent_code()->MarkCodeForDeoptimization(
+ if (site.deopt_dependent_code()) {
+ site.dependent_code().MarkCodeForDeoptimization(
isolate_, DependentCode::kAllocationSiteTenuringChangedGroup);
- site->set_deopt_dependent_code(false);
+ site.set_deopt_dependent_code(false);
}
});
@@ -981,10 +981,9 @@
isolate_->counters()->alive_after_last_gc()->Set(
static_cast<int>(SizeOfObjects()));
- isolate_->counters()->string_table_capacity()->Set(
- string_table()->Capacity());
+ isolate_->counters()->string_table_capacity()->Set(string_table().Capacity());
isolate_->counters()->number_of_symbols()->Set(
- string_table()->NumberOfElements());
+ string_table().NumberOfElements());
if (CommittedMemory() > 0) {
isolate_->counters()->external_fragmentation_total()->AddSample(
@@ -1049,7 +1048,7 @@
// earlier invocation of the cleanup function didn't iterate through
// them). See https://github.com/tc39/proposal-weakrefs/issues/34
HandleScope handle_scope(isolate());
- while (!isolate()->heap()->dirty_js_finalization_groups()->IsUndefined(
+ while (!isolate()->heap()->dirty_js_finalization_groups().IsUndefined(
isolate())) {
// Enqueue one microtask per JSFinalizationGroup.
Handle<JSFinalizationGroup> finalization_group(
@@ -1181,10 +1180,10 @@
intptr_t CompareWords(int size, HeapObject a, HeapObject b) {
int slots = size / kTaggedSize;
- DCHECK_EQ(a->Size(), size);
- DCHECK_EQ(b->Size(), size);
- Tagged_t* slot_a = reinterpret_cast<Tagged_t*>(a->address());
- Tagged_t* slot_b = reinterpret_cast<Tagged_t*>(b->address());
+ DCHECK_EQ(a.Size(), size);
+ DCHECK_EQ(b.Size(), size);
+ Tagged_t* slot_a = reinterpret_cast<Tagged_t*>(a.address());
+ Tagged_t* slot_b = reinterpret_cast<Tagged_t*>(b.address());
for (int i = 0; i < slots; i++) {
if (*slot_a != *slot_b) {
return *slot_a - *slot_b;
@@ -1231,7 +1230,7 @@
PrintF("%d duplicates of size %d each (%dKB)\n", it->first, size,
duplicate_bytes / KB);
PrintF("Sample object: ");
- it->second->Print();
+ it->second.Print();
PrintF("============================\n");
}
}
@@ -1286,13 +1285,13 @@
space = spaces.next()) {
HeapObjectIterator it(space);
for (HeapObject obj = it.Next(); !obj.is_null(); obj = it.Next()) {
- objects_by_size[obj->Size()].push_back(obj);
+ objects_by_size[obj.Size()].push_back(obj);
}
}
{
LargeObjectIterator it(lo_space());
for (HeapObject obj = it.Next(); !obj.is_null(); obj = it.Next()) {
- objects_by_size[obj->Size()].push_back(obj);
+ objects_by_size[obj.Size()].push_back(obj);
}
}
for (auto it = objects_by_size.rbegin(); it != objects_by_size.rend();
@@ -1462,7 +1461,7 @@
event.next_gc_likely_to_collect_more =
(committed_memory_before > committed_memory_after + MB) ||
HasHighFragmentation(used_memory_after, committed_memory_after) ||
- (detached_contexts()->length() > 0);
+ (detached_contexts().length() > 0);
event.committed_memory = committed_memory_after;
if (deserialization_complete_) {
memory_reducer_->NotifyMarkCompact(event);
@@ -1506,7 +1505,7 @@
}
isolate()->AbortConcurrentOptimization(BlockingBehavior::kDontBlock);
- number_of_disposed_maps_ = retained_maps()->length();
+ number_of_disposed_maps_ = retained_maps().length();
tracer()->AddContextDisposalTime(MonotonicallyIncreasingTimeInMs());
return ++contexts_disposed_;
}
@@ -1548,7 +1547,7 @@
const ObjectSlot src_slot, int len,
WriteBarrierMode mode) {
DCHECK_NE(len, 0);
- DCHECK_NE(dst_object->map(), ReadOnlyRoots(this).fixed_cow_array_map());
+ DCHECK_NE(dst_object.map(), ReadOnlyRoots(this).fixed_cow_array_map());
const ObjectSlot dst_end(dst_slot + len);
// Ensure no range overflow.
DCHECK(dst_slot < dst_end);
@@ -1600,7 +1599,7 @@
const TSlot src_slot, int len, WriteBarrierMode mode) {
DCHECK_NE(len, 0);
- DCHECK_NE(dst_object->map(), ReadOnlyRoots(this).fixed_cow_array_map());
+ DCHECK_NE(dst_object.map(), ReadOnlyRoots(this).fixed_cow_array_map());
const TSlot dst_end(dst_slot + len);
// Ensure ranges do not overlap.
DCHECK(dst_end <= src_slot || (src_slot + len) <= dst_slot);
@@ -1634,11 +1633,11 @@
// Visit all HeapObject pointers in [start, end).
for (ObjectSlot p = start; p < end; ++p) {
DCHECK(!HasWeakHeapObjectTag(*p));
- if ((*p)->IsHeapObject()) {
+ if ((*p).IsHeapObject()) {
HeapObject object = HeapObject::cast(*p);
// Check that the string is actually internalized.
- CHECK(object->IsTheHole(isolate_) || object->IsUndefined(isolate_) ||
- object->IsInternalizedString());
+ CHECK(object.IsTheHole(isolate_) || object.IsUndefined(isolate_) ||
+ object.IsInternalizedString());
}
}
}
@@ -1659,7 +1658,7 @@
static void VerifyStringTable(Isolate* isolate) {
StringTableVerifier verifier(isolate);
- isolate->heap()->string_table()->IterateElements(&verifier);
+ isolate->heap()->string_table().IterateElements(&verifier);
}
#endif // VERIFY_HEAP
@@ -1693,7 +1692,7 @@
if (allocation.To(&free_space)) {
// Mark with a free list node, in case we have a GC before
// deserializing.
- Address free_space_address = free_space->address();
+ Address free_space_address = free_space.address();
CreateFillerObjectAt(free_space_address, Map::kSize,
ClearRecordedSlots::kNo);
maps->push_back(free_space_address);
@@ -1725,7 +1724,7 @@
if (allocation.To(&free_space)) {
// Mark with a free list node, in case we have a GC before
// deserializing.
- Address free_space_address = free_space->address();
+ Address free_space_address = free_space.address();
CreateFillerObjectAt(free_space_address, size,
ClearRecordedSlots::kNo);
DCHECK_GT(SerializerDeserializer::kNumberOfPreallocatedSpaces,
@@ -2277,7 +2276,7 @@
void Heap::UpdateExternalString(String string, size_t old_payload,
size_t new_payload) {
- DCHECK(string->IsExternalString());
+ DCHECK(string.IsExternalString());
Page* page = Page::FromHeapObject(string);
if (old_payload > new_payload) {
@@ -2292,7 +2291,7 @@
String Heap::UpdateYoungReferenceInExternalStringTableEntry(Heap* heap,
FullObjectSlot p) {
HeapObject obj = HeapObject::cast(*p);
- MapWord first_word = obj->map_word();
+ MapWord first_word = obj.map_word();
String new_string;
@@ -2300,9 +2299,9 @@
if (!first_word.IsForwardingAddress()) {
// Unreachable external string can be finalized.
String string = String::cast(obj);
- if (!string->IsExternalString()) {
+ if (!string.IsExternalString()) {
// Original external string has been internalized.
- DCHECK(string->IsThinString());
+ DCHECK(string.IsThinString());
return String();
}
heap->FinalizeExternalString(string);
@@ -2314,19 +2313,19 @@
}
// String is still reachable.
- if (new_string->IsThinString()) {
+ if (new_string.IsThinString()) {
// Filtering Thin strings out of the external string table.
return String();
- } else if (new_string->IsExternalString()) {
+ } else if (new_string.IsExternalString()) {
MemoryChunk::MoveExternalBackingStoreBytes(
ExternalBackingStoreType::kExternalString,
Page::FromAddress((*p).ptr()), Page::FromHeapObject(new_string),
- ExternalString::cast(new_string)->ExternalPayloadSize());
+ ExternalString::cast(new_string).ExternalPayloadSize());
return new_string;
}
// Internalization can replace external strings with non-external strings.
- return new_string->IsExternalString() ? new_string : String();
+ return new_string.IsExternalString() ? new_string : String();
}
void Heap::ExternalStringTable::VerifyYoung() {
@@ -2339,12 +2338,12 @@
MemoryChunk* mc = MemoryChunk::FromHeapObject(obj);
DCHECK(mc->InYoungGeneration());
DCHECK(heap_->InYoungGeneration(obj));
- DCHECK(!obj->IsTheHole(heap_->isolate()));
- DCHECK(obj->IsExternalString());
+ DCHECK(!obj.IsTheHole(heap_->isolate()));
+ DCHECK(obj.IsExternalString());
// Note: we can have repeated elements in the table.
DCHECK_EQ(0, visited_map.count(obj));
visited_map.insert(obj);
- size_map[mc] += ExternalString::cast(obj)->ExternalPayloadSize();
+ size_map[mc] += ExternalString::cast(obj).ExternalPayloadSize();
}
for (std::map<MemoryChunk*, size_t>::iterator it = size_map.begin();
it != size_map.end(); it++)
@@ -2363,12 +2362,12 @@
MemoryChunk* mc = MemoryChunk::FromHeapObject(obj);
DCHECK(!mc->InYoungGeneration());
DCHECK(!heap_->InYoungGeneration(obj));
- DCHECK(!obj->IsTheHole(heap_->isolate()));
- DCHECK(obj->IsExternalString());
+ DCHECK(!obj.IsTheHole(heap_->isolate()));
+ DCHECK(obj.IsExternalString());
// Note: we can have repeated elements in the table.
DCHECK_EQ(0, visited_map.count(obj));
visited_map.insert(obj);
- size_map[mc] += ExternalString::cast(obj)->ExternalPayloadSize();
+ size_map[mc] += ExternalString::cast(obj).ExternalPayloadSize();
}
for (std::map<MemoryChunk*, size_t>::iterator it = size_map.begin();
it != size_map.end(); it++)
@@ -2389,7 +2388,7 @@
if (target.is_null()) continue;
- DCHECK(target->IsExternalString());
+ DCHECK(target.IsExternalString());
if (InYoungGeneration(target)) {
// String is still in new space. Update the table entry.
@@ -2492,16 +2491,16 @@
Object list, const std::function<void(AllocationSite)>& visitor) {
DisallowHeapAllocation disallow_heap_allocation;
Object current = list;
- while (current->IsAllocationSite()) {
+ while (current.IsAllocationSite()) {
AllocationSite site = AllocationSite::cast(current);
visitor(site);
- Object current_nested = site->nested_site();
- while (current_nested->IsAllocationSite()) {
+ Object current_nested = site.nested_site();
+ while (current_nested.IsAllocationSite()) {
AllocationSite nested_site = AllocationSite::cast(current_nested);
visitor(nested_site);
- current_nested = nested_site->nested_site();
+ current_nested = nested_site.nested_site();
}
- current = site->weak_next();
+ current = site.weak_next();
}
}
@@ -2511,9 +2510,9 @@
ForeachAllocationSite(allocation_sites_list(),
[&marked, allocation, this](AllocationSite site) {
- if (site->GetAllocationType() == allocation) {
- site->ResetPretenureDecision();
- site->set_deopt_dependent_code(true);
+ if (site.GetAllocationType() == allocation) {
+ site.ResetPretenureDecision();
+ site.set_deopt_dependent_code(true);
marked = true;
RemoveAllocationSitePretenuringFeedback(site);
return;
@@ -2557,7 +2556,7 @@
void VisitRootPointers(Root root, const char* description,
FullObjectSlot start, FullObjectSlot end) override {
for (FullObjectSlot p = start; p < end; ++p) {
- DCHECK((*p)->IsExternalString());
+ DCHECK((*p).IsExternalString());
visitor_->VisitExternalString(
Utils::ToLocal(Handle<String>(String::cast(*p), isolate_)));
}
@@ -2615,8 +2614,8 @@
}
HeapObject Heap::PrecedeWithFiller(HeapObject object, int filler_size) {
- CreateFillerObjectAt(object->address(), filler_size, ClearRecordedSlots::kNo);
- return HeapObject::FromAddress(object->address() + filler_size);
+ CreateFillerObjectAt(object.address(), filler_size, ClearRecordedSlots::kNo);
+ return HeapObject::FromAddress(object.address() + filler_size);
}
HeapObject Heap::AlignWithFiller(HeapObject object, int object_size,
@@ -2624,13 +2623,13 @@
AllocationAlignment alignment) {
int filler_size = allocation_size - object_size;
DCHECK_LT(0, filler_size);
- int pre_filler = GetFillToAlign(object->address(), alignment);
+ int pre_filler = GetFillToAlign(object.address(), alignment);
if (pre_filler) {
object = PrecedeWithFiller(object, pre_filler);
filler_size -= pre_filler;
}
if (filler_size) {
- CreateFillerObjectAt(object->address() + object_size, filler_size,
+ CreateFillerObjectAt(object.address() + object_size, filler_size,
ClearRecordedSlots::kNo);
}
return object;
@@ -2674,9 +2673,9 @@
void Heap::FlushNumberStringCache() {
// Flush the number to string cache.
- int len = number_string_cache()->length();
+ int len = number_string_cache().length();
for (int i = 0; i < len; i++) {
- number_string_cache()->set_undefined(i);
+ number_string_cache().set_undefined(i);
}
}
@@ -2686,11 +2685,11 @@
if (size == 0) return HeapObject();
HeapObject filler = HeapObject::FromAddress(addr);
if (size == kTaggedSize) {
- filler->set_map_after_allocation(
+ filler.set_map_after_allocation(
Map::unchecked_cast(isolate()->root(RootIndex::kOnePointerFillerMap)),
SKIP_WRITE_BARRIER);
} else if (size == 2 * kTaggedSize) {
- filler->set_map_after_allocation(
+ filler.set_map_after_allocation(
Map::unchecked_cast(isolate()->root(RootIndex::kTwoPointerFillerMap)),
SKIP_WRITE_BARRIER);
if (clear_memory_mode == ClearFreedMemoryMode::kClearFreedMemory) {
@@ -2699,10 +2698,10 @@
}
} else {
DCHECK_GT(size, 2 * kTaggedSize);
- filler->set_map_after_allocation(
+ filler.set_map_after_allocation(
Map::unchecked_cast(isolate()->root(RootIndex::kFreeSpaceMap)),
SKIP_WRITE_BARRIER);
- FreeSpace::cast(filler)->relaxed_write_size(size);
+ FreeSpace::cast(filler).relaxed_write_size(size);
if (clear_memory_mode == ClearFreedMemoryMode::kClearFreedMemory) {
MemsetTagged(ObjectSlot(addr) + 2, Object(kClearedFreeMemoryValue),
(size / kTaggedSize) - 2);
@@ -2714,9 +2713,9 @@
// At this point, we may be deserializing the heap from a snapshot, and
// none of the maps have been created yet and are nullptr.
- DCHECK((filler->map_slot().contains_value(kNullAddress) &&
+ DCHECK((filler.map_slot().contains_value(kNullAddress) &&
!deserialization_complete_) ||
- filler->map()->IsMap());
+ filler.map().IsMap());
return filler;
}
@@ -2769,7 +2768,7 @@
// New space object do not have recorded slots.
if (MemoryChunk::FromHeapObject(object)->InYoungGeneration()) return false;
// Whitelist objects that definitely do not have pointers.
- if (object->IsByteArray() || object->IsFixedDoubleArray()) return false;
+ if (object.IsByteArray() || object.IsFixedDoubleArray()) return false;
// Conservatively return true for other objects.
return true;
}
@@ -2779,15 +2778,15 @@
int size_in_bytes) {
HeapProfiler* heap_profiler = isolate_->heap_profiler();
if (heap_profiler->is_tracking_object_moves()) {
- heap_profiler->ObjectMoveEvent(source->address(), target->address(),
+ heap_profiler->ObjectMoveEvent(source.address(), target.address(),
size_in_bytes);
}
for (auto& tracker : allocation_trackers_) {
- tracker->MoveEvent(source->address(), target->address(), size_in_bytes);
+ tracker->MoveEvent(source.address(), target.address(), size_in_bytes);
}
- if (target->IsSharedFunctionInfo()) {
- LOG_CODE_EVENT(isolate_, SharedFunctionInfoMoveEvent(source->address(),
- target->address()));
+ if (target.IsSharedFunctionInfo()) {
+ LOG_CODE_EVENT(isolate_, SharedFunctionInfoMoveEvent(source.address(),
+ target.address()));
}
if (FLAG_verify_predictable) {
@@ -2817,26 +2816,26 @@
DCHECK(CanMoveObjectStart(object));
// Add custom visitor to concurrent marker if new left-trimmable type
// is added.
- DCHECK(object->IsFixedArray() || object->IsFixedDoubleArray());
- const int element_size = object->IsFixedArray() ? kTaggedSize : kDoubleSize;
+ DCHECK(object.IsFixedArray() || object.IsFixedDoubleArray());
+ const int element_size = object.IsFixedArray() ? kTaggedSize : kDoubleSize;
const int bytes_to_trim = elements_to_trim * element_size;
- Map map = object->map();
+ Map map = object.map();
// For now this trick is only applied to fixed arrays which may be in new
// space or old space. In a large object space the object's start must
// coincide with chunk and thus the trick is just not applicable.
DCHECK(!IsLargeObject(object));
- DCHECK(object->map() != ReadOnlyRoots(this).fixed_cow_array_map());
+ DCHECK(object.map() != ReadOnlyRoots(this).fixed_cow_array_map());
STATIC_ASSERT(FixedArrayBase::kMapOffset == 0);
STATIC_ASSERT(FixedArrayBase::kLengthOffset == kTaggedSize);
STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kTaggedSize);
- const int len = object->length();
+ const int len = object.length();
DCHECK(elements_to_trim <= len);
// Calculate location of new array start.
- Address old_start = object->address();
+ Address old_start = object.address();
Address new_start = old_start + bytes_to_trim;
if (incremental_marking()->IsMarking()) {
@@ -2876,14 +2875,14 @@
// Note we cannot use ClearFreedMemoryMode of CreateFillerObjectAt because
// we need pointer granularity writes to avoid race with the concurrent
// marking.
- if (filler->Size() > FreeSpace::kSize) {
+ if (filler.Size() > FreeSpace::kSize) {
MemsetTagged(filler.RawField(FreeSpace::kSize),
ReadOnlyRoots(this).undefined_value(),
- (filler->Size() - FreeSpace::kSize) / kTaggedSize);
+ (filler.Size() - FreeSpace::kSize) / kTaggedSize);
}
}
// Notify the heap profiler of change in object layout.
- OnMoveEvent(new_object, object, new_object->Size());
+ OnMoveEvent(new_object, object, new_object.Size());
#ifdef ENABLE_SLOW_DCHECKS
if (FLAG_enable_slow_asserts) {
@@ -2899,21 +2898,21 @@
}
void Heap::RightTrimFixedArray(FixedArrayBase object, int elements_to_trim) {
- const int len = object->length();
+ const int len = object.length();
DCHECK_LE(elements_to_trim, len);
DCHECK_GE(elements_to_trim, 0);
int bytes_to_trim;
- DCHECK(!object->IsFixedTypedArrayBase());
- if (object->IsByteArray()) {
+ DCHECK(!object.IsFixedTypedArrayBase());
+ if (object.IsByteArray()) {
int new_size = ByteArray::SizeFor(len - elements_to_trim);
bytes_to_trim = ByteArray::SizeFor(len) - new_size;
DCHECK_GE(bytes_to_trim, 0);
- } else if (object->IsFixedArray()) {
+ } else if (object.IsFixedArray()) {
CHECK_NE(elements_to_trim, len);
bytes_to_trim = elements_to_trim * kTaggedSize;
} else {
- DCHECK(object->IsFixedDoubleArray());
+ DCHECK(object.IsFixedDoubleArray());
CHECK_NE(elements_to_trim, len);
bytes_to_trim = elements_to_trim * kDoubleSize;
}
@@ -2934,11 +2933,11 @@
template <typename T>
void Heap::CreateFillerForArray(T object, int elements_to_trim,
int bytes_to_trim) {
- DCHECK(object->IsFixedArrayBase() || object->IsByteArray() ||
- object->IsWeakFixedArray());
+ DCHECK(object.IsFixedArrayBase() || object.IsByteArray() ||
+ object.IsWeakFixedArray());
// For now this trick is only applied to objects in new and paged space.
- DCHECK(object->map() != ReadOnlyRoots(this).fixed_cow_array_map());
+ DCHECK(object.map() != ReadOnlyRoots(this).fixed_cow_array_map());
if (bytes_to_trim == 0) {
DCHECK_EQ(elements_to_trim, 0);
@@ -2947,8 +2946,8 @@
}
// Calculate location of new array end.
- int old_size = object->Size();
- Address old_end = object->address() + old_size;
+ int old_size = object.Size();
+ Address old_end = object.address() + old_size;
Address new_end = old_end - bytes_to_trim;
// Register the array as an object with invalidated old-to-old slots. We
@@ -2986,12 +2985,12 @@
// Initialize header of the trimmed array. We are storing the new length
// using release store after creating a filler for the left-over space to
// avoid races with the sweeper thread.
- object->synchronized_set_length(object->length() - elements_to_trim);
+ object.synchronized_set_length(object.length() - elements_to_trim);
// Notify the heap object allocation tracker of change in object layout. The
// array may not be moved during GC, and size has to be adjusted nevertheless.
for (auto& tracker : allocation_trackers_) {
- tracker->UpdateObjectSizeEvent(object->address(), object->Size());
+ tracker->UpdateObjectSizeEvent(object.address(), object.Size());
}
}
@@ -3229,7 +3228,7 @@
if (marking_state->IsBlack(obj)) {
incremental_marking()->ProcessBlackAllocatedObject(obj);
}
- addr += obj->Size();
+ addr += obj.Size();
}
}
}
@@ -3301,19 +3300,19 @@
// If you see this check triggering for a freshly allocated object,
// use object->set_map_after_allocation() to initialize its map.
if (pending_layout_change_object_.is_null()) {
- if (object->IsJSObject()) {
- DCHECK(!object->map()->TransitionRequiresSynchronizationWithGC(new_map));
+ if (object.IsJSObject()) {
+ DCHECK(!object.map().TransitionRequiresSynchronizationWithGC(new_map));
} else {
// Check that the set of slots before and after the transition match.
SlotCollectingVisitor old_visitor;
- object->IterateFast(&old_visitor);
- MapWord old_map_word = object->map_word();
+ object.IterateFast(&old_visitor);
+ MapWord old_map_word = object.map_word();
// Temporarily set the new map to iterate new slots.
- object->set_map_word(MapWord::FromMap(new_map));
+ object.set_map_word(MapWord::FromMap(new_map));
SlotCollectingVisitor new_visitor;
- object->IterateFast(&new_visitor);
+ object.IterateFast(&new_visitor);
// Restore the old map.
- object->set_map_word(old_map_word);
+ object.set_map_word(old_map_word);
DCHECK_EQ(new_visitor.number_of_slots(), old_visitor.number_of_slots());
for (int i = 0; i < new_visitor.number_of_slots(); i++) {
DCHECK(new_visitor.slot(i) == old_visitor.slot(i));
@@ -3693,7 +3692,7 @@
}
bool Heap::Contains(HeapObject value) {
- if (memory_allocator()->IsOutsideAllocatedSpace(value->address())) {
+ if (memory_allocator()->IsOutsideAllocatedSpace(value.address())) {
return false;
}
return HasBeenSetUp() &&
@@ -3704,7 +3703,7 @@
}
bool Heap::InSpace(HeapObject value, AllocationSpace space) {
- if (memory_allocator()->IsOutsideAllocatedSpace(value->address())) {
+ if (memory_allocator()->IsOutsideAllocatedSpace(value.address())) {
return false;
}
if (!HasBeenSetUp()) return false;
@@ -3783,7 +3782,7 @@
void VerifyPointers(HeapObject host, MaybeObjectSlot start,
MaybeObjectSlot end) override {
if (!host.is_null()) {
- CHECK(ReadOnlyHeap::Contains(host->map()));
+ CHECK(ReadOnlyHeap::Contains(host.map()));
}
VerifyPointersVisitor::VerifyPointers(host, start, end);
@@ -3809,7 +3808,7 @@
if (!isolate()->context().is_null() &&
!isolate()->normalized_map_cache()->IsUndefined(isolate())) {
NormalizedMapCache::cast(*isolate()->normalized_map_cache())
- ->NormalizedMapCacheVerify(isolate());
+ .NormalizedMapCacheVerify(isolate());
}
VerifySmisVisitor smis_visitor;
@@ -3968,8 +3967,8 @@
// In RO_SPACE chunk->mutex() may be nullptr, so just ignore it.
base::LockGuard<base::Mutex, base::NullBehavior::kIgnoreIfNull> lock_guard(
chunk->mutex());
- Address start = object->address();
- Address end = start + object->Size();
+ Address start = object.address();
+ Address end = start + object.Size();
std::set<Address> old_to_new;
std::set<std::pair<SlotType, Address> > typed_old_to_new;
if (!InYoungGeneration(object)) {
@@ -3977,7 +3976,7 @@
CollectSlots<OLD_TO_NEW>(chunk, start, end, &old_to_new, &typed_old_to_new);
OldToNewSlotVerifyingVisitor visitor(&old_to_new, &typed_old_to_new,
&this->ephemeron_remembered_set_);
- object->IterateBody(&visitor);
+ object.IterateBody(&visitor);
}
// TODO(ulan): Add old to old slot set verification once all weak objects
// have their own instance types and slots are recorded for all weak fields.
@@ -4092,25 +4091,25 @@
private:
inline void FixHandle(FullObjectSlot p) {
- if (!(*p)->IsHeapObject()) return;
+ if (!(*p).IsHeapObject()) return;
HeapObject current = HeapObject::cast(*p);
- const MapWord map_word = current->map_word();
- if (!map_word.IsForwardingAddress() && current->IsFiller()) {
+ const MapWord map_word = current.map_word();
+ if (!map_word.IsForwardingAddress() && current.IsFiller()) {
#ifdef DEBUG
// We need to find a FixedArrayBase map after walking the fillers.
- while (current->IsFiller()) {
- Address next = current->ptr();
- if (current->map() == ReadOnlyRoots(heap_).one_pointer_filler_map()) {
+ while (current.IsFiller()) {
+ Address next = current.ptr();
+ if (current.map() == ReadOnlyRoots(heap_).one_pointer_filler_map()) {
next += kTaggedSize;
- } else if (current->map() ==
+ } else if (current.map() ==
ReadOnlyRoots(heap_).two_pointer_filler_map()) {
next += 2 * kTaggedSize;
} else {
- next += current->Size();
+ next += current.Size();
}
current = HeapObject::cast(Object(next));
}
- DCHECK(current->IsFixedArrayBase());
+ DCHECK(current.IsFixedArrayBase());
#endif // DEBUG
p.store(Smi::kZero);
}
@@ -4395,10 +4394,10 @@
HeapIterator iterator(this);
for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) {
- InstanceType type = obj->map()->instance_type();
+ InstanceType type = obj.map().instance_type();
DCHECK(0 <= type && type <= LAST_TYPE);
stats->objects_per_type[type]++;
- stats->size_per_type[type] += obj->Size();
+ stats->size_per_type[type] += obj.Size();
}
}
if (stats->last_few_messages != nullptr)
@@ -4615,16 +4614,16 @@
DCHECK_GE(object_size, 0);
if (!Heap::IsImmovable(heap_object)) {
if (isolate()->serializer_enabled() ||
- code_space_->first_page()->Contains(heap_object->address())) {
+ code_space_->first_page()->Contains(heap_object.address())) {
MemoryChunk::FromHeapObject(heap_object)->MarkNeverEvacuate();
} else {
// Discard the first code allocation, which was on a page where it could
// be moved.
- CreateFillerObjectAt(heap_object->address(), object_size,
+ CreateFillerObjectAt(heap_object.address(), object_size,
ClearRecordedSlots::kNo);
heap_object = AllocateRawCodeInLargeObjectSpace(object_size);
UnprotectAndRegisterMemoryChunk(heap_object);
- ZapCodeObject(heap_object->address(), object_size);
+ ZapCodeObject(heap_object.address(), object_size);
OnAllocationEvent(heap_object, object_size);
}
}
@@ -4844,7 +4843,7 @@
} else {
new_hash_seed = static_cast<uint64_t>(FLAG_hash_seed);
}
- ReadOnlyRoots(this).hash_seed()->copy_in(
+ ReadOnlyRoots(this).hash_seed().copy_in(
0, reinterpret_cast<byte*>(&new_hash_seed), kInt64Size);
}
@@ -4954,7 +4953,7 @@
// The embedder is not aware of whether numbers are materialized as heap
// objects or just passed around as Smis.
Object object(*location);
- if (!object->IsHeapObject()) return;
+ if (!object.IsHeapObject()) return;
HeapObject heap_object = HeapObject::cast(object);
DCHECK(IsValidHeapObject(this, heap_object));
if (FLAG_incremental_marking_wrappers && incremental_marking()->IsMarking()) {
@@ -5155,9 +5154,9 @@
{
HeapIterator iterator(this);
for (HeapObject o = iterator.next(); !o.is_null(); o = iterator.next()) {
- if (o->IsPrototypeInfo()) {
+ if (o.IsPrototypeInfo()) {
PrototypeInfo prototype_info = PrototypeInfo::cast(o);
- if (prototype_info->prototype_users()->IsWeakArrayList()) {
+ if (prototype_info.prototype_users().IsWeakArrayList()) {
prototype_infos.emplace_back(handle(prototype_info, isolate()));
}
}
@@ -5209,23 +5208,23 @@
void Heap::CompactRetainedMaps(WeakArrayList retained_maps) {
DCHECK_EQ(retained_maps, this->retained_maps());
- int length = retained_maps->length();
+ int length = retained_maps.length();
int new_length = 0;
int new_number_of_disposed_maps = 0;
// This loop compacts the array by removing cleared weak cells.
for (int i = 0; i < length; i += 2) {
- MaybeObject maybe_object = retained_maps->Get(i);
+ MaybeObject maybe_object = retained_maps.Get(i);
if (maybe_object->IsCleared()) {
continue;
}
DCHECK(maybe_object->IsWeak());
- MaybeObject age = retained_maps->Get(i + 1);
+ MaybeObject age = retained_maps.Get(i + 1);
DCHECK(age->IsSmi());
if (i != new_length) {
- retained_maps->Set(new_length, maybe_object);
- retained_maps->Set(new_length + 1, age);
+ retained_maps.Set(new_length, maybe_object);
+ retained_maps.Set(new_length + 1, age);
}
if (i < number_of_disposed_maps_) {
new_number_of_disposed_maps += 2;
@@ -5235,9 +5234,9 @@
number_of_disposed_maps_ = new_number_of_disposed_maps;
HeapObject undefined = ReadOnlyRoots(this).undefined_value();
for (int i = new_length; i < length; i++) {
- retained_maps->Set(i, HeapObjectReference::Strong(undefined));
+ retained_maps.Set(i, HeapObjectReference::Strong(undefined));
}
- if (new_length != length) retained_maps->set_length(new_length);
+ if (new_length != length) retained_maps.set_length(new_length);
}
void Heap::FatalProcessOutOfMemory(const char* location) {
@@ -5386,7 +5385,7 @@
}
bool SkipObject(HeapObject object) override {
- if (object->IsFiller()) return true;
+ if (object.IsFiller()) return true;
MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
if (reachable_.count(chunk) == 0) return true;
return reachable_[chunk]->count(object) == 0;
@@ -5435,7 +5434,7 @@
while (!marking_stack_.empty()) {
HeapObject obj = marking_stack_.back();
marking_stack_.pop_back();
- obj->Iterate(this);
+ obj.Iterate(this);
}
}
@@ -5556,13 +5555,13 @@
Isolate* isolate = heap_->isolate();
for (size_t i = 0; i < young_strings_.size(); ++i) {
Object o = young_strings_[i];
- if (o->IsTheHole(isolate)) {
+ if (o.IsTheHole(isolate)) {
continue;
}
// The real external string is already in one of these vectors and was or
// will be processed. Re-processing it will add a duplicate to the vector.
- if (o->IsThinString()) continue;
- DCHECK(o->IsExternalString());
+ if (o.IsThinString()) continue;
+ DCHECK(o.IsExternalString());
if (InYoungGeneration(o)) {
young_strings_[last++] = o;
} else {
@@ -5578,13 +5577,13 @@
Isolate* isolate = heap_->isolate();
for (size_t i = 0; i < old_strings_.size(); ++i) {
Object o = old_strings_[i];
- if (o->IsTheHole(isolate)) {
+ if (o.IsTheHole(isolate)) {
continue;
}
// The real external string is already in one of these vectors and was or
// will be processed. Re-processing it will add a duplicate to the vector.
- if (o->IsThinString()) continue;
- DCHECK(o->IsExternalString());
+ if (o.IsThinString()) continue;
+ DCHECK(o.IsExternalString());
DCHECK(!InYoungGeneration(o));
old_strings_[last++] = o;
}
@@ -5600,14 +5599,14 @@
for (size_t i = 0; i < young_strings_.size(); ++i) {
Object o = young_strings_[i];
// Don't finalize thin strings.
- if (o->IsThinString()) continue;
+ if (o.IsThinString()) continue;
heap_->FinalizeExternalString(ExternalString::cast(o));
}
young_strings_.clear();
for (size_t i = 0; i < old_strings_.size(); ++i) {
Object o = old_strings_[i];
// Don't finalize thin strings.
- if (o->IsThinString()) continue;
+ if (o.IsThinString()) continue;
heap_->FinalizeExternalString(ExternalString::cast(o));
}
old_strings_.clear();
@@ -5658,7 +5657,7 @@
}
void Heap::SetInterpreterEntryTrampolineForProfiling(Code code) {
- DCHECK_EQ(Builtins::kInterpreterEntryTrampoline, code->builtin_index());
+ DCHECK_EQ(Builtins::kInterpreterEntryTrampoline, code.builtin_index());
set_interpreter_entry_trampoline_for_profiling(code);
}
@@ -5666,12 +5665,12 @@
JSFinalizationGroup finalization_group,
std::function<void(HeapObject object, ObjectSlot slot, Object target)>
gc_notify_updated_slot) {
- DCHECK(dirty_js_finalization_groups()->IsUndefined(isolate()) ||
- dirty_js_finalization_groups()->IsJSFinalizationGroup());
- DCHECK(finalization_group->next()->IsUndefined(isolate()));
- DCHECK(!finalization_group->scheduled_for_cleanup());
- finalization_group->set_scheduled_for_cleanup(true);
- finalization_group->set_next(dirty_js_finalization_groups());
+ DCHECK(dirty_js_finalization_groups().IsUndefined(isolate()) ||
+ dirty_js_finalization_groups().IsJSFinalizationGroup());
+ DCHECK(finalization_group.next().IsUndefined(isolate()));
+ DCHECK(!finalization_group.scheduled_for_cleanup());
+ finalization_group.set_scheduled_for_cleanup(true);
+ finalization_group.set_next(dirty_js_finalization_groups());
gc_notify_updated_slot(
finalization_group,
finalization_group.RawField(JSFinalizationGroup::kNextOffset),
@@ -5683,10 +5682,10 @@
void Heap::AddKeepDuringJobTarget(Handle<JSReceiver> target) {
DCHECK(FLAG_harmony_weak_refs);
- DCHECK(weak_refs_keep_during_job()->IsUndefined() ||
- weak_refs_keep_during_job()->IsOrderedHashSet());
+ DCHECK(weak_refs_keep_during_job().IsUndefined() ||
+ weak_refs_keep_during_job().IsOrderedHashSet());
Handle<OrderedHashSet> table;
- if (weak_refs_keep_during_job()->IsUndefined(isolate())) {
+ if (weak_refs_keep_during_job().IsUndefined(isolate())) {
table = isolate()->factory()->NewOrderedHashSet();
} else {
table =
@@ -5746,17 +5745,17 @@
size_t Heap::NumberOfNativeContexts() {
int result = 0;
Object context = native_contexts_list();
- while (!context->IsUndefined(isolate())) {
+ while (!context.IsUndefined(isolate())) {
++result;
Context native_context = Context::cast(context);
- context = native_context->next_context_link();
+ context = native_context.next_context_link();
}
return result;
}
size_t Heap::NumberOfDetachedContexts() {
// The detached_contexts() array has two entries per detached context.
- return detached_contexts()->length() / 2;
+ return detached_contexts().length() / 2;
}
void VerifyPointersVisitor::VisitPointers(HeapObject host, ObjectSlot start,
@@ -5779,7 +5778,7 @@
void VerifyPointersVisitor::VerifyHeapObjectImpl(HeapObject heap_object) {
CHECK(IsValidHeapObject(heap_, heap_object));
- CHECK(heap_object->map()->IsMap());
+ CHECK(heap_object.map().IsMap());
}
template <typename TSlot>
@@ -5790,7 +5789,7 @@
if (object.GetHeapObject(&heap_object)) {
VerifyHeapObjectImpl(heap_object);
} else {
- CHECK(object->IsSmi() || object->IsCleared());
+ CHECK(object.IsSmi() || object.IsCleared());
}
}
}
@@ -5802,7 +5801,7 @@
// to one of objects in DATA_ONLY_VISITOR_ID_LIST. You can fix
// this by moving that object to POINTER_VISITOR_ID_LIST.
DCHECK_EQ(ObjectFields::kMaybePointers,
- Map::ObjectFieldsFrom(host->map()->visitor_id()));
+ Map::ObjectFieldsFrom(host.map().visitor_id()));
VerifyPointersImpl(start, end);
}
@@ -5819,7 +5818,7 @@
FullObjectSlot start,
FullObjectSlot end) {
for (FullObjectSlot current = start; current < end; ++current) {
- CHECK((*current)->IsSmi());
+ CHECK((*current).IsSmi());
}
}
@@ -5837,7 +5836,7 @@
// Since this function is used for debugging only, we do not place
// asserts here, but check everything explicitly.
if (map == ReadOnlyRoots(this).one_pointer_filler_map()) return false;
- InstanceType type = map->instance_type();
+ InstanceType type = map.instance_type();
MemoryChunk* chunk = MemoryChunk::FromHeapObject(obj);
AllocationSpace src = chunk->owner()->identity();
switch (src) {
@@ -5886,8 +5885,8 @@
}
Map Heap::GcSafeMapOfCodeSpaceObject(HeapObject object) {
- MapWord map_word = object->map_word();
- return map_word.IsForwardingAddress() ? map_word.ToForwardingAddress()->map()
+ MapWord map_word = object.map_word();
+ return map_word.IsForwardingAddress() ? map_word.ToForwardingAddress().map()
: map_word.ToMap();
}
@@ -5902,8 +5901,8 @@
Map map = GcSafeMapOfCodeSpaceObject(code);
DCHECK(map == ReadOnlyRoots(this).code_map());
if (InstructionStream::TryLookupCode(isolate(), addr) == code) return true;
- Address start = code->address();
- Address end = code->address() + code->SizeFromMap(map);
+ Address start = code.address();
+ Address end = code.address() + code.SizeFromMap(map);
return start <= addr && addr < end;
}
@@ -6109,7 +6108,7 @@
DCHECK(heap->incremental_marking()->IsMarking());
DescriptorArray descriptor_array =
DescriptorArray::cast(raw_descriptor_array);
- int16_t raw_marked = descriptor_array->raw_number_of_marked_descriptors();
+ int16_t raw_marked = descriptor_array.raw_number_of_marked_descriptors();
if (NumberOfMarkedDescriptors::decode(heap->mark_compact_collector()->epoch(),
raw_marked) <
number_of_own_descriptors) {
diff --git a/src/heap/heap.h b/src/heap/heap.h
index 15ec3b4..92662ec 100644
--- a/src/heap/heap.h
+++ b/src/heap/heap.h
@@ -161,12 +161,12 @@
: object_(object) {
// AllocationResults can't return Smis, which are used to represent
// failure and the space to retry in.
- CHECK(!object->IsSmi());
+ CHECK(!object.IsSmi());
}
AllocationResult() : object_(Smi::FromInt(NEW_SPACE)) {}
- inline bool IsRetry() { return object_->IsSmi(); }
+ inline bool IsRetry() { return object_.IsSmi(); }
inline HeapObject ToObjectChecked();
inline AllocationSpace RetrySpace();
@@ -2384,7 +2384,7 @@
template <typename T>
T ForwardingAddress(T heap_obj) {
- MapWord map_word = heap_obj->map_word();
+ MapWord map_word = heap_obj.map_word();
if (map_word.IsForwardingAddress()) {
return T::cast(map_word.ToForwardingAddress());
diff --git a/src/heap/incremental-marking.cc b/src/heap/incremental-marking.cc
index f12479e..944bbc0 100644
--- a/src/heap/incremental-marking.cc
+++ b/src/heap/incremental-marking.cc
@@ -122,7 +122,7 @@
MarkBlackAndVisitObjectDueToLayoutChange(from);
DCHECK(marking_state()->IsBlack(from));
// Mark the new address as black.
- if (from->address() + kTaggedSize == to->address()) {
+ if (from.address() + kTaggedSize == to.address()) {
// The old and the new markbits overlap. The |to| object has the
// grey color. To make it black, we need to set the second bit.
DCHECK(new_mark_bit.Get<kAtomicity>());
@@ -154,7 +154,7 @@
private:
void MarkObjectByPointer(FullObjectSlot p) {
Object obj = *p;
- if (!obj->IsHeapObject()) return;
+ if (!obj.IsHeapObject()) return;
heap_->incremental_marking()->WhiteToGreyAndPush(HeapObject::cast(obj));
}
@@ -424,8 +424,8 @@
// The map has aged. Do not retain this map.
return false;
}
- Object constructor = map->GetConstructor();
- if (!constructor->IsHeapObject() ||
+ Object constructor = map.GetConstructor();
+ if (!constructor.IsHeapObject() ||
marking_state()->IsWhite(HeapObject::cast(constructor))) {
// The constructor is dead, no new objects with this map can
// be created. Do not retain this map.
@@ -442,18 +442,18 @@
bool map_retaining_is_disabled = heap()->ShouldReduceMemory() ||
FLAG_retain_maps_for_n_gc == 0;
WeakArrayList retained_maps = heap()->retained_maps();
- int length = retained_maps->length();
+ int length = retained_maps.length();
// The number_of_disposed_maps separates maps in the retained_maps
// array that were created before and after context disposal.
// We do not age and retain disposed maps to avoid memory leaks.
int number_of_disposed_maps = heap()->number_of_disposed_maps_;
for (int i = 0; i < length; i += 2) {
- MaybeObject value = retained_maps->Get(i);
+ MaybeObject value = retained_maps.Get(i);
HeapObject map_heap_object;
if (!value->GetHeapObjectIfWeak(&map_heap_object)) {
continue;
}
- int age = retained_maps->Get(i + 1).ToSmi().value();
+ int age = retained_maps.Get(i + 1).ToSmi().value();
int new_age;
Map map = Map::cast(map_heap_object);
if (i >= number_of_disposed_maps && !map_retaining_is_disabled &&
@@ -461,8 +461,8 @@
if (ShouldRetainMap(map, age)) {
WhiteToGreyAndPush(map);
}
- Object prototype = map->prototype();
- if (age > 0 && prototype->IsHeapObject() &&
+ Object prototype = map.prototype();
+ if (age > 0 && prototype.IsHeapObject() &&
marking_state()->IsWhite(HeapObject::cast(prototype))) {
// The prototype is not marked, age the map.
new_age = age - 1;
@@ -476,7 +476,7 @@
}
// Compact the array and update the age.
if (new_age != age) {
- retained_maps->Set(i + 1, MaybeObject::FromSmi(Smi::FromInt(new_age)));
+ retained_maps.Set(i + 1, MaybeObject::FromSmi(Smi::FromInt(new_age)));
}
}
}
@@ -527,10 +527,10 @@
#endif
filler_map, minor_marking_state](
HeapObject obj, HeapObject* out) -> bool {
- DCHECK(obj->IsHeapObject());
+ DCHECK(obj.IsHeapObject());
// Only pointers to from space have to be updated.
if (Heap::InFromPage(obj)) {
- MapWord map_word = obj->map_word();
+ MapWord map_word = obj.map_word();
if (!map_word.IsForwardingAddress()) {
// There may be objects on the marking deque that do not exist anymore,
// e.g. left trimmed objects or objects from the root set (frames).
@@ -540,7 +540,7 @@
return false;
}
HeapObject dest = map_word.ToForwardingAddress();
- DCHECK_IMPLIES(marking_state()->IsWhite(obj), obj->IsFiller());
+ DCHECK_IMPLIES(marking_state()->IsWhite(obj), obj.IsFiller());
*out = dest;
return true;
} else if (Heap::InToPage(obj)) {
@@ -568,10 +568,10 @@
*out = obj;
return true;
}
- DCHECK_IMPLIES(marking_state()->IsWhite(obj), obj->IsFiller());
+ DCHECK_IMPLIES(marking_state()->IsWhite(obj), obj.IsFiller());
// Skip one word filler objects that appear on the
// stack when we perform in place array shift.
- if (obj->map() != filler_map) {
+ if (obj.map() != filler_map) {
*out = obj;
return true;
}
@@ -681,9 +681,9 @@
// 4. The object is materialized by the deoptimizer.
// 5. The object is a descriptor array marked black by
// the descriptor array marking barrier.
- DCHECK(obj->IsHashTable() || obj->IsPropertyArray() ||
- obj->IsFixedArray() || obj->IsContext() || obj->IsJSObject() ||
- obj->IsString() || obj->IsDescriptorArray());
+ DCHECK(obj.IsHashTable() || obj.IsPropertyArray() || obj.IsFixedArray() ||
+ obj.IsContext() || obj.IsJSObject() || obj.IsString() ||
+ obj.IsDescriptorArray());
}
DCHECK(marking_state()->IsBlack(obj));
WhiteToGreyAndPush(map);
@@ -704,7 +704,7 @@
DCHECK_IMPLIES(MemoryChunk::FromHeapObject(obj)->IsFlagSet(
MemoryChunk::HAS_PROGRESS_BAR),
0u == MemoryChunk::FromHeapObject(obj)->ProgressBar());
- Map map = obj->map();
+ Map map = obj.map();
WhiteToGreyAndPush(map);
IncrementalMarkingMarkingVisitor visitor(heap()->mark_compact_collector(),
marking_state());
@@ -731,20 +731,20 @@
if (obj.is_null()) break;
// Left trimming may result in grey or black filler objects on the marking
// worklist. Ignore these objects.
- if (obj->IsFiller()) {
+ if (obj.IsFiller()) {
// Due to copying mark bits and the fact that grey and black have their
// first bit set, one word fillers are always black.
DCHECK_IMPLIES(
- obj->map() == ReadOnlyRoots(heap()).one_pointer_filler_map(),
+ obj.map() == ReadOnlyRoots(heap()).one_pointer_filler_map(),
marking_state()->IsBlack(obj));
// Other fillers may be black or grey depending on the color of the object
// that was trimmed.
DCHECK_IMPLIES(
- obj->map() != ReadOnlyRoots(heap()).one_pointer_filler_map(),
+ obj.map() != ReadOnlyRoots(heap()).one_pointer_filler_map(),
marking_state()->IsBlackOrGrey(obj));
continue;
}
- bytes_processed += VisitObject(obj->map(), obj);
+ bytes_processed += VisitObject(obj.map(), obj);
}
return bytes_processed;
}
diff --git a/src/heap/invalidated-slots-inl.h b/src/heap/invalidated-slots-inl.h
index 583d443..b7a9921 100644
--- a/src/heap/invalidated-slots-inl.h
+++ b/src/heap/invalidated-slots-inl.h
@@ -28,8 +28,8 @@
++iterator_;
if (iterator_ != iterator_end_) {
// Invalidated ranges must not overlap.
- DCHECK_LE(invalidated_end_, iterator_->first->address());
- invalidated_start_ = iterator_->first->address();
+ DCHECK_LE(invalidated_end_, iterator_->first.address());
+ invalidated_start_ = iterator_->first.address();
invalidated_end_ = invalidated_start_ + iterator_->second;
invalidated_object_ = HeapObject();
invalidated_object_size_ = 0;
@@ -47,9 +47,9 @@
// Ask the object if the slot is valid.
if (invalidated_object_.is_null()) {
invalidated_object_ = HeapObject::FromAddress(invalidated_start_);
- DCHECK(!invalidated_object_->IsFiller());
+ DCHECK(!invalidated_object_.IsFiller());
invalidated_object_size_ =
- invalidated_object_->SizeFromMap(invalidated_object_->map());
+ invalidated_object_.SizeFromMap(invalidated_object_.map());
}
int offset = static_cast<int>(slot - invalidated_start_);
DCHECK_GT(offset, 0);
@@ -59,7 +59,7 @@
if (offset >= invalidated_object_size_) {
return slots_in_free_space_are_valid_;
}
- return invalidated_object_->IsValidSlot(invalidated_object_->map(), offset);
+ return invalidated_object_.IsValidSlot(invalidated_object_.map(), offset);
}
} // namespace internal
diff --git a/src/heap/invalidated-slots.cc b/src/heap/invalidated-slots.cc
index a5b8354..368d189 100644
--- a/src/heap/invalidated-slots.cc
+++ b/src/heap/invalidated-slots.cc
@@ -24,7 +24,7 @@
iterator_end_ = invalidated_slots->end();
sentinel_ = chunk->area_end();
if (iterator_ != iterator_end_) {
- invalidated_start_ = iterator_->first->address();
+ invalidated_start_ = iterator_->first.address();
invalidated_end_ = invalidated_start_ + iterator_->second;
} else {
invalidated_start_ = sentinel_;
diff --git a/src/heap/local-allocator-inl.h b/src/heap/local-allocator-inl.h
index 8f1081d..71dcd98 100644
--- a/src/heap/local-allocator-inl.h
+++ b/src/heap/local-allocator-inl.h
@@ -47,7 +47,7 @@
void LocalAllocator::FreeLastInNewSpace(HeapObject object, int object_size) {
if (!new_space_lab_.TryFreeLast(object, object_size)) {
// We couldn't free the last object so we have to write a proper filler.
- heap_->CreateFillerObjectAt(object->address(), object_size,
+ heap_->CreateFillerObjectAt(object.address(), object_size,
ClearRecordedSlots::kNo);
}
}
@@ -55,7 +55,7 @@
void LocalAllocator::FreeLastInOldSpace(HeapObject object, int object_size) {
if (!compaction_spaces_.Get(OLD_SPACE)->TryFreeLast(object, object_size)) {
// We couldn't free the last object so we have to write a proper filler.
- heap_->CreateFillerObjectAt(object->address(), object_size,
+ heap_->CreateFillerObjectAt(object.address(), object_size,
ClearRecordedSlots::kNo);
}
}
diff --git a/src/heap/mark-compact-inl.h b/src/heap/mark-compact-inl.h
index 0bd2730..cf6d96c 100644
--- a/src/heap/mark-compact-inl.h
+++ b/src/heap/mark-compact-inl.h
@@ -24,9 +24,9 @@
template <typename ConcreteState, AccessMode access_mode>
bool MarkingStateBase<ConcreteState, access_mode>::GreyToBlack(HeapObject obj) {
MemoryChunk* p = MemoryChunk::FromHeapObject(obj);
- MarkBit markbit = MarkBitFrom(p, obj->address());
+ MarkBit markbit = MarkBitFrom(p, obj.address());
if (!Marking::GreyToBlack<access_mode>(markbit)) return false;
- static_cast<ConcreteState*>(this)->IncrementLiveBytes(p, obj->Size());
+ static_cast<ConcreteState*>(this)->IncrementLiveBytes(p, obj.Size());
return true;
}
@@ -60,7 +60,7 @@
BytecodeArray::BodyDescriptor::IterateBody(map, array, size, this);
if (!heap_->is_current_gc_forced()) {
- array->MakeOlder();
+ array.MakeOlder();
}
return size;
}
@@ -71,9 +71,8 @@
MarkingState>::VisitDescriptorArray(Map map,
DescriptorArray array) {
int size = DescriptorArray::BodyDescriptor::SizeOf(map, array);
- VisitPointers(array, array->GetFirstPointerSlot(),
- array->GetDescriptorSlot(0));
- VisitDescriptors(array, array->number_of_descriptors());
+ VisitPointers(array, array.GetFirstPointerSlot(), array.GetDescriptorSlot(0));
+ VisitDescriptors(array, array.number_of_descriptors());
return size;
}
@@ -86,7 +85,7 @@
// If the SharedFunctionInfo has old bytecode, mark it as flushable,
// otherwise visit the function data field strongly.
- if (shared_info->ShouldFlushBytecode(Heap::GetBytecodeFlushMode())) {
+ if (shared_info.ShouldFlushBytecode(Heap::GetBytecodeFlushMode())) {
collector_->AddBytecodeFlushingCandidate(shared_info);
} else {
VisitPointer(shared_info,
@@ -102,7 +101,7 @@
int size = Parent::VisitJSFunction(map, object);
// Check if the JSFunction needs reset due to bytecode being flushed.
- if (FLAG_flush_bytecode && object->NeedsResetDueToFlushedBytecode()) {
+ if (FLAG_flush_bytecode && object.NeedsResetDueToFlushedBytecode()) {
collector_->AddFlushedJSFunction(object);
}
@@ -168,14 +167,14 @@
VisitEphemeronHashTable(Map map, EphemeronHashTable table) {
collector_->AddEphemeronHashTable(table);
- for (int i = 0; i < table->Capacity(); i++) {
+ for (int i = 0; i < table.Capacity(); i++) {
ObjectSlot key_slot =
- table->RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(i));
- HeapObject key = HeapObject::cast(table->KeyAt(i));
+ table.RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(i));
+ HeapObject key = HeapObject::cast(table.KeyAt(i));
collector_->RecordSlot(table, key_slot, key);
ObjectSlot value_slot =
- table->RawFieldOfElementAt(EphemeronHashTable::EntryToValueIndex(i));
+ table.RawFieldOfElementAt(EphemeronHashTable::EntryToValueIndex(i));
if (marking_state()->IsBlackOrGrey(key)) {
VisitPointer(table, value_slot);
@@ -183,7 +182,7 @@
} else {
Object value_obj = *value_slot;
- if (value_obj->IsHeapObject()) {
+ if (value_obj.IsHeapObject()) {
HeapObject value = HeapObject::cast(value_obj);
collector_->RecordSlot(table, value_slot, value);
@@ -196,7 +195,7 @@
}
}
- return table->SizeFromMap(map);
+ return table.SizeFromMap(map);
}
template <FixedArrayVisitationMode fixed_array_mode,
@@ -204,7 +203,7 @@
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitMap(Map meta_map, Map map) {
int size = Map::BodyDescriptor::SizeOf(meta_map, map);
- if (map->CanTransition()) {
+ if (map.CanTransition()) {
// Maps that can transition share their descriptor arrays and require
// special visiting logic to avoid memory leaks.
// Since descriptor arrays are potentially shared, ensure that only the
@@ -212,12 +211,11 @@
// non-empty descriptor array is marked, its header is also visited. The
// slot holding the descriptor array will be implicitly recorded when the
// pointer fields of this map are visited.
- DescriptorArray descriptors = map->instance_descriptors();
+ DescriptorArray descriptors = map.instance_descriptors();
MarkDescriptorArrayBlack(map, descriptors);
- int number_of_own_descriptors = map->NumberOfOwnDescriptors();
+ int number_of_own_descriptors = map.NumberOfOwnDescriptors();
if (number_of_own_descriptors) {
- DCHECK_LE(number_of_own_descriptors,
- descriptors->number_of_descriptors());
+ DCHECK_LE(number_of_own_descriptors, descriptors.number_of_descriptors());
VisitDescriptors(descriptors, number_of_own_descriptors);
}
// Mark the pointer fields of the Map. Since the transitions array has
@@ -243,8 +241,8 @@
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitJSWeakRef(Map map, JSWeakRef weak_ref) {
- if (weak_ref->target()->IsHeapObject()) {
- HeapObject target = HeapObject::cast(weak_ref->target());
+ if (weak_ref.target().IsHeapObject()) {
+ HeapObject target = HeapObject::cast(weak_ref.target());
if (marking_state()->IsBlackOrGrey(target)) {
// Record the slot inside the JSWeakRef, since the IterateBody below
// won't visit it.
@@ -265,8 +263,8 @@
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitWeakCell(Map map, WeakCell weak_cell) {
- if (weak_cell->target()->IsHeapObject()) {
- HeapObject target = HeapObject::cast(weak_cell->target());
+ if (weak_cell.target().IsHeapObject()) {
+ HeapObject target = HeapObject::cast(weak_cell.target());
if (marking_state()->IsBlackOrGrey(target)) {
// Record the slot inside the WeakCell, since the IterateBody below
// won't visit it.
@@ -335,7 +333,7 @@
HeapObject object = HeapObject::cast(rinfo->target_object());
collector_->RecordRelocSlot(host, rinfo, object);
if (!marking_state()->IsBlackOrGrey(object)) {
- if (host->IsWeakObject(object)) {
+ if (host.IsWeakObject(object)) {
collector_->AddWeakObjectInCode(object, host);
} else {
MarkObject(host, object);
@@ -368,8 +366,8 @@
}
}
if (marking_state()->GreyToBlack(descriptors)) {
- VisitPointers(descriptors, descriptors->GetFirstPointerSlot(),
- descriptors->GetDescriptorSlot(0));
+ VisitPointers(descriptors, descriptors.GetFirstPointerSlot(),
+ descriptors.GetDescriptorSlot(0));
}
DCHECK(marking_state()->IsBlack(descriptors));
}
@@ -429,12 +427,12 @@
// descriptor arrays.
DCHECK(marking_state()->IsBlack(descriptors));
int16_t new_marked = static_cast<int16_t>(number_of_own_descriptors);
- int16_t old_marked = descriptors->UpdateNumberOfMarkedDescriptors(
+ int16_t old_marked = descriptors.UpdateNumberOfMarkedDescriptors(
mark_compact_epoch_, new_marked);
if (old_marked < new_marked) {
VisitPointers(descriptors,
- MaybeObjectSlot(descriptors->GetDescriptorSlot(old_marked)),
- MaybeObjectSlot(descriptors->GetDescriptorSlot(new_marked)));
+ MaybeObjectSlot(descriptors.GetDescriptorSlot(old_marked)),
+ MaybeObjectSlot(descriptors.GetDescriptorSlot(new_marked)));
}
}
@@ -567,7 +565,7 @@
// last word is a one word filler, we are not allowed to advance. In
// that case we can return immediately.
if (!it_.Advance()) {
- DCHECK(HeapObject::FromAddress(addr)->map() == one_word_filler_map_);
+ DCHECK(HeapObject::FromAddress(addr).map() == one_word_filler_map_);
current_object_ = HeapObject();
return;
}
@@ -584,7 +582,7 @@
// object ends.
HeapObject black_object = HeapObject::FromAddress(addr);
map = Map::cast(ObjectSlot(addr).Acquire_Load());
- size = black_object->SizeFromMap(map);
+ size = black_object.SizeFromMap(map);
Address end = addr + size - kTaggedSize;
// One word filler objects do not borrow the second mark bit. We have
// to jump over the advancing and clearing part.
@@ -612,7 +610,7 @@
} else if ((mode == kGreyObjects || mode == kAllLiveObjects)) {
map = Map::cast(ObjectSlot(addr).Acquire_Load());
object = HeapObject::FromAddress(addr);
- size = object->SizeFromMap(map);
+ size = object.SizeFromMap(map);
}
// We found a live object.
diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc
index 0b31b7b..4649c25 100644
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -112,11 +112,11 @@
object = HeapObject::FromAddress(current);
// One word fillers at the end of a black area can be grey.
if (IsBlackOrGrey(object) &&
- object->map() != ReadOnlyRoots(heap_).one_pointer_filler_map()) {
+ object.map() != ReadOnlyRoots(heap_).one_pointer_filler_map()) {
CHECK(IsMarked(object));
CHECK(current >= next_object_must_be_here_or_later);
- object->Iterate(this);
- next_object_must_be_here_or_later = current + object->Size();
+ object.Iterate(this);
+ next_object_must_be_here_or_later = current + object.Size();
// The object is either part of a black area of black allocation or a
// regular black object
CHECK(
@@ -159,7 +159,7 @@
LargeObjectIterator it(lo_space);
for (HeapObject obj = it.Next(); !obj.is_null(); obj = it.Next()) {
if (IsBlackOrGrey(obj)) {
- obj->Iterate(this);
+ obj.Iterate(this);
}
}
}
@@ -215,7 +215,7 @@
void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) override {
DCHECK(RelocInfo::IsEmbeddedObjectMode(rinfo->rmode()));
- if (!host->IsWeakObject(rinfo->target_object())) {
+ if (!host.IsWeakObject(rinfo->target_object())) {
HeapObject object = rinfo->target_object();
VerifyHeapObjectImpl(object);
}
@@ -284,8 +284,8 @@
Address current = start;
while (current < end) {
HeapObject object = HeapObject::FromAddress(current);
- if (!object->IsFiller()) object->Iterate(this);
- current += object->Size();
+ if (!object.IsFiller()) object.Iterate(this);
+ current += object.Size();
}
}
@@ -905,7 +905,7 @@
private:
V8_INLINE void MarkObjectByPointer(Root root, FullObjectSlot p) {
- if (!(*p)->IsHeapObject()) return;
+ if (!(*p).IsHeapObject()) return;
collector_->MarkRootObject(root, HeapObject::cast(*p));
}
@@ -956,7 +956,7 @@
private:
V8_INLINE void MarkObject(HeapObject host, Object object) {
- if (!object->IsHeapObject()) return;
+ if (!object.IsHeapObject()) return;
collector_->MarkObject(host, HeapObject::cast(object));
}
@@ -976,7 +976,7 @@
heap_->mark_compact_collector()->non_atomic_marking_state();
for (ObjectSlot p = start; p < end; ++p) {
Object o = *p;
- if (o->IsHeapObject()) {
+ if (o.IsHeapObject()) {
HeapObject heap_object = HeapObject::cast(o);
if (marking_state->IsWhite(heap_object)) {
pointers_removed_++;
@@ -1024,14 +1024,14 @@
Object the_hole = ReadOnlyRoots(heap_).the_hole_value();
for (FullObjectSlot p = start; p < end; ++p) {
Object o = *p;
- if (o->IsHeapObject()) {
+ if (o.IsHeapObject()) {
HeapObject heap_object = HeapObject::cast(o);
if (marking_state->IsWhite(heap_object)) {
- if (o->IsExternalString()) {
+ if (o.IsExternalString()) {
heap_->FinalizeExternalString(String::cast(o));
} else {
// The original external string may have been internalized.
- DCHECK(o->IsThinString());
+ DCHECK(o.IsThinString());
}
// Set the entry to the_hole_value (as deleted).
p.store(the_hole);
@@ -1057,18 +1057,18 @@
DCHECK(!marking_state_->IsGrey(heap_object));
if (marking_state_->IsBlack(heap_object)) {
return object;
- } else if (object->IsAllocationSite() &&
- !(AllocationSite::cast(object)->IsZombie())) {
+ } else if (object.IsAllocationSite() &&
+ !(AllocationSite::cast(object).IsZombie())) {
// "dead" AllocationSites need to live long enough for a traversal of new
// space. These sites get a one-time reprieve.
Object nested = object;
- while (nested->IsAllocationSite()) {
+ while (nested.IsAllocationSite()) {
AllocationSite current_site = AllocationSite::cast(nested);
// MarkZombie will override the nested_site, read it first before
// marking
- nested = current_site->nested_site();
- current_site->MarkZombie();
+ nested = current_site.nested_site();
+ current_site.MarkZombie();
marking_state_->WhiteToBlack(current_site);
}
@@ -1117,7 +1117,7 @@
inline void VisitEphemeron(HeapObject host, int index, ObjectSlot key,
ObjectSlot value) override {
- DCHECK(host->IsEphemeronHashTable());
+ DCHECK(host.IsEphemeronHashTable());
DCHECK(!Heap::InYoungGeneration(host));
VisitPointer(host, value);
@@ -1196,7 +1196,7 @@
inline void Move(AllocationSpace dest, HeapObject src, HeapObject dst,
int size) final {
- if (dest == CODE_SPACE || (dest == OLD_SPACE && dst->IsBytecodeArray())) {
+ if (dest == CODE_SPACE || (dest == OLD_SPACE && dst.IsBytecodeArray())) {
PROFILE(heap_->isolate(),
CodeMoveEvent(AbstractCode::cast(src), AbstractCode::cast(dst)));
}
@@ -1227,9 +1227,9 @@
template <MigrationMode mode>
static void RawMigrateObject(EvacuateVisitorBase* base, HeapObject dst,
HeapObject src, int size, AllocationSpace dest) {
- Address dst_addr = dst->address();
- Address src_addr = src->address();
- DCHECK(base->heap_->AllowedToBeMigrated(src->map(), src, dest));
+ Address dst_addr = dst.address();
+ Address src_addr = src.address();
+ DCHECK(base->heap_->AllowedToBeMigrated(src.map(), src, dest));
DCHECK_NE(dest, LO_SPACE);
DCHECK_NE(dest, CODE_LO_SPACE);
if (dest == OLD_SPACE) {
@@ -1238,14 +1238,14 @@
base->heap_->CopyBlock(dst_addr, src_addr, size);
if (mode != MigrationMode::kFast)
base->ExecuteMigrationObservers(dest, src, dst, size);
- dst->IterateBodyFast(dst->map(), size, base->record_visitor_);
+ dst.IterateBodyFast(dst.map(), size, base->record_visitor_);
} else if (dest == CODE_SPACE) {
DCHECK_CODEOBJECT_SIZE(size, base->heap_->code_space());
base->heap_->CopyBlock(dst_addr, src_addr, size);
- Code::cast(dst)->Relocate(dst_addr - src_addr);
+ Code::cast(dst).Relocate(dst_addr - src_addr);
if (mode != MigrationMode::kFast)
base->ExecuteMigrationObservers(dest, src, dst, size);
- dst->IterateBodyFast(dst->map(), size, base->record_visitor_);
+ dst.IterateBodyFast(dst.map(), size, base->record_visitor_);
} else {
DCHECK_OBJECT_SIZE(size);
DCHECK(dest == NEW_SPACE);
@@ -1253,7 +1253,7 @@
if (mode != MigrationMode::kFast)
base->ExecuteMigrationObservers(dest, src, dst, size);
}
- src->set_map_word(MapWord::FromForwardingAddress(dst));
+ src.set_map_word(MapWord::FromForwardingAddress(dst));
}
EvacuateVisitorBase(Heap* heap, LocalAllocator* local_allocator,
@@ -1269,8 +1269,7 @@
#ifdef VERIFY_HEAP
if (AbortCompactionForTesting(object)) return false;
#endif // VERIFY_HEAP
- AllocationAlignment alignment =
- HeapObject::RequiredAlignment(object->map());
+ AllocationAlignment alignment = HeapObject::RequiredAlignment(object.map());
AllocationResult allocation =
local_allocator_->Allocate(target_space, size, alignment);
if (allocation.To(target_object)) {
@@ -1301,7 +1300,7 @@
if (FLAG_stress_compaction) {
const uintptr_t mask = static_cast<uintptr_t>(FLAG_random_seed) &
kPageAlignmentMask & ~kObjectAlignmentMask;
- if ((object->ptr() & kPageAlignmentMask) == mask) {
+ if ((object.ptr() & kPageAlignmentMask) == mask) {
Page* page = Page::FromHeapObject(object);
if (page->IsFlagSet(Page::COMPACTION_WAS_ABORTED_FOR_TESTING)) {
page->ClearFlag(Page::COMPACTION_WAS_ABORTED_FOR_TESTING);
@@ -1338,12 +1337,12 @@
inline bool Visit(HeapObject object, int size) override {
if (TryEvacuateWithoutCopy(object)) return true;
HeapObject target_object;
- if (heap_->ShouldBePromoted(object->address()) &&
+ if (heap_->ShouldBePromoted(object.address()) &&
TryEvacuateObject(OLD_SPACE, object, size, &target_object)) {
promoted_size_ += size;
return true;
}
- heap_->UpdateAllocationSite(object->map(), object,
+ heap_->UpdateAllocationSite(object.map(), object,
local_pretenuring_feedback_);
HeapObject target;
AllocationSpace space = AllocateTargetObject(object, size, &target);
@@ -1359,13 +1358,13 @@
inline bool TryEvacuateWithoutCopy(HeapObject object) {
if (is_incremental_marking_) return false;
- Map map = object->map();
+ Map map = object.map();
// Some objects can be evacuated without creating a copy.
- if (map->visitor_id() == kVisitThinString) {
- HeapObject actual = ThinString::cast(object)->unchecked_actual();
+ if (map.visitor_id() == kVisitThinString) {
+ HeapObject actual = ThinString::cast(object).unchecked_actual();
if (MarkCompactCollector::IsOnEvacuationCandidate(actual)) return false;
- object->map_slot().Relaxed_Store(
+ object.map_slot().Relaxed_Store(
MapWord::FromForwardingAddress(actual).ToMap());
return true;
}
@@ -1377,7 +1376,7 @@
inline AllocationSpace AllocateTargetObject(HeapObject old_object, int size,
HeapObject* target_object) {
AllocationAlignment alignment =
- HeapObject::RequiredAlignment(old_object->map());
+ HeapObject::RequiredAlignment(old_object.map());
AllocationSpace space_allocated_in = NEW_SPACE;
AllocationResult allocation =
local_allocator_->Allocate(NEW_SPACE, size, alignment);
@@ -1438,10 +1437,10 @@
inline bool Visit(HeapObject object, int size) override {
if (mode == NEW_TO_NEW) {
- heap_->UpdateAllocationSite(object->map(), object,
+ heap_->UpdateAllocationSite(object.map(), object,
local_pretenuring_feedback_);
} else if (mode == NEW_TO_OLD) {
- object->IterateBodyFast(record_visitor_);
+ object.IterateBodyFast(record_visitor_);
}
return true;
}
@@ -1466,7 +1465,7 @@
HeapObject target_object;
if (TryEvacuateObject(Page::FromHeapObject(object)->owner()->identity(),
object, size, &target_object)) {
- DCHECK(object->map_word().IsForwardingAddress());
+ DCHECK(object.map_word().IsForwardingAddress());
return true;
}
return false;
@@ -1480,7 +1479,7 @@
inline bool Visit(HeapObject object, int size) override {
RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector(),
&heap_->ephemeron_remembered_set_);
- object->IterateBodyFast(&visitor);
+ object.IterateBodyFast(&visitor);
return true;
}
@@ -1490,7 +1489,7 @@
bool MarkCompactCollector::IsUnmarkedHeapObject(Heap* heap, FullObjectSlot p) {
Object o = *p;
- if (!o->IsHeapObject()) return false;
+ if (!o.IsHeapObject()) return false;
HeapObject heap_object = HeapObject::cast(o);
return heap->mark_compact_collector()->non_atomic_marking_state()->IsWhite(
heap_object);
@@ -1502,7 +1501,7 @@
// Mark the string table itself.
if (marking_state()->WhiteToBlack(string_table)) {
// Explicitly mark the prefix.
- string_table->IteratePrefix(custom_root_body_visitor);
+ string_table.IteratePrefix(custom_root_body_visitor);
}
}
@@ -1708,20 +1707,20 @@
while (!(object = marking_worklist()->Pop()).is_null()) {
// Left trimming may result in grey or black filler objects on the marking
// worklist. Ignore these objects.
- if (object->IsFiller()) {
+ if (object.IsFiller()) {
// Due to copying mark bits and the fact that grey and black have their
// first bit set, one word fillers are always black.
DCHECK_IMPLIES(
- object->map() == ReadOnlyRoots(heap()).one_pointer_filler_map(),
+ object.map() == ReadOnlyRoots(heap()).one_pointer_filler_map(),
marking_state()->IsBlack(object));
// Other fillers may be black or grey depending on the color of the object
// that was trimmed.
DCHECK_IMPLIES(
- object->map() != ReadOnlyRoots(heap()).one_pointer_filler_map(),
+ object.map() != ReadOnlyRoots(heap()).one_pointer_filler_map(),
marking_state()->IsBlackOrGrey(object));
continue;
}
- DCHECK(object->IsHeapObject());
+ DCHECK(object.IsHeapObject());
DCHECK(heap()->Contains(object));
DCHECK(!(marking_state()->IsWhite(object)));
marking_state()->GreyToBlack(object);
@@ -1729,7 +1728,7 @@
kTrackNewlyDiscoveredObjects) {
AddNewlyDiscovered(object);
}
- Map map = object->map();
+ Map map = object.map();
MarkObject(object, map);
visitor.Visit(map, object);
}
@@ -1770,8 +1769,8 @@
}
if (it.frame()->type() == StackFrame::OPTIMIZED) {
Code code = it.frame()->LookupCode();
- if (!code->CanDeoptAt(it.frame()->pc())) {
- Code::BodyDescriptor::IterateBody(code->map(), code, visitor);
+ if (!code.CanDeoptAt(it.frame()->pc())) {
+ Code::BodyDescriptor::IterateBody(code.map(), code, visitor);
}
return;
}
@@ -1937,8 +1936,8 @@
// table is marked.
StringTable string_table = heap()->string_table();
InternalizedStringTableCleaner internalized_visitor(heap(), string_table);
- string_table->IterateElements(&internalized_visitor);
- string_table->ElementsRemoved(internalized_visitor.PointersRemoved());
+ string_table.IterateElements(&internalized_visitor);
+ string_table.ElementsRemoved(internalized_visitor.PointersRemoved());
ExternalStringTableCleaner external_visitor(heap());
heap()->external_string_table_.IterateAll(&external_visitor);
@@ -1994,21 +1993,21 @@
HeapObject object = weak_object_in_code.first;
Code code = weak_object_in_code.second;
if (!non_atomic_marking_state()->IsBlackOrGrey(object) &&
- !code->embedded_objects_cleared()) {
- if (!code->marked_for_deoptimization()) {
- code->SetMarkedForDeoptimization("weak objects");
+ !code.embedded_objects_cleared()) {
+ if (!code.marked_for_deoptimization()) {
+ code.SetMarkedForDeoptimization("weak objects");
have_code_to_deoptimize_ = true;
}
- code->ClearEmbeddedObjects(heap_);
- DCHECK(code->embedded_objects_cleared());
+ code.ClearEmbeddedObjects(heap_);
+ DCHECK(code.embedded_objects_cleared());
}
}
}
void MarkCompactCollector::ClearPotentialSimpleMapTransition(Map dead_target) {
DCHECK(non_atomic_marking_state()->IsWhite(dead_target));
- Object potential_parent = dead_target->constructor_or_backpointer();
- if (potential_parent->IsMap()) {
+ Object potential_parent = dead_target.constructor_or_backpointer();
+ if (potential_parent.IsMap()) {
Map parent = Map::cast(potential_parent);
DisallowHeapAllocation no_gc_obviously;
if (non_atomic_marking_state()->IsBlackOrGrey(parent) &&
@@ -2021,29 +2020,29 @@
void MarkCompactCollector::ClearPotentialSimpleMapTransition(Map map,
Map dead_target) {
- DCHECK(!map->is_prototype_map());
- DCHECK(!dead_target->is_prototype_map());
- DCHECK_EQ(map->raw_transitions(), HeapObjectReference::Weak(dead_target));
+ DCHECK(!map.is_prototype_map());
+ DCHECK(!dead_target.is_prototype_map());
+ DCHECK_EQ(map.raw_transitions(), HeapObjectReference::Weak(dead_target));
// Take ownership of the descriptor array.
- int number_of_own_descriptors = map->NumberOfOwnDescriptors();
- DescriptorArray descriptors = map->instance_descriptors();
- if (descriptors == dead_target->instance_descriptors() &&
+ int number_of_own_descriptors = map.NumberOfOwnDescriptors();
+ DescriptorArray descriptors = map.instance_descriptors();
+ if (descriptors == dead_target.instance_descriptors() &&
number_of_own_descriptors > 0) {
TrimDescriptorArray(map, descriptors);
- DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
+ DCHECK(descriptors.number_of_descriptors() == number_of_own_descriptors);
}
}
void MarkCompactCollector::FlushBytecodeFromSFI(
SharedFunctionInfo shared_info) {
- DCHECK(shared_info->HasBytecodeArray());
+ DCHECK(shared_info.HasBytecodeArray());
// Retain objects required for uncompiled data.
- String inferred_name = shared_info->inferred_name();
- int start_position = shared_info->StartPosition();
- int end_position = shared_info->EndPosition();
+ String inferred_name = shared_info.inferred_name();
+ int start_position = shared_info.StartPosition();
+ int end_position = shared_info.EndPosition();
- shared_info->DiscardCompiledMetadata(
+ shared_info.DiscardCompiledMetadata(
isolate(), [](HeapObject object, ObjectSlot slot, HeapObject target) {
RecordSlot(object, slot, target);
});
@@ -2054,9 +2053,9 @@
UncompiledDataWithoutPreparseData::kSize);
// Replace bytecode array with an uncompiled data array.
- HeapObject compiled_data = shared_info->GetBytecodeArray();
- Address compiled_data_start = compiled_data->address();
- int compiled_data_size = compiled_data->Size();
+ HeapObject compiled_data = shared_info.GetBytecodeArray();
+ Address compiled_data_start = compiled_data.address();
+ int compiled_data_size = compiled_data.Size();
MemoryChunk* chunk = MemoryChunk::FromAddress(compiled_data_start);
// Clear any recorded slots for the compiled data as being invalid.
@@ -2069,14 +2068,14 @@
// Swap the map, using set_map_after_allocation to avoid verify heap checks
// which are not necessary since we are doing this during the GC atomic pause.
- compiled_data->set_map_after_allocation(
+ compiled_data.set_map_after_allocation(
ReadOnlyRoots(heap()).uncompiled_data_without_preparse_data_map(),
SKIP_WRITE_BARRIER);
// Create a filler object for any left over space in the bytecode array.
if (!heap()->IsLargeObject(compiled_data)) {
heap()->CreateFillerObjectAt(
- compiled_data->address() + UncompiledDataWithoutPreparseData::kSize,
+ compiled_data.address() + UncompiledDataWithoutPreparseData::kSize,
compiled_data_size - UncompiledDataWithoutPreparseData::kSize,
ClearRecordedSlots::kNo);
}
@@ -2097,8 +2096,8 @@
// Use the raw function data setter to avoid validity checks, since we're
// performing the unusual task of decompiling.
- shared_info->set_function_data(uncompiled_data);
- DCHECK(!shared_info->is_compiled());
+ shared_info.set_function_data(uncompiled_data);
+ DCHECK(!shared_info.is_compiled());
}
void MarkCompactCollector::ClearOldBytecodeCandidates() {
@@ -2110,7 +2109,7 @@
// If the BytecodeArray is dead, flush it, which will replace the field with
// an uncompiled data object.
if (!non_atomic_marking_state()->IsBlackOrGrey(
- flushing_candidate->GetBytecodeArray())) {
+ flushing_candidate.GetBytecodeArray())) {
FlushBytecodeFromSFI(flushing_candidate);
}
@@ -2127,26 +2126,25 @@
JSFunction flushed_js_function;
while (weak_objects_.flushed_js_functions.Pop(kMainThread,
&flushed_js_function)) {
- flushed_js_function->ResetIfBytecodeFlushed();
+ flushed_js_function.ResetIfBytecodeFlushed();
}
}
void MarkCompactCollector::ClearFullMapTransitions() {
TransitionArray array;
while (weak_objects_.transition_arrays.Pop(kMainThread, &array)) {
- int num_transitions = array->number_of_entries();
+ int num_transitions = array.number_of_entries();
if (num_transitions > 0) {
Map map;
// The array might contain "undefined" elements because it's not yet
// filled. Allow it.
- if (array->GetTargetIfExists(0, isolate(), &map)) {
+ if (array.GetTargetIfExists(0, isolate(), &map)) {
DCHECK(!map.is_null()); // Weak pointers aren't cleared yet.
- Map parent = Map::cast(map->constructor_or_backpointer());
+ Map parent = Map::cast(map.constructor_or_backpointer());
bool parent_is_alive =
non_atomic_marking_state()->IsBlackOrGrey(parent);
- DescriptorArray descriptors = parent_is_alive
- ? parent->instance_descriptors()
- : DescriptorArray();
+ DescriptorArray descriptors =
+ parent_is_alive ? parent.instance_descriptors() : DescriptorArray();
bool descriptors_owner_died =
CompactTransitionArray(parent, array, descriptors);
if (descriptors_owner_died) {
@@ -2160,30 +2158,30 @@
bool MarkCompactCollector::CompactTransitionArray(Map map,
TransitionArray transitions,
DescriptorArray descriptors) {
- DCHECK(!map->is_prototype_map());
- int num_transitions = transitions->number_of_entries();
+ DCHECK(!map.is_prototype_map());
+ int num_transitions = transitions.number_of_entries();
bool descriptors_owner_died = false;
int transition_index = 0;
// Compact all live transitions to the left.
for (int i = 0; i < num_transitions; ++i) {
- Map target = transitions->GetTarget(i);
- DCHECK_EQ(target->constructor_or_backpointer(), map);
+ Map target = transitions.GetTarget(i);
+ DCHECK_EQ(target.constructor_or_backpointer(), map);
if (non_atomic_marking_state()->IsWhite(target)) {
if (!descriptors.is_null() &&
- target->instance_descriptors() == descriptors) {
- DCHECK(!target->is_prototype_map());
+ target.instance_descriptors() == descriptors) {
+ DCHECK(!target.is_prototype_map());
descriptors_owner_died = true;
}
} else {
if (i != transition_index) {
- Name key = transitions->GetKey(i);
- transitions->SetKey(transition_index, key);
- HeapObjectSlot key_slot = transitions->GetKeySlot(transition_index);
+ Name key = transitions.GetKey(i);
+ transitions.SetKey(transition_index, key);
+ HeapObjectSlot key_slot = transitions.GetKeySlot(transition_index);
RecordSlot(transitions, key_slot, key);
- MaybeObject raw_target = transitions->GetRawTarget(i);
- transitions->SetRawTarget(transition_index, raw_target);
+ MaybeObject raw_target = transitions.GetRawTarget(i);
+ transitions.SetRawTarget(transition_index, raw_target);
HeapObjectSlot target_slot =
- transitions->GetTargetSlot(transition_index);
+ transitions.GetTargetSlot(transition_index);
RecordSlot(transitions, target_slot, raw_target->GetHeapObject());
}
transition_index++;
@@ -2198,23 +2196,23 @@
// such that number_of_transitions() == 0. If this assumption changes,
// TransitionArray::Insert() will need to deal with the case that a transition
// array disappeared during GC.
- int trim = transitions->Capacity() - transition_index;
+ int trim = transitions.Capacity() - transition_index;
if (trim > 0) {
heap_->RightTrimWeakFixedArray(transitions,
trim * TransitionArray::kEntrySize);
- transitions->SetNumberOfTransitions(transition_index);
+ transitions.SetNumberOfTransitions(transition_index);
}
return descriptors_owner_died;
}
void MarkCompactCollector::RightTrimDescriptorArray(DescriptorArray array,
int descriptors_to_trim) {
- int old_nof_all_descriptors = array->number_of_all_descriptors();
+ int old_nof_all_descriptors = array.number_of_all_descriptors();
int new_nof_all_descriptors = old_nof_all_descriptors - descriptors_to_trim;
DCHECK_LT(0, descriptors_to_trim);
DCHECK_LE(0, new_nof_all_descriptors);
- Address start = array->GetDescriptorSlot(new_nof_all_descriptors).address();
- Address end = array->GetDescriptorSlot(old_nof_all_descriptors).address();
+ Address start = array.GetDescriptorSlot(new_nof_all_descriptors).address();
+ Address end = array.GetDescriptorSlot(old_nof_all_descriptors).address();
RememberedSet<OLD_TO_NEW>::RemoveRange(MemoryChunk::FromHeapObject(array),
start, end,
SlotSet::PREFREE_EMPTY_BUCKETS);
@@ -2223,52 +2221,52 @@
SlotSet::PREFREE_EMPTY_BUCKETS);
heap()->CreateFillerObjectAt(start, static_cast<int>(end - start),
ClearRecordedSlots::kNo);
- array->set_number_of_all_descriptors(new_nof_all_descriptors);
+ array.set_number_of_all_descriptors(new_nof_all_descriptors);
}
void MarkCompactCollector::TrimDescriptorArray(Map map,
DescriptorArray descriptors) {
- int number_of_own_descriptors = map->NumberOfOwnDescriptors();
+ int number_of_own_descriptors = map.NumberOfOwnDescriptors();
if (number_of_own_descriptors == 0) {
DCHECK(descriptors == ReadOnlyRoots(heap_).empty_descriptor_array());
return;
}
// TODO(ulan): Trim only if slack is greater than some percentage threshold.
int to_trim =
- descriptors->number_of_all_descriptors() - number_of_own_descriptors;
+ descriptors.number_of_all_descriptors() - number_of_own_descriptors;
if (to_trim > 0) {
- descriptors->set_number_of_descriptors(number_of_own_descriptors);
+ descriptors.set_number_of_descriptors(number_of_own_descriptors);
RightTrimDescriptorArray(descriptors, to_trim);
TrimEnumCache(map, descriptors);
- descriptors->Sort();
+ descriptors.Sort();
if (FLAG_unbox_double_fields) {
- LayoutDescriptor layout_descriptor = map->layout_descriptor();
- layout_descriptor = layout_descriptor->Trim(heap_, map, descriptors,
- number_of_own_descriptors);
- SLOW_DCHECK(layout_descriptor->IsConsistentWithMap(map, true));
+ LayoutDescriptor layout_descriptor = map.layout_descriptor();
+ layout_descriptor = layout_descriptor.Trim(heap_, map, descriptors,
+ number_of_own_descriptors);
+ SLOW_DCHECK(layout_descriptor.IsConsistentWithMap(map, true));
}
}
- DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
- map->set_owns_descriptors(true);
+ DCHECK(descriptors.number_of_descriptors() == number_of_own_descriptors);
+ map.set_owns_descriptors(true);
}
void MarkCompactCollector::TrimEnumCache(Map map, DescriptorArray descriptors) {
- int live_enum = map->EnumLength();
+ int live_enum = map.EnumLength();
if (live_enum == kInvalidEnumCacheSentinel) {
- live_enum = map->NumberOfEnumerableProperties();
+ live_enum = map.NumberOfEnumerableProperties();
}
- if (live_enum == 0) return descriptors->ClearEnumCache();
- EnumCache enum_cache = descriptors->enum_cache();
+ if (live_enum == 0) return descriptors.ClearEnumCache();
+ EnumCache enum_cache = descriptors.enum_cache();
- FixedArray keys = enum_cache->keys();
- int to_trim = keys->length() - live_enum;
+ FixedArray keys = enum_cache.keys();
+ int to_trim = keys.length() - live_enum;
if (to_trim <= 0) return;
heap_->RightTrimFixedArray(keys, to_trim);
- FixedArray indices = enum_cache->indices();
- to_trim = indices->length() - live_enum;
+ FixedArray indices = enum_cache.indices();
+ to_trim = indices.length() - live_enum;
if (to_trim <= 0) return;
heap_->RightTrimFixedArray(indices, to_trim);
}
@@ -2278,19 +2276,19 @@
EphemeronHashTable table;
while (weak_objects_.ephemeron_hash_tables.Pop(kMainThread, &table)) {
- for (int i = 0; i < table->Capacity(); i++) {
- HeapObject key = HeapObject::cast(table->KeyAt(i));
+ for (int i = 0; i < table.Capacity(); i++) {
+ HeapObject key = HeapObject::cast(table.KeyAt(i));
#ifdef VERIFY_HEAP
- Object value = table->ValueAt(i);
+ Object value = table.ValueAt(i);
- if (value->IsHeapObject()) {
+ if (value.IsHeapObject()) {
CHECK_IMPLIES(
non_atomic_marking_state()->IsBlackOrGrey(key),
non_atomic_marking_state()->IsBlackOrGrey(HeapObject::cast(value)));
}
#endif
if (!non_atomic_marking_state()->IsBlackOrGrey(key)) {
- table->RemoveEntry(i);
+ table.RemoveEntry(i);
}
}
}
@@ -2315,12 +2313,12 @@
// as MaybeObjectSlot.
MaybeObjectSlot location(slot.second);
if ((*location)->GetHeapObjectIfWeak(&value)) {
- DCHECK(!value->IsCell());
+ DCHECK(!value.IsCell());
if (non_atomic_marking_state()->IsBlackOrGrey(value)) {
// The value of the weak reference is alive.
RecordSlot(slot.first, HeapObjectSlot(location), value);
} else {
- if (value->IsMap()) {
+ if (value.IsMap()) {
// The map is non-live.
ClearPotentialSimpleMapTransition(Map::cast(value));
}
@@ -2336,9 +2334,9 @@
}
JSWeakRef weak_ref;
while (weak_objects_.js_weak_refs.Pop(kMainThread, &weak_ref)) {
- HeapObject target = HeapObject::cast(weak_ref->target());
+ HeapObject target = HeapObject::cast(weak_ref.target());
if (!non_atomic_marking_state()->IsBlackOrGrey(target)) {
- weak_ref->set_target(ReadOnlyRoots(isolate()).undefined_value());
+ weak_ref.set_target(ReadOnlyRoots(isolate()).undefined_value());
} else {
// The value of the JSWeakRef is alive.
ObjectSlot slot = weak_ref.RawField(JSWeakRef::kTargetOffset);
@@ -2347,17 +2345,17 @@
}
WeakCell weak_cell;
while (weak_objects_.weak_cells.Pop(kMainThread, &weak_cell)) {
- HeapObject target = HeapObject::cast(weak_cell->target());
+ HeapObject target = HeapObject::cast(weak_cell.target());
if (!non_atomic_marking_state()->IsBlackOrGrey(target)) {
- DCHECK(!target->IsUndefined());
+ DCHECK(!target.IsUndefined());
// The value of the WeakCell is dead.
JSFinalizationGroup finalization_group =
- JSFinalizationGroup::cast(weak_cell->finalization_group());
- if (!finalization_group->scheduled_for_cleanup()) {
+ JSFinalizationGroup::cast(weak_cell.finalization_group());
+ if (!finalization_group.scheduled_for_cleanup()) {
heap()->AddDirtyJSFinalizationGroup(
finalization_group,
[](HeapObject object, ObjectSlot slot, Object target) {
- if (target->IsHeapObject()) {
+ if (target.IsHeapObject()) {
RecordSlot(object, slot, HeapObject::cast(target));
}
});
@@ -2365,14 +2363,14 @@
// We're modifying the pointers in WeakCell and JSFinalizationGroup during
// GC; thus we need to record the slots it writes. The normal write
// barrier is not enough, since it's disabled before GC.
- weak_cell->Nullify(isolate(),
- [](HeapObject object, ObjectSlot slot, Object target) {
- if (target->IsHeapObject()) {
- RecordSlot(object, slot, HeapObject::cast(target));
- }
- });
- DCHECK(finalization_group->NeedsCleanup());
- DCHECK(finalization_group->scheduled_for_cleanup());
+ weak_cell.Nullify(isolate(),
+ [](HeapObject object, ObjectSlot slot, Object target) {
+ if (target.IsHeapObject()) {
+ RecordSlot(object, slot, HeapObject::cast(target));
+ }
+ });
+ DCHECK(finalization_group.NeedsCleanup());
+ DCHECK(finalization_group.scheduled_for_cleanup());
} else {
// The value of the WeakCell is alive.
ObjectSlot slot = weak_cell.RawField(WeakCell::kTargetOffset);
@@ -2496,7 +2494,7 @@
std::is_same<TSlot, FullMaybeObjectSlot>::value ||
std::is_same<TSlot, MaybeObjectSlot>::value,
"Only [Full]ObjectSlot and [Full]MaybeObjectSlot are expected here");
- MapWord map_word = heap_obj->map_word();
+ MapWord map_word = heap_obj.map_word();
if (map_word.IsForwardingAddress()) {
DCHECK_IMPLIES(!Heap::InFromPage(heap_obj),
MarkCompactCollector::IsOnEvacuationCandidate(heap_obj) ||
@@ -2512,7 +2510,7 @@
DCHECK(!Heap::InFromPage(target));
DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(target));
} else {
- DCHECK(heap_obj->map()->IsMap());
+ DCHECK(heap_obj.map().IsMap());
}
// OLD_TO_OLD slots are always removed after updating.
return REMOVE_SLOT;
@@ -2616,16 +2614,16 @@
static String UpdateReferenceInExternalStringTableEntry(Heap* heap,
FullObjectSlot p) {
- MapWord map_word = HeapObject::cast(*p)->map_word();
+ MapWord map_word = HeapObject::cast(*p).map_word();
if (map_word.IsForwardingAddress()) {
String new_string = String::cast(map_word.ToForwardingAddress());
- if (new_string->IsExternalString()) {
+ if (new_string.IsExternalString()) {
MemoryChunk::MoveExternalBackingStoreBytes(
ExternalBackingStoreType::kExternalString,
Page::FromAddress((*p).ptr()), Page::FromHeapObject(new_string),
- ExternalString::cast(new_string)->ExternalPayloadSize());
+ ExternalString::cast(new_string).ExternalPayloadSize());
}
return new_string;
}
@@ -3032,9 +3030,9 @@
class EvacuationWeakObjectRetainer : public WeakObjectRetainer {
public:
Object RetainAs(Object object) override {
- if (object->IsHeapObject()) {
+ if (object.IsHeapObject()) {
HeapObject heap_object = HeapObject::cast(object);
- MapWord map_word = heap_object->map_word();
+ MapWord map_word = heap_object.map_word();
if (map_word.IsForwardingAddress()) {
return map_word.ToForwardingAddress();
}
@@ -3065,7 +3063,7 @@
if (iteration_mode == kClearMarkbits) {
marking_state->bitmap(chunk)->ClearRange(
chunk->AddressToMarkbitIndex(chunk->area_start()),
- chunk->AddressToMarkbitIndex(object->address()));
+ chunk->AddressToMarkbitIndex(object.address()));
*failed_object = object;
}
return false;
@@ -3087,7 +3085,7 @@
if (chunk->IsLargePage()) {
HeapObject object = reinterpret_cast<LargePage*>(chunk)->GetObject();
if (marking_state->IsBlack(object)) {
- const bool success = visitor->Visit(object, object->Size());
+ const bool success = visitor->Visit(object, object.Size());
USE(success);
DCHECK(success);
}
@@ -3116,7 +3114,7 @@
if (chunk->IsLargePage()) {
HeapObject object = reinterpret_cast<LargePage*>(chunk)->GetObject();
if (marking_state->IsGrey(object)) {
- const bool success = visitor->Visit(object, object->Size());
+ const bool success = visitor->Visit(object, object.Size());
USE(success);
DCHECK(success);
}
@@ -3268,9 +3266,9 @@
PointersUpdatingVisitor visitor;
for (Address cur = start_; cur < end_;) {
HeapObject object = HeapObject::FromAddress(cur);
- Map map = object->map();
- int size = object->SizeFromMap(map);
- object->IterateBodyFast(map, size, &visitor);
+ Map map = object.map();
+ int size = object.SizeFromMap(map);
+ object.IterateBodyFast(map, size, &visitor);
cur += size;
}
}
@@ -3283,7 +3281,7 @@
PointersUpdatingVisitor visitor;
for (auto object_and_size : LiveObjectRange<kAllLiveObjects>(
chunk_, marking_state_->bitmap(chunk_))) {
- object_and_size.first->IterateBodyFast(&visitor);
+ object_and_size.first.IterateBodyFast(&visitor);
}
}
@@ -3327,7 +3325,7 @@
return REMOVE_SLOT;
}
if (Heap::InFromPage(heap_object)) {
- MapWord map_word = heap_object->map_word();
+ MapWord map_word = heap_object.map_word();
if (map_word.IsForwardingAddress()) {
HeapObjectReference::Update(THeapObjectSlot(slot),
map_word.ToForwardingAddress());
@@ -3392,7 +3390,7 @@
for (auto object_size : *chunk_->invalidated_slots()) {
HeapObject object = object_size.first;
int size = object_size.second;
- DCHECK_LE(object->SizeFromMap(object->map()), size);
+ DCHECK_LE(object.SizeFromMap(object.map()), size);
}
#endif
// The invalidated slots are not needed after old-to-old slots were
@@ -3590,9 +3588,9 @@
for (auto iti = indices.begin(); iti != indices.end();) {
// EphemeronHashTable keys must be heap objects.
HeapObjectSlot key_slot(
- table->RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(*iti)));
+ table.RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(*iti)));
HeapObject key = key_slot.ToHeapObject();
- MapWord map_word = key->map_word();
+ MapWord map_word = key.map_word();
if (map_word.IsForwardingAddress()) {
key = map_word.ToForwardingAddress();
key_slot.StoreHeapObject(key);
@@ -3724,10 +3722,10 @@
// Remove outdated slots.
RememberedSet<OLD_TO_NEW>::RemoveRange(page, page->address(),
- failed_object->address(),
+ failed_object.address(),
SlotSet::PREFREE_EMPTY_BUCKETS);
RememberedSet<OLD_TO_NEW>::RemoveRangeTyped(page, page->address(),
- failed_object->address());
+ failed_object.address());
// Recompute live bytes.
LiveObjectVisitor::RecomputeLiveBytes(page, non_atomic_marking_state());
// Re-record slots.
@@ -3845,7 +3843,7 @@
int total_count = 0;
worklist->IterateGlobalPool([&count, &total_count](HeapObject obj) {
++total_count;
- count[obj->map()->instance_type()]++;
+ count[obj.map().instance_type()]++;
});
std::vector<std::pair<int, InstanceType>> rank;
rank.reserve(count.size());
@@ -4244,7 +4242,7 @@
private:
V8_INLINE void MarkObjectByPointer(FullObjectSlot p) {
- if (!(*p)->IsHeapObject()) return;
+ if (!(*p).IsHeapObject()) return;
collector_->MarkRootObject(HeapObject::cast(*p));
}
MinorMarkCompactCollector* const collector_;
@@ -4322,7 +4320,7 @@
LiveObjectRange<kGreyObjects>(p, marking_state()->bitmap(p))) {
HeapObject const object = object_and_size.first;
DCHECK(non_atomic_marking_state()->IsGrey(object));
- Address free_end = object->address();
+ Address free_end = object.address();
if (free_end != free_start) {
CHECK_GT(free_end, free_start);
size_t size = static_cast<size_t>(free_end - free_start);
@@ -4335,8 +4333,8 @@
p->heap()->CreateFillerObjectAt(free_start, static_cast<int>(size),
ClearRecordedSlots::kNo);
}
- Map map = object->synchronized_map();
- int size = object->SizeFromMap(map);
+ Map map = object.synchronized_map();
+ int size = object.SizeFromMap(map);
free_start = free_end + size;
}
@@ -4376,14 +4374,14 @@
// Visit all HeapObject pointers in [start, end).
for (FullObjectSlot p = start; p < end; ++p) {
Object o = *p;
- if (o->IsHeapObject()) {
+ if (o.IsHeapObject()) {
HeapObject heap_object = HeapObject::cast(o);
if (marking_state_->IsWhite(heap_object)) {
- if (o->IsExternalString()) {
+ if (o.IsExternalString()) {
heap_->FinalizeExternalString(String::cast(*p));
} else {
// The original external string may have been internalized.
- DCHECK(o->IsThinString());
+ DCHECK(o.IsThinString());
}
// Set the entry to the_hole_value (as deleted).
p.store(ReadOnlyRoots(heap_).the_hole_value());
@@ -4698,8 +4696,8 @@
MarkingWorklist::View marking_worklist(worklist(), kMainMarker);
HeapObject object;
while (marking_worklist.Pop(&object)) {
- DCHECK(!object->IsFiller());
- DCHECK(object->IsHeapObject());
+ DCHECK(!object.IsFiller());
+ DCHECK(object.IsHeapObject());
DCHECK(heap()->Contains(object));
DCHECK(non_atomic_marking_state()->IsGrey(object));
main_marking_visitor()->Visit(object);
diff --git a/src/heap/mark-compact.h b/src/heap/mark-compact.h
index c86f1d8..8c6a2b4 100644
--- a/src/heap/mark-compact.h
+++ b/src/heap/mark-compact.h
@@ -33,7 +33,7 @@
class MarkingStateBase {
public:
V8_INLINE MarkBit MarkBitFrom(HeapObject obj) {
- return MarkBitFrom(MemoryChunk::FromHeapObject(obj), obj->ptr());
+ return MarkBitFrom(MemoryChunk::FromHeapObject(obj), obj.ptr());
}
// {addr} may be tagged or aligned.
@@ -601,7 +601,7 @@
void AbortCompaction();
static inline bool IsOnEvacuationCandidate(Object obj) {
- return Page::FromAddress(obj->ptr())->IsEvacuationCandidate();
+ return Page::FromAddress(obj.ptr())->IsEvacuationCandidate();
}
static bool IsOnEvacuationCandidate(MaybeObject obj);
diff --git a/src/heap/object-stats.cc b/src/heap/object-stats.cc
index 0bb0b0f..bfaa208 100644
--- a/src/heap/object-stats.cc
+++ b/src/heap/object-stats.cc
@@ -44,17 +44,17 @@
void RecordStats(HeapObject host) {
size_t old_pointer_fields_count = *tagged_fields_count_;
- host->Iterate(this);
+ host.Iterate(this);
size_t tagged_fields_count_in_object =
*tagged_fields_count_ - old_pointer_fields_count;
- int object_size_in_words = host->Size() / kTaggedSize;
+ int object_size_in_words = host.Size() / kTaggedSize;
DCHECK_LE(tagged_fields_count_in_object, object_size_in_words);
size_t raw_fields_count_in_object =
object_size_in_words - tagged_fields_count_in_object;
- if (host->IsJSObject()) {
- JSObjectFieldStats field_stats = GetInobjectFieldStats(host->map());
+ if (host.IsJSObject()) {
+ JSObjectFieldStats field_stats = GetInobjectFieldStats(host.map());
// Embedder fields are already included into pointer words.
DCHECK_LE(field_stats.embedded_fields_count_,
tagged_fields_count_in_object);
@@ -117,17 +117,17 @@
// Iterate descriptor array and calculate stats.
JSObjectFieldStats stats;
stats.embedded_fields_count_ = JSObject::GetEmbedderFieldCount(map);
- if (!map->is_dictionary_map()) {
- int nof = map->NumberOfOwnDescriptors();
- DescriptorArray descriptors = map->instance_descriptors();
+ if (!map.is_dictionary_map()) {
+ int nof = map.NumberOfOwnDescriptors();
+ DescriptorArray descriptors = map.instance_descriptors();
for (int descriptor = 0; descriptor < nof; descriptor++) {
- PropertyDetails details = descriptors->GetDetails(descriptor);
+ PropertyDetails details = descriptors.GetDetails(descriptor);
if (details.location() == kField) {
FieldIndex index = FieldIndex::ForDescriptor(map, descriptor);
// Stop on first out-of-object field.
if (!index.is_inobject()) break;
if (details.representation().IsDouble() &&
- map->IsUnboxedDoubleField(index)) {
+ map.IsUnboxedDoubleField(index)) {
++stats.unboxed_double_fields_count_;
}
}
@@ -431,7 +431,7 @@
bool ObjectStatsCollectorImpl::ShouldRecordObject(HeapObject obj,
CowMode check_cow_array) {
- if (obj->IsFixedArrayExact()) {
+ if (obj.IsFixedArrayExact()) {
FixedArray fixed_array = FixedArray::cast(obj);
bool cow_check = check_cow_array == kIgnoreCow || !IsCowArray(fixed_array);
return CanRecordFixedArray(fixed_array) && cow_check;
@@ -445,16 +445,16 @@
HeapObject parent, HashTable<Derived, Shape> hash_table,
ObjectStats::VirtualInstanceType type) {
size_t over_allocated =
- (hash_table->Capacity() - (hash_table->NumberOfElements() +
- hash_table->NumberOfDeletedElements())) *
+ (hash_table.Capacity() -
+ (hash_table.NumberOfElements() + hash_table.NumberOfDeletedElements())) *
HashTable<Derived, Shape>::kEntrySize * kTaggedSize;
- RecordVirtualObjectStats(parent, hash_table, type, hash_table->Size(),
+ RecordVirtualObjectStats(parent, hash_table, type, hash_table.Size(),
over_allocated);
}
bool ObjectStatsCollectorImpl::RecordSimpleVirtualObjectStats(
HeapObject parent, HeapObject obj, ObjectStats::VirtualInstanceType type) {
- return RecordVirtualObjectStats(parent, obj, type, obj->Size(),
+ return RecordVirtualObjectStats(parent, obj, type, obj.Size(),
ObjectStats::kNoOverAllocation, kCheckCow);
}
@@ -484,29 +484,29 @@
void ObjectStatsCollectorImpl::RecordVirtualAllocationSiteDetails(
AllocationSite site) {
- if (!site->PointsToLiteral()) return;
- JSObject boilerplate = site->boilerplate();
- if (boilerplate->IsJSArray()) {
+ if (!site.PointsToLiteral()) return;
+ JSObject boilerplate = site.boilerplate();
+ if (boilerplate.IsJSArray()) {
RecordSimpleVirtualObjectStats(site, boilerplate,
ObjectStats::JS_ARRAY_BOILERPLATE_TYPE);
// Array boilerplates cannot have properties.
} else {
RecordVirtualObjectStats(
site, boilerplate, ObjectStats::JS_OBJECT_BOILERPLATE_TYPE,
- boilerplate->Size(), ObjectStats::kNoOverAllocation);
- if (boilerplate->HasFastProperties()) {
+ boilerplate.Size(), ObjectStats::kNoOverAllocation);
+ if (boilerplate.HasFastProperties()) {
// We'll mis-classify the empty_property_array here. Given that there is a
// single instance, this is negligible.
- PropertyArray properties = boilerplate->property_array();
+ PropertyArray properties = boilerplate.property_array();
RecordSimpleVirtualObjectStats(
site, properties, ObjectStats::BOILERPLATE_PROPERTY_ARRAY_TYPE);
} else {
- NameDictionary properties = boilerplate->property_dictionary();
+ NameDictionary properties = boilerplate.property_dictionary();
RecordSimpleVirtualObjectStats(
site, properties, ObjectStats::BOILERPLATE_PROPERTY_DICTIONARY_TYPE);
}
}
- FixedArrayBase elements = boilerplate->elements();
+ FixedArrayBase elements = boilerplate.elements();
RecordSimpleVirtualObjectStats(site, elements,
ObjectStats::BOILERPLATE_ELEMENTS_TYPE);
}
@@ -515,14 +515,14 @@
FunctionTemplateInfo fti) {
// named_property_handler and indexed_property_handler are recorded as
// INTERCEPTOR_INFO_TYPE.
- if (!fti->call_code()->IsUndefined(isolate())) {
+ if (!fti.call_code().IsUndefined(isolate())) {
RecordSimpleVirtualObjectStats(
- fti, CallHandlerInfo::cast(fti->call_code()),
+ fti, CallHandlerInfo::cast(fti.call_code()),
ObjectStats::FUNCTION_TEMPLATE_INFO_ENTRIES_TYPE);
}
- if (!fti->GetInstanceCallHandler()->IsUndefined(isolate())) {
+ if (!fti.GetInstanceCallHandler().IsUndefined(isolate())) {
RecordSimpleVirtualObjectStats(
- fti, CallHandlerInfo::cast(fti->GetInstanceCallHandler()),
+ fti, CallHandlerInfo::cast(fti.GetInstanceCallHandler()),
ObjectStats::FUNCTION_TEMPLATE_INFO_ENTRIES_TYPE);
}
}
@@ -530,62 +530,61 @@
void ObjectStatsCollectorImpl::RecordVirtualJSGlobalObjectDetails(
JSGlobalObject object) {
// Properties.
- GlobalDictionary properties = object->global_dictionary();
+ GlobalDictionary properties = object.global_dictionary();
RecordHashTableVirtualObjectStats(object, properties,
ObjectStats::GLOBAL_PROPERTIES_TYPE);
// Elements.
- FixedArrayBase elements = object->elements();
+ FixedArrayBase elements = object.elements();
RecordSimpleVirtualObjectStats(object, elements,
ObjectStats::GLOBAL_ELEMENTS_TYPE);
}
void ObjectStatsCollectorImpl::RecordVirtualJSObjectDetails(JSObject object) {
// JSGlobalObject is recorded separately.
- if (object->IsJSGlobalObject()) return;
+ if (object.IsJSGlobalObject()) return;
// Uncompiled JSFunction has a separate type.
- if (object->IsJSFunction() && !JSFunction::cast(object)->is_compiled()) {
+ if (object.IsJSFunction() && !JSFunction::cast(object).is_compiled()) {
RecordSimpleVirtualObjectStats(HeapObject(), object,
ObjectStats::JS_UNCOMPILED_FUNCTION_TYPE);
}
// Properties.
- if (object->HasFastProperties()) {
- PropertyArray properties = object->property_array();
+ if (object.HasFastProperties()) {
+ PropertyArray properties = object.property_array();
if (properties != ReadOnlyRoots(heap_).empty_property_array()) {
- size_t over_allocated =
- object->map()->UnusedPropertyFields() * kTaggedSize;
+ size_t over_allocated = object.map().UnusedPropertyFields() * kTaggedSize;
RecordVirtualObjectStats(object, properties,
- object->map()->is_prototype_map()
+ object.map().is_prototype_map()
? ObjectStats::PROTOTYPE_PROPERTY_ARRAY_TYPE
: ObjectStats::OBJECT_PROPERTY_ARRAY_TYPE,
- properties->Size(), over_allocated);
+ properties.Size(), over_allocated);
}
} else {
- NameDictionary properties = object->property_dictionary();
+ NameDictionary properties = object.property_dictionary();
RecordHashTableVirtualObjectStats(
object, properties,
- object->map()->is_prototype_map()
+ object.map().is_prototype_map()
? ObjectStats::PROTOTYPE_PROPERTY_DICTIONARY_TYPE
: ObjectStats::OBJECT_PROPERTY_DICTIONARY_TYPE);
}
// Elements.
- FixedArrayBase elements = object->elements();
- if (object->HasDictionaryElements()) {
+ FixedArrayBase elements = object.elements();
+ if (object.HasDictionaryElements()) {
RecordHashTableVirtualObjectStats(
object, NumberDictionary::cast(elements),
- object->IsJSArray() ? ObjectStats::ARRAY_DICTIONARY_ELEMENTS_TYPE
- : ObjectStats::OBJECT_DICTIONARY_ELEMENTS_TYPE);
- } else if (object->IsJSArray()) {
+ object.IsJSArray() ? ObjectStats::ARRAY_DICTIONARY_ELEMENTS_TYPE
+ : ObjectStats::OBJECT_DICTIONARY_ELEMENTS_TYPE);
+ } else if (object.IsJSArray()) {
if (elements != ReadOnlyRoots(heap_).empty_fixed_array()) {
size_t element_size =
- (elements->Size() - FixedArrayBase::kHeaderSize) / elements->length();
- uint32_t length = JSArray::cast(object)->length()->Number();
- size_t over_allocated = (elements->length() - length) * element_size;
+ (elements.Size() - FixedArrayBase::kHeaderSize) / elements.length();
+ uint32_t length = JSArray::cast(object).length().Number();
+ size_t over_allocated = (elements.length() - length) * element_size;
RecordVirtualObjectStats(object, elements,
ObjectStats::ARRAY_ELEMENTS_TYPE,
- elements->Size(), over_allocated);
+ elements.Size(), over_allocated);
}
} else {
RecordSimpleVirtualObjectStats(object, elements,
@@ -593,10 +592,10 @@
}
// JSCollections.
- if (object->IsJSCollection()) {
+ if (object.IsJSCollection()) {
// TODO(bmeurer): Properly compute over-allocation here.
RecordSimpleVirtualObjectStats(
- object, FixedArray::cast(JSCollection::cast(object)->table()),
+ object, FixedArray::cast(JSCollection::cast(object).table()),
ObjectStats::JS_COLLECTION_TABLE_TYPE);
}
}
@@ -657,30 +656,30 @@
size_t calculated_size = 0;
// Log the feedback vector's header (fixed fields).
- size_t header_size = vector->slots_start().address() - vector->address();
+ size_t header_size = vector.slots_start().address() - vector.address();
stats_->RecordVirtualObjectStats(ObjectStats::FEEDBACK_VECTOR_HEADER_TYPE,
header_size, ObjectStats::kNoOverAllocation);
calculated_size += header_size;
// Iterate over the feedback slots and log each one.
- if (!vector->shared_function_info()->HasFeedbackMetadata()) return;
+ if (!vector.shared_function_info().HasFeedbackMetadata()) return;
- FeedbackMetadataIterator it(vector->metadata());
+ FeedbackMetadataIterator it(vector.metadata());
while (it.HasNext()) {
FeedbackSlot slot = it.Next();
// Log the entry (or entries) taken up by this slot.
size_t slot_size = it.entry_size() * kTaggedSize;
stats_->RecordVirtualObjectStats(
- GetFeedbackSlotType(vector->Get(slot), it.kind(), heap_->isolate()),
+ GetFeedbackSlotType(vector.Get(slot), it.kind(), heap_->isolate()),
slot_size, ObjectStats::kNoOverAllocation);
calculated_size += slot_size;
// Log the monomorphic/polymorphic helper objects that this slot owns.
for (int i = 0; i < it.entry_size(); i++) {
- MaybeObject raw_object = vector->get(slot.ToInt() + i);
+ MaybeObject raw_object = vector.get(slot.ToInt() + i);
HeapObject object;
if (raw_object->GetHeapObject(&object)) {
- if (object->IsCell() || object->IsWeakFixedArray()) {
+ if (object.IsCell() || object.IsWeakFixedArray()) {
RecordSimpleVirtualObjectStats(
vector, object, ObjectStats::FEEDBACK_VECTOR_ENTRY_TYPE);
}
@@ -688,65 +687,65 @@
}
}
- CHECK_EQ(calculated_size, vector->Size());
+ CHECK_EQ(calculated_size, vector.Size());
}
void ObjectStatsCollectorImpl::RecordVirtualFixedArrayDetails(
FixedArray array) {
if (IsCowArray(array)) {
RecordVirtualObjectStats(HeapObject(), array, ObjectStats::COW_ARRAY_TYPE,
- array->Size(), ObjectStats::kNoOverAllocation,
+ array.Size(), ObjectStats::kNoOverAllocation,
kIgnoreCow);
}
}
void ObjectStatsCollectorImpl::CollectStatistics(
HeapObject obj, Phase phase, CollectFieldStats collect_field_stats) {
- Map map = obj->map();
+ Map map = obj.map();
switch (phase) {
case kPhase1:
- if (obj->IsFeedbackVector()) {
+ if (obj.IsFeedbackVector()) {
RecordVirtualFeedbackVectorDetails(FeedbackVector::cast(obj));
- } else if (obj->IsMap()) {
+ } else if (obj.IsMap()) {
RecordVirtualMapDetails(Map::cast(obj));
- } else if (obj->IsBytecodeArray()) {
+ } else if (obj.IsBytecodeArray()) {
RecordVirtualBytecodeArrayDetails(BytecodeArray::cast(obj));
- } else if (obj->IsCode()) {
+ } else if (obj.IsCode()) {
RecordVirtualCodeDetails(Code::cast(obj));
- } else if (obj->IsFunctionTemplateInfo()) {
+ } else if (obj.IsFunctionTemplateInfo()) {
RecordVirtualFunctionTemplateInfoDetails(
FunctionTemplateInfo::cast(obj));
- } else if (obj->IsJSGlobalObject()) {
+ } else if (obj.IsJSGlobalObject()) {
RecordVirtualJSGlobalObjectDetails(JSGlobalObject::cast(obj));
- } else if (obj->IsJSObject()) {
+ } else if (obj.IsJSObject()) {
// This phase needs to come after RecordVirtualAllocationSiteDetails
// to properly split among boilerplates.
RecordVirtualJSObjectDetails(JSObject::cast(obj));
- } else if (obj->IsSharedFunctionInfo()) {
+ } else if (obj.IsSharedFunctionInfo()) {
RecordVirtualSharedFunctionInfoDetails(SharedFunctionInfo::cast(obj));
- } else if (obj->IsContext()) {
+ } else if (obj.IsContext()) {
RecordVirtualContext(Context::cast(obj));
- } else if (obj->IsScript()) {
+ } else if (obj.IsScript()) {
RecordVirtualScriptDetails(Script::cast(obj));
- } else if (obj->IsArrayBoilerplateDescription()) {
+ } else if (obj.IsArrayBoilerplateDescription()) {
RecordVirtualArrayBoilerplateDescription(
ArrayBoilerplateDescription::cast(obj));
- } else if (obj->IsFixedArrayExact()) {
+ } else if (obj.IsFixedArrayExact()) {
// Has to go last as it triggers too eagerly.
RecordVirtualFixedArrayDetails(FixedArray::cast(obj));
}
break;
case kPhase2:
- if (obj->IsExternalString()) {
+ if (obj.IsExternalString()) {
// This has to be in Phase2 to avoid conflicting with recording Script
// sources. We still want to run RecordObjectStats after though.
RecordVirtualExternalStringDetails(ExternalString::cast(obj));
}
size_t over_allocated = ObjectStats::kNoOverAllocation;
- if (obj->IsJSObject()) {
- over_allocated = map->instance_size() - map->UsedInstanceSize();
+ if (obj.IsJSObject()) {
+ over_allocated = map.instance_size() - map.UsedInstanceSize();
}
- RecordObjectStats(obj, map->instance_type(), obj->Size(), over_allocated);
+ RecordObjectStats(obj, map.instance_type(), obj.Size(), over_allocated);
if (collect_field_stats == CollectFieldStats::kYes) {
field_stats_collector_.RecordStats(obj);
}
@@ -757,10 +756,10 @@
void ObjectStatsCollectorImpl::CollectGlobalStatistics() {
// Iterate boilerplates first to disambiguate them from regular JS objects.
Object list = heap_->allocation_sites_list();
- while (list->IsAllocationSite()) {
+ while (list.IsAllocationSite()) {
AllocationSite site = AllocationSite::cast(list);
RecordVirtualAllocationSiteDetails(site);
- list = site->weak_next();
+ list = site.weak_next();
}
// FixedArray.
@@ -805,7 +804,7 @@
}
bool ObjectStatsCollectorImpl::IsCowArray(FixedArrayBase array) {
- return array->map() == ReadOnlyRoots(heap_).fixed_cow_array_map();
+ return array.map() == ReadOnlyRoots(heap_).fixed_cow_array_map();
}
bool ObjectStatsCollectorImpl::SameLiveness(HeapObject obj1, HeapObject obj2) {
@@ -820,57 +819,57 @@
// to get a better picture of what's going on in MapSpace. This
// method computes the virtual instance type to use for a given map,
// using MAP_TYPE for regular maps that aren't special in any way.
- if (map->is_prototype_map()) {
- if (map->is_dictionary_map()) {
+ if (map.is_prototype_map()) {
+ if (map.is_dictionary_map()) {
RecordSimpleVirtualObjectStats(
HeapObject(), map, ObjectStats::MAP_PROTOTYPE_DICTIONARY_TYPE);
- } else if (map->is_abandoned_prototype_map()) {
+ } else if (map.is_abandoned_prototype_map()) {
RecordSimpleVirtualObjectStats(HeapObject(), map,
ObjectStats::MAP_ABANDONED_PROTOTYPE_TYPE);
} else {
RecordSimpleVirtualObjectStats(HeapObject(), map,
ObjectStats::MAP_PROTOTYPE_TYPE);
}
- } else if (map->is_deprecated()) {
+ } else if (map.is_deprecated()) {
RecordSimpleVirtualObjectStats(HeapObject(), map,
ObjectStats::MAP_DEPRECATED_TYPE);
- } else if (map->is_dictionary_map()) {
+ } else if (map.is_dictionary_map()) {
RecordSimpleVirtualObjectStats(HeapObject(), map,
ObjectStats::MAP_DICTIONARY_TYPE);
- } else if (map->is_stable()) {
+ } else if (map.is_stable()) {
RecordSimpleVirtualObjectStats(HeapObject(), map,
ObjectStats::MAP_STABLE_TYPE);
} else {
// This will be logged as MAP_TYPE in Phase2.
}
- DescriptorArray array = map->instance_descriptors();
- if (map->owns_descriptors() &&
+ DescriptorArray array = map.instance_descriptors();
+ if (map.owns_descriptors() &&
array != ReadOnlyRoots(heap_).empty_descriptor_array()) {
// Generally DescriptorArrays have their own instance type already
// (DESCRIPTOR_ARRAY_TYPE), but we'd like to be able to tell which
// of those are for (abandoned) prototypes, and which of those are
// owned by deprecated maps.
- if (map->is_prototype_map()) {
+ if (map.is_prototype_map()) {
RecordSimpleVirtualObjectStats(
map, array, ObjectStats::PROTOTYPE_DESCRIPTOR_ARRAY_TYPE);
- } else if (map->is_deprecated()) {
+ } else if (map.is_deprecated()) {
RecordSimpleVirtualObjectStats(
map, array, ObjectStats::DEPRECATED_DESCRIPTOR_ARRAY_TYPE);
}
- EnumCache enum_cache = array->enum_cache();
- RecordSimpleVirtualObjectStats(array, enum_cache->keys(),
+ EnumCache enum_cache = array.enum_cache();
+ RecordSimpleVirtualObjectStats(array, enum_cache.keys(),
ObjectStats::ENUM_KEYS_CACHE_TYPE);
- RecordSimpleVirtualObjectStats(array, enum_cache->indices(),
+ RecordSimpleVirtualObjectStats(array, enum_cache.indices(),
ObjectStats::ENUM_INDICES_CACHE_TYPE);
}
- if (map->is_prototype_map()) {
- if (map->prototype_info()->IsPrototypeInfo()) {
- PrototypeInfo info = PrototypeInfo::cast(map->prototype_info());
- Object users = info->prototype_users();
- if (users->IsWeakFixedArray()) {
+ if (map.is_prototype_map()) {
+ if (map.prototype_info().IsPrototypeInfo()) {
+ PrototypeInfo info = PrototypeInfo::cast(map.prototype_info());
+ Object users = info.prototype_users();
+ if (users.IsWeakFixedArray()) {
RecordSimpleVirtualObjectStats(map, WeakArrayList::cast(users),
ObjectStats::PROTOTYPE_USERS_TYPE);
}
@@ -880,29 +879,29 @@
void ObjectStatsCollectorImpl::RecordVirtualScriptDetails(Script script) {
RecordSimpleVirtualObjectStats(
- script, script->shared_function_infos(),
+ script, script.shared_function_infos(),
ObjectStats::SCRIPT_SHARED_FUNCTION_INFOS_TYPE);
// Log the size of external source code.
- Object raw_source = script->source();
- if (raw_source->IsExternalString()) {
+ Object raw_source = script.source();
+ if (raw_source.IsExternalString()) {
// The contents of external strings aren't on the heap, so we have to record
// them manually. The on-heap String object is recorded indepentendely in
// the normal pass.
ExternalString string = ExternalString::cast(raw_source);
- Address resource = string->resource_as_address();
- size_t off_heap_size = string->ExternalPayloadSize();
+ Address resource = string.resource_as_address();
+ size_t off_heap_size = string.ExternalPayloadSize();
RecordExternalResourceStats(
resource,
- string->IsOneByteRepresentation()
+ string.IsOneByteRepresentation()
? ObjectStats::SCRIPT_SOURCE_EXTERNAL_ONE_BYTE_TYPE
: ObjectStats::SCRIPT_SOURCE_EXTERNAL_TWO_BYTE_TYPE,
off_heap_size);
- } else if (raw_source->IsString()) {
+ } else if (raw_source.IsString()) {
String source = String::cast(raw_source);
RecordSimpleVirtualObjectStats(
script, source,
- source->IsOneByteRepresentation()
+ source.IsOneByteRepresentation()
? ObjectStats::SCRIPT_SOURCE_NON_EXTERNAL_ONE_BYTE_TYPE
: ObjectStats::SCRIPT_SOURCE_NON_EXTERNAL_TWO_BYTE_TYPE);
}
@@ -912,11 +911,11 @@
ExternalString string) {
// Track the external string resource size in a separate category.
- Address resource = string->resource_as_address();
- size_t off_heap_size = string->ExternalPayloadSize();
+ Address resource = string.resource_as_address();
+ size_t off_heap_size = string.ExternalPayloadSize();
RecordExternalResourceStats(
resource,
- string->IsOneByteRepresentation()
+ string.IsOneByteRepresentation()
? ObjectStats::STRING_EXTERNAL_RESOURCE_ONE_BYTE_TYPE
: ObjectStats::STRING_EXTERNAL_RESOURCE_TWO_BYTE_TYPE,
off_heap_size);
@@ -925,7 +924,7 @@
void ObjectStatsCollectorImpl::RecordVirtualSharedFunctionInfoDetails(
SharedFunctionInfo info) {
// Uncompiled SharedFunctionInfo gets its own category.
- if (!info->is_compiled()) {
+ if (!info.is_compiled()) {
RecordSimpleVirtualObjectStats(
HeapObject(), info, ObjectStats::UNCOMPILED_SHARED_FUNCTION_INFO_TYPE);
}
@@ -934,7 +933,7 @@
void ObjectStatsCollectorImpl::RecordVirtualArrayBoilerplateDescription(
ArrayBoilerplateDescription description) {
RecordVirtualObjectsForConstantPoolOrEmbeddedObjects(
- description, description->constant_elements(),
+ description, description.constant_elements(),
ObjectStats::ARRAY_BOILERPLATE_DESCRIPTION_ELEMENTS_TYPE);
}
@@ -943,11 +942,11 @@
HeapObject parent, HeapObject object,
ObjectStats::VirtualInstanceType type) {
if (!RecordSimpleVirtualObjectStats(parent, object, type)) return;
- if (object->IsFixedArrayExact()) {
+ if (object.IsFixedArrayExact()) {
FixedArray array = FixedArray::cast(object);
- for (int i = 0; i < array->length(); i++) {
- Object entry = array->get(i);
- if (!entry->IsHeapObject()) continue;
+ for (int i = 0; i < array.length(); i++) {
+ Object entry = array.get(i);
+ if (!entry.IsHeapObject()) continue;
RecordVirtualObjectsForConstantPoolOrEmbeddedObjects(
array, HeapObject::cast(entry), type);
}
@@ -957,24 +956,24 @@
void ObjectStatsCollectorImpl::RecordVirtualBytecodeArrayDetails(
BytecodeArray bytecode) {
RecordSimpleVirtualObjectStats(
- bytecode, bytecode->constant_pool(),
+ bytecode, bytecode.constant_pool(),
ObjectStats::BYTECODE_ARRAY_CONSTANT_POOL_TYPE);
// FixedArrays on constant pool are used for holding descriptor information.
// They are shared with optimized code.
- FixedArray constant_pool = FixedArray::cast(bytecode->constant_pool());
- for (int i = 0; i < constant_pool->length(); i++) {
- Object entry = constant_pool->get(i);
- if (entry->IsFixedArrayExact()) {
+ FixedArray constant_pool = FixedArray::cast(bytecode.constant_pool());
+ for (int i = 0; i < constant_pool.length(); i++) {
+ Object entry = constant_pool.get(i);
+ if (entry.IsFixedArrayExact()) {
RecordVirtualObjectsForConstantPoolOrEmbeddedObjects(
constant_pool, HeapObject::cast(entry),
ObjectStats::EMBEDDED_OBJECT_TYPE);
}
}
RecordSimpleVirtualObjectStats(
- bytecode, bytecode->handler_table(),
+ bytecode, bytecode.handler_table(),
ObjectStats::BYTECODE_ARRAY_HANDLER_TABLE_TYPE);
- if (bytecode->HasSourcePositionTable()) {
- RecordSimpleVirtualObjectStats(bytecode, bytecode->SourcePositionTable(),
+ if (bytecode.HasSourcePositionTable()) {
+ RecordSimpleVirtualObjectStats(bytecode, bytecode.SourcePositionTable(),
ObjectStats::SOURCE_POSITION_TABLE_TYPE);
}
}
@@ -999,29 +998,29 @@
void ObjectStatsCollectorImpl::RecordVirtualCodeDetails(Code code) {
RecordSimpleVirtualObjectStats(HeapObject(), code,
- CodeKindToVirtualInstanceType(code->kind()));
- RecordSimpleVirtualObjectStats(code, code->deoptimization_data(),
+ CodeKindToVirtualInstanceType(code.kind()));
+ RecordSimpleVirtualObjectStats(code, code.deoptimization_data(),
ObjectStats::DEOPTIMIZATION_DATA_TYPE);
- RecordSimpleVirtualObjectStats(code, code->relocation_info(),
+ RecordSimpleVirtualObjectStats(code, code.relocation_info(),
ObjectStats::RELOC_INFO_TYPE);
- Object source_position_table = code->source_position_table();
- if (source_position_table->IsSourcePositionTableWithFrameCache()) {
+ Object source_position_table = code.source_position_table();
+ if (source_position_table.IsSourcePositionTableWithFrameCache()) {
RecordSimpleVirtualObjectStats(
code,
SourcePositionTableWithFrameCache::cast(source_position_table)
- ->source_position_table(),
+ .source_position_table(),
ObjectStats::SOURCE_POSITION_TABLE_TYPE);
- } else if (source_position_table->IsHeapObject()) {
+ } else if (source_position_table.IsHeapObject()) {
RecordSimpleVirtualObjectStats(code,
HeapObject::cast(source_position_table),
ObjectStats::SOURCE_POSITION_TABLE_TYPE);
}
- if (code->kind() == Code::Kind::OPTIMIZED_FUNCTION) {
+ if (code.kind() == Code::Kind::OPTIMIZED_FUNCTION) {
DeoptimizationData input_data =
- DeoptimizationData::cast(code->deoptimization_data());
- if (input_data->length() > 0) {
- RecordSimpleVirtualObjectStats(code->deoptimization_data(),
- input_data->LiteralArray(),
+ DeoptimizationData::cast(code.deoptimization_data());
+ if (input_data.length() > 0) {
+ RecordSimpleVirtualObjectStats(code.deoptimization_data(),
+ input_data.LiteralArray(),
ObjectStats::OPTIMIZED_CODE_LITERALS_TYPE);
}
}
@@ -1029,7 +1028,7 @@
for (RelocIterator it(code, mode_mask); !it.done(); it.next()) {
DCHECK(RelocInfo::IsEmbeddedObjectMode(it.rinfo()->rmode()));
Object target = it.rinfo()->target_object();
- if (target->IsFixedArrayExact()) {
+ if (target.IsFixedArrayExact()) {
RecordVirtualObjectsForConstantPoolOrEmbeddedObjects(
code, HeapObject::cast(target), ObjectStats::EMBEDDED_OBJECT_TYPE);
}
@@ -1037,10 +1036,10 @@
}
void ObjectStatsCollectorImpl::RecordVirtualContext(Context context) {
- if (context->IsNativeContext()) {
- RecordObjectStats(context, NATIVE_CONTEXT_TYPE, context->Size());
- } else if (context->IsFunctionContext()) {
- RecordObjectStats(context, FUNCTION_CONTEXT_TYPE, context->Size());
+ if (context.IsNativeContext()) {
+ RecordObjectStats(context, NATIVE_CONTEXT_TYPE, context.Size());
+ } else if (context.IsFunctionContext()) {
+ RecordObjectStats(context, FUNCTION_CONTEXT_TYPE, context.Size());
} else {
RecordSimpleVirtualObjectStats(HeapObject(), context,
ObjectStats::OTHER_CONTEXT_TYPE);
@@ -1083,7 +1082,7 @@
CombinedHeapIterator iterator(heap);
for (HeapObject obj = iterator.Next(); !obj.is_null();
obj = iterator.Next()) {
- visitor->Visit(obj, obj->Size());
+ visitor->Visit(obj, obj.Size());
}
}
diff --git a/src/heap/objects-visiting-inl.h b/src/heap/objects-visiting-inl.h
index 132bd5b..cc71e7c 100644
--- a/src/heap/objects-visiting-inl.h
+++ b/src/heap/objects-visiting-inl.h
@@ -29,14 +29,14 @@
template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::Visit(HeapObject object) {
- return Visit(object->map(), object);
+ return Visit(object.map(), object);
}
template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::Visit(Map map,
HeapObject object) {
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
- switch (map->visitor_id()) {
+ switch (map.visitor_id()) {
#define CASE(TypeName, Type) \
case kVisit##TypeName: \
return visitor->Visit##TypeName( \
@@ -72,7 +72,7 @@
template <typename ResultType, typename ConcreteVisitor>
void HeapVisitor<ResultType, ConcreteVisitor>::VisitMapPointer(
HeapObject host, MapWordSlot map_slot) {
- DCHECK(!host->map_word().IsForwardingAddress());
+ DCHECK(!host.map_word().IsForwardingAddress());
static_cast<ConcreteVisitor*>(this)->VisitPointer(host, ObjectSlot(map_slot));
}
@@ -83,13 +83,13 @@
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this); \
if (!visitor->ShouldVisit(object)) return ResultType(); \
if (!visitor->AllowDefaultJSObjectVisit()) { \
- DCHECK_WITH_MSG(!map->IsJSObjectMap(), \
+ DCHECK_WITH_MSG(!map.IsJSObjectMap(), \
"Implement custom visitor for new JSObject subclass in " \
"concurrent marker"); \
} \
int size = TypeName::BodyDescriptor::SizeOf(map, object); \
if (visitor->ShouldVisitMapPointer()) \
- visitor->VisitMapPointer(object, object->map_slot()); \
+ visitor->VisitMapPointer(object, object.map_slot()); \
TypeName::BodyDescriptor::IterateBody(map, object, size, visitor); \
return static_cast<ResultType>(size); \
}
@@ -107,9 +107,9 @@
Map map, HeapObject object) {
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
if (!visitor->ShouldVisit(object)) return ResultType();
- int size = map->instance_size();
+ int size = map.instance_size();
if (visitor->ShouldVisitMapPointer()) {
- visitor->VisitMapPointer(object, object->map_slot());
+ visitor->VisitMapPointer(object, object.map_slot());
}
return static_cast<ResultType>(size);
}
@@ -121,7 +121,7 @@
if (!visitor->ShouldVisit(object)) return ResultType();
int size = JSObject::FastBodyDescriptor::SizeOf(map, object);
if (visitor->ShouldVisitMapPointer())
- visitor->VisitMapPointer(object, object->map_slot());
+ visitor->VisitMapPointer(object, object.map_slot());
JSObject::FastBodyDescriptor::IterateBody(map, object, size, visitor);
return static_cast<ResultType>(size);
}
@@ -133,7 +133,7 @@
if (!visitor->ShouldVisit(object)) return ResultType();
int size = JSObject::BodyDescriptor::SizeOf(map, object);
if (visitor->ShouldVisitMapPointer())
- visitor->VisitMapPointer(object, object->map_slot());
+ visitor->VisitMapPointer(object, object.map_slot());
JSObject::BodyDescriptor::IterateBody(map, object, size, visitor);
return static_cast<ResultType>(size);
}
@@ -143,9 +143,9 @@
Map map, HeapObject object) {
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
if (!visitor->ShouldVisit(object)) return ResultType();
- int size = map->instance_size();
+ int size = map.instance_size();
if (visitor->ShouldVisitMapPointer()) {
- visitor->VisitMapPointer(object, object->map_slot());
+ visitor->VisitMapPointer(object, object.map_slot());
}
StructBodyDescriptor::IterateBody(map, object, size, visitor);
return static_cast<ResultType>(size);
@@ -157,9 +157,9 @@
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
if (!visitor->ShouldVisit(object)) return ResultType();
if (visitor->ShouldVisitMapPointer()) {
- visitor->VisitMapPointer(object, object->map_slot());
+ visitor->VisitMapPointer(object, object.map_slot());
}
- return static_cast<ResultType>(object->size());
+ return static_cast<ResultType>(object.size());
}
template <typename ResultType, typename ConcreteVisitor>
@@ -169,7 +169,7 @@
if (!visitor->ShouldVisit(object)) return ResultType();
int size = WeakArrayBodyDescriptor::SizeOf(map, object);
if (visitor->ShouldVisitMapPointer()) {
- visitor->VisitMapPointer(object, object->map_slot());
+ visitor->VisitMapPointer(object, object.map_slot());
}
WeakArrayBodyDescriptor::IterateBody(map, object, size, visitor);
return size;
diff --git a/src/heap/objects-visiting.cc b/src/heap/objects-visiting.cc
index d56dd91..ec49471 100644
--- a/src/heap/objects-visiting.cc
+++ b/src/heap/objects-visiting.cc
@@ -57,7 +57,7 @@
}
}
// Retained object is new tail.
- DCHECK(!retained->IsUndefined(heap->isolate()));
+ DCHECK(!retained.IsUndefined(heap->isolate()));
candidate = T::cast(retained);
tail = candidate;
@@ -87,16 +87,16 @@
template <>
struct WeakListVisitor<Code> {
static void SetWeakNext(Code code, Object next) {
- code->code_data_container()->set_next_code_link(next,
- UPDATE_WEAK_WRITE_BARRIER);
+ code.code_data_container().set_next_code_link(next,
+ UPDATE_WEAK_WRITE_BARRIER);
}
static Object WeakNext(Code code) {
- return code->code_data_container()->next_code_link();
+ return code.code_data_container().next_code_link();
}
static HeapObject WeakNextHolder(Code code) {
- return code->code_data_container();
+ return code.code_data_container();
}
static int WeakNextOffset() { return CodeDataContainer::kNextCodeLinkOffset; }
@@ -114,11 +114,11 @@
template <>
struct WeakListVisitor<Context> {
static void SetWeakNext(Context context, Object next) {
- context->set(Context::NEXT_CONTEXT_LINK, next, UPDATE_WEAK_WRITE_BARRIER);
+ context.set(Context::NEXT_CONTEXT_LINK, next, UPDATE_WEAK_WRITE_BARRIER);
}
static Object WeakNext(Context context) {
- return context->next_context_link();
+ return context.next_context_link();
}
static HeapObject WeakNextHolder(Context context) { return context; }
@@ -133,7 +133,7 @@
// Record the slots of the weak entries in the native context.
for (int idx = Context::FIRST_WEAK_SLOT;
idx < Context::NATIVE_CONTEXT_SLOTS; ++idx) {
- ObjectSlot slot = context->RawField(Context::OffsetOfElementAt(idx));
+ ObjectSlot slot = context.RawField(Context::OffsetOfElementAt(idx));
MarkCompactCollector::RecordSlot(context, slot,
HeapObject::cast(*slot));
}
@@ -148,22 +148,22 @@
static void DoWeakList(Heap* heap, Context context,
WeakObjectRetainer* retainer, int index) {
// Visit the weak list, removing dead intermediate elements.
- Object list_head = VisitWeakList<T>(heap, context->get(index), retainer);
+ Object list_head = VisitWeakList<T>(heap, context.get(index), retainer);
// Update the list head.
- context->set(index, list_head, UPDATE_WRITE_BARRIER);
+ context.set(index, list_head, UPDATE_WRITE_BARRIER);
if (MustRecordSlots(heap)) {
// Record the updated slot if necessary.
- ObjectSlot head_slot = context->RawField(FixedArray::SizeFor(index));
+ ObjectSlot head_slot = context.RawField(FixedArray::SizeFor(index));
heap->mark_compact_collector()->RecordSlot(context, head_slot,
HeapObject::cast(list_head));
}
}
static void VisitPhantomObject(Heap* heap, Context context) {
- ClearWeakList<Code>(heap, context->get(Context::OPTIMIZED_CODE_LIST));
- ClearWeakList<Code>(heap, context->get(Context::DEOPTIMIZED_CODE_LIST));
+ ClearWeakList<Code>(heap, context.get(Context::OPTIMIZED_CODE_LIST));
+ ClearWeakList<Code>(heap, context.get(Context::DEOPTIMIZED_CODE_LIST));
}
};
@@ -171,10 +171,10 @@
template <>
struct WeakListVisitor<AllocationSite> {
static void SetWeakNext(AllocationSite obj, Object next) {
- obj->set_weak_next(next, UPDATE_WEAK_WRITE_BARRIER);
+ obj.set_weak_next(next, UPDATE_WEAK_WRITE_BARRIER);
}
- static Object WeakNext(AllocationSite obj) { return obj->weak_next(); }
+ static Object WeakNext(AllocationSite obj) { return obj.weak_next(); }
static HeapObject WeakNextHolder(AllocationSite obj) { return obj; }
diff --git a/src/heap/read-only-heap.cc b/src/heap/read-only-heap.cc
index 32ac952..42efc69 100644
--- a/src/heap/read-only-heap.cc
+++ b/src/heap/read-only-heap.cc
@@ -164,10 +164,10 @@
continue;
}
HeapObject object = HeapObject::FromAddress(current_addr_);
- const int object_size = object->Size();
+ const int object_size = object.Size();
current_addr_ += object_size;
- if (object->IsFiller()) {
+ if (object.IsFiller()) {
continue;
}
diff --git a/src/heap/remembered-set.h b/src/heap/remembered-set.h
index 8ce09f9..8792762 100644
--- a/src/heap/remembered-set.h
+++ b/src/heap/remembered-set.h
@@ -309,7 +309,7 @@
SlotCallbackResult result = callback(FullMaybeObjectSlot(&code));
DCHECK(!HasWeakHeapObjectTag(code));
if (code != old_code) {
- Memory<Address>(entry_address) = code->entry();
+ Memory<Address>(entry_address) = code.entry();
}
return result;
}
@@ -325,8 +325,7 @@
SlotCallbackResult result = callback(FullMaybeObjectSlot(&new_target));
DCHECK(!HasWeakHeapObjectTag(new_target));
if (new_target != old_target) {
- rinfo->set_target_address(
- Code::cast(new_target)->raw_instruction_start());
+ rinfo->set_target_address(Code::cast(new_target).raw_instruction_start());
}
return result;
}
diff --git a/src/heap/scavenger-inl.h b/src/heap/scavenger-inl.h
index e577a471..c556d37 100644
--- a/src/heap/scavenger-inl.h
+++ b/src/heap/scavenger-inl.h
@@ -72,7 +72,7 @@
if (regular_object_promotion_list_.Pop(task_id, &regular_object)) {
entry->heap_object = regular_object.first;
entry->size = regular_object.second;
- entry->map = entry->heap_object->map();
+ entry->map = entry->heap_object.map();
return true;
}
return large_object_promotion_list_.Pop(task_id, entry);
@@ -106,11 +106,11 @@
bool Scavenger::MigrateObject(Map map, HeapObject source, HeapObject target,
int size) {
// Copy the content of source to target.
- target->set_map_word(MapWord::FromMap(map));
- heap()->CopyBlock(target->address() + kTaggedSize,
- source->address() + kTaggedSize, size - kTaggedSize);
+ target.set_map_word(MapWord::FromMap(map));
+ heap()->CopyBlock(target.address() + kTaggedSize,
+ source.address() + kTaggedSize, size - kTaggedSize);
- Object old = source->map_slot().Release_CompareAndSwap(
+ Object old = source.map_slot().Release_CompareAndSwap(
map, MapWord::FromForwardingAddress(target).ToMap());
if (old != map) {
// Other task migrated the object.
@@ -147,7 +147,7 @@
const bool self_success = MigrateObject(map, object, target, object_size);
if (!self_success) {
allocator_.FreeLast(NEW_SPACE, target, object_size);
- MapWord map_word = object->synchronized_map_word();
+ MapWord map_word = object.synchronized_map_word();
HeapObjectReference::Update(slot, map_word.ToForwardingAddress());
DCHECK(!Heap::InFromPage(*slot));
return Heap::InToPage(*slot)
@@ -183,7 +183,7 @@
const bool self_success = MigrateObject(map, object, target, object_size);
if (!self_success) {
allocator_.FreeLast(OLD_SPACE, target, object_size);
- MapWord map_word = object->synchronized_map_word();
+ MapWord map_word = object.synchronized_map_word();
HeapObjectReference::Update(slot, map_word.ToForwardingAddress());
DCHECK(!Heap::InFromPage(*slot));
return Heap::InToPage(*slot)
@@ -216,7 +216,7 @@
MemoryChunk::FromHeapObject(object)->InNewLargeObjectSpace())) {
DCHECK_EQ(NEW_LO_SPACE,
MemoryChunk::FromHeapObject(object)->owner()->identity());
- if (object->map_slot().Release_CompareAndSwap(
+ if (object.map_slot().Release_CompareAndSwap(
map, MapWord::FromForwardingAddress(object).ToMap()) == map) {
surviving_new_large_objects_.insert({object, map});
promoted_size_ += object_size;
@@ -236,7 +236,7 @@
static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
"Only FullHeapObjectSlot and HeapObjectSlot are expected here");
- SLOW_DCHECK(object->SizeFromMap(map) == object_size);
+ SLOW_DCHECK(object.SizeFromMap(map) == object_size);
CopyAndForwardResult result;
if (HandleLargeObject(map, object, object_size, object_fields)) {
@@ -246,7 +246,7 @@
SLOW_DCHECK(static_cast<size_t>(object_size) <=
MemoryChunkLayout::AllocatableMemoryInDataPage());
- if (!heap()->ShouldBePromoted(object->address())) {
+ if (!heap()->ShouldBePromoted(object.address())) {
// A semi-space copy may fail due to fragmentation. In that case, we
// try to promote the object.
result = SemiSpaceCopyObject(map, slot, object, object_size, object_fields);
@@ -284,7 +284,7 @@
// The ThinString should die after Scavenge, so avoid writing the proper
// forwarding pointer and instead just signal the actual object as forwarded
// reference.
- String actual = object->actual();
+ String actual = object.actual();
// ThinStrings always refer to internalized strings, which are always in old
// space.
DCHECK(!Heap::InYoungGeneration(actual));
@@ -293,7 +293,7 @@
}
DCHECK_EQ(ObjectFields::kMaybePointers,
- Map::ObjectFieldsFrom(map->visitor_id()));
+ Map::ObjectFieldsFrom(map.visitor_id()));
return EvacuateObjectDefault(map, slot, object, object_size,
ObjectFields::kMaybePointers);
}
@@ -306,38 +306,38 @@
static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
"Only FullHeapObjectSlot and HeapObjectSlot are expected here");
- DCHECK(IsShortcutCandidate(map->instance_type()));
+ DCHECK(IsShortcutCandidate(map.instance_type()));
if (!is_incremental_marking_ &&
- object->unchecked_second() == ReadOnlyRoots(heap()).empty_string()) {
- HeapObject first = HeapObject::cast(object->unchecked_first());
+ object.unchecked_second() == ReadOnlyRoots(heap()).empty_string()) {
+ HeapObject first = HeapObject::cast(object.unchecked_first());
HeapObjectReference::Update(slot, first);
if (!Heap::InYoungGeneration(first)) {
- object->map_slot().Release_Store(
+ object.map_slot().Release_Store(
MapWord::FromForwardingAddress(first).ToMap());
return REMOVE_SLOT;
}
- MapWord first_word = first->synchronized_map_word();
+ MapWord first_word = first.synchronized_map_word();
if (first_word.IsForwardingAddress()) {
HeapObject target = first_word.ToForwardingAddress();
HeapObjectReference::Update(slot, target);
- object->map_slot().Release_Store(
+ object.map_slot().Release_Store(
MapWord::FromForwardingAddress(target).ToMap());
return Heap::InYoungGeneration(target) ? KEEP_SLOT : REMOVE_SLOT;
}
Map map = first_word.ToMap();
SlotCallbackResult result =
- EvacuateObjectDefault(map, slot, first, first->SizeFromMap(map),
- Map::ObjectFieldsFrom(map->visitor_id()));
- object->map_slot().Release_Store(
+ EvacuateObjectDefault(map, slot, first, first.SizeFromMap(map),
+ Map::ObjectFieldsFrom(map.visitor_id()));
+ object.map_slot().Release_Store(
MapWord::FromForwardingAddress(slot.ToHeapObject()).ToMap());
return result;
}
DCHECK_EQ(ObjectFields::kMaybePointers,
- Map::ObjectFieldsFrom(map->visitor_id()));
+ Map::ObjectFieldsFrom(map.visitor_id()));
return EvacuateObjectDefault(map, slot, object, object_size,
ObjectFields::kMaybePointers);
}
@@ -350,10 +350,10 @@
"Only FullHeapObjectSlot and HeapObjectSlot are expected here");
SLOW_DCHECK(Heap::InFromPage(source));
SLOW_DCHECK(!MapWord::FromMap(map).IsForwardingAddress());
- int size = source->SizeFromMap(map);
+ int size = source.SizeFromMap(map);
// Cannot use ::cast() below because that would add checks in debug mode
// that require re-reading the map.
- VisitorId visitor_id = map->visitor_id();
+ VisitorId visitor_id = map.visitor_id();
switch (visitor_id) {
case kVisitThinString:
// At the moment we don't allow weak pointers to thin strings.
@@ -380,7 +380,7 @@
DCHECK(Heap::InFromPage(object));
// Synchronized load that consumes the publishing CAS of MigrateObject.
- MapWord first_word = object->synchronized_map_word();
+ MapWord first_word = object.synchronized_map_word();
// If the first word is a forwarding address, the object has already been
// copied.
@@ -486,13 +486,13 @@
// later. This allows to only iterate the tables' values, which are treated
// as strong independetly of whether the key is live.
scavenger_->AddEphemeronHashTable(table);
- for (int i = 0; i < table->Capacity(); i++) {
+ for (int i = 0; i < table.Capacity(); i++) {
ObjectSlot value_slot =
- table->RawFieldOfElementAt(EphemeronHashTable::EntryToValueIndex(i));
+ table.RawFieldOfElementAt(EphemeronHashTable::EntryToValueIndex(i));
VisitPointer(table, value_slot);
}
- return table->SizeFromMap(map);
+ return table.SizeFromMap(map);
}
} // namespace internal
diff --git a/src/heap/scavenger.cc b/src/heap/scavenger.cc
index ad56d1c..b941a08 100644
--- a/src/heap/scavenger.cc
+++ b/src/heap/scavenger.cc
@@ -100,7 +100,7 @@
inline void VisitEphemeron(HeapObject obj, int entry, ObjectSlot key,
ObjectSlot value) override {
- DCHECK(Heap::IsLargeObject(obj) || obj->IsEphemeronHashTable());
+ DCHECK(Heap::IsLargeObject(obj) || obj.IsEphemeronHashTable());
VisitPointer(obj, value);
if (ObjectInYoungGeneration(*key)) {
@@ -143,7 +143,7 @@
DCHECK(success);
if (result == KEEP_SLOT) {
- SLOW_DCHECK(target->IsHeapObject());
+ SLOW_DCHECK(target.IsHeapObject());
RememberedSet<OLD_TO_NEW>::Insert(MemoryChunk::FromHeapObject(host),
slot.address());
}
@@ -169,13 +169,13 @@
V8_INLINE bool IsUnscavengedHeapObject(Heap* heap, Object object) {
return Heap::InFromPage(object) &&
- !HeapObject::cast(object)->map_word().IsForwardingAddress();
+ !HeapObject::cast(object).map_word().IsForwardingAddress();
}
// Same as IsUnscavengedHeapObject() above but specialized for HeapObjects.
V8_INLINE bool IsUnscavengedHeapObject(Heap* heap, HeapObject heap_object) {
return Heap::InFromPage(heap_object) &&
- !heap_object->map_word().IsForwardingAddress();
+ !heap_object.map_word().IsForwardingAddress();
}
bool IsUnscavengedHeapObjectSlot(Heap* heap, FullObjectSlot p) {
@@ -191,7 +191,7 @@
return object;
}
- MapWord map_word = HeapObject::cast(object)->map_word();
+ MapWord map_word = HeapObject::cast(object).map_word();
if (map_word.IsForwardingAddress()) {
return map_word.ToForwardingAddress();
}
@@ -344,7 +344,7 @@
Map map = update_info.second;
// Order is important here. We have to re-install the map to have access
// to meta-data like size during page promotion.
- object->set_map_word(MapWord::FromMap(map));
+ object.set_map_word(MapWord::FromMap(map));
LargePage* page = LargePage::FromHeapObject(object);
heap_->lo_space()->PromoteNewLargeObject(page);
}
@@ -403,7 +403,7 @@
is_compacting_ &&
heap()->incremental_marking()->atomic_marking_state()->IsBlack(target);
IterateAndScavengePromotedObjectsVisitor visitor(this, record_slots);
- target->IterateBodyFast(map, size, &visitor);
+ target.IterateBodyFast(map, size, &visitor);
}
void Scavenger::RememberPromotedEphemeron(EphemeronHashTable table, int entry) {
@@ -487,13 +487,13 @@
void ScavengerCollector::ClearYoungEphemerons(
EphemeronTableList* ephemeron_table_list) {
ephemeron_table_list->Iterate([this](EphemeronHashTable table) {
- for (int i = 0; i < table->Capacity(); i++) {
+ for (int i = 0; i < table.Capacity(); i++) {
// Keys in EphemeronHashTables must be heap objects.
HeapObjectSlot key_slot(
- table->RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(i)));
+ table.RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(i)));
HeapObject key = key_slot.ToHeapObject();
if (IsUnscavengedHeapObject(heap_, key)) {
- table->RemoveEntry(i);
+ table.RemoveEntry(i);
} else {
HeapObject forwarded = ForwardingAddress(key);
key_slot.StoreHeapObject(forwarded);
@@ -513,10 +513,10 @@
for (auto iti = indices.begin(); iti != indices.end();) {
// Keys in EphemeronHashTables must be heap objects.
HeapObjectSlot key_slot(
- table->RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(*iti)));
+ table.RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(*iti)));
HeapObject key = key_slot.ToHeapObject();
if (IsUnscavengedHeapObject(heap_, key)) {
- table->RemoveEntry(*iti);
+ table.RemoveEntry(*iti);
iti = indices.erase(iti);
} else {
HeapObject forwarded = ForwardingAddress(key);
diff --git a/src/heap/setup-heap-internal.cc b/src/heap/setup-heap-internal.cc
index fa3ceaa..3fe7e76 100644
--- a/src/heap/setup-heap-internal.cc
+++ b/src/heap/setup-heap-internal.cc
@@ -116,8 +116,8 @@
: AllocationType::kReadOnly);
if (!allocation.To(&result)) return allocation;
- result->set_map_after_allocation(ReadOnlyRoots(this).meta_map(),
- SKIP_WRITE_BARRIER);
+ result.set_map_after_allocation(ReadOnlyRoots(this).meta_map(),
+ SKIP_WRITE_BARRIER);
Map map = isolate()->factory()->InitializeMap(
Map::cast(result), instance_type, instance_size, elements_kind,
inobject_properties);
@@ -133,48 +133,48 @@
if (!allocation.To(&result)) return allocation;
// Map::cast cannot be used due to uninitialized map field.
Map map = Map::unchecked_cast(result);
- map->set_map_after_allocation(
+ map.set_map_after_allocation(
Map::unchecked_cast(isolate()->root(RootIndex::kMetaMap)),
SKIP_WRITE_BARRIER);
- map->set_instance_type(instance_type);
- map->set_instance_size(instance_size);
+ map.set_instance_type(instance_type);
+ map.set_instance_size(instance_size);
// Initialize to only containing tagged fields.
if (FLAG_unbox_double_fields) {
- map->set_layout_descriptor(LayoutDescriptor::FastPointerLayout());
+ map.set_layout_descriptor(LayoutDescriptor::FastPointerLayout());
}
// GetVisitorId requires a properly initialized LayoutDescriptor.
- map->set_visitor_id(Map::GetVisitorId(map));
- map->set_inobject_properties_start_or_constructor_function_index(0);
- DCHECK(!map->IsJSObjectMap());
- map->set_prototype_validity_cell(Smi::FromInt(Map::kPrototypeChainValid));
- map->SetInObjectUnusedPropertyFields(0);
- map->set_bit_field(0);
- map->set_bit_field2(0);
- DCHECK(!map->is_in_retained_map_list());
+ map.set_visitor_id(Map::GetVisitorId(map));
+ map.set_inobject_properties_start_or_constructor_function_index(0);
+ DCHECK(!map.IsJSObjectMap());
+ map.set_prototype_validity_cell(Smi::FromInt(Map::kPrototypeChainValid));
+ map.SetInObjectUnusedPropertyFields(0);
+ map.set_bit_field(0);
+ map.set_bit_field2(0);
+ DCHECK(!map.is_in_retained_map_list());
int bit_field3 = Map::EnumLengthBits::encode(kInvalidEnumCacheSentinel) |
Map::OwnsDescriptorsBit::encode(true) |
Map::ConstructionCounterBits::encode(Map::kNoSlackTracking);
- map->set_bit_field3(bit_field3);
- map->clear_padding();
- map->set_elements_kind(TERMINAL_FAST_ELEMENTS_KIND);
+ map.set_bit_field3(bit_field3);
+ map.clear_padding();
+ map.set_elements_kind(TERMINAL_FAST_ELEMENTS_KIND);
return map;
}
void Heap::FinalizePartialMap(Map map) {
ReadOnlyRoots roots(this);
- map->set_dependent_code(DependentCode::cast(roots.empty_weak_fixed_array()));
- map->set_raw_transitions(MaybeObject::FromSmi(Smi::zero()));
- map->SetInstanceDescriptors(isolate(), roots.empty_descriptor_array(), 0);
+ map.set_dependent_code(DependentCode::cast(roots.empty_weak_fixed_array()));
+ map.set_raw_transitions(MaybeObject::FromSmi(Smi::zero()));
+ map.SetInstanceDescriptors(isolate(), roots.empty_descriptor_array(), 0);
if (FLAG_unbox_double_fields) {
- map->set_layout_descriptor(LayoutDescriptor::FastPointerLayout());
+ map.set_layout_descriptor(LayoutDescriptor::FastPointerLayout());
}
- map->set_prototype(roots.null_value());
- map->set_constructor_or_backpointer(roots.null_value());
+ map.set_prototype(roots.null_value());
+ map.set_constructor_or_backpointer(roots.null_value());
}
AllocationResult Heap::Allocate(Map map, AllocationType allocation_type) {
- DCHECK(map->instance_type() != MAP_TYPE);
- int size = map->instance_size();
+ DCHECK(map.instance_type() != MAP_TYPE);
+ int size = map.instance_size();
HeapObject result;
AllocationResult allocation = AllocateRaw(size, allocation_type);
if (!allocation.To(&result)) return allocation;
@@ -182,7 +182,7 @@
WriteBarrierMode write_barrier_mode =
allocation_type == AllocationType::kYoung ? SKIP_WRITE_BARRIER
: UPDATE_WRITE_BARRIER;
- result->set_map_after_allocation(map, write_barrier_mode);
+ result.set_map_after_allocation(map, write_barrier_mode);
return result;
}
@@ -196,14 +196,14 @@
array_type == kExternalFloat64Array ? kDoubleAligned : kWordAligned);
if (!allocation.To(&object)) return allocation;
- object->set_map_after_allocation(
+ object.set_map_after_allocation(
ReadOnlyRoots(this).MapForFixedTypedArray(array_type),
SKIP_WRITE_BARRIER);
FixedTypedArrayBase elements = FixedTypedArrayBase::cast(object);
- elements->set_base_pointer(elements, SKIP_WRITE_BARRIER);
- elements->set_external_pointer(
+ elements.set_base_pointer(elements, SKIP_WRITE_BARRIER);
+ elements.set_external_pointer(
FixedTypedArrayBase::ExternalPointerPtrForOnHeapArray());
- elements->set_number_of_elements_onheap_only(0);
+ elements.set_number_of_elements_onheap_only(0);
return elements;
}
@@ -216,7 +216,7 @@
// Map::cast cannot be used due to uninitialized map field.
Map new_meta_map = Map::unchecked_cast(obj);
set_meta_map(new_meta_map);
- new_meta_map->set_map_after_allocation(new_meta_map);
+ new_meta_map.set_map_after_allocation(new_meta_map);
ReadOnlyRoots roots(this);
{ // Partial map allocation
@@ -251,8 +251,8 @@
AllocationResult alloc =
AllocateRaw(FixedArray::SizeFor(0), AllocationType::kReadOnly);
if (!alloc.To(&obj)) return false;
- obj->set_map_after_allocation(roots.fixed_array_map(), SKIP_WRITE_BARRIER);
- FixedArray::cast(obj)->set_length(0);
+ obj.set_map_after_allocation(roots.fixed_array_map(), SKIP_WRITE_BARRIER);
+ FixedArray::cast(obj).set_length(0);
}
set_empty_fixed_array(FixedArray::cast(obj));
@@ -260,9 +260,9 @@
AllocationResult alloc =
AllocateRaw(WeakFixedArray::SizeFor(0), AllocationType::kReadOnly);
if (!alloc.To(&obj)) return false;
- obj->set_map_after_allocation(roots.weak_fixed_array_map(),
- SKIP_WRITE_BARRIER);
- WeakFixedArray::cast(obj)->set_length(0);
+ obj.set_map_after_allocation(roots.weak_fixed_array_map(),
+ SKIP_WRITE_BARRIER);
+ WeakFixedArray::cast(obj).set_length(0);
}
set_empty_weak_fixed_array(WeakFixedArray::cast(obj));
@@ -270,10 +270,10 @@
AllocationResult allocation = AllocateRaw(WeakArrayList::SizeForCapacity(0),
AllocationType::kReadOnly);
if (!allocation.To(&obj)) return false;
- obj->set_map_after_allocation(roots.weak_array_list_map(),
- SKIP_WRITE_BARRIER);
- WeakArrayList::cast(obj)->set_capacity(0);
- WeakArrayList::cast(obj)->set_length(0);
+ obj.set_map_after_allocation(roots.weak_array_list_map(),
+ SKIP_WRITE_BARRIER);
+ WeakArrayList::cast(obj).set_capacity(0);
+ WeakArrayList::cast(obj).set_length(0);
}
set_empty_weak_array_list(WeakArrayList::cast(obj));
@@ -283,7 +283,7 @@
if (!allocation.To(&obj)) return false;
}
set_null_value(Oddball::cast(obj));
- Oddball::cast(obj)->set_kind(Oddball::kNull);
+ Oddball::cast(obj).set_kind(Oddball::kNull);
{
AllocationResult allocation =
@@ -291,7 +291,7 @@
if (!allocation.To(&obj)) return false;
}
set_undefined_value(Oddball::cast(obj));
- Oddball::cast(obj)->set_kind(Oddball::kUndefined);
+ Oddball::cast(obj).set_kind(Oddball::kUndefined);
DCHECK(!InYoungGeneration(roots.undefined_value()));
{
AllocationResult allocation =
@@ -299,7 +299,7 @@
if (!allocation.To(&obj)) return false;
}
set_the_hole_value(Oddball::cast(obj));
- Oddball::cast(obj)->set_kind(Oddball::kTheHole);
+ Oddball::cast(obj).set_kind(Oddball::kTheHole);
// Set preliminary exception sentinel value before actually initializing it.
set_exception(roots.null_value());
@@ -309,7 +309,7 @@
const StructTable& entry = struct_table[i];
Map map;
if (!AllocatePartialMap(entry.type, entry.size).To(&map)) return false;
- roots_table()[entry.index] = map->ptr();
+ roots_table()[entry.index] = map.ptr();
}
// Allocate the empty enum cache.
@@ -319,17 +319,17 @@
if (!allocation.To(&obj)) return false;
}
set_empty_enum_cache(EnumCache::cast(obj));
- EnumCache::cast(obj)->set_keys(roots.empty_fixed_array());
- EnumCache::cast(obj)->set_indices(roots.empty_fixed_array());
+ EnumCache::cast(obj).set_keys(roots.empty_fixed_array());
+ EnumCache::cast(obj).set_indices(roots.empty_fixed_array());
// Allocate the empty descriptor array.
{
int size = DescriptorArray::SizeFor(0);
if (!AllocateRaw(size, AllocationType::kReadOnly).To(&obj)) return false;
- obj->set_map_after_allocation(roots.descriptor_array_map(),
- SKIP_WRITE_BARRIER);
+ obj.set_map_after_allocation(roots.descriptor_array_map(),
+ SKIP_WRITE_BARRIER);
DescriptorArray array = DescriptorArray::cast(obj);
- array->Initialize(roots.empty_enum_cache(), roots.undefined_value(), 0, 0);
+ array.Initialize(roots.empty_enum_cache(), roots.undefined_value(), 0, 0);
}
set_empty_descriptor_array(DescriptorArray::cast(obj));
@@ -341,9 +341,9 @@
FinalizePartialMap(roots.fixed_cow_array_map());
FinalizePartialMap(roots.descriptor_array_map());
FinalizePartialMap(roots.undefined_map());
- roots.undefined_map()->set_is_undetectable(true);
+ roots.undefined_map().set_is_undetectable(true);
FinalizePartialMap(roots.null_map());
- roots.null_map()->set_is_undetectable(true);
+ roots.null_map().set_is_undetectable(true);
FinalizePartialMap(roots.the_hole_map());
for (unsigned i = 0; i < arraysize(struct_table); ++i) {
const StructTable& entry = struct_table[i];
@@ -365,7 +365,7 @@
constructor_function_index) \
{ \
ALLOCATE_MAP((instance_type), (size), field_name); \
- roots.field_name##_map()->SetConstructorFunctionIndex( \
+ roots.field_name##_map().SetConstructorFunctionIndex( \
(constructor_function_index)); \
}
@@ -397,11 +397,11 @@
const StringTypeTable& entry = string_type_table[i];
Map map;
if (!AllocateMap(entry.type, entry.size).To(&map)) return false;
- map->SetConstructorFunctionIndex(Context::STRING_FUNCTION_INDEX);
+ map.SetConstructorFunctionIndex(Context::STRING_FUNCTION_INDEX);
// Mark cons string maps as unstable, because their objects can change
// maps during GC.
- if (StringShape(entry.type).IsCons()) map->mark_unstable();
- roots_table()[entry.index] = map->ptr();
+ if (StringShape(entry.type).IsCons()) map.mark_unstable();
+ roots_table()[entry.index] = map.ptr();
}
{ // Create a separate external one byte string map for native sources.
@@ -410,12 +410,12 @@
AllocateMap(UNCACHED_EXTERNAL_ONE_BYTE_STRING_TYPE,
ExternalOneByteString::kUncachedSize);
if (!allocation.To(&map)) return false;
- map->SetConstructorFunctionIndex(Context::STRING_FUNCTION_INDEX);
+ map.SetConstructorFunctionIndex(Context::STRING_FUNCTION_INDEX);
set_native_source_string_map(map);
}
ALLOCATE_VARSIZE_MAP(FIXED_DOUBLE_ARRAY_TYPE, fixed_double_array)
- roots.fixed_double_array_map()->set_elements_kind(HOLEY_DOUBLE_ELEMENTS);
+ roots.fixed_double_array_map().set_elements_kind(HOLEY_DOUBLE_ELEMENTS);
ALLOCATE_VARSIZE_MAP(FEEDBACK_METADATA_TYPE, feedback_metadata)
ALLOCATE_VARSIZE_MAP(BYTE_ARRAY_TYPE, byte_array)
ALLOCATE_VARSIZE_MAP(BYTECODE_ARRAY_TYPE, bytecode_array)
@@ -442,8 +442,8 @@
Smi value = Smi::FromInt(Map::kPrototypeChainInvalid);
AllocationResult alloc = AllocateRaw(Cell::kSize, AllocationType::kOld);
if (!alloc.To(&obj)) return false;
- obj->set_map_after_allocation(roots.cell_map(), SKIP_WRITE_BARRIER);
- Cell::cast(obj)->set_value(value);
+ obj.set_map_after_allocation(roots.cell_map(), SKIP_WRITE_BARRIER);
+ Cell::cast(obj).set_value(value);
set_invalid_prototype_validity_cell(Cell::cast(obj));
}
@@ -455,10 +455,10 @@
// to be marked unstable because their objects can change maps.
ALLOCATE_MAP(
FEEDBACK_CELL_TYPE, FeedbackCell::kAlignedSize, no_closures_cell)
- roots.no_closures_cell_map()->mark_unstable();
+ roots.no_closures_cell_map().mark_unstable();
ALLOCATE_MAP(
FEEDBACK_CELL_TYPE, FeedbackCell::kAlignedSize, one_closure_cell)
- roots.one_closure_cell_map()->mark_unstable();
+ roots.one_closure_cell_map().mark_unstable();
ALLOCATE_MAP(
FEEDBACK_CELL_TYPE, FeedbackCell::kAlignedSize, many_closures_cell)
@@ -521,7 +521,7 @@
ALLOCATE_MAP(JS_MESSAGE_OBJECT_TYPE, JSMessageObject::kSize, message_object)
ALLOCATE_MAP(JS_OBJECT_TYPE, JSObject::kHeaderSize + kEmbedderDataSlotSize,
external)
- external_map()->set_is_extensible(false);
+ external_map().set_is_extensible(false);
#undef ALLOCATE_PRIMITIVE_MAP
#undef ALLOCATE_VARSIZE_MAP
#undef ALLOCATE_MAP
@@ -531,8 +531,8 @@
AllocationResult alloc =
AllocateRaw(FixedArray::SizeFor(0), AllocationType::kReadOnly);
if (!alloc.To(&obj)) return false;
- obj->set_map_after_allocation(roots.scope_info_map(), SKIP_WRITE_BARRIER);
- FixedArray::cast(obj)->set_length(0);
+ obj.set_map_after_allocation(roots.scope_info_map(), SKIP_WRITE_BARRIER);
+ FixedArray::cast(obj).set_length(0);
}
set_empty_scope_info(ScopeInfo::cast(obj));
@@ -541,12 +541,12 @@
AllocationResult alloc =
AllocateRaw(FixedArray::SizeFor(1), AllocationType::kReadOnly);
if (!alloc.To(&obj)) return false;
- obj->set_map_after_allocation(roots.object_boilerplate_description_map(),
- SKIP_WRITE_BARRIER);
+ obj.set_map_after_allocation(roots.object_boilerplate_description_map(),
+ SKIP_WRITE_BARRIER);
- FixedArray::cast(obj)->set_length(1);
- FixedArray::cast(obj)->set(ObjectBoilerplateDescription::kLiteralTypeOffset,
- Smi::kZero);
+ FixedArray::cast(obj).set_length(1);
+ FixedArray::cast(obj).set(ObjectBoilerplateDescription::kLiteralTypeOffset,
+ Smi::kZero);
}
set_empty_object_boilerplate_description(
ObjectBoilerplateDescription::cast(obj));
@@ -557,9 +557,9 @@
AllocationType::kReadOnly);
if (!alloc.To(&obj)) return false;
- ArrayBoilerplateDescription::cast(obj)->set_constant_elements(
+ ArrayBoilerplateDescription::cast(obj).set_constant_elements(
roots.empty_fixed_array());
- ArrayBoilerplateDescription::cast(obj)->set_elements_kind(
+ ArrayBoilerplateDescription::cast(obj).set_elements_kind(
ElementsKind::PACKED_SMI_ELEMENTS);
}
set_empty_array_boilerplate_description(
@@ -571,7 +571,7 @@
if (!allocation.To(&obj)) return false;
}
set_true_value(Oddball::cast(obj));
- Oddball::cast(obj)->set_kind(Oddball::kTrue);
+ Oddball::cast(obj).set_kind(Oddball::kTrue);
{
AllocationResult allocation =
@@ -579,14 +579,14 @@
if (!allocation.To(&obj)) return false;
}
set_false_value(Oddball::cast(obj));
- Oddball::cast(obj)->set_kind(Oddball::kFalse);
+ Oddball::cast(obj).set_kind(Oddball::kFalse);
// Empty arrays.
{
if (!AllocateRaw(ByteArray::SizeFor(0), AllocationType::kReadOnly).To(&obj))
return false;
- obj->set_map_after_allocation(roots.byte_array_map(), SKIP_WRITE_BARRIER);
- ByteArray::cast(obj)->set_length(0);
+ obj.set_map_after_allocation(roots.byte_array_map(), SKIP_WRITE_BARRIER);
+ ByteArray::cast(obj).set_length(0);
set_empty_byte_array(ByteArray::cast(obj));
}
@@ -595,9 +595,9 @@
.To(&obj)) {
return false;
}
- obj->set_map_after_allocation(roots.property_array_map(),
- SKIP_WRITE_BARRIER);
- PropertyArray::cast(obj)->initialize_length(0);
+ obj.set_map_after_allocation(roots.property_array_map(),
+ SKIP_WRITE_BARRIER);
+ PropertyArray::cast(obj).initialize_length(0);
set_empty_property_array(PropertyArray::cast(obj));
}
@@ -606,9 +606,9 @@
.To(&obj)) {
return false;
}
- obj->set_map_after_allocation(roots.closure_feedback_cell_array_map(),
- SKIP_WRITE_BARRIER);
- FixedArray::cast(obj)->set_length(0);
+ obj.set_map_after_allocation(roots.closure_feedback_cell_array_map(),
+ SKIP_WRITE_BARRIER);
+ FixedArray::cast(obj).set_length(0);
set_empty_closure_feedback_cell_array(ClosureFeedbackCellArray::cast(obj));
}
@@ -626,7 +626,7 @@
DCHECK(!InYoungGeneration(roots.empty_fixed_array()));
- roots.bigint_map()->SetConstructorFunctionIndex(
+ roots.bigint_map().SetConstructorFunctionIndex(
Context::BIGINT_FUNCTION_INDEX);
return true;
@@ -653,7 +653,7 @@
// The -0 value must be set before NewNumber works.
set_minus_zero_value(
*factory->NewHeapNumber(-0.0, AllocationType::kReadOnly));
- DCHECK(std::signbit(roots.minus_zero_value()->Number()));
+ DCHECK(std::signbit(roots.minus_zero_value().Number()));
set_nan_value(*factory->NewHeapNumber(
std::numeric_limits<double>::quiet_NaN(), AllocationType::kReadOnly));
@@ -982,10 +982,10 @@
SetterType) \
AccessorInfo::cast( \
Object(roots_table()[RootIndex::k##AccessorName##Accessor])) \
- ->set_getter_side_effect_type(SideEffectType::GetterType); \
+ .set_getter_side_effect_type(SideEffectType::GetterType); \
AccessorInfo::cast( \
Object(roots_table()[RootIndex::k##AccessorName##Accessor])) \
- ->set_setter_side_effect_type(SideEffectType::SetterType);
+ .set_setter_side_effect_type(SideEffectType::SetterType);
ACCESSOR_INFO_LIST_GENERATOR(INIT_SIDE_EFFECT_FLAG, /* not used */)
#undef INIT_SIDE_EFFECT_FLAG
}
diff --git a/src/heap/spaces-inl.h b/src/heap/spaces-inl.h
index 20d4707..308d4f5 100644
--- a/src/heap/spaces-inl.h
+++ b/src/heap/spaces-inl.h
@@ -54,8 +54,8 @@
if (current_ == limit_) return HeapObject();
}
HeapObject object = HeapObject::FromAddress(current_);
- current_ += object->Size();
- if (!object->IsFiller()) {
+ current_ += object.Size();
+ if (!object.IsFiller()) {
return object;
}
}
@@ -80,11 +80,11 @@
continue;
}
HeapObject obj = HeapObject::FromAddress(cur_addr_);
- const int obj_size = obj->Size();
+ const int obj_size = obj.Size();
cur_addr_ += obj_size;
DCHECK_LE(cur_addr_, cur_end_);
- if (!obj->IsFiller()) {
- if (obj->IsCode()) {
+ if (!obj.IsFiller()) {
+ if (obj.IsCode()) {
DCHECK_EQ(space_, space_->heap()->code_space());
DCHECK_CODEOBJECT_SIZE(obj_size, space_);
} else {
@@ -128,7 +128,7 @@
}
bool SemiSpace::Contains(Object o) {
- return o->IsHeapObject() && Contains(HeapObject::cast(o));
+ return o.IsHeapObject() && Contains(HeapObject::cast(o));
}
bool SemiSpace::ContainsSlow(Address a) {
@@ -142,7 +142,7 @@
// NewSpace
bool NewSpace::Contains(Object o) {
- return o->IsHeapObject() && Contains(HeapObject::cast(o));
+ return o.IsHeapObject() && Contains(HeapObject::cast(o));
}
bool NewSpace::Contains(HeapObject o) {
@@ -195,7 +195,7 @@
bool PagedSpace::TryFreeLast(HeapObject object, int object_size) {
if (allocation_info_.top() != kNullAddress) {
- const Address object_address = object->address();
+ const Address object_address = object.address();
if ((allocation_info_.top() - object_size) == object_address) {
allocation_info_.set_top(object_address);
return true;
@@ -382,7 +382,7 @@
}
HeapObject object = AllocateLinearly(size_in_bytes);
DCHECK(!object.is_null());
- MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), size_in_bytes);
+ MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object.address(), size_in_bytes);
return object;
}
@@ -404,7 +404,7 @@
object = TryAllocateLinearlyAligned(&allocation_size, alignment);
DCHECK(!object.is_null());
}
- MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), size_in_bytes);
+ MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object.address(), size_in_bytes);
return object;
}
@@ -434,7 +434,7 @@
HeapObject heap_obj;
if (!result.IsRetry() && result.To(&heap_obj) && !is_local()) {
AllocationStep(static_cast<int>(size_in_bytes + bytes_since_last),
- heap_obj->address(), size_in_bytes);
+ heap_obj.address(), size_in_bytes);
StartNextInlineAllocationStep();
DCHECK_IMPLIES(
heap()->incremental_marking()->black_allocation(),
@@ -474,7 +474,7 @@
obj = heap()->PrecedeWithFiller(obj, filler_size);
}
- MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes);
+ MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj.address(), size_in_bytes);
return obj;
}
@@ -495,7 +495,7 @@
allocation_info_.set_top(top + size_in_bytes);
DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);
- MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes);
+ MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj.address(), size_in_bytes);
return obj;
}
@@ -538,7 +538,7 @@
bool ok = result.To(&obj);
USE(ok);
DCHECK(ok);
- Address top = HeapObject::cast(obj)->address();
+ Address top = HeapObject::cast(obj).address();
return LocalAllocationBuffer(heap, LinearAllocationArea(top, top + size));
}
@@ -554,7 +554,7 @@
bool LocalAllocationBuffer::TryFreeLast(HeapObject object, int object_size) {
if (IsValid()) {
- const Address object_address = object->address();
+ const Address object_address = object.address();
if ((allocation_info_.top() - object_size) == object_address) {
allocation_info_.set_top(object_address);
return true;
diff --git a/src/heap/spaces.cc b/src/heap/spaces.cc
index 3942388..64a59a2 100644
--- a/src/heap/spaces.cc
+++ b/src/heap/spaces.cc
@@ -1051,11 +1051,11 @@
// Skips filler starting from the given filler until the end address.
// Returns the first address after the skipped fillers.
Address SkipFillers(HeapObject filler, Address end) {
- Address addr = filler->address();
+ Address addr = filler.address();
while (addr < end) {
filler = HeapObject::FromAddress(addr);
- CHECK(filler->IsFiller());
- addr = filler->address() + filler->Size();
+ CHECK(filler.IsFiller());
+ addr = filler.address() + filler.Size();
}
return addr;
}
@@ -1071,14 +1071,14 @@
// Shrink pages to high water mark. The water mark points either to a filler
// or the area_end.
HeapObject filler = HeapObject::FromAddress(HighWaterMark());
- if (filler->address() == area_end()) return 0;
- CHECK(filler->IsFiller());
+ if (filler.address() == area_end()) return 0;
+ CHECK(filler.IsFiller());
// Ensure that no objects were allocated in [filler, area_end) region.
DCHECK_EQ(area_end(), SkipFillers(filler, area_end()));
// Ensure that no objects will be allocated on this page.
DCHECK_EQ(0u, AvailableInFreeList());
- size_t unused = RoundDown(static_cast<size_t>(area_end() - filler->address()),
+ size_t unused = RoundDown(static_cast<size_t>(area_end() - filler.address()),
MemoryAllocator::GetCommitPageSize());
if (unused > 0) {
DCHECK_EQ(0u, unused % MemoryAllocator::GetCommitPageSize());
@@ -1089,14 +1089,14 @@
reinterpret_cast<void*>(area_end() - unused));
}
heap()->CreateFillerObjectAt(
- filler->address(),
- static_cast<int>(area_end() - filler->address() - unused),
+ filler.address(),
+ static_cast<int>(area_end() - filler.address() - unused),
ClearRecordedSlots::kNo);
heap()->memory_allocator()->PartialFreeMemory(
this, address() + size() - unused, unused, area_end() - unused);
- if (filler->address() != area_end()) {
- CHECK(filler->IsFiller());
- CHECK_EQ(filler->address() + filler->Size(), area_end());
+ if (filler.address() != area_end()) {
+ CHECK(filler.IsFiller());
+ CHECK_EQ(filler.address() + filler.Size(), area_end());
}
}
return unused;
@@ -1503,7 +1503,7 @@
auto it = invalidated_slots()->find(old_start);
if (it != invalidated_slots()->end()) {
int old_size = it->second;
- int delta = static_cast<int>(new_start->address() - old_start->address());
+ int delta = static_cast<int>(new_start.address() - old_start.address());
invalidated_slots()->erase(it);
(*invalidated_slots())[new_start] = old_size - delta;
}
@@ -2013,8 +2013,8 @@
Page* page = Page::FromHeapObject(new_node);
IncreaseAllocatedBytes(new_node_size, page);
- Address start = new_node->address();
- Address end = new_node->address() + new_node_size;
+ Address start = new_node.address();
+ Address end = new_node.address() + new_node_size;
Address limit = ComputeLimit(start, end, size_in_bytes);
DCHECK_LE(limit, end);
DCHECK_LE(size_in_bytes, limit - start);
@@ -2060,12 +2060,12 @@
Address top = page->area_end();
for (HeapObject object = it.Next(); !object.is_null(); object = it.Next()) {
- CHECK(end_of_previous_object <= object->address());
+ CHECK(end_of_previous_object <= object.address());
// The first word should be a map, and we expect all map pointers to
// be in map space.
- Map map = object->map();
- CHECK(map->IsMap());
+ Map map = object.map();
+ CHECK(map.IsMap());
CHECK(isolate->heap()->map_space()->Contains(map) ||
ReadOnlyHeap::Contains(map));
@@ -2073,26 +2073,26 @@
VerifyObject(object);
// The object itself should look OK.
- object->ObjectVerify(isolate);
+ object.ObjectVerify(isolate);
if (!FLAG_verify_heap_skip_remembered_set) {
isolate->heap()->VerifyRememberedSetFor(object);
}
// All the interior pointers should be contained in the heap.
- int size = object->Size();
- object->IterateBody(map, size, visitor);
- CHECK(object->address() + size <= top);
- end_of_previous_object = object->address() + size;
+ int size = object.Size();
+ object.IterateBody(map, size, visitor);
+ CHECK(object.address() + size <= top);
+ end_of_previous_object = object.address() + size;
- if (object->IsExternalString()) {
+ if (object.IsExternalString()) {
ExternalString external_string = ExternalString::cast(object);
- size_t size = external_string->ExternalPayloadSize();
+ size_t size = external_string.ExternalPayloadSize();
external_page_bytes[ExternalBackingStoreType::kExternalString] += size;
- } else if (object->IsJSArrayBuffer()) {
+ } else if (object.IsJSArrayBuffer()) {
JSArrayBuffer array_buffer = JSArrayBuffer::cast(object);
if (ArrayBufferTracker::IsTracked(array_buffer)) {
- size_t size = array_buffer->byte_length();
+ size_t size = array_buffer.byte_length();
external_page_bytes[ExternalBackingStoreType::kArrayBuffer] += size;
}
}
@@ -2123,7 +2123,7 @@
for (HeapObject object = it.Next(); !object.is_null(); object = it.Next()) {
// All the interior pointers should be contained in the heap.
if (marking_state->IsBlack(object)) {
- black_size += object->Size();
+ black_size += object.Size();
}
}
CHECK_LE(black_size, marking_state->live_bytes(page));
@@ -2141,8 +2141,8 @@
HeapObjectIterator it(page);
size_t real_allocated = 0;
for (HeapObject object = it.Next(); !object.is_null(); object = it.Next()) {
- if (!object->IsFiller()) {
- real_allocated += object->Size();
+ if (!object.IsFiller()) {
+ real_allocated += object.Size();
}
}
total_allocated += page->allocated_bytes();
@@ -2558,31 +2558,31 @@
// The first word should be a map, and we expect all map pointers to
// be in map space or read-only space.
- Map map = object->map();
- CHECK(map->IsMap());
+ Map map = object.map();
+ CHECK(map.IsMap());
CHECK(heap()->map_space()->Contains(map) ||
heap()->read_only_space()->Contains(map));
// The object should not be code or a map.
- CHECK(!object->IsMap());
- CHECK(!object->IsAbstractCode());
+ CHECK(!object.IsMap());
+ CHECK(!object.IsAbstractCode());
// The object itself should look OK.
- object->ObjectVerify(isolate);
+ object.ObjectVerify(isolate);
// All the interior pointers should be contained in the heap.
VerifyPointersVisitor visitor(heap());
- int size = object->Size();
- object->IterateBody(map, size, &visitor);
+ int size = object.Size();
+ object.IterateBody(map, size, &visitor);
- if (object->IsExternalString()) {
+ if (object.IsExternalString()) {
ExternalString external_string = ExternalString::cast(object);
- size_t size = external_string->ExternalPayloadSize();
+ size_t size = external_string.ExternalPayloadSize();
external_space_bytes[ExternalBackingStoreType::kExternalString] += size;
- } else if (object->IsJSArrayBuffer()) {
+ } else if (object.IsJSArrayBuffer()) {
JSArrayBuffer array_buffer = JSArrayBuffer::cast(object);
if (ArrayBufferTracker::IsTracked(array_buffer)) {
- size_t size = array_buffer->byte_length();
+ size_t size = array_buffer.byte_length();
external_space_bytes[ExternalBackingStoreType::kArrayBuffer] += size;
}
}
@@ -2931,12 +2931,12 @@
size_t* node_size) {
DCHECK(page()->CanAllocate());
FreeSpace node = top();
- if (node.is_null() || static_cast<size_t>(node->Size()) < minimum_size) {
+ if (node.is_null() || static_cast<size_t>(node.Size()) < minimum_size) {
*node_size = 0;
return FreeSpace();
}
- set_top(node->next());
- *node_size = node->Size();
+ set_top(node.next());
+ *node_size = node.Size();
available_ -= *node_size;
return node;
}
@@ -2946,20 +2946,20 @@
DCHECK(page()->CanAllocate());
FreeSpace prev_non_evac_node;
for (FreeSpace cur_node = top(); !cur_node.is_null();
- cur_node = cur_node->next()) {
- size_t size = cur_node->size();
+ cur_node = cur_node.next()) {
+ size_t size = cur_node.size();
if (size >= minimum_size) {
DCHECK_GE(available_, size);
available_ -= size;
if (cur_node == top()) {
- set_top(cur_node->next());
+ set_top(cur_node.next());
}
if (!prev_non_evac_node.is_null()) {
MemoryChunk* chunk = MemoryChunk::FromHeapObject(prev_non_evac_node);
if (chunk->owner()->identity() == CODE_SPACE) {
chunk->heap()->UnprotectAndRegisterMemoryChunk(chunk);
}
- prev_non_evac_node->set_next(cur_node->next());
+ prev_non_evac_node.set_next(cur_node.next());
}
*node_size = size;
return cur_node;
@@ -2973,7 +2973,7 @@
void FreeListCategory::Free(Address start, size_t size_in_bytes,
FreeMode mode) {
FreeSpace free_space = FreeSpace::cast(HeapObject::FromAddress(start));
- free_space->set_next(top());
+ free_space.set_next(top());
set_top(free_space);
available_ += size_in_bytes;
if ((mode == kLinkCategory) && (prev() == nullptr) && (next() == nullptr)) {
@@ -2995,7 +2995,7 @@
DCHECK(map_location.contains_value(
ReadOnlyRoots(heap).free_space_map().ptr()));
}
- n = n->next();
+ n = n.next();
}
}
@@ -3201,10 +3201,10 @@
while (!cur.is_null()) {
// We can't use "cur->map()" here because both cur's map and the
// root can be null during bootstrapping.
- DCHECK(cur->map_slot().contains_value(
+ DCHECK(cur.map_slot().contains_value(
page()->heap()->isolate()->root(RootIndex::kFreeSpaceMap).ptr()));
- sum += cur->relaxed_read_size();
- cur = cur->next();
+ sum += cur.relaxed_read_size();
+ cur = cur.next();
}
return sum;
}
@@ -3214,7 +3214,7 @@
FreeSpace cur = top();
while (!cur.is_null()) {
length++;
- cur = cur->next();
+ cur = cur.next();
if (length == kVeryLongFreeList) return length;
}
return length;
@@ -3362,7 +3362,7 @@
// MapSpace implementation
#ifdef VERIFY_HEAP
-void MapSpace::VerifyObject(HeapObject object) { CHECK(object->IsMap()); }
+void MapSpace::VerifyObject(HeapObject object) { CHECK(object.IsMap()); }
#endif
ReadOnlySpace::ReadOnlySpace(Heap* heap)
@@ -3411,8 +3411,8 @@
if (start < end - size) {
// A region at the high watermark is already in free list.
HeapObject filler = HeapObject::FromAddress(start);
- CHECK(filler->IsFiller());
- start += filler->Size();
+ CHECK(filler.IsFiller());
+ start += filler.Size();
}
CHECK_EQ(size, static_cast<int>(end - start));
heap()->CreateFillerObjectAt(start, size, ClearRecordedSlots::kNo);
@@ -3424,10 +3424,10 @@
ReadOnlyHeapIterator iterator(this);
for (HeapObject o = iterator.Next(); !o.is_null(); o = iterator.Next()) {
- if (o->IsSeqOneByteString()) {
- SeqOneByteString::cast(o)->clear_padding();
- } else if (o->IsSeqTwoByteString()) {
- SeqTwoByteString::cast(o)->clear_padding();
+ if (o.IsSeqOneByteString()) {
+ SeqOneByteString::cast(o).clear_padding();
+ } else if (o.IsSeqTwoByteString()) {
+ SeqTwoByteString::cast(o).clear_padding();
}
}
is_string_padding_cleared_ = true;
@@ -3542,7 +3542,7 @@
heap()->incremental_marking()->marking_state()->IsBlack(object));
page->InitializationMemoryFence();
heap()->NotifyOldGenerationExpansion();
- AllocationStep(object_size, object->address(), object_size);
+ AllocationStep(object_size, object.address(), object_size);
return object;
}
@@ -3557,7 +3557,7 @@
HeapObject object = page->GetObject();
- heap()->CreateFillerObjectAt(object->address(), object_size,
+ heap()->CreateFillerObjectAt(object.address(), object_size,
ClearRecordedSlots::kNo);
return page;
}
@@ -3618,7 +3618,7 @@
DCHECK(page->IsLargePage());
DCHECK(page->IsFlagSet(MemoryChunk::FROM_PAGE));
DCHECK(!page->IsFlagSet(MemoryChunk::TO_PAGE));
- size_t object_size = static_cast<size_t>(page->GetObject()->Size());
+ size_t object_size = static_cast<size_t>(page->GetObject().Size());
static_cast<LargeObjectSpace*>(page->owner())->RemovePage(page, object_size);
AddPage(page, object_size);
page->ClearFlag(MemoryChunk::FROM_PAGE);
@@ -3653,11 +3653,11 @@
LargePage* next_current = current->next_page();
HeapObject object = current->GetObject();
DCHECK(!marking_state->IsGrey(object));
- size_t size = static_cast<size_t>(object->Size());
+ size_t size = static_cast<size_t>(object.Size());
if (marking_state->IsBlack(object)) {
Address free_start;
surviving_object_size += size;
- if ((free_start = current->GetAddressToShrink(object->address(), size)) !=
+ if ((free_start = current->GetAddressToShrink(object.address(), size)) !=
0) {
DCHECK(!current->IsFlagSet(Page::IS_EXECUTABLE));
current->ClearOutOfLiveRangeSlots(free_start);
@@ -3665,7 +3665,7 @@
current->size() - (free_start - current->address());
heap()->memory_allocator()->PartialFreeMemory(
current, free_start, bytes_to_free,
- current->area_start() + object->Size());
+ current->area_start() + object.Size());
size_ -= bytes_to_free;
AccountUncommitted(bytes_to_free);
}
@@ -3684,7 +3684,7 @@
bool owned = (chunk->owner() == this);
- SLOW_DCHECK(!owned || ContainsSlow(object->address()));
+ SLOW_DCHECK(!owned || ContainsSlow(object.address()));
return owned;
}
@@ -3716,60 +3716,59 @@
// object area start.
HeapObject object = chunk->GetObject();
Page* page = Page::FromHeapObject(object);
- CHECK(object->address() == page->area_start());
+ CHECK(object.address() == page->area_start());
// The first word should be a map, and we expect all map pointers to be
// in map space or read-only space.
- Map map = object->map();
- CHECK(map->IsMap());
+ Map map = object.map();
+ CHECK(map.IsMap());
CHECK(heap()->map_space()->Contains(map) ||
heap()->read_only_space()->Contains(map));
// We have only the following types in the large object space:
- if (!(object->IsAbstractCode() || object->IsSeqString() ||
- object->IsExternalString() || object->IsThinString() ||
- object->IsFixedArray() || object->IsFixedDoubleArray() ||
- object->IsWeakFixedArray() || object->IsWeakArrayList() ||
- object->IsPropertyArray() || object->IsByteArray() ||
- object->IsFeedbackVector() || object->IsBigInt() ||
- object->IsFreeSpace() || object->IsFeedbackMetadata() ||
- object->IsContext() ||
- object->IsUncompiledDataWithoutPreparseData() ||
- object->IsPreparseData()) &&
+ if (!(object.IsAbstractCode() || object.IsSeqString() ||
+ object.IsExternalString() || object.IsThinString() ||
+ object.IsFixedArray() || object.IsFixedDoubleArray() ||
+ object.IsWeakFixedArray() || object.IsWeakArrayList() ||
+ object.IsPropertyArray() || object.IsByteArray() ||
+ object.IsFeedbackVector() || object.IsBigInt() ||
+ object.IsFreeSpace() || object.IsFeedbackMetadata() ||
+ object.IsContext() || object.IsUncompiledDataWithoutPreparseData() ||
+ object.IsPreparseData()) &&
!FLAG_young_generation_large_objects) {
FATAL("Found invalid Object (instance_type=%i) in large object space.",
- object->map()->instance_type());
+ object.map().instance_type());
}
// The object itself should look OK.
- object->ObjectVerify(isolate);
+ object.ObjectVerify(isolate);
if (!FLAG_verify_heap_skip_remembered_set) {
heap()->VerifyRememberedSetFor(object);
}
// Byte arrays and strings don't have interior pointers.
- if (object->IsAbstractCode()) {
+ if (object.IsAbstractCode()) {
VerifyPointersVisitor code_visitor(heap());
- object->IterateBody(map, object->Size(), &code_visitor);
- } else if (object->IsFixedArray()) {
+ object.IterateBody(map, object.Size(), &code_visitor);
+ } else if (object.IsFixedArray()) {
FixedArray array = FixedArray::cast(object);
- for (int j = 0; j < array->length(); j++) {
- Object element = array->get(j);
- if (element->IsHeapObject()) {
+ for (int j = 0; j < array.length(); j++) {
+ Object element = array.get(j);
+ if (element.IsHeapObject()) {
HeapObject element_object = HeapObject::cast(element);
CHECK(IsValidHeapObject(heap(), element_object));
- CHECK(element_object->map()->IsMap());
+ CHECK(element_object.map().IsMap());
}
}
- } else if (object->IsPropertyArray()) {
+ } else if (object.IsPropertyArray()) {
PropertyArray array = PropertyArray::cast(object);
- for (int j = 0; j < array->length(); j++) {
- Object property = array->get(j);
- if (property->IsHeapObject()) {
+ for (int j = 0; j < array.length(); j++) {
+ Object property = array.get(j);
+ if (property.IsHeapObject()) {
HeapObject property_object = HeapObject::cast(property);
CHECK(heap()->Contains(property_object));
- CHECK(property_object->map()->IsMap());
+ CHECK(property_object.map().IsMap());
}
}
}
@@ -3790,7 +3789,7 @@
StdoutStream os;
LargeObjectIterator it(this);
for (HeapObject obj = it.Next(); !obj.is_null(); obj = it.Next()) {
- obj->Print(os);
+ obj.Print(os);
}
}
@@ -3807,9 +3806,9 @@
heap()->incremental_marking()->marking_state()->IsBlackOrGrey(object);
PrintF(" %c ", (is_marked ? '!' : ' ')); // Indent a little.
if (is_marked) {
- mark_size += object->Size();
+ mark_size += object.Size();
}
- object->ShortPrint();
+ object.ShortPrint();
PrintF("\n");
}
printf(" --------------------------------------\n");
@@ -3845,7 +3844,7 @@
HeapObject result = page->GetObject();
page->SetYoungGenerationPageFlags(heap()->incremental_marking()->IsMarking());
page->SetFlag(MemoryChunk::TO_PAGE);
- pending_object_.store(result->address(), std::memory_order_relaxed);
+ pending_object_.store(result.address(), std::memory_order_relaxed);
#ifdef ENABLE_MINOR_MC
if (FLAG_minor_mc) {
page->AllocateYoungGenerationBitmap();
@@ -3858,7 +3857,7 @@
page->InitializationMemoryFence();
DCHECK(page->IsLargePage());
DCHECK_EQ(page->owner()->identity(), NEW_LO_SPACE);
- AllocationStep(object_size, result->address(), object_size);
+ AllocationStep(object_size, result.address(), object_size);
return result;
}
@@ -3881,7 +3880,7 @@
LargePage* page = *it;
it++;
HeapObject object = page->GetObject();
- size_t size = static_cast<size_t>(object->Size());
+ size_t size = static_cast<size_t>(object.Size());
if (is_dead(object)) {
freed_pages = true;
RemovePage(page, size);
diff --git a/src/heap/sweeper.cc b/src/heap/sweeper.cc
index 7f5a213..4a5aa70 100644
--- a/src/heap/sweeper.cc
+++ b/src/heap/sweeper.cc
@@ -281,10 +281,9 @@
LiveObjectRange<kBlackObjects>(p, marking_state_->bitmap(p))) {
HeapObject const object = object_and_size.first;
if (code_object_registry)
- code_object_registry->RegisterAlreadyExistingCodeObject(
- object->address());
+ code_object_registry->RegisterAlreadyExistingCodeObject(object.address());
DCHECK(marking_state_->IsBlack(object));
- Address free_end = object->address();
+ Address free_end = object.address();
if (free_end != free_start) {
CHECK_GT(free_end, free_start);
size_t size = static_cast<size_t>(free_end - free_start);
@@ -311,8 +310,8 @@
static_cast<uint32_t>(free_end - p->address())));
}
}
- Map map = object->synchronized_map();
- int size = object->SizeFromMap(map);
+ Map map = object.synchronized_map();
+ int size = object.SizeFromMap(map);
live_bytes += size;
free_start = free_end + size;
}
diff --git a/src/ia32/assembler-ia32-inl.h b/src/ia32/assembler-ia32-inl.h
index 4517aa2..2a2844f 100644
--- a/src/ia32/assembler-ia32-inl.h
+++ b/src/ia32/assembler-ia32-inl.h
@@ -107,7 +107,7 @@
WriteBarrierMode write_barrier_mode,
ICacheFlushMode icache_flush_mode) {
DCHECK(IsCodeTarget(rmode_) || rmode_ == FULL_EMBEDDED_OBJECT);
- WriteUnalignedValue(pc_, target->ptr());
+ WriteUnalignedValue(pc_, target.ptr());
if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
FlushInstructionCache(pc_, sizeof(Address));
}
@@ -257,7 +257,7 @@
void Assembler::deserialization_set_special_target_at(
Address instruction_payload, Code code, Address target) {
set_target_address_at(instruction_payload,
- !code.is_null() ? code->constant_pool() : kNullAddress,
+ !code.is_null() ? code.constant_pool() : kNullAddress,
target);
}
diff --git a/src/ic/call-optimization.cc b/src/ic/call-optimization.cc
index f1acbe1..09c38bb 100644
--- a/src/ic/call-optimization.cc
+++ b/src/ic/call-optimization.cc
@@ -22,15 +22,15 @@
Context CallOptimization::GetAccessorContext(Map holder_map) const {
if (is_constant_call()) {
- return constant_function_->context()->native_context();
+ return constant_function_->context().native_context();
}
- JSFunction constructor = JSFunction::cast(holder_map->GetConstructor());
- return constructor->context()->native_context();
+ JSFunction constructor = JSFunction::cast(holder_map.GetConstructor());
+ return constructor.context().native_context();
}
bool CallOptimization::IsCrossContextLazyAccessorPair(Context native_context,
Map holder_map) const {
- DCHECK(native_context->IsNativeContext());
+ DCHECK(native_context.IsNativeContext());
if (is_constant_call()) return false;
return native_context != GetAccessorContext(holder_map);
}
@@ -49,7 +49,7 @@
}
if (object_map->has_hidden_prototype()) {
JSObject raw_prototype = JSObject::cast(object_map->prototype());
- Handle<JSObject> prototype(raw_prototype, raw_prototype->GetIsolate());
+ Handle<JSObject> prototype(raw_prototype, raw_prototype.GetIsolate());
object_map = handle(prototype->map(), prototype->GetIsolate());
if (expected_receiver_type_->IsTemplateFor(*object_map)) {
*holder_lookup = kHolderFound;
@@ -65,7 +65,7 @@
Handle<JSObject> holder) const {
DCHECK(is_simple_api_call());
if (!receiver->IsHeapObject()) return false;
- Handle<Map> map(HeapObject::cast(*receiver)->map(), holder->GetIsolate());
+ Handle<Map> map(HeapObject::cast(*receiver).map(), holder->GetIsolate());
return IsCompatibleReceiverMap(map, holder);
}
@@ -85,8 +85,8 @@
{
JSObject object = *api_holder;
while (true) {
- Object prototype = object->map()->prototype();
- if (!prototype->IsJSObject()) return false;
+ Object prototype = object.map().prototype();
+ if (!prototype.IsJSObject()) return false;
if (prototype == *holder) return true;
object = JSObject::cast(prototype);
}
@@ -98,11 +98,11 @@
void CallOptimization::Initialize(
Isolate* isolate, Handle<FunctionTemplateInfo> function_template_info) {
- if (function_template_info->call_code()->IsUndefined(isolate)) return;
+ if (function_template_info->call_code().IsUndefined(isolate)) return;
api_call_info_ = handle(
CallHandlerInfo::cast(function_template_info->call_code()), isolate);
- if (!function_template_info->signature()->IsUndefined(isolate)) {
+ if (!function_template_info->signature().IsUndefined(isolate)) {
expected_receiver_type_ =
handle(FunctionTemplateInfo::cast(function_template_info->signature()),
isolate);
@@ -120,15 +120,15 @@
void CallOptimization::AnalyzePossibleApiFunction(Isolate* isolate,
Handle<JSFunction> function) {
- if (!function->shared()->IsApiFunction()) return;
- Handle<FunctionTemplateInfo> info(function->shared()->get_api_func_data(),
+ if (!function->shared().IsApiFunction()) return;
+ Handle<FunctionTemplateInfo> info(function->shared().get_api_func_data(),
isolate);
// Require a C++ callback.
- if (info->call_code()->IsUndefined(isolate)) return;
+ if (info->call_code().IsUndefined(isolate)) return;
api_call_info_ = handle(CallHandlerInfo::cast(info->call_code()), isolate);
- if (!info->signature()->IsUndefined(isolate)) {
+ if (!info->signature().IsUndefined(isolate)) {
expected_receiver_type_ =
handle(FunctionTemplateInfo::cast(info->signature()), isolate);
}
diff --git a/src/ic/handler-configuration-inl.h b/src/ic/handler-configuration-inl.h
index 3e7a446..376cb8e 100644
--- a/src/ic/handler-configuration-inl.h
+++ b/src/ic/handler-configuration-inl.h
@@ -25,7 +25,7 @@
// Decodes kind from Smi-handler.
LoadHandler::Kind LoadHandler::GetHandlerKind(Smi smi_handler) {
- return KindBits::decode(smi_handler->value());
+ return KindBits::decode(smi_handler.value());
}
Handle<Smi> LoadHandler::LoadNormal(Isolate* isolate) {
diff --git a/src/ic/handler-configuration.cc b/src/ic/handler-configuration.cc
index a060785..0b8ebd2 100644
--- a/src/ic/handler-configuration.cc
+++ b/src/ic/handler-configuration.cc
@@ -202,7 +202,7 @@
Handle<DescriptorArray> descriptors(transition_map->instance_descriptors(),
isolate);
PropertyDetails details = descriptors->GetDetails(descriptor);
- if (descriptors->GetKey(descriptor)->IsPrivate()) {
+ if (descriptors->GetKey(descriptor).IsPrivate()) {
DCHECK_EQ(DONT_ENUM, details.attributes());
} else {
DCHECK_EQ(NONE, details.attributes());
diff --git a/src/ic/ic-inl.h b/src/ic/ic-inl.h
index 846e3c6..38b1561 100644
--- a/src/ic/ic-inl.h
+++ b/src/ic/ic-inl.h
@@ -37,7 +37,7 @@
if (receiver->IsSmi()) {
receiver_map_ = isolate_->factory()->heap_number_map();
} else {
- receiver_map_ = handle(HeapObject::cast(*receiver)->map(), isolate_);
+ receiver_map_ = handle(HeapObject::cast(*receiver).map(), isolate_);
}
}
@@ -45,16 +45,16 @@
HeapObject heap_object;
return (object->IsSmi() && (object.ptr() != kNullAddress)) ||
(object->GetHeapObjectIfWeak(&heap_object) &&
- (heap_object->IsMap() || heap_object->IsPropertyCell())) ||
+ (heap_object.IsMap() || heap_object.IsPropertyCell())) ||
(object->GetHeapObjectIfStrong(&heap_object) &&
- (heap_object->IsDataHandler() || heap_object->IsCode()));
+ (heap_object.IsDataHandler() || heap_object.IsCode()));
}
bool IC::HostIsDeoptimizedCode() const {
Code host =
isolate()->inner_pointer_to_code_cache()->GetCacheEntry(pc())->code;
- return (host->kind() == Code::OPTIMIZED_FUNCTION &&
- host->marked_for_deoptimization());
+ return (host.kind() == Code::OPTIMIZED_FUNCTION &&
+ host.marked_for_deoptimization());
}
bool IC::vector_needs_update() {
diff --git a/src/ic/ic-stats.cc b/src/ic/ic-stats.cc
index 8f8fe818..5eebbd2 100644
--- a/src/ic/ic-stats.cc
+++ b/src/ic/ic-stats.cc
@@ -59,11 +59,11 @@
if (script_name_map_.find(script_ptr) != script_name_map_.end()) {
return script_name_map_[script_ptr].get();
}
- Object script_name_raw = script->name();
- if (script_name_raw->IsString()) {
+ Object script_name_raw = script.name();
+ if (script_name_raw.IsString()) {
String script_name = String::cast(script_name_raw);
char* c_script_name =
- script_name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL)
+ script_name.ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL)
.release();
script_name_map_.insert(
std::make_pair(script_ptr, std::unique_ptr<char[]>(c_script_name)));
@@ -81,9 +81,9 @@
if (function_name_map_.find(function_ptr) != function_name_map_.end()) {
return function_name_map_[function_ptr].get();
}
- SharedFunctionInfo shared = function->shared();
- ic_infos_[pos_].is_optimized = function->IsOptimized();
- char* function_name = shared->DebugName()->ToCString().release();
+ SharedFunctionInfo shared = function.shared();
+ ic_infos_[pos_].is_optimized = function.IsOptimized();
+ char* function_name = shared.DebugName().ToCString().release();
function_name_map_.insert(
std::make_pair(function_ptr, std::unique_ptr<char[]>(function_name)));
return function_name;
diff --git a/src/ic/ic.cc b/src/ic/ic.cc
index c39783e..a4d5092 100644
--- a/src/ic/ic.cc
+++ b/src/ic/ic.cc
@@ -133,16 +133,16 @@
Object maybe_function =
Object(Memory<Address>(fp_ + JavaScriptFrameConstants::kFunctionOffset));
- DCHECK(maybe_function->IsJSFunction());
+ DCHECK(maybe_function.IsJSFunction());
JSFunction function = JSFunction::cast(maybe_function);
int code_offset = 0;
- if (function->IsInterpreted()) {
+ if (function.IsInterpreted()) {
code_offset = InterpretedFrame::GetBytecodeOffset(fp());
} else {
- code_offset = static_cast<int>(pc() - function->code()->InstructionStart());
+ code_offset = static_cast<int>(pc() - function.code().InstructionStart());
}
JavaScriptFrame::CollectFunctionAndOffsetForICStats(
- function, function->abstract_code(), code_offset);
+ function, function.abstract_code(), code_offset);
// Reserve enough space for IC transition state, the longest length is 17.
ic_info.state.reserve(17);
@@ -154,9 +154,9 @@
ic_info.state += ")";
ic_info.map = reinterpret_cast<void*>(map.ptr());
if (!map.is_null()) {
- ic_info.is_dictionary_map = map->is_dictionary_map();
- ic_info.number_of_own_descriptors = map->NumberOfOwnDescriptors();
- ic_info.instance_type = std::to_string(map->instance_type());
+ ic_info.is_dictionary_map = map.is_dictionary_map();
+ ic_info.number_of_own_descriptors = map.NumberOfOwnDescriptors();
+ ic_info.instance_type = std::to_string(map.instance_type());
}
// TODO(lpy) Add name as key field in ICStats.
ICStats::instance()->End();
@@ -231,13 +231,12 @@
case LookupIterator::INTERCEPTOR: {
// If there is a getter, return; otherwise loop to perform the lookup.
Handle<JSObject> holder = it->GetHolder<JSObject>();
- if (!holder->GetNamedInterceptor()->getter()->IsUndefined(
+ if (!holder->GetNamedInterceptor().getter().IsUndefined(
it->isolate())) {
return;
}
if (is_has_property &&
- !holder->GetNamedInterceptor()->query()->IsUndefined(
- it->isolate())) {
+ !holder->GetNamedInterceptor().query().IsUndefined(it->isolate())) {
return;
}
break;
@@ -337,14 +336,14 @@
// TODO(leszeks): The host function is only needed for this print, we could
// remove it as a parameter if we're of with removing this trace (or only
// tracing the feedback vector, not the function name).
- if (vector->profiler_ticks() != 0) {
+ if (vector.profiler_ticks() != 0) {
PrintF("[resetting ticks for ");
- host_function->ShortPrint();
- PrintF(" due from %d due to IC change: %s]\n", vector->profiler_ticks(),
+ host_function.ShortPrint();
+ PrintF(" due from %d due to IC change: %s]\n", vector.profiler_ticks(),
reason);
}
}
- vector->set_profiler_ticks(0);
+ vector.set_profiler_ticks(0);
#ifdef V8_TRACE_FEEDBACK_UPDATES
if (FLAG_trace_feedback_updates) {
@@ -377,7 +376,7 @@
static bool MigrateDeprecated(Handle<Object> object) {
if (!object->IsJSObject()) return false;
Handle<JSObject> receiver = Handle<JSObject>::cast(object);
- if (!receiver->map()->is_deprecated()) return false;
+ if (!receiver->map().is_deprecated()) return false;
JSObject::MigrateInstance(Handle<JSObject>::cast(object));
return true;
}
@@ -469,7 +468,7 @@
if (name->IsPrivate()) {
if (name->IsPrivateName() && !it.IsFound()) {
- Handle<String> name_string(String::cast(Symbol::cast(*name)->name()),
+ Handle<String> name_string(String::cast(Symbol::cast(*name).name()),
isolate());
return TypeError(MessageTemplate::kInvalidPrivateFieldRead, object,
name_string);
@@ -516,7 +515,7 @@
// Look up in script context table.
Handle<String> str_name = Handle<String>::cast(name);
Handle<ScriptContextTable> script_contexts(
- global->native_context()->script_context_table(), isolate());
+ global->native_context().script_context_table(), isolate());
ScriptContextTable::LookupResult lookup_result;
if (ScriptContextTable::Lookup(isolate(), *script_contexts, *str_name,
@@ -653,16 +652,16 @@
bool IC::IsTransitionOfMonomorphicTarget(Map source_map, Map target_map) {
if (source_map.is_null()) return true;
if (target_map.is_null()) return false;
- if (source_map->is_abandoned_prototype_map()) return false;
- ElementsKind target_elements_kind = target_map->elements_kind();
+ if (source_map.is_abandoned_prototype_map()) return false;
+ ElementsKind target_elements_kind = target_map.elements_kind();
bool more_general_transition = IsMoreGeneralElementsKindTransition(
- source_map->elements_kind(), target_elements_kind);
+ source_map.elements_kind(), target_elements_kind);
Map transitioned_map;
if (more_general_transition) {
MapHandles map_list;
map_list.push_back(handle(target_map, isolate_));
transitioned_map =
- source_map->FindElementsKindTransitionedMap(isolate(), map_list);
+ source_map.FindElementsKindTransitionedMap(isolate(), map_list);
}
return transitioned_map == target_map;
}
@@ -797,7 +796,7 @@
// Use specialized code for getting prototype of functions.
if (receiver->IsJSFunction() &&
*lookup->name() == roots.prototype_string() &&
- !JSFunction::cast(*receiver)->PrototypeRequiresRuntimeLookup()) {
+ !JSFunction::cast(*receiver).PrototypeRequiresRuntimeLookup()) {
TRACE_HANDLER_STATS(isolate(), LoadIC_FunctionPrototypeStub);
return BUILTIN_CODE(isolate(), LoadIC_FunctionPrototype);
}
@@ -815,7 +814,7 @@
case LookupIterator::INTERCEPTOR: {
Handle<Smi> smi_handler = LoadHandler::LoadInterceptor(isolate());
- if (holder->GetNamedInterceptor()->non_masking()) {
+ if (holder->GetNamedInterceptor().non_masking()) {
MaybeObjectHandle holder_ref(isolate()->factory()->null_value());
if (!receiver_is_holder || IsLoadGlobalIC()) {
holder_ref = MaybeObjectHandle::Weak(holder);
@@ -848,7 +847,7 @@
}
if (holder->IsJSModuleNamespace()) {
Handle<ObjectHashTable> exports(
- Handle<JSModuleNamespace>::cast(holder)->module()->exports(),
+ Handle<JSModuleNamespace>::cast(holder)->module().exports(),
isolate());
int entry = exports->FindEntry(roots, lookup->name(),
Smi::ToInt(lookup->name()->GetHash()));
@@ -865,7 +864,7 @@
return ComputeHandler(lookup);
}
- Handle<Object> getter(AccessorPair::cast(*accessors)->getter(),
+ Handle<Object> getter(AccessorPair::cast(*accessors).getter(),
isolate());
if (!getter->IsJSFunction() && !getter->IsFunctionTemplateInfo()) {
TRACE_HANDLER_STATS(isolate(), LoadIC_SlowStub);
@@ -873,9 +872,9 @@
}
if ((getter->IsFunctionTemplateInfo() &&
- FunctionTemplateInfo::cast(*getter)->BreakAtEntry()) ||
+ FunctionTemplateInfo::cast(*getter).BreakAtEntry()) ||
(getter->IsJSFunction() &&
- JSFunction::cast(*getter)->shared()->BreakAtEntry())) {
+ JSFunction::cast(*getter).shared().BreakAtEntry())) {
// Do not install an IC if the api function has a breakpoint.
TRACE_HANDLER_STATS(isolate(), LoadIC_SlowStub);
return slow_stub();
@@ -1146,12 +1145,11 @@
KeyedAccessLoadMode load_mode) {
// Has a getter interceptor, or is any has and has a query interceptor.
if (receiver_map->has_indexed_interceptor() &&
- (!receiver_map->GetIndexedInterceptor()->getter()->IsUndefined(
- isolate()) ||
+ (!receiver_map->GetIndexedInterceptor().getter().IsUndefined(isolate()) ||
(IsAnyHas() &&
- !receiver_map->GetIndexedInterceptor()->query()->IsUndefined(
+ !receiver_map->GetIndexedInterceptor().query().IsUndefined(
isolate()))) &&
- !receiver_map->GetIndexedInterceptor()->non_masking()) {
+ !receiver_map->GetIndexedInterceptor().non_masking()) {
// TODO(jgruber): Update counter name.
TRACE_HANDLER_STATS(isolate(), KeyedLoadIC_LoadIndexedInterceptorStub);
return IsAnyHas() ? BUILTIN_CODE(isolate(), HasIndexedInterceptorIC)
@@ -1254,13 +1252,13 @@
bool IsOutOfBoundsAccess(Handle<Object> receiver, uint32_t index) {
size_t length;
if (receiver->IsJSArray()) {
- length = JSArray::cast(*receiver)->length()->Number();
+ length = JSArray::cast(*receiver).length().Number();
} else if (receiver->IsJSTypedArray()) {
- length = JSTypedArray::cast(*receiver)->length();
+ length = JSTypedArray::cast(*receiver).length();
} else if (receiver->IsJSObject()) {
- length = JSObject::cast(*receiver)->elements()->length();
+ length = JSObject::cast(*receiver).elements().length();
} else if (receiver->IsString()) {
- length = String::cast(*receiver)->length();
+ length = String::cast(*receiver).length();
} else {
return false;
}
@@ -1313,7 +1311,7 @@
uint32_t index;
if ((key->IsInternalizedString() &&
- !String::cast(*key)->AsArrayIndex(&index)) ||
+ !String::cast(*key).AsArrayIndex(&index)) ||
key->IsSymbol()) {
ASSIGN_RETURN_ON_EXCEPTION(isolate(), load_handle,
LoadIC::Load(object, Handle<Name>::cast(key)),
@@ -1343,7 +1341,7 @@
if (object->IsJSProxy()) return true;
if (!object->IsJSObject()) return false;
Handle<JSObject> receiver = Handle<JSObject>::cast(object);
- DCHECK(!receiver->map()->is_deprecated());
+ DCHECK(!receiver->map().is_deprecated());
if (it->state() != LookupIterator::TRANSITION) {
for (; it->IsFound(); it->Next()) {
@@ -1357,10 +1355,10 @@
Handle<JSObject> holder = it->GetHolder<JSObject>();
InterceptorInfo info = holder->GetNamedInterceptor();
if (it->HolderIsReceiverOrHiddenPrototype()) {
- return !info->non_masking() && receiver.is_identical_to(holder) &&
- !info->setter()->IsUndefined(isolate());
- } else if (!info->getter()->IsUndefined(isolate()) ||
- !info->query()->IsUndefined(isolate())) {
+ return !info.non_masking() && receiver.is_identical_to(holder) &&
+ !info.setter().IsUndefined(isolate());
+ } else if (!info.getter().IsUndefined(isolate()) ||
+ !info.query().IsUndefined(isolate())) {
return false;
}
break;
@@ -1415,7 +1413,7 @@
Handle<String> str_name = Handle<String>::cast(name);
Handle<JSGlobalObject> global = isolate()->global_object();
Handle<ScriptContextTable> script_contexts(
- global->native_context()->script_context_table(), isolate());
+ global->native_context().script_context_table(), isolate());
ScriptContextTable::LookupResult lookup_result;
if (ScriptContextTable::Lookup(isolate(), *script_contexts, *str_name,
@@ -1490,7 +1488,7 @@
if (name->IsPrivate()) {
if (name->IsPrivateName() && !it.IsFound()) {
- Handle<String> name_string(String::cast(Symbol::cast(*name)->name()),
+ Handle<String> name_string(String::cast(Symbol::cast(*name).name()),
isolate());
return TypeError(MessageTemplate::kInvalidPrivateFieldWrite, object,
name_string);
@@ -1538,7 +1536,7 @@
InterceptorInfo info =
lookup->GetHolder<JSObject>()->GetNamedInterceptor();
if (!lookup->HolderIsReceiverOrHiddenPrototype() &&
- !info->getter()->IsUndefined(isolate())) {
+ !info.getter().IsUndefined(isolate())) {
// Utilize premonomorphic state for global store ics that run into
// an interceptor because the property doesn't exist yet.
// After we actually set the property, we'll have more information.
@@ -1596,7 +1594,7 @@
Handle<JSObject> holder = lookup->GetHolder<JSObject>();
USE(holder);
- DCHECK(!holder->GetNamedInterceptor()->setter()->IsUndefined(isolate()));
+ DCHECK(!holder->GetNamedInterceptor().setter().IsUndefined(isolate()));
// TODO(jgruber): Update counter name.
TRACE_HANDLER_STATS(isolate(), StoreIC_StoreInterceptorStub);
return MaybeObjectHandle(BUILTIN_CODE(isolate(), StoreInterceptorIC));
@@ -1621,7 +1619,7 @@
TRACE_HANDLER_STATS(isolate(), StoreIC_SlowStub);
return MaybeObjectHandle(slow_stub());
}
- if (AccessorInfo::cast(*accessors)->is_special_data_property() &&
+ if (AccessorInfo::cast(*accessors).is_special_data_property() &&
!lookup->HolderIsReceiverOrHiddenPrototype()) {
set_slow_stub_reason("special data property in prototype chain");
TRACE_HANDLER_STATS(isolate(), StoreIC_SlowStub);
@@ -1654,9 +1652,9 @@
}
if ((setter->IsFunctionTemplateInfo() &&
- FunctionTemplateInfo::cast(*setter)->BreakAtEntry()) ||
+ FunctionTemplateInfo::cast(*setter).BreakAtEntry()) ||
(setter->IsJSFunction() &&
- JSFunction::cast(*setter)->shared()->BreakAtEntry())) {
+ JSFunction::cast(*setter).shared().BreakAtEntry())) {
// Do not install an IC if the api function has a breakpoint.
TRACE_HANDLER_STATS(isolate(), StoreIC_SlowStub);
return MaybeObjectHandle(slow_stub());
@@ -1994,8 +1992,8 @@
for (PrototypeIterator iter(object->GetIsolate(), *object); !iter.IsAtEnd();
iter.Advance()) {
// Be conservative, don't walk into proxies.
- if (iter.GetCurrent()->IsJSProxy()) return true;
- if (iter.GetCurrent()->IsJSTypedArray()) return true;
+ if (iter.GetCurrent().IsJSProxy()) return true;
+ if (iter.GetCurrent().IsJSTypedArray()) return true;
}
return false;
}
@@ -2014,10 +2012,10 @@
return STORE_AND_GROW_HANDLE_COW;
}
if (!FLAG_trace_external_array_abuse &&
- receiver->map()->has_fixed_typed_array_elements() && oob_access) {
+ receiver->map().has_fixed_typed_array_elements() && oob_access) {
return STORE_IGNORE_OUT_OF_BOUNDS;
}
- return receiver->elements()->IsCowArray() ? STORE_HANDLE_COW : STANDARD_STORE;
+ return receiver->elements().IsCowArray() ? STORE_HANDLE_COW : STANDARD_STORE;
}
} // namespace
@@ -2045,7 +2043,7 @@
uint32_t index;
if ((key->IsInternalizedString() &&
- !String::cast(*key)->AsArrayIndex(&index)) ||
+ !String::cast(*key).AsArrayIndex(&index)) ||
key->IsSymbol()) {
ASSIGN_RETURN_ON_EXCEPTION(isolate(), store_handle,
StoreIC::Store(object, Handle<Name>::cast(key),
@@ -2070,7 +2068,7 @@
// expect to be able to trap element sets to objects with those maps in
// the runtime to enable optimization of element hole access.
Handle<HeapObject> heap_object = Handle<HeapObject>::cast(object);
- if (heap_object->map()->IsMapInArrayPrototypeChain(isolate())) {
+ if (heap_object->map().IsMapInArrayPrototypeChain(isolate())) {
set_slow_stub_reason("map in array prototype");
use_ic = false;
}
@@ -2160,7 +2158,7 @@
void StoreInArrayLiteralIC::Store(Handle<JSArray> array, Handle<Object> index,
Handle<Object> value) {
- DCHECK(!array->map()->IsMapInArrayPrototypeChain(isolate()));
+ DCHECK(!array->map().IsMapInArrayPrototypeChain(isolate()));
DCHECK(index->IsNumber());
if (!FLAG_use_ic || state() == NO_FEEDBACK || MigrateDeprecated(array)) {
@@ -2577,10 +2575,10 @@
DescriptorArray descriptors = map->instance_descriptors();
for (int i = 0; i < map->NumberOfOwnDescriptors(); i++) {
- PropertyDetails details = descriptors->GetDetails(i);
- Name key = descriptors->GetKey(i);
+ PropertyDetails details = descriptors.GetDetails(i);
+ Name key = descriptors.GetKey(i);
if (details.kind() != kData || !details.IsEnumerable() ||
- key->IsPrivateName()) {
+ key.IsPrivateName()) {
return false;
}
}
@@ -2747,7 +2745,7 @@
Object holder = *receiver;
if (handler_kind == LoadHandler::kApiGetterHolderIsPrototype) {
- holder = receiver->map()->prototype();
+ holder = receiver->map().prototype();
} else {
DCHECK_EQ(handler_kind, LoadHandler::kApiGetter);
}
@@ -2918,7 +2916,7 @@
PropertyCallbackArguments arguments(isolate, interceptor->data(), *receiver,
*receiver, Just(kDontThrow));
- if (!interceptor->query()->IsUndefined(isolate)) {
+ if (!interceptor->query().IsUndefined(isolate)) {
Handle<Object> result = arguments.CallIndexedQuery(interceptor, index);
if (!result.is_null()) {
int32_t value;
@@ -2926,7 +2924,7 @@
return value == ABSENT ? ReadOnlyRoots(isolate).false_value()
: ReadOnlyRoots(isolate).true_value();
}
- } else if (!interceptor->getter()->IsUndefined(isolate)) {
+ } else if (!interceptor->getter().IsUndefined(isolate)) {
Handle<Object> result = arguments.CallIndexedGetter(interceptor, index);
if (!result.is_null()) {
return ReadOnlyRoots(isolate).true_value();
diff --git a/src/ic/stub-cache.cc b/src/ic/stub-cache.cc
index 2a63835..bdac1ce 100644
--- a/src/ic/stub-cache.cc
+++ b/src/ic/stub-cache.cc
@@ -31,8 +31,8 @@
int StubCache::PrimaryOffset(Name name, Map map) {
STATIC_ASSERT(kCacheIndexShift == Name::kHashShift);
// Compute the hash of the name (use entire hash field).
- DCHECK(name->HasHashCode());
- uint32_t field = name->hash_field();
+ DCHECK(name.HasHashCode());
+ uint32_t field = name.hash_field();
// Using only the low bits in 64-bit mode is unlikely to increase the
// risk of collision even if the heap is spread over an area larger than
// 4Gb (and not at all if it isn't).
@@ -70,8 +70,8 @@
// can use identity checks instead of structural equality checks.
DCHECK(!Heap::InYoungGeneration(name));
DCHECK(!Heap::InYoungGeneration(handler));
- DCHECK(name->IsUniqueName());
- DCHECK(name->HasHashCode());
+ DCHECK(name.IsUniqueName());
+ DCHECK(name.HasHashCode());
if (handler->ptr() != kNullAddress) DCHECK(IC::IsHandler(handler));
return true;
}
diff --git a/src/init/bootstrapper.cc b/src/init/bootstrapper.cc
index 82cc585..911dc84 100644
--- a/src/init/bootstrapper.cc
+++ b/src/init/bootstrapper.cc
@@ -68,11 +68,11 @@
bool SourceCodeCache::Lookup(Isolate* isolate, Vector<const char> name,
Handle<SharedFunctionInfo>* handle) {
- for (int i = 0; i < cache_->length(); i += 2) {
- SeqOneByteString str = SeqOneByteString::cast(cache_->get(i));
- if (str->IsOneByteEqualTo(Vector<const uint8_t>::cast(name))) {
+ for (int i = 0; i < cache_.length(); i += 2) {
+ SeqOneByteString str = SeqOneByteString::cast(cache_.get(i));
+ if (str.IsOneByteEqualTo(Vector<const uint8_t>::cast(name))) {
*handle = Handle<SharedFunctionInfo>(
- SharedFunctionInfo::cast(cache_->get(i + 1)), isolate);
+ SharedFunctionInfo::cast(cache_.get(i + 1)), isolate);
return true;
}
}
@@ -83,10 +83,10 @@
Handle<SharedFunctionInfo> shared) {
Factory* factory = isolate->factory();
HandleScope scope(isolate);
- int length = cache_->length();
+ int length = cache_.length();
Handle<FixedArray> new_array =
factory->NewFixedArray(length + 2, AllocationType::kOld);
- cache_->CopyTo(0, *new_array, 0, cache_->length());
+ cache_.CopyTo(0, *new_array, 0, cache_.length());
cache_ = *new_array;
Handle<String> str =
factory
@@ -94,9 +94,9 @@
AllocationType::kOld)
.ToHandleChecked();
DCHECK(!str.is_null());
- cache_->set(length, *str);
- cache_->set(length + 1, *shared);
- Script::cast(shared->script())->set_type(type_);
+ cache_.set(length, *str);
+ cache_.set(length + 1, *shared);
+ Script::cast(shared->script()).set_type(type_);
}
Bootstrapper::Bootstrapper(Isolate* isolate)
@@ -349,12 +349,12 @@
Handle<JSGlobalProxy> global_proxy(env->global_proxy(), isolate_);
global_proxy->set_native_context(roots.null_value());
JSObject::ForceSetPrototype(global_proxy, isolate_->factory()->null_value());
- global_proxy->map()->SetConstructor(roots.null_value());
+ global_proxy->map().SetConstructor(roots.null_value());
if (FLAG_track_detached_contexts) {
isolate_->AddDetachedContext(env);
}
- env->native_context()->set_microtask_queue(nullptr);
+ env->native_context().set_microtask_queue(nullptr);
}
namespace {
@@ -397,7 +397,7 @@
// Make the resulting JSFunction object fast.
JSObject::MakePrototypesFast(result, kStartAtReceiver, isolate);
- result->shared()->set_native(true);
+ result->shared().set_native(true);
return result;
}
@@ -438,14 +438,14 @@
Handle<JSFunction> fun = isolate->factory()->NewFunction(args);
// Make the resulting JSFunction object fast.
JSObject::MakePrototypesFast(fun, kStartAtReceiver, isolate);
- fun->shared()->set_native(true);
+ fun->shared().set_native(true);
if (adapt) {
- fun->shared()->set_internal_formal_parameter_count(len);
+ fun->shared().set_internal_formal_parameter_count(len);
} else {
- fun->shared()->DontAdaptArguments();
+ fun->shared().DontAdaptArguments();
}
- fun->shared()->set_length(len);
+ fun->shared().set_length(len);
return fun;
}
@@ -602,8 +602,8 @@
script->set_type(Script::TYPE_NATIVE);
Handle<WeakFixedArray> infos = factory()->NewWeakFixedArray(2);
script->set_shared_function_infos(*infos);
- empty_function->shared()->set_scope_info(*scope_info);
- empty_function->shared()->DontAdaptArguments();
+ empty_function->shared().set_scope_info(*scope_info);
+ empty_function->shared().DontAdaptArguments();
SharedFunctionInfo::SetScript(handle(empty_function->shared(), isolate()),
script, 1);
@@ -647,7 +647,7 @@
NewFunctionArgs args = NewFunctionArgs::ForBuiltinWithoutPrototype(
name, Builtins::kStrictPoisonPillThrower, i::LanguageMode::kStrict);
Handle<JSFunction> function = factory()->NewFunction(args);
- function->shared()->DontAdaptArguments();
+ function->shared().DontAdaptArguments();
// %ThrowTypeError% must not have a name property.
if (JSReceiver::DeleteProperty(function, factory()->name_string())
@@ -737,14 +737,14 @@
Handle<JSFunction> object_fun = CreateFunction(
isolate_, factory->Object_string(), JS_OBJECT_TYPE, instance_size,
inobject_properties, factory->null_value(), Builtins::kObjectConstructor);
- object_fun->shared()->set_length(1);
- object_fun->shared()->DontAdaptArguments();
+ object_fun->shared().set_length(1);
+ object_fun->shared().DontAdaptArguments();
native_context()->set_object_function(*object_fun);
{
// Finish setting up Object function's initial map.
Map initial_map = object_fun->initial_map();
- initial_map->set_elements_kind(HOLEY_ELEMENTS);
+ initial_map.set_elements_kind(HOLEY_ELEMENTS);
}
// Allocate a new prototype for the object function.
@@ -852,7 +852,7 @@
Handle<JSFunction> generator_next_internal =
SimpleCreateFunction(isolate(), factory()->next_string(),
Builtins::kGeneratorPrototypeNext, 1, false);
- generator_next_internal->shared()->set_native(false);
+ generator_next_internal->shared().set_native(false);
native_context()->set_generator_next_internal(*generator_next_internal);
// Create maps for generator functions and their prototypes. Store those
@@ -1086,9 +1086,9 @@
PropertyAttributes attributes,
Handle<AccessorPair> accessor_pair) {
DescriptorArray descriptors = map->instance_descriptors();
- int idx = descriptors->SearchWithCache(isolate, *name, *map);
+ int idx = descriptors.SearchWithCache(isolate, *name, *map);
Descriptor d = Descriptor::AccessorConstant(name, accessor_pair, attributes);
- descriptors->Replace(idx, &d);
+ descriptors.Replace(idx, &d);
}
} // namespace
@@ -1107,21 +1107,21 @@
}
static void AddToWeakNativeContextList(Isolate* isolate, Context context) {
- DCHECK(context->IsNativeContext());
+ DCHECK(context.IsNativeContext());
Heap* heap = isolate->heap();
#ifdef DEBUG
{ // NOLINT
- DCHECK(context->next_context_link()->IsUndefined(isolate));
+ DCHECK(context.next_context_link().IsUndefined(isolate));
// Check that context is not in the list yet.
for (Object current = heap->native_contexts_list();
- !current->IsUndefined(isolate);
- current = Context::cast(current)->next_context_link()) {
+ !current.IsUndefined(isolate);
+ current = Context::cast(current).next_context_link()) {
DCHECK(current != context);
}
}
#endif
- context->set(Context::NEXT_CONTEXT_LINK, heap->native_contexts_list(),
- UPDATE_WEAK_WRITE_BARRIER);
+ context.set(Context::NEXT_CONTEXT_LINK, heap->native_contexts_list(),
+ UPDATE_WEAK_WRITE_BARRIER);
heap->set_native_contexts_list(context);
}
@@ -1216,9 +1216,9 @@
JS_GLOBAL_OBJECT_TYPE);
}
- js_global_object_function->initial_map()->set_is_prototype_map(true);
- js_global_object_function->initial_map()->set_is_dictionary_map(true);
- js_global_object_function->initial_map()->set_may_have_interesting_symbols(
+ js_global_object_function->initial_map().set_is_prototype_map(true);
+ js_global_object_function->initial_map().set_is_dictionary_map(true);
+ js_global_object_function->initial_map().set_may_have_interesting_symbols(
true);
Handle<JSGlobalObject> global_object =
factory()->NewJSGlobalObject(js_global_object_function);
@@ -1241,9 +1241,9 @@
isolate(), global_constructor, factory()->the_hole_value(),
JS_GLOBAL_PROXY_TYPE);
}
- global_proxy_function->initial_map()->set_is_access_check_needed(true);
- global_proxy_function->initial_map()->set_has_hidden_prototype(true);
- global_proxy_function->initial_map()->set_may_have_interesting_symbols(true);
+ global_proxy_function->initial_map().set_is_access_check_needed(true);
+ global_proxy_function->initial_map().set_has_hidden_prototype(true);
+ global_proxy_function->initial_map().set_may_have_interesting_symbols(true);
native_context()->set_global_proxy_function(*global_proxy_function);
// Set global_proxy.__proto__ to js_global after ConfigureGlobalObjects
@@ -1261,7 +1261,7 @@
// deserializer. Otherwise it's undefined.
DCHECK(native_context()
->get(Context::GLOBAL_PROXY_INDEX)
- ->IsUndefined(isolate()) ||
+ .IsUndefined(isolate()) ||
native_context()->global_proxy() == *global_proxy);
native_context()->set_global_proxy(*global_proxy);
@@ -1314,8 +1314,8 @@
InstallFunction(isolate, global, name, JS_ERROR_TYPE, kErrorObjectSize,
kInObjectPropertiesCount, factory->the_hole_value(),
Builtins::kErrorConstructor);
- error_fun->shared()->DontAdaptArguments();
- error_fun->shared()->set_length(1);
+ error_fun->shared().DontAdaptArguments();
+ error_fun->shared().set_length(1);
if (context_index == Context::ERROR_FUNCTION_INDEX) {
SimpleInstallFunction(isolate, error_fun, "captureStackTrace",
@@ -1341,7 +1341,7 @@
isolate->native_context()->set_error_to_string(*to_string_fun);
isolate->native_context()->set_initial_error_prototype(*prototype);
} else {
- DCHECK(isolate->native_context()->error_to_string()->IsJSFunction());
+ DCHECK(isolate->native_context()->error_to_string().IsJSFunction());
JSObject::AddProperty(isolate, prototype, factory->toString_string(),
isolate->error_to_string(), DONT_ENUM);
@@ -1376,7 +1376,7 @@
JS_OBJECT_TYPE, JSObject::kHeaderSize, 0, builtin_id, MUTABLE);
Handle<JSFunction> function = isolate->factory()->NewFunction(args);
- function->shared()->DontAdaptArguments();
+ function->shared().DontAdaptArguments();
isolate->native_context()->set(context_index, *function);
}
@@ -1388,7 +1388,7 @@
Handle<JSFunction> empty_function) {
// --- N a t i v e C o n t e x t ---
// Use the empty scope info.
- native_context()->set_scope_info(empty_function->shared()->scope_info());
+ native_context()->set_scope_info(empty_function->shared().scope_info());
native_context()->set_previous(Context());
// Set extension and global object.
native_context()->set_extension(*global_object);
@@ -1519,8 +1519,8 @@
// Function instances are sloppy by default.
function_fun->set_prototype_or_initial_map(
*isolate_->sloppy_function_map());
- function_fun->shared()->DontAdaptArguments();
- function_fun->shared()->set_length(1);
+ function_fun->shared().DontAdaptArguments();
+ function_fun->shared().set_length(1);
InstallWithIntrinsicDefaultProto(isolate_, function_fun,
Context::FUNCTION_FUNCTION_INDEX);
@@ -1609,11 +1609,11 @@
Handle<JSFunction> array_function = InstallFunction(
isolate_, global, "Array", JS_ARRAY_TYPE, JSArray::kSize, 0,
isolate_->initial_object_prototype(), Builtins::kArrayConstructor);
- array_function->shared()->DontAdaptArguments();
+ array_function->shared().DontAdaptArguments();
// This seems a bit hackish, but we need to make sure Array.length
// is 1.
- array_function->shared()->set_length(1);
+ array_function->shared().set_length(1);
Handle<Map> initial_map(array_function->initial_map(), isolate());
@@ -1776,7 +1776,7 @@
CreateFunction(isolate_, factory->ArrayIterator_string(),
JS_ARRAY_ITERATOR_TYPE, JSArrayIterator::kSize, 0,
array_iterator_prototype, Builtins::kIllegal);
- array_iterator_function->shared()->set_native(false);
+ array_iterator_function->shared().set_native(false);
native_context()->set_initial_array_iterator_map(
array_iterator_function->initial_map());
@@ -1788,8 +1788,8 @@
Handle<JSFunction> number_fun = InstallFunction(
isolate_, global, "Number", JS_VALUE_TYPE, JSValue::kSize, 0,
isolate_->initial_object_prototype(), Builtins::kNumberConstructor);
- number_fun->shared()->DontAdaptArguments();
- number_fun->shared()->set_length(1);
+ number_fun->shared().DontAdaptArguments();
+ number_fun->shared().set_length(1);
InstallWithIntrinsicDefaultProto(isolate_, number_fun,
Context::NUMBER_FUNCTION_INDEX);
@@ -1871,8 +1871,8 @@
Handle<JSFunction> boolean_fun = InstallFunction(
isolate_, global, "Boolean", JS_VALUE_TYPE, JSValue::kSize, 0,
isolate_->initial_object_prototype(), Builtins::kBooleanConstructor);
- boolean_fun->shared()->DontAdaptArguments();
- boolean_fun->shared()->set_length(1);
+ boolean_fun->shared().DontAdaptArguments();
+ boolean_fun->shared().set_length(1);
InstallWithIntrinsicDefaultProto(isolate_, boolean_fun,
Context::BOOLEAN_FUNCTION_INDEX);
@@ -1897,13 +1897,13 @@
Handle<JSFunction> string_fun = InstallFunction(
isolate_, global, "String", JS_VALUE_TYPE, JSValue::kSize, 0,
isolate_->initial_object_prototype(), Builtins::kStringConstructor);
- string_fun->shared()->DontAdaptArguments();
- string_fun->shared()->set_length(1);
+ string_fun->shared().DontAdaptArguments();
+ string_fun->shared().set_length(1);
InstallWithIntrinsicDefaultProto(isolate_, string_fun,
Context::STRING_FUNCTION_INDEX);
Handle<Map> string_map = Handle<Map>(
- native_context()->string_function()->initial_map(), isolate());
+ native_context()->string_function().initial_map(), isolate());
string_map->set_elements_kind(FAST_STRING_WRAPPER_ELEMENTS);
Map::EnsureDescriptorSlack(isolate_, string_map, 1);
@@ -2082,7 +2082,7 @@
isolate_, factory->InternalizeUtf8String("StringIterator"),
JS_STRING_ITERATOR_TYPE, JSStringIterator::kSize, 0,
string_iterator_prototype, Builtins::kIllegal);
- string_iterator_function->shared()->set_native(false);
+ string_iterator_function->shared().set_native(false);
native_context()->set_initial_string_iterator_map(
string_iterator_function->initial_map());
native_context()->set_initial_string_iterator_prototype(
@@ -2093,8 +2093,8 @@
Handle<JSFunction> symbol_fun = InstallFunction(
isolate_, global, "Symbol", JS_VALUE_TYPE, JSValue::kSize, 0,
factory->the_hole_value(), Builtins::kSymbolConstructor);
- symbol_fun->shared()->set_length(0);
- symbol_fun->shared()->DontAdaptArguments();
+ symbol_fun->shared().set_length(0);
+ symbol_fun->shared().DontAdaptArguments();
native_context()->set_symbol_function(*symbol_fun);
// Install the Symbol.for and Symbol.keyFor functions.
@@ -2156,8 +2156,8 @@
factory->the_hole_value(), Builtins::kDateConstructor);
InstallWithIntrinsicDefaultProto(isolate_, date_fun,
Context::DATE_FUNCTION_INDEX);
- date_fun->shared()->set_length(7);
- date_fun->shared()->DontAdaptArguments();
+ date_fun->shared().set_length(7);
+ date_fun->shared().DontAdaptArguments();
// Install the Date.now, Date.parse and Date.UTC functions.
SimpleInstallFunction(isolate_, date_fun, "now", Builtins::kDateNow, 0,
@@ -2448,7 +2448,7 @@
Builtins::kRegExpPrototypeExec, 1, true);
// Check that index of "exec" function in JSRegExp is correct.
DCHECK_EQ(JSRegExp::kExecFunctionDescriptorIndex,
- prototype->map()->LastAdded());
+ prototype->map().LastAdded());
native_context()->set_regexp_exec_function(*fun);
}
@@ -2481,31 +2481,31 @@
"[Symbol.match]", Builtins::kRegExpPrototypeMatch,
1, true);
DCHECK_EQ(JSRegExp::kSymbolMatchFunctionDescriptorIndex,
- prototype->map()->LastAdded());
+ prototype->map().LastAdded());
InstallFunctionAtSymbol(isolate_, prototype, factory->match_all_symbol(),
"[Symbol.matchAll]",
Builtins::kRegExpPrototypeMatchAll, 1, true);
DCHECK_EQ(JSRegExp::kSymbolMatchAllFunctionDescriptorIndex,
- prototype->map()->LastAdded());
+ prototype->map().LastAdded());
InstallFunctionAtSymbol(isolate_, prototype, factory->replace_symbol(),
"[Symbol.replace]",
Builtins::kRegExpPrototypeReplace, 2, false);
DCHECK_EQ(JSRegExp::kSymbolReplaceFunctionDescriptorIndex,
- prototype->map()->LastAdded());
+ prototype->map().LastAdded());
InstallFunctionAtSymbol(isolate_, prototype, factory->search_symbol(),
"[Symbol.search]",
Builtins::kRegExpPrototypeSearch, 1, true);
DCHECK_EQ(JSRegExp::kSymbolSearchFunctionDescriptorIndex,
- prototype->map()->LastAdded());
+ prototype->map().LastAdded());
InstallFunctionAtSymbol(isolate_, prototype, factory->split_symbol(),
"[Symbol.split]", Builtins::kRegExpPrototypeSplit,
2, false);
DCHECK_EQ(JSRegExp::kSymbolSplitFunctionDescriptorIndex,
- prototype->map()->LastAdded());
+ prototype->map().LastAdded());
Handle<Map> prototype_map(prototype->map(), isolate());
Map::SetShouldBeFastPrototypeMap(prototype_map, true, isolate_);
@@ -2622,7 +2622,7 @@
isolate(), "RegExpStringIterator", JS_REGEXP_STRING_ITERATOR_TYPE,
JSRegExpStringIterator::kSize, 0, regexp_string_iterator_prototype,
Builtins::kIllegal);
- regexp_string_iterator_function->shared()->set_native(false);
+ regexp_string_iterator_function->shared().set_native(false);
native_context()->set_initial_regexp_string_iterator_prototype_map(
regexp_string_iterator_function->initial_map());
}
@@ -2854,8 +2854,8 @@
isolate_, intl, "DateTimeFormat", JS_INTL_DATE_TIME_FORMAT_TYPE,
JSDateTimeFormat::kSize, 0, factory->the_hole_value(),
Builtins::kDateTimeFormatConstructor);
- date_time_format_constructor->shared()->set_length(0);
- date_time_format_constructor->shared()->DontAdaptArguments();
+ date_time_format_constructor->shared().set_length(0);
+ date_time_format_constructor->shared().DontAdaptArguments();
InstallWithIntrinsicDefaultProto(
isolate_, date_time_format_constructor,
Context::INTL_DATE_TIME_FORMAT_FUNCTION_INDEX);
@@ -2886,8 +2886,8 @@
isolate_, intl, "NumberFormat", JS_INTL_NUMBER_FORMAT_TYPE,
JSNumberFormat::kSize, 0, factory->the_hole_value(),
Builtins::kNumberFormatConstructor);
- number_format_constructor->shared()->set_length(0);
- number_format_constructor->shared()->DontAdaptArguments();
+ number_format_constructor->shared().set_length(0);
+ number_format_constructor->shared().DontAdaptArguments();
InstallWithIntrinsicDefaultProto(
isolate_, number_format_constructor,
Context::INTL_NUMBER_FORMAT_FUNCTION_INDEX);
@@ -2916,7 +2916,7 @@
Handle<JSFunction> collator_constructor = InstallFunction(
isolate_, intl, "Collator", JS_INTL_COLLATOR_TYPE, JSCollator::kSize,
0, factory->the_hole_value(), Builtins::kCollatorConstructor);
- collator_constructor->shared()->DontAdaptArguments();
+ collator_constructor->shared().DontAdaptArguments();
InstallWithIntrinsicDefaultProto(isolate_, collator_constructor,
Context::INTL_COLLATOR_FUNCTION_INDEX);
@@ -2942,7 +2942,7 @@
isolate_, intl, "v8BreakIterator", JS_INTL_V8_BREAK_ITERATOR_TYPE,
JSV8BreakIterator::kSize, 0, factory->the_hole_value(),
Builtins::kV8BreakIteratorConstructor);
- v8_break_iterator_constructor->shared()->DontAdaptArguments();
+ v8_break_iterator_constructor->shared().DontAdaptArguments();
SimpleInstallFunction(
isolate_, v8_break_iterator_constructor, "supportedLocalesOf",
@@ -2978,7 +2978,7 @@
isolate_, intl, "PluralRules", JS_INTL_PLURAL_RULES_TYPE,
JSPluralRules::kSize, 0, factory->the_hole_value(),
Builtins::kPluralRulesConstructor);
- plural_rules_constructor->shared()->DontAdaptArguments();
+ plural_rules_constructor->shared().DontAdaptArguments();
SimpleInstallFunction(isolate(), plural_rules_constructor,
"supportedLocalesOf",
@@ -3002,8 +3002,8 @@
isolate(), intl, "RelativeTimeFormat",
JS_INTL_RELATIVE_TIME_FORMAT_TYPE, JSRelativeTimeFormat::kSize, 0,
factory->the_hole_value(), Builtins::kRelativeTimeFormatConstructor);
- relative_time_format_fun->shared()->set_length(0);
- relative_time_format_fun->shared()->DontAdaptArguments();
+ relative_time_format_fun->shared().set_length(0);
+ relative_time_format_fun->shared().DontAdaptArguments();
SimpleInstallFunction(
isolate(), relative_time_format_fun, "supportedLocalesOf",
@@ -3032,8 +3032,8 @@
isolate(), intl, "ListFormat", JS_INTL_LIST_FORMAT_TYPE,
JSListFormat::kSize, 0, factory->the_hole_value(),
Builtins::kListFormatConstructor);
- list_format_fun->shared()->set_length(0);
- list_format_fun->shared()->DontAdaptArguments();
+ list_format_fun->shared().set_length(0);
+ list_format_fun->shared().DontAdaptArguments();
SimpleInstallFunction(isolate(), list_format_fun, "supportedLocalesOf",
Builtins::kListFormatSupportedLocalesOf, 1, false);
@@ -3060,8 +3060,8 @@
factory->the_hole_value(), Builtins::kLocaleConstructor);
InstallWithIntrinsicDefaultProto(isolate(), locale_fun,
Context::INTL_LOCALE_FUNCTION_INDEX);
- locale_fun->shared()->set_length(1);
- locale_fun->shared()->DontAdaptArguments();
+ locale_fun->shared().set_length(1);
+ locale_fun->shared().DontAdaptArguments();
// Setup %LocalePrototype%.
Handle<JSObject> prototype(
@@ -3165,8 +3165,8 @@
isolate_, factory->InternalizeUtf8String("TypedArray"),
JS_TYPED_ARRAY_TYPE, JSTypedArray::kHeaderSize, 0,
factory->the_hole_value(), Builtins::kTypedArrayBaseConstructor);
- typed_array_fun->shared()->set_native(false);
- typed_array_fun->shared()->set_length(0);
+ typed_array_fun->shared().set_native(false);
+ typed_array_fun->shared().set_length(0);
InstallSpeciesGetter(isolate_, typed_array_fun);
native_context()->set_typed_array_function(*typed_array_fun);
@@ -3274,8 +3274,8 @@
Builtins::kDataViewConstructor);
InstallWithIntrinsicDefaultProto(isolate_, data_view_fun,
Context::DATA_VIEW_FUN_INDEX);
- data_view_fun->shared()->set_length(1);
- data_view_fun->shared()->DontAdaptArguments();
+ data_view_fun->shared().set_length(1);
+ data_view_fun->shared().DontAdaptArguments();
// Setup %DataViewPrototype%.
Handle<JSObject> prototype(
@@ -3359,7 +3359,7 @@
isolate_, prototype, "set", Builtins::kMapPrototypeSet, 2, true);
// Check that index of "set" function in JSCollection is correct.
DCHECK_EQ(JSCollection::kAddFunctionDescriptorIndex,
- prototype->map()->LastAdded());
+ prototype->map().LastAdded());
native_context()->set_map_set(*map_set);
Handle<JSFunction> map_has = SimpleInstallFunction(
@@ -3396,8 +3396,8 @@
Handle<JSFunction> bigint_fun = InstallFunction(
isolate_, global, "BigInt", JS_VALUE_TYPE, JSValue::kSize, 0,
factory->the_hole_value(), Builtins::kBigIntConstructor);
- bigint_fun->shared()->DontAdaptArguments();
- bigint_fun->shared()->set_length(1);
+ bigint_fun->shared().DontAdaptArguments();
+ bigint_fun->shared().set_length(1);
InstallWithIntrinsicDefaultProto(isolate_, bigint_fun,
Context::BIGINT_FUNCTION_INDEX);
@@ -3454,7 +3454,7 @@
isolate_, prototype, "add", Builtins::kSetPrototypeAdd, 1, true);
// Check that index of "add" function in JSCollection is correct.
DCHECK_EQ(JSCollection::kAddFunctionDescriptorIndex,
- prototype->map()->LastAdded());
+ prototype->map().LastAdded());
native_context()->set_set_add(*set_add);
Handle<JSFunction> set_delete = SimpleInstallFunction(
@@ -3557,7 +3557,7 @@
isolate_, prototype, "set", Builtins::kWeakMapPrototypeSet, 2, true);
// Check that index of "set" function in JSWeakCollection is correct.
DCHECK_EQ(JSWeakCollection::kAddFunctionDescriptorIndex,
- prototype->map()->LastAdded());
+ prototype->map().LastAdded());
native_context()->set_weakmap_set(*weakmap_set);
SimpleInstallFunction(isolate_, prototype, "has",
@@ -3592,7 +3592,7 @@
isolate_, prototype, "add", Builtins::kWeakSetPrototypeAdd, 1, true);
// Check that index of "add" function in JSWeakCollection is correct.
DCHECK_EQ(JSWeakCollection::kAddFunctionDescriptorIndex,
- prototype->map()->LastAdded());
+ prototype->map().LastAdded());
native_context()->set_weakset_add(*weakset_add);
@@ -3618,8 +3618,8 @@
isolate_->proxy_map()->SetConstructor(*proxy_function);
- proxy_function->shared()->set_internal_formal_parameter_count(2);
- proxy_function->shared()->set_length(2);
+ proxy_function->shared().set_internal_formal_parameter_count(2);
+ proxy_function->shared().set_length(2);
native_context()->set_proxy_function(*proxy_function);
JSObject::AddProperty(isolate_, global, name, proxy_function, DONT_ENUM);
@@ -3792,13 +3792,13 @@
}
// @@iterator method is added later.
- DCHECK_EQ(native_context()->object_function()->prototype(),
+ DCHECK_EQ(native_context()->object_function().prototype(),
*isolate_->initial_object_prototype());
Map::SetPrototype(isolate(), map, isolate_->initial_object_prototype());
// Copy constructor from the sloppy arguments boilerplate.
map->SetConstructor(
- native_context()->sloppy_arguments_map()->GetConstructor());
+ native_context()->sloppy_arguments_map().GetConstructor());
native_context()->set_strict_arguments_map(*map);
@@ -3844,10 +3844,10 @@
isolate(), global, name, JS_TYPED_ARRAY_TYPE,
JSTypedArray::kSizeWithEmbedderFields, 0, factory()->the_hole_value(),
Builtins::kTypedArrayConstructor);
- result->initial_map()->set_elements_kind(elements_kind);
+ result->initial_map().set_elements_kind(elements_kind);
- result->shared()->DontAdaptArguments();
- result->shared()->set_length(3);
+ result->shared().DontAdaptArguments();
+ result->shared().set_length(3);
CHECK(JSObject::SetPrototype(result, typed_array_function, false, kDontThrow)
.FromJust());
@@ -3858,7 +3858,7 @@
InstallConstant(isolate(), result, "BYTES_PER_ELEMENT", bytes_per_element);
// Setup prototype object.
- DCHECK(result->prototype()->IsJSObject());
+ DCHECK(result->prototype().IsJSObject());
Handle<JSObject> prototype(JSObject::cast(result->prototype()), isolate());
CHECK(JSObject::SetPrototype(prototype, typed_array_prototype, false,
@@ -4002,8 +4002,8 @@
Builtins::kGeneratorFunctionConstructor);
generator_function_function->set_prototype_or_initial_map(
native_context->generator_function_map());
- generator_function_function->shared()->DontAdaptArguments();
- generator_function_function->shared()->set_length(1);
+ generator_function_function->shared().DontAdaptArguments();
+ generator_function_function->shared().set_length(1);
InstallWithIntrinsicDefaultProto(
isolate, generator_function_function,
Context::GENERATOR_FUNCTION_FUNCTION_INDEX);
@@ -4015,7 +4015,7 @@
generator_function_function,
static_cast<PropertyAttributes>(DONT_ENUM | READ_ONLY));
- native_context->generator_function_map()->SetConstructor(
+ native_context->generator_function_map().SetConstructor(
*generator_function_function);
}
@@ -4031,8 +4031,8 @@
Builtins::kAsyncGeneratorFunctionConstructor);
async_generator_function_function->set_prototype_or_initial_map(
native_context->async_generator_function_map());
- async_generator_function_function->shared()->DontAdaptArguments();
- async_generator_function_function->shared()->set_length(1);
+ async_generator_function_function->shared().DontAdaptArguments();
+ async_generator_function_function->shared().set_length(1);
InstallWithIntrinsicDefaultProto(
isolate, async_generator_function_function,
Context::ASYNC_GENERATOR_FUNCTION_FUNCTION_INDEX);
@@ -4045,7 +4045,7 @@
factory->constructor_string(), async_generator_function_function,
static_cast<PropertyAttributes>(DONT_ENUM | READ_ONLY));
- native_context->async_generator_function_map()->SetConstructor(
+ native_context->async_generator_function_map().SetConstructor(
*async_generator_function_function);
}
@@ -4066,7 +4066,7 @@
Handle<JSFunction> set_iterator_function =
CreateFunction(isolate, "SetIterator", JS_SET_VALUE_ITERATOR_TYPE,
JSSetIterator::kSize, 0, prototype, Builtins::kIllegal);
- set_iterator_function->shared()->set_native(false);
+ set_iterator_function->shared().set_native(false);
Handle<Map> set_value_iterator_map(set_iterator_function->initial_map(),
isolate);
@@ -4096,7 +4096,7 @@
Handle<JSFunction> map_iterator_function =
CreateFunction(isolate, "MapIterator", JS_MAP_KEY_ITERATOR_TYPE,
JSMapIterator::kSize, 0, prototype, Builtins::kIllegal);
- map_iterator_function->shared()->set_native(false);
+ map_iterator_function->shared().set_native(false);
Handle<Map> map_key_iterator_map(map_iterator_function->initial_map(),
isolate);
@@ -4126,8 +4126,8 @@
Builtins::kAsyncFunctionConstructor);
async_function_constructor->set_prototype_or_initial_map(
native_context->async_function_map());
- async_function_constructor->shared()->DontAdaptArguments();
- async_function_constructor->shared()->set_length(1);
+ async_function_constructor->shared().DontAdaptArguments();
+ async_function_constructor->shared().set_length(1);
native_context->set_async_function_constructor(*async_function_constructor);
JSObject::ForceSetPrototype(async_function_constructor,
isolate->function_function());
@@ -4179,7 +4179,7 @@
Handle<JSFunction> callsite_fun = CreateFunction(
isolate(), "CallSite", JS_OBJECT_TYPE, JSObject::kHeaderSize, 0,
factory->the_hole_value(), Builtins::kUnsupportedThrower);
- callsite_fun->shared()->DontAdaptArguments();
+ callsite_fun->shared().DontAdaptArguments();
isolate()->native_context()->set_callsite_function(*callsite_fun);
// Setup CallSite.prototype.
@@ -4287,8 +4287,8 @@
JSFinalizationGroup::kSize, 0, finalization_group_prototype,
Builtins::kFinalizationGroupConstructor);
- finalization_group_fun->shared()->DontAdaptArguments();
- finalization_group_fun->shared()->set_length(1);
+ finalization_group_fun->shared().DontAdaptArguments();
+ finalization_group_fun->shared().set_length(1);
// Install the "constructor" property on the prototype.
JSObject::AddProperty(isolate(), finalization_group_prototype,
@@ -4334,8 +4334,8 @@
isolate(), weak_ref_name, JS_WEAK_REF_TYPE, JSWeakRef::kSize, 0,
weak_ref_prototype, Builtins::kWeakRefConstructor);
- weak_ref_fun->shared()->DontAdaptArguments();
- weak_ref_fun->shared()->set_length(1);
+ weak_ref_fun->shared().DontAdaptArguments();
+ weak_ref_fun->shared().set_length(1);
// Install the "constructor" property on the prototype.
JSObject::AddProperty(isolate(), weak_ref_prototype,
@@ -4431,8 +4431,8 @@
Handle<JSFunction> segmenter_fun = InstallFunction(
isolate(), intl, "Segmenter", JS_INTL_SEGMENTER_TYPE, JSSegmenter::kSize,
0, factory()->the_hole_value(), Builtins::kSegmenterConstructor);
- segmenter_fun->shared()->set_length(0);
- segmenter_fun->shared()->DontAdaptArguments();
+ segmenter_fun->shared().set_length(0);
+ segmenter_fun->shared().DontAdaptArguments();
SimpleInstallFunction(isolate(), segmenter_fun, "supportedLocalesOf",
Builtins::kSegmenterSupportedLocalesOf, 1, false);
@@ -4489,7 +4489,7 @@
Handle<JSFunction> segment_iterator_fun = CreateFunction(
isolate(), name_string, JS_INTL_SEGMENT_ITERATOR_TYPE,
JSSegmentIterator::kSize, 0, prototype, Builtins::kIllegal);
- segment_iterator_fun->shared()->set_native(false);
+ segment_iterator_fun->shared().set_native(false);
Handle<Map> segment_iterator_map(segment_iterator_fun->initial_map(),
isolate());
@@ -4518,8 +4518,8 @@
CreateFunction(isolate(), name, JS_ARRAY_BUFFER_TYPE,
JSArrayBuffer::kSizeWithEmbedderFields, 0, prototype,
Builtins::kArrayBufferConstructor);
- array_buffer_fun->shared()->DontAdaptArguments();
- array_buffer_fun->shared()->set_length(1);
+ array_buffer_fun->shared().DontAdaptArguments();
+ array_buffer_fun->shared().set_length(1);
// Install the "constructor" property on the {prototype}.
JSObject::AddProperty(isolate(), prototype, factory()->constructor_string(),
@@ -4578,7 +4578,7 @@
InstallFunction(isolate(), target, name, JS_ARRAY_TYPE, JSArray::kSize, 0,
prototype, Builtins::kInternalArrayConstructor);
- array_function->shared()->DontAdaptArguments();
+ array_function->shared().DontAdaptArguments();
Handle<Map> original_map(array_function->initial_map(), isolate());
Handle<Map> initial_map = Map::Copy(isolate(), original_map, "InternalArray");
@@ -4655,7 +4655,7 @@
Handle<JSFunction> promise_internal_constructor =
SimpleCreateFunction(isolate(), factory()->empty_string(),
Builtins::kPromiseInternalConstructor, 1, true);
- promise_internal_constructor->shared()->set_native(false);
+ promise_internal_constructor->shared().set_native(false);
JSObject::AddProperty(isolate(), extras_utils, "createPromise",
promise_internal_constructor, DONT_ENUM);
@@ -4663,7 +4663,7 @@
Handle<JSFunction> promise_internal_reject =
SimpleCreateFunction(isolate(), factory()->empty_string(),
Builtins::kPromiseInternalReject, 2, true);
- promise_internal_reject->shared()->set_native(false);
+ promise_internal_reject->shared().set_native(false);
JSObject::AddProperty(isolate(), extras_utils, "rejectPromise",
promise_internal_reject, DONT_ENUM);
@@ -4671,7 +4671,7 @@
Handle<JSFunction> promise_internal_resolve =
SimpleCreateFunction(isolate(), factory()->empty_string(),
Builtins::kPromiseInternalResolve, 2, true);
- promise_internal_resolve->shared()->set_native(false);
+ promise_internal_resolve->shared().set_native(false);
JSObject::AddProperty(isolate(), extras_utils, "resolvePromise",
promise_internal_resolve, DONT_ENUM);
@@ -4708,10 +4708,10 @@
{
Handle<JSFunction> object_function(native_context()->object_function(),
isolate());
- DCHECK(JSObject::cast(object_function->initial_map()->prototype())
- ->HasFastProperties());
+ DCHECK(JSObject::cast(object_function->initial_map().prototype())
+ .HasFastProperties());
native_context()->set_object_function_prototype_map(
- HeapObject::cast(object_function->initial_map()->prototype())->map());
+ HeapObject::cast(object_function->initial_map().prototype()).map());
}
// Store the map for the %StringPrototype% after the natives has been compiled
@@ -4719,10 +4719,10 @@
Handle<JSFunction> string_function(native_context()->string_function(),
isolate());
JSObject string_function_prototype =
- JSObject::cast(string_function->initial_map()->prototype());
- DCHECK(string_function_prototype->HasFastProperties());
+ JSObject::cast(string_function->initial_map().prototype());
+ DCHECK(string_function_prototype.HasFastProperties());
native_context()->set_string_function_prototype_map(
- string_function_prototype->map());
+ string_function_prototype.map());
Handle<JSGlobalObject> global_object =
handle(native_context()->global_object(), isolate());
@@ -4775,7 +4775,7 @@
// Verification of important array prototype properties.
Object length = proto->length();
- CHECK(length->IsSmi());
+ CHECK(length.IsSmi());
CHECK_EQ(Smi::ToInt(length), 0);
CHECK(proto->HasSmiOrObjectElements());
// This is necessary to enable fast checks for absence of elements
@@ -4904,10 +4904,10 @@
{
JSFunction array_function = native_context()->array_function();
Handle<DescriptorArray> array_descriptors(
- array_function->initial_map()->instance_descriptors(), isolate());
+ array_function.initial_map().instance_descriptors(), isolate());
Handle<String> length = factory()->length_string();
int old = array_descriptors->SearchWithCache(
- isolate(), *length, array_function->initial_map());
+ isolate(), *length, array_function.initial_map());
DCHECK_NE(old, DescriptorArray::kNotFound);
Descriptor d = Descriptor::AccessorConstant(
length, handle(array_descriptors->GetStrongValue(old), isolate()),
@@ -5172,7 +5172,7 @@
Handle<FunctionTemplateInfo> proxy_constructor(
FunctionTemplateInfo::cast(global_proxy_data->constructor()),
isolate());
- if (!proxy_constructor->GetPrototypeTemplate()->IsUndefined(isolate())) {
+ if (!proxy_constructor->GetPrototypeTemplate().IsUndefined(isolate())) {
Handle<ObjectTemplateInfo> global_object_data(
ObjectTemplateInfo::cast(proxy_constructor->GetPrototypeTemplate()),
isolate());
@@ -5183,7 +5183,7 @@
JSObject::ForceSetPrototype(global_proxy, global_object);
native_context()->set_array_buffer_map(
- native_context()->array_buffer_fun()->initial_map());
+ native_context()->array_buffer_fun().initial_map());
Handle<JSFunction> js_map_fun(native_context()->js_map_fun(), isolate());
Handle<JSFunction> js_set_fun(native_context()->js_set_fun(), isolate());
@@ -5208,7 +5208,7 @@
Handle<ObjectTemplateInfo> object_template) {
DCHECK(!object_template.is_null());
DCHECK(FunctionTemplateInfo::cast(object_template->constructor())
- ->IsTemplateFor(object->map()));
+ .IsTemplateFor(object->map()));
MaybeHandle<JSObject> maybe_obj =
ApiNatives::InstantiateObject(object->GetIsolate(), object_template);
@@ -5238,8 +5238,8 @@
// in the snapshotted global object.
if (from->HasFastProperties()) {
Handle<DescriptorArray> descs =
- Handle<DescriptorArray>(from->map()->instance_descriptors(), isolate());
- for (int i = 0; i < from->map()->NumberOfOwnDescriptors(); i++) {
+ Handle<DescriptorArray>(from->map().instance_descriptors(), isolate());
+ for (int i = 0; i < from->map().NumberOfOwnDescriptors(); i++) {
PropertyDetails details = descs->GetDetails(i);
if (details.location() == kField) {
if (details.kind() == kData) {
@@ -5275,7 +5275,7 @@
} else if (from->IsJSGlobalObject()) {
// Copy all keys and values in enumeration order.
Handle<GlobalDictionary> properties(
- JSGlobalObject::cast(*from)->global_dictionary(), isolate());
+ JSGlobalObject::cast(*from).global_dictionary(), isolate());
Handle<FixedArray> indices =
GlobalDictionary::IterationIndices(isolate(), properties);
for (int i = 0; i < indices->length(); i++) {
@@ -5302,7 +5302,7 @@
int key_index = Smi::ToInt(key_indices->get(i));
Object raw_key = properties->KeyAt(key_index);
DCHECK(properties->IsKey(roots, raw_key));
- DCHECK(raw_key->IsName());
+ DCHECK(raw_key.IsName());
Handle<Name> key(Name::cast(raw_key), isolate());
// If the property is already there we skip it.
if (PropertyAlreadyExists(isolate(), to, key)) continue;
@@ -5337,7 +5337,7 @@
TransferIndexedProperties(from, to);
// Transfer the prototype (new map is needed).
- Handle<HeapObject> proto(from->map()->prototype(), isolate());
+ Handle<HeapObject> proto(from->map().prototype(), isolate());
JSObject::ForceSetPrototype(to, proto);
}
@@ -5366,7 +5366,7 @@
// The global proxy function to reinitialize this global proxy is in the
// context that is yet to be deserialized. We need to prepare a global
// proxy of the correct size.
- Object size = isolate->heap()->serialized_global_proxy_sizes()->get(
+ Object size = isolate->heap()->serialized_global_proxy_sizes().get(
static_cast<int>(context_snapshot_index) - 1);
instance_size = Smi::ToInt(size);
} else {
@@ -5457,10 +5457,10 @@
Handle<JSFunction> string_function(native_context()->string_function(),
isolate);
JSObject string_function_prototype =
- JSObject::cast(string_function->initial_map()->prototype());
- DCHECK(string_function_prototype->HasFastProperties());
+ JSObject::cast(string_function->initial_map().prototype());
+ DCHECK(string_function_prototype.HasFastProperties());
native_context()->set_string_function_prototype_map(
- string_function_prototype->map());
+ string_function_prototype.map());
}
if (FLAG_disallow_code_generation_from_strings) {
@@ -5525,10 +5525,10 @@
// Configure the hidden prototype chain of the global proxy.
JSObject::ForceSetPrototype(global_proxy, global_object);
- global_proxy->map()->SetConstructor(*global_constructor);
+ global_proxy->map().SetConstructor(*global_constructor);
// TODO(dcheng): This is a hack. Why does this need to be manually called
// here? Line 4812 should have taken care of it?
- global_proxy->map()->set_has_hidden_prototype(true);
+ global_proxy->map().set_has_hidden_prototype(true);
global_proxy_ = global_proxy;
}
diff --git a/src/interpreter/bytecode-array-accessor.cc b/src/interpreter/bytecode-array-accessor.cc
index e455cfd..9e1bca6 100644
--- a/src/interpreter/bytecode-array-accessor.cc
+++ b/src/interpreter/bytecode-array-accessor.cc
@@ -198,7 +198,7 @@
}
Object BytecodeArrayAccessor::GetConstantAtIndex(int index) const {
- return bytecode_array()->constant_pool()->get(index);
+ return bytecode_array()->constant_pool().get(index);
}
Object BytecodeArrayAccessor::GetConstantForIndexOperand(
@@ -216,7 +216,7 @@
return GetAbsoluteOffset(relative_offset);
} else if (interpreter::Bytecodes::IsJumpConstant(bytecode)) {
Smi smi = Smi::cast(GetConstantForIndexOperand(0));
- return GetAbsoluteOffset(smi->value());
+ return GetAbsoluteOffset(smi.value());
} else {
UNREACHABLE();
}
@@ -318,15 +318,15 @@
if (table_offset_ >= table_end_) return;
Object current = accessor_->GetConstantAtIndex(table_offset_);
- while (!current->IsSmi()) {
- DCHECK(current->IsTheHole());
+ while (!current.IsSmi()) {
+ DCHECK(current.IsTheHole());
++table_offset_;
++index_;
if (table_offset_ >= table_end_) break;
current = accessor_->GetConstantAtIndex(table_offset_);
}
// Make sure we haven't reached the end of the table with a hole in current.
- if (current->IsSmi()) {
+ if (current.IsSmi()) {
current_ = Smi::cast(current);
}
}
diff --git a/src/interpreter/bytecode-array-builder.cc b/src/interpreter/bytecode-array-builder.cc
index d0a3034..230f3f9 100644
--- a/src/interpreter/bytecode-array-builder.cc
+++ b/src/interpreter/bytecode-array-builder.cc
@@ -406,40 +406,40 @@
Token::Value op, Smi literal, int feedback_slot) {
switch (op) {
case Token::Value::ADD:
- OutputAddSmi(literal->value(), feedback_slot);
+ OutputAddSmi(literal.value(), feedback_slot);
break;
case Token::Value::SUB:
- OutputSubSmi(literal->value(), feedback_slot);
+ OutputSubSmi(literal.value(), feedback_slot);
break;
case Token::Value::MUL:
- OutputMulSmi(literal->value(), feedback_slot);
+ OutputMulSmi(literal.value(), feedback_slot);
break;
case Token::Value::DIV:
- OutputDivSmi(literal->value(), feedback_slot);
+ OutputDivSmi(literal.value(), feedback_slot);
break;
case Token::Value::MOD:
- OutputModSmi(literal->value(), feedback_slot);
+ OutputModSmi(literal.value(), feedback_slot);
break;
case Token::Value::EXP:
- OutputExpSmi(literal->value(), feedback_slot);
+ OutputExpSmi(literal.value(), feedback_slot);
break;
case Token::Value::BIT_OR:
- OutputBitwiseOrSmi(literal->value(), feedback_slot);
+ OutputBitwiseOrSmi(literal.value(), feedback_slot);
break;
case Token::Value::BIT_XOR:
- OutputBitwiseXorSmi(literal->value(), feedback_slot);
+ OutputBitwiseXorSmi(literal.value(), feedback_slot);
break;
case Token::Value::BIT_AND:
- OutputBitwiseAndSmi(literal->value(), feedback_slot);
+ OutputBitwiseAndSmi(literal.value(), feedback_slot);
break;
case Token::Value::SHL:
- OutputShiftLeftSmi(literal->value(), feedback_slot);
+ OutputShiftLeftSmi(literal.value(), feedback_slot);
break;
case Token::Value::SAR:
- OutputShiftRightSmi(literal->value(), feedback_slot);
+ OutputShiftRightSmi(literal.value(), feedback_slot);
break;
case Token::Value::SHR:
- OutputShiftRightLogicalSmi(literal->value(), feedback_slot);
+ OutputShiftRightLogicalSmi(literal.value(), feedback_slot);
break;
default:
UNREACHABLE();
@@ -573,7 +573,7 @@
}
BytecodeArrayBuilder& BytecodeArrayBuilder::LoadLiteral(Smi smi) {
- int32_t raw_smi = smi->value();
+ int32_t raw_smi = smi.value();
if (raw_smi == 0) {
OutputLdaZero();
} else {
diff --git a/src/interpreter/interpreter.cc b/src/interpreter/interpreter.cc
index 5ba51e9..406d7ed 100644
--- a/src/interpreter/interpreter.cc
+++ b/src/interpreter/interpreter.cc
@@ -86,9 +86,9 @@
void Interpreter::SetBytecodeHandler(Bytecode bytecode,
OperandScale operand_scale, Code handler) {
- DCHECK(handler->kind() == Code::BYTECODE_HANDLER);
+ DCHECK(handler.kind() == Code::BYTECODE_HANDLER);
size_t index = GetDispatchTableIndex(bytecode, operand_scale);
- dispatch_table_[index] = handler->InstructionStart();
+ dispatch_table_[index] = handler.InstructionStart();
}
// static
@@ -130,7 +130,7 @@
Code old_code = code;
v->VisitRootPointer(Root::kDispatchTable, nullptr, FullObjectSlot(&code));
if (code != old_code) {
- dispatch_table_[i] = code->entry();
+ dispatch_table_[i] = code.entry();
}
}
}
@@ -291,10 +291,9 @@
const char* Interpreter::LookupNameOfBytecodeHandler(const Code code) {
#ifdef ENABLE_DISASSEMBLER
-#define RETURN_NAME(Name, ...) \
- if (dispatch_table_[Bytecodes::ToByte(Bytecode::k##Name)] == \
- code->entry()) { \
- return #Name; \
+#define RETURN_NAME(Name, ...) \
+ if (dispatch_table_[Bytecodes::ToByte(Bytecode::k##Name)] == code.entry()) { \
+ return #Name; \
}
BYTECODE_LIST(RETURN_NAME)
#undef RETURN_NAME
diff --git a/src/json/json-parser.cc b/src/json/json-parser.cc
index 8c975bd..bcf7d93 100644
--- a/src/json/json-parser.cc
+++ b/src/json/json-parser.cc
@@ -220,13 +220,13 @@
if (StringShape(*source_).IsExternal()) {
chars_ =
- static_cast<const Char*>(SeqExternalString::cast(*source_)->GetChars());
+ static_cast<const Char*>(SeqExternalString::cast(*source_).GetChars());
chars_may_relocate_ = false;
} else {
DisallowHeapAllocation no_gc;
isolate->heap()->AddGCEpilogueCallback(UpdatePointersCallback,
v8::kGCTypeAll, this);
- chars_ = SeqString::cast(*source_)->GetChars(no_gc);
+ chars_ = SeqString::cast(*source_).GetChars(no_gc);
chars_may_relocate_ = true;
}
cursor_ = chars_ + start;
@@ -495,7 +495,7 @@
Handle<Object> value = property.value;
PropertyDetails details =
- target->instance_descriptors()->GetDetails(descriptor);
+ target->instance_descriptors().GetDetails(descriptor);
Representation expected_representation = details.representation();
if (!value->FitsRepresentation(expected_representation)) {
@@ -511,8 +511,8 @@
representation, value_type);
} else if (expected_representation.IsHeapObject() &&
!target->instance_descriptors()
- ->GetFieldType(descriptor)
- ->NowContains(value)) {
+ .GetFieldType(descriptor)
+ .NowContains(value)) {
Handle<FieldType> value_type =
value->OptimalType(isolate(), expected_representation);
Map::GeneralizeField(isolate(), target, descriptor, details.constness(),
@@ -523,8 +523,8 @@
}
DCHECK(target->instance_descriptors()
- ->GetFieldType(descriptor)
- ->NowContains(value));
+ .GetFieldType(descriptor)
+ .NowContains(value));
map = target;
descriptor++;
}
@@ -569,7 +569,7 @@
const JsonProperty& property = property_stack[start + j];
if (property.string.is_index()) continue;
PropertyDetails details =
- map->instance_descriptors()->GetDetails(descriptor);
+ map->instance_descriptors().GetDetails(descriptor);
Object value = *property.value;
FieldIndex index = FieldIndex::ForDescriptor(*map, descriptor);
descriptor++;
@@ -608,7 +608,7 @@
mutable_double_address += kMutableDoubleSize;
} else {
DCHECK(value.IsHeapNumber());
- HeapObject::cast(value)->synchronized_set_map(
+ HeapObject::cast(value).synchronized_set_map(
*factory()->mutable_heap_number_map());
}
}
@@ -670,7 +670,7 @@
DisallowHeapAllocation no_gc;
FixedDoubleArray elements = FixedDoubleArray::cast(array->elements());
for (int i = 0; i < length; i++) {
- elements->set(i, element_stack[start + i]->Number());
+ elements.set(i, element_stack[start + i]->Number());
}
} else {
DisallowHeapAllocation no_gc;
@@ -679,7 +679,7 @@
? SKIP_WRITE_BARRIER
: elements.GetWriteBarrierMode(no_gc);
for (int i = 0; i < length; i++) {
- elements->set(i, *element_stack[start + i], mode);
+ elements.set(i, *element_stack[start + i], mode);
}
}
return array;
@@ -826,7 +826,7 @@
cont_stack.back().index < element_stack.size() &&
element_stack.back()->IsJSObject()) {
feedback =
- handle(JSObject::cast(*element_stack.back())->map(), isolate_);
+ handle(JSObject::cast(*element_stack.back()).map(), isolate_);
}
value = BuildJsonObject(cont, property_stack, feedback);
property_stack.resize(cont.index);
diff --git a/src/json/json-stringifier.cc b/src/json/json-stringifier.cc
index 50519be..889899c 100644
--- a/src/json/json-stringifier.cc
+++ b/src/json/json-stringifier.cc
@@ -456,7 +456,7 @@
static const int kBufferSize = 100;
char chars[kBufferSize];
Vector<char> buffer(chars, kBufferSize);
- builder_.AppendCString(IntToCString(smi->value(), buffer));
+ builder_.AppendCString(IntToCString(smi.value(), buffer));
}
IncrementalStringBuilder builder_;
@@ -511,7 +511,7 @@
StackLimitCheck interrupt_check(isolate_);
Handle<Object> initial_value = object;
if (interrupt_check.InterruptRequested() &&
- isolate_->stack_guard()->HandleInterrupts()->IsException(isolate_)) {
+ isolate_->stack_guard()->HandleInterrupts().IsException(isolate_)) {
return EXCEPTION;
}
if (object->IsJSReceiver() || object->IsBigInt()) {
@@ -529,7 +529,7 @@
return SerializeSmi(Smi::cast(*object));
}
- switch (HeapObject::cast(*object)->map()->instance_type()) {
+ switch (HeapObject::cast(*object).map().instance_type()) {
case HEAP_NUMBER_TYPE:
case MUTABLE_HEAP_NUMBER_TYPE:
if (deferred_string_key) SerializeDeferredKey(comma, key);
@@ -539,7 +539,7 @@
*factory()->NewTypeError(MessageTemplate::kBigIntSerializeJSON));
return EXCEPTION;
case ODDBALL_TYPE:
- switch (Oddball::cast(*object)->kind()) {
+ switch (Oddball::cast(*object).kind()) {
case Oddball::kFalse:
if (deferred_string_key) SerializeDeferredKey(comma, key);
builder_.AppendCString("false");
@@ -586,23 +586,23 @@
JsonStringifier::Result JsonStringifier::SerializeJSValue(
Handle<JSValue> object, Handle<Object> key) {
Object raw = object->value();
- if (raw->IsString()) {
+ if (raw.IsString()) {
Handle<Object> value;
ASSIGN_RETURN_ON_EXCEPTION_VALUE(
isolate_, value, Object::ToString(isolate_, object), EXCEPTION);
SerializeString(Handle<String>::cast(value));
- } else if (raw->IsNumber()) {
+ } else if (raw.IsNumber()) {
Handle<Object> value;
ASSIGN_RETURN_ON_EXCEPTION_VALUE(
isolate_, value, Object::ToNumber(isolate_, object), EXCEPTION);
if (value->IsSmi()) return SerializeSmi(Smi::cast(*value));
SerializeHeapNumber(Handle<HeapNumber>::cast(value));
- } else if (raw->IsBigInt()) {
+ } else if (raw.IsBigInt()) {
isolate_->Throw(
*factory()->NewTypeError(MessageTemplate::kBigIntSerializeJSON));
return EXCEPTION;
- } else if (raw->IsBoolean()) {
- builder_.AppendCString(raw->IsTrue(isolate_) ? "true" : "false");
+ } else if (raw.IsBoolean()) {
+ builder_.AppendCString(raw.IsTrue(isolate_) ? "true" : "false");
} else {
// ES6 24.3.2.1 step 10.c, serialize as an ordinary JSObject.
return SerializeJSObject(object, key);
@@ -614,7 +614,7 @@
static const int kBufferSize = 100;
char chars[kBufferSize];
Vector<char> buffer(chars, kBufferSize);
- builder_.AppendCString(IntToCString(object->value(), buffer));
+ builder_.AppendCString(IntToCString(object.value(), buffer));
return SUCCESS;
}
@@ -636,7 +636,7 @@
Result stack_push = StackPush(object, key);
if (stack_push != SUCCESS) return stack_push;
uint32_t length = 0;
- CHECK(object->length()->ToArrayLength(&length));
+ CHECK(object->length().ToArrayLength(&length));
DCHECK(!object->IsAccessCheckNeeded());
builder_.AppendCharacter('[');
Indent();
@@ -649,7 +649,7 @@
StackLimitCheck interrupt_check(isolate_);
while (i < length) {
if (interrupt_check.InterruptRequested() &&
- isolate_->stack_guard()->HandleInterrupts()->IsException(
+ isolate_->stack_guard()->HandleInterrupts().IsException(
isolate_)) {
return EXCEPTION;
}
@@ -667,7 +667,7 @@
StackLimitCheck interrupt_check(isolate_);
while (i < length) {
if (interrupt_check.InterruptRequested() &&
- isolate_->stack_guard()->HandleInterrupts()->IsException(
+ isolate_->stack_guard()->HandleInterrupts().IsException(
isolate_)) {
return EXCEPTION;
}
@@ -688,7 +688,7 @@
Separator(i == 0);
Result result = SerializeElement(
isolate_,
- Handle<Object>(FixedArray::cast(object->elements())->get(i),
+ Handle<Object>(FixedArray::cast(object->elements()).get(i),
isolate_),
i);
if (result == UNCHANGED) {
@@ -752,7 +752,7 @@
if (stack_push != SUCCESS) return stack_push;
if (property_list_.is_null() &&
- !object->map()->IsCustomElementsReceiverMap() &&
+ !object->map().IsCustomElementsReceiverMap() &&
object->HasFastProperties() &&
(object->elements() == ReadOnlyRoots(isolate_).empty_fixed_array() ||
object->elements() ==
@@ -765,11 +765,11 @@
Indent();
bool comma = false;
for (int i = 0; i < map->NumberOfOwnDescriptors(); i++) {
- Handle<Name> name(map->instance_descriptors()->GetKey(i), isolate_);
+ Handle<Name> name(map->instance_descriptors().GetKey(i), isolate_);
// TODO(rossberg): Should this throw?
if (!name->IsString()) continue;
Handle<String> key = Handle<String>::cast(name);
- PropertyDetails details = map->instance_descriptors()->GetDetails(i);
+ PropertyDetails details = map->instance_descriptors().GetDetails(i);
if (details.IsDontEnum()) continue;
Handle<Object> property;
if (details.location() == kField && *map == object->map()) {
diff --git a/src/keys.cc b/src/keys.cc
index 7e54048..89a7bb5 100644
--- a/src/keys.cc
+++ b/src/keys.cc
@@ -28,7 +28,7 @@
int len = array->length();
for (int i = 0; i < len; i++) {
Object e = array->get(i);
- if (!(e->IsName() || e->IsNumber())) return false;
+ if (!(e.IsName() || e.IsNumber())) return false;
}
return true;
}
@@ -71,11 +71,11 @@
void KeyAccumulator::AddKey(Handle<Object> key, AddKeyConversion convert) {
if (filter_ == PRIVATE_NAMES_ONLY) {
if (!key->IsSymbol()) return;
- if (!Symbol::cast(*key)->is_private_name()) return;
+ if (!Symbol::cast(*key).is_private_name()) return;
} else if (key->IsSymbol()) {
if (filter_ & SKIP_SYMBOLS) return;
- if (Symbol::cast(*key)->is_private()) return;
+ if (Symbol::cast(*key).is_private()) return;
} else if (filter_ & SKIP_STRINGS) {
return;
}
@@ -231,22 +231,22 @@
namespace {
void TrySettingEmptyEnumCache(JSReceiver object) {
- Map map = object->map();
- DCHECK_EQ(kInvalidEnumCacheSentinel, map->EnumLength());
- if (!map->OnlyHasSimpleProperties()) return;
- if (map->IsJSProxyMap()) return;
- if (map->NumberOfEnumerableProperties() > 0) return;
- DCHECK(object->IsJSObject());
- map->SetEnumLength(0);
+ Map map = object.map();
+ DCHECK_EQ(kInvalidEnumCacheSentinel, map.EnumLength());
+ if (!map.OnlyHasSimpleProperties()) return;
+ if (map.IsJSProxyMap()) return;
+ if (map.NumberOfEnumerableProperties() > 0) return;
+ DCHECK(object.IsJSObject());
+ map.SetEnumLength(0);
}
bool CheckAndInitalizeEmptyEnumCache(JSReceiver object) {
- if (object->map()->EnumLength() == kInvalidEnumCacheSentinel) {
+ if (object.map().EnumLength() == kInvalidEnumCacheSentinel) {
TrySettingEmptyEnumCache(object);
}
- if (object->map()->EnumLength() != 0) return false;
- DCHECK(object->IsJSObject());
- return !JSObject::cast(object)->HasEnumerableElements();
+ if (object.map().EnumLength() != 0) return false;
+ DCHECK(object.IsJSObject());
+ return !JSObject::cast(object).HasEnumerableElements();
}
} // namespace
@@ -268,8 +268,8 @@
}
if (has_empty_prototype_) {
is_receiver_simple_enum_ =
- receiver_->map()->EnumLength() != kInvalidEnumCacheSentinel &&
- !JSObject::cast(*receiver_)->HasEnumerableElements();
+ receiver_->map().EnumLength() != kInvalidEnumCacheSentinel &&
+ !JSObject::cast(*receiver_).HasEnumerableElements();
} else if (!last_prototype.is_null()) {
last_non_empty_prototype_ = handle(last_prototype, isolate_);
}
@@ -289,7 +289,7 @@
Handle<FixedArray> GetFastEnumPropertyKeys(Isolate* isolate,
Handle<JSObject> object) {
Handle<Map> map(object->map(), isolate);
- Handle<FixedArray> keys(map->instance_descriptors()->enum_cache()->keys(),
+ Handle<FixedArray> keys(map->instance_descriptors().enum_cache().keys(),
isolate);
// Check if the {map} has a valid enum length, which implies that it
@@ -328,7 +328,7 @@
PropertyDetails details = descriptors->GetDetails(i);
if (details.IsDontEnum()) continue;
Object key = descriptors->GetKey(i);
- if (key->IsSymbol()) continue;
+ if (key.IsSymbol()) continue;
keys->set(index, key);
if (details.location() != kField) fields_only = false;
index++;
@@ -345,7 +345,7 @@
PropertyDetails details = descriptors->GetDetails(i);
if (details.IsDontEnum()) continue;
Object key = descriptors->GetKey(i);
- if (key->IsSymbol()) continue;
+ if (key.IsSymbol()) continue;
DCHECK_EQ(kData, details.kind());
DCHECK_EQ(kField, details.location());
FieldIndex field_index = FieldIndex::ForDescriptor(*map, i);
@@ -410,7 +410,7 @@
GetKeysConversion keys_conversion) {
bool own_only = has_empty_prototype_ || mode_ == KeyCollectionMode::kOwnOnly;
Map map = receiver_->map();
- if (!own_only || map->IsCustomElementsReceiverMap()) {
+ if (!own_only || map.IsCustomElementsReceiverMap()) {
return MaybeHandle<FixedArray>();
}
@@ -419,11 +419,11 @@
Handle<JSObject> object = Handle<JSObject>::cast(receiver_);
// Do not try to use the enum-cache for dict-mode objects.
- if (map->is_dictionary_map()) {
+ if (map.is_dictionary_map()) {
return GetOwnKeysWithElements<false>(isolate_, object, keys_conversion,
skip_indices_);
}
- int enum_length = receiver_->map()->EnumLength();
+ int enum_length = receiver_->map().EnumLength();
if (enum_length == kInvalidEnumCacheSentinel) {
Handle<FixedArray> keys;
// Try initializing the enum cache and return own properties.
@@ -433,7 +433,7 @@
keys->length());
}
is_receiver_simple_enum_ =
- object->map()->EnumLength() != kInvalidEnumCacheSentinel;
+ object->map().EnumLength() != kInvalidEnumCacheSentinel;
return keys;
}
}
@@ -454,9 +454,9 @@
// Assume that there are elements.
return MaybeHandle<FixedArray>();
}
- int number_of_own_descriptors = map->NumberOfOwnDescriptors();
+ int number_of_own_descriptors = map.NumberOfOwnDescriptors();
if (number_of_own_descriptors == 0) {
- map->SetEnumLength(0);
+ map.SetEnumLength(0);
return isolate_->factory()->empty_fixed_array();
}
// We have no elements but possibly enumerable property keys, hence we can
@@ -533,7 +533,7 @@
*object, Just(kDontThrow));
Handle<JSObject> result;
- if (!interceptor->enumerator()->IsUndefined(isolate)) {
+ if (!interceptor->enumerator().IsUndefined(isolate)) {
if (type == kIndexed) {
result = enum_args.CallIndexedEnumerator(interceptor);
} else {
@@ -545,7 +545,7 @@
if (result.is_null()) return Just(true);
if ((accumulator->filter() & ONLY_ENUMERABLE) &&
- !interceptor->query()->IsUndefined(isolate)) {
+ !interceptor->query().IsUndefined(isolate)) {
FilterForEnumerableProperties(receiver, object, interceptor, accumulator,
result, type);
} else {
@@ -614,16 +614,16 @@
if (filter & ONLY_ALL_CAN_READ) {
if (details.kind() != kAccessor) continue;
Object accessors = descs->GetStrongValue(i);
- if (!accessors->IsAccessorInfo()) continue;
- if (!AccessorInfo::cast(accessors)->all_can_read()) continue;
+ if (!accessors.IsAccessorInfo()) continue;
+ if (!AccessorInfo::cast(accessors).all_can_read()) continue;
}
Name key = descs->GetKey(i);
- if (skip_symbols == key->IsSymbol()) {
+ if (skip_symbols == key.IsSymbol()) {
if (first_skipped == -1) first_skipped = i;
continue;
}
- if (key->FilterKey(keys->filter())) continue;
+ if (key.FilterKey(keys->filter())) continue;
if (is_shadowing_key) {
keys->AddShadowingKey(key);
@@ -660,10 +660,10 @@
// If the number of properties equals the length of enumerable properties
// we do not have to filter out non-enumerable ones
Map map = object->map();
- int nof_descriptors = map->NumberOfOwnDescriptors();
+ int nof_descriptors = map.NumberOfOwnDescriptors();
if (enum_keys->length() != nof_descriptors) {
Handle<DescriptorArray> descs =
- Handle<DescriptorArray>(map->instance_descriptors(), isolate_);
+ Handle<DescriptorArray>(map.instance_descriptors(), isolate_);
for (int i = 0; i < nof_descriptors; i++) {
PropertyDetails details = descs->GetDetails(i);
if (!details.IsDontEnum()) continue;
@@ -674,7 +674,7 @@
} else if (object->IsJSGlobalObject()) {
enum_keys = GetOwnEnumPropertyDictionaryKeys(
isolate_, mode_, this, object,
- JSGlobalObject::cast(*object)->global_dictionary());
+ JSGlobalObject::cast(*object).global_dictionary());
} else {
enum_keys = GetOwnEnumPropertyDictionaryKeys(
isolate_, mode_, this, object, object->property_dictionary());
@@ -694,8 +694,8 @@
AddKeys(enum_keys, DO_NOT_CONVERT);
} else {
if (object->HasFastProperties()) {
- int limit = object->map()->NumberOfOwnDescriptors();
- Handle<DescriptorArray> descs(object->map()->instance_descriptors(),
+ int limit = object->map().NumberOfOwnDescriptors();
+ Handle<DescriptorArray> descs(object->map().instance_descriptors(),
isolate_);
// First collect the strings,
int first_symbol =
@@ -707,7 +707,7 @@
}
} else if (object->IsJSGlobalObject()) {
GlobalDictionary::CollectKeysTo(
- handle(JSGlobalObject::cast(*object)->global_dictionary(), isolate_),
+ handle(JSGlobalObject::cast(*object).global_dictionary(), isolate_),
this);
} else {
NameDictionary::CollectKeysTo(
@@ -721,13 +721,13 @@
void KeyAccumulator::CollectPrivateNames(Handle<JSReceiver> receiver,
Handle<JSObject> object) {
if (object->HasFastProperties()) {
- int limit = object->map()->NumberOfOwnDescriptors();
- Handle<DescriptorArray> descs(object->map()->instance_descriptors(),
+ int limit = object->map().NumberOfOwnDescriptors();
+ Handle<DescriptorArray> descs(object->map().instance_descriptors(),
isolate_);
CollectOwnPropertyNamesInternal<false>(object, this, descs, 0, limit);
} else if (object->IsJSGlobalObject()) {
GlobalDictionary::CollectKeysTo(
- handle(JSGlobalObject::cast(*object)->global_dictionary(), isolate_),
+ handle(JSGlobalObject::cast(*object).global_dictionary(), isolate_),
this);
} else {
NameDictionary::CollectKeysTo(
@@ -807,7 +807,7 @@
} else if (object->IsJSGlobalObject()) {
return GetOwnEnumPropertyDictionaryKeys(
isolate, KeyCollectionMode::kOwnOnly, nullptr, object,
- JSGlobalObject::cast(*object)->global_dictionary());
+ JSGlobalObject::cast(*object).global_dictionary());
} else {
return GetOwnEnumPropertyDictionaryKeys(
isolate, KeyCollectionMode::kOwnOnly, nullptr, object,
@@ -973,7 +973,7 @@
// 21. Repeat, for each key that is an element of targetConfigurableKeys:
for (int i = 0; i < target_configurable_keys->length(); ++i) {
Object raw_key = target_configurable_keys->get(i);
- if (raw_key->IsSmi()) continue; // Zapped entry, was nonconfigurable.
+ if (raw_key.IsSmi()) continue; // Zapped entry, was nonconfigurable.
Handle<Name> key(Name::cast(raw_key), isolate_);
// 21a. If key is not an element of uncheckedResultKeys, throw a
// TypeError exception.
diff --git a/src/logging/log-inl.h b/src/logging/log-inl.h
index aa36a9a..f196ce5 100644
--- a/src/logging/log-inl.h
+++ b/src/logging/log-inl.h
@@ -15,7 +15,7 @@
CodeEventListener::LogEventsAndTags Logger::ToNativeByScript(
CodeEventListener::LogEventsAndTags tag, Script script) {
- if (script->type() != Script::TYPE_NATIVE) return tag;
+ if (script.type() != Script::TYPE_NATIVE) return tag;
switch (tag) {
case CodeEventListener::FUNCTION_TAG:
return CodeEventListener::NATIVE_FUNCTION_TAG;
diff --git a/src/logging/log-utils.cc b/src/logging/log-utils.cc
index 4182468..471b940 100644
--- a/src/logging/log-utils.cc
+++ b/src/logging/log-utils.cc
@@ -92,10 +92,10 @@
if (str.is_null()) return;
DisallowHeapAllocation no_gc; // Ensure string stays valid.
- int length = str->length();
+ int length = str.length();
if (length_limit) length = std::min(length, *length_limit);
for (int i = 0; i < length; i++) {
- uint16_t c = str->Get(i);
+ uint16_t c = str.Get(i);
if (c <= 0xFF) {
AppendCharacter(static_cast<char>(c));
} else {
@@ -158,12 +158,12 @@
DCHECK(!symbol.is_null());
OFStream& os = log_->os_;
os << "symbol(";
- if (!symbol->name()->IsUndefined()) {
+ if (!symbol.name().IsUndefined()) {
os << "\"";
- AppendSymbolNameDetails(String::cast(symbol->name()), false);
+ AppendSymbolNameDetails(String::cast(symbol.name()), false);
os << "\" ";
}
- os << "hash " << std::hex << symbol->Hash() << std::dec << ")";
+ os << "hash " << std::hex << symbol.Hash() << std::dec << ")";
}
void Log::MessageBuilder::AppendSymbolNameDetails(String str,
@@ -172,13 +172,13 @@
DisallowHeapAllocation no_gc; // Ensure string stays valid.
OFStream& os = log_->os_;
- int limit = str->length();
+ int limit = str.length();
if (limit > 0x1000) limit = 0x1000;
if (show_impl_info) {
- os << (str->IsOneByteRepresentation() ? 'a' : '2');
+ os << (str.IsOneByteRepresentation() ? 'a' : '2');
if (StringShape(str).IsExternal()) os << 'e';
if (StringShape(str).IsInternalized()) os << '#';
- os << ':' << str->length() << ':';
+ os << ':' << str.length() << ':';
}
AppendString(str, limit);
}
@@ -245,7 +245,7 @@
template <>
Log::MessageBuilder& Log::MessageBuilder::operator<<<Name>(Name name) {
- if (name->IsString()) {
+ if (name.IsString()) {
this->AppendString(String::cast(name));
} else {
this->AppendSymbolName(Symbol::cast(name));
diff --git a/src/logging/log.cc b/src/logging/log.cc
index 1fa6220..ed56b3a 100644
--- a/src/logging/log.cc
+++ b/src/logging/log.cc
@@ -73,9 +73,9 @@
}
static const char* ComputeMarker(SharedFunctionInfo shared, AbstractCode code) {
- switch (code->kind()) {
+ switch (code.kind()) {
case AbstractCode::INTERPRETED_FUNCTION:
- return shared->optimization_disabled() ? "" : "~";
+ return shared.optimization_disabled() ? "" : "~";
case AbstractCode::OPTIMIZED_FUNCTION:
return "*";
default:
@@ -107,18 +107,18 @@
}
void AppendName(Name name) {
- if (name->IsString()) {
+ if (name.IsString()) {
AppendString(String::cast(name));
} else {
Symbol symbol = Symbol::cast(name);
AppendBytes("symbol(");
- if (!symbol->name()->IsUndefined()) {
+ if (!symbol.name().IsUndefined()) {
AppendBytes("\"");
- AppendString(String::cast(symbol->name()));
+ AppendString(String::cast(symbol.name()));
AppendBytes("\" ");
}
AppendBytes("hash ");
- AppendHex(symbol->Hash());
+ AppendHex(symbol.Hash());
AppendByte(')');
}
}
@@ -127,7 +127,7 @@
if (str.is_null()) return;
int length = 0;
std::unique_ptr<char[]> c_str =
- str->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL, &length);
+ str.ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL, &length);
AppendBytes(c_str.get(), length);
}
@@ -216,13 +216,13 @@
int line, int column) {
name_buffer_->Init(tag);
name_buffer_->AppendBytes(ComputeMarker(shared, code));
- name_buffer_->AppendString(shared->DebugName());
+ name_buffer_->AppendString(shared.DebugName());
name_buffer_->AppendByte(' ');
- if (source->IsString()) {
+ if (source.IsString()) {
name_buffer_->AppendString(String::cast(source));
} else {
name_buffer_->AppendBytes("symbol(hash ");
- name_buffer_->AppendHex(Name::cast(source)->Hash());
+ name_buffer_->AppendHex(Name::cast(source).Hash());
name_buffer_->AppendByte(')');
}
name_buffer_->AppendByte(':');
@@ -319,14 +319,14 @@
void PerfBasicLogger::LogRecordedBuffer(AbstractCode code, SharedFunctionInfo,
const char* name, int length) {
if (FLAG_perf_basic_prof_only_functions &&
- (code->kind() != AbstractCode::INTERPRETED_FUNCTION &&
- code->kind() != AbstractCode::BUILTIN &&
- code->kind() != AbstractCode::OPTIMIZED_FUNCTION)) {
+ (code.kind() != AbstractCode::INTERPRETED_FUNCTION &&
+ code.kind() != AbstractCode::BUILTIN &&
+ code.kind() != AbstractCode::OPTIMIZED_FUNCTION)) {
return;
}
- WriteLogRecordedBuffer(static_cast<uintptr_t>(code->InstructionStart()),
- code->InstructionSize(), name, length);
+ WriteLogRecordedBuffer(static_cast<uintptr_t>(code.InstructionStart()),
+ code.InstructionSize(), name, length);
}
void PerfBasicLogger::LogRecordedBuffer(const wasm::WasmCode* code,
@@ -378,8 +378,8 @@
const char* comment) {
CodeEvent code_event;
code_event.code_start_address =
- static_cast<uintptr_t>(code->InstructionStart());
- code_event.code_size = static_cast<size_t>(code->InstructionSize());
+ static_cast<uintptr_t>(code.InstructionStart());
+ code_event.code_size = static_cast<size_t>(code.InstructionSize());
code_event.function_name = isolate_->factory()->empty_string();
code_event.script_name = isolate_->factory()->empty_string();
code_event.script_line = 0;
@@ -398,8 +398,8 @@
CodeEvent code_event;
code_event.code_start_address =
- static_cast<uintptr_t>(code->InstructionStart());
- code_event.code_size = static_cast<size_t>(code->InstructionSize());
+ static_cast<uintptr_t>(code.InstructionStart());
+ code_event.code_size = static_cast<size_t>(code.InstructionSize());
code_event.function_name = name_string;
code_event.script_name = isolate_->factory()->empty_string();
code_event.script_line = 0;
@@ -419,8 +419,8 @@
CodeEvent code_event;
code_event.code_start_address =
- static_cast<uintptr_t>(code->InstructionStart());
- code_event.code_size = static_cast<size_t>(code->InstructionSize());
+ static_cast<uintptr_t>(code.InstructionStart());
+ code_event.code_size = static_cast<size_t>(code.InstructionSize());
code_event.function_name = name_string;
code_event.script_name = isolate_->factory()->empty_string();
code_event.script_line = 0;
@@ -435,7 +435,7 @@
CodeEventListener::LogEventsAndTags tag, AbstractCode code,
SharedFunctionInfo shared, Name source, int line, int column) {
Handle<String> name_string =
- Name::ToFunctionName(isolate_, Handle<Name>(shared->Name(), isolate_))
+ Name::ToFunctionName(isolate_, Handle<Name>(shared.Name(), isolate_))
.ToHandleChecked();
Handle<String> source_string =
Name::ToFunctionName(isolate_, Handle<Name>(source, isolate_))
@@ -443,8 +443,8 @@
CodeEvent code_event;
code_event.code_start_address =
- static_cast<uintptr_t>(code->InstructionStart());
- code_event.code_size = static_cast<size_t>(code->InstructionSize());
+ static_cast<uintptr_t>(code.InstructionStart());
+ code_event.code_size = static_cast<size_t>(code.InstructionSize());
code_event.function_name = name_string;
code_event.script_name = source_string;
code_event.script_line = line;
@@ -465,8 +465,8 @@
String source) {
CodeEvent code_event;
code_event.code_start_address =
- static_cast<uintptr_t>(code->InstructionStart());
- code_event.code_size = static_cast<size_t>(code->InstructionSize());
+ static_cast<uintptr_t>(code.InstructionStart());
+ code_event.code_size = static_cast<size_t>(code.InstructionSize());
code_event.function_name = Handle<String>(source, isolate_);
code_event.script_name = isolate_->factory()->empty_string();
code_event.script_line = 0;
@@ -575,12 +575,12 @@
const char* name, int length) {
CodeCreateStruct event;
event.name_size = length;
- event.code_address = code->InstructionStart();
- event.code_size = code->InstructionSize();
+ event.code_address = code.InstructionStart();
+ event.code_size = code.InstructionSize();
LogWriteStruct(event);
LogWriteBytes(name, length);
- LogWriteBytes(reinterpret_cast<const char*>(code->InstructionStart()),
- code->InstructionSize());
+ LogWriteBytes(reinterpret_cast<const char*>(code.InstructionStart()),
+ code.InstructionSize());
}
void LowLevelLogger::LogRecordedBuffer(const wasm::WasmCode* code,
@@ -597,8 +597,8 @@
void LowLevelLogger::CodeMoveEvent(AbstractCode from, AbstractCode to) {
CodeMoveStruct event;
- event.from_address = from->InstructionStart();
- event.to_address = to->InstructionStart();
+ event.from_address = from.InstructionStart();
+ event.to_address = to.InstructionStart();
LogWriteStruct(event);
}
@@ -646,14 +646,14 @@
JitCodeEvent event;
memset(static_cast<void*>(&event), 0, sizeof(event));
event.type = JitCodeEvent::CODE_ADDED;
- event.code_start = reinterpret_cast<void*>(code->InstructionStart());
+ event.code_start = reinterpret_cast<void*>(code.InstructionStart());
event.code_type =
- code->IsCode() ? JitCodeEvent::JIT_CODE : JitCodeEvent::BYTE_CODE;
- event.code_len = code->InstructionSize();
+ code.IsCode() ? JitCodeEvent::JIT_CODE : JitCodeEvent::BYTE_CODE;
+ event.code_len = code.InstructionSize();
Handle<SharedFunctionInfo> shared_function_handle;
- if (!shared.is_null() && shared->script()->IsScript()) {
+ if (!shared.is_null() && shared.script().IsScript()) {
shared_function_handle =
- Handle<SharedFunctionInfo>(shared, shared->GetIsolate());
+ Handle<SharedFunctionInfo>(shared, shared.GetIsolate());
}
event.script = ToApiHandle<v8::UnboundScript>(shared_function_handle);
event.name.str = name;
@@ -682,10 +682,10 @@
JitCodeEvent event;
event.type = JitCodeEvent::CODE_MOVED;
event.code_type =
- from->IsCode() ? JitCodeEvent::JIT_CODE : JitCodeEvent::BYTE_CODE;
- event.code_start = reinterpret_cast<void*>(from->InstructionStart());
- event.code_len = from->InstructionSize();
- event.new_code_start = reinterpret_cast<void*>(to->InstructionStart());
+ from.IsCode() ? JitCodeEvent::JIT_CODE : JitCodeEvent::BYTE_CODE;
+ event.code_start = reinterpret_cast<void*>(from.InstructionStart());
+ event.code_len = from.InstructionSize();
+ event.new_code_start = reinterpret_cast<void*>(to.InstructionStart());
event.isolate = reinterpret_cast<v8::Isolate*>(isolate_);
code_event_handler_(&event);
@@ -990,8 +990,8 @@
Deoptimizer::DeoptInfo info = Deoptimizer::GetDeoptInfo(code, pc);
Log::MessageBuilder msg(log_);
msg << "code-deopt" << kNext << timer_.Elapsed().InMicroseconds() << kNext
- << code->CodeSize() << kNext
- << reinterpret_cast<void*>(code->InstructionStart());
+ << code.CodeSize() << kNext
+ << reinterpret_cast<void*>(code.InstructionStart());
// Deoptimization position.
std::ostringstream deopt_location;
@@ -1051,10 +1051,10 @@
void Logger::ApiNamedPropertyAccess(const char* tag, JSObject holder,
Object property_name) {
- DCHECK(property_name->IsName());
+ DCHECK(property_name.IsName());
if (!log_->IsEnabled() || !FLAG_log_api) return;
Log::MessageBuilder msg(log_);
- msg << "api" << kNext << tag << kNext << holder->class_name() << kNext
+ msg << "api" << kNext << tag << kNext << holder.class_name() << kNext
<< Name::cast(property_name);
msg.WriteToLogFile();
}
@@ -1063,7 +1063,7 @@
uint32_t index) {
if (!log_->IsEnabled() || !FLAG_log_api) return;
Log::MessageBuilder msg(log_);
- msg << "api" << kNext << tag << kNext << holder->class_name() << kNext
+ msg << "api" << kNext << tag << kNext << holder.class_name() << kNext
<< index;
msg.WriteToLogFile();
}
@@ -1071,7 +1071,7 @@
void Logger::ApiObjectAccess(const char* tag, JSObject object) {
if (!log_->IsEnabled() || !FLAG_log_api) return;
Log::MessageBuilder msg(log_);
- msg << "api" << kNext << tag << kNext << object->class_name();
+ msg << "api" << kNext << tag << kNext << object.class_name();
msg.WriteToLogFile();
}
@@ -1137,9 +1137,9 @@
void AppendCodeCreateHeader(Log::MessageBuilder& msg,
CodeEventListener::LogEventsAndTags tag,
AbstractCode code, base::ElapsedTimer* timer) {
- AppendCodeCreateHeader(msg, tag, code->kind(),
- reinterpret_cast<uint8_t*>(code->InstructionStart()),
- code->InstructionSize(), timer);
+ AppendCodeCreateHeader(msg, tag, code.kind(),
+ reinterpret_cast<uint8_t*>(code.InstructionStart()),
+ code.InstructionSize(), timer);
}
} // namespace
@@ -1176,7 +1176,7 @@
Log::MessageBuilder msg(log_);
AppendCodeCreateHeader(msg, tag, code, &timer_);
- msg << name << kNext << reinterpret_cast<void*>(shared->address()) << kNext
+ msg << name << kNext << reinterpret_cast<void*>(shared.address()) << kNext
<< ComputeMarker(shared, code);
msg.WriteToLogFile();
}
@@ -1216,15 +1216,15 @@
{
Log::MessageBuilder msg(log_);
AppendCodeCreateHeader(msg, tag, code, &timer_);
- msg << shared->DebugName() << " " << source << ":" << line << ":" << column
- << kNext << reinterpret_cast<void*>(shared->address()) << kNext
+ msg << shared.DebugName() << " " << source << ":" << line << ":" << column
+ << kNext << reinterpret_cast<void*>(shared.address()) << kNext
<< ComputeMarker(shared, code);
msg.WriteToLogFile();
}
if (!FLAG_log_source_code) return;
- Object script_object = shared->script();
- if (!script_object->IsScript()) return;
+ Object script_object = shared.script();
+ if (!script_object.IsScript()) return;
Script script = Script::cast(script_object);
if (!EnsureLogScriptSource(script)) return;
@@ -1251,11 +1251,11 @@
// S<shared-function-info-address>
Log::MessageBuilder msg(log_);
msg << "code-source-info" << kNext
- << reinterpret_cast<void*>(code->InstructionStart()) << kNext
- << script->id() << kNext << shared->StartPosition() << kNext
- << shared->EndPosition() << kNext;
+ << reinterpret_cast<void*>(code.InstructionStart()) << kNext
+ << script.id() << kNext << shared.StartPosition() << kNext
+ << shared.EndPosition() << kNext;
- SourcePositionTableIterator iterator(code->source_position_table());
+ SourcePositionTableIterator iterator(code.source_position_table());
bool hasInlined = false;
for (; !iterator.done(); iterator.Advance()) {
SourcePosition pos = iterator.source_position();
@@ -1269,10 +1269,10 @@
int maxInlinedId = -1;
if (hasInlined) {
PodArray<InliningPosition> inlining_positions =
- DeoptimizationData::cast(Code::cast(code)->deoptimization_data())
- ->InliningPositions();
- for (int i = 0; i < inlining_positions->length(); i++) {
- InliningPosition inlining_pos = inlining_positions->get(i);
+ DeoptimizationData::cast(Code::cast(code).deoptimization_data())
+ .InliningPositions();
+ for (int i = 0; i < inlining_positions.length(); i++) {
+ InliningPosition inlining_pos = inlining_positions.get(i);
msg << "F";
if (inlining_pos.inlined_function_id != -1) {
msg << inlining_pos.inlined_function_id;
@@ -1290,13 +1290,13 @@
msg << kNext;
if (hasInlined) {
DeoptimizationData deopt_data =
- DeoptimizationData::cast(Code::cast(code)->deoptimization_data());
+ DeoptimizationData::cast(Code::cast(code).deoptimization_data());
msg << std::hex;
for (int i = 0; i <= maxInlinedId; i++) {
msg << "S"
<< reinterpret_cast<void*>(
- deopt_data->GetInlinedFunction(i)->address());
+ deopt_data.GetInlinedFunction(i).address());
}
msg << std::dec;
}
@@ -1308,8 +1308,8 @@
if (!FLAG_log_code || !log_->IsEnabled()) return;
Log::MessageBuilder msg(log_);
msg << kLogEventsNames[CodeEventListener::CODE_DISABLE_OPT_EVENT] << kNext
- << shared->DebugName() << kNext
- << GetBailoutReason(shared->disable_optimization_reason());
+ << shared.DebugName() << kNext
+ << GetBailoutReason(shared.disable_optimization_reason());
msg.WriteToLogFile();
}
@@ -1330,8 +1330,8 @@
void Logger::CodeMoveEvent(AbstractCode from, AbstractCode to) {
if (!is_listening_to_code_events()) return;
- MoveEventInternal(CodeEventListener::CODE_MOVE_EVENT, from->address(),
- to->address());
+ MoveEventInternal(CodeEventListener::CODE_MOVE_EVENT, from.address(),
+ to.address());
}
namespace {
@@ -1408,9 +1408,8 @@
void Logger::SuspectReadEvent(Name name, Object obj) {
if (!log_->IsEnabled() || !FLAG_log_suspect) return;
Log::MessageBuilder msg(log_);
- String class_name = obj->IsJSObject()
- ? JSObject::cast(obj)->class_name()
- : ReadOnlyRoots(isolate_).empty_string();
+ String class_name = obj.IsJSObject() ? JSObject::cast(obj).class_name()
+ : ReadOnlyRoots(isolate_).empty_string();
msg << "suspect-read" << kNext << class_name << kNext << name;
msg.WriteToLogFile();
}
@@ -1456,12 +1455,12 @@
if (!log_->IsEnabled() || !FLAG_log_function_events) return;
Log::MessageBuilder msg(log_);
int script_id = -1;
- if (sfi->script()->IsScript()) {
- script_id = Script::cast(sfi->script())->id();
+ if (sfi.script().IsScript()) {
+ script_id = Script::cast(sfi.script()).id();
}
msg << "compilation-cache" << Logger::kNext << action << Logger::kNext
<< cache_type << Logger::kNext << script_id << Logger::kNext
- << sfi->StartPosition() << Logger::kNext << sfi->EndPosition()
+ << sfi.StartPosition() << Logger::kNext << sfi.EndPosition()
<< Logger::kNext << timer_.Elapsed().InMicroseconds();
msg.WriteToLogFile();
}
@@ -1496,14 +1495,14 @@
if (!log_->IsEnabled() || !FLAG_log_function_events) return;
{
Log::MessageBuilder msg(log_);
- msg << "script-details" << Logger::kNext << script->id() << Logger::kNext;
- if (script->name()->IsString()) {
- msg << String::cast(script->name());
+ msg << "script-details" << Logger::kNext << script.id() << Logger::kNext;
+ if (script.name().IsString()) {
+ msg << String::cast(script.name());
}
- msg << Logger::kNext << script->line_offset() << Logger::kNext
- << script->column_offset() << Logger::kNext;
- if (script->source_mapping_url()->IsString()) {
- msg << String::cast(script->source_mapping_url());
+ msg << Logger::kNext << script.line_offset() << Logger::kNext
+ << script.column_offset() << Logger::kNext;
+ if (script.source_mapping_url().IsString()) {
+ msg << String::cast(script.source_mapping_url());
}
msg.WriteToLogFile();
}
@@ -1514,20 +1513,20 @@
if (!log_->IsEnabled()) return false;
Log::MessageBuilder msg(log_);
// Make sure the script is written to the log file.
- int script_id = script->id();
+ int script_id = script.id();
if (logged_source_code_.find(script_id) != logged_source_code_.end()) {
return true;
}
// This script has not been logged yet.
logged_source_code_.insert(script_id);
- Object source_object = script->source();
- if (!source_object->IsString()) return false;
+ Object source_object = script.source();
+ if (!source_object.IsString()) return false;
String source_code = String::cast(source_object);
msg << "script-source" << kNext << script_id << kNext;
// Log the script name.
- if (script->name()->IsString()) {
- msg << String::cast(script->name()) << kNext;
+ if (script.name().IsString()) {
+ msg << String::cast(script.name()) << kNext;
} else {
msg << "<unknown>" << kNext;
}
@@ -1583,11 +1582,11 @@
msg << type << kNext << reinterpret_cast<void*>(pc) << kNext << line << kNext
<< column << kNext << old_state << kNext << new_state << kNext
<< AsHex::Address(map.ptr()) << kNext;
- if (key->IsSmi()) {
+ if (key.IsSmi()) {
msg << Smi::ToInt(key);
- } else if (key->IsNumber()) {
- msg << key->Number();
- } else if (key->IsName()) {
+ } else if (key.IsNumber()) {
+ msg << key.Number();
+ } else if (key.IsName()) {
msg << Name::cast(key);
}
msg << kNext << modifier << kNext;
@@ -1616,13 +1615,13 @@
<< line << kNext << column << kNext << reason << kNext;
if (!name_or_sfi.is_null()) {
- if (name_or_sfi->IsName()) {
+ if (name_or_sfi.IsName()) {
msg << Name::cast(name_or_sfi);
- } else if (name_or_sfi->IsSharedFunctionInfo()) {
+ } else if (name_or_sfi.IsSharedFunctionInfo()) {
SharedFunctionInfo sfi = SharedFunctionInfo::cast(name_or_sfi);
- msg << sfi->DebugName();
+ msg << sfi.DebugName();
#if V8_SFI_HAS_UNIQUE_ID
- msg << " " << sfi->unique_id();
+ msg << " " << sfi.unique_id();
#endif // V8_SFI_HAS_UNIQUE_ID
}
}
@@ -1646,7 +1645,7 @@
<< AsHex::Address(map.ptr()) << kNext;
if (FLAG_trace_maps_details) {
std::ostringstream buffer;
- map->PrintMapDetails(buffer);
+ map.PrintMapDetails(buffer);
msg << buffer.str().c_str();
}
msg.WriteToLogFile();
@@ -1656,10 +1655,10 @@
Handle<SharedFunctionInfo>* sfis,
Handle<AbstractCode>* code_objects, int offset) {
if (sfis != nullptr) {
- sfis[offset] = Handle<SharedFunctionInfo>(sfi, sfi->GetIsolate());
+ sfis[offset] = Handle<SharedFunctionInfo>(sfi, sfi.GetIsolate());
}
if (code_objects != nullptr) {
- code_objects[offset] = Handle<AbstractCode>(code_object, sfi->GetIsolate());
+ code_objects[offset] = Handle<AbstractCode>(code_object, sfi.GetIsolate());
}
}
@@ -1674,30 +1673,29 @@
// the unoptimized code for them.
for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) {
- if (obj->IsSharedFunctionInfo()) {
+ if (obj.IsSharedFunctionInfo()) {
SharedFunctionInfo sfi = SharedFunctionInfo::cast(obj);
- if (sfi->is_compiled() &&
- (!sfi->script()->IsScript() ||
- Script::cast(sfi->script())->HasValidSource())) {
- AddFunctionAndCode(sfi, AbstractCode::cast(sfi->abstract_code()), sfis,
+ if (sfi.is_compiled() && (!sfi.script().IsScript() ||
+ Script::cast(sfi.script()).HasValidSource())) {
+ AddFunctionAndCode(sfi, AbstractCode::cast(sfi.abstract_code()), sfis,
code_objects, compiled_funcs_count);
++compiled_funcs_count;
}
- } else if (obj->IsJSFunction()) {
+ } else if (obj.IsJSFunction()) {
// Given that we no longer iterate over all optimized JSFunctions, we need
// to take care of this here.
JSFunction function = JSFunction::cast(obj);
- SharedFunctionInfo sfi = SharedFunctionInfo::cast(function->shared());
- Object maybe_script = sfi->script();
- if (maybe_script->IsScript() &&
- !Script::cast(maybe_script)->HasValidSource()) {
+ SharedFunctionInfo sfi = SharedFunctionInfo::cast(function.shared());
+ Object maybe_script = sfi.script();
+ if (maybe_script.IsScript() &&
+ !Script::cast(maybe_script).HasValidSource()) {
continue;
}
// TODO(jarin) This leaves out deoptimized code that might still be on the
// stack. Also note that we will not log optimized code objects that are
// only on a type feedback vector. We should make this mroe precise.
- if (function->IsOptimized()) {
- AddFunctionAndCode(sfi, AbstractCode::cast(function->code()), sfis,
+ if (function.IsOptimized()) {
+ AddFunctionAndCode(sfi, AbstractCode::cast(function.code()), sfis,
code_objects, compiled_funcs_count);
++compiled_funcs_count;
}
@@ -1714,7 +1712,7 @@
for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) {
- if (obj->IsWasmModuleObject()) {
+ if (obj.IsWasmModuleObject()) {
WasmModuleObject module = WasmModuleObject::cast(obj);
if (module_objects != nullptr) {
module_objects[module_objects_count] =
@@ -1747,18 +1745,18 @@
DisallowHeapAllocation no_gc;
for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) {
- if (!obj->IsAccessorInfo()) continue;
+ if (!obj.IsAccessorInfo()) continue;
AccessorInfo ai = AccessorInfo::cast(obj);
- if (!ai->name()->IsName()) continue;
- Address getter_entry = v8::ToCData<Address>(ai->getter());
- Name name = Name::cast(ai->name());
+ if (!ai.name().IsName()) continue;
+ Address getter_entry = v8::ToCData<Address>(ai.getter());
+ Name name = Name::cast(ai.name());
if (getter_entry != 0) {
#if USES_FUNCTION_DESCRIPTORS
getter_entry = *FUNCTION_ENTRYPOINT_ADDRESS(getter_entry);
#endif
PROFILE(isolate_, GetterCallbackEvent(name, getter_entry));
}
- Address setter_entry = v8::ToCData<Address>(ai->setter());
+ Address setter_entry = v8::ToCData<Address>(ai.setter());
if (setter_entry != 0) {
#if USES_FUNCTION_DESCRIPTORS
setter_entry = *FUNCTION_ENTRYPOINT_ADDRESS(setter_entry);
@@ -1774,7 +1772,7 @@
HeapIterator iterator(heap);
for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) {
- if (!obj->IsMap()) continue;
+ if (!obj.IsMap()) continue;
Map map = Map::cast(obj);
MapCreate(map);
MapDetails(map);
@@ -1945,7 +1943,7 @@
AbstractCode abstract_code = AbstractCode::cast(object);
CodeEventListener::LogEventsAndTags tag = CodeEventListener::STUB_TAG;
const char* description = "Unknown code from before profiling";
- switch (abstract_code->kind()) {
+ switch (abstract_code.kind()) {
case AbstractCode::INTERPRETED_FUNCTION:
case AbstractCode::OPTIMIZED_FUNCTION:
return; // We log this later using LogCompiledFunctions.
@@ -1960,13 +1958,13 @@
tag = CodeEventListener::REG_EXP_TAG;
break;
case AbstractCode::BUILTIN:
- if (Code::cast(object)->is_interpreter_trampoline_builtin() &&
+ if (Code::cast(object).is_interpreter_trampoline_builtin() &&
Code::cast(object) !=
*BUILTIN_CODE(isolate_, InterpreterEntryTrampoline)) {
return;
}
description =
- isolate_->builtins()->name(abstract_code->GetCode()->builtin_index());
+ isolate_->builtins()->name(abstract_code.GetCode().builtin_index());
tag = CodeEventListener::BUILTIN_TAG;
break;
case AbstractCode::WASM_FUNCTION:
@@ -2005,8 +2003,8 @@
DisallowHeapAllocation no_gc;
for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) {
- if (obj->IsCode()) LogCodeObject(obj);
- if (obj->IsBytecodeArray()) LogCodeObject(obj);
+ if (obj.IsCode()) LogCodeObject(obj);
+ if (obj.IsBytecodeArray()) LogCodeObject(obj);
}
}
@@ -2023,7 +2021,7 @@
// GetScriptLineNumber call.
for (int i = 0; i < compiled_funcs_count; ++i) {
SharedFunctionInfo::EnsureSourcePositionsAvailable(isolate_, sfis[i]);
- if (sfis[i]->function_data()->IsInterpreterData()) {
+ if (sfis[i]->function_data().IsInterpreterData()) {
LogExistingFunction(
sfis[i],
Handle<AbstractCode>(
@@ -2048,12 +2046,12 @@
void ExistingCodeLogger::LogExistingFunction(
Handle<SharedFunctionInfo> shared, Handle<AbstractCode> code,
CodeEventListener::LogEventsAndTags tag) {
- if (shared->script()->IsScript()) {
+ if (shared->script().IsScript()) {
Handle<Script> script(Script::cast(shared->script()), isolate_);
int line_num = Script::GetLineNumber(script, shared->StartPosition()) + 1;
int column_num =
Script::GetColumnNumber(script, shared->StartPosition()) + 1;
- if (script->name()->IsString()) {
+ if (script->name().IsString()) {
Handle<String> script_name(String::cast(script->name()), isolate_);
if (line_num > 0) {
CALL_CODE_EVENT_HANDLER(
@@ -2073,10 +2071,10 @@
} else if (shared->IsApiFunction()) {
// API function.
FunctionTemplateInfo fun_data = shared->get_api_func_data();
- Object raw_call_data = fun_data->call_code();
- if (!raw_call_data->IsUndefined(isolate_)) {
+ Object raw_call_data = fun_data.call_code();
+ if (!raw_call_data.IsUndefined(isolate_)) {
CallHandlerInfo call_data = CallHandlerInfo::cast(raw_call_data);
- Object callback_obj = call_data->callback();
+ Object callback_obj = call_data.callback();
Address entry_point = v8::ToCData<Address>(callback_obj);
#if USES_FUNCTION_DESCRIPTORS
entry_point = *FUNCTION_ENTRYPOINT_ADDRESS(entry_point);
diff --git a/src/mips/assembler-mips-inl.h b/src/mips/assembler-mips-inl.h
index dfcedf2..7d60d54 100644
--- a/src/mips/assembler-mips-inl.h
+++ b/src/mips/assembler-mips-inl.h
@@ -120,7 +120,8 @@
void Assembler::deserialization_set_special_target_at(
Address instruction_payload, Code code, Address target) {
set_target_address_at(instruction_payload,
- !code.is_null() ? code->constant_pool() : kNullAddress, target);
+ !code.is_null() ? code.constant_pool() : kNullAddress,
+ target);
}
int Assembler::deserialization_special_target_size(
@@ -189,7 +190,7 @@
WriteBarrierMode write_barrier_mode,
ICacheFlushMode icache_flush_mode) {
DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_));
- Assembler::set_target_address_at(pc_, constant_pool_, target->ptr(),
+ Assembler::set_target_address_at(pc_, constant_pool_, target.ptr(),
icache_flush_mode);
if (write_barrier_mode == UPDATE_WRITE_BARRIER && !host().is_null()) {
WriteBarrierForCode(host(), this, target);
diff --git a/src/mips/simulator-mips.cc b/src/mips/simulator-mips.cc
index e4c0cdb..abe1b83 100644
--- a/src/mips/simulator-mips.cc
+++ b/src/mips/simulator-mips.cc
@@ -481,7 +481,7 @@
Object obj(value);
os << arg1 << ": \n";
#ifdef DEBUG
- obj->Print(os);
+ obj.Print(os);
os << "\n";
#else
os << Brief(obj) << "\n";
@@ -543,7 +543,7 @@
if (obj.IsSmi()) {
PrintF("smi %d", Smi::ToInt(obj));
} else {
- obj->ShortPrint();
+ obj.ShortPrint();
}
PrintF(")");
}
diff --git a/src/mips64/assembler-mips64-inl.h b/src/mips64/assembler-mips64-inl.h
index 3bfd119..6c336dc 100644
--- a/src/mips64/assembler-mips64-inl.h
+++ b/src/mips64/assembler-mips64-inl.h
@@ -110,7 +110,8 @@
void Assembler::deserialization_set_special_target_at(
Address instruction_payload, Code code, Address target) {
set_target_address_at(instruction_payload,
- !code.is_null() ? code->constant_pool() : kNullAddress, target);
+ !code.is_null() ? code.constant_pool() : kNullAddress,
+ target);
}
int Assembler::deserialization_special_target_size(
@@ -165,7 +166,7 @@
WriteBarrierMode write_barrier_mode,
ICacheFlushMode icache_flush_mode) {
DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_));
- Assembler::set_target_address_at(pc_, constant_pool_, target->ptr(),
+ Assembler::set_target_address_at(pc_, constant_pool_, target.ptr(),
icache_flush_mode);
if (write_barrier_mode == UPDATE_WRITE_BARRIER && !host().is_null()) {
WriteBarrierForCode(host(), this, target);
diff --git a/src/mips64/simulator-mips64.cc b/src/mips64/simulator-mips64.cc
index e6e1909..4b3acce 100644
--- a/src/mips64/simulator-mips64.cc
+++ b/src/mips64/simulator-mips64.cc
@@ -424,7 +424,7 @@
Object obj(value);
os << arg1 << ": \n";
#ifdef DEBUG
- obj->Print(os);
+ obj.Print(os);
os << "\n";
#else
os << Brief(obj) << "\n";
@@ -473,7 +473,7 @@
if (obj.IsSmi()) {
PrintF("smi %d", Smi::ToInt(obj));
} else {
- obj->ShortPrint();
+ obj.ShortPrint();
}
PrintF(")");
}
diff --git a/src/numbers/conversions-inl.h b/src/numbers/conversions-inl.h
index 9fe7abd..90f1d8d 100644
--- a/src/numbers/conversions-inl.h
+++ b/src/numbers/conversions-inl.h
@@ -160,23 +160,23 @@
}
int32_t NumberToInt32(Object number) {
- if (number->IsSmi()) return Smi::ToInt(number);
- return DoubleToInt32(number->Number());
+ if (number.IsSmi()) return Smi::ToInt(number);
+ return DoubleToInt32(number.Number());
}
uint32_t NumberToUint32(Object number) {
- if (number->IsSmi()) return Smi::ToInt(number);
- return DoubleToUint32(number->Number());
+ if (number.IsSmi()) return Smi::ToInt(number);
+ return DoubleToUint32(number.Number());
}
uint32_t PositiveNumberToUint32(Object number) {
- if (number->IsSmi()) {
+ if (number.IsSmi()) {
int value = Smi::ToInt(number);
if (value <= 0) return 0;
return value;
}
- DCHECK(number->IsHeapNumber());
- double value = number->Number();
+ DCHECK(number.IsHeapNumber());
+ double value = number.Number();
// Catch all values smaller than 1 and use the double-negation trick for NANs.
if (!(value >= 1)) return 0;
uint32_t max = std::numeric_limits<uint32_t>::max();
@@ -185,8 +185,8 @@
}
int64_t NumberToInt64(Object number) {
- if (number->IsSmi()) return Smi::ToInt(number);
- double d = number->Number();
+ if (number.IsSmi()) return Smi::ToInt(number);
+ double d = number.Number();
if (std::isnan(d)) return 0;
if (d >= static_cast<double>(std::numeric_limits<int64_t>::max())) {
return std::numeric_limits<int64_t>::max();
@@ -198,13 +198,13 @@
}
uint64_t PositiveNumberToUint64(Object number) {
- if (number->IsSmi()) {
+ if (number.IsSmi()) {
int value = Smi::ToInt(number);
if (value <= 0) return 0;
return value;
}
- DCHECK(number->IsHeapNumber());
- double value = number->Number();
+ DCHECK(number.IsHeapNumber());
+ double value = number.Number();
// Catch all values smaller than 1 and use the double-negation trick for NANs.
if (!(value >= 1)) return 0;
uint64_t max = std::numeric_limits<uint64_t>::max();
@@ -215,7 +215,7 @@
bool TryNumberToSize(Object number, size_t* result) {
// Do not create handles in this function! Don't use SealHandleScope because
// the function can be used concurrently.
- if (number->IsSmi()) {
+ if (number.IsSmi()) {
int value = Smi::ToInt(number);
DCHECK(static_cast<unsigned>(Smi::kMaxValue) <=
std::numeric_limits<size_t>::max());
@@ -225,8 +225,8 @@
}
return false;
} else {
- DCHECK(number->IsHeapNumber());
- double value = HeapNumber::cast(number)->value();
+ DCHECK(number.IsHeapNumber());
+ double value = HeapNumber::cast(number).value();
// If value is compared directly to the limit, the limit will be
// casted to a double and could end up as limit + 1,
// because a double might not have enough mantissa bits for it.
diff --git a/src/numbers/conversions.cc b/src/numbers/conversions.cc
index da50fc3..053cc18 100644
--- a/src/numbers/conversions.cc
+++ b/src/numbers/conversions.cc
@@ -1324,7 +1324,7 @@
bool IsSpecialIndex(String string) {
// Max length of canonical double: -X.XXXXXXXXXXXXXXXXX-eXXX
const int kBufferSize = 24;
- const int length = string->length();
+ const int length = string.length();
if (length == 0 || length > kBufferSize) return false;
uint16_t buffer[kBufferSize];
String::WriteToFlat(string, buffer, 0, length);
diff --git a/src/numbers/math-random.cc b/src/numbers/math-random.cc
index cce4304..63d561f 100644
--- a/src/numbers/math-random.cc
+++ b/src/numbers/math-random.cc
@@ -28,17 +28,17 @@
}
void MathRandom::ResetContext(Context native_context) {
- native_context->set_math_random_index(Smi::zero());
+ native_context.set_math_random_index(Smi::zero());
State state = {0, 0};
- PodArray<State>::cast(native_context->math_random_state())->set(0, state);
+ PodArray<State>::cast(native_context.math_random_state()).set(0, state);
}
Address MathRandom::RefillCache(Isolate* isolate, Address raw_native_context) {
Context native_context = Context::cast(Object(raw_native_context));
DisallowHeapAllocation no_gc;
PodArray<State> pod =
- PodArray<State>::cast(native_context->math_random_state());
- State state = pod->get(0);
+ PodArray<State>::cast(native_context.math_random_state());
+ State state = pod.get(0);
// Initialize state if not yet initialized. If a fixed random seed was
// requested, use it to reset our state the first time a script asks for
// random numbers in this context. This ensures the script sees a consistent
@@ -56,17 +56,17 @@
}
FixedDoubleArray cache =
- FixedDoubleArray::cast(native_context->math_random_cache());
+ FixedDoubleArray::cast(native_context.math_random_cache());
// Create random numbers.
for (int i = 0; i < kCacheSize; i++) {
// Generate random numbers using xorshift128+.
base::RandomNumberGenerator::XorShift128(&state.s0, &state.s1);
- cache->set(i, base::RandomNumberGenerator::ToDouble(state.s0));
+ cache.set(i, base::RandomNumberGenerator::ToDouble(state.s0));
}
- pod->set(0, state);
+ pod.set(0, state);
Smi new_index = Smi::FromInt(kCacheSize);
- native_context->set_math_random_index(new_index);
+ native_context.set_math_random_index(new_index);
return new_index.ptr();
}
diff --git a/src/objects-body-descriptors-inl.h b/src/objects-body-descriptors-inl.h
index acc949b..c09e502 100644
--- a/src/objects-body-descriptors-inl.h
+++ b/src/objects-body-descriptors-inl.h
@@ -27,13 +27,13 @@
template <int start_offset>
int FlexibleBodyDescriptor<start_offset>::SizeOf(Map map, HeapObject object) {
- return object->SizeFromMap(map);
+ return object.SizeFromMap(map);
}
template <int start_offset>
int FlexibleWeakBodyDescriptor<start_offset>::SizeOf(Map map,
HeapObject object) {
- return object->SizeFromMap(map);
+ return object.SizeFromMap(map);
}
bool BodyDescriptorBase::IsValidJSObjectSlotImpl(Map map, HeapObject obj,
@@ -57,7 +57,7 @@
// embedder field area as tagged slots.
STATIC_ASSERT(kEmbedderDataSlotSize == kTaggedSize);
#endif
- if (!FLAG_unbox_double_fields || map->HasFastPointerLayout()) {
+ if (!FLAG_unbox_double_fields || map.HasFastPointerLayout()) {
return true;
} else {
DCHECK(FLAG_unbox_double_fields);
@@ -98,7 +98,7 @@
// embedder field area as tagged slots.
STATIC_ASSERT(kEmbedderDataSlotSize == kTaggedSize);
#endif
- if (!FLAG_unbox_double_fields || map->HasFastPointerLayout()) {
+ if (!FLAG_unbox_double_fields || map.HasFastPointerLayout()) {
IteratePointers(obj, start_offset, end_offset, v);
} else {
DCHECK(FLAG_unbox_double_fields);
@@ -183,7 +183,7 @@
}
static inline int SizeOf(Map map, HeapObject object) {
- return map->instance_size();
+ return map.instance_size();
}
};
@@ -202,7 +202,7 @@
}
static inline int SizeOf(Map map, HeapObject object) {
- return map->instance_size();
+ return map.instance_size();
}
};
@@ -221,7 +221,7 @@
}
static inline int SizeOf(Map map, HeapObject object) {
- return map->instance_size();
+ return map.instance_size();
}
};
@@ -241,7 +241,7 @@
}
static inline int SizeOf(Map map, HeapObject object) {
- return map->instance_size();
+ return map.instance_size();
}
};
@@ -265,7 +265,7 @@
}
static inline int SizeOf(Map map, HeapObject object) {
- return map->instance_size();
+ return map.instance_size();
}
};
@@ -284,7 +284,7 @@
return true;
}
// check for weak_next offset
- if (map->instance_size() == AllocationSite::kSizeWithWeakNext &&
+ if (map.instance_size() == AllocationSite::kSizeWithWeakNext &&
offset == AllocationSite::kWeakNextOffset) {
return true;
}
@@ -306,7 +306,7 @@
}
static inline int SizeOf(Map map, HeapObject object) {
- return map->instance_size();
+ return map.instance_size();
}
};
@@ -327,7 +327,7 @@
}
static inline int SizeOf(Map map, HeapObject object) {
- return map->instance_size();
+ return map.instance_size();
}
};
@@ -348,7 +348,7 @@
}
static inline int SizeOf(Map map, HeapObject object) {
- return map->instance_size();
+ return map.instance_size();
}
};
@@ -369,7 +369,7 @@
}
static inline int SizeOf(Map map, HeapObject object) {
- return map->instance_size();
+ return map.instance_size();
}
};
@@ -381,7 +381,7 @@
Derived table = Derived::cast(obj);
// Only data table part contains tagged values.
return (offset >= DataTableStartOffset()) &&
- (offset < table->GetBucketsStartOffset());
+ (offset < table.GetBucketsStartOffset());
}
template <typename ObjectVisitor>
@@ -389,13 +389,13 @@
ObjectVisitor* v) {
Derived table = Derived::cast(obj);
int start_offset = DataTableStartOffset();
- int end_offset = table->GetBucketsStartOffset();
+ int end_offset = table.GetBucketsStartOffset();
IteratePointers(obj, start_offset, end_offset, v);
}
static inline int SizeOf(Map map, HeapObject obj) {
Derived table = Derived::cast(obj);
- return table->SizeFor(table->Capacity());
+ return table.SizeFor(table.Capacity());
}
};
@@ -408,7 +408,7 @@
ObjectVisitor* v) {}
static inline int SizeOf(Map map, HeapObject obj) {
- return ByteArray::SizeFor(ByteArray::cast(obj)->synchronized_length());
+ return ByteArray::SizeFor(ByteArray::cast(obj).synchronized_length());
}
};
@@ -429,7 +429,7 @@
static inline int SizeOf(Map map, HeapObject obj) {
return BytecodeArray::SizeFor(
- BytecodeArray::cast(obj)->synchronized_length());
+ BytecodeArray::cast(obj).synchronized_length());
}
};
@@ -442,7 +442,7 @@
ObjectVisitor* v) {}
static inline int SizeOf(Map map, HeapObject obj) {
- return BigInt::SizeFor(BigInt::cast(obj)->synchronized_length());
+ return BigInt::SizeFor(BigInt::cast(obj).synchronized_length());
}
};
@@ -456,7 +456,7 @@
static inline int SizeOf(Map map, HeapObject obj) {
return FixedDoubleArray::SizeFor(
- FixedDoubleArray::cast(obj)->synchronized_length());
+ FixedDoubleArray::cast(obj).synchronized_length());
}
};
@@ -473,7 +473,7 @@
}
static inline int SizeOf(Map map, HeapObject object) {
- return FixedTypedArrayBase::cast(object)->size();
+ return FixedTypedArrayBase::cast(object).size();
}
};
@@ -487,7 +487,7 @@
static inline int SizeOf(Map map, HeapObject obj) {
return FeedbackMetadata::SizeFor(
- FeedbackMetadata::cast(obj)->synchronized_slot_count());
+ FeedbackMetadata::cast(obj).synchronized_slot_count());
}
};
@@ -510,28 +510,28 @@
}
static inline int SizeOf(Map map, HeapObject obj) {
- return FeedbackVector::SizeFor(FeedbackVector::cast(obj)->length());
+ return FeedbackVector::SizeFor(FeedbackVector::cast(obj).length());
}
};
class PreparseData::BodyDescriptor final : public BodyDescriptorBase {
public:
static bool IsValidSlot(Map map, HeapObject obj, int offset) {
- return offset >= PreparseData::cast(obj)->inner_start_offset();
+ return offset >= PreparseData::cast(obj).inner_start_offset();
}
template <typename ObjectVisitor>
static inline void IterateBody(Map map, HeapObject obj, int object_size,
ObjectVisitor* v) {
PreparseData data = PreparseData::cast(obj);
- int start_offset = data->inner_start_offset();
- int end_offset = start_offset + data->children_length() * kTaggedSize;
+ int start_offset = data.inner_start_offset();
+ int end_offset = start_offset + data.children_length() * kTaggedSize;
IteratePointers(obj, start_offset, end_offset, v);
}
static inline int SizeOf(Map map, HeapObject obj) {
PreparseData data = PreparseData::cast(obj);
- return PreparseData::SizeFor(data->data_length(), data->children_length());
+ return PreparseData::SizeFor(data.data_length(), data.children_length());
}
};
@@ -550,7 +550,7 @@
}
static inline int SizeOf(Map map, HeapObject obj) {
- return obj->SizeFromMap(map);
+ return obj.SizeFromMap(map);
}
};
@@ -569,7 +569,7 @@
}
static inline int SizeOf(Map map, HeapObject object) {
- return map->instance_size();
+ return map.instance_size();
}
};
@@ -652,7 +652,7 @@
}
static inline int SizeOf(Map map, HeapObject object) {
- return Code::unchecked_cast(object)->CodeSize();
+ return Code::unchecked_cast(object).CodeSize();
}
};
@@ -666,7 +666,7 @@
static inline int SizeOf(Map map, HeapObject obj) {
SeqOneByteString string = SeqOneByteString::cast(obj);
- return string->SizeFor(string->synchronized_length());
+ return string.SizeFor(string.synchronized_length());
}
};
@@ -680,7 +680,7 @@
static inline int SizeOf(Map map, HeapObject obj) {
SeqTwoByteString string = SeqTwoByteString::cast(obj);
- return string->SizeFor(string->synchronized_length());
+ return string.SizeFor(string.synchronized_length());
}
};
@@ -710,7 +710,7 @@
}
static inline int SizeOf(Map map, HeapObject object) {
- return map->instance_size();
+ return map.instance_size();
}
};
@@ -754,7 +754,7 @@
}
static inline int SizeOf(Map map, HeapObject object) {
- return object->SizeFromMap(map);
+ return object.SizeFromMap(map);
}
};
@@ -838,7 +838,7 @@
}
static inline int SizeOf(Map map, HeapObject object) {
- return object->SizeFromMap(map);
+ return object.SizeFromMap(map);
}
};
@@ -1099,7 +1099,7 @@
template <typename ObjectVisitor>
void HeapObject::IterateBodyFast(Map map, int object_size, ObjectVisitor* v) {
- BodyDescriptorApply<CallIterateBody, void>(map->instance_type(), map, *this,
+ BodyDescriptorApply<CallIterateBody, void>(map.instance_type(), map, *this,
object_size, v);
}
@@ -1126,7 +1126,7 @@
}
static inline int SizeOf(Map map, HeapObject object) {
- return object->SizeFromMap(map);
+ return object.SizeFromMap(map);
}
};
diff --git a/src/objects-inl.h b/src/objects-inl.h
index afd4825..fc9210b 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -48,7 +48,7 @@
namespace v8 {
namespace internal {
-PropertyDetails::PropertyDetails(Smi smi) { value_ = smi->value(); }
+PropertyDetails::PropertyDetails(Smi smi) { value_ = smi.value(); }
Smi PropertyDetails::AsSmi() const {
// Ensure the upper 2 bits have the same value by sign extending it. This is
@@ -74,7 +74,7 @@
}
bool HeapObject::IsJSGeneratorObject() const {
- return map()->instance_type() == JS_GENERATOR_OBJECT_TYPE ||
+ return map().instance_type() == JS_GENERATOR_OBJECT_TYPE ||
IsJSAsyncFunctionObject() || IsJSAsyncGeneratorObject();
}
@@ -84,29 +84,29 @@
bool HeapObject::IsClassBoilerplate() const { return IsFixedArrayExact(); }
-#define IS_TYPE_FUNCTION_DEF(type_) \
- bool Object::Is##type_() const { \
- return IsHeapObject() && HeapObject::cast(*this)->Is##type_(); \
+#define IS_TYPE_FUNCTION_DEF(type_) \
+ bool Object::Is##type_() const { \
+ return IsHeapObject() && HeapObject::cast(*this).Is##type_(); \
}
HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DEF)
#undef IS_TYPE_FUNCTION_DEF
-#define IS_TYPE_FUNCTION_DEF(Type, Value) \
- bool Object::Is##Type(Isolate* isolate) const { \
- return Is##Type(ReadOnlyRoots(isolate->heap())); \
- } \
- bool Object::Is##Type(ReadOnlyRoots roots) const { \
- return *this == roots.Value(); \
- } \
- bool Object::Is##Type() const { \
- return IsHeapObject() && HeapObject::cast(*this)->Is##Type(); \
- } \
- bool HeapObject::Is##Type(Isolate* isolate) const { \
- return Object::Is##Type(isolate); \
- } \
- bool HeapObject::Is##Type(ReadOnlyRoots roots) const { \
- return Object::Is##Type(roots); \
- } \
+#define IS_TYPE_FUNCTION_DEF(Type, Value) \
+ bool Object::Is##Type(Isolate* isolate) const { \
+ return Is##Type(ReadOnlyRoots(isolate->heap())); \
+ } \
+ bool Object::Is##Type(ReadOnlyRoots roots) const { \
+ return *this == roots.Value(); \
+ } \
+ bool Object::Is##Type() const { \
+ return IsHeapObject() && HeapObject::cast(*this).Is##Type(); \
+ } \
+ bool HeapObject::Is##Type(Isolate* isolate) const { \
+ return Object::Is##Type(isolate); \
+ } \
+ bool HeapObject::Is##Type(ReadOnlyRoots roots) const { \
+ return Object::Is##Type(roots); \
+ } \
bool HeapObject::Is##Type() const { return Is##Type(GetReadOnlyRoots()); }
ODDBALL_LIST(IS_TYPE_FUNCTION_DEF)
#undef IS_TYPE_FUNCTION_DEF
@@ -120,7 +120,7 @@
}
bool Object::IsNullOrUndefined() const {
- return IsHeapObject() && HeapObject::cast(*this)->IsNullOrUndefined();
+ return IsHeapObject() && HeapObject::cast(*this).IsNullOrUndefined();
}
bool Object::IsZero() const { return *this == Smi::zero(); }
@@ -147,12 +147,12 @@
bool HeapObject::IsFunction() const {
STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
- return map()->instance_type() >= FIRST_FUNCTION_TYPE;
+ return map().instance_type() >= FIRST_FUNCTION_TYPE;
}
-bool HeapObject::IsCallable() const { return map()->is_callable(); }
+bool HeapObject::IsCallable() const { return map().is_callable(); }
-bool HeapObject::IsConstructor() const { return map()->is_constructor(); }
+bool HeapObject::IsConstructor() const { return map().is_constructor(); }
bool HeapObject::IsModuleInfo() const {
return map() == GetReadOnlyRoots().module_info_map();
@@ -185,13 +185,13 @@
bool HeapObject::IsSeqOneByteString() const {
if (!IsString()) return false;
return StringShape(String::cast(*this)).IsSequential() &&
- String::cast(*this)->IsOneByteRepresentation();
+ String::cast(*this).IsOneByteRepresentation();
}
bool HeapObject::IsSeqTwoByteString() const {
if (!IsString()) return false;
return StringShape(String::cast(*this)).IsSequential() &&
- String::cast(*this)->IsTwoByteRepresentation();
+ String::cast(*this).IsTwoByteRepresentation();
}
bool HeapObject::IsExternalString() const {
@@ -202,13 +202,13 @@
bool HeapObject::IsExternalOneByteString() const {
if (!IsString()) return false;
return StringShape(String::cast(*this)).IsExternal() &&
- String::cast(*this)->IsOneByteRepresentation();
+ String::cast(*this).IsOneByteRepresentation();
}
bool HeapObject::IsExternalTwoByteString() const {
if (!IsString()) return false;
return StringShape(String::cast(*this)).IsExternal() &&
- String::cast(*this)->IsTwoByteRepresentation();
+ String::cast(*this).IsTwoByteRepresentation();
}
bool Object::IsNumber() const { return IsSmi() || IsHeapNumber(); }
@@ -216,7 +216,7 @@
bool Object::IsNumeric() const { return IsNumber() || IsBigInt(); }
bool HeapObject::IsFiller() const {
- InstanceType instance_type = map()->instance_type();
+ InstanceType instance_type = map().instance_type();
return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
}
@@ -249,7 +249,7 @@
// a deoptimization data array. Since this is used for asserts we can
// check that the length is zero or else the fixed size plus a multiple of
// the entry size.
- int length = FixedArray::cast(*this)->length();
+ int length = FixedArray::cast(*this).length();
if (length == 0) return true;
length -= DeoptimizationData::kFirstDeoptEntryIndex;
@@ -267,7 +267,7 @@
if (!IsFixedArrayExact()) return false;
// There's actually no way to see the difference between a fixed array and
// a template list.
- if (FixedArray::cast(*this)->length() < 1) return false;
+ if (FixedArray::cast(*this).length() < 1) return false;
return true;
}
@@ -283,27 +283,27 @@
}
bool HeapObject::IsStringWrapper() const {
- return IsJSValue() && JSValue::cast(*this)->value()->IsString();
+ return IsJSValue() && JSValue::cast(*this).value().IsString();
}
bool HeapObject::IsBooleanWrapper() const {
- return IsJSValue() && JSValue::cast(*this)->value()->IsBoolean();
+ return IsJSValue() && JSValue::cast(*this).value().IsBoolean();
}
bool HeapObject::IsScriptWrapper() const {
- return IsJSValue() && JSValue::cast(*this)->value()->IsScript();
+ return IsJSValue() && JSValue::cast(*this).value().IsScript();
}
bool HeapObject::IsNumberWrapper() const {
- return IsJSValue() && JSValue::cast(*this)->value()->IsNumber();
+ return IsJSValue() && JSValue::cast(*this).value().IsNumber();
}
bool HeapObject::IsBigIntWrapper() const {
- return IsJSValue() && JSValue::cast(*this)->value()->IsBigInt();
+ return IsJSValue() && JSValue::cast(*this).value().IsBigInt();
}
bool HeapObject::IsSymbolWrapper() const {
- return IsJSValue() && JSValue::cast(*this)->value()->IsSymbol();
+ return IsJSValue() && JSValue::cast(*this).value().IsSymbol();
}
bool HeapObject::IsJSArrayBufferView() const {
@@ -328,7 +328,7 @@
}
bool Object::IsPrimitive() const {
- return IsSmi() || HeapObject::cast(*this)->map()->IsPrimitiveMap();
+ return IsSmi() || HeapObject::cast(*this).map().IsPrimitiveMap();
}
// static
@@ -340,19 +340,19 @@
return JSProxy::IsArray(Handle<JSProxy>::cast(object));
}
-bool HeapObject::IsUndetectable() const { return map()->is_undetectable(); }
+bool HeapObject::IsUndetectable() const { return map().is_undetectable(); }
bool HeapObject::IsAccessCheckNeeded() const {
if (IsJSGlobalProxy()) {
const JSGlobalProxy proxy = JSGlobalProxy::cast(*this);
- JSGlobalObject global = proxy->GetIsolate()->context()->global_object();
- return proxy->IsDetachedFrom(global);
+ JSGlobalObject global = proxy.GetIsolate()->context().global_object();
+ return proxy.IsDetachedFrom(global);
}
- return map()->is_access_check_needed();
+ return map().is_access_check_needed();
}
bool HeapObject::IsStruct() const {
- switch (map()->instance_type()) {
+ switch (map().instance_type()) {
#define MAKE_STRUCT_CASE(TYPE, Name, name) \
case TYPE: \
return true;
@@ -375,18 +375,18 @@
}
}
-#define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
- bool Object::Is##Name() const { \
- return IsHeapObject() && HeapObject::cast(*this)->Is##Name(); \
- } \
+#define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
+ bool Object::Is##Name() const { \
+ return IsHeapObject() && HeapObject::cast(*this).Is##Name(); \
+ } \
TYPE_CHECKER(Name)
STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE
double Object::Number() const {
DCHECK(IsNumber());
- return IsSmi() ? static_cast<double>(Smi(this->ptr())->value())
- : HeapNumber::unchecked_cast(*this)->value();
+ return IsSmi() ? static_cast<double>(Smi(this->ptr()).value())
+ : HeapNumber::unchecked_cast(*this).value();
}
// static
@@ -400,12 +400,12 @@
}
bool Object::IsNaN() const {
- return this->IsHeapNumber() && std::isnan(HeapNumber::cast(*this)->value());
+ return this->IsHeapNumber() && std::isnan(HeapNumber::cast(*this).value());
}
bool Object::IsMinusZero() const {
return this->IsHeapNumber() &&
- i::IsMinusZero(HeapNumber::cast(*this)->value());
+ i::IsMinusZero(HeapNumber::cast(*this).value());
}
OBJECT_CONSTRUCTORS_IMPL(RegExpMatchInfo, FixedArray)
@@ -430,11 +430,11 @@
DCHECK(!IsPropertyCell());
if (filter == PRIVATE_NAMES_ONLY) {
if (!IsSymbol()) return true;
- return !Symbol::cast(*this)->is_private_name();
+ return !Symbol::cast(*this).is_private_name();
} else if (IsSymbol()) {
if (filter & SKIP_SYMBOLS) return true;
- if (Symbol::cast(*this)->is_private()) return true;
+ if (Symbol::cast(*this).is_private()) return true;
} else {
if (filter & SKIP_STRINGS) return true;
}
@@ -486,7 +486,7 @@
return true;
}
if (IsHeapNumber()) {
- double num = HeapNumber::cast(*this)->value();
+ double num = HeapNumber::cast(*this).value();
return DoubleToUint32IfEqualToSelf(num, value);
}
return false;
@@ -510,7 +510,7 @@
// static
MaybeHandle<Object> Object::ToPropertyKey(Isolate* isolate,
Handle<Object> value) {
- if (value->IsSmi() || HeapObject::cast(*value)->IsName()) return value;
+ if (value->IsSmi() || HeapObject::cast(*value).IsName()) return value;
return ConvertToPropertyKey(isolate, value);
}
@@ -547,7 +547,7 @@
// static
MaybeHandle<Object> Object::ToUint32(Isolate* isolate, Handle<Object> input) {
- if (input->IsSmi()) return handle(Smi::cast(*input)->ToUint32Smi(), isolate);
+ if (input->IsSmi()) return handle(Smi::cast(*input).ToUint32Smi(), isolate);
return ConvertToUint32(isolate, input);
}
@@ -611,7 +611,7 @@
bool MapWord::IsForwardingAddress() const { return HAS_SMI_TAG(value_); }
MapWord MapWord::FromForwardingAddress(HeapObject object) {
- return MapWord(object->ptr() - kHeapObjectTag);
+ return MapWord(object.ptr() - kHeapObjectTag);
}
HeapObject MapWord::ToForwardingAddress() {
@@ -632,7 +632,7 @@
}
void HeapObject::VerifySmiField(int offset) {
- CHECK(READ_FIELD(*this, offset)->IsSmi());
+ CHECK(READ_FIELD(*this, offset).IsSmi());
STATIC_ASSERT(!COMPRESS_POINTERS_BOOL || kTaggedSize == kInt32Size);
}
@@ -807,7 +807,7 @@
// compression is supported) allow unaligned access to doubles and full words.
#endif // V8_COMPRESS_POINTERS
#ifdef V8_HOST_ARCH_32_BIT
- int instance_type = map->instance_type();
+ int instance_type = map.instance_type();
if (instance_type == FIXED_FLOAT64_ARRAY_TYPE ||
instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
return kDoubleAligned;
@@ -919,12 +919,12 @@
// static
Object Object::GetSimpleHash(Object object) {
DisallowHeapAllocation no_gc;
- if (object->IsSmi()) {
+ if (object.IsSmi()) {
uint32_t hash = ComputeUnseededHash(Smi::ToInt(object));
return Smi::FromInt(hash & Smi::kMaxValue);
}
- if (object->IsHeapNumber()) {
- double num = HeapNumber::cast(object)->value();
+ if (object.IsHeapNumber()) {
+ double num = HeapNumber::cast(object).value();
if (std::isnan(num)) return Smi::FromInt(Smi::kMaxValue);
// Use ComputeUnseededHash for all values in Signed32 range, including -0,
// which is considered equal to 0 because collections use SameValueZero.
@@ -937,34 +937,34 @@
}
return Smi::FromInt(hash & Smi::kMaxValue);
}
- if (object->IsName()) {
- uint32_t hash = Name::cast(object)->Hash();
+ if (object.IsName()) {
+ uint32_t hash = Name::cast(object).Hash();
return Smi::FromInt(hash);
}
- if (object->IsOddball()) {
- uint32_t hash = Oddball::cast(object)->to_string()->Hash();
+ if (object.IsOddball()) {
+ uint32_t hash = Oddball::cast(object).to_string().Hash();
return Smi::FromInt(hash);
}
- if (object->IsBigInt()) {
- uint32_t hash = BigInt::cast(object)->Hash();
+ if (object.IsBigInt()) {
+ uint32_t hash = BigInt::cast(object).Hash();
return Smi::FromInt(hash & Smi::kMaxValue);
}
- if (object->IsSharedFunctionInfo()) {
- uint32_t hash = SharedFunctionInfo::cast(object)->Hash();
+ if (object.IsSharedFunctionInfo()) {
+ uint32_t hash = SharedFunctionInfo::cast(object).Hash();
return Smi::FromInt(hash & Smi::kMaxValue);
}
- DCHECK(object->IsJSReceiver());
+ DCHECK(object.IsJSReceiver());
return object;
}
Object Object::GetHash() {
DisallowHeapAllocation no_gc;
Object hash = GetSimpleHash(*this);
- if (hash->IsSmi()) return hash;
+ if (hash.IsSmi()) return hash;
DCHECK(IsJSReceiver());
JSReceiver receiver = JSReceiver::cast(*this);
- return receiver->GetIdentityHash();
+ return receiver.GetIdentityHash();
}
Handle<Object> ObjectHashTableShape::AsHandle(Handle<Object> key) {
@@ -1036,8 +1036,8 @@
#undef FIELD_ACCESSORS
FreshlyAllocatedBigInt FreshlyAllocatedBigInt::cast(Object object) {
- SLOW_DCHECK(object->IsBigInt());
- return FreshlyAllocatedBigInt(object->ptr());
+ SLOW_DCHECK(object.IsBigInt());
+ return FreshlyAllocatedBigInt(object.ptr());
}
} // namespace internal
diff --git a/src/objects.cc b/src/objects.cc
index c1ae7ac..b29e37f 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -122,7 +122,7 @@
ShouldThrow GetShouldThrow(Isolate* isolate, Maybe<ShouldThrow> should_throw) {
if (should_throw.IsJust()) return should_throw.FromJust();
- LanguageMode mode = isolate->context()->scope_info()->language_mode();
+ LanguageMode mode = isolate->context().scope_info().language_mode();
if (mode == LanguageMode::kStrict) return kThrowOnError;
for (StackFrameIterator it(isolate); !it.done(); it.Advance()) {
@@ -133,7 +133,7 @@
JavaScriptFrame* js_frame = static_cast<JavaScriptFrame*>(it.frame());
std::vector<SharedFunctionInfo> functions;
js_frame->GetFunctions(&functions);
- LanguageMode closure_language_mode = functions.back()->language_mode();
+ LanguageMode closure_language_mode = functions.back().language_mode();
if (closure_language_mode > mode) {
mode = closure_language_mode;
}
@@ -178,7 +178,7 @@
if (FLAG_track_field_types) {
if (representation.IsHeapObject() && IsHeapObject()) {
// We can track only JavaScript objects with stable maps.
- Handle<Map> map(HeapObject::cast(*this)->map(), isolate);
+ Handle<Map> map(HeapObject::cast(*this).map(), isolate);
if (map->is_stable() && map->IsJSReceiverMap()) {
return FieldType::Class(map, isolate);
}
@@ -195,8 +195,7 @@
result->set_value_as_bits(kHoleNanInt64);
} else if (object->IsMutableHeapNumber()) {
// Ensure that all bits of the double value are preserved.
- result->set_value_as_bits(
- MutableHeapNumber::cast(*object)->value_as_bits());
+ result->set_value_as_bits(MutableHeapNumber::cast(*object).value_as_bits());
} else {
result->set_value(object->Number());
}
@@ -211,7 +210,7 @@
return object;
}
return isolate->factory()->NewHeapNumber(
- MutableHeapNumber::cast(*object)->value());
+ MutableHeapNumber::cast(*object).value());
}
MaybeHandle<JSReceiver> Object::ToObjectImpl(Isolate* isolate,
@@ -224,7 +223,7 @@
constructor = handle(native_context->number_function(), isolate);
} else {
int constructor_function_index =
- Handle<HeapObject>::cast(object)->map()->GetConstructorFunctionIndex();
+ Handle<HeapObject>::cast(object)->map().GetConstructorFunctionIndex();
if (constructor_function_index == Map::kNoConstructorFunctionIndex) {
if (method_name != nullptr) {
THROW_NEW_ERROR(
@@ -315,7 +314,7 @@
ASSIGN_RETURN_ON_EXCEPTION(
isolate, input,
ConvertToNumberOrNumeric(isolate, input, Conversion::kToNumber), Object);
- if (input->IsSmi()) return handle(Smi::cast(*input)->ToUint32Smi(), isolate);
+ if (input->IsSmi()) return handle(Smi::cast(*input).ToUint32Smi(), isolate);
return isolate->factory()->NewNumberFromUint(DoubleToUint32(input->Number()));
}
@@ -468,7 +467,7 @@
IncrementalStringBuilder builder(isolate);
builder.AppendCString("Symbol(");
- if (symbol->name()->IsString()) {
+ if (symbol->name().IsString()) {
builder.AppendString(handle(String::cast(symbol->name()), isolate));
}
builder.AppendCharacter(')');
@@ -522,7 +521,7 @@
// This is the only case where Object::ToObject throws.
DCHECK(!input->IsSmi());
int constructor_function_index =
- Handle<HeapObject>::cast(input)->map()->GetConstructorFunctionIndex();
+ Handle<HeapObject>::cast(input)->map().GetConstructorFunctionIndex();
if (constructor_function_index == Map::kNoConstructorFunctionIndex) {
return isolate->factory()->NewStringFromAsciiChecked("[object Unknown]");
}
@@ -582,9 +581,9 @@
if (IsBoolean()) return IsTrue(isolate);
if (IsNullOrUndefined(isolate)) return false;
if (IsUndetectable()) return false; // Undetectable object is false.
- if (IsString()) return String::cast(*this)->length() != 0;
- if (IsHeapNumber()) return DoubleToBoolean(HeapNumber::cast(*this)->value());
- if (IsBigInt()) return BigInt::cast(*this)->ToBoolean();
+ if (IsString()) return String::cast(*this).length() != 0;
+ if (IsHeapNumber()) return DoubleToBoolean(HeapNumber::cast(*this).value());
+ if (IsBigInt()) return BigInt::cast(*this).ToBoolean();
return true;
}
@@ -618,7 +617,7 @@
}
bool StrictNumberEquals(const Object x, const Object y) {
- return StrictNumberEquals(x->Number(), y->Number());
+ return StrictNumberEquals(x.Number(), y.Number());
}
bool StrictNumberEquals(Handle<Object> x, Handle<Object> y) {
@@ -783,13 +782,13 @@
bool Object::StrictEquals(Object that) {
if (this->IsNumber()) {
- if (!that->IsNumber()) return false;
+ if (!that.IsNumber()) return false;
return StrictNumberEquals(*this, that);
} else if (this->IsString()) {
- if (!that->IsString()) return false;
- return String::cast(*this)->Equals(String::cast(that));
+ if (!that.IsString()) return false;
+ return String::cast(*this).Equals(String::cast(that));
} else if (this->IsBigInt()) {
- if (!that->IsBigInt()) return false;
+ if (!that.IsBigInt()) return false;
return BigInt::EqualToBigInt(BigInt::cast(*this), BigInt::cast(that));
}
return *this == that;
@@ -799,7 +798,7 @@
Handle<String> Object::TypeOf(Isolate* isolate, Handle<Object> object) {
if (object->IsNumber()) return isolate->factory()->number_string();
if (object->IsOddball())
- return handle(Oddball::cast(*object)->type_of(), isolate);
+ return handle(Oddball::cast(*object).type_of(), isolate);
if (object->IsUndetectable()) {
return isolate->factory()->undefined_string();
}
@@ -947,7 +946,7 @@
Handle<JSArray> array = Handle<JSArray>::cast(object);
uint32_t length;
if (!array->HasArrayPrototype(isolate) ||
- !array->length()->ToUint32(&length) || !array->HasFastElements() ||
+ !array->length().ToUint32(&length) || !array->HasFastElements() ||
!JSObject::PrototypeHasNoElements(isolate, *array)) {
return MaybeHandle<FixedArray>();
}
@@ -1061,7 +1060,7 @@
// In case of global IC, the receiver is the global object. Replace by
// the global proxy.
if (receiver->IsJSGlobalObject()) {
- receiver = handle(JSGlobalObject::cast(*receiver)->global_proxy(),
+ receiver = handle(JSGlobalObject::cast(*receiver).global_proxy(),
it->isolate());
}
MaybeHandle<Object> result =
@@ -1226,7 +1225,7 @@
return true;
}
if (IsHeapNumber()) {
- double num = HeapNumber::cast(*this)->value();
+ double num = HeapNumber::cast(*this).value();
// Check range before conversion to avoid undefined behavior.
if (num >= kMinInt && num <= kMaxInt && FastI2D(FastD2I(num)) == num) {
*value = FastD2I(num);
@@ -1245,14 +1244,14 @@
Isolate* isolate, Handle<FunctionTemplateInfo> info,
MaybeHandle<Name> maybe_name) {
Object current_info = info->shared_function_info();
- if (current_info->IsSharedFunctionInfo()) {
+ if (current_info.IsSharedFunctionInfo()) {
return handle(SharedFunctionInfo::cast(current_info), isolate);
}
Handle<Name> name;
Handle<String> name_string;
if (maybe_name.ToHandle(&name) && name->IsString()) {
name_string = Handle<String>::cast(name);
- } else if (info->class_name()->IsString()) {
+ } else if (info->class_name().IsString()) {
name_string = handle(String::cast(info->class_name()), isolate);
} else {
name_string = isolate->factory()->empty_string();
@@ -1277,23 +1276,23 @@
bool FunctionTemplateInfo::IsTemplateFor(Map map) {
// There is a constraint on the object; check.
- if (!map->IsJSObjectMap()) return false;
+ if (!map.IsJSObjectMap()) return false;
// Fetch the constructor function of the object.
- Object cons_obj = map->GetConstructor();
+ Object cons_obj = map.GetConstructor();
Object type;
- if (cons_obj->IsJSFunction()) {
+ if (cons_obj.IsJSFunction()) {
JSFunction fun = JSFunction::cast(cons_obj);
- type = fun->shared()->function_data();
- } else if (cons_obj->IsFunctionTemplateInfo()) {
+ type = fun.shared().function_data();
+ } else if (cons_obj.IsFunctionTemplateInfo()) {
type = FunctionTemplateInfo::cast(cons_obj);
} else {
return false;
}
// Iterate through the chain of inheriting function templates to
// see if the required one occurs.
- while (type->IsFunctionTemplateInfo()) {
+ while (type.IsFunctionTemplateInfo()) {
if (type == *this) return true;
- type = FunctionTemplateInfo::cast(type)->GetParentTemplate();
+ type = FunctionTemplateInfo::cast(type).GetParentTemplate();
}
// Didn't find the required type in the inheritance chain.
return false;
@@ -1302,7 +1301,7 @@
// static
FunctionTemplateRareData FunctionTemplateInfo::AllocateFunctionTemplateRareData(
Isolate* isolate, Handle<FunctionTemplateInfo> function_template_info) {
- DCHECK(function_template_info->rare_data()->IsUndefined(isolate));
+ DCHECK(function_template_info->rare_data().IsUndefined(isolate));
Handle<Struct> struct_obj = isolate->factory()->NewStruct(
FUNCTION_TEMPLATE_RARE_DATA_TYPE, AllocationType::kOld);
Handle<FunctionTemplateRareData> rare_data =
@@ -1401,7 +1400,7 @@
// In case of global IC, the receiver is the global object. Replace by the
// global proxy.
if (receiver->IsJSGlobalObject()) {
- receiver = handle(JSGlobalObject::cast(*receiver)->global_proxy(), isolate);
+ receiver = handle(JSGlobalObject::cast(*receiver).global_proxy(), isolate);
}
// We should never get here to initialize a const with the hole value since a
@@ -1449,7 +1448,7 @@
}
// Regular accessor.
- Handle<Object> getter(AccessorPair::cast(*structure)->getter(), isolate);
+ Handle<Object> getter(AccessorPair::cast(*structure).getter(), isolate);
if (getter->IsFunctionTemplateInfo()) {
SaveAndSwitchContext save(isolate, *holder->GetCreationContext());
return Builtins::InvokeApiFunction(
@@ -1490,7 +1489,7 @@
if (!info->HasExpectedReceiverType()) return true;
if (!map->IsJSObjectMap()) return false;
return FunctionTemplateInfo::cast(info->expected_receiver_type())
- ->IsTemplateFor(*map);
+ .IsTemplateFor(*map);
}
Maybe<bool> Object::SetPropertyWithAccessor(
@@ -1502,7 +1501,7 @@
// In case of global IC, the receiver is the global object. Replace by the
// global proxy.
if (receiver->IsJSGlobalObject()) {
- receiver = handle(JSGlobalObject::cast(*receiver)->global_proxy(), isolate);
+ receiver = handle(JSGlobalObject::cast(*receiver).global_proxy(), isolate);
}
// We should never get here to initialize a const with the hole value since a
@@ -1554,7 +1553,7 @@
}
// Regular accessor.
- Handle<Object> setter(AccessorPair::cast(*structure)->setter(), isolate);
+ Handle<Object> setter(AccessorPair::cast(*structure).setter(), isolate);
if (setter->IsFunctionTemplateInfo()) {
SaveAndSwitchContext save(isolate, *holder->GetCreationContext());
Handle<Object> argv[] = {value};
@@ -1613,33 +1612,33 @@
Map Object::GetPrototypeChainRootMap(Isolate* isolate) const {
DisallowHeapAllocation no_alloc;
if (IsSmi()) {
- Context native_context = isolate->context()->native_context();
- return native_context->number_function()->initial_map();
+ Context native_context = isolate->context().native_context();
+ return native_context.number_function().initial_map();
}
const HeapObject heap_object = HeapObject::cast(*this);
- return heap_object->map()->GetPrototypeChainRootMap(isolate);
+ return heap_object.map().GetPrototypeChainRootMap(isolate);
}
Smi Object::GetOrCreateHash(Isolate* isolate) {
DisallowHeapAllocation no_gc;
Object hash = Object::GetSimpleHash(*this);
- if (hash->IsSmi()) return Smi::cast(hash);
+ if (hash.IsSmi()) return Smi::cast(hash);
DCHECK(IsJSReceiver());
- return JSReceiver::cast(*this)->GetOrCreateIdentityHash(isolate);
+ return JSReceiver::cast(*this).GetOrCreateIdentityHash(isolate);
}
bool Object::SameValue(Object other) {
if (other == *this) return true;
- if (IsNumber() && other->IsNumber()) {
- return SameNumberValue(Number(), other->Number());
+ if (IsNumber() && other.IsNumber()) {
+ return SameNumberValue(Number(), other.Number());
}
- if (IsString() && other->IsString()) {
- return String::cast(*this)->Equals(String::cast(other));
+ if (IsString() && other.IsString()) {
+ return String::cast(*this).Equals(String::cast(other));
}
- if (IsBigInt() && other->IsBigInt()) {
+ if (IsBigInt() && other.IsBigInt()) {
return BigInt::EqualToBigInt(BigInt::cast(*this), BigInt::cast(other));
}
return false;
@@ -1648,17 +1647,17 @@
bool Object::SameValueZero(Object other) {
if (other == *this) return true;
- if (IsNumber() && other->IsNumber()) {
+ if (IsNumber() && other.IsNumber()) {
double this_value = Number();
- double other_value = other->Number();
+ double other_value = other.Number();
// +0 == -0 is true
return this_value == other_value ||
(std::isnan(this_value) && std::isnan(other_value));
}
- if (IsString() && other->IsString()) {
- return String::cast(*this)->Equals(String::cast(other));
+ if (IsString() && other.IsString()) {
+ return String::cast(*this).Equals(String::cast(other));
}
- if (IsBigInt() && other->IsBigInt()) {
+ if (IsBigInt() && other.IsBigInt()) {
return BigInt::EqualToBigInt(BigInt::cast(*this), BigInt::cast(other));
}
return false;
@@ -1758,15 +1757,15 @@
// Check that this object is an array.
if (!IsJSArray()) return true;
JSArray array = JSArray::cast(*this);
- Isolate* isolate = array->GetIsolate();
+ Isolate* isolate = array.GetIsolate();
#ifdef V8_ENABLE_FORCE_SLOW_PATH
if (isolate->force_slow_path()) return true;
#endif
// Check that we have the original ArrayPrototype.
- if (!array->map()->prototype()->IsJSObject()) return true;
- JSObject array_proto = JSObject::cast(array->map()->prototype());
+ if (!array.map().prototype().IsJSObject()) return true;
+ JSObject array_proto = JSObject::cast(array.map().prototype());
if (!isolate->is_initial_array_prototype(array_proto)) return true;
// Check that the ArrayPrototype hasn't been modified in a way that would
@@ -1775,7 +1774,7 @@
// For FastPacked kinds, iteration will have the same effect as simply
// accessing each property in order.
- ElementsKind array_kind = array->GetElementsKind();
+ ElementsKind array_kind = array.GetElementsKind();
if (IsFastPackedElementsKind(array_kind)) return false;
// For FastHoley kinds, an element access on a hole would cause a lookup on
@@ -1811,14 +1810,14 @@
Smi smi;
HeapObject heap_object;
if (maybe_object->ToSmi(&smi)) {
- smi->SmiPrint(os);
+ smi.SmiPrint(os);
} else if (maybe_object->IsCleared()) {
os << "[cleared]";
} else if (maybe_object->GetHeapObjectIfWeak(&heap_object)) {
os << "[weak] ";
- heap_object->HeapObjectShortPrint(os);
+ heap_object.HeapObjectShortPrint(os);
} else if (maybe_object->GetHeapObjectIfStrong(&heap_object)) {
- heap_object->HeapObjectShortPrint(os);
+ heap_object.HeapObjectShortPrint(os);
} else {
UNREACHABLE();
}
@@ -1837,25 +1836,25 @@
if (IsString()) {
HeapStringAllocator allocator;
StringStream accumulator(&allocator);
- String::cast(*this)->StringShortPrint(&accumulator);
+ String::cast(*this).StringShortPrint(&accumulator);
os << accumulator.ToCString().get();
return;
}
if (IsJSObject()) {
HeapStringAllocator allocator;
StringStream accumulator(&allocator);
- JSObject::cast(*this)->JSObjectShortPrint(&accumulator);
+ JSObject::cast(*this).JSObjectShortPrint(&accumulator);
os << accumulator.ToCString().get();
return;
}
- switch (map()->instance_type()) {
+ switch (map().instance_type()) {
case MAP_TYPE: {
os << "<Map";
Map mapInstance = Map::cast(*this);
- if (mapInstance->IsJSObjectMap()) {
- os << "(" << ElementsKindToString(mapInstance->elements_kind()) << ")";
- } else if (mapInstance->instance_size() != kVariableSizeSentinel) {
- os << "[" << mapInstance->instance_size() << "]";
+ if (mapInstance.IsJSObjectMap()) {
+ os << "(" << ElementsKindToString(mapInstance.elements_kind()) << ")";
+ } else if (mapInstance.instance_size() != kVariableSizeSentinel) {
+ os << "[" << mapInstance.instance_size() << "]";
}
os << ">";
} break;
@@ -1863,97 +1862,97 @@
os << "<AwaitContext generator= ";
HeapStringAllocator allocator;
StringStream accumulator(&allocator);
- Context::cast(*this)->extension()->ShortPrint(&accumulator);
+ Context::cast(*this).extension().ShortPrint(&accumulator);
os << accumulator.ToCString().get();
os << '>';
break;
}
case BLOCK_CONTEXT_TYPE:
- os << "<BlockContext[" << Context::cast(*this)->length() << "]>";
+ os << "<BlockContext[" << Context::cast(*this).length() << "]>";
break;
case CATCH_CONTEXT_TYPE:
- os << "<CatchContext[" << Context::cast(*this)->length() << "]>";
+ os << "<CatchContext[" << Context::cast(*this).length() << "]>";
break;
case DEBUG_EVALUATE_CONTEXT_TYPE:
- os << "<DebugEvaluateContext[" << Context::cast(*this)->length() << "]>";
+ os << "<DebugEvaluateContext[" << Context::cast(*this).length() << "]>";
break;
case EVAL_CONTEXT_TYPE:
- os << "<EvalContext[" << Context::cast(*this)->length() << "]>";
+ os << "<EvalContext[" << Context::cast(*this).length() << "]>";
break;
case FUNCTION_CONTEXT_TYPE:
- os << "<FunctionContext[" << Context::cast(*this)->length() << "]>";
+ os << "<FunctionContext[" << Context::cast(*this).length() << "]>";
break;
case MODULE_CONTEXT_TYPE:
- os << "<ModuleContext[" << Context::cast(*this)->length() << "]>";
+ os << "<ModuleContext[" << Context::cast(*this).length() << "]>";
break;
case NATIVE_CONTEXT_TYPE:
- os << "<NativeContext[" << Context::cast(*this)->length() << "]>";
+ os << "<NativeContext[" << Context::cast(*this).length() << "]>";
break;
case SCRIPT_CONTEXT_TYPE:
- os << "<ScriptContext[" << Context::cast(*this)->length() << "]>";
+ os << "<ScriptContext[" << Context::cast(*this).length() << "]>";
break;
case WITH_CONTEXT_TYPE:
- os << "<WithContext[" << Context::cast(*this)->length() << "]>";
+ os << "<WithContext[" << Context::cast(*this).length() << "]>";
break;
case SCRIPT_CONTEXT_TABLE_TYPE:
- os << "<ScriptContextTable[" << FixedArray::cast(*this)->length() << "]>";
+ os << "<ScriptContextTable[" << FixedArray::cast(*this).length() << "]>";
break;
case HASH_TABLE_TYPE:
- os << "<HashTable[" << FixedArray::cast(*this)->length() << "]>";
+ os << "<HashTable[" << FixedArray::cast(*this).length() << "]>";
break;
case ORDERED_HASH_MAP_TYPE:
- os << "<OrderedHashMap[" << FixedArray::cast(*this)->length() << "]>";
+ os << "<OrderedHashMap[" << FixedArray::cast(*this).length() << "]>";
break;
case ORDERED_HASH_SET_TYPE:
- os << "<OrderedHashSet[" << FixedArray::cast(*this)->length() << "]>";
+ os << "<OrderedHashSet[" << FixedArray::cast(*this).length() << "]>";
break;
case ORDERED_NAME_DICTIONARY_TYPE:
- os << "<OrderedNameDictionary[" << FixedArray::cast(*this)->length()
+ os << "<OrderedNameDictionary[" << FixedArray::cast(*this).length()
<< "]>";
break;
case NAME_DICTIONARY_TYPE:
- os << "<NameDictionary[" << FixedArray::cast(*this)->length() << "]>";
+ os << "<NameDictionary[" << FixedArray::cast(*this).length() << "]>";
break;
case GLOBAL_DICTIONARY_TYPE:
- os << "<GlobalDictionary[" << FixedArray::cast(*this)->length() << "]>";
+ os << "<GlobalDictionary[" << FixedArray::cast(*this).length() << "]>";
break;
case NUMBER_DICTIONARY_TYPE:
- os << "<NumberDictionary[" << FixedArray::cast(*this)->length() << "]>";
+ os << "<NumberDictionary[" << FixedArray::cast(*this).length() << "]>";
break;
case SIMPLE_NUMBER_DICTIONARY_TYPE:
- os << "<SimpleNumberDictionary[" << FixedArray::cast(*this)->length()
+ os << "<SimpleNumberDictionary[" << FixedArray::cast(*this).length()
<< "]>";
break;
case STRING_TABLE_TYPE:
- os << "<StringTable[" << FixedArray::cast(*this)->length() << "]>";
+ os << "<StringTable[" << FixedArray::cast(*this).length() << "]>";
break;
case FIXED_ARRAY_TYPE:
- os << "<FixedArray[" << FixedArray::cast(*this)->length() << "]>";
+ os << "<FixedArray[" << FixedArray::cast(*this).length() << "]>";
break;
case OBJECT_BOILERPLATE_DESCRIPTION_TYPE:
- os << "<ObjectBoilerplateDescription["
- << FixedArray::cast(*this)->length() << "]>";
+ os << "<ObjectBoilerplateDescription[" << FixedArray::cast(*this).length()
+ << "]>";
break;
case FIXED_DOUBLE_ARRAY_TYPE:
- os << "<FixedDoubleArray[" << FixedDoubleArray::cast(*this)->length()
+ os << "<FixedDoubleArray[" << FixedDoubleArray::cast(*this).length()
<< "]>";
break;
case BYTE_ARRAY_TYPE:
- os << "<ByteArray[" << ByteArray::cast(*this)->length() << "]>";
+ os << "<ByteArray[" << ByteArray::cast(*this).length() << "]>";
break;
case BYTECODE_ARRAY_TYPE:
- os << "<BytecodeArray[" << BytecodeArray::cast(*this)->length() << "]>";
+ os << "<BytecodeArray[" << BytecodeArray::cast(*this).length() << "]>";
break;
case DESCRIPTOR_ARRAY_TYPE:
os << "<DescriptorArray["
- << DescriptorArray::cast(*this)->number_of_descriptors() << "]>";
+ << DescriptorArray::cast(*this).number_of_descriptors() << "]>";
break;
case TRANSITION_ARRAY_TYPE:
- os << "<TransitionArray[" << TransitionArray::cast(*this)->length()
+ os << "<TransitionArray[" << TransitionArray::cast(*this).length()
<< "]>";
break;
case PROPERTY_ARRAY_TYPE:
- os << "<PropertyArray[" << PropertyArray::cast(*this)->length() << "]>";
+ os << "<PropertyArray[" << PropertyArray::cast(*this).length() << "]>";
break;
case FEEDBACK_CELL_TYPE: {
{
@@ -1976,19 +1975,19 @@
}
case CLOSURE_FEEDBACK_CELL_ARRAY_TYPE:
os << "<ClosureFeedbackCellArray["
- << ClosureFeedbackCellArray::cast(*this)->length() << "]>";
+ << ClosureFeedbackCellArray::cast(*this).length() << "]>";
break;
case FEEDBACK_VECTOR_TYPE:
- os << "<FeedbackVector[" << FeedbackVector::cast(*this)->length() << "]>";
+ os << "<FeedbackVector[" << FeedbackVector::cast(*this).length() << "]>";
break;
case FREE_SPACE_TYPE:
- os << "<FreeSpace[" << FreeSpace::cast(*this)->size() << "]>";
+ os << "<FreeSpace[" << FreeSpace::cast(*this).size() << "]>";
break;
-#define TYPED_ARRAY_SHORT_PRINT(Type, type, TYPE, ctype) \
- case FIXED_##TYPE##_ARRAY_TYPE: \
- os << "<Fixed" #Type "Array[" \
- << Fixed##Type##Array::cast(*this)->number_of_elements_onheap_only() \
- << "]>"; \
+#define TYPED_ARRAY_SHORT_PRINT(Type, type, TYPE, ctype) \
+ case FIXED_##TYPE##_ARRAY_TYPE: \
+ os << "<Fixed" #Type "Array[" \
+ << Fixed##Type##Array::cast(*this).number_of_elements_onheap_only() \
+ << "]>"; \
break;
TYPED_ARRAYS(TYPED_ARRAY_SHORT_PRINT)
@@ -1996,31 +1995,31 @@
case PREPARSE_DATA_TYPE: {
PreparseData data = PreparseData::cast(*this);
- os << "<PreparseData[data=" << data->data_length()
- << " children=" << data->children_length() << "]>";
+ os << "<PreparseData[data=" << data.data_length()
+ << " children=" << data.children_length() << "]>";
break;
}
case UNCOMPILED_DATA_WITHOUT_PREPARSE_DATA_TYPE: {
UncompiledDataWithoutPreparseData data =
UncompiledDataWithoutPreparseData::cast(*this);
- os << "<UncompiledDataWithoutPreparseData (" << data->start_position()
- << ", " << data->end_position() << ")]>";
+ os << "<UncompiledDataWithoutPreparseData (" << data.start_position()
+ << ", " << data.end_position() << ")]>";
break;
}
case UNCOMPILED_DATA_WITH_PREPARSE_DATA_TYPE: {
UncompiledDataWithPreparseData data =
UncompiledDataWithPreparseData::cast(*this);
- os << "<UncompiledDataWithPreparseData (" << data->start_position()
- << ", " << data->end_position()
- << ") preparsed=" << Brief(data->preparse_data()) << ">";
+ os << "<UncompiledDataWithPreparseData (" << data.start_position() << ", "
+ << data.end_position() << ") preparsed=" << Brief(data.preparse_data())
+ << ">";
break;
}
case SHARED_FUNCTION_INFO_TYPE: {
SharedFunctionInfo shared = SharedFunctionInfo::cast(*this);
- std::unique_ptr<char[]> debug_name = shared->DebugName()->ToCString();
+ std::unique_ptr<char[]> debug_name = shared.DebugName().ToCString();
if (debug_name[0] != 0) {
os << "<SharedFunctionInfo " << debug_name.get() << ">";
} else {
@@ -2031,32 +2030,32 @@
case JS_MESSAGE_OBJECT_TYPE:
os << "<JSMessageObject>";
break;
-#define MAKE_STRUCT_CASE(TYPE, Name, name) \
- case TYPE: \
- os << "<" #Name; \
- Name::cast(*this)->BriefPrintDetails(os); \
- os << ">"; \
+#define MAKE_STRUCT_CASE(TYPE, Name, name) \
+ case TYPE: \
+ os << "<" #Name; \
+ Name::cast(*this).BriefPrintDetails(os); \
+ os << ">"; \
break;
STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
case ALLOCATION_SITE_TYPE: {
os << "<AllocationSite";
- AllocationSite::cast(*this)->BriefPrintDetails(os);
+ AllocationSite::cast(*this).BriefPrintDetails(os);
os << ">";
break;
}
case SCOPE_INFO_TYPE: {
ScopeInfo scope = ScopeInfo::cast(*this);
os << "<ScopeInfo";
- if (scope->length()) os << " " << scope->scope_type() << " ";
- os << "[" << scope->length() << "]>";
+ if (scope.length()) os << " " << scope.scope_type() << " ";
+ os << "[" << scope.length() << "]>";
break;
}
case CODE_TYPE: {
Code code = Code::cast(*this);
- os << "<Code " << Code::Kind2String(code->kind());
- if (code->is_builtin()) {
- os << " " << Builtins::name(code->builtin_index());
+ os << "<Code " << Code::Kind2String(code.kind());
+ if (code.is_builtin()) {
+ os << " " << Builtins::name(code.builtin_index());
}
os << ">";
break;
@@ -2074,31 +2073,31 @@
os << "<false>";
} else {
os << "<Odd Oddball: ";
- os << Oddball::cast(*this)->to_string()->ToCString().get();
+ os << Oddball::cast(*this).to_string().ToCString().get();
os << ">";
}
break;
}
case SYMBOL_TYPE: {
Symbol symbol = Symbol::cast(*this);
- symbol->SymbolShortPrint(os);
+ symbol.SymbolShortPrint(os);
break;
}
case HEAP_NUMBER_TYPE: {
os << "<HeapNumber ";
- HeapNumber::cast(*this)->HeapNumberPrint(os);
+ HeapNumber::cast(*this).HeapNumberPrint(os);
os << ">";
break;
}
case MUTABLE_HEAP_NUMBER_TYPE: {
os << "<MutableHeapNumber ";
- MutableHeapNumber::cast(*this)->MutableHeapNumberPrint(os);
+ MutableHeapNumber::cast(*this).MutableHeapNumberPrint(os);
os << '>';
break;
}
case BIGINT_TYPE: {
os << "<BigInt ";
- BigInt::cast(*this)->BigIntShortPrint(os);
+ BigInt::cast(*this).BigIntShortPrint(os);
os << ">";
break;
}
@@ -2112,7 +2111,7 @@
os << "<Cell value= ";
HeapStringAllocator allocator;
StringStream accumulator(&allocator);
- Cell::cast(*this)->value()->ShortPrint(&accumulator);
+ Cell::cast(*this).value().ShortPrint(&accumulator);
os << accumulator.ToCString().get();
os << '>';
break;
@@ -2120,11 +2119,11 @@
case PROPERTY_CELL_TYPE: {
PropertyCell cell = PropertyCell::cast(*this);
os << "<PropertyCell name=";
- cell->name()->ShortPrint(os);
+ cell.name().ShortPrint(os);
os << " value=";
HeapStringAllocator allocator;
StringStream accumulator(&allocator);
- cell->value()->ShortPrint(&accumulator);
+ cell.value().ShortPrint(&accumulator);
os << accumulator.ToCString().get();
os << '>';
break;
@@ -2132,10 +2131,10 @@
case CALL_HANDLER_INFO_TYPE: {
CallHandlerInfo info = CallHandlerInfo::cast(*this);
os << "<CallHandlerInfo ";
- os << "callback= " << Brief(info->callback());
- os << ", js_callback= " << Brief(info->js_callback());
- os << ", data= " << Brief(info->data());
- if (info->IsSideEffectFreeCallHandlerInfo()) {
+ os << "callback= " << Brief(info.callback());
+ os << ", js_callback= " << Brief(info.js_callback());
+ os << ", data= " << Brief(info.data());
+ if (info.IsSideEffectFreeCallHandlerInfo()) {
os << ", side_effect_free= true>";
} else {
os << ", side_effect_free= false>";
@@ -2143,7 +2142,7 @@
break;
}
default:
- os << "<Other heap object (" << map()->instance_type() << ")>";
+ os << "<Other heap object (" << map().instance_type() << ")>";
break;
}
}
@@ -2193,120 +2192,120 @@
bool HeapObject::IsValidSlot(Map map, int offset) {
DCHECK_NE(0, offset);
- return BodyDescriptorApply<CallIsValidSlot, bool>(map->instance_type(), map,
+ return BodyDescriptorApply<CallIsValidSlot, bool>(map.instance_type(), map,
*this, offset, 0);
}
int HeapObject::SizeFromMap(Map map) const {
- int instance_size = map->instance_size();
+ int instance_size = map.instance_size();
if (instance_size != kVariableSizeSentinel) return instance_size;
// Only inline the most frequent cases.
- InstanceType instance_type = map->instance_type();
+ InstanceType instance_type = map.instance_type();
if (IsInRange(instance_type, FIRST_FIXED_ARRAY_TYPE, LAST_FIXED_ARRAY_TYPE)) {
return FixedArray::SizeFor(
- FixedArray::unchecked_cast(*this)->synchronized_length());
+ FixedArray::unchecked_cast(*this).synchronized_length());
}
if (IsInRange(instance_type, FIRST_CONTEXT_TYPE, LAST_CONTEXT_TYPE)) {
// Native context has fixed size.
DCHECK_NE(instance_type, NATIVE_CONTEXT_TYPE);
- return Context::SizeFor(Context::unchecked_cast(*this)->length());
+ return Context::SizeFor(Context::unchecked_cast(*this).length());
}
if (instance_type == ONE_BYTE_STRING_TYPE ||
instance_type == ONE_BYTE_INTERNALIZED_STRING_TYPE) {
// Strings may get concurrently truncated, hence we have to access its
// length synchronized.
return SeqOneByteString::SizeFor(
- SeqOneByteString::unchecked_cast(*this)->synchronized_length());
+ SeqOneByteString::unchecked_cast(*this).synchronized_length());
}
if (instance_type == BYTE_ARRAY_TYPE) {
return ByteArray::SizeFor(
- ByteArray::unchecked_cast(*this)->synchronized_length());
+ ByteArray::unchecked_cast(*this).synchronized_length());
}
if (instance_type == BYTECODE_ARRAY_TYPE) {
return BytecodeArray::SizeFor(
- BytecodeArray::unchecked_cast(*this)->synchronized_length());
+ BytecodeArray::unchecked_cast(*this).synchronized_length());
}
if (instance_type == FREE_SPACE_TYPE) {
- return FreeSpace::unchecked_cast(*this)->relaxed_read_size();
+ return FreeSpace::unchecked_cast(*this).relaxed_read_size();
}
if (instance_type == STRING_TYPE ||
instance_type == INTERNALIZED_STRING_TYPE) {
// Strings may get concurrently truncated, hence we have to access its
// length synchronized.
return SeqTwoByteString::SizeFor(
- SeqTwoByteString::unchecked_cast(*this)->synchronized_length());
+ SeqTwoByteString::unchecked_cast(*this).synchronized_length());
}
if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
return FixedDoubleArray::SizeFor(
- FixedDoubleArray::unchecked_cast(*this)->synchronized_length());
+ FixedDoubleArray::unchecked_cast(*this).synchronized_length());
}
if (instance_type == FEEDBACK_METADATA_TYPE) {
return FeedbackMetadata::SizeFor(
- FeedbackMetadata::unchecked_cast(*this)->synchronized_slot_count());
+ FeedbackMetadata::unchecked_cast(*this).synchronized_slot_count());
}
if (instance_type == DESCRIPTOR_ARRAY_TYPE) {
return DescriptorArray::SizeFor(
- DescriptorArray::unchecked_cast(*this)->number_of_all_descriptors());
+ DescriptorArray::unchecked_cast(*this).number_of_all_descriptors());
}
if (IsInRange(instance_type, FIRST_WEAK_FIXED_ARRAY_TYPE,
LAST_WEAK_FIXED_ARRAY_TYPE)) {
return WeakFixedArray::SizeFor(
- WeakFixedArray::unchecked_cast(*this)->synchronized_length());
+ WeakFixedArray::unchecked_cast(*this).synchronized_length());
}
if (instance_type == WEAK_ARRAY_LIST_TYPE) {
return WeakArrayList::SizeForCapacity(
- WeakArrayList::unchecked_cast(*this)->synchronized_capacity());
+ WeakArrayList::unchecked_cast(*this).synchronized_capacity());
}
if (IsInRange(instance_type, FIRST_FIXED_TYPED_ARRAY_TYPE,
LAST_FIXED_TYPED_ARRAY_TYPE)) {
- return FixedTypedArrayBase::unchecked_cast(*this)->TypedArraySize(
+ return FixedTypedArrayBase::unchecked_cast(*this).TypedArraySize(
instance_type);
}
if (instance_type == SMALL_ORDERED_HASH_SET_TYPE) {
return SmallOrderedHashSet::SizeFor(
- SmallOrderedHashSet::unchecked_cast(*this)->Capacity());
+ SmallOrderedHashSet::unchecked_cast(*this).Capacity());
}
if (instance_type == SMALL_ORDERED_HASH_MAP_TYPE) {
return SmallOrderedHashMap::SizeFor(
- SmallOrderedHashMap::unchecked_cast(*this)->Capacity());
+ SmallOrderedHashMap::unchecked_cast(*this).Capacity());
}
if (instance_type == SMALL_ORDERED_NAME_DICTIONARY_TYPE) {
return SmallOrderedNameDictionary::SizeFor(
- SmallOrderedNameDictionary::unchecked_cast(*this)->Capacity());
+ SmallOrderedNameDictionary::unchecked_cast(*this).Capacity());
}
if (instance_type == PROPERTY_ARRAY_TYPE) {
return PropertyArray::SizeFor(
- PropertyArray::cast(*this)->synchronized_length());
+ PropertyArray::cast(*this).synchronized_length());
}
if (instance_type == FEEDBACK_VECTOR_TYPE) {
return FeedbackVector::SizeFor(
- FeedbackVector::unchecked_cast(*this)->length());
+ FeedbackVector::unchecked_cast(*this).length());
}
if (instance_type == BIGINT_TYPE) {
- return BigInt::SizeFor(BigInt::unchecked_cast(*this)->length());
+ return BigInt::SizeFor(BigInt::unchecked_cast(*this).length());
}
if (instance_type == PREPARSE_DATA_TYPE) {
PreparseData data = PreparseData::unchecked_cast(*this);
- return PreparseData::SizeFor(data->data_length(), data->children_length());
+ return PreparseData::SizeFor(data.data_length(), data.children_length());
}
if (instance_type == CODE_TYPE) {
- return Code::unchecked_cast(*this)->CodeSize();
+ return Code::unchecked_cast(*this).CodeSize();
}
DCHECK_EQ(instance_type, EMBEDDER_DATA_ARRAY_TYPE);
return EmbedderDataArray::SizeFor(
- EmbedderDataArray::unchecked_cast(*this)->length());
+ EmbedderDataArray::unchecked_cast(*this).length());
}
bool HeapObject::NeedsRehashing() const {
- switch (map()->instance_type()) {
+ switch (map().instance_type()) {
case DESCRIPTOR_ARRAY_TYPE:
- return DescriptorArray::cast(*this)->number_of_descriptors() > 1;
+ return DescriptorArray::cast(*this).number_of_descriptors() > 1;
case TRANSITION_ARRAY_TYPE:
- return TransitionArray::cast(*this)->number_of_entries() > 1;
+ return TransitionArray::cast(*this).number_of_entries() > 1;
case ORDERED_HASH_MAP_TYPE:
- return OrderedHashMap::cast(*this)->NumberOfElements() > 0;
+ return OrderedHashMap::cast(*this).NumberOfElements() > 0;
case ORDERED_HASH_SET_TYPE:
- return OrderedHashSet::cast(*this)->NumberOfElements() > 0;
+ return OrderedHashSet::cast(*this).NumberOfElements() > 0;
case NAME_DICTIONARY_TYPE:
case GLOBAL_DICTIONARY_TYPE:
case NUMBER_DICTIONARY_TYPE:
@@ -2324,7 +2323,7 @@
bool HeapObject::CanBeRehashed() const {
DCHECK(NeedsRehashing());
- switch (map()->instance_type()) {
+ switch (map().instance_type()) {
case ORDERED_HASH_MAP_TYPE:
case ORDERED_HASH_SET_TYPE:
case ORDERED_NAME_DICTIONARY_TYPE:
@@ -2341,11 +2340,11 @@
case TRANSITION_ARRAY_TYPE:
return true;
case SMALL_ORDERED_HASH_MAP_TYPE:
- return SmallOrderedHashMap::cast(*this)->NumberOfElements() == 0;
+ return SmallOrderedHashMap::cast(*this).NumberOfElements() == 0;
case SMALL_ORDERED_HASH_SET_TYPE:
- return SmallOrderedHashMap::cast(*this)->NumberOfElements() == 0;
+ return SmallOrderedHashMap::cast(*this).NumberOfElements() == 0;
case SMALL_ORDERED_NAME_DICTIONARY_TYPE:
- return SmallOrderedNameDictionary::cast(*this)->NumberOfElements() == 0;
+ return SmallOrderedNameDictionary::cast(*this).NumberOfElements() == 0;
default:
return false;
}
@@ -2353,45 +2352,45 @@
}
void HeapObject::RehashBasedOnMap(ReadOnlyRoots roots) {
- switch (map()->instance_type()) {
+ switch (map().instance_type()) {
case HASH_TABLE_TYPE:
UNREACHABLE();
case NAME_DICTIONARY_TYPE:
- NameDictionary::cast(*this)->Rehash(roots);
+ NameDictionary::cast(*this).Rehash(roots);
break;
case GLOBAL_DICTIONARY_TYPE:
- GlobalDictionary::cast(*this)->Rehash(roots);
+ GlobalDictionary::cast(*this).Rehash(roots);
break;
case NUMBER_DICTIONARY_TYPE:
- NumberDictionary::cast(*this)->Rehash(roots);
+ NumberDictionary::cast(*this).Rehash(roots);
break;
case SIMPLE_NUMBER_DICTIONARY_TYPE:
- SimpleNumberDictionary::cast(*this)->Rehash(roots);
+ SimpleNumberDictionary::cast(*this).Rehash(roots);
break;
case STRING_TABLE_TYPE:
- StringTable::cast(*this)->Rehash(roots);
+ StringTable::cast(*this).Rehash(roots);
break;
case DESCRIPTOR_ARRAY_TYPE:
- DCHECK_LE(1, DescriptorArray::cast(*this)->number_of_descriptors());
- DescriptorArray::cast(*this)->Sort();
+ DCHECK_LE(1, DescriptorArray::cast(*this).number_of_descriptors());
+ DescriptorArray::cast(*this).Sort();
break;
case TRANSITION_ARRAY_TYPE:
- TransitionArray::cast(*this)->Sort();
+ TransitionArray::cast(*this).Sort();
break;
case SMALL_ORDERED_HASH_MAP_TYPE:
- DCHECK_EQ(0, SmallOrderedHashMap::cast(*this)->NumberOfElements());
+ DCHECK_EQ(0, SmallOrderedHashMap::cast(*this).NumberOfElements());
break;
case SMALL_ORDERED_HASH_SET_TYPE:
- DCHECK_EQ(0, SmallOrderedHashSet::cast(*this)->NumberOfElements());
+ DCHECK_EQ(0, SmallOrderedHashSet::cast(*this).NumberOfElements());
break;
case SMALL_ORDERED_NAME_DICTIONARY_TYPE:
- DCHECK_EQ(0, SmallOrderedNameDictionary::cast(*this)->NumberOfElements());
+ DCHECK_EQ(0, SmallOrderedNameDictionary::cast(*this).NumberOfElements());
break;
case ONE_BYTE_INTERNALIZED_STRING_TYPE:
case INTERNALIZED_STRING_TYPE:
// Rare case, rehash read-only space strings before they are sealed.
DCHECK(ReadOnlyHeap::Contains(*this));
- String::cast(*this)->Hash();
+ String::cast(*this).Hash();
break;
default:
UNREACHABLE();
@@ -2399,7 +2398,7 @@
}
bool HeapObject::IsExternal(Isolate* isolate) const {
- return map()->FindRootMap(isolate) == isolate->heap()->external_map();
+ return map().FindRootMap(isolate) == isolate->heap()->external_map();
}
void DescriptorArray::GeneralizeAllFields() {
@@ -2453,7 +2452,7 @@
// In case of global IC, the receiver is the global object. Replace by
// the global proxy.
if (receiver->IsJSGlobalObject()) {
- receiver = handle(JSGlobalObject::cast(*receiver)->global_proxy(),
+ receiver = handle(JSGlobalObject::cast(*receiver).global_proxy(),
it->isolate());
}
return JSProxy::SetProperty(it->GetHolder<JSProxy>(), it->GetName(),
@@ -2486,7 +2485,7 @@
Handle<Object> accessors = it->GetAccessors();
if (accessors->IsAccessorInfo() &&
!it->HolderIsReceiverOrHiddenPrototype() &&
- AccessorInfo::cast(*accessors)->is_special_data_property()) {
+ AccessorInfo::cast(*accessors).is_special_data_property()) {
*found = false;
return Nothing<bool>();
}
@@ -2709,8 +2708,8 @@
Handle<Object> to_assign = value;
// Convert the incoming value to a number for storing into typed arrays.
if (it->IsElement() && receiver->IsJSObject() &&
- JSObject::cast(*receiver)->HasFixedTypedArrayElements()) {
- ElementsKind elements_kind = JSObject::cast(*receiver)->GetElementsKind();
+ JSObject::cast(*receiver).HasFixedTypedArrayElements()) {
+ ElementsKind elements_kind = JSObject::cast(*receiver).GetElementsKind();
if (elements_kind == BIGINT64_ELEMENTS ||
elements_kind == BIGUINT64_ELEMENTS) {
ASSIGN_RETURN_ON_EXCEPTION_VALUE(it->isolate(), to_assign,
@@ -2864,7 +2863,7 @@
int valid_descriptors,
Handle<FixedArray> array) {
for (int i = 0; i < valid_descriptors; i++) {
- if (*key == AccessorInfo::cast(array->get(i))->name()) return true;
+ if (*key == AccessorInfo::cast(array->get(i)).name()) return true;
}
return false;
}
@@ -3104,7 +3103,7 @@
THROW_NEW_ERROR(isolate, NewTypeError(MessageTemplate::kProxyNonObject),
JSProxy);
}
- if (target->IsJSProxy() && JSProxy::cast(*target)->IsRevoked()) {
+ if (target->IsJSProxy() && JSProxy::cast(*target).IsRevoked()) {
THROW_NEW_ERROR(isolate,
NewTypeError(MessageTemplate::kProxyHandlerOrTargetRevoked),
JSProxy);
@@ -3113,7 +3112,7 @@
THROW_NEW_ERROR(isolate, NewTypeError(MessageTemplate::kProxyNonObject),
JSProxy);
}
- if (handler->IsJSProxy() && JSProxy::cast(*handler)->IsRevoked()) {
+ if (handler->IsJSProxy() && JSProxy::cast(*handler).IsRevoked()) {
THROW_NEW_ERROR(isolate,
NewTypeError(MessageTemplate::kProxyHandlerOrTargetRevoked),
JSProxy);
@@ -3125,7 +3124,7 @@
// static
MaybeHandle<NativeContext> JSProxy::GetFunctionRealm(Handle<JSProxy> proxy) {
- DCHECK(proxy->map()->is_constructor());
+ DCHECK(proxy->map().is_constructor());
if (proxy->IsRevoked()) {
THROW_NEW_ERROR(proxy->GetIsolate(),
NewTypeError(MessageTemplate::kProxyRevoked),
@@ -3150,7 +3149,7 @@
bool PropertyKeyToArrayLength(Handle<Object> value, uint32_t* length) {
DCHECK(value->IsNumber() || value->IsName());
if (value->ToArrayLength(length)) return true;
- if (value->IsString()) return String::cast(*value)->AsArrayIndex(length);
+ if (value->IsString()) return String::cast(*value).AsArrayIndex(length);
return false;
}
@@ -3331,7 +3330,7 @@
USE(success);
}
uint32_t actual_new_len = 0;
- CHECK(a->length()->ToArrayLength(&actual_new_len));
+ CHECK(a->length().ToArrayLength(&actual_new_len));
// Steps 19d-v, 21. Return false if there were non-deletable elements.
bool result = actual_new_len == new_len;
if (!result) {
@@ -3469,7 +3468,7 @@
RETURN_FAILURE(isolate, GetShouldThrow(isolate, should_throw),
NewTypeError(MessageTemplate::kProxyPrivate));
}
- DCHECK(proxy->map()->is_dictionary_map());
+ DCHECK(proxy->map().is_dictionary_map());
Handle<Object> value =
desc->has_value()
? desc->value()
@@ -3719,13 +3718,13 @@
Name key = desc->GetKey(i);
PropertyDetails details = desc->GetDetails(i);
// Bulk attribute changes never affect private properties.
- if (!key->IsPrivate()) {
+ if (!key.IsPrivate()) {
int mask = DONT_DELETE | DONT_ENUM;
// READ_ONLY is an invalid attribute for JS setters/getters.
HeapObject heap_object;
if (details.kind() != kAccessor ||
!(value_or_field_type->GetHeapObjectIfStrong(&heap_object) &&
- heap_object->IsAccessorPair())) {
+ heap_object.IsAccessorPair())) {
mask |= READ_ONLY;
}
details = details.CopyAddAttributes(
@@ -3761,7 +3760,7 @@
Name key = src->GetKey(i);
PropertyDetails details = src->GetDetails(i);
- DCHECK(!key->IsPrivateName());
+ DCHECK(!key.IsPrivateName());
DCHECK(details.IsEnumerable());
DCHECK_EQ(details.kind(), kData);
@@ -3797,11 +3796,11 @@
bool DescriptorArray::IsEqualUpTo(DescriptorArray desc, int nof_descriptors) {
for (int i = 0; i < nof_descriptors; i++) {
- if (GetKey(i) != desc->GetKey(i) || GetValue(i) != desc->GetValue(i)) {
+ if (GetKey(i) != desc.GetKey(i) || GetValue(i) != desc.GetValue(i)) {
return false;
}
PropertyDetails details = GetDetails(i);
- PropertyDetails other_details = desc->GetDetails(i);
+ PropertyDetails other_details = desc.GetDetails(i);
if (details.kind() != other_details.kind() ||
details.location() != other_details.location() ||
!details.representation().Equals(other_details.representation())) {
@@ -3835,7 +3834,7 @@
for (int i = 1; i < length(); ++i) {
Object a_obj = get(i - 1);
Object b_obj = get(i);
- if (!a_obj->IsNumber() || !b_obj->IsNumber()) return false;
+ if (!a_obj.IsNumber() || !b_obj.IsNumber()) return false;
uint32_t a = NumberToUint32(a_obj);
uint32_t b = NumberToUint32(b_obj);
@@ -3868,9 +3867,9 @@
// Return early if len == 0 so that we don't try to read the write barrier off
// a canonical read-only empty fixed array.
if (len == 0) return;
- WriteBarrierMode mode = dest->GetWriteBarrierMode(no_gc);
+ WriteBarrierMode mode = dest.GetWriteBarrierMode(no_gc);
for (int index = 0; index < len; index++) {
- dest->set(dest_pos + index, get(pos + index), mode);
+ dest.set(dest_pos + index, get(pos + index), mode);
}
}
@@ -4132,7 +4131,7 @@
// The {code} will be {nullptr} for interpreted wasm frames.
Handle<Object> code_ref = isolate->factory()->undefined_value();
if (code) {
- auto native_module = wasm_instance->module_object()->shared_native_module();
+ auto native_module = wasm_instance->module_object().shared_native_module();
code_ref = Managed<wasm::GlobalWasmCodeRef>::Allocate(
isolate, 0, code, std::move(native_module));
}
@@ -4200,14 +4199,14 @@
enum_cache = *isolate->factory()->NewEnumCache(keys, indices);
descriptors->set_enum_cache(enum_cache);
} else {
- enum_cache->set_keys(*keys);
- enum_cache->set_indices(*indices);
+ enum_cache.set_keys(*keys);
+ enum_cache.set_indices(*indices);
}
}
void DescriptorArray::CopyFrom(int index, DescriptorArray src) {
- PropertyDetails details = src->GetDetails(index);
- Set(index, src->GetKey(index), src->GetValue(index), details);
+ PropertyDetails details = src.GetDetails(index);
+ Set(index, src.GetKey(index), src.GetValue(index), details);
}
void DescriptorArray::Sort() {
@@ -4220,12 +4219,12 @@
const int max_parent_index = (len / 2) - 1;
for (int i = max_parent_index; i >= 0; --i) {
int parent_index = i;
- const uint32_t parent_hash = GetSortedKey(i)->Hash();
+ const uint32_t parent_hash = GetSortedKey(i).Hash();
while (parent_index <= max_parent_index) {
int child_index = 2 * parent_index + 1;
- uint32_t child_hash = GetSortedKey(child_index)->Hash();
+ uint32_t child_hash = GetSortedKey(child_index).Hash();
if (child_index + 1 < len) {
- uint32_t right_child_hash = GetSortedKey(child_index + 1)->Hash();
+ uint32_t right_child_hash = GetSortedKey(child_index + 1).Hash();
if (right_child_hash > child_hash) {
child_index++;
child_hash = right_child_hash;
@@ -4244,13 +4243,13 @@
SwapSortedKeys(0, i);
// Shift down the new top element.
int parent_index = 0;
- const uint32_t parent_hash = GetSortedKey(parent_index)->Hash();
+ const uint32_t parent_hash = GetSortedKey(parent_index).Hash();
const int max_parent_index = (i / 2) - 1;
while (parent_index <= max_parent_index) {
int child_index = parent_index * 2 + 1;
- uint32_t child_hash = GetSortedKey(child_index)->Hash();
+ uint32_t child_hash = GetSortedKey(child_index).Hash();
if (child_index + 1 < i) {
- uint32_t right_child_hash = GetSortedKey(child_index + 1)->Hash();
+ uint32_t right_child_hash = GetSortedKey(child_index + 1).Hash();
if (right_child_hash > child_hash) {
child_index++;
child_hash = right_child_hash;
@@ -4298,12 +4297,12 @@
Handle<AccessorPair> accessor_pair,
AccessorComponent component) {
Object accessor = accessor_pair->get(component);
- if (accessor->IsFunctionTemplateInfo()) {
+ if (accessor.IsFunctionTemplateInfo()) {
return ApiNatives::InstantiateFunction(
handle(FunctionTemplateInfo::cast(accessor), isolate))
.ToHandleChecked();
}
- if (accessor->IsNull(isolate)) {
+ if (accessor.IsNull(isolate)) {
return isolate->factory()->undefined_value();
}
return handle(accessor, isolate);
@@ -4311,11 +4310,11 @@
#ifdef DEBUG
bool DescriptorArray::IsEqualTo(DescriptorArray other) {
- if (number_of_all_descriptors() != other->number_of_all_descriptors()) {
+ if (number_of_all_descriptors() != other.number_of_all_descriptors()) {
return false;
}
for (int i = 0; i < number_of_all_descriptors(); ++i) {
- if (get(i) != other->get(i)) return false;
+ if (get(i) != other.get(i)) return false;
}
return true;
}
@@ -4408,31 +4407,31 @@
sinkchar* sink, int sink_length) {
DisallowHeapAllocation no_allocation;
CHECK_GT(length, 0);
- CHECK_LE(length, fixed_array->length());
+ CHECK_LE(length, fixed_array.length());
#ifdef DEBUG
sinkchar* sink_end = sink + sink_length;
#endif
- const int separator_length = separator->length();
+ const int separator_length = separator.length();
const bool use_one_byte_separator_fast_path =
separator_length == 1 && sizeof(sinkchar) == 1 &&
StringShape(separator).IsSequentialOneByte();
uint8_t separator_one_char;
if (use_one_byte_separator_fast_path) {
CHECK(StringShape(separator).IsSequentialOneByte());
- CHECK_EQ(separator->length(), 1);
+ CHECK_EQ(separator.length(), 1);
separator_one_char =
- SeqOneByteString::cast(separator)->GetChars(no_allocation)[0];
+ SeqOneByteString::cast(separator).GetChars(no_allocation)[0];
}
uint32_t num_separators = 0;
for (int i = 0; i < length; i++) {
- Object element = fixed_array->get(i);
- const bool element_is_separator_sequence = element->IsSmi();
+ Object element = fixed_array.get(i);
+ const bool element_is_separator_sequence = element.IsSmi();
// If element is a Smi, it represents the number of separators to write.
if (V8_UNLIKELY(element_is_separator_sequence)) {
- CHECK(element->ToUint32(&num_separators));
+ CHECK(element.ToUint32(&num_separators));
// Verify that Smis (number of separators) only occur when necessary:
// 1) at the beginning
// 2) at the end
@@ -4464,9 +4463,9 @@
if (V8_UNLIKELY(element_is_separator_sequence)) {
num_separators = 0;
} else {
- DCHECK(element->IsString());
+ DCHECK(element.IsString());
String string = String::cast(element);
- const int string_length = string->length();
+ const int string_length = string.length();
DCHECK(string_length == 0 || sink < sink_end);
String::WriteToFlat(string, sink, 0, string_length);
@@ -4494,21 +4493,21 @@
FixedArray fixed_array = FixedArray::cast(Object(raw_fixed_array));
String separator = String::cast(Object(raw_separator));
String dest = String::cast(Object(raw_dest));
- DCHECK(fixed_array->IsFixedArray());
+ DCHECK(fixed_array.IsFixedArray());
DCHECK(StringShape(dest).IsSequentialOneByte() ||
StringShape(dest).IsSequentialTwoByte());
if (StringShape(dest).IsSequentialOneByte()) {
WriteFixedArrayToFlat(fixed_array, static_cast<int>(length), separator,
- SeqOneByteString::cast(dest)->GetChars(no_allocation),
- dest->length());
+ SeqOneByteString::cast(dest).GetChars(no_allocation),
+ dest.length());
} else {
DCHECK(StringShape(dest).IsSequentialTwoByte());
WriteFixedArrayToFlat(fixed_array, static_cast<int>(length), separator,
- SeqTwoByteString::cast(dest)->GetChars(no_allocation),
- dest->length());
+ SeqTwoByteString::cast(dest).GetChars(no_allocation),
+ dest.length());
}
- return dest->ptr();
+ return dest.ptr();
}
@@ -4595,7 +4594,7 @@
Handle<SharedFunctionInfo> shared =
handle(script->eval_from_shared(), isolate);
SharedFunctionInfo::EnsureSourcePositionsAvailable(isolate, shared);
- position = shared->abstract_code()->SourcePosition(-position);
+ position = shared->abstract_code().SourcePosition(-position);
}
DCHECK_GE(position, 0);
script->set_eval_from_position(position);
@@ -4605,22 +4604,22 @@
void Script::InitLineEnds(Handle<Script> script) {
Isolate* isolate = script->GetIsolate();
- if (!script->line_ends()->IsUndefined(isolate)) return;
+ if (!script->line_ends().IsUndefined(isolate)) return;
DCHECK(script->type() != Script::TYPE_WASM ||
- script->source_mapping_url()->IsString());
+ script->source_mapping_url().IsString());
Object src_obj = script->source();
- if (!src_obj->IsString()) {
- DCHECK(src_obj->IsUndefined(isolate));
+ if (!src_obj.IsString()) {
+ DCHECK(src_obj.IsUndefined(isolate));
script->set_line_ends(ReadOnlyRoots(isolate).empty_fixed_array());
} else {
- DCHECK(src_obj->IsString());
+ DCHECK(src_obj.IsString());
Handle<String> src(String::cast(src_obj), isolate);
Handle<FixedArray> array = String::CalculateLineEnds(isolate, src, true);
script->set_line_ends(*array);
}
- DCHECK(script->line_ends()->IsFixedArray());
+ DCHECK(script->line_ends().IsFixedArray());
}
bool Script::GetPositionInfo(Handle<Script> script, int position,
@@ -4638,7 +4637,7 @@
SharedFunctionInfo::ScriptIterator iter(this->GetIsolate(), *this);
for (SharedFunctionInfo info = iter.Next(); !info.is_null();
info = iter.Next()) {
- if (info->HasAsmWasmData()) return true;
+ if (info.HasAsmWasmData()) return true;
}
return false;
}
@@ -4646,15 +4645,15 @@
namespace {
bool GetPositionInfoSlow(const Script script, int position,
Script::PositionInfo* info) {
- if (!script->source()->IsString()) return false;
+ if (!script.source().IsString()) return false;
if (position < 0) position = 0;
- String source_string = String::cast(script->source());
+ String source_string = String::cast(script.source());
int line = 0;
int line_start = 0;
- int len = source_string->length();
+ int len = source_string.length();
for (int pos = 0; pos <= len; ++pos) {
- if (pos == len || source_string->Get(pos) == '\n') {
+ if (pos == len || source_string.Get(pos) == '\n') {
if (position <= pos) {
info->line = line;
info->column = position - line_start;
@@ -4680,29 +4679,29 @@
if (type() == Script::TYPE_WASM) {
DCHECK_LE(0, position);
return WasmModuleObject::cast(wasm_module_object())
- ->GetPositionInfo(static_cast<uint32_t>(position), info);
+ .GetPositionInfo(static_cast<uint32_t>(position), info);
}
- if (line_ends()->IsUndefined()) {
+ if (line_ends().IsUndefined()) {
// Slow mode: we do not have line_ends. We have to iterate through source.
if (!GetPositionInfoSlow(*this, position, info)) return false;
} else {
- DCHECK(line_ends()->IsFixedArray());
+ DCHECK(line_ends().IsFixedArray());
FixedArray ends = FixedArray::cast(line_ends());
- const int ends_len = ends->length();
+ const int ends_len = ends.length();
if (ends_len == 0) return false;
// Return early on invalid positions. Negative positions behave as if 0 was
// passed, and positions beyond the end of the script return as failure.
if (position < 0) {
position = 0;
- } else if (position > SMI_VALUE(ends->get(ends_len - 1))) {
+ } else if (position > SMI_VALUE(ends.get(ends_len - 1))) {
return false;
}
// Determine line number by doing a binary search on the line ends array.
- if (SMI_VALUE(ends->get(0)) >= position) {
+ if (SMI_VALUE(ends.get(0)) >= position) {
info->line = 0;
info->line_start = 0;
info->column = position;
@@ -4713,28 +4712,28 @@
while (right > 0) {
DCHECK_LE(left, right);
const int mid = (left + right) / 2;
- if (position > SMI_VALUE(ends->get(mid))) {
+ if (position > SMI_VALUE(ends.get(mid))) {
left = mid + 1;
- } else if (position <= SMI_VALUE(ends->get(mid - 1))) {
+ } else if (position <= SMI_VALUE(ends.get(mid - 1))) {
right = mid - 1;
} else {
info->line = mid;
break;
}
}
- DCHECK(SMI_VALUE(ends->get(info->line)) >= position &&
- SMI_VALUE(ends->get(info->line - 1)) < position);
- info->line_start = SMI_VALUE(ends->get(info->line - 1)) + 1;
+ DCHECK(SMI_VALUE(ends.get(info->line)) >= position &&
+ SMI_VALUE(ends.get(info->line - 1)) < position);
+ info->line_start = SMI_VALUE(ends.get(info->line - 1)) + 1;
info->column = position - info->line_start;
}
// Line end is position of the linebreak character.
- info->line_end = SMI_VALUE(ends->get(info->line));
+ info->line_end = SMI_VALUE(ends.get(info->line));
if (info->line_end > 0) {
- DCHECK(source()->IsString());
+ DCHECK(source().IsString());
String src = String::cast(source());
- if (src->length() >= info->line_end &&
- src->Get(info->line_end - 1) == '\r') {
+ if (src.length() >= info->line_end &&
+ src.Get(info->line_end - 1) == '\r') {
info->line_end--;
}
}
@@ -4778,7 +4777,7 @@
Object Script::GetNameOrSourceURL() {
// Keep in sync with ScriptNameOrSourceURL in messages.js.
- if (!source_url()->IsUndefined()) return source_url();
+ if (!source_url().IsUndefined()) return source_url();
return name();
}
@@ -4789,11 +4788,11 @@
// renumbering done by AstFunctionLiteralIdReindexer; in particular, that
// AstTraversalVisitor doesn't recurse properly in the construct which
// triggers the mismatch.
- CHECK_LT(fun->function_literal_id(), shared_function_infos()->length());
- MaybeObject shared = shared_function_infos()->Get(fun->function_literal_id());
+ CHECK_LT(fun->function_literal_id(), shared_function_infos().length());
+ MaybeObject shared = shared_function_infos().Get(fun->function_literal_id());
HeapObject heap_object;
if (!shared->GetHeapObject(&heap_object) ||
- heap_object->IsUndefined(isolate)) {
+ heap_object.IsUndefined(isolate)) {
return MaybeHandle<SharedFunctionInfo>();
}
return handle(SharedFunctionInfo::cast(heap_object), isolate);
@@ -4801,17 +4800,17 @@
std::unique_ptr<v8::tracing::TracedValue> Script::ToTracedValue() {
auto value = v8::tracing::TracedValue::Create();
- if (name()->IsString()) {
- value->SetString("name", String::cast(name())->ToCString());
+ if (name().IsString()) {
+ value->SetString("name", String::cast(name()).ToCString());
}
value->SetInteger("lineOffset", line_offset());
value->SetInteger("columnOffset", column_offset());
- if (source_mapping_url()->IsString()) {
+ if (source_mapping_url().IsString()) {
value->SetString("sourceMappingURL",
- String::cast(source_mapping_url())->ToCString());
+ String::cast(source_mapping_url()).ToCString());
}
- if (source()->IsString()) {
- value->SetString("source", String::cast(source())->ToCString());
+ if (source().IsString()) {
+ value->SetString("source", String::cast(source()).ToCString());
}
return value;
}
@@ -4846,7 +4845,7 @@
// don't use the function's literal id since getting that is slow for compiled
// funcitons.
int start_pos = StartPosition();
- int script_id = script()->IsScript() ? Script::cast(script())->id() : 0;
+ int script_id = script().IsScript() ? Script::cast(script()).id() : 0;
return static_cast<uint32_t>(base::hash_combine(start_pos, script_id));
}
@@ -4854,10 +4853,10 @@
FunctionLiteral* literal) {
auto value = v8::tracing::TracedValue::Create();
if (HasSharedName()) {
- value->SetString("name", Name()->ToCString());
+ value->SetString("name", Name().ToCString());
}
if (HasInferredName()) {
- value->SetString("inferredName", inferred_name()->ToCString());
+ value->SetString("inferredName", inferred_name().ToCString());
}
if (is_toplevel()) {
value->SetBoolean("isToplevel", true);
@@ -4865,16 +4864,16 @@
value->SetInteger("formalParameterCount", internal_formal_parameter_count());
value->SetString("languageMode", LanguageMode2String(language_mode()));
value->SetString("kind", FunctionKind2String(kind()));
- if (script()->IsScript()) {
- value->SetValue("script", Script::cast(script())->TraceIDRef());
+ if (script().IsScript()) {
+ value->SetValue("script", Script::cast(script()).TraceIDRef());
value->BeginDictionary("sourcePosition");
Script::PositionInfo info;
// We get the start position from the {literal} here, because the
// SharedFunctionInfo itself might not have a way to get to the
// start position early on (currently that's the case when it's
// marked for eager compilation).
- if (Script::cast(script())->GetPositionInfo(literal->start_position(),
- &info, Script::WITH_OFFSET)) {
+ if (Script::cast(script()).GetPositionInfo(literal->start_position(), &info,
+ Script::WITH_OFFSET)) {
value->SetInteger("line", info.line + 1);
value->SetInteger("column", info.column + 1);
}
@@ -4893,12 +4892,12 @@
// can add significant overhead, and we should probably find a better way
// to uniquely identify SharedFunctionInfos over time.
Script script = Script::cast(this->script());
- WeakFixedArray script_functions = script->shared_function_infos();
- for (int i = 0; i < script_functions->length(); ++i) {
+ WeakFixedArray script_functions = script.shared_function_infos();
+ for (int i = 0; i < script_functions.length(); ++i) {
HeapObject script_function;
- if (script_functions->Get(i).GetHeapObjectIfWeak(&script_function) &&
- script_function->address() == address()) {
- return (static_cast<uint64_t>(script->id() + 1) << 32) |
+ if (script_functions.Get(i).GetHeapObjectIfWeak(&script_function) &&
+ script_function.address() == address()) {
+ return (static_cast<uint64_t>(script.id() + 1) << 32) |
(static_cast<uint64_t>(i));
}
}
@@ -4923,37 +4922,37 @@
Isolate* isolate = GetIsolate();
Object data = function_data();
- if (data->IsSmi()) {
+ if (data.IsSmi()) {
// Holding a Smi means we are a builtin.
DCHECK(HasBuiltinId());
return isolate->builtins()->builtin(builtin_id());
- } else if (data->IsBytecodeArray()) {
+ } else if (data.IsBytecodeArray()) {
// Having a bytecode array means we are a compiled, interpreted function.
DCHECK(HasBytecodeArray());
return isolate->builtins()->builtin(Builtins::kInterpreterEntryTrampoline);
- } else if (data->IsAsmWasmData()) {
+ } else if (data.IsAsmWasmData()) {
// Having AsmWasmData means we are an asm.js/wasm function.
DCHECK(HasAsmWasmData());
return isolate->builtins()->builtin(Builtins::kInstantiateAsmJs);
- } else if (data->IsUncompiledData()) {
+ } else if (data.IsUncompiledData()) {
// Having uncompiled data (with or without scope) means we need to compile.
DCHECK(HasUncompiledData());
return isolate->builtins()->builtin(Builtins::kCompileLazy);
- } else if (data->IsFunctionTemplateInfo()) {
+ } else if (data.IsFunctionTemplateInfo()) {
// Having a function template info means we are an API function.
DCHECK(IsApiFunction());
return isolate->builtins()->builtin(Builtins::kHandleApiCall);
- } else if (data->IsWasmExportedFunctionData()) {
+ } else if (data.IsWasmExportedFunctionData()) {
// Having a WasmExportedFunctionData means the code is in there.
DCHECK(HasWasmExportedFunctionData());
- return wasm_exported_function_data()->wrapper_code();
- } else if (data->IsInterpreterData()) {
+ return wasm_exported_function_data().wrapper_code();
+ } else if (data.IsInterpreterData()) {
Code code = InterpreterTrampoline();
- DCHECK(code->IsCode());
- DCHECK(code->is_interpreter_trampoline_builtin());
+ DCHECK(code.IsCode());
+ DCHECK(code.is_interpreter_trampoline_builtin());
return code;
- } else if (data->IsWasmCapiFunctionData()) {
- return wasm_capi_function_data()->wrapper_code();
+ } else if (data.IsWasmCapiFunctionData()) {
+ return wasm_capi_function_data().wrapper_code();
}
UNREACHABLE();
}
@@ -4971,8 +4970,8 @@
SharedFunctionInfo::ScriptIterator::ScriptIterator(Isolate* isolate,
Script script)
- : ScriptIterator(isolate,
- handle(script->shared_function_infos(), isolate)) {}
+ : ScriptIterator(isolate, handle(script.shared_function_infos(), isolate)) {
+}
SharedFunctionInfo::ScriptIterator::ScriptIterator(
Isolate* isolate, Handle<WeakFixedArray> shared_function_infos)
@@ -4985,7 +4984,7 @@
MaybeObject raw = shared_function_infos_->Get(index_++);
HeapObject heap_object;
if (!raw->GetHeapObject(&heap_object) ||
- heap_object->IsUndefined(isolate_)) {
+ heap_object.IsUndefined(isolate_)) {
continue;
}
return SharedFunctionInfo::cast(heap_object);
@@ -4994,7 +4993,7 @@
}
void SharedFunctionInfo::ScriptIterator::Reset(Script script) {
- shared_function_infos_ = handle(script->shared_function_infos(), isolate_);
+ shared_function_infos_ = handle(script.shared_function_infos(), isolate_);
index_ = 0;
}
@@ -5032,7 +5031,7 @@
// This is okay because the gc-time processing of these lists can tolerate
// duplicates.
if (script_object->IsScript()) {
- DCHECK(!shared->script()->IsScript());
+ DCHECK(!shared->script().IsScript());
Handle<Script> script = Handle<Script>::cast(script_object);
Handle<WeakFixedArray> list =
handle(script->shared_function_infos(), isolate);
@@ -5049,9 +5048,9 @@
// Remove shared function info from root array.
WeakArrayList noscript_list =
isolate->heap()->noscript_shared_function_infos();
- CHECK(noscript_list->RemoveOne(MaybeObjectHandle::Weak(shared)));
+ CHECK(noscript_list.RemoveOne(MaybeObjectHandle::Weak(shared)));
} else {
- DCHECK(shared->script()->IsScript());
+ DCHECK(shared->script().IsScript());
Handle<WeakArrayList> list =
isolate->factory()->noscript_shared_function_infos();
@@ -5075,13 +5074,13 @@
// Due to liveedit, it might happen that the old_script doesn't know
// about the SharedFunctionInfo, so we have to guard against that.
- Handle<WeakFixedArray> infos(old_script->shared_function_infos(), isolate);
+ Handle<WeakFixedArray> infos(old_script.shared_function_infos(), isolate);
if (function_literal_id < infos->length()) {
MaybeObject raw =
- old_script->shared_function_infos()->Get(function_literal_id);
+ old_script.shared_function_infos().Get(function_literal_id);
HeapObject heap_object;
if (raw->GetHeapObjectIfWeak(&heap_object) && heap_object == *shared) {
- old_script->shared_function_infos()->Set(
+ old_script.shared_function_infos().Set(
function_literal_id, HeapObjectReference::Strong(
ReadOnlyRoots(isolate).undefined_value()));
}
@@ -5095,46 +5094,46 @@
bool SharedFunctionInfo::HasBreakInfo() const {
if (!HasDebugInfo()) return false;
DebugInfo info = GetDebugInfo();
- bool has_break_info = info->HasBreakInfo();
+ bool has_break_info = info.HasBreakInfo();
return has_break_info;
}
bool SharedFunctionInfo::BreakAtEntry() const {
if (!HasDebugInfo()) return false;
DebugInfo info = GetDebugInfo();
- bool break_at_entry = info->BreakAtEntry();
+ bool break_at_entry = info.BreakAtEntry();
return break_at_entry;
}
bool SharedFunctionInfo::HasCoverageInfo() const {
if (!HasDebugInfo()) return false;
DebugInfo info = GetDebugInfo();
- bool has_coverage_info = info->HasCoverageInfo();
+ bool has_coverage_info = info.HasCoverageInfo();
return has_coverage_info;
}
CoverageInfo SharedFunctionInfo::GetCoverageInfo() const {
DCHECK(HasCoverageInfo());
- return CoverageInfo::cast(GetDebugInfo()->coverage_info());
+ return CoverageInfo::cast(GetDebugInfo().coverage_info());
}
String SharedFunctionInfo::DebugName() {
DisallowHeapAllocation no_gc;
String function_name = Name();
- if (function_name->length() > 0) return function_name;
+ if (function_name.length() > 0) return function_name;
return inferred_name();
}
bool SharedFunctionInfo::PassesFilter(const char* raw_filter) {
Vector<const char> filter = CStrVector(raw_filter);
- std::unique_ptr<char[]> cstrname(DebugName()->ToCString());
+ std::unique_ptr<char[]> cstrname(DebugName().ToCString());
return v8::internal::PassesFilter(CStrVector(cstrname.get()), filter);
}
bool SharedFunctionInfo::HasSourceCode() const {
Isolate* isolate = GetIsolate();
- return !script()->IsUndefined(isolate) &&
- !Script::cast(script())->source()->IsUndefined(isolate);
+ return !script().IsUndefined(isolate) &&
+ !Script::cast(script()).source().IsUndefined(isolate);
}
void SharedFunctionInfo::DiscardCompiledMetadata(
@@ -5144,8 +5143,8 @@
DisallowHeapAllocation no_gc;
if (is_compiled()) {
HeapObject outer_scope_info;
- if (scope_info()->HasOuterScopeInfo()) {
- outer_scope_info = scope_info()->OuterScopeInfo();
+ if (scope_info().HasOuterScopeInfo()) {
+ outer_scope_info = scope_info().OuterScopeInfo();
} else {
outer_scope_info = ReadOnlyRoots(isolate).the_hole_value();
}
@@ -5158,8 +5157,7 @@
RawField(SharedFunctionInfo::kOuterScopeInfoOrFeedbackMetadataOffset),
outer_scope_info);
} else {
- DCHECK(outer_scope_info()->IsScopeInfo() ||
- outer_scope_info()->IsTheHole());
+ DCHECK(outer_scope_info().IsScopeInfo() || outer_scope_info().IsTheHole());
}
// TODO(rmcilroy): Possibly discard ScopeInfo here as well.
@@ -5200,7 +5198,7 @@
Handle<SharedFunctionInfo> shared) {
Isolate* isolate = shared->GetIsolate();
if (!shared->HasSourceCode()) return isolate->factory()->undefined_value();
- Handle<String> source(String::cast(Script::cast(shared->script())->source()),
+ Handle<String> source(String::cast(Script::cast(shared->script()).source()),
isolate);
return isolate->factory()->NewSubString(source, shared->StartPosition(),
shared->EndPosition());
@@ -5212,7 +5210,7 @@
Isolate* isolate = shared->GetIsolate();
if (!shared->HasSourceCode()) return isolate->factory()->undefined_value();
Handle<String> script_source(
- String::cast(Script::cast(shared->script())->source()), isolate);
+ String::cast(Script::cast(shared->script()).source()), isolate);
int start_pos = shared->function_token_position();
DCHECK_NE(start_pos, kNoSourcePosition);
Handle<String> source = isolate->factory()->NewSubString(
@@ -5224,7 +5222,7 @@
builder.AppendCString("function ");
builder.AppendString(Handle<String>(shared->Name(), isolate));
builder.AppendCString("(");
- Handle<FixedArray> args(Script::cast(shared->script())->wrapped_arguments(),
+ Handle<FixedArray> args(Script::cast(shared->script()).wrapped_arguments(),
isolate);
int argc = args->length();
for (int i = 0; i < argc; i++) {
@@ -5247,7 +5245,7 @@
} // namespace
bool SharedFunctionInfo::IsInlineable() {
- if (!script()->IsScript()) {
+ if (!script().IsScript()) {
TraceInlining(*this, "false (no Script associated with it)");
return false;
}
@@ -5283,7 +5281,7 @@
return false;
}
- if (GetBytecodeArray()->length() > FLAG_max_inlined_bytecode_size) {
+ if (GetBytecodeArray().length() > FLAG_max_inlined_bytecode_size) {
TraceInlining(*this, "false (length > FLAG_max_inlined_bytecode_size)");
return false;
}
@@ -5303,10 +5301,10 @@
DisallowHeapAllocation no_gc;
Object script_obj = script();
- if (!script_obj->IsScript()) return kFunctionLiteralIdInvalid;
+ if (!script_obj.IsScript()) return kFunctionLiteralIdInvalid;
WeakFixedArray shared_info_list =
- Script::cast(script_obj)->shared_function_infos();
+ Script::cast(script_obj).shared_function_infos();
SharedFunctionInfo::ScriptIterator iterator(
isolate,
Handle<WeakFixedArray>(reinterpret_cast<Address*>(&shared_info_list)));
@@ -5326,31 +5324,31 @@
std::ostream& operator<<(std::ostream& os, const SourceCodeOf& v) {
const SharedFunctionInfo s = v.value;
// For some native functions there is no source.
- if (!s->HasSourceCode()) return os << "<No Source>";
+ if (!s.HasSourceCode()) return os << "<No Source>";
// Get the source for the script which this function came from.
// Don't use String::cast because we don't want more assertion errors while
// we are already creating a stack dump.
String script_source =
- String::unchecked_cast(Script::cast(s->script())->source());
+ String::unchecked_cast(Script::cast(s.script()).source());
- if (!script_source->LooksValid()) return os << "<Invalid Source>";
+ if (!script_source.LooksValid()) return os << "<Invalid Source>";
- if (!s->is_toplevel()) {
+ if (!s.is_toplevel()) {
os << "function ";
- String name = s->Name();
- if (name->length() > 0) {
- name->PrintUC16(os);
+ String name = s.Name();
+ if (name.length() > 0) {
+ name.PrintUC16(os);
}
}
- int len = s->EndPosition() - s->StartPosition();
+ int len = s.EndPosition() - s.StartPosition();
if (len <= v.max_length || v.max_length < 0) {
- script_source->PrintUC16(os, s->StartPosition(), s->EndPosition());
+ script_source.PrintUC16(os, s.StartPosition(), s.EndPosition());
return os;
} else {
- script_source->PrintUC16(os, s->StartPosition(),
- s->StartPosition() + v.max_length);
+ script_source.PrintUC16(os, s.StartPosition(),
+ s.StartPosition() + v.max_length);
return os << "...\n";
}
}
@@ -5361,8 +5359,8 @@
set_flags(DisabledOptimizationReasonBits::update(flags(), reason));
// Code should be the lazy compilation stub or else interpreted.
- DCHECK(abstract_code()->kind() == AbstractCode::INTERPRETED_FUNCTION ||
- abstract_code()->kind() == AbstractCode::BUILTIN);
+ DCHECK(abstract_code().kind() == AbstractCode::INTERPRETED_FUNCTION ||
+ abstract_code().kind() == AbstractCode::BUILTIN);
PROFILE(GetIsolate(), CodeDisableOptEvent(abstract_code(), *this));
if (FLAG_trace_opt) {
PrintF("[disabled optimization for ");
@@ -5382,9 +5380,9 @@
shared_info->set_internal_formal_parameter_count(lit->parameter_count());
shared_info->SetFunctionTokenPosition(lit->function_token_position(),
lit->start_position());
- if (shared_info->scope_info()->HasPositionInfo()) {
- shared_info->scope_info()->SetPositionInfo(lit->start_position(),
- lit->end_position());
+ if (shared_info->scope_info().HasPositionInfo()) {
+ shared_info->scope_info().SetPositionInfo(lit->start_position(),
+ lit->end_position());
needs_position_info = false;
}
shared_info->set_is_declaration(lit->is_declaration());
@@ -5403,7 +5401,7 @@
lit->requires_instance_members_initializer());
shared_info->set_is_toplevel(is_toplevel);
- DCHECK(shared_info->outer_scope_info()->IsTheHole());
+ DCHECK(shared_info->outer_scope_info().IsTheHole());
if (!is_toplevel) {
Scope* outer_scope = lit->scope()->GetOuterScopeWithContext();
if (outer_scope) {
@@ -5506,14 +5504,14 @@
int SharedFunctionInfo::StartPosition() const {
Object maybe_scope_info = name_or_scope_info();
- if (maybe_scope_info->IsScopeInfo()) {
+ if (maybe_scope_info.IsScopeInfo()) {
ScopeInfo info = ScopeInfo::cast(maybe_scope_info);
- if (info->HasPositionInfo()) {
- return info->StartPosition();
+ if (info.HasPositionInfo()) {
+ return info.StartPosition();
}
} else if (HasUncompiledData()) {
// Works with or without scope.
- return uncompiled_data()->start_position();
+ return uncompiled_data().start_position();
} else if (IsApiFunction() || HasBuiltinId()) {
DCHECK_IMPLIES(HasBuiltinId(), builtin_id() != Builtins::kCompileLazy);
return 0;
@@ -5523,14 +5521,14 @@
int SharedFunctionInfo::EndPosition() const {
Object maybe_scope_info = name_or_scope_info();
- if (maybe_scope_info->IsScopeInfo()) {
+ if (maybe_scope_info.IsScopeInfo()) {
ScopeInfo info = ScopeInfo::cast(maybe_scope_info);
- if (info->HasPositionInfo()) {
- return info->EndPosition();
+ if (info.HasPositionInfo()) {
+ return info.EndPosition();
}
} else if (HasUncompiledData()) {
// Works with or without scope.
- return uncompiled_data()->end_position();
+ return uncompiled_data().end_position();
} else if (IsApiFunction() || HasBuiltinId()) {
DCHECK_IMPLIES(HasBuiltinId(), builtin_id() != Builtins::kCompileLazy);
return 0;
@@ -5541,8 +5539,8 @@
int SharedFunctionInfo::FunctionLiteralId(Isolate* isolate) const {
// Fast path for the common case when the SFI is uncompiled and so the
// function literal id is already in the uncompiled data.
- if (HasUncompiledData() && uncompiled_data()->has_function_literal_id()) {
- int id = uncompiled_data()->function_literal_id();
+ if (HasUncompiledData() && uncompiled_data().has_function_literal_id()) {
+ int id = uncompiled_data().function_literal_id();
// Make sure the id is what we should have found with the slow path.
DCHECK_EQ(id, FindIndexInScript(isolate));
return id;
@@ -5555,10 +5553,10 @@
void SharedFunctionInfo::SetPosition(int start_position, int end_position) {
Object maybe_scope_info = name_or_scope_info();
- if (maybe_scope_info->IsScopeInfo()) {
+ if (maybe_scope_info.IsScopeInfo()) {
ScopeInfo info = ScopeInfo::cast(maybe_scope_info);
- if (info->HasPositionInfo()) {
- info->SetPositionInfo(start_position, end_position);
+ if (info.HasPositionInfo()) {
+ info.SetPositionInfo(start_position, end_position);
}
} else if (HasUncompiledData()) {
if (HasUncompiledDataWithPreparseData()) {
@@ -5566,8 +5564,8 @@
// any scope data.
ClearPreparseData();
}
- uncompiled_data()->set_start_position(start_position);
- uncompiled_data()->set_end_position(end_position);
+ uncompiled_data().set_start_position(start_position);
+ uncompiled_data().set_end_position(end_position);
} else {
UNREACHABLE();
}
@@ -5575,7 +5573,7 @@
bool SharedFunctionInfo::AreSourcePositionsAvailable() const {
if (FLAG_enable_lazy_source_positions) {
- return !HasBytecodeArray() || GetBytecodeArray()->HasSourcePositionTable();
+ return !HasBytecodeArray() || GetBytecodeArray().HasSourcePositionTable();
}
return true;
}
@@ -5584,16 +5582,16 @@
void SharedFunctionInfo::EnsureSourcePositionsAvailable(
Isolate* isolate, Handle<SharedFunctionInfo> shared_info) {
if (FLAG_enable_lazy_source_positions && shared_info->HasBytecodeArray() &&
- !shared_info->GetBytecodeArray()->HasSourcePositionTable()) {
+ !shared_info->GetBytecodeArray().HasSourcePositionTable()) {
Compiler::CollectSourcePositions(isolate, shared_info);
}
}
bool BytecodeArray::IsBytecodeEqual(const BytecodeArray other) const {
- if (length() != other->length()) return false;
+ if (length() != other.length()) return false;
for (int i = 0; i < length(); ++i) {
- if (get(i) != other->get(i)) return false;
+ if (get(i) != other.get(i)) return false;
}
return true;
@@ -5692,7 +5690,7 @@
bool JSArray::SetLengthWouldNormalize(uint32_t new_length) {
if (!HasFastElements()) return false;
- uint32_t capacity = static_cast<uint32_t>(elements()->length());
+ uint32_t capacity = static_cast<uint32_t>(elements().length());
uint32_t new_capacity;
return JSArray::SetLengthWouldNormalize(GetHeap(), new_length) &&
ShouldConvertToSlowElements(*this, capacity, new_length - 1,
@@ -5717,13 +5715,13 @@
bool AllocationSite::IsNested() {
DCHECK(FLAG_trace_track_allocation_sites);
- Object current = boilerplate()->GetHeap()->allocation_sites_list();
- while (current->IsAllocationSite()) {
+ Object current = boilerplate().GetHeap()->allocation_sites_list();
+ while (current.IsAllocationSite()) {
AllocationSite current_site = AllocationSite::cast(current);
- if (current_site->nested_site() == *this) {
+ if (current_site.nested_site() == *this) {
return true;
}
- current = current_site->weak_next();
+ current = current_site.weak_next();
}
return false;
}
@@ -5752,10 +5750,10 @@
Map map = array->map();
// Fast path: "length" is the first fast property of arrays. Since it's not
// configurable, it's guaranteed to be the first in the descriptor array.
- if (!map->is_dictionary_map()) {
- DCHECK(map->instance_descriptors()->GetKey(0) ==
+ if (!map.is_dictionary_map()) {
+ DCHECK(map.instance_descriptors().GetKey(0) ==
array->GetReadOnlyRoots().length_string());
- return map->instance_descriptors()->GetDetails(0).IsReadOnly();
+ return map.instance_descriptors().GetDetails(0).IsReadOnly();
}
Isolate* isolate = array->GetIsolate();
@@ -5769,7 +5767,7 @@
bool JSArray::WouldChangeReadOnlyLength(Handle<JSArray> array,
uint32_t index) {
uint32_t length = 0;
- CHECK(array->length()->ToArrayLength(&length));
+ CHECK(array->length().ToArrayLength(&length));
if (length <= index) return HasReadOnlyLength(array);
return false;
}
@@ -5787,18 +5785,18 @@
DisallowHeapAllocation no_gc;
ReadOnlyRoots roots = this->GetReadOnlyRoots();
Derived dictionary = Derived::cast(*this);
- int capacity = dictionary->Capacity();
+ int capacity = dictionary.Capacity();
for (int i = 0; i < capacity; i++) {
- Object k = dictionary->KeyAt(i);
- if (!dictionary->ToKey(roots, i, &k)) continue;
+ Object k = dictionary.KeyAt(i);
+ if (!dictionary.ToKey(roots, i, &k)) continue;
os << "\n ";
- if (k->IsString()) {
- String::cast(k)->StringPrint(os);
+ if (k.IsString()) {
+ String::cast(k).StringPrint(os);
} else {
os << Brief(k);
}
- os << ": " << Brief(dictionary->ValueAt(i)) << " ";
- dictionary->DetailsAt(i).PrintAsSlowTo(os);
+ os << ": " << Brief(dictionary.ValueAt(i)) << " ";
+ dictionary.DetailsAt(i).PrintAsSlowTo(os);
}
}
template <typename Derived, typename Shape>
@@ -5833,11 +5831,11 @@
void Symbol::SymbolShortPrint(std::ostream& os) {
os << "<Symbol:";
- if (!name()->IsUndefined()) {
+ if (!name().IsUndefined()) {
os << " ";
HeapStringAllocator allocator;
StringStream accumulator(&allocator);
- String::cast(name())->StringShortPrint(&accumulator, false);
+ String::cast(name()).StringShortPrint(&accumulator, false);
os << accumulator.ToCString().get();
} else {
os << " (" << PrivateSymbolToName() << ")";
@@ -5870,22 +5868,22 @@
bool IsMatch(Object other) override {
DisallowHeapAllocation no_allocation;
- if (!other->IsFixedArray()) {
- DCHECK(other->IsNumber());
- uint32_t other_hash = static_cast<uint32_t>(other->Number());
+ if (!other.IsFixedArray()) {
+ DCHECK(other.IsNumber());
+ uint32_t other_hash = static_cast<uint32_t>(other.Number());
return Hash() == other_hash;
}
FixedArray other_array = FixedArray::cast(other);
- SharedFunctionInfo shared = SharedFunctionInfo::cast(other_array->get(0));
+ SharedFunctionInfo shared = SharedFunctionInfo::cast(other_array.get(0));
if (shared != *shared_) return false;
- int language_unchecked = Smi::ToInt(other_array->get(2));
+ int language_unchecked = Smi::ToInt(other_array.get(2));
DCHECK(is_valid_language_mode(language_unchecked));
LanguageMode language_mode = static_cast<LanguageMode>(language_unchecked);
if (language_mode != language_mode_) return false;
- int position = Smi::ToInt(other_array->get(3));
+ int position = Smi::ToInt(other_array.get(3));
if (position != position_) return false;
- String source = String::cast(other_array->get(1));
- return source->Equals(*source_);
+ String source = String::cast(other_array.get(1));
+ return source.Equals(*source_);
}
Handle<Object> AsHandle(Isolate* isolate) {
@@ -6083,9 +6081,9 @@
DisallowHeapAllocation no_gc;
Object current = *reactions;
Object reversed = Smi::kZero;
- while (!current->IsSmi()) {
- Object next = PromiseReaction::cast(current)->next();
- PromiseReaction::cast(current)->set_next(reversed);
+ while (!current.IsSmi()) {
+ Object next = PromiseReaction::cast(current).next();
+ PromiseReaction::cast(current).set_next(reversed);
reversed = current;
current = next;
}
@@ -6402,9 +6400,9 @@
regexp->set_flags(Smi::FromInt(flags));
Map map = regexp->map();
- Object constructor = map->GetConstructor();
- if (constructor->IsJSFunction() &&
- JSFunction::cast(constructor)->initial_map() == map) {
+ Object constructor = map.GetConstructor();
+ if (constructor.IsJSFunction() &&
+ JSFunction::cast(constructor).initial_map() == map) {
// If we still have the original map, set in-object properties directly.
regexp->InObjectPropertyAtPut(JSRegExp::kLastIndexFieldIndex, Smi::kZero,
SKIP_WRITE_BARRIER);
@@ -6436,8 +6434,8 @@
// a key to a key.
bool IsMatch(Object obj) override {
FixedArray val = FixedArray::cast(obj);
- return string_->Equals(String::cast(val->get(JSRegExp::kSourceIndex)))
- && (flags_ == val->get(JSRegExp::kFlagsIndex));
+ return string_->Equals(String::cast(val.get(JSRegExp::kSourceIndex))) &&
+ (flags_ == val.get(JSRegExp::kFlagsIndex));
}
Handle<String> string_;
@@ -6535,13 +6533,13 @@
template <typename Derived, typename Shape>
void HashTable<Derived, Shape>::Rehash(ReadOnlyRoots roots, Derived new_table) {
DisallowHeapAllocation no_gc;
- WriteBarrierMode mode = new_table->GetWriteBarrierMode(no_gc);
+ WriteBarrierMode mode = new_table.GetWriteBarrierMode(no_gc);
- DCHECK_LT(NumberOfElements(), new_table->Capacity());
+ DCHECK_LT(NumberOfElements(), new_table.Capacity());
// Copy prefix to new array.
for (int i = kPrefixStartIndex; i < kElementsStartIndex; i++) {
- new_table->set(i, get(i), mode);
+ new_table.set(i, get(i), mode);
}
// Rehash the elements.
@@ -6551,15 +6549,14 @@
Object k = this->get(from_index);
if (!Shape::IsLive(roots, k)) continue;
uint32_t hash = Shape::HashForObject(roots, k);
- uint32_t insertion_index =
- EntryToIndex(new_table->FindInsertionEntry(hash));
- new_table->set_key(insertion_index, get(from_index), mode);
+ uint32_t insertion_index = EntryToIndex(new_table.FindInsertionEntry(hash));
+ new_table.set_key(insertion_index, get(from_index), mode);
for (int j = 1; j < Shape::kEntrySize; j++) {
- new_table->set(insertion_index + j, get(from_index + j), mode);
+ new_table.set(insertion_index + j, get(from_index + j), mode);
}
}
- new_table->SetNumberOfElements(NumberOfElements());
- new_table->SetNumberOfDeletedElements(0);
+ new_table.SetNumberOfElements(NumberOfElements());
+ new_table.SetNumberOfDeletedElements(0);
}
template <typename Derived, typename Shape>
@@ -6862,16 +6859,16 @@
return Smi::FromInt(ResultSentinel::kUnsupported).ptr();
}
- int entry = table->FindEntry(ReadOnlyRoots(isolate), &key, key.hash());
+ int entry = table.FindEntry(ReadOnlyRoots(isolate), &key, key.hash());
if (entry == kNotFound) {
// A string that's not an array index, and not in the string table,
// cannot have been used as a property name before.
return Smi::FromInt(ResultSentinel::kNotFound).ptr();
}
- String internalized = String::cast(table->KeyAt(entry));
+ String internalized = String::cast(table.KeyAt(entry));
if (FLAG_thin_strings) {
- string->MakeThin(isolate, internalized);
+ string.MakeThin(isolate, internalized);
}
return internalized.ptr();
}
@@ -6933,7 +6930,7 @@
Handle<ObjectHashSet> ObjectHashSet::Add(Isolate* isolate,
Handle<ObjectHashSet> set,
Handle<Object> key) {
- int32_t hash = key->GetOrCreateHash(isolate)->value();
+ int32_t hash = key->GetOrCreateHash(isolate).value();
if (!set->Has(isolate, key, hash)) {
set = EnsureCapacity(isolate, set, 1);
int entry = set->FindInsertionEntry(hash);
@@ -6953,18 +6950,18 @@
int SearchLiteralsMapEntry(CompilationCacheTable cache, int cache_entry,
Context native_context) {
DisallowHeapAllocation no_gc;
- DCHECK(native_context->IsNativeContext());
- Object obj = cache->get(cache_entry);
+ DCHECK(native_context.IsNativeContext());
+ Object obj = cache.get(cache_entry);
// Check that there's no confusion between FixedArray and WeakFixedArray (the
// object used to be a FixedArray here).
- DCHECK(!obj->IsFixedArray());
- if (obj->IsWeakFixedArray()) {
+ DCHECK(!obj.IsFixedArray());
+ if (obj.IsWeakFixedArray()) {
WeakFixedArray literals_map = WeakFixedArray::cast(obj);
- int length = literals_map->length();
+ int length = literals_map.length();
for (int i = 0; i < length; i += kLiteralEntryLength) {
- DCHECK(literals_map->Get(i + kLiteralContextOffset)->IsWeakOrCleared());
- if (literals_map->Get(i + kLiteralContextOffset) ==
+ DCHECK(literals_map.Get(i + kLiteralContextOffset)->IsWeakOrCleared());
+ if (literals_map.Get(i + kLiteralContextOffset) ==
HeapObjectReference::Weak(native_context)) {
return i;
}
@@ -6986,8 +6983,8 @@
// Check that there's no confusion between FixedArray and WeakFixedArray (the
// object used to be a FixedArray here).
- DCHECK(!obj->IsFixedArray());
- if (!obj->IsWeakFixedArray() || WeakFixedArray::cast(obj)->length() == 0) {
+ DCHECK(!obj.IsFixedArray());
+ if (!obj.IsWeakFixedArray() || WeakFixedArray::cast(obj).length() == 0) {
new_literals_map = isolate->factory()->NewWeakFixedArray(
kLiteralInitialLength, AllocationType::kOld);
entry = 0;
@@ -7029,10 +7026,10 @@
for (int i = 0; i < new_literals_map->length(); i += kLiteralEntryLength) {
MaybeObject object = new_literals_map->Get(i + kLiteralContextOffset);
DCHECK(object->IsCleared() ||
- object->GetHeapObjectAssumeWeak()->IsNativeContext());
+ object->GetHeapObjectAssumeWeak().IsNativeContext());
object = new_literals_map->Get(i + kLiteralLiteralsOffset);
DCHECK(object->IsCleared() ||
- object->GetHeapObjectAssumeWeak()->IsFeedbackCell());
+ object->GetHeapObjectAssumeWeak().IsFeedbackCell());
}
#endif
@@ -7047,15 +7044,15 @@
FeedbackCell result;
int entry = SearchLiteralsMapEntry(cache, cache_entry, native_context);
if (entry >= 0) {
- WeakFixedArray literals_map = WeakFixedArray::cast(cache->get(cache_entry));
- DCHECK_LE(entry + kLiteralEntryLength, literals_map->length());
- MaybeObject object = literals_map->Get(entry + kLiteralLiteralsOffset);
+ WeakFixedArray literals_map = WeakFixedArray::cast(cache.get(cache_entry));
+ DCHECK_LE(entry + kLiteralEntryLength, literals_map.length());
+ MaybeObject object = literals_map.Get(entry + kLiteralLiteralsOffset);
if (!object->IsCleared()) {
result = FeedbackCell::cast(object->GetHeapObjectAssumeWeak());
}
}
- DCHECK(result.is_null() || result->IsFeedbackCell());
+ DCHECK(result.is_null() || result.IsFeedbackCell());
return result;
}
@@ -7068,7 +7065,7 @@
// empty_function is native context dependent, the SFI is de-duped on
// snapshot builds by the PartialSnapshotCache, and so this does not prevent
// reuse of scripts in the compilation cache across native contexts.
- Handle<SharedFunctionInfo> shared(native_context->empty_function()->shared(),
+ Handle<SharedFunctionInfo> shared(native_context->empty_function().shared(),
native_context->GetIsolate());
Isolate* isolate = native_context->GetIsolate();
src = String::Flatten(isolate, src);
@@ -7076,11 +7073,11 @@
int entry = table->FindEntry(isolate, &key);
if (entry == kNotFound) return MaybeHandle<SharedFunctionInfo>();
int index = EntryToIndex(entry);
- if (!table->get(index)->IsFixedArray()) {
+ if (!table->get(index).IsFixedArray()) {
return MaybeHandle<SharedFunctionInfo>();
}
Object obj = table->get(index + 1);
- if (obj->IsSharedFunctionInfo()) {
+ if (obj.IsSharedFunctionInfo()) {
return handle(SharedFunctionInfo::cast(obj), native_context->GetIsolate());
}
return MaybeHandle<SharedFunctionInfo>();
@@ -7097,9 +7094,9 @@
int entry = table->FindEntry(isolate, &key);
if (entry == kNotFound) return empty_result;
int index = EntryToIndex(entry);
- if (!table->get(index)->IsFixedArray()) return empty_result;
+ if (!table->get(index).IsFixedArray()) return empty_result;
Object obj = table->get(EntryToIndex(entry) + 1);
- if (obj->IsSharedFunctionInfo()) {
+ if (obj.IsSharedFunctionInfo()) {
FeedbackCell feedback_cell =
SearchLiteralsMap(*table, EntryToIndex(entry) + 2, *native_context);
return InfoCellPair(SharedFunctionInfo::cast(obj), feedback_cell);
@@ -7126,7 +7123,7 @@
// empty_function is native context dependent, the SFI is de-duped on
// snapshot builds by the PartialSnapshotCache, and so this does not prevent
// reuse of scripts in the compilation cache across native contexts.
- Handle<SharedFunctionInfo> shared(native_context->empty_function()->shared(),
+ Handle<SharedFunctionInfo> shared(native_context->empty_function().shared(),
isolate);
src = String::Flatten(isolate, src);
StringSharedKey key(src, shared, language_mode, kNoSourcePosition);
@@ -7195,19 +7192,19 @@
int entry_index = EntryToIndex(entry);
int value_index = entry_index + 1;
- if (get(entry_index)->IsNumber()) {
+ if (get(entry_index).IsNumber()) {
Smi count = Smi::cast(get(value_index));
- count = Smi::FromInt(count->value() - 1);
- if (count->value() == 0) {
+ count = Smi::FromInt(count.value() - 1);
+ if (count.value() == 0) {
NoWriteBarrierSet(*this, entry_index, the_hole_value);
NoWriteBarrierSet(*this, value_index, the_hole_value);
ElementRemoved();
} else {
NoWriteBarrierSet(*this, value_index, count);
}
- } else if (get(entry_index)->IsFixedArray()) {
+ } else if (get(entry_index).IsFixedArray()) {
SharedFunctionInfo info = SharedFunctionInfo::cast(get(value_index));
- if (info->IsInterpreted() && info->GetBytecodeArray()->IsOld()) {
+ if (info.IsInterpreted() && info.GetBytecodeArray().IsOld()) {
for (int i = 0; i < kEntrySize; i++) {
NoWriteBarrierSet(*this, entry_index + i, the_hole_value);
}
@@ -7349,8 +7346,8 @@
uint32_t entry = dictionary->FindInsertionEntry(hash);
dictionary->SetEntry(isolate, entry, *k, *value, details);
- DCHECK(dictionary->KeyAt(entry)->IsNumber() ||
- Shape::Unwrap(dictionary->KeyAt(entry))->IsUniqueName());
+ DCHECK(dictionary->KeyAt(entry).IsNumber() ||
+ Shape::Unwrap(dictionary->KeyAt(entry)).IsUniqueName());
dictionary->ElementAdded();
if (entry_out) *entry_out = entry;
return dictionary;
@@ -7380,7 +7377,7 @@
}
// Update max key value.
Object max_index_object = get(kMaxNumberKeyIndex);
- if (!max_index_object->IsSmi() || max_number_key() < key) {
+ if (!max_index_object.IsSmi() || max_number_key() < key) {
FixedArray::set(kMaxNumberKeyIndex,
Smi::FromInt(key << kRequiresSlowElementsTagSize));
}
@@ -7399,14 +7396,14 @@
int pos = 0;
int capacity = this->Capacity();
DisallowHeapAllocation no_gc;
- WriteBarrierMode mode = elements->GetWriteBarrierMode(no_gc);
+ WriteBarrierMode mode = elements.GetWriteBarrierMode(no_gc);
for (int i = 0; i < capacity; i++) {
Object k;
if (this->ToKey(roots, i, &k)) {
- elements->set(pos++, this->ValueAt(i), mode);
+ elements.set(pos++, this->ValueAt(i), mode);
}
}
- DCHECK_EQ(pos, elements->length());
+ DCHECK_EQ(pos, elements.length());
}
template <typename Derived, typename Shape>
@@ -7417,7 +7414,7 @@
for (int i = 0; i < capacity; i++) {
Object k;
if (!this->ToKey(roots, i, &k)) continue;
- if (k->FilterKey(ENUMERABLE_STRINGS)) continue;
+ if (k.FilterKey(ENUMERABLE_STRINGS)) continue;
PropertyDetails details = this->DetailsAt(i);
PropertyAttributes attr = details.attributes();
if ((attr & ONLY_ENUMERABLE) == 0) result++;
@@ -7430,8 +7427,8 @@
struct EnumIndexComparator {
explicit EnumIndexComparator(Dictionary dict) : dict(dict) {}
bool operator()(Tagged_t a, Tagged_t b) {
- PropertyDetails da(dict->DetailsAt(Smi(static_cast<Address>(a)).value()));
- PropertyDetails db(dict->DetailsAt(Smi(static_cast<Address>(b)).value()));
+ PropertyDetails da(dict.DetailsAt(Smi(static_cast<Address>(a)).value()));
+ PropertyDetails db(dict.DetailsAt(Smi(static_cast<Address>(b)).value()));
return da.dictionary_index() < db.dictionary_index();
}
Dictionary dict;
@@ -7450,7 +7447,7 @@
Object key;
if (!dictionary->ToKey(roots, i, &key)) continue;
bool is_shadowing_key = false;
- if (key->IsSymbol()) continue;
+ if (key.IsSymbol()) continue;
PropertyDetails details = dictionary->DetailsAt(i);
if (details.IsDontEnum()) {
if (mode == KeyCollectionMode::kIncludePrototypes) {
@@ -7479,8 +7476,8 @@
AtomicSlot start(storage->GetFirstElementAddress());
std::sort(start, start + length, cmp);
for (int i = 0; i < length; i++) {
- int index = Smi::ToInt(raw_storage->get(i));
- raw_storage->set(i, raw_dictionary->NameAt(index));
+ int index = Smi::ToInt(raw_storage.get(i));
+ raw_storage.set(i, raw_dictionary.NameAt(index));
}
}
@@ -7497,7 +7494,7 @@
Derived raw_dictionary = *dictionary;
for (int i = 0; i < capacity; i++) {
Object k;
- if (!raw_dictionary->ToKey(roots, i, &k)) continue;
+ if (!raw_dictionary.ToKey(roots, i, &k)) continue;
array->set(array_size++, Smi::FromInt(i));
}
@@ -7527,18 +7524,18 @@
Derived raw_dictionary = *dictionary;
for (int i = 0; i < capacity; i++) {
Object k;
- if (!raw_dictionary->ToKey(roots, i, &k)) continue;
- if (k->FilterKey(filter)) continue;
- PropertyDetails details = raw_dictionary->DetailsAt(i);
+ if (!raw_dictionary.ToKey(roots, i, &k)) continue;
+ if (k.FilterKey(filter)) continue;
+ PropertyDetails details = raw_dictionary.DetailsAt(i);
if ((details.attributes() & filter) != 0) {
keys->AddShadowingKey(k);
continue;
}
if (filter & ONLY_ALL_CAN_READ) {
if (details.kind() != kAccessor) continue;
- Object accessors = raw_dictionary->ValueAt(i);
- if (!accessors->IsAccessorInfo()) continue;
- if (!AccessorInfo::cast(accessors)->all_can_read()) continue;
+ Object accessors = raw_dictionary.ValueAt(i);
+ if (!accessors.IsAccessorInfo()) continue;
+ if (!AccessorInfo::cast(accessors).all_can_read()) continue;
}
array->set(array_size++, Smi::FromInt(i));
}
@@ -7554,7 +7551,7 @@
for (int i = 0; i < array_size; i++) {
int index = Smi::ToInt(array->get(i));
Object key = dictionary->NameAt(index);
- if (key->IsSymbol()) {
+ if (key.IsSymbol()) {
has_seen_symbol = true;
continue;
}
@@ -7564,7 +7561,7 @@
for (int i = 0; i < array_size; i++) {
int index = Smi::ToInt(array->get(i));
Object key = dictionary->NameAt(index);
- if (!key->IsSymbol()) continue;
+ if (!key.IsSymbol()) continue;
keys->AddKey(key, DO_NOT_CONVERT);
}
}
@@ -7574,12 +7571,12 @@
template <typename Derived, typename Shape>
Object Dictionary<Derived, Shape>::SlowReverseLookup(Object value) {
Derived dictionary = Derived::cast(*this);
- ReadOnlyRoots roots = dictionary->GetReadOnlyRoots();
- int capacity = dictionary->Capacity();
+ ReadOnlyRoots roots = dictionary.GetReadOnlyRoots();
+ int capacity = dictionary.Capacity();
for (int i = 0; i < capacity; i++) {
Object k;
- if (!dictionary->ToKey(roots, i, &k)) continue;
- Object e = dictionary->ValueAt(i);
+ if (!dictionary.ToKey(roots, i, &k)) continue;
+ Object e = dictionary.ValueAt(i);
if (e == value) return k;
}
return roots.undefined_value();
@@ -7615,7 +7612,7 @@
// If the object does not have an identity hash, it was never used as a key.
Object hash = key->GetHash();
- if (hash->IsUndefined(roots)) {
+ if (hash.IsUndefined(roots)) {
return roots.the_hole_value();
}
return Lookup(roots, key, Smi::ToInt(hash));
@@ -7641,7 +7638,7 @@
DCHECK(!value->IsTheHole(ReadOnlyRoots(isolate)));
// Make sure the key object has an identity hash code.
- int32_t hash = key->GetOrCreateHash(isolate)->value();
+ int32_t hash = key->GetOrCreateHash(isolate).value();
return ObjectHashTableBase<Derived, Shape>::Put(isolate, table, key, value,
hash);
@@ -7697,7 +7694,7 @@
DCHECK(table->IsKey(table->GetReadOnlyRoots(), *key));
Object hash = key->GetHash();
- if (hash->IsUndefined()) {
+ if (hash.IsUndefined()) {
*was_present = false;
return table;
}
@@ -7852,7 +7849,7 @@
Handle<PropertyCell> new_cell = isolate->factory()->NewPropertyCell(name);
new_cell->set_value(cell->value());
dictionary->ValueAtPut(entry, *new_cell);
- bool is_the_hole = cell->value()->IsTheHole(isolate);
+ bool is_the_hole = cell->value().IsTheHole(isolate);
// Cell is officially mutable henceforth.
PropertyDetails details = cell->property_details();
details = details.set_cell_type(is_the_hole ? PropertyCellType::kUninitialized
@@ -7866,14 +7863,14 @@
}
details = details.set_cell_type(PropertyCellType::kInvalidated);
cell->set_property_details(details);
- cell->dependent_code()->DeoptimizeDependentCodeGroup(
+ cell->dependent_code().DeoptimizeDependentCodeGroup(
isolate, DependentCode::kPropertyCellChangedGroup);
return new_cell;
}
PropertyCellConstantType PropertyCell::GetConstantType() {
- if (value()->IsSmi()) return PropertyCellConstantType::kSmi;
+ if (value().IsSmi()) return PropertyCellConstantType::kSmi;
return PropertyCellConstantType::kStableMap;
}
@@ -7881,12 +7878,12 @@
static bool RemainsConstantType(Handle<PropertyCell> cell,
Handle<Object> value) {
// TODO(dcarney): double->smi and smi->double transition from kConstant
- if (cell->value()->IsSmi() && value->IsSmi()) {
+ if (cell->value().IsSmi() && value->IsSmi()) {
return true;
- } else if (cell->value()->IsHeapObject() && value->IsHeapObject()) {
- return HeapObject::cast(cell->value())->map() ==
- HeapObject::cast(*value)->map() &&
- HeapObject::cast(*value)->map()->is_stable();
+ } else if (cell->value().IsHeapObject() && value->IsHeapObject()) {
+ return HeapObject::cast(cell->value()).map() ==
+ HeapObject::cast(*value).map() &&
+ HeapObject::cast(*value).map().is_stable();
}
return false;
}
@@ -7897,7 +7894,7 @@
PropertyDetails details) {
PropertyCellType type = details.cell_type();
DCHECK(!value->IsTheHole(isolate));
- if (cell->value()->IsTheHole(isolate)) {
+ if (cell->value().IsTheHole(isolate)) {
switch (type) {
// Only allow a cell to transition once into constant state.
case PropertyCellType::kUninitialized:
@@ -7940,7 +7937,7 @@
PropertyCellType old_type = original_details.cell_type();
// Preserve the enumeration index unless the property was deleted or never
// initialized.
- if (cell->value()->IsTheHole(isolate)) {
+ if (cell->value().IsTheHole(isolate)) {
index = dictionary->NextEnumerationIndex();
dictionary->SetNextEnumerationIndex(index + 1);
} else {
@@ -7970,7 +7967,7 @@
// Deopt when transitioning from a constant type.
if (!invalidate && (old_type != new_type ||
original_details.IsReadOnly() != details.IsReadOnly())) {
- cell->dependent_code()->DeoptimizeDependentCodeGroup(
+ cell->dependent_code().DeoptimizeDependentCodeGroup(
isolate, DependentCode::kPropertyCellChangedGroup);
}
return cell;
@@ -7983,15 +7980,15 @@
Handle<Object> new_value) {
if (cell->value() != *new_value) {
cell->set_value(*new_value);
- cell->dependent_code()->DeoptimizeDependentCodeGroup(
+ cell->dependent_code().DeoptimizeDependentCodeGroup(
isolate, DependentCode::kPropertyCellChangedGroup);
}
}
int JSGeneratorObject::source_position() const {
CHECK(is_suspended());
- DCHECK(function()->shared()->HasBytecodeArray());
- DCHECK(function()->shared()->GetBytecodeArray()->HasSourcePositionTable());
+ DCHECK(function().shared().HasBytecodeArray());
+ DCHECK(function().shared().GetBytecodeArray().HasSourcePositionTable());
int code_offset = Smi::ToInt(input_or_debug_pos());
@@ -7999,31 +7996,31 @@
// is used in the source position table, hence the subtraction.
code_offset -= BytecodeArray::kHeaderSize - kHeapObjectTag;
AbstractCode code =
- AbstractCode::cast(function()->shared()->GetBytecodeArray());
- return code->SourcePosition(code_offset);
+ AbstractCode::cast(function().shared().GetBytecodeArray());
+ return code.SourcePosition(code_offset);
}
// static
AccessCheckInfo AccessCheckInfo::Get(Isolate* isolate,
Handle<JSObject> receiver) {
DisallowHeapAllocation no_gc;
- DCHECK(receiver->map()->is_access_check_needed());
- Object maybe_constructor = receiver->map()->GetConstructor();
- if (maybe_constructor->IsFunctionTemplateInfo()) {
+ DCHECK(receiver->map().is_access_check_needed());
+ Object maybe_constructor = receiver->map().GetConstructor();
+ if (maybe_constructor.IsFunctionTemplateInfo()) {
Object data_obj =
- FunctionTemplateInfo::cast(maybe_constructor)->GetAccessCheckInfo();
- if (data_obj->IsUndefined(isolate)) return AccessCheckInfo();
+ FunctionTemplateInfo::cast(maybe_constructor).GetAccessCheckInfo();
+ if (data_obj.IsUndefined(isolate)) return AccessCheckInfo();
return AccessCheckInfo::cast(data_obj);
}
// Might happen for a detached context.
- if (!maybe_constructor->IsJSFunction()) return AccessCheckInfo();
+ if (!maybe_constructor.IsJSFunction()) return AccessCheckInfo();
JSFunction constructor = JSFunction::cast(maybe_constructor);
// Might happen for the debug context.
- if (!constructor->shared()->IsApiFunction()) return AccessCheckInfo();
+ if (!constructor.shared().IsApiFunction()) return AccessCheckInfo();
Object data_obj =
- constructor->shared()->get_api_func_data()->GetAccessCheckInfo();
- if (data_obj->IsUndefined(isolate)) return AccessCheckInfo();
+ constructor.shared().get_api_func_data().GetAccessCheckInfo();
+ if (data_obj.IsUndefined(isolate)) return AccessCheckInfo();
return AccessCheckInfo::cast(data_obj);
}
@@ -8035,7 +8032,7 @@
Handle<FunctionTemplateInfo> fti =
Handle<FunctionTemplateInfo>::cast(getter);
// Check if the accessor uses a cached property.
- if (!fti->cached_property_name()->IsTheHole(isolate)) {
+ if (!fti->cached_property_name().IsTheHole(isolate)) {
return handle(Name::cast(fti->cached_property_name()), isolate);
}
}
@@ -8186,7 +8183,7 @@
// It's possible that the cleared_cells list is empty, since
// FinalizationGroup.unregister() removed all its elements before this task
// ran. In that case, don't call the cleanup function.
- if (!finalization_group->cleared_cells()->IsUndefined(isolate)) {
+ if (!finalization_group->cleared_cells().IsUndefined(isolate)) {
// Construct the iterator.
Handle<JSFinalizationGroupCleanupIterator> iterator;
{
diff --git a/src/objects/allocation-site-inl.h b/src/objects/allocation-site-inl.h
index ac0a16c..aaf0105 100644
--- a/src/objects/allocation-site-inl.h
+++ b/src/objects/allocation-site-inl.h
@@ -46,7 +46,7 @@
int AllocationSite::transition_info() const {
DCHECK(!PointsToLiteral());
- return Smi::cast(transition_info_or_boilerplate())->value();
+ return Smi::cast(transition_info_or_boilerplate()).value();
}
void AllocationSite::set_transition_info(int value) {
@@ -105,9 +105,9 @@
bool AllocationSite::PointsToLiteral() const {
Object raw_value = transition_info_or_boilerplate();
- DCHECK_EQ(!raw_value->IsSmi(),
- raw_value->IsJSArray() || raw_value->IsJSObject());
- return !raw_value->IsSmi();
+ DCHECK_EQ(!raw_value.IsSmi(),
+ raw_value.IsJSArray() || raw_value.IsJSObject());
+ return !raw_value.IsSmi();
}
// Heuristic: We only need to create allocation site info if the boilerplate
@@ -181,8 +181,8 @@
}
bool AllocationMemento::IsValid() const {
- return allocation_site()->IsAllocationSite() &&
- !AllocationSite::cast(allocation_site())->IsZombie();
+ return allocation_site().IsAllocationSite() &&
+ !AllocationSite::cast(allocation_site()).IsZombie();
}
AllocationSite AllocationMemento::GetAllocationSite() const {
@@ -191,7 +191,7 @@
}
Address AllocationMemento::GetAllocationSiteUnchecked() const {
- return allocation_site()->ptr();
+ return allocation_site().ptr();
}
template <AllocationSiteUpdateMode update_or_check>
@@ -200,7 +200,7 @@
Isolate* isolate = site->GetIsolate();
bool result = false;
- if (site->PointsToLiteral() && site->boilerplate()->IsJSArray()) {
+ if (site->PointsToLiteral() && site->boilerplate().IsJSArray()) {
Handle<JSArray> boilerplate(JSArray::cast(site->boilerplate()), isolate);
ElementsKind kind = boilerplate->GetElementsKind();
// if kind is holey ensure that to_kind is as well.
@@ -211,7 +211,7 @@
// If the array is huge, it's not likely to be defined in a local
// function, so we shouldn't make new instances of it very often.
uint32_t length = 0;
- CHECK(boilerplate->length()->ToArrayLength(&length));
+ CHECK(boilerplate->length().ToArrayLength(&length));
if (length <= kMaximumArrayBytesToPretransition) {
if (update_or_check == AllocationSiteUpdateMode::kCheckOnly) {
return true;
@@ -224,7 +224,7 @@
ElementsKindToString(to_kind));
}
JSObject::TransitionElementsKind(boilerplate, to_kind);
- site->dependent_code()->DeoptimizeDependentCodeGroup(
+ site->dependent_code().DeoptimizeDependentCodeGroup(
isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
result = true;
}
@@ -244,7 +244,7 @@
ElementsKindToString(to_kind));
}
site->SetElementsKind(to_kind);
- site->dependent_code()->DeoptimizeDependentCodeGroup(
+ site->dependent_code().DeoptimizeDependentCodeGroup(
isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
result = true;
}
diff --git a/src/objects/api-callbacks-inl.h b/src/objects/api-callbacks-inl.h
index 7cd08f7..0412476 100644
--- a/src/objects/api-callbacks-inl.h
+++ b/src/objects/api-callbacks-inl.h
@@ -46,7 +46,7 @@
bool result = getter() != Smi::kZero;
DCHECK_EQ(result,
getter() != Smi::kZero &&
- Foreign::cast(getter())->foreign_address() != kNullAddress);
+ Foreign::cast(getter()).foreign_address() != kNullAddress);
return result;
}
@@ -54,7 +54,7 @@
bool result = setter() != Smi::kZero;
DCHECK_EQ(result,
setter() != Smi::kZero &&
- Foreign::cast(setter())->foreign_address() != kNullAddress);
+ Foreign::cast(setter()).foreign_address() != kNullAddress);
return result;
}
@@ -88,13 +88,13 @@
bool AccessorInfo::IsCompatibleReceiver(Object receiver) {
if (!HasExpectedReceiverType()) return true;
- if (!receiver->IsJSObject()) return false;
+ if (!receiver.IsJSObject()) return false;
return FunctionTemplateInfo::cast(expected_receiver_type())
- ->IsTemplateFor(JSObject::cast(receiver)->map());
+ .IsTemplateFor(JSObject::cast(receiver).map());
}
bool AccessorInfo::HasExpectedReceiverType() {
- return expected_receiver_type()->IsFunctionTemplateInfo();
+ return expected_receiver_type().IsFunctionTemplateInfo();
}
ACCESSORS(AccessCheckInfo, callback, Object, kCallbackOffset)
diff --git a/src/objects/arguments-inl.h b/src/objects/arguments-inl.h
index 2d313fd..e5421b2 100644
--- a/src/objects/arguments-inl.h
+++ b/src/objects/arguments-inl.h
@@ -62,23 +62,23 @@
int* out) {
Context context = *isolate->native_context();
Map map = object->map();
- if (map != context->sloppy_arguments_map() &&
- map != context->strict_arguments_map() &&
- map != context->fast_aliased_arguments_map()) {
+ if (map != context.sloppy_arguments_map() &&
+ map != context.strict_arguments_map() &&
+ map != context.fast_aliased_arguments_map()) {
return false;
}
DCHECK(object->HasFastElements() || object->HasFastArgumentsElements());
Object len_obj =
object->InObjectPropertyAt(JSArgumentsObjectWithLength::kLengthIndex);
- if (!len_obj->IsSmi()) return false;
+ if (!len_obj.IsSmi()) return false;
*out = Max(0, Smi::ToInt(len_obj));
FixedArray parameters = FixedArray::cast(object->elements());
if (object->HasSloppyArgumentsElements()) {
- FixedArray arguments = FixedArray::cast(parameters->get(1));
- return *out <= arguments->length();
+ FixedArray arguments = FixedArray::cast(parameters.get(1));
+ return *out <= arguments.length();
}
- return *out <= parameters->length();
+ return *out <= parameters.length();
}
} // namespace internal
diff --git a/src/objects/bigint.cc b/src/objects/bigint.cc
index 8cc5661..0e3e0f4 100644
--- a/src/objects/bigint.cc
+++ b/src/objects/bigint.cc
@@ -498,7 +498,7 @@
work_estimate = 0;
StackLimitCheck interrupt_check(isolate);
if (interrupt_check.InterruptRequested() &&
- isolate->stack_guard()->HandleInterrupts()->IsException(isolate)) {
+ isolate->stack_guard()->HandleInterrupts().IsException(isolate)) {
return MaybeHandle<BigInt>();
}
}
@@ -655,10 +655,10 @@
}
bool BigInt::EqualToBigInt(BigInt x, BigInt y) {
- if (x->sign() != y->sign()) return false;
- if (x->length() != y->length()) return false;
- for (int i = 0; i < x->length(); i++) {
- if (x->digit(i) != y->digit(i)) return false;
+ if (x.sign() != y.sign()) return false;
+ if (x.length() != y.length()) return false;
+ for (int i = 0; i < x.length(); i++) {
+ if (x.digit(i) != y.digit(i)) return false;
}
return true;
}
@@ -979,7 +979,7 @@
if (number->IsSmi()) {
return MutableBigInt::NewFromInt(isolate, Smi::ToInt(*number));
}
- double value = HeapNumber::cast(*number)->value();
+ double value = HeapNumber::cast(*number).value();
if (!std::isfinite(value) || (DoubleToInteger(value) != value)) {
THROW_NEW_ERROR(isolate,
NewRangeError(MessageTemplate::kBigIntFromNumber, number),
@@ -1311,8 +1311,8 @@
if (result_storage.is_null()) {
result = New(isolate, result_length).ToHandleChecked();
} else {
- DCHECK(result_storage->length() >= result_length);
- result_length = result_storage->length();
+ DCHECK(result_storage.length() >= result_length);
+ result_length = result_storage.length();
}
int i = 0;
for (; i < num_pairs; i++) {
@@ -1428,12 +1428,12 @@
void MutableBigInt::InternalMultiplyAdd(BigIntBase source, digit_t factor,
digit_t summand, int n,
MutableBigInt result) {
- DCHECK(source->length() >= n);
- DCHECK(result->length() >= n);
+ DCHECK(source.length() >= n);
+ DCHECK(result.length() >= n);
digit_t carry = summand;
digit_t high = 0;
for (int i = 0; i < n; i++) {
- digit_t current = source->digit(i);
+ digit_t current = source.digit(i);
digit_t new_carry = 0;
// Compute this round's multiplication.
digit_t new_high = 0;
@@ -1442,15 +1442,15 @@
current = digit_add(current, high, &new_carry);
current = digit_add(current, carry, &new_carry);
// Store result and prepare for next round.
- result->set_digit(i, current);
+ result.set_digit(i, current);
carry = new_carry;
high = new_high;
}
- if (result->length() > n) {
- result->set_digit(n++, carry + high);
+ if (result.length() > n) {
+ result.set_digit(n++, carry + high);
// Current callers don't pass in such large results, but let's be robust.
- while (n < result->length()) {
- result->set_digit(n++, 0);
+ while (n < result.length()) {
+ result.set_digit(n++, 0);
}
} else {
CHECK_EQ(carry + high, 0);
@@ -1601,7 +1601,7 @@
work_estimate = 0;
StackLimitCheck interrupt_check(isolate);
if (interrupt_check.InterruptRequested() &&
- isolate->stack_guard()->HandleInterrupts()->IsException(isolate)) {
+ isolate->stack_guard()->HandleInterrupts().IsException(isolate)) {
return false;
}
}
@@ -2146,7 +2146,7 @@
if (interrupt_check.InterruptRequested()) {
{
AllowHeapAllocation might_throw;
- if (isolate->stack_guard()->HandleInterrupts()->IsException(
+ if (isolate->stack_guard()->HandleInterrupts().IsException(
isolate)) {
return MaybeHandle<String>();
}
@@ -2156,7 +2156,7 @@
chars = result->GetChars(no_gc);
}
if (interrupt_check.InterruptRequested() &&
- isolate->stack_guard()->HandleInterrupts()->IsException(isolate)) {
+ isolate->stack_guard()->HandleInterrupts().IsException(isolate)) {
return MaybeHandle<String>();
}
}
@@ -2457,16 +2457,16 @@
uint64_t MutableBigInt::GetRawBits(BigIntBase x, bool* lossless) {
if (lossless != nullptr) *lossless = true;
- if (x->is_zero()) return 0;
- int len = x->length();
+ if (x.is_zero()) return 0;
+ int len = x.length();
STATIC_ASSERT(kDigitBits == 64 || kDigitBits == 32);
if (lossless != nullptr && len > 64 / kDigitBits) *lossless = false;
- uint64_t raw = static_cast<uint64_t>(x->digit(0));
+ uint64_t raw = static_cast<uint64_t>(x.digit(0));
if (kDigitBits == 32 && len > 1) {
- raw |= static_cast<uint64_t>(x->digit(1)) << 32;
+ raw |= static_cast<uint64_t>(x.digit(1)) << 32;
}
// Simulate two's complement. MSVC dislikes "-raw".
- return x->sign() ? ((~raw) + 1u) : raw;
+ return x.sign() ? ((~raw) + 1u) : raw;
}
int64_t BigInt::AsInt64(bool* lossless) {
diff --git a/src/objects/code-inl.h b/src/objects/code-inl.h
index a2f28d4..05f1cec 100644
--- a/src/objects/code-inl.h
+++ b/src/objects/code-inl.h
@@ -48,88 +48,88 @@
int AbstractCode::raw_instruction_size() {
if (IsCode()) {
- return GetCode()->raw_instruction_size();
+ return GetCode().raw_instruction_size();
} else {
- return GetBytecodeArray()->length();
+ return GetBytecodeArray().length();
}
}
int AbstractCode::InstructionSize() {
if (IsCode()) {
- return GetCode()->InstructionSize();
+ return GetCode().InstructionSize();
} else {
- return GetBytecodeArray()->length();
+ return GetBytecodeArray().length();
}
}
ByteArray AbstractCode::source_position_table() {
if (IsCode()) {
- return GetCode()->SourcePositionTable();
+ return GetCode().SourcePositionTable();
} else {
- return GetBytecodeArray()->SourcePositionTable();
+ return GetBytecodeArray().SourcePositionTable();
}
}
Object AbstractCode::stack_frame_cache() {
Object maybe_table;
if (IsCode()) {
- maybe_table = GetCode()->source_position_table();
+ maybe_table = GetCode().source_position_table();
} else {
- maybe_table = GetBytecodeArray()->source_position_table();
+ maybe_table = GetBytecodeArray().source_position_table();
}
- if (maybe_table->IsSourcePositionTableWithFrameCache()) {
+ if (maybe_table.IsSourcePositionTableWithFrameCache()) {
return SourcePositionTableWithFrameCache::cast(maybe_table)
- ->stack_frame_cache();
+ .stack_frame_cache();
}
return Smi::kZero;
}
int AbstractCode::SizeIncludingMetadata() {
if (IsCode()) {
- return GetCode()->SizeIncludingMetadata();
+ return GetCode().SizeIncludingMetadata();
} else {
- return GetBytecodeArray()->SizeIncludingMetadata();
+ return GetBytecodeArray().SizeIncludingMetadata();
}
}
int AbstractCode::ExecutableSize() {
if (IsCode()) {
- return GetCode()->ExecutableSize();
+ return GetCode().ExecutableSize();
} else {
- return GetBytecodeArray()->BytecodeArraySize();
+ return GetBytecodeArray().BytecodeArraySize();
}
}
Address AbstractCode::raw_instruction_start() {
if (IsCode()) {
- return GetCode()->raw_instruction_start();
+ return GetCode().raw_instruction_start();
} else {
- return GetBytecodeArray()->GetFirstBytecodeAddress();
+ return GetBytecodeArray().GetFirstBytecodeAddress();
}
}
Address AbstractCode::InstructionStart() {
if (IsCode()) {
- return GetCode()->InstructionStart();
+ return GetCode().InstructionStart();
} else {
- return GetBytecodeArray()->GetFirstBytecodeAddress();
+ return GetBytecodeArray().GetFirstBytecodeAddress();
}
}
Address AbstractCode::raw_instruction_end() {
if (IsCode()) {
- return GetCode()->raw_instruction_end();
+ return GetCode().raw_instruction_end();
} else {
- return GetBytecodeArray()->GetFirstBytecodeAddress() +
- GetBytecodeArray()->length();
+ return GetBytecodeArray().GetFirstBytecodeAddress() +
+ GetBytecodeArray().length();
}
}
Address AbstractCode::InstructionEnd() {
if (IsCode()) {
- return GetCode()->InstructionEnd();
+ return GetCode().InstructionEnd();
} else {
- return GetBytecodeArray()->GetFirstBytecodeAddress() +
- GetBytecodeArray()->length();
+ return GetBytecodeArray().GetFirstBytecodeAddress() +
+ GetBytecodeArray().length();
}
}
@@ -139,7 +139,7 @@
AbstractCode::Kind AbstractCode::kind() {
if (IsCode()) {
- return static_cast<AbstractCode::Kind>(GetCode()->kind());
+ return static_cast<AbstractCode::Kind>(GetCode().kind());
} else {
return INTERPRETED_FUNCTION;
}
@@ -236,26 +236,26 @@
ByteArray Code::SourcePositionTableIfCollected() const {
ReadOnlyRoots roots = GetReadOnlyRoots();
Object maybe_table = source_position_table();
- if (maybe_table->IsUndefined(roots) || maybe_table->IsException(roots))
+ if (maybe_table.IsUndefined(roots) || maybe_table.IsException(roots))
return roots.empty_byte_array();
return SourcePositionTable();
}
ByteArray Code::SourcePositionTable() const {
Object maybe_table = source_position_table();
- DCHECK(!maybe_table->IsUndefined() && !maybe_table->IsException());
- if (maybe_table->IsByteArray()) return ByteArray::cast(maybe_table);
- DCHECK(maybe_table->IsSourcePositionTableWithFrameCache());
+ DCHECK(!maybe_table.IsUndefined() && !maybe_table.IsException());
+ if (maybe_table.IsByteArray()) return ByteArray::cast(maybe_table);
+ DCHECK(maybe_table.IsSourcePositionTableWithFrameCache());
return SourcePositionTableWithFrameCache::cast(maybe_table)
- ->source_position_table();
+ .source_position_table();
}
Object Code::next_code_link() const {
- return code_data_container()->next_code_link();
+ return code_data_container().next_code_link();
}
void Code::set_next_code_link(Object value) {
- code_data_container()->set_next_code_link(value);
+ code_data_container().set_next_code_link(value);
}
int Code::InstructionSize() const {
@@ -325,8 +325,8 @@
int Code::SizeIncludingMetadata() const {
int size = CodeSize();
- size += relocation_info()->Size();
- size += deoptimization_data()->Size();
+ size += relocation_info().Size();
+ size += deoptimization_data().Size();
return size;
}
@@ -335,15 +335,15 @@
}
byte* Code::relocation_start() const {
- return unchecked_relocation_info()->GetDataStartAddress();
+ return unchecked_relocation_info().GetDataStartAddress();
}
byte* Code::relocation_end() const {
- return unchecked_relocation_info()->GetDataEndAddress();
+ return unchecked_relocation_info().GetDataEndAddress();
}
int Code::relocation_size() const {
- return unchecked_relocation_info()->length();
+ return unchecked_relocation_info().length();
}
Address Code::entry() const { return raw_instruction_start(); }
@@ -368,8 +368,8 @@
// static
void Code::CopyRelocInfoToByteArray(ByteArray dest, const CodeDesc& desc) {
- DCHECK_EQ(dest->length(), desc.reloc_size);
- CopyBytes(dest->GetDataStartAddress(),
+ DCHECK_EQ(dest.length(), desc.reloc_size);
+ CopyBytes(dest.GetDataStartAddress(),
desc.buffer + desc.buffer_size - desc.reloc_size,
static_cast<size_t>(desc.reloc_size));
}
@@ -425,41 +425,41 @@
inline bool Code::can_have_weak_objects() const {
DCHECK(kind() == OPTIMIZED_FUNCTION);
- int32_t flags = code_data_container()->kind_specific_flags();
+ int32_t flags = code_data_container().kind_specific_flags();
return CanHaveWeakObjectsField::decode(flags);
}
inline void Code::set_can_have_weak_objects(bool value) {
DCHECK(kind() == OPTIMIZED_FUNCTION);
- int32_t previous = code_data_container()->kind_specific_flags();
+ int32_t previous = code_data_container().kind_specific_flags();
int32_t updated = CanHaveWeakObjectsField::update(previous, value);
- code_data_container()->set_kind_specific_flags(updated);
+ code_data_container().set_kind_specific_flags(updated);
}
inline bool Code::is_promise_rejection() const {
DCHECK(kind() == BUILTIN);
- int32_t flags = code_data_container()->kind_specific_flags();
+ int32_t flags = code_data_container().kind_specific_flags();
return IsPromiseRejectionField::decode(flags);
}
inline void Code::set_is_promise_rejection(bool value) {
DCHECK(kind() == BUILTIN);
- int32_t previous = code_data_container()->kind_specific_flags();
+ int32_t previous = code_data_container().kind_specific_flags();
int32_t updated = IsPromiseRejectionField::update(previous, value);
- code_data_container()->set_kind_specific_flags(updated);
+ code_data_container().set_kind_specific_flags(updated);
}
inline bool Code::is_exception_caught() const {
DCHECK(kind() == BUILTIN);
- int32_t flags = code_data_container()->kind_specific_flags();
+ int32_t flags = code_data_container().kind_specific_flags();
return IsExceptionCaughtField::decode(flags);
}
inline void Code::set_is_exception_caught(bool value) {
DCHECK(kind() == BUILTIN);
- int32_t previous = code_data_container()->kind_specific_flags();
+ int32_t previous = code_data_container().kind_specific_flags();
int32_t updated = IsExceptionCaughtField::update(previous, value);
- code_data_container()->set_kind_specific_flags(updated);
+ code_data_container().set_kind_specific_flags(updated);
}
inline bool Code::is_off_heap_trampoline() const {
@@ -496,44 +496,44 @@
bool Code::marked_for_deoptimization() const {
DCHECK(kind() == OPTIMIZED_FUNCTION);
- int32_t flags = code_data_container()->kind_specific_flags();
+ int32_t flags = code_data_container().kind_specific_flags();
return MarkedForDeoptimizationField::decode(flags);
}
void Code::set_marked_for_deoptimization(bool flag) {
DCHECK(kind() == OPTIMIZED_FUNCTION);
DCHECK_IMPLIES(flag, AllowDeoptimization::IsAllowed(GetIsolate()));
- int32_t previous = code_data_container()->kind_specific_flags();
+ int32_t previous = code_data_container().kind_specific_flags();
int32_t updated = MarkedForDeoptimizationField::update(previous, flag);
- code_data_container()->set_kind_specific_flags(updated);
+ code_data_container().set_kind_specific_flags(updated);
}
bool Code::embedded_objects_cleared() const {
DCHECK(kind() == OPTIMIZED_FUNCTION);
- int32_t flags = code_data_container()->kind_specific_flags();
+ int32_t flags = code_data_container().kind_specific_flags();
return EmbeddedObjectsClearedField::decode(flags);
}
void Code::set_embedded_objects_cleared(bool flag) {
DCHECK(kind() == OPTIMIZED_FUNCTION);
DCHECK_IMPLIES(flag, marked_for_deoptimization());
- int32_t previous = code_data_container()->kind_specific_flags();
+ int32_t previous = code_data_container().kind_specific_flags();
int32_t updated = EmbeddedObjectsClearedField::update(previous, flag);
- code_data_container()->set_kind_specific_flags(updated);
+ code_data_container().set_kind_specific_flags(updated);
}
bool Code::deopt_already_counted() const {
DCHECK(kind() == OPTIMIZED_FUNCTION);
- int32_t flags = code_data_container()->kind_specific_flags();
+ int32_t flags = code_data_container().kind_specific_flags();
return DeoptAlreadyCountedField::decode(flags);
}
void Code::set_deopt_already_counted(bool flag) {
DCHECK(kind() == OPTIMIZED_FUNCTION);
DCHECK_IMPLIES(flag, AllowDeoptimization::IsAllowed(GetIsolate()));
- int32_t previous = code_data_container()->kind_specific_flags();
+ int32_t previous = code_data_container().kind_specific_flags();
int32_t updated = DeoptAlreadyCountedField::update(previous, flag);
- code_data_container()->set_kind_specific_flags(updated);
+ code_data_container().set_kind_specific_flags(updated);
}
bool Code::is_optimized_code() const { return kind() == OPTIMIZED_FUNCTION; }
@@ -591,10 +591,10 @@
}
bool Code::IsWeakObjectInOptimizedCode(HeapObject object) {
- Map map = object->synchronized_map();
- InstanceType instance_type = map->instance_type();
+ Map map = object.synchronized_map();
+ InstanceType instance_type = map.instance_type();
if (InstanceTypeChecker::IsMap(instance_type)) {
- return Map::cast(object)->CanTransition();
+ return Map::cast(object).CanTransition();
}
return InstanceTypeChecker::IsPropertyCell(instance_type) ||
InstanceTypeChecker::IsJSReceiver(instance_type) ||
@@ -714,11 +714,11 @@
bool BytecodeArray::HasSourcePositionTable() const {
Object maybe_table = source_position_table();
- return !(maybe_table->IsUndefined() || DidSourcePositionGenerationFail());
+ return !(maybe_table.IsUndefined() || DidSourcePositionGenerationFail());
}
bool BytecodeArray::DidSourcePositionGenerationFail() const {
- return source_position_table()->IsException();
+ return source_position_table().IsException();
}
void BytecodeArray::SetSourcePositionsFailedToCollect() {
@@ -727,14 +727,14 @@
ByteArray BytecodeArray::SourcePositionTable() const {
Object maybe_table = source_position_table();
- if (maybe_table->IsByteArray()) return ByteArray::cast(maybe_table);
+ if (maybe_table.IsByteArray()) return ByteArray::cast(maybe_table);
ReadOnlyRoots roots = GetReadOnlyRoots();
- if (maybe_table->IsException(roots)) return roots.empty_byte_array();
+ if (maybe_table.IsException(roots)) return roots.empty_byte_array();
- DCHECK(!maybe_table->IsUndefined(roots));
- DCHECK(maybe_table->IsSourcePositionTableWithFrameCache());
+ DCHECK(!maybe_table.IsUndefined(roots));
+ DCHECK(maybe_table.IsSourcePositionTableWithFrameCache());
return SourcePositionTableWithFrameCache::cast(maybe_table)
- ->source_position_table();
+ .source_position_table();
}
ByteArray BytecodeArray::SourcePositionTableIfCollected() const {
@@ -745,20 +745,20 @@
void BytecodeArray::ClearFrameCacheFromSourcePositionTable() {
Object maybe_table = source_position_table();
- if (maybe_table->IsUndefined() || maybe_table->IsByteArray()) return;
- DCHECK(maybe_table->IsSourcePositionTableWithFrameCache());
+ if (maybe_table.IsUndefined() || maybe_table.IsByteArray()) return;
+ DCHECK(maybe_table.IsSourcePositionTableWithFrameCache());
set_source_position_table(SourcePositionTableWithFrameCache::cast(maybe_table)
- ->source_position_table());
+ .source_position_table());
}
int BytecodeArray::BytecodeArraySize() { return SizeFor(this->length()); }
int BytecodeArray::SizeIncludingMetadata() {
int size = BytecodeArraySize();
- size += constant_pool()->Size();
- size += handler_table()->Size();
+ size += constant_pool().Size();
+ size += handler_table().Size();
if (HasSourcePositionTable()) {
- size += SourcePositionTable()->Size();
+ size += SourcePositionTable().Size();
}
return size;
}
@@ -776,7 +776,7 @@
DEFINE_DEOPT_ENTRY_ACCESSORS(Pc, Smi)
BailoutId DeoptimizationData::BytecodeOffset(int i) {
- return BailoutId(BytecodeOffsetRaw(i)->value());
+ return BailoutId(BytecodeOffsetRaw(i).value());
}
void DeoptimizationData::SetBytecodeOffset(int i, BailoutId value) {
diff --git a/src/objects/code.cc b/src/objects/code.cc
index 7f9c347..33f4736 100644
--- a/src/objects/code.cc
+++ b/src/objects/code.cc
@@ -114,7 +114,7 @@
// code object.
Handle<Object> p = it.rinfo()->target_object_handle(origin);
Code code = Code::cast(*p);
- it.rinfo()->set_target_address(code->raw_instruction_start(),
+ it.rinfo()->set_target_address(code.raw_instruction_start(),
UPDATE_WRITE_BARRIER, SKIP_ICACHE_FLUSH);
} else if (RelocInfo::IsRuntimeEntry(mode)) {
Address p = it.rinfo()->target_runtime_entry(origin);
@@ -192,12 +192,12 @@
namespace {
template <typename Code>
void DropStackFrameCacheCommon(Code code) {
- i::Object maybe_table = code->source_position_table();
- if (maybe_table->IsUndefined() || maybe_table->IsByteArray()) return;
- DCHECK(maybe_table->IsSourcePositionTableWithFrameCache());
- code->set_source_position_table(
+ i::Object maybe_table = code.source_position_table();
+ if (maybe_table.IsUndefined() || maybe_table.IsByteArray()) return;
+ DCHECK(maybe_table.IsSourcePositionTableWithFrameCache());
+ code.set_source_position_table(
i::SourcePositionTableWithFrameCache::cast(maybe_table)
- ->source_position_table());
+ .source_position_table());
}
} // namespace
@@ -211,7 +211,7 @@
int AbstractCode::SourcePosition(int offset) {
Object maybe_table = source_position_table();
- if (maybe_table->IsException()) return kNoSourcePosition;
+ if (maybe_table.IsException()) return kNoSourcePosition;
ByteArray source_position_table = ByteArray::cast(maybe_table);
int position = 0;
@@ -257,10 +257,10 @@
DeoptimizationData deopt_data =
DeoptimizationData::cast(deoptimization_data());
Address code_start_address = InstructionStart();
- for (int i = 0; i < deopt_data->DeoptCount(); i++) {
- if (deopt_data->Pc(i)->value() == -1) continue;
- Address address = code_start_address + deopt_data->Pc(i)->value();
- if (address == pc && deopt_data->BytecodeOffset(i) != BailoutId::None()) {
+ for (int i = 0; i < deopt_data.DeoptCount(); i++) {
+ if (deopt_data.Pc(i).value() == -1) continue;
+ Address address = code_start_address + deopt_data.Pc(i).value();
+ if (address == pc && deopt_data.BytecodeOffset(i) != BailoutId::None()) {
return true;
}
}
@@ -323,7 +323,7 @@
if (InstructionStream::PcIsOffHeap(isolate, target_address)) continue;
Code target = Code::GetCodeFromTargetAddress(target_address);
- CHECK(target->IsCode());
+ CHECK(target.IsCode());
if (Builtins::IsIsolateIndependentBuiltin(target)) continue;
}
#endif
@@ -339,12 +339,12 @@
DisallowHeapAllocation no_gc;
DeoptimizationData const data =
DeoptimizationData::cast(deoptimization_data());
- if (data->length() == 0) return false;
- if (data->SharedFunctionInfo() == sfi) return true;
- FixedArray const literals = data->LiteralArray();
- int const inlined_count = data->InlinedFunctionCount()->value();
+ if (data.length() == 0) return false;
+ if (data.SharedFunctionInfo() == sfi) return true;
+ FixedArray const literals = data.LiteralArray();
+ int const inlined_count = data.InlinedFunctionCount().value();
for (int i = 0; i < inlined_count; ++i) {
- if (SharedFunctionInfo::cast(literals->get(i)) == sfi) return true;
+ if (SharedFunctionInfo::cast(literals.get(i)) == sfi) return true;
}
return false;
}
@@ -352,7 +352,7 @@
Code::OptimizedCodeIterator::OptimizedCodeIterator(Isolate* isolate) {
isolate_ = isolate;
Object list = isolate->heap()->native_contexts_list();
- next_context_ = list->IsUndefined(isolate_) ? Context() : Context::cast(list);
+ next_context_ = list.IsUndefined(isolate_) ? Context() : Context::cast(list);
}
Code Code::OptimizedCodeIterator::Next() {
@@ -360,21 +360,21 @@
Object next;
if (!current_code_.is_null()) {
// Get next code in the linked list.
- next = current_code_->next_code_link();
+ next = current_code_.next_code_link();
} else if (!next_context_.is_null()) {
// Linked list of code exhausted. Get list of next context.
- next = next_context_->OptimizedCodeListHead();
- Object next_context = next_context_->next_context_link();
- next_context_ = next_context->IsUndefined(isolate_)
+ next = next_context_.OptimizedCodeListHead();
+ Object next_context = next_context_.next_context_link();
+ next_context_ = next_context.IsUndefined(isolate_)
? Context()
: Context::cast(next_context);
} else {
// Exhausted contexts.
return Code();
}
- current_code_ = next->IsUndefined(isolate_) ? Code() : Code::cast(next);
+ current_code_ = next.IsUndefined(isolate_) ? Code() : Code::cast(next);
} while (current_code_.is_null());
- DCHECK_EQ(Code::OPTIMIZED_FUNCTION, current_code_->kind());
+ DCHECK_EQ(Code::OPTIMIZED_FUNCTION, current_code_.kind());
return current_code_;
}
@@ -394,7 +394,7 @@
if (index == -1) {
return SharedFunctionInfo::cast(SharedFunctionInfo());
} else {
- return SharedFunctionInfo::cast(LiteralArray()->get(index));
+ return SharedFunctionInfo::cast(LiteralArray().get(index));
}
}
@@ -427,10 +427,10 @@
}
disasm::NameConverter converter;
- int const inlined_function_count = InlinedFunctionCount()->value();
+ int const inlined_function_count = InlinedFunctionCount().value();
os << "Inlined functions (count = " << inlined_function_count << ")\n";
for (int id = 0; id < inlined_function_count; ++id) {
- Object info = LiteralArray()->get(id);
+ Object info = LiteralArray().get(id);
os << " " << Brief(SharedFunctionInfo::cast(info)) << "\n";
}
os << "\n";
@@ -444,7 +444,7 @@
for (int i = 0; i < deopt_count; i++) {
os << std::setw(6) << i << " " << std::setw(15)
<< BytecodeOffset(i).ToInt() << " " << std::setw(4);
- print_pc(os, Pc(i)->value());
+ print_pc(os, Pc(i).value());
os << std::setw(2);
if (!FLAG_print_code_verbose) {
@@ -453,7 +453,7 @@
}
// Print details of the frame translation.
- int translation_index = TranslationIndex(i)->value();
+ int translation_index = TranslationIndex(i).value();
TranslationIterator iterator(TranslationByteArray(), translation_index);
Translation::Opcode opcode =
static_cast<Translation::Opcode>(iterator.Next());
@@ -482,9 +482,9 @@
unsigned height = iterator.Next();
int return_value_offset = iterator.Next();
int return_value_count = iterator.Next();
- Object shared_info = LiteralArray()->get(shared_info_id);
+ Object shared_info = LiteralArray().get(shared_info_id);
os << "{bytecode_offset=" << bytecode_offset << ", function="
- << Brief(SharedFunctionInfo::cast(shared_info)->DebugName())
+ << Brief(SharedFunctionInfo::cast(shared_info).DebugName())
<< ", height=" << height << ", retval=@" << return_value_offset
<< "(#" << return_value_count << ")}";
break;
@@ -493,10 +493,10 @@
case Translation::CONSTRUCT_STUB_FRAME: {
int bailout_id = iterator.Next();
int shared_info_id = iterator.Next();
- Object shared_info = LiteralArray()->get(shared_info_id);
+ Object shared_info = LiteralArray().get(shared_info_id);
unsigned height = iterator.Next();
os << "{bailout_id=" << bailout_id << ", function="
- << Brief(SharedFunctionInfo::cast(shared_info)->DebugName())
+ << Brief(SharedFunctionInfo::cast(shared_info).DebugName())
<< ", height=" << height << "}";
break;
}
@@ -506,20 +506,20 @@
case Translation::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH_FRAME: {
int bailout_id = iterator.Next();
int shared_info_id = iterator.Next();
- Object shared_info = LiteralArray()->get(shared_info_id);
+ Object shared_info = LiteralArray().get(shared_info_id);
unsigned height = iterator.Next();
os << "{bailout_id=" << bailout_id << ", function="
- << Brief(SharedFunctionInfo::cast(shared_info)->DebugName())
+ << Brief(SharedFunctionInfo::cast(shared_info).DebugName())
<< ", height=" << height << "}";
break;
}
case Translation::ARGUMENTS_ADAPTOR_FRAME: {
int shared_info_id = iterator.Next();
- Object shared_info = LiteralArray()->get(shared_info_id);
+ Object shared_info = LiteralArray().get(shared_info_id);
unsigned height = iterator.Next();
os << "{function="
- << Brief(SharedFunctionInfo::cast(shared_info)->DebugName())
+ << Brief(SharedFunctionInfo::cast(shared_info).DebugName())
<< ", height=" << height << "}";
break;
}
@@ -609,7 +609,7 @@
case Translation::LITERAL: {
int literal_index = iterator.Next();
- Object literal_value = LiteralArray()->get(literal_index);
+ Object literal_value = LiteralArray().get(literal_index);
os << "{literal_id=" << literal_index << " (" << Brief(literal_value)
<< ")}";
break;
@@ -743,7 +743,7 @@
if (kind() == OPTIMIZED_FUNCTION) {
DeoptimizationData data =
DeoptimizationData::cast(this->deoptimization_data());
- data->DeoptimizationDataPrint(os);
+ data.DeoptimizationDataPrint(os);
}
os << "\n";
@@ -853,16 +853,16 @@
iterator.Advance();
}
- os << "Constant pool (size = " << constant_pool()->length() << ")\n";
+ os << "Constant pool (size = " << constant_pool().length() << ")\n";
#ifdef OBJECT_PRINT
- if (constant_pool()->length() > 0) {
- constant_pool()->Print();
+ if (constant_pool().length() > 0) {
+ constant_pool().Print();
}
#endif
- os << "Handler Table (size = " << handler_table()->length() << ")\n";
+ os << "Handler Table (size = " << handler_table().length() << ")\n";
#ifdef ENABLE_DISASSEMBLER
- if (handler_table()->length() > 0) {
+ if (handler_table().length() > 0) {
HandlerTable table(*this);
table.HandlerTableRangePrint(os);
}
@@ -871,10 +871,10 @@
void BytecodeArray::CopyBytecodesTo(BytecodeArray to) {
BytecodeArray from = *this;
- DCHECK_EQ(from->length(), to->length());
- CopyBytes(reinterpret_cast<byte*>(to->GetFirstBytecodeAddress()),
- reinterpret_cast<byte*>(from->GetFirstBytecodeAddress()),
- from->length());
+ DCHECK_EQ(from.length(), to.length());
+ CopyBytes(reinterpret_cast<byte*>(to.GetFirstBytecodeAddress()),
+ reinterpret_cast<byte*>(from.GetFirstBytecodeAddress()),
+ from.length());
}
void BytecodeArray::MakeOlder() {
@@ -1017,7 +1017,7 @@
}
if (this->group() < group) {
// The group comes later in the list.
- return next_link()->MarkCodeForDeoptimization(isolate, group);
+ return next_link().MarkCodeForDeoptimization(isolate, group);
}
DCHECK_EQ(group, this->group());
DisallowHeapAllocation no_allocation_scope;
@@ -1028,8 +1028,8 @@
MaybeObject obj = object_at(i);
if (obj->IsCleared()) continue;
Code code = Code::cast(obj->GetHeapObjectAssumeWeak());
- if (!code->marked_for_deoptimization()) {
- code->SetMarkedForDeoptimization(DependencyGroupName(group));
+ if (!code.marked_for_deoptimization()) {
+ code.SetMarkedForDeoptimization(DependencyGroupName(group));
marked = true;
}
}
@@ -1060,7 +1060,7 @@
PrintF(scope.file(),
"[marking dependent code " V8PRIxPTR_FMT
" (opt #%d) for deoptimization, reason: %s]\n",
- ptr(), deopt_data->OptimizationId()->value(), reason);
+ ptr(), deopt_data.OptimizationId().value(), reason);
}
}
diff --git a/src/objects/compilation-cache-inl.h b/src/objects/compilation-cache-inl.h
index 1849111..81b953a 100644
--- a/src/objects/compilation-cache-inl.h
+++ b/src/objects/compilation-cache-inl.h
@@ -28,22 +28,22 @@
CAST_ACCESSOR(CompilationCacheTable)
uint32_t CompilationCacheShape::RegExpHash(String string, Smi flags) {
- return string->Hash() + flags->value();
+ return string.Hash() + flags.value();
}
uint32_t CompilationCacheShape::StringSharedHash(String source,
SharedFunctionInfo shared,
LanguageMode language_mode,
int position) {
- uint32_t hash = source->Hash();
- if (shared->HasSourceCode()) {
+ uint32_t hash = source.Hash();
+ if (shared.HasSourceCode()) {
// Instead of using the SharedFunctionInfo pointer in the hash
// code computation, we use a combination of the hash of the
// script source code and the start position of the calling scope.
// We do this to ensure that the cache entries can survive garbage
// collection.
- Script script(Script::cast(shared->script()));
- hash ^= String::cast(script->source())->Hash();
+ Script script(Script::cast(shared.script()));
+ hash ^= String::cast(script.source()).Hash();
STATIC_ASSERT(LanguageModeSize == 2);
if (is_strict(language_mode)) hash ^= 0x8000;
hash += position;
@@ -53,27 +53,27 @@
uint32_t CompilationCacheShape::HashForObject(ReadOnlyRoots roots,
Object object) {
- if (object->IsNumber()) return static_cast<uint32_t>(object->Number());
+ if (object.IsNumber()) return static_cast<uint32_t>(object.Number());
FixedArray val = FixedArray::cast(object);
- if (val->map() == roots.fixed_cow_array_map()) {
- DCHECK_EQ(4, val->length());
- SharedFunctionInfo shared = SharedFunctionInfo::cast(val->get(0));
- String source = String::cast(val->get(1));
- int language_unchecked = Smi::ToInt(val->get(2));
+ if (val.map() == roots.fixed_cow_array_map()) {
+ DCHECK_EQ(4, val.length());
+ SharedFunctionInfo shared = SharedFunctionInfo::cast(val.get(0));
+ String source = String::cast(val.get(1));
+ int language_unchecked = Smi::ToInt(val.get(2));
DCHECK(is_valid_language_mode(language_unchecked));
LanguageMode language_mode = static_cast<LanguageMode>(language_unchecked);
- int position = Smi::ToInt(val->get(3));
+ int position = Smi::ToInt(val.get(3));
return StringSharedHash(source, shared, language_mode, position);
}
- DCHECK_LT(2, val->length());
- return RegExpHash(String::cast(val->get(JSRegExp::kSourceIndex)),
- Smi::cast(val->get(JSRegExp::kFlagsIndex)));
+ DCHECK_LT(2, val.length());
+ return RegExpHash(String::cast(val.get(JSRegExp::kSourceIndex)),
+ Smi::cast(val.get(JSRegExp::kFlagsIndex)));
}
InfoCellPair::InfoCellPair(SharedFunctionInfo shared,
FeedbackCell feedback_cell)
- : is_compiled_scope_(!shared.is_null() ? shared->is_compiled_scope()
+ : is_compiled_scope_(!shared.is_null() ? shared.is_compiled_scope()
: IsCompiledScope()),
shared_(shared),
feedback_cell_(feedback_cell) {}
diff --git a/src/objects/data-handler-inl.h b/src/objects/data-handler-inl.h
index 1be71ce..9649e1e 100644
--- a/src/objects/data-handler-inl.h
+++ b/src/objects/data-handler-inl.h
@@ -22,15 +22,15 @@
ACCESSORS(DataHandler, validity_cell, Object, kValidityCellOffset)
int DataHandler::data_field_count() const {
- return (map()->instance_size() - kSizeWithData0) / kTaggedSize;
+ return (map().instance_size() - kSizeWithData0) / kTaggedSize;
}
WEAK_ACCESSORS_CHECKED(DataHandler, data1, kData1Offset,
- map()->instance_size() >= kSizeWithData1)
+ map().instance_size() >= kSizeWithData1)
WEAK_ACCESSORS_CHECKED(DataHandler, data2, kData2Offset,
- map()->instance_size() >= kSizeWithData2)
+ map().instance_size() >= kSizeWithData2)
WEAK_ACCESSORS_CHECKED(DataHandler, data3, kData3Offset,
- map()->instance_size() >= kSizeWithData3)
+ map().instance_size() >= kSizeWithData3)
} // namespace internal
} // namespace v8
diff --git a/src/objects/debug-objects-inl.h b/src/objects/debug-objects-inl.h
index 3280788..9935272 100644
--- a/src/objects/debug-objects-inl.h
+++ b/src/objects/debug-objects-inl.h
@@ -56,9 +56,9 @@
ACCESSORS(BreakPoint, condition, String, kConditionOffset)
bool DebugInfo::HasInstrumentedBytecodeArray() {
- DCHECK_EQ(debug_bytecode_array()->IsBytecodeArray(),
- original_bytecode_array()->IsBytecodeArray());
- return debug_bytecode_array()->IsBytecodeArray();
+ DCHECK_EQ(debug_bytecode_array().IsBytecodeArray(),
+ original_bytecode_array().IsBytecodeArray());
+ return debug_bytecode_array().IsBytecodeArray();
}
BytecodeArray DebugInfo::OriginalBytecodeArray() {
@@ -68,7 +68,7 @@
BytecodeArray DebugInfo::DebugBytecodeArray() {
DCHECK(HasInstrumentedBytecodeArray());
- DCHECK_EQ(shared()->GetDebugBytecodeArray(), debug_bytecode_array());
+ DCHECK_EQ(shared().GetDebugBytecodeArray(), debug_bytecode_array());
return BytecodeArray::cast(debug_bytecode_array());
}
diff --git a/src/objects/debug-objects.cc b/src/objects/debug-objects.cc
index 2d89db0..75c028d 100644
--- a/src/objects/debug-objects.cc
+++ b/src/objects/debug-objects.cc
@@ -31,7 +31,7 @@
if (HasInstrumentedBytecodeArray()) {
// Reset function's bytecode array field to point to the original bytecode
// array.
- shared()->SetDebugBytecodeArray(OriginalBytecodeArray());
+ shared().SetDebugBytecodeArray(OriginalBytecodeArray());
// If the function is currently running on the stack, we need to update the
// bytecode pointers on the stack so they point to the original
@@ -80,19 +80,18 @@
// If there is no break point info object or no break points in the break
// point info object there is no break point at this code offset.
- if (break_point_info->IsUndefined(isolate)) return false;
- return BreakPointInfo::cast(break_point_info)->GetBreakPointCount(isolate) >
- 0;
+ if (break_point_info.IsUndefined(isolate)) return false;
+ return BreakPointInfo::cast(break_point_info).GetBreakPointCount(isolate) > 0;
}
// Get the break point info object for this source position.
Object DebugInfo::GetBreakPointInfo(Isolate* isolate, int source_position) {
DCHECK(HasBreakInfo());
- for (int i = 0; i < break_points()->length(); i++) {
- if (!break_points()->get(i)->IsUndefined(isolate)) {
+ for (int i = 0; i < break_points().length(); i++) {
+ if (!break_points().get(i).IsUndefined(isolate)) {
BreakPointInfo break_point_info =
- BreakPointInfo::cast(break_points()->get(i));
- if (break_point_info->source_position() == source_position) {
+ BreakPointInfo::cast(break_points().get(i));
+ if (break_point_info.source_position() == source_position) {
return break_point_info;
}
}
@@ -103,10 +102,10 @@
bool DebugInfo::ClearBreakPoint(Isolate* isolate, Handle<DebugInfo> debug_info,
Handle<BreakPoint> break_point) {
DCHECK(debug_info->HasBreakInfo());
- for (int i = 0; i < debug_info->break_points()->length(); i++) {
- if (debug_info->break_points()->get(i)->IsUndefined(isolate)) continue;
+ for (int i = 0; i < debug_info->break_points().length(); i++) {
+ if (debug_info->break_points().get(i).IsUndefined(isolate)) continue;
Handle<BreakPointInfo> break_point_info = Handle<BreakPointInfo>(
- BreakPointInfo::cast(debug_info->break_points()->get(i)), isolate);
+ BreakPointInfo::cast(debug_info->break_points().get(i)), isolate);
if (BreakPointInfo::HasBreakPoint(isolate, break_point_info, break_point)) {
BreakPointInfo::ClearBreakPoint(isolate, break_point_info, break_point);
return true;
@@ -131,8 +130,8 @@
// break points before. Try to find a free slot.
static const int kNoBreakPointInfo = -1;
int index = kNoBreakPointInfo;
- for (int i = 0; i < debug_info->break_points()->length(); i++) {
- if (debug_info->break_points()->get(i)->IsUndefined(isolate)) {
+ for (int i = 0; i < debug_info->break_points().length(); i++) {
+ if (debug_info->break_points().get(i).IsUndefined(isolate)) {
index = i;
break;
}
@@ -157,7 +156,7 @@
Handle<BreakPointInfo> new_break_point_info =
isolate->factory()->NewBreakPointInfo(source_position);
BreakPointInfo::SetBreakPoint(isolate, new_break_point_info, break_point);
- debug_info->break_points()->set(index, *new_break_point_info);
+ debug_info->break_points().set(index, *new_break_point_info);
}
// Get the break point objects for a source position.
@@ -165,10 +164,10 @@
int source_position) {
DCHECK(HasBreakInfo());
Object break_point_info = GetBreakPointInfo(isolate, source_position);
- if (break_point_info->IsUndefined(isolate)) {
+ if (break_point_info.IsUndefined(isolate)) {
return isolate->factory()->undefined_value();
}
- return Handle<Object>(BreakPointInfo::cast(break_point_info)->break_points(),
+ return Handle<Object>(BreakPointInfo::cast(break_point_info).break_points(),
isolate);
}
@@ -176,11 +175,11 @@
int DebugInfo::GetBreakPointCount(Isolate* isolate) {
DCHECK(HasBreakInfo());
int count = 0;
- for (int i = 0; i < break_points()->length(); i++) {
- if (!break_points()->get(i)->IsUndefined(isolate)) {
+ for (int i = 0; i < break_points().length(); i++) {
+ if (!break_points().get(i).IsUndefined(isolate)) {
BreakPointInfo break_point_info =
- BreakPointInfo::cast(break_points()->get(i));
- count += break_point_info->GetBreakPointCount(isolate);
+ BreakPointInfo::cast(break_points().get(i));
+ count += break_point_info.GetBreakPointCount(isolate);
}
}
return count;
@@ -190,10 +189,10 @@
Handle<DebugInfo> debug_info,
Handle<BreakPoint> break_point) {
DCHECK(debug_info->HasBreakInfo());
- for (int i = 0; i < debug_info->break_points()->length(); i++) {
- if (!debug_info->break_points()->get(i)->IsUndefined(isolate)) {
+ for (int i = 0; i < debug_info->break_points().length(); i++) {
+ if (!debug_info->break_points().get(i).IsUndefined(isolate)) {
Handle<BreakPointInfo> break_point_info = Handle<BreakPointInfo>(
- BreakPointInfo::cast(debug_info->break_points()->get(i)), isolate);
+ BreakPointInfo::cast(debug_info->break_points().get(i)), isolate);
if (BreakPointInfo::HasBreakPoint(isolate, break_point_info,
break_point)) {
return break_point_info;
@@ -228,7 +227,7 @@
namespace {
bool IsEqual(BreakPoint break_point1, BreakPoint break_point2) {
- return break_point1->id() == break_point2->id();
+ return break_point1.id() == break_point2.id();
}
} // namespace
@@ -237,9 +236,9 @@
Handle<BreakPointInfo> break_point_info,
Handle<BreakPoint> break_point) {
// If there are no break points just ignore.
- if (break_point_info->break_points()->IsUndefined(isolate)) return;
+ if (break_point_info->break_points().IsUndefined(isolate)) return;
// If there is a single break point clear it if it is the same.
- if (!break_point_info->break_points()->IsFixedArray()) {
+ if (!break_point_info->break_points().IsFixedArray()) {
if (IsEqual(BreakPoint::cast(break_point_info->break_points()),
*break_point)) {
break_point_info->set_break_points(
@@ -248,7 +247,7 @@
return;
}
// If there are multiple break points shrink the array
- DCHECK(break_point_info->break_points()->IsFixedArray());
+ DCHECK(break_point_info->break_points().IsFixedArray());
Handle<FixedArray> old_array = Handle<FixedArray>(
FixedArray::cast(break_point_info->break_points()), isolate);
Handle<FixedArray> new_array =
@@ -271,14 +270,14 @@
Handle<BreakPointInfo> break_point_info,
Handle<BreakPoint> break_point) {
// If there was no break point objects before just set it.
- if (break_point_info->break_points()->IsUndefined(isolate)) {
+ if (break_point_info->break_points().IsUndefined(isolate)) {
break_point_info->set_break_points(*break_point);
return;
}
// If the break point object is the same as before just ignore.
if (break_point_info->break_points() == *break_point) return;
// If there was one break point object before replace with array.
- if (!break_point_info->break_points()->IsFixedArray()) {
+ if (!break_point_info->break_points().IsFixedArray()) {
Handle<FixedArray> array = isolate->factory()->NewFixedArray(2);
array->set(0, break_point_info->break_points());
array->set(1, *break_point);
@@ -304,18 +303,18 @@
Handle<BreakPointInfo> break_point_info,
Handle<BreakPoint> break_point) {
// No break point.
- if (break_point_info->break_points()->IsUndefined(isolate)) {
+ if (break_point_info->break_points().IsUndefined(isolate)) {
return false;
}
// Single break point.
- if (!break_point_info->break_points()->IsFixedArray()) {
+ if (!break_point_info->break_points().IsFixedArray()) {
return IsEqual(BreakPoint::cast(break_point_info->break_points()),
*break_point);
}
// Multiple break points.
FixedArray array = FixedArray::cast(break_point_info->break_points());
- for (int i = 0; i < array->length(); i++) {
- if (IsEqual(BreakPoint::cast(array->get(i)), *break_point)) {
+ for (int i = 0; i < array.length(); i++) {
+ if (IsEqual(BreakPoint::cast(array.get(i)), *break_point)) {
return true;
}
}
@@ -325,11 +324,11 @@
// Get the number of break points.
int BreakPointInfo::GetBreakPointCount(Isolate* isolate) {
// No break point.
- if (break_points()->IsUndefined(isolate)) return 0;
+ if (break_points().IsUndefined(isolate)) return 0;
// Single break point.
- if (!break_points()->IsFixedArray()) return 1;
+ if (!break_points().IsFixedArray()) return 1;
// Multiple break points.
- return FixedArray::cast(break_points())->length();
+ return FixedArray::cast(break_points()).length();
}
int CoverageInfo::SlotCount() const {
diff --git a/src/objects/descriptor-array-inl.h b/src/objects/descriptor-array-inl.h
index 4159f5b..1cd64c1 100644
--- a/src/objects/descriptor-array-inl.h
+++ b/src/objects/descriptor-array-inl.h
@@ -59,25 +59,25 @@
}
void DescriptorArray::CopyEnumCacheFrom(DescriptorArray array) {
- set_enum_cache(array->enum_cache());
+ set_enum_cache(array.enum_cache());
}
int DescriptorArray::Search(Name name, int valid_descriptors) {
- DCHECK(name->IsUniqueName());
+ DCHECK(name.IsUniqueName());
return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors,
nullptr);
}
int DescriptorArray::Search(Name name, Map map) {
- DCHECK(name->IsUniqueName());
- int number_of_own_descriptors = map->NumberOfOwnDescriptors();
+ DCHECK(name.IsUniqueName());
+ int number_of_own_descriptors = map.NumberOfOwnDescriptors();
if (number_of_own_descriptors == 0) return kNotFound;
return Search(name, number_of_own_descriptors);
}
int DescriptorArray::SearchWithCache(Isolate* isolate, Name name, Map map) {
- DCHECK(name->IsUniqueName());
- int number_of_own_descriptors = map->NumberOfOwnDescriptors();
+ DCHECK(name.IsUniqueName());
+ int number_of_own_descriptors = map.NumberOfOwnDescriptors();
if (number_of_own_descriptors == 0) return kNotFound;
DescriptorLookupCache* cache = isolate->descriptor_lookup_cache();
@@ -109,7 +109,7 @@
ObjectSlot DescriptorArray::GetKeySlot(int descriptor) {
DCHECK_LE(descriptor, number_of_all_descriptors());
ObjectSlot slot = GetDescriptorSlot(descriptor) + kEntryKeyIndex;
- DCHECK((*slot)->IsObject());
+ DCHECK((*slot).IsObject());
return slot;
}
@@ -198,7 +198,7 @@
for (insertion = descriptor_number; insertion > 0; --insertion) {
Name key = GetSortedKey(insertion - 1);
- if (key->Hash() <= hash) break;
+ if (key.Hash() <= hash) break;
SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
}
diff --git a/src/objects/dictionary-inl.h b/src/objects/dictionary-inl.h
index caacde2..60c43fb 100644
--- a/src/objects/dictionary-inl.h
+++ b/src/objects/dictionary-inl.h
@@ -53,14 +53,14 @@
bool NumberDictionary::requires_slow_elements() {
Object max_index_object = get(kMaxNumberKeyIndex);
- if (!max_index_object->IsSmi()) return false;
+ if (!max_index_object.IsSmi()) return false;
return 0 != (Smi::ToInt(max_index_object) & kRequiresSlowElementsMask);
}
uint32_t NumberDictionary::max_number_key() {
DCHECK(!requires_slow_elements());
Object max_index_object = get(kMaxNumberKeyIndex);
- if (!max_index_object->IsSmi()) return 0;
+ if (!max_index_object.IsSmi()) return 0;
uint32_t value = static_cast<uint32_t>(Smi::ToInt(max_index_object));
return value >> kRequiresSlowElementsTagSize;
}
@@ -73,7 +73,7 @@
void Dictionary<Derived, Shape>::ClearEntry(Isolate* isolate, int entry) {
Object the_hole = this->GetReadOnlyRoots().the_hole_value();
PropertyDetails details = PropertyDetails::Empty();
- Derived::cast(*this)->SetEntry(isolate, entry, the_hole, the_hole, details);
+ Derived::cast(*this).SetEntry(isolate, entry, the_hole, the_hole, details);
}
template <typename Derived, typename Shape>
@@ -81,7 +81,7 @@
Object key, Object value,
PropertyDetails details) {
DCHECK(Dictionary::kEntrySize == 2 || Dictionary::kEntrySize == 3);
- DCHECK(!key->IsName() || details.dictionary_index() > 0);
+ DCHECK(!key.IsName() || details.dictionary_index() > 0);
int index = DerivedHashTable::EntryToIndex(entry);
DisallowHeapAllocation no_gc;
WriteBarrierMode mode = this->GetWriteBarrierMode(no_gc);
@@ -91,7 +91,7 @@
}
Object GlobalDictionaryShape::Unwrap(Object object) {
- return PropertyCell::cast(object)->name();
+ return PropertyCell::cast(object).name();
}
RootIndex GlobalDictionaryShape::GetMapRootIndex() {
@@ -105,7 +105,7 @@
}
PropertyCell GlobalDictionary::CellAt(int entry) {
- DCHECK(KeyAt(entry)->IsPropertyCell());
+ DCHECK(KeyAt(entry).IsPropertyCell());
return PropertyCell::cast(KeyAt(entry));
}
@@ -115,15 +115,15 @@
}
bool GlobalDictionaryShape::IsKey(ReadOnlyRoots roots, Object k) {
- return IsLive(roots, k) && !PropertyCell::cast(k)->value()->IsTheHole(roots);
+ return IsLive(roots, k) && !PropertyCell::cast(k).value().IsTheHole(roots);
}
-Name GlobalDictionary::NameAt(int entry) { return CellAt(entry)->name(); }
-Object GlobalDictionary::ValueAt(int entry) { return CellAt(entry)->value(); }
+Name GlobalDictionary::NameAt(int entry) { return CellAt(entry).name(); }
+Object GlobalDictionary::ValueAt(int entry) { return CellAt(entry).value(); }
void GlobalDictionary::SetEntry(Isolate* isolate, int entry, Object key,
Object value, PropertyDetails details) {
- DCHECK_EQ(key, PropertyCell::cast(value)->name());
+ DCHECK_EQ(key, PropertyCell::cast(value).name());
set(EntryToIndex(entry) + kEntryKeyIndex, value);
DetailsAtPut(isolate, entry, details);
}
@@ -133,8 +133,8 @@
}
bool NumberDictionaryBaseShape::IsMatch(uint32_t key, Object other) {
- DCHECK(other->IsNumber());
- return key == static_cast<uint32_t>(other->Number());
+ DCHECK(other.IsNumber());
+ return key == static_cast<uint32_t>(other.Number());
}
uint32_t NumberDictionaryBaseShape::Hash(Isolate* isolate, uint32_t key) {
@@ -143,8 +143,8 @@
uint32_t NumberDictionaryBaseShape::HashForObject(ReadOnlyRoots roots,
Object other) {
- DCHECK(other->IsNumber());
- return ComputeSeededHash(static_cast<uint32_t>(other->Number()),
+ DCHECK(other.IsNumber());
+ return ComputeSeededHash(static_cast<uint32_t>(other.Number()),
HashSeed(roots));
}
@@ -162,7 +162,7 @@
}
bool NameDictionaryShape::IsMatch(Handle<Name> key, Object other) {
- DCHECK(other->IsTheHole() || Name::cast(other)->IsUniqueName());
+ DCHECK(other.IsTheHole() || Name::cast(other).IsUniqueName());
DCHECK(key->IsUniqueName());
return *key == other;
}
@@ -172,17 +172,17 @@
}
uint32_t NameDictionaryShape::HashForObject(ReadOnlyRoots roots, Object other) {
- return Name::cast(other)->Hash();
+ return Name::cast(other).Hash();
}
bool GlobalDictionaryShape::IsMatch(Handle<Name> key, Object other) {
- DCHECK(PropertyCell::cast(other)->name()->IsUniqueName());
- return *key == PropertyCell::cast(other)->name();
+ DCHECK(PropertyCell::cast(other).name().IsUniqueName());
+ return *key == PropertyCell::cast(other).name();
}
uint32_t GlobalDictionaryShape::HashForObject(ReadOnlyRoots roots,
Object other) {
- return PropertyCell::cast(other)->name()->Hash();
+ return PropertyCell::cast(other).name().Hash();
}
Handle<Object> NameDictionaryShape::AsHandle(Isolate* isolate,
@@ -194,19 +194,19 @@
template <typename Dictionary>
PropertyDetails GlobalDictionaryShape::DetailsAt(Dictionary dict, int entry) {
DCHECK_LE(0, entry); // Not found is -1, which is not caught by get().
- return dict->CellAt(entry)->property_details();
+ return dict.CellAt(entry).property_details();
}
template <typename Dictionary>
void GlobalDictionaryShape::DetailsAtPut(Isolate* isolate, Dictionary dict,
int entry, PropertyDetails value) {
DCHECK_LE(0, entry); // Not found is -1, which is not caught by get().
- PropertyCell cell = dict->CellAt(entry);
- if (cell->property_details().IsReadOnly() != value.IsReadOnly()) {
- cell->dependent_code()->DeoptimizeDependentCodeGroup(
+ PropertyCell cell = dict.CellAt(entry);
+ if (cell.property_details().IsReadOnly() != value.IsReadOnly()) {
+ cell.dependent_code().DeoptimizeDependentCodeGroup(
isolate, DependentCode::kPropertyCellChangedGroup);
}
- cell->set_property_details(value);
+ cell.set_property_details(value);
}
} // namespace internal
diff --git a/src/objects/dictionary.h b/src/objects/dictionary.h
index 4226128..3652103 100644
--- a/src/objects/dictionary.h
+++ b/src/objects/dictionary.h
@@ -99,16 +99,16 @@
static inline PropertyDetails DetailsAt(Dictionary dict, int entry) {
STATIC_ASSERT(Dictionary::kEntrySize == 3);
DCHECK_GE(entry, 0); // Not found is -1, which is not caught by get().
- return PropertyDetails(Smi::cast(dict->get(
- Dictionary::EntryToIndex(entry) + Dictionary::kEntryDetailsIndex)));
+ return PropertyDetails(Smi::cast(dict.get(Dictionary::EntryToIndex(entry) +
+ Dictionary::kEntryDetailsIndex)));
}
template <typename Dictionary>
static inline void DetailsAtPut(Isolate* isolate, Dictionary dict, int entry,
PropertyDetails value) {
STATIC_ASSERT(Dictionary::kEntrySize == 3);
- dict->set(Dictionary::EntryToIndex(entry) + Dictionary::kEntryDetailsIndex,
- value.AsSmi());
+ dict.set(Dictionary::EntryToIndex(entry) + Dictionary::kEntryDetailsIndex,
+ value.AsSmi());
}
};
diff --git a/src/objects/elements-inl.h b/src/objects/elements-inl.h
index 25db22b..4689b5e 100644
--- a/src/objects/elements-inl.h
+++ b/src/objects/elements-inl.h
@@ -29,7 +29,7 @@
inline bool ElementsAccessor::HasElement(JSObject holder, uint32_t index,
PropertyFilter filter) {
- return HasElement(holder, index, holder->elements(), filter);
+ return HasElement(holder, index, holder.elements(), filter);
}
} // namespace internal
diff --git a/src/objects/elements.cc b/src/objects/elements.cc
index 078ec63..c078ad0 100644
--- a/src/objects/elements.cc
+++ b/src/objects/elements.cc
@@ -147,25 +147,25 @@
FixedArrayBase to_base, ElementsKind to_kind,
uint32_t to_start, int raw_copy_size) {
ReadOnlyRoots roots(isolate);
- DCHECK(to_base->map() != roots.fixed_cow_array_map());
+ DCHECK(to_base.map() != roots.fixed_cow_array_map());
DisallowHeapAllocation no_allocation;
int copy_size = raw_copy_size;
if (raw_copy_size < 0) {
DCHECK(raw_copy_size == ElementsAccessor::kCopyToEnd ||
raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
copy_size =
- Min(from_base->length() - from_start, to_base->length() - to_start);
+ Min(from_base.length() - from_start, to_base.length() - to_start);
if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
int start = to_start + copy_size;
- int length = to_base->length() - start;
+ int length = to_base.length() - start;
if (length > 0) {
- MemsetTagged(FixedArray::cast(to_base)->RawFieldOfElementAt(start),
+ MemsetTagged(FixedArray::cast(to_base).RawFieldOfElementAt(start),
roots.the_hole_value(), length);
}
}
}
- DCHECK((copy_size + static_cast<int>(to_start)) <= to_base->length() &&
- (copy_size + static_cast<int>(from_start)) <= from_base->length());
+ DCHECK((copy_size + static_cast<int>(to_start)) <= to_base.length() &&
+ (copy_size + static_cast<int>(from_start)) <= from_base.length());
if (copy_size == 0) return;
FixedArray from = FixedArray::cast(from_base);
FixedArray to = FixedArray::cast(to_base);
@@ -176,8 +176,8 @@
(IsObjectElementsKind(from_kind) && IsObjectElementsKind(to_kind))
? UPDATE_WRITE_BARRIER
: SKIP_WRITE_BARRIER;
- to->CopyElements(isolate, to_start, from, from_start, copy_size,
- write_barrier_mode);
+ to.CopyElements(isolate, to_start, from, from_start, copy_size,
+ write_barrier_mode);
}
static void CopyDictionaryToObjectElements(
@@ -190,12 +190,12 @@
if (raw_copy_size < 0) {
DCHECK(raw_copy_size == ElementsAccessor::kCopyToEnd ||
raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
- copy_size = from->max_number_key() + 1 - from_start;
+ copy_size = from.max_number_key() + 1 - from_start;
if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
int start = to_start + copy_size;
- int length = to_base->length() - start;
+ int length = to_base.length() - start;
if (length > 0) {
- MemsetTagged(FixedArray::cast(to_base)->RawFieldOfElementAt(start),
+ MemsetTagged(FixedArray::cast(to_base).RawFieldOfElementAt(start),
ReadOnlyRoots(isolate).the_hole_value(), length);
}
}
@@ -204,19 +204,19 @@
DCHECK(IsSmiOrObjectElementsKind(to_kind));
if (copy_size == 0) return;
FixedArray to = FixedArray::cast(to_base);
- uint32_t to_length = to->length();
+ uint32_t to_length = to.length();
if (to_start + copy_size > to_length) {
copy_size = to_length - to_start;
}
WriteBarrierMode write_barrier_mode = GetWriteBarrierMode(to_kind);
for (int i = 0; i < copy_size; i++) {
- int entry = from->FindEntry(isolate, i + from_start);
+ int entry = from.FindEntry(isolate, i + from_start);
if (entry != NumberDictionary::kNotFound) {
- Object value = from->ValueAt(entry);
- DCHECK(!value->IsTheHole(isolate));
- to->set(i + to_start, value, write_barrier_mode);
+ Object value = from.ValueAt(entry);
+ DCHECK(!value.IsTheHole(isolate));
+ to.set(i + to_start, value, write_barrier_mode);
} else {
- to->set_the_hole(isolate, i + to_start);
+ to.set_the_hole(isolate, i + to_start);
}
}
}
@@ -235,22 +235,22 @@
DCHECK(raw_copy_size == ElementsAccessor::kCopyToEnd ||
raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
copy_size =
- Min(from_base->length() - from_start, to_base->length() - to_start);
+ Min(from_base.length() - from_start, to_base.length() - to_start);
if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
// Also initialize the area that will be copied over since HeapNumber
// allocation below can cause an incremental marking step, requiring all
// existing heap objects to be propertly initialized.
int start = to_start;
- int length = to_base->length() - start;
+ int length = to_base.length() - start;
if (length > 0) {
- MemsetTagged(FixedArray::cast(to_base)->RawFieldOfElementAt(start),
+ MemsetTagged(FixedArray::cast(to_base).RawFieldOfElementAt(start),
ReadOnlyRoots(isolate).the_hole_value(), length);
}
}
}
- DCHECK((copy_size + static_cast<int>(to_start)) <= to_base->length() &&
- (copy_size + static_cast<int>(from_start)) <= from_base->length());
+ DCHECK((copy_size + static_cast<int>(to_start)) <= to_base.length() &&
+ (copy_size + static_cast<int>(from_start)) <= from_base.length());
if (copy_size == 0) return;
// From here on, the code below could actually allocate. Therefore the raw
@@ -283,20 +283,20 @@
DCHECK(raw_copy_size == ElementsAccessor::kCopyToEnd ||
raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
copy_size =
- Min(from_base->length() - from_start, to_base->length() - to_start);
+ Min(from_base.length() - from_start, to_base.length() - to_start);
if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
- for (int i = to_start + copy_size; i < to_base->length(); ++i) {
- FixedDoubleArray::cast(to_base)->set_the_hole(i);
+ for (int i = to_start + copy_size; i < to_base.length(); ++i) {
+ FixedDoubleArray::cast(to_base).set_the_hole(i);
}
}
}
- DCHECK((copy_size + static_cast<int>(to_start)) <= to_base->length() &&
- (copy_size + static_cast<int>(from_start)) <= from_base->length());
+ DCHECK((copy_size + static_cast<int>(to_start)) <= to_base.length() &&
+ (copy_size + static_cast<int>(from_start)) <= from_base.length());
if (copy_size == 0) return;
FixedDoubleArray from = FixedDoubleArray::cast(from_base);
FixedDoubleArray to = FixedDoubleArray::cast(to_base);
- Address to_address = to->address() + FixedDoubleArray::kHeaderSize;
- Address from_address = from->address() + FixedDoubleArray::kHeaderSize;
+ Address to_address = to.address() + FixedDoubleArray::kHeaderSize;
+ Address from_address = from.address() + FixedDoubleArray::kHeaderSize;
to_address += kDoubleSize * to_start;
from_address += kDoubleSize * from_start;
#ifdef V8_COMPRESS_POINTERS
@@ -321,26 +321,26 @@
if (raw_copy_size < 0) {
DCHECK(raw_copy_size == ElementsAccessor::kCopyToEnd ||
raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
- copy_size = from_base->length() - from_start;
+ copy_size = from_base.length() - from_start;
if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
- for (int i = to_start + copy_size; i < to_base->length(); ++i) {
- FixedDoubleArray::cast(to_base)->set_the_hole(i);
+ for (int i = to_start + copy_size; i < to_base.length(); ++i) {
+ FixedDoubleArray::cast(to_base).set_the_hole(i);
}
}
}
- DCHECK((copy_size + static_cast<int>(to_start)) <= to_base->length() &&
- (copy_size + static_cast<int>(from_start)) <= from_base->length());
+ DCHECK((copy_size + static_cast<int>(to_start)) <= to_base.length() &&
+ (copy_size + static_cast<int>(from_start)) <= from_base.length());
if (copy_size == 0) return;
FixedArray from = FixedArray::cast(from_base);
FixedDoubleArray to = FixedDoubleArray::cast(to_base);
- Object the_hole = from->GetReadOnlyRoots().the_hole_value();
+ Object the_hole = from.GetReadOnlyRoots().the_hole_value();
for (uint32_t from_end = from_start + static_cast<uint32_t>(copy_size);
from_start < from_end; from_start++, to_start++) {
- Object hole_or_smi = from->get(from_start);
+ Object hole_or_smi = from.get(from_start);
if (hole_or_smi == the_hole) {
- to->set_the_hole(to_start);
+ to.set_the_hole(to_start);
} else {
- to->set(to_start, Smi::ToInt(hole_or_smi));
+ to.set(to_start, Smi::ToInt(hole_or_smi));
}
}
}
@@ -358,9 +358,9 @@
raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
copy_size = packed_size - from_start;
if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
- to_end = to_base->length();
+ to_end = to_base.length();
for (uint32_t i = to_start + copy_size; i < to_end; ++i) {
- FixedDoubleArray::cast(to_base)->set_the_hole(i);
+ FixedDoubleArray::cast(to_base).set_the_hole(i);
}
} else {
to_end = to_start + static_cast<uint32_t>(copy_size);
@@ -368,18 +368,18 @@
} else {
to_end = to_start + static_cast<uint32_t>(copy_size);
}
- DCHECK(static_cast<int>(to_end) <= to_base->length());
+ DCHECK(static_cast<int>(to_end) <= to_base.length());
DCHECK(packed_size >= 0 && packed_size <= copy_size);
- DCHECK((copy_size + static_cast<int>(to_start)) <= to_base->length() &&
- (copy_size + static_cast<int>(from_start)) <= from_base->length());
+ DCHECK((copy_size + static_cast<int>(to_start)) <= to_base.length() &&
+ (copy_size + static_cast<int>(from_start)) <= from_base.length());
if (copy_size == 0) return;
FixedArray from = FixedArray::cast(from_base);
FixedDoubleArray to = FixedDoubleArray::cast(to_base);
for (uint32_t from_end = from_start + static_cast<uint32_t>(packed_size);
from_start < from_end; from_start++, to_start++) {
- Object smi = from->get(from_start);
- DCHECK(!smi->IsTheHole());
- to->set(to_start, Smi::ToInt(smi));
+ Object smi = from.get(from_start);
+ DCHECK(!smi.IsTheHole());
+ to.set(to_start, Smi::ToInt(smi));
}
}
@@ -392,26 +392,26 @@
if (raw_copy_size < 0) {
DCHECK(raw_copy_size == ElementsAccessor::kCopyToEnd ||
raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
- copy_size = from_base->length() - from_start;
+ copy_size = from_base.length() - from_start;
if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
- for (int i = to_start + copy_size; i < to_base->length(); ++i) {
- FixedDoubleArray::cast(to_base)->set_the_hole(i);
+ for (int i = to_start + copy_size; i < to_base.length(); ++i) {
+ FixedDoubleArray::cast(to_base).set_the_hole(i);
}
}
}
- DCHECK((copy_size + static_cast<int>(to_start)) <= to_base->length() &&
- (copy_size + static_cast<int>(from_start)) <= from_base->length());
+ DCHECK((copy_size + static_cast<int>(to_start)) <= to_base.length() &&
+ (copy_size + static_cast<int>(from_start)) <= from_base.length());
if (copy_size == 0) return;
FixedArray from = FixedArray::cast(from_base);
FixedDoubleArray to = FixedDoubleArray::cast(to_base);
- Object the_hole = from->GetReadOnlyRoots().the_hole_value();
+ Object the_hole = from.GetReadOnlyRoots().the_hole_value();
for (uint32_t from_end = from_start + copy_size; from_start < from_end;
from_start++, to_start++) {
- Object hole_or_object = from->get(from_start);
+ Object hole_or_object = from.get(from_start);
if (hole_or_object == the_hole) {
- to->set_the_hole(to_start);
+ to.set_the_hole(to_start);
} else {
- to->set(to_start, hole_or_object->Number());
+ to.set(to_start, hole_or_object.Number());
}
}
}
@@ -425,25 +425,25 @@
if (copy_size < 0) {
DCHECK(copy_size == ElementsAccessor::kCopyToEnd ||
copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
- copy_size = from->max_number_key() + 1 - from_start;
+ copy_size = from.max_number_key() + 1 - from_start;
if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
- for (int i = to_start + copy_size; i < to_base->length(); ++i) {
- FixedDoubleArray::cast(to_base)->set_the_hole(i);
+ for (int i = to_start + copy_size; i < to_base.length(); ++i) {
+ FixedDoubleArray::cast(to_base).set_the_hole(i);
}
}
}
if (copy_size == 0) return;
FixedDoubleArray to = FixedDoubleArray::cast(to_base);
- uint32_t to_length = to->length();
+ uint32_t to_length = to.length();
if (to_start + copy_size > to_length) {
copy_size = to_length - to_start;
}
for (int i = 0; i < copy_size; i++) {
- int entry = from->FindEntry(isolate, i + from_start);
+ int entry = from.FindEntry(isolate, i + from_start);
if (entry != NumberDictionary::kNotFound) {
- to->set(i + to_start, from->ValueAt(entry)->Number());
+ to.set(i + to_start, from.ValueAt(entry).Number());
} else {
- to->set_the_hole(i + to_start);
+ to.set_the_hole(i + to_start);
}
}
}
@@ -458,7 +458,7 @@
if (raw_frame->is_internal()) {
Code current_code_object =
isolate->heap()->GcSafeFindCodeForInnerPointer(raw_frame->pc());
- if (current_code_object->builtin_index() ==
+ if (current_code_object.builtin_index() ==
Builtins::kFunctionPrototypeApply) {
PrintF("apply from ");
it.Advance();
@@ -482,13 +482,13 @@
Object a(elementA);
Object b(elementB);
#endif
- if (a->IsSmi() || !a->IsUndefined(isolate)) {
- if (!b->IsSmi() && b->IsUndefined(isolate)) {
+ if (a.IsSmi() || !a.IsUndefined(isolate)) {
+ if (!b.IsSmi() && b.IsUndefined(isolate)) {
return true;
}
- return a->Number() < b->Number();
+ return a.Number() < b.Number();
}
- return !b->IsSmi() && b->IsUndefined(isolate);
+ return !b.IsSmi() && b.IsUndefined(isolate);
});
isolate->heap()->WriteBarrierForRange(*indices, ObjectSlot(start),
ObjectSlot(end));
@@ -581,21 +581,21 @@
static void ValidateContents(JSObject holder, int length) {}
static void ValidateImpl(JSObject holder) {
- FixedArrayBase fixed_array_base = holder->elements();
- if (!fixed_array_base->IsHeapObject()) return;
+ FixedArrayBase fixed_array_base = holder.elements();
+ if (!fixed_array_base.IsHeapObject()) return;
// Arrays that have been shifted in place can't be verified.
- if (fixed_array_base->IsFiller()) return;
+ if (fixed_array_base.IsFiller()) return;
int length = 0;
- if (holder->IsJSArray()) {
- Object length_obj = JSArray::cast(holder)->length();
- if (length_obj->IsSmi()) {
+ if (holder.IsJSArray()) {
+ Object length_obj = JSArray::cast(holder).length();
+ if (length_obj.IsSmi()) {
length = Smi::ToInt(length_obj);
}
- } else if (holder->IsJSTypedArray()) {
+ } else if (holder.IsJSTypedArray()) {
// TODO(bmeurer, v8:4153): Change this to size_t later.
- length = static_cast<int>(JSTypedArray::cast(holder)->length());
+ length = static_cast<int>(JSTypedArray::cast(holder).length());
} else {
- length = fixed_array_base->length();
+ length = fixed_array_base.length();
}
Subclass::ValidateContents(holder, length);
}
@@ -609,7 +609,7 @@
uint32_t start, uint32_t end) {
DisallowHeapAllocation no_gc;
if (IsFastPackedElementsKind(kind())) return true;
- Isolate* isolate = holder->GetIsolate();
+ Isolate* isolate = holder.GetIsolate();
for (uint32_t i = start; i < end; i++) {
if (!Subclass::HasElementImpl(isolate, holder, i, backing_store,
ALL_PROPERTIES)) {
@@ -638,7 +638,7 @@
bool HasElement(JSObject holder, uint32_t index, FixedArrayBase backing_store,
PropertyFilter filter) final {
- return Subclass::HasElementImpl(holder->GetIsolate(), holder, index,
+ return Subclass::HasElementImpl(holder.GetIsolate(), holder, index,
backing_store, filter);
}
@@ -650,7 +650,7 @@
}
bool HasEntry(JSObject holder, uint32_t entry) final {
- return Subclass::HasEntryImpl(holder->GetIsolate(), holder->elements(),
+ return Subclass::HasEntryImpl(holder.GetIsolate(), holder.elements(),
entry);
}
@@ -660,7 +660,7 @@
}
bool HasAccessors(JSObject holder) final {
- return Subclass::HasAccessorsImpl(holder, holder->elements());
+ return Subclass::HasAccessorsImpl(holder, holder.elements());
}
static bool HasAccessorsImpl(JSObject holder, FixedArrayBase backing_store) {
@@ -679,7 +679,7 @@
static Handle<Object> GetImpl(Isolate* isolate, FixedArrayBase backing_store,
uint32_t entry) {
uint32_t index = GetIndexForEntryImpl(backing_store, entry);
- return handle(BackingStore::cast(backing_store)->get(index), isolate);
+ return handle(BackingStore::cast(backing_store).get(index), isolate);
}
void Set(Handle<JSObject> holder, uint32_t entry, Object value) final {
@@ -753,7 +753,7 @@
DCHECK(!array->SetLengthWouldNormalize(length));
DCHECK(IsFastElementsKind(array->GetElementsKind()));
uint32_t old_length = 0;
- CHECK(array->length()->ToArrayIndex(&old_length));
+ CHECK(array->length().ToArrayIndex(&old_length));
if (old_length < length) {
ElementsKind kind = array->GetElementsKind();
@@ -786,11 +786,11 @@
isolate->heap()->RightTrimFixedArray(*backing_store, elements_to_trim);
// Fill the non-trimmed elements with holes.
BackingStore::cast(*backing_store)
- ->FillWithHoles(length,
- std::min(old_length, capacity - elements_to_trim));
+ .FillWithHoles(length,
+ std::min(old_length, capacity - elements_to_trim));
} else {
// Otherwise, fill the unused tail with holes.
- BackingStore::cast(*backing_store)->FillWithHoles(length, old_length);
+ BackingStore::cast(*backing_store).FillWithHoles(length, old_length);
}
} else {
// Check whether the backing store should be expanded.
@@ -803,7 +803,7 @@
}
uint32_t NumberOfElements(JSObject receiver) final {
- return Subclass::NumberOfElementsImpl(receiver, receiver->elements());
+ return Subclass::NumberOfElementsImpl(receiver, receiver.elements());
}
static uint32_t NumberOfElementsImpl(JSObject receiver,
@@ -812,10 +812,10 @@
}
static uint32_t GetMaxIndex(JSObject receiver, FixedArrayBase elements) {
- if (receiver->IsJSArray()) {
- DCHECK(JSArray::cast(receiver)->length()->IsSmi());
+ if (receiver.IsJSArray()) {
+ DCHECK(JSArray::cast(receiver).length().IsSmi());
return static_cast<uint32_t>(
- Smi::ToInt(JSArray::cast(receiver)->length()));
+ Smi::ToInt(JSArray::cast(receiver).length()));
}
return Subclass::GetCapacityImpl(receiver, elements);
}
@@ -854,7 +854,7 @@
int packed_size = kPackedSizeNotKnown;
if (IsFastPackedElementsKind(from_kind) && object->IsJSArray()) {
- packed_size = Smi::ToInt(JSArray::cast(*object)->length());
+ packed_size = Smi::ToInt(JSArray::cast(*object).length());
}
Subclass::CopyElementsImpl(isolate, *old_elements, src_index, *new_elements,
@@ -889,7 +889,7 @@
DCHECK(
(IsSmiElementsKind(from_kind) && IsDoubleElementsKind(to_kind)) ||
(IsDoubleElementsKind(from_kind) && IsObjectElementsKind(to_kind)));
- uint32_t capacity = static_cast<uint32_t>(object->elements()->length());
+ uint32_t capacity = static_cast<uint32_t>(object->elements().length());
Handle<FixedArrayBase> elements = ConvertElementsWithCapacity(
object, from_elements, from_kind, capacity);
JSObject::SetMapAndElements(object, to_map, elements);
@@ -955,7 +955,7 @@
bool GrowCapacity(Handle<JSObject> object, uint32_t index) final {
// This function is intended to be called from optimized code. We don't
// want to trigger lazy deopts there, so refuse to handle cases that would.
- if (object->map()->is_prototype_map() ||
+ if (object->map().is_prototype_map() ||
object->WouldConvertToSlowElements(index)) {
return false;
}
@@ -993,14 +993,14 @@
uint32_t to_start, int copy_size) final {
int packed_size = kPackedSizeNotKnown;
bool is_packed =
- IsFastPackedElementsKind(from_kind) && from_holder->IsJSArray();
+ IsFastPackedElementsKind(from_kind) && from_holder.IsJSArray();
if (is_packed) {
- packed_size = Smi::ToInt(JSArray::cast(from_holder)->length());
+ packed_size = Smi::ToInt(JSArray::cast(from_holder).length());
if (copy_size >= 0 && packed_size > copy_size) {
packed_size = copy_size;
}
}
- FixedArrayBase from = from_holder->elements();
+ FixedArrayBase from = from_holder.elements();
// NOTE: the Subclass::CopyElementsImpl() methods
// violate the handlified function signature convention:
// raw pointer parameters in the function that allocates. This is done
@@ -1010,7 +1010,7 @@
// copying from object with fast double elements to object with object
// elements. In all the other cases there are no allocations performed and
// handle creation causes noticeable performance degradation of the builtin.
- Subclass::CopyElementsImpl(from_holder->GetIsolate(), from, from_start, *to,
+ Subclass::CopyElementsImpl(from_holder.GetIsolate(), from, from_start, *to,
from_kind, to_start, packed_size, copy_size);
}
@@ -1243,7 +1243,7 @@
if (convert == GetKeysConversion::kConvertToString) {
for (uint32_t i = 0; i < nof_indices; i++) {
Handle<Object> index_string = isolate->factory()->Uint32ToString(
- combined_keys->get(i)->Number());
+ combined_keys->get(i).Number());
combined_keys->set(i, *index_string);
}
}
@@ -1275,7 +1275,7 @@
static uint32_t GetCapacityImpl(JSObject holder,
FixedArrayBase backing_store) {
- return backing_store->length();
+ return backing_store.length();
}
uint32_t GetCapacity(JSObject holder, FixedArrayBase backing_store) final {
@@ -1348,7 +1348,7 @@
uint32_t length = Subclass::GetMaxIndex(holder, backing_store);
if (IsHoleyElementsKindForRead(kind())) {
return index < length && !BackingStore::cast(backing_store)
- ->is_the_hole(isolate, index)
+ .is_the_hole(isolate, index)
? index
: kMaxUInt32;
} else {
@@ -1413,7 +1413,7 @@
static uint32_t NumberOfElementsImpl(JSObject receiver,
FixedArrayBase backing_store) {
NumberDictionary dict = NumberDictionary::cast(backing_store);
- return dict->NumberOfElements();
+ return dict.NumberOfElements();
}
static void SetLengthImpl(Isolate* isolate, Handle<JSArray> array,
@@ -1423,7 +1423,7 @@
Handle<NumberDictionary>::cast(backing_store);
int capacity = dict->Capacity();
uint32_t old_length = 0;
- CHECK(array->length()->ToArrayLength(&old_length));
+ CHECK(array->length().ToArrayLength(&old_length));
{
DisallowHeapAllocation no_gc;
ReadOnlyRoots roots(isolate);
@@ -1434,7 +1434,7 @@
for (int entry = 0; entry < capacity; entry++) {
Object index = dict->KeyAt(entry);
if (dict->IsKey(roots, index)) {
- uint32_t number = static_cast<uint32_t>(index->Number());
+ uint32_t number = static_cast<uint32_t>(index.Number());
if (length <= number && number < old_length) {
PropertyDetails details = dict->DetailsAt(entry);
if (!details.IsConfigurable()) length = number + 1;
@@ -1452,7 +1452,7 @@
for (int entry = 0; entry < capacity; entry++) {
Object index = dict->KeyAt(entry);
if (dict->IsKey(roots, index)) {
- uint32_t number = static_cast<uint32_t>(index->Number());
+ uint32_t number = static_cast<uint32_t>(index.Number());
if (length <= number && number < old_length) {
dict->ClearEntry(isolate, entry);
removed_entries++;
@@ -1489,13 +1489,13 @@
static bool HasAccessorsImpl(JSObject holder, FixedArrayBase backing_store) {
DisallowHeapAllocation no_gc;
NumberDictionary dict = NumberDictionary::cast(backing_store);
- if (!dict->requires_slow_elements()) return false;
- int capacity = dict->Capacity();
- ReadOnlyRoots roots = holder->GetReadOnlyRoots();
+ if (!dict.requires_slow_elements()) return false;
+ int capacity = dict.Capacity();
+ ReadOnlyRoots roots = holder.GetReadOnlyRoots();
for (int i = 0; i < capacity; i++) {
- Object key = dict->KeyAt(i);
- if (!dict->IsKey(roots, key)) continue;
- PropertyDetails details = dict->DetailsAt(i);
+ Object key = dict.KeyAt(i);
+ if (!dict.IsKey(roots, key)) continue;
+ PropertyDetails details = dict.DetailsAt(i);
if (details.kind() == kAccessor) return true;
}
return false;
@@ -1503,7 +1503,7 @@
static Object GetRaw(FixedArrayBase store, uint32_t entry) {
NumberDictionary backing_store = NumberDictionary::cast(store);
- return backing_store->ValueAt(entry);
+ return backing_store.ValueAt(entry);
}
static Handle<Object> GetImpl(Isolate* isolate, FixedArrayBase backing_store,
@@ -1518,7 +1518,7 @@
static inline void SetImpl(FixedArrayBase backing_store, uint32_t entry,
Object value) {
- NumberDictionary::cast(backing_store)->ValueAtPut(entry, value);
+ NumberDictionary::cast(backing_store).ValueAtPut(entry, value);
}
static void ReconfigureImpl(Handle<JSObject> object,
@@ -1527,12 +1527,12 @@
PropertyAttributes attributes) {
NumberDictionary dictionary = NumberDictionary::cast(*store);
if (attributes != NONE) object->RequireSlowElements(dictionary);
- dictionary->ValueAtPut(entry, *value);
- PropertyDetails details = dictionary->DetailsAt(entry);
+ dictionary.ValueAtPut(entry, *value);
+ PropertyDetails details = dictionary.DetailsAt(entry);
details = PropertyDetails(kData, attributes, PropertyCellType::kNoCell,
details.dictionary_index());
- dictionary->DetailsAtPut(object->GetIsolate(), entry, details);
+ dictionary.DetailsAtPut(object->GetIsolate(), entry, details);
}
static void AddImpl(Handle<JSObject> object, uint32_t index,
@@ -1556,15 +1556,15 @@
uint32_t entry) {
DisallowHeapAllocation no_gc;
NumberDictionary dict = NumberDictionary::cast(store);
- Object index = dict->KeyAt(entry);
- return !index->IsTheHole(isolate);
+ Object index = dict.KeyAt(entry);
+ return !index.IsTheHole(isolate);
}
static uint32_t GetIndexForEntryImpl(FixedArrayBase store, uint32_t entry) {
DisallowHeapAllocation no_gc;
NumberDictionary dict = NumberDictionary::cast(store);
uint32_t result = 0;
- CHECK(dict->KeyAt(entry)->ToArrayIndex(&result));
+ CHECK(dict.KeyAt(entry).ToArrayIndex(&result));
return result;
}
@@ -1573,10 +1573,10 @@
PropertyFilter filter) {
DisallowHeapAllocation no_gc;
NumberDictionary dictionary = NumberDictionary::cast(store);
- int entry = dictionary->FindEntry(isolate, index);
+ int entry = dictionary.FindEntry(isolate, index);
if (entry == NumberDictionary::kNotFound) return kMaxUInt32;
if (filter != ALL_PROPERTIES) {
- PropertyDetails details = dictionary->DetailsAt(entry);
+ PropertyDetails details = dictionary.DetailsAt(entry);
PropertyAttributes attr = details.attributes();
if ((attr & filter) != 0) return kMaxUInt32;
}
@@ -1584,22 +1584,22 @@
}
static PropertyDetails GetDetailsImpl(JSObject holder, uint32_t entry) {
- return GetDetailsImpl(holder->elements(), entry);
+ return GetDetailsImpl(holder.elements(), entry);
}
static PropertyDetails GetDetailsImpl(FixedArrayBase backing_store,
uint32_t entry) {
- return NumberDictionary::cast(backing_store)->DetailsAt(entry);
+ return NumberDictionary::cast(backing_store).DetailsAt(entry);
}
static uint32_t FilterKey(Handle<NumberDictionary> dictionary, int entry,
Object raw_key, PropertyFilter filter) {
- DCHECK(raw_key->IsNumber());
- DCHECK_LE(raw_key->Number(), kMaxUInt32);
+ DCHECK(raw_key.IsNumber());
+ DCHECK_LE(raw_key.Number(), kMaxUInt32);
PropertyDetails details = dictionary->DetailsAt(entry);
PropertyAttributes attr = details.attributes();
if ((attr & filter) != 0) return kMaxUInt32;
- return static_cast<uint32_t>(raw_key->Number());
+ return static_cast<uint32_t>(raw_key.Number());
}
static uint32_t GetKeyForEntryImpl(Isolate* isolate,
@@ -1675,9 +1675,9 @@
Object k = dictionary->KeyAt(i);
if (!dictionary->IsKey(roots, k)) continue;
Object value = dictionary->ValueAt(i);
- DCHECK(!value->IsTheHole(isolate));
- DCHECK(!value->IsAccessorPair());
- DCHECK(!value->IsAccessorInfo());
+ DCHECK(!value.IsTheHole(isolate));
+ DCHECK(!value.IsAccessorPair());
+ DCHECK(!value.IsAccessorInfo());
accumulator->AddKey(value, convert);
}
}
@@ -1687,7 +1687,7 @@
uint32_t length, Maybe<bool>* result) {
DisallowHeapAllocation no_gc;
NumberDictionary dictionary = NumberDictionary::cast(receiver->elements());
- int capacity = dictionary->Capacity();
+ int capacity = dictionary.Capacity();
Object the_hole = ReadOnlyRoots(isolate).the_hole_value();
Object undefined = ReadOnlyRoots(isolate).undefined_value();
@@ -1695,21 +1695,21 @@
// must be accessed in order via the slow path.
bool found = false;
for (int i = 0; i < capacity; ++i) {
- Object k = dictionary->KeyAt(i);
+ Object k = dictionary.KeyAt(i);
if (k == the_hole) continue;
if (k == undefined) continue;
uint32_t index;
- if (!k->ToArrayIndex(&index) || index < start_from || index >= length) {
+ if (!k.ToArrayIndex(&index) || index < start_from || index >= length) {
continue;
}
- if (dictionary->DetailsAt(i).kind() == kAccessor) {
+ if (dictionary.DetailsAt(i).kind() == kAccessor) {
// Restart from beginning in slow path, otherwise we may observably
// access getters out of order
return false;
} else if (!found) {
- Object element_k = dictionary->ValueAt(i);
+ Object element_k = dictionary.ValueAt(i);
if (value->SameValueZero(element_k)) found = true;
}
}
@@ -1779,7 +1779,7 @@
// If switched to initial elements, return true if searching for
// undefined, and false otherwise.
- if (receiver->map()->GetInitialElements() == receiver->elements()) {
+ if (receiver->map().GetInitialElements() == receiver->elements()) {
return Just(search_for_hole);
}
@@ -1864,28 +1864,28 @@
static void ValidateContents(JSObject holder, int length) {
DisallowHeapAllocation no_gc;
#if DEBUG
- DCHECK_EQ(holder->map()->elements_kind(), DICTIONARY_ELEMENTS);
+ DCHECK_EQ(holder.map().elements_kind(), DICTIONARY_ELEMENTS);
if (!FLAG_enable_slow_asserts) return;
- ReadOnlyRoots roots = holder->GetReadOnlyRoots();
- NumberDictionary dictionary = NumberDictionary::cast(holder->elements());
+ ReadOnlyRoots roots = holder.GetReadOnlyRoots();
+ NumberDictionary dictionary = NumberDictionary::cast(holder.elements());
// Validate the requires_slow_elements and max_number_key values.
- int capacity = dictionary->Capacity();
+ int capacity = dictionary.Capacity();
bool requires_slow_elements = false;
int max_key = 0;
for (int i = 0; i < capacity; ++i) {
Object k;
- if (!dictionary->ToKey(roots, i, &k)) continue;
- DCHECK_LE(0.0, k->Number());
- if (k->Number() > NumberDictionary::kRequiresSlowElementsLimit) {
+ if (!dictionary.ToKey(roots, i, &k)) continue;
+ DCHECK_LE(0.0, k.Number());
+ if (k.Number() > NumberDictionary::kRequiresSlowElementsLimit) {
requires_slow_elements = true;
} else {
max_key = Max(max_key, Smi::ToInt(k));
}
}
if (requires_slow_elements) {
- DCHECK(dictionary->requires_slow_elements());
- } else if (!dictionary->requires_slow_elements()) {
- DCHECK_LE(max_key, dictionary->max_number_key());
+ DCHECK(dictionary.requires_slow_elements());
+ } else if (!dictionary.requires_slow_elements()) {
+ DCHECK_LE(max_key, dictionary.max_number_key());
}
#endif
}
@@ -1921,7 +1921,7 @@
int max_number_key = -1;
for (int i = 0; j < capacity; i++) {
if (IsHoleyElementsKindForRead(kind)) {
- if (BackingStore::cast(*store)->is_the_hole(isolate, i)) continue;
+ if (BackingStore::cast(*store).is_the_hole(isolate, i)) continue;
}
max_number_key = i;
Handle<Object> value = Subclass::GetImpl(isolate, *store, i);
@@ -1949,7 +1949,7 @@
// Dynamically ask for the elements kind here since we manually redirect
// the operations for argument backing stores.
if (obj->GetElementsKind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS) {
- SloppyArgumentsElements::cast(obj->elements())->set_arguments(empty);
+ SloppyArgumentsElements::cast(obj->elements()).set_arguments(empty);
} else {
obj->set_elements(empty);
}
@@ -1983,7 +1983,7 @@
if (ObjectInYoungGeneration(*backing_store)) return;
uint32_t length = 0;
if (obj->IsJSArray()) {
- JSArray::cast(*obj)->length()->ToArrayLength(&length);
+ JSArray::cast(*obj).length().ToArrayLength(&length);
} else {
length = static_cast<uint32_t>(store->length());
}
@@ -2077,14 +2077,14 @@
static bool HasEntryImpl(Isolate* isolate, FixedArrayBase backing_store,
uint32_t entry) {
- return !BackingStore::cast(backing_store)->is_the_hole(isolate, entry);
+ return !BackingStore::cast(backing_store).is_the_hole(isolate, entry);
}
static uint32_t NumberOfElementsImpl(JSObject receiver,
FixedArrayBase backing_store) {
uint32_t max_index = Subclass::GetMaxIndex(receiver, backing_store);
if (IsFastPackedElementsKind(Subclass::kind())) return max_index;
- Isolate* isolate = receiver->GetIsolate();
+ Isolate* isolate = receiver.GetIsolate();
uint32_t count = 0;
for (uint32_t i = 0; i < max_index; i++) {
if (Subclass::HasEntryImpl(isolate, backing_store, i)) count++;
@@ -2108,10 +2108,10 @@
static void ValidateContents(JSObject holder, int length) {
#if DEBUG
- Isolate* isolate = holder->GetIsolate();
+ Isolate* isolate = holder.GetIsolate();
Heap* heap = isolate->heap();
- FixedArrayBase elements = holder->elements();
- Map map = elements->map();
+ FixedArrayBase elements = holder.elements();
+ Map map = elements.map();
if (IsSmiOrObjectElementsKind(KindTraits::Kind)) {
DCHECK_NE(map, ReadOnlyRoots(heap).fixed_double_array_map());
} else if (IsDoubleElementsKind(KindTraits::Kind)) {
@@ -2129,12 +2129,12 @@
for (int i = 0; i < length; i++) {
DCHECK(BackingStore::get(backing_store, i, isolate)->IsSmi() ||
(IsHoleyElementsKind(KindTraits::Kind) &&
- backing_store->is_the_hole(isolate, i)));
+ backing_store.is_the_hole(isolate, i)));
}
} else if (KindTraits::Kind == PACKED_ELEMENTS ||
KindTraits::Kind == PACKED_DOUBLE_ELEMENTS) {
for (int i = 0; i < length; i++) {
- DCHECK(!backing_store->is_the_hole(isolate, i));
+ DCHECK(!backing_store.is_the_hole(isolate, i));
}
} else {
DCHECK(IsHoleyElementsKind(KindTraits::Kind));
@@ -2178,7 +2178,7 @@
*dst_elms.location() =
BackingStore::cast(
isolate->heap()->LeftTrimFixedArray(*dst_elms, src_index))
- ->ptr();
+ .ptr();
receiver->set_elements(*dst_elms);
// Adjust the hole offset as the array has been shrunk.
hole_end -= src_index;
@@ -2233,7 +2233,7 @@
if (start_from >= length) return Just(false);
// Elements beyond the capacity of the backing store treated as undefined.
- uint32_t elements_length = static_cast<uint32_t>(elements_base->length());
+ uint32_t elements_length = static_cast<uint32_t>(elements_base.length());
if (value == undefined && elements_length < length) return Just(true);
if (elements_length == 0) {
DCHECK_NE(value, undefined);
@@ -2242,7 +2242,7 @@
length = std::min(elements_length, length);
- if (!value->IsNumber()) {
+ if (!value.IsNumber()) {
if (value == undefined) {
// Search for `undefined` or The Hole. Even in the case of
// PACKED_DOUBLE_ELEMENTS or PACKED_SMI_ELEMENTS, we might encounter The
@@ -2253,7 +2253,7 @@
auto elements = FixedArray::cast(receiver->elements());
for (uint32_t k = start_from; k < length; ++k) {
- Object element_k = elements->get(k);
+ Object element_k = elements.get(k);
if (element_k == the_hole || element_k == undefined) {
return Just(true);
@@ -2267,7 +2267,7 @@
auto elements = FixedDoubleArray::cast(receiver->elements());
for (uint32_t k = start_from; k < length; ++k) {
- if (elements->is_the_hole(k)) {
+ if (elements.is_the_hole(k)) {
return Just(true);
}
}
@@ -2288,18 +2288,18 @@
auto elements = FixedArray::cast(receiver->elements());
for (uint32_t k = start_from; k < length; ++k) {
- Object element_k = elements->get(k);
+ Object element_k = elements.get(k);
if (element_k == the_hole) {
continue;
}
- if (value->SameValueZero(element_k)) return Just(true);
+ if (value.SameValueZero(element_k)) return Just(true);
}
return Just(false);
}
} else {
- if (!value->IsNaN()) {
- double search_value = value->Number();
+ if (!value.IsNaN()) {
+ double search_value = value.Number();
if (IsDoubleElementsKind(Subclass::kind())) {
// Search for non-NaN Number in PACKED_DOUBLE_ELEMENTS or
// HOLEY_DOUBLE_ELEMENTS --- Skip TheHole, and trust UCOMISD or
@@ -2307,10 +2307,10 @@
auto elements = FixedDoubleArray::cast(receiver->elements());
for (uint32_t k = start_from; k < length; ++k) {
- if (elements->is_the_hole(k)) {
+ if (elements.is_the_hole(k)) {
continue;
}
- if (elements->get_scalar(k) == search_value) return Just(true);
+ if (elements.get_scalar(k) == search_value) return Just(true);
}
return Just(false);
} else {
@@ -2320,8 +2320,8 @@
auto elements = FixedArray::cast(receiver->elements());
for (uint32_t k = start_from; k < length; ++k) {
- Object element_k = elements->get(k);
- if (element_k->IsNumber() && element_k->Number() == search_value) {
+ Object element_k = elements.get(k);
+ if (element_k.IsNumber() && element_k.Number() == search_value) {
return Just(true);
}
}
@@ -2339,10 +2339,10 @@
auto elements = FixedDoubleArray::cast(receiver->elements());
for (uint32_t k = start_from; k < length; ++k) {
- if (elements->is_the_hole(k)) {
+ if (elements.is_the_hole(k)) {
continue;
}
- if (std::isnan(elements->get_scalar(k))) return Just(true);
+ if (std::isnan(elements.get_scalar(k))) return Just(true);
}
return Just(false);
} else {
@@ -2354,7 +2354,7 @@
auto elements = FixedArray::cast(receiver->elements());
for (uint32_t k = start_from; k < length; ++k) {
- if (elements->get(k)->IsNaN()) return Just(true);
+ if (elements.get(k).IsNaN()) return Just(true);
}
return Just(false);
}
@@ -2449,10 +2449,10 @@
// Add the provided values.
DisallowHeapAllocation no_gc;
FixedArrayBase raw_backing_store = *dst_store;
- WriteBarrierMode mode = raw_backing_store->GetWriteBarrierMode(no_gc);
+ WriteBarrierMode mode = raw_backing_store.GetWriteBarrierMode(no_gc);
for (uint32_t i = 0; i < copy_size; i++) {
Object argument = (*args)[src_index + i];
- DCHECK(!argument->IsTheHole());
+ DCHECK(!argument.IsTheHole());
Subclass::SetImpl(raw_backing_store, dst_index + i, argument, mode);
}
}
@@ -2472,17 +2472,17 @@
static inline void SetImpl(FixedArrayBase backing_store, uint32_t entry,
Object value) {
- FixedArray::cast(backing_store)->set(entry, value);
+ FixedArray::cast(backing_store).set(entry, value);
}
static inline void SetImpl(FixedArrayBase backing_store, uint32_t entry,
Object value, WriteBarrierMode mode) {
- FixedArray::cast(backing_store)->set(entry, value, mode);
+ FixedArray::cast(backing_store).set(entry, value, mode);
}
static Object GetRaw(FixedArray backing_store, uint32_t entry) {
uint32_t index = Subclass::GetIndexForEntryImpl(backing_store, entry);
- return backing_store->get(index);
+ return backing_store.get(index);
}
// NOTE: this method violates the handlified function signature convention:
@@ -2555,7 +2555,7 @@
// No allocations here, so we can avoid handlification overhead.
DisallowHeapAllocation no_gc;
FixedArray elements = FixedArray::cast(object->elements());
- uint32_t length = elements->length();
+ uint32_t length = elements.length();
for (uint32_t index = 0; index < length; ++index) {
if (!Subclass::HasEntryImpl(isolate, elements, index)) continue;
Object value = GetRaw(elements, index);
@@ -2577,15 +2577,15 @@
if (start_from >= length) return Just<int64_t>(-1);
- length = std::min(static_cast<uint32_t>(elements_base->length()), length);
+ length = std::min(static_cast<uint32_t>(elements_base.length()), length);
// Only FAST_{,HOLEY_}ELEMENTS can store non-numbers.
- if (!value->IsNumber() && !IsObjectElementsKind(Subclass::kind()) &&
+ if (!value.IsNumber() && !IsObjectElementsKind(Subclass::kind()) &&
!IsFrozenOrSealedElementsKind(Subclass::kind())) {
return Just<int64_t>(-1);
}
// NaN can never be found by strict equality.
- if (value->IsNaN()) return Just<int64_t>(-1);
+ if (value.IsNaN()) return Just<int64_t>(-1);
// k can be greater than receiver->length() below, but it is bounded by
// elements_base->length() so we never read out of bounds. This means that
@@ -2593,7 +2593,7 @@
// always fail.
FixedArray elements = FixedArray::cast(receiver->elements());
for (uint32_t k = start_from; k < length; ++k) {
- if (value->StrictEquals(elements->get(k))) return Just<int64_t>(k);
+ if (value.StrictEquals(elements.get(k))) return Just<int64_t>(k);
}
return Just<int64_t>(-1);
}
@@ -2677,7 +2677,7 @@
uint32_t length,
Handle<FixedArrayBase> backing_store) {
uint32_t old_length = 0;
- CHECK(array->length()->ToArrayIndex(&old_length));
+ CHECK(array->length().ToArrayIndex(&old_length));
if (length == old_length) {
// Do nothing.
return;
@@ -2854,12 +2854,12 @@
static inline void SetImpl(FixedArrayBase backing_store, uint32_t entry,
Object value) {
- FixedDoubleArray::cast(backing_store)->set(entry, value->Number());
+ FixedDoubleArray::cast(backing_store).set(entry, value.Number());
}
static inline void SetImpl(FixedArrayBase backing_store, uint32_t entry,
Object value, WriteBarrierMode mode) {
- FixedDoubleArray::cast(backing_store)->set(entry, value->Number());
+ FixedDoubleArray::cast(backing_store).set(entry, value.Number());
}
static void CopyElementsImpl(Isolate* isolate, FixedArrayBase from,
@@ -2934,24 +2934,24 @@
FixedArrayBase elements_base = receiver->elements();
Object value = *search_value;
- length = std::min(static_cast<uint32_t>(elements_base->length()), length);
+ length = std::min(static_cast<uint32_t>(elements_base.length()), length);
if (start_from >= length) return Just<int64_t>(-1);
- if (!value->IsNumber()) {
+ if (!value.IsNumber()) {
return Just<int64_t>(-1);
}
- if (value->IsNaN()) {
+ if (value.IsNaN()) {
return Just<int64_t>(-1);
}
- double numeric_search_value = value->Number();
+ double numeric_search_value = value.Number();
FixedDoubleArray elements = FixedDoubleArray::cast(receiver->elements());
for (uint32_t k = start_from; k < length; ++k) {
- if (elements->is_the_hole(k)) {
+ if (elements.is_the_hole(k)) {
continue;
}
- if (elements->get_scalar(k) == numeric_search_value) {
+ if (elements.get_scalar(k) == numeric_search_value) {
return Just<int64_t>(k);
}
}
@@ -3000,7 +3000,7 @@
static inline void SetImpl(FixedArrayBase backing_store, uint32_t entry,
Object value) {
- BackingStore::cast(backing_store)->SetValue(entry, value);
+ BackingStore::cast(backing_store).SetValue(entry, value);
}
static inline void SetImpl(FixedArrayBase backing_store, uint32_t entry,
@@ -3059,9 +3059,9 @@
static uint32_t GetCapacityImpl(JSObject holder,
FixedArrayBase backing_store) {
JSTypedArray typed_array = JSTypedArray::cast(holder);
- if (typed_array->WasDetached()) return 0;
+ if (typed_array.WasDetached()) return 0;
// TODO(bmeurer, v8:4153): We need to support arbitrary size_t here.
- return static_cast<uint32_t>(typed_array->length());
+ return static_cast<uint32_t>(typed_array.length());
}
static uint32_t NumberOfElementsImpl(JSObject receiver,
@@ -3117,7 +3117,7 @@
DisallowHeapAllocation no_gc;
BackingStore elements = BackingStore::cast(receiver->elements());
- ctype* data = static_cast<ctype*>(elements->DataPtr());
+ ctype* data = static_cast<ctype*>(elements.DataPtr());
if (COMPRESS_POINTERS_BOOL && alignof(ctype) > kTaggedSize) {
// TODO(ishell, v8:8875): See UnalignedSlot<T> for details.
std::fill(UnalignedSlot<ctype>(data + start),
@@ -3137,20 +3137,20 @@
// TODO(caitp): return Just(false) here when implementing strict throwing on
// detached views.
- if (typed_array->WasDetached()) {
+ if (typed_array.WasDetached()) {
return Just(value->IsUndefined(isolate) && length > start_from);
}
- BackingStore elements = BackingStore::cast(typed_array->elements());
- if (value->IsUndefined(isolate) && length > typed_array->length()) {
+ BackingStore elements = BackingStore::cast(typed_array.elements());
+ if (value->IsUndefined(isolate) && length > typed_array.length()) {
return Just(true);
}
ctype typed_search_value;
// Prototype has no elements, and not searching for the hole --- limit
// search to backing store length.
- if (typed_array->length() < length) {
+ if (typed_array.length() < length) {
// TODO(bmeurer, v8:4153): Don't cast to uint32_t here.
- length = static_cast<uint32_t>(typed_array->length());
+ length = static_cast<uint32_t>(typed_array.length());
}
if (Kind == BIGINT64_ELEMENTS || Kind == BIGUINT64_ELEMENTS) {
@@ -3168,7 +3168,7 @@
}
if (std::isnan(search_value)) {
for (uint32_t k = start_from; k < length; ++k) {
- double element_k = elements->get_scalar(k);
+ double element_k = elements.get_scalar(k);
if (std::isnan(element_k)) return Just(true);
}
return Just(false);
@@ -3185,7 +3185,7 @@
}
for (uint32_t k = start_from; k < length; ++k) {
- ctype element_k = elements->get_scalar(k);
+ ctype element_k = elements.get_scalar(k);
if (element_k == typed_search_value) return Just(true);
}
return Just(false);
@@ -3198,9 +3198,9 @@
DisallowHeapAllocation no_gc;
JSTypedArray typed_array = JSTypedArray::cast(*receiver);
- if (typed_array->WasDetached()) return Just<int64_t>(-1);
+ if (typed_array.WasDetached()) return Just<int64_t>(-1);
- BackingStore elements = BackingStore::cast(typed_array->elements());
+ BackingStore elements = BackingStore::cast(typed_array.elements());
ctype typed_search_value;
if (Kind == BIGINT64_ELEMENTS || Kind == BIGUINT64_ELEMENTS) {
@@ -3232,13 +3232,13 @@
// Prototype has no elements, and not searching for the hole --- limit
// search to backing store length.
- if (typed_array->length() < length) {
+ if (typed_array.length() < length) {
// TODO(bmeurer, v8:4153): Don't cast to uint32_t here.
- length = static_cast<uint32_t>(typed_array->length());
+ length = static_cast<uint32_t>(typed_array.length());
}
for (uint32_t k = start_from; k < length; ++k) {
- ctype element_k = elements->get_scalar(k);
+ ctype element_k = elements.get_scalar(k);
if (element_k == typed_search_value) return Just<int64_t>(k);
}
return Just<int64_t>(-1);
@@ -3250,9 +3250,9 @@
DisallowHeapAllocation no_gc;
JSTypedArray typed_array = JSTypedArray::cast(*receiver);
- DCHECK(!typed_array->WasDetached());
+ DCHECK(!typed_array.WasDetached());
- BackingStore elements = BackingStore::cast(typed_array->elements());
+ BackingStore elements = BackingStore::cast(typed_array.elements());
ctype typed_search_value;
if (Kind == BIGINT64_ELEMENTS || Kind == BIGUINT64_ELEMENTS) {
@@ -3282,10 +3282,10 @@
}
}
- DCHECK_LT(start_from, typed_array->length());
+ DCHECK_LT(start_from, typed_array.length());
uint32_t k = start_from;
do {
- ctype element_k = elements->get_scalar(k);
+ ctype element_k = elements.get_scalar(k);
if (element_k == typed_search_value) return Just<int64_t>(k);
} while (k-- != 0);
return Just<int64_t>(-1);
@@ -3295,14 +3295,14 @@
DisallowHeapAllocation no_gc;
JSTypedArray typed_array = JSTypedArray::cast(receiver);
- DCHECK(!typed_array->WasDetached());
+ DCHECK(!typed_array.WasDetached());
- BackingStore elements = BackingStore::cast(typed_array->elements());
+ BackingStore elements = BackingStore::cast(typed_array.elements());
- size_t len = typed_array->length();
+ size_t len = typed_array.length();
if (len == 0) return;
- ctype* data = static_cast<ctype*>(elements->DataPtr());
+ ctype* data = static_cast<ctype*>(elements.DataPtr());
if (COMPRESS_POINTERS_BOOL && alignof(ctype) > kTaggedSize) {
// TODO(ishell, v8:8875): See UnalignedSlot<T> for details.
std::reverse(UnalignedSlot<ctype>(data),
@@ -3331,26 +3331,26 @@
JSTypedArray destination,
size_t start, size_t end) {
DisallowHeapAllocation no_gc;
- DCHECK_EQ(destination->GetElementsKind(), AccessorClass::kind());
- CHECK(!source->WasDetached());
- CHECK(!destination->WasDetached());
+ DCHECK_EQ(destination.GetElementsKind(), AccessorClass::kind());
+ CHECK(!source.WasDetached());
+ CHECK(!destination.WasDetached());
DCHECK_LE(start, end);
- DCHECK_LE(end, source->length());
+ DCHECK_LE(end, source.length());
size_t count = end - start;
- DCHECK_LE(count, destination->length());
+ DCHECK_LE(count, destination.length());
FixedTypedArrayBase src_elements =
- FixedTypedArrayBase::cast(source->elements());
- BackingStore dest_elements = BackingStore::cast(destination->elements());
+ FixedTypedArrayBase::cast(source.elements());
+ BackingStore dest_elements = BackingStore::cast(destination.elements());
- size_t element_size = source->element_size();
+ size_t element_size = source.element_size();
uint8_t* source_data =
- static_cast<uint8_t*>(src_elements->DataPtr()) + start * element_size;
+ static_cast<uint8_t*>(src_elements.DataPtr()) + start * element_size;
// Fast path for the same type result array
- if (source->type() == destination->type()) {
- uint8_t* dest_data = static_cast<uint8_t*>(dest_elements->DataPtr());
+ if (source.type() == destination.type()) {
+ uint8_t* dest_data = static_cast<uint8_t*>(dest_elements.DataPtr());
// The spec defines the copy-step iteratively, which means that we
// cannot use memcpy if the buffer is shared.
@@ -3361,7 +3361,7 @@
return;
}
- switch (source->GetElementsKind()) {
+ switch (source.GetElementsKind()) {
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) \
case TYPE##_ELEMENTS: \
CopyBetweenBackingStores<Type##ArrayTraits>(source_data, dest_elements, \
@@ -3391,7 +3391,7 @@
typename SourceTraits::ElementType elem =
FixedTypedArray<SourceTraits>::get_scalar_from_data_ptr(
source_data_ptr, i);
- dest->set(offset + i, dest->from(elem));
+ dest.set(offset + i, dest.from(elem));
}
}
@@ -3402,38 +3402,37 @@
// side-effects, as the source elements will always be a number.
DisallowHeapAllocation no_gc;
- CHECK(!source->WasDetached());
- CHECK(!destination->WasDetached());
+ CHECK(!source.WasDetached());
+ CHECK(!destination.WasDetached());
FixedTypedArrayBase source_elements =
- FixedTypedArrayBase::cast(source->elements());
+ FixedTypedArrayBase::cast(source.elements());
BackingStore destination_elements =
- BackingStore::cast(destination->elements());
+ BackingStore::cast(destination.elements());
- DCHECK_LE(offset, destination->length());
- DCHECK_LE(length, destination->length() - offset);
- DCHECK_LE(length, source->length());
+ DCHECK_LE(offset, destination.length());
+ DCHECK_LE(length, destination.length() - offset);
+ DCHECK_LE(length, source.length());
- InstanceType source_type = source_elements->map()->instance_type();
- InstanceType destination_type =
- destination_elements->map()->instance_type();
+ InstanceType source_type = source_elements.map().instance_type();
+ InstanceType destination_type = destination_elements.map().instance_type();
bool same_type = source_type == destination_type;
- bool same_size = source->element_size() == destination->element_size();
+ bool same_size = source.element_size() == destination.element_size();
bool both_are_simple = HasSimpleRepresentation(source_type) &&
HasSimpleRepresentation(destination_type);
- uint8_t* source_data = static_cast<uint8_t*>(source_elements->DataPtr());
- uint8_t* dest_data = static_cast<uint8_t*>(destination_elements->DataPtr());
- size_t source_byte_length = source->byte_length();
- size_t dest_byte_length = destination->byte_length();
+ uint8_t* source_data = static_cast<uint8_t*>(source_elements.DataPtr());
+ uint8_t* dest_data = static_cast<uint8_t*>(destination_elements.DataPtr());
+ size_t source_byte_length = source.byte_length();
+ size_t dest_byte_length = destination.byte_length();
// We can simply copy the backing store if the types are the same, or if
// we are converting e.g. Uint8 <-> Int8, as the binary representation
// will be the same. This is not the case for floats or clamped Uint8,
// which have special conversion operations.
if (same_type || (same_size && both_are_simple)) {
- size_t element_size = source->element_size();
+ size_t element_size = source.element_size();
std::memmove(dest_data + offset * element_size, source_data,
length * element_size);
} else {
@@ -3448,7 +3447,7 @@
source_data = cloned_source_elements.get();
}
- switch (source->GetElementsKind()) {
+ switch (source.GetElementsKind()) {
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) \
case TYPE##_ELEMENTS: \
CopyBetweenBackingStores<Type##ArrayTraits>( \
@@ -3472,13 +3471,13 @@
if (isolate->force_slow_path()) return true;
#endif
- Object source_proto = source->map()->prototype();
+ Object source_proto = source.map().prototype();
// Null prototypes are OK - we don't need to do prototype chain lookups on
// them.
- if (source_proto->IsNull(isolate)) return false;
- if (source_proto->IsJSProxy()) return true;
- if (!context->native_context()->is_initial_array_prototype(
+ if (source_proto.IsNull(isolate)) return false;
+ if (source_proto.IsJSProxy()) return true;
+ if (!context.native_context().is_initial_array_prototype(
JSObject::cast(source_proto))) {
return true;
}
@@ -3490,24 +3489,24 @@
JSTypedArray destination, size_t length,
uint32_t offset) {
if (Kind == BIGINT64_ELEMENTS || Kind == BIGUINT64_ELEMENTS) return false;
- Isolate* isolate = source->GetIsolate();
+ Isolate* isolate = source.GetIsolate();
DisallowHeapAllocation no_gc;
DisallowJavascriptExecution no_js(isolate);
- CHECK(!destination->WasDetached());
+ CHECK(!destination.WasDetached());
size_t current_length;
- DCHECK(source->length()->IsNumber() &&
- TryNumberToSize(source->length(), ¤t_length) &&
+ DCHECK(source.length().IsNumber() &&
+ TryNumberToSize(source.length(), ¤t_length) &&
length <= current_length);
USE(current_length);
- size_t dest_length = destination->length();
+ size_t dest_length = destination.length();
DCHECK(length + offset <= dest_length);
USE(dest_length);
- ElementsKind kind = source->GetElementsKind();
- BackingStore dest = BackingStore::cast(destination->elements());
+ ElementsKind kind = source.GetElementsKind();
+ BackingStore dest = BackingStore::cast(destination.elements());
// When we find the hole, we normally have to look up the element on the
// prototype chain, which is not handled here and we return false instead.
@@ -3520,50 +3519,48 @@
// Fastpath for packed Smi kind.
if (kind == PACKED_SMI_ELEMENTS) {
- FixedArray source_store = FixedArray::cast(source->elements());
+ FixedArray source_store = FixedArray::cast(source.elements());
for (uint32_t i = 0; i < length; i++) {
- Object elem = source_store->get(i);
- DCHECK(elem->IsSmi());
+ Object elem = source_store.get(i);
+ DCHECK(elem.IsSmi());
int int_value = Smi::ToInt(elem);
- dest->set(offset + i, dest->from(int_value));
+ dest.set(offset + i, dest.from(int_value));
}
return true;
} else if (kind == HOLEY_SMI_ELEMENTS) {
- FixedArray source_store = FixedArray::cast(source->elements());
+ FixedArray source_store = FixedArray::cast(source.elements());
for (uint32_t i = 0; i < length; i++) {
- if (source_store->is_the_hole(isolate, i)) {
- dest->SetValue(offset + i, undefined);
+ if (source_store.is_the_hole(isolate, i)) {
+ dest.SetValue(offset + i, undefined);
} else {
- Object elem = source_store->get(i);
- DCHECK(elem->IsSmi());
+ Object elem = source_store.get(i);
+ DCHECK(elem.IsSmi());
int int_value = Smi::ToInt(elem);
- dest->set(offset + i, dest->from(int_value));
+ dest.set(offset + i, dest.from(int_value));
}
}
return true;
} else if (kind == PACKED_DOUBLE_ELEMENTS) {
// Fastpath for packed double kind. We avoid boxing and then immediately
// unboxing the double here by using get_scalar.
- FixedDoubleArray source_store =
- FixedDoubleArray::cast(source->elements());
+ FixedDoubleArray source_store = FixedDoubleArray::cast(source.elements());
for (uint32_t i = 0; i < length; i++) {
// Use the from_double conversion for this specific TypedArray type,
// rather than relying on C++ to convert elem.
- double elem = source_store->get_scalar(i);
- dest->set(offset + i, dest->from(elem));
+ double elem = source_store.get_scalar(i);
+ dest.set(offset + i, dest.from(elem));
}
return true;
} else if (kind == HOLEY_DOUBLE_ELEMENTS) {
- FixedDoubleArray source_store =
- FixedDoubleArray::cast(source->elements());
+ FixedDoubleArray source_store = FixedDoubleArray::cast(source.elements());
for (uint32_t i = 0; i < length; i++) {
- if (source_store->is_the_hole(i)) {
- dest->SetValue(offset + i, undefined);
+ if (source_store.is_the_hole(i)) {
+ dest.SetValue(offset + i, undefined);
} else {
- double elem = source_store->get_scalar(i);
- dest->set(offset + i, dest->from(elem));
+ double elem = source_store.get_scalar(i);
+ dest.set(offset + i, dest.from(elem));
}
}
return true;
@@ -3652,7 +3649,7 @@
if (source->IsJSArray()) {
Handle<JSArray> source_js_array = Handle<JSArray>::cast(source);
size_t current_length;
- if (source_js_array->length()->IsNumber() &&
+ if (source_js_array->length().IsNumber() &&
TryNumberToSize(source_js_array->length(), ¤t_length)) {
if (length <= current_length) {
Handle<JSArray> source_array = Handle<JSArray>::cast(source);
@@ -3699,11 +3696,11 @@
// Read context mapped entry.
DisallowHeapAllocation no_gc;
Object probe = elements->get_mapped_entry(entry);
- DCHECK(!probe->IsTheHole(isolate));
+ DCHECK(!probe.IsTheHole(isolate));
Context context = elements->context();
int context_entry = Smi::ToInt(probe);
- DCHECK(!context->get(context_entry)->IsTheHole(isolate));
- return handle(context->get(context_entry), isolate);
+ DCHECK(!context.get(context_entry).IsTheHole(isolate));
+ return handle(context.get(context_entry), isolate);
} else {
// Entry is not context mapped, defer to the arguments.
Handle<Object> result = ArgumentsAccessor::GetImpl(
@@ -3730,26 +3727,26 @@
static inline void SetImpl(FixedArrayBase store, uint32_t entry,
Object value) {
SloppyArgumentsElements elements = SloppyArgumentsElements::cast(store);
- uint32_t length = elements->parameter_map_length();
+ uint32_t length = elements.parameter_map_length();
if (entry < length) {
// Store context mapped entry.
DisallowHeapAllocation no_gc;
- Object probe = elements->get_mapped_entry(entry);
- DCHECK(!probe->IsTheHole());
- Context context = elements->context();
+ Object probe = elements.get_mapped_entry(entry);
+ DCHECK(!probe.IsTheHole());
+ Context context = elements.context();
int context_entry = Smi::ToInt(probe);
- DCHECK(!context->get(context_entry)->IsTheHole());
- context->set(context_entry, value);
+ DCHECK(!context.get(context_entry).IsTheHole());
+ context.set(context_entry, value);
} else {
// Entry is not context mapped defer to arguments.
- FixedArray arguments = elements->arguments();
+ FixedArray arguments = elements.arguments();
Object current = ArgumentsAccessor::GetRaw(arguments, entry - length);
- if (current->IsAliasedArgumentsEntry()) {
+ if (current.IsAliasedArgumentsEntry()) {
AliasedArgumentsEntry alias = AliasedArgumentsEntry::cast(current);
- Context context = elements->context();
- int context_entry = alias->aliased_context_slot();
- DCHECK(!context->get(context_entry)->IsTheHole());
- context->set(context_entry, value);
+ Context context = elements.context();
+ int context_entry = alias.aliased_context_slot();
+ DCHECK(!context.get(context_entry).IsTheHole());
+ context.set(context_entry, value);
} else {
ArgumentsAccessor::SetImpl(arguments, entry - length, value);
}
@@ -3765,8 +3762,8 @@
static uint32_t GetCapacityImpl(JSObject holder, FixedArrayBase store) {
SloppyArgumentsElements elements = SloppyArgumentsElements::cast(store);
- FixedArray arguments = elements->arguments();
- return elements->parameter_map_length() +
+ FixedArray arguments = elements.arguments();
+ return elements.parameter_map_length() +
ArgumentsAccessor::GetCapacityImpl(holder, arguments);
}
@@ -3774,19 +3771,19 @@
FixedArrayBase backing_store) {
SloppyArgumentsElements elements =
SloppyArgumentsElements::cast(backing_store);
- FixedArrayBase arguments = elements->arguments();
- return elements->parameter_map_length() +
+ FixedArrayBase arguments = elements.arguments();
+ return elements.parameter_map_length() +
ArgumentsAccessor::GetMaxNumberOfEntries(holder, arguments);
}
static uint32_t NumberOfElementsImpl(JSObject receiver,
FixedArrayBase backing_store) {
- Isolate* isolate = receiver->GetIsolate();
+ Isolate* isolate = receiver.GetIsolate();
SloppyArgumentsElements elements =
SloppyArgumentsElements::cast(backing_store);
- FixedArrayBase arguments = elements->arguments();
+ FixedArrayBase arguments = elements.arguments();
uint32_t nof_elements = 0;
- uint32_t length = elements->parameter_map_length();
+ uint32_t length = elements.parameter_map_length();
for (uint32_t entry = 0; entry < length; entry++) {
if (HasParameterMapArg(isolate, elements, entry)) nof_elements++;
}
@@ -3811,18 +3808,18 @@
uint32_t entry) {
SloppyArgumentsElements elements =
SloppyArgumentsElements::cast(parameters);
- uint32_t length = elements->parameter_map_length();
+ uint32_t length = elements.parameter_map_length();
if (entry < length) {
return HasParameterMapArg(isolate, elements, entry);
}
- FixedArrayBase arguments = elements->arguments();
+ FixedArrayBase arguments = elements.arguments();
return ArgumentsAccessor::HasEntryImpl(isolate, arguments, entry - length);
}
static bool HasAccessorsImpl(JSObject holder, FixedArrayBase backing_store) {
SloppyArgumentsElements elements =
SloppyArgumentsElements::cast(backing_store);
- FixedArray arguments = elements->arguments();
+ FixedArray arguments = elements.arguments();
return ArgumentsAccessor::HasAccessorsImpl(holder, arguments);
}
@@ -3830,9 +3827,9 @@
uint32_t entry) {
SloppyArgumentsElements elements =
SloppyArgumentsElements::cast(parameters);
- uint32_t length = elements->parameter_map_length();
+ uint32_t length = elements.parameter_map_length();
if (entry < length) return entry;
- FixedArray arguments = elements->arguments();
+ FixedArray arguments = elements.arguments();
return ArgumentsAccessor::GetIndexForEntryImpl(arguments, entry - length);
}
@@ -3842,32 +3839,32 @@
SloppyArgumentsElements elements =
SloppyArgumentsElements::cast(parameters);
if (HasParameterMapArg(isolate, elements, index)) return index;
- FixedArray arguments = elements->arguments();
+ FixedArray arguments = elements.arguments();
uint32_t entry = ArgumentsAccessor::GetEntryForIndexImpl(
isolate, holder, arguments, index, filter);
if (entry == kMaxUInt32) return kMaxUInt32;
// Arguments entries could overlap with the dictionary entries, hence offset
// them by the number of context mapped entries.
- return elements->parameter_map_length() + entry;
+ return elements.parameter_map_length() + entry;
}
static PropertyDetails GetDetailsImpl(JSObject holder, uint32_t entry) {
SloppyArgumentsElements elements =
- SloppyArgumentsElements::cast(holder->elements());
- uint32_t length = elements->parameter_map_length();
+ SloppyArgumentsElements::cast(holder.elements());
+ uint32_t length = elements.parameter_map_length();
if (entry < length) {
return PropertyDetails(kData, NONE, PropertyCellType::kNoCell);
}
- FixedArray arguments = elements->arguments();
+ FixedArray arguments = elements.arguments();
return ArgumentsAccessor::GetDetailsImpl(arguments, entry - length);
}
static bool HasParameterMapArg(Isolate* isolate,
SloppyArgumentsElements elements,
uint32_t index) {
- uint32_t length = elements->parameter_map_length();
+ uint32_t length = elements.parameter_map_length();
if (index >= length) return false;
- return !elements->get_mapped_entry(index)->IsTheHole(isolate);
+ return !elements.get_mapped_entry(index).IsTheHole(isolate);
}
static void DeleteImpl(Handle<JSObject> obj, uint32_t entry) {
@@ -3920,7 +3917,7 @@
uint32_t length = elements->parameter_map_length();
for (uint32_t i = 0; i < length; ++i) {
- if (elements->get_mapped_entry(i)->IsTheHole(isolate)) continue;
+ if (elements->get_mapped_entry(i).IsTheHole(isolate)) continue;
if (convert == GetKeysConversion::kConvertToString) {
Handle<String> index_string = isolate->factory()->Uint32ToString(i);
list->set(insertion_index, *index_string);
@@ -4039,9 +4036,9 @@
DisallowHeapAllocation no_gc;
AliasedArgumentsEntry alias = AliasedArgumentsEntry::cast(*result);
Context context = elements->context();
- int context_entry = alias->aliased_context_slot();
- DCHECK(!context->get(context_entry)->IsTheHole(isolate));
- return handle(context->get(context_entry), isolate);
+ int context_entry = alias.aliased_context_slot();
+ DCHECK(!context.get(context_entry).IsTheHole(isolate));
+ return handle(context.get(context_entry), isolate);
}
return result;
}
@@ -4088,11 +4085,11 @@
uint32_t length = elements->parameter_map_length();
if (entry < length) {
Object probe = elements->get_mapped_entry(entry);
- DCHECK(!probe->IsTheHole(isolate));
+ DCHECK(!probe.IsTheHole(isolate));
Context context = elements->context();
int context_entry = Smi::ToInt(probe);
- DCHECK(!context->get(context_entry)->IsTheHole(isolate));
- context->set(context_entry, *value);
+ DCHECK(!context.get(context_entry).IsTheHole(isolate));
+ context.set(context_entry, *value);
// Redefining attributes of an aliased element destroys fast aliasing.
elements->set_mapped_entry(entry,
@@ -4141,7 +4138,7 @@
static Handle<FixedArray> GetArguments(Isolate* isolate,
FixedArrayBase store) {
SloppyArgumentsElements elements = SloppyArgumentsElements::cast(store);
- return Handle<FixedArray>(elements->arguments(), isolate);
+ return Handle<FixedArray>(elements.arguments(), isolate);
}
static Handle<NumberDictionary> NormalizeImpl(
@@ -4212,7 +4209,7 @@
uint32_t from_start, FixedArrayBase to,
ElementsKind from_kind, uint32_t to_start,
int packed_size, int copy_size) {
- DCHECK(!to->IsDictionary());
+ DCHECK(!to.IsDictionary());
if (from_kind == SLOW_SLOPPY_ARGUMENTS_ELEMENTS) {
CopyDictionaryToObjectElements(isolate, from, from_start, to,
HOLEY_ELEMENTS, to_start, copy_size);
@@ -4277,7 +4274,7 @@
}
static PropertyDetails GetDetailsImpl(JSObject holder, uint32_t entry) {
- uint32_t length = static_cast<uint32_t>(GetString(holder)->length());
+ uint32_t length = static_cast<uint32_t>(GetString(holder).length());
if (entry < length) {
PropertyAttributes attributes =
static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE);
@@ -4289,7 +4286,7 @@
static uint32_t GetEntryForIndexImpl(Isolate* isolate, JSObject holder,
FixedArrayBase backing_store,
uint32_t index, PropertyFilter filter) {
- uint32_t length = static_cast<uint32_t>(GetString(holder)->length());
+ uint32_t length = static_cast<uint32_t>(GetString(holder).length());
if (index < length) return index;
uint32_t backing_store_entry = BackingStoreAccessor::GetEntryForIndexImpl(
isolate, holder, backing_store, index, filter);
@@ -4299,7 +4296,7 @@
}
static void DeleteImpl(Handle<JSObject> holder, uint32_t entry) {
- uint32_t length = static_cast<uint32_t>(GetString(*holder)->length());
+ uint32_t length = static_cast<uint32_t>(GetString(*holder).length());
if (entry < length) {
return; // String contents can't be deleted.
}
@@ -4307,7 +4304,7 @@
}
static void SetImpl(Handle<JSObject> holder, uint32_t entry, Object value) {
- uint32_t length = static_cast<uint32_t>(GetString(*holder)->length());
+ uint32_t length = static_cast<uint32_t>(GetString(*holder).length());
if (entry < length) {
return; // String contents are read-only.
}
@@ -4317,7 +4314,7 @@
static void AddImpl(Handle<JSObject> object, uint32_t index,
Handle<Object> value, PropertyAttributes attributes,
uint32_t new_capacity) {
- DCHECK(index >= static_cast<uint32_t>(GetString(*object)->length()));
+ DCHECK(index >= static_cast<uint32_t>(GetString(*object).length()));
// Explicitly grow fast backing stores if needed. Dictionaries know how to
// extend their capacity themselves.
if (KindTraits::Kind == FAST_STRING_WRAPPER_ELEMENTS &&
@@ -4334,7 +4331,7 @@
Handle<FixedArrayBase> store, uint32_t entry,
Handle<Object> value,
PropertyAttributes attributes) {
- uint32_t length = static_cast<uint32_t>(GetString(*object)->length());
+ uint32_t length = static_cast<uint32_t>(GetString(*object).length());
if (entry < length) {
return; // String contents can't be reconfigured.
}
@@ -4362,7 +4359,7 @@
static void CollectElementIndicesImpl(Handle<JSObject> object,
Handle<FixedArrayBase> backing_store,
KeyAccumulator* keys) {
- uint32_t length = GetString(*object)->length();
+ uint32_t length = GetString(*object).length();
Factory* factory = keys->isolate()->factory();
for (uint32_t i = 0; i < length; i++) {
keys->AddKey(factory->NewNumberFromUint(i));
@@ -4396,7 +4393,7 @@
uint32_t from_start, FixedArrayBase to,
ElementsKind from_kind, uint32_t to_start,
int packed_size, int copy_size) {
- DCHECK(!to->IsDictionary());
+ DCHECK(!to.IsDictionary());
if (from_kind == SLOW_STRING_WRAPPER_ELEMENTS) {
CopyDictionaryToObjectElements(isolate, from, from_start, to,
HOLEY_ELEMENTS, to_start, copy_size);
@@ -4409,17 +4406,17 @@
static uint32_t NumberOfElementsImpl(JSObject object,
FixedArrayBase backing_store) {
- uint32_t length = GetString(object)->length();
+ uint32_t length = GetString(object).length();
return length +
BackingStoreAccessor::NumberOfElementsImpl(object, backing_store);
}
private:
static String GetString(JSObject holder) {
- DCHECK(holder->IsJSValue());
+ DCHECK(holder.IsJSValue());
JSValue js_value = JSValue::cast(holder);
- DCHECK(js_value->value()->IsString());
- return String::cast(js_value->value());
+ DCHECK(js_value.value().IsString());
+ return String::cast(js_value.value());
}
};
@@ -4463,14 +4460,14 @@
const char* elements_type = "array";
if (obj->IsJSArray()) {
JSArray array = JSArray::cast(*obj);
- raw_length = array->length();
+ raw_length = array.length();
} else {
- raw_length = Smi::FromInt(obj->elements()->length());
+ raw_length = Smi::FromInt(obj->elements().length());
elements_type = "object";
}
- if (raw_length->IsNumber()) {
- double n = raw_length->Number();
+ if (raw_length.IsNumber()) {
+ double n = raw_length.Number();
if (FastI2D(FastD2UI(n)) == n) {
int32_t int32_length = DoubleToInt32(n);
uint32_t compare_length = static_cast<uint32_t>(int32_length);
@@ -4570,7 +4567,7 @@
Handle<FixedDoubleArray> double_elms =
Handle<FixedDoubleArray>::cast(elms);
for (int entry = 0; entry < number_of_elements; entry++) {
- double_elms->set(entry, (*args)[entry]->Number());
+ double_elms->set(entry, (*args)[entry].Number());
}
break;
}
@@ -4592,7 +4589,7 @@
JSArray source = JSArray::cast(Object(raw_source));
JSTypedArray destination = JSTypedArray::cast(Object(raw_destination));
- switch (destination->GetElementsKind()) {
+ switch (destination.GetElementsKind()) {
#define TYPED_ARRAYS_CASE(Type, type, TYPE, ctype) \
case TYPE##_ELEMENTS: \
CHECK(Fixed##Type##ElementsAccessor::TryCopyElementsFastNumber( \
@@ -4611,7 +4608,7 @@
JSTypedArray source = JSTypedArray::cast(Object(raw_source));
JSTypedArray destination = JSTypedArray::cast(Object(raw_destination));
- switch (destination->GetElementsKind()) {
+ switch (destination.GetElementsKind()) {
#define TYPED_ARRAYS_CASE(Type, type, TYPE, ctype) \
case TYPE##_ELEMENTS: \
Fixed##Type##ElementsAccessor::CopyElementsFromTypedArray( \
@@ -4629,7 +4626,7 @@
JSTypedArray source = JSTypedArray::cast(Object(raw_source));
JSTypedArray destination = JSTypedArray::cast(Object(raw_destination));
- destination->GetElementsAccessor()->CopyTypedArrayElementsSlice(
+ destination.GetElementsAccessor()->CopyTypedArrayElementsSlice(
source, destination, start, end);
}
@@ -4664,7 +4661,7 @@
bool is_holey = false;
for (uint32_t i = 0; i < concat_size; i++) {
Object arg = (*args)[i];
- ElementsKind arg_kind = JSArray::cast(arg)->GetElementsKind();
+ ElementsKind arg_kind = JSArray::cast(arg).GetElementsKind();
has_raw_doubles = has_raw_doubles || IsDoubleElementsKind(arg_kind);
is_holey = is_holey || IsHoleyElementsKind(arg_kind);
result_elements_kind =
@@ -4695,9 +4692,9 @@
// performance degradation.
JSArray array = JSArray::cast((*args)[i]);
uint32_t len = 0;
- array->length()->ToArrayLength(&len);
+ array.length().ToArrayLength(&len);
if (len == 0) continue;
- ElementsKind from_kind = array->GetElementsKind();
+ ElementsKind from_kind = array.GetElementsKind();
accessor->CopyElements(array, 0, from_kind, storage, insertion_index, len);
insertion_index += len;
}
diff --git a/src/objects/embedder-data-slot-inl.h b/src/objects/embedder-data-slot-inl.h
index b87f31a..59cc343 100644
--- a/src/objects/embedder-data-slot-inl.h
+++ b/src/objects/embedder-data-slot-inl.h
@@ -25,7 +25,7 @@
EmbedderDataSlot::EmbedderDataSlot(JSObject object, int embedder_field_index)
: SlotBase(FIELD_ADDR(
- object, object->GetEmbedderFieldOffset(embedder_field_index))) {}
+ object, object.GetEmbedderFieldOffset(embedder_field_index))) {}
Object EmbedderDataSlot::load_tagged() const {
return ObjectSlot(address() + kTaggedPayloadOffset).Relaxed_Load();
@@ -56,7 +56,7 @@
// static
void EmbedderDataSlot::store_tagged(JSObject object, int embedder_field_index,
Object value) {
- int slot_offset = object->GetEmbedderFieldOffset(embedder_field_index);
+ int slot_offset = object.GetEmbedderFieldOffset(embedder_field_index);
ObjectSlot(FIELD_ADDR(object, slot_offset + kTaggedPayloadOffset))
.Relaxed_Store(value);
WRITE_BARRIER(object, slot_offset + kTaggedPayloadOffset, value);
diff --git a/src/objects/field-index-inl.h b/src/objects/field-index-inl.h
index c4164e7..c0eb220 100644
--- a/src/objects/field-index-inl.h
+++ b/src/objects/field-index-inl.h
@@ -21,14 +21,14 @@
FieldIndex FieldIndex::ForPropertyIndex(const Map map, int property_index,
Representation representation) {
- DCHECK(map->instance_type() >= FIRST_NONSTRING_TYPE);
- int inobject_properties = map->GetInObjectProperties();
+ DCHECK(map.instance_type() >= FIRST_NONSTRING_TYPE);
+ int inobject_properties = map.GetInObjectProperties();
bool is_inobject = property_index < inobject_properties;
int first_inobject_offset;
int offset;
if (is_inobject) {
- first_inobject_offset = map->GetInObjectPropertyOffset(0);
- offset = map->GetInObjectPropertyOffset(property_index);
+ first_inobject_offset = map.GetInObjectPropertyOffset(0);
+ offset = map.GetInObjectPropertyOffset(property_index);
} else {
first_inobject_offset = FixedArray::kHeaderSize;
property_index -= inobject_properties;
@@ -62,7 +62,7 @@
FieldIndex FieldIndex::ForDescriptor(const Map map, int descriptor_index) {
PropertyDetails details =
- map->instance_descriptors()->GetDetails(descriptor_index);
+ map.instance_descriptors().GetDetails(descriptor_index);
int field_index = details.field_index();
return ForPropertyIndex(map, field_index, details.representation());
}
diff --git a/src/objects/field-type.cc b/src/objects/field-type.cc
index 97073f9..9e8e2fa 100644
--- a/src/objects/field-type.cc
+++ b/src/objects/field-type.cc
@@ -38,8 +38,8 @@
// static
FieldType FieldType::cast(Object object) {
- DCHECK(object == None() || object == Any() || object->IsMap());
- return FieldType(object->ptr());
+ DCHECK(object == None() || object == Any() || object.IsMap());
+ return FieldType(object.ptr());
}
bool FieldType::IsClass() const { return this->IsMap(); }
@@ -50,16 +50,16 @@
}
bool FieldType::NowStable() const {
- return !this->IsClass() || AsClass()->is_stable();
+ return !this->IsClass() || AsClass().is_stable();
}
bool FieldType::NowIs(FieldType other) const {
- if (other->IsAny()) return true;
+ if (other.IsAny()) return true;
if (IsNone()) return true;
- if (other->IsNone()) return false;
+ if (other.IsNone()) return false;
if (IsAny()) return false;
DCHECK(IsClass());
- DCHECK(other->IsClass());
+ DCHECK(other.IsClass());
return *this == other;
}
@@ -72,15 +72,15 @@
os << "None";
} else {
DCHECK(IsClass());
- os << "Class(" << reinterpret_cast<void*>(AsClass()->ptr()) << ")";
+ os << "Class(" << reinterpret_cast<void*>(AsClass().ptr()) << ")";
}
}
bool FieldType::NowContains(Object value) const {
if (*this == Any()) return true;
if (*this == None()) return false;
- if (!value->IsHeapObject()) return false;
- return HeapObject::cast(value)->map() == Map::cast(*this);
+ if (!value.IsHeapObject()) return false;
+ return HeapObject::cast(value).map() == Map::cast(*this);
}
} // namespace internal
diff --git a/src/objects/fixed-array-inl.h b/src/objects/fixed-array-inl.h
index 8ad9403..d8452e7 100644
--- a/src/objects/fixed-array-inl.h
+++ b/src/objects/fixed-array-inl.h
@@ -70,7 +70,7 @@
// Only read the map word once to avoid race with evacuator.
MapWord mw = map_word();
if (!mw.IsForwardingAddress()) {
- DCHECK(!IsInRange(mw.ToMap()->instance_type(), FIRST_FIXED_TYPED_ARRAY_TYPE,
+ DCHECK(!IsInRange(mw.ToMap().instance_type(), FIRST_FIXED_TYPED_ARRAY_TYPE,
LAST_FIXED_TYPED_ARRAY_TYPE));
}
#endif
@@ -78,7 +78,7 @@
return Smi::ToInt(value);
}
void FixedArrayBase::set_length(int value) {
- DCHECK(!IsInRange(map()->instance_type(), FIRST_FIXED_TYPED_ARRAY_TYPE,
+ DCHECK(!IsInRange(map().instance_type(), FIRST_FIXED_TYPED_ARRAY_TYPE,
LAST_FIXED_TYPED_ARRAY_TYPE));
WRITE_FIELD(*this, kLengthOffset, Smi::FromInt(value));
}
@@ -114,7 +114,7 @@
ObjectSlot current = GetFirstElementAddress();
for (int i = 0; i < length(); ++i, ++current) {
Object candidate = *current;
- if (!candidate->IsSmi() && candidate != the_hole) return false;
+ if (!candidate.IsSmi() && candidate != the_hole) return false;
}
return true;
}
@@ -125,11 +125,11 @@
}
Handle<Object> FixedArray::get(FixedArray array, int index, Isolate* isolate) {
- return handle(array->get(index), isolate);
+ return handle(array.get(index), isolate);
}
bool FixedArray::is_the_hole(Isolate* isolate, int index) {
- return get(index)->IsTheHole(isolate);
+ return get(index).IsTheHole(isolate);
}
void FixedArray::set(int index, Smi value) {
@@ -160,9 +160,9 @@
}
void FixedArray::NoWriteBarrierSet(FixedArray array, int index, Object value) {
- DCHECK_NE(array->map(), array->GetReadOnlyRoots().fixed_cow_array_map());
+ DCHECK_NE(array.map(), array.GetReadOnlyRoots().fixed_cow_array_map());
DCHECK_GE(index, 0);
- DCHECK_LT(index, array->length());
+ DCHECK_LT(index, array.length());
DCHECK(!ObjectInYoungGeneration(value));
RELAXED_WRITE_FIELD(array, kHeaderSize + index * kTaggedSize, value);
}
@@ -234,7 +234,7 @@
DisallowHeapAllocation no_gc;
ObjectSlot dst_slot(RawFieldOfElementAt(dst_index));
- ObjectSlot src_slot(src->RawFieldOfElementAt(src_index));
+ ObjectSlot src_slot(src.RawFieldOfElementAt(src_index));
isolate->heap()->CopyRange(*this, dst_slot, src_slot, len, mode);
}
@@ -245,7 +245,7 @@
DCHECK(search_mode == ALL_ENTRIES || out_insertion_index == nullptr);
int low = 0;
int high = array->number_of_entries() - 1;
- uint32_t hash = name->hash_field();
+ uint32_t hash = name.hash_field();
int limit = high;
DCHECK(low <= high);
@@ -253,7 +253,7 @@
while (low != high) {
int mid = low + (high - low) / 2;
Name mid_name = array->GetSortedKey(mid);
- uint32_t mid_hash = mid_name->hash_field();
+ uint32_t mid_hash = mid_name.hash_field();
if (mid_hash >= hash) {
high = mid;
@@ -265,7 +265,7 @@
for (; low <= limit; ++low) {
int sort_index = array->GetSortedKeyIndex(low);
Name entry = array->GetKey(sort_index);
- uint32_t current_hash = entry->hash_field();
+ uint32_t current_hash = entry.hash_field();
if (current_hash != hash) {
if (search_mode == ALL_ENTRIES && out_insertion_index != nullptr) {
*out_insertion_index = sort_index + (current_hash > hash ? 0 : 1);
@@ -292,12 +292,12 @@
int LinearSearch(T* array, Name name, int valid_entries,
int* out_insertion_index) {
if (search_mode == ALL_ENTRIES && out_insertion_index != nullptr) {
- uint32_t hash = name->hash_field();
+ uint32_t hash = name.hash_field();
int len = array->number_of_entries();
for (int number = 0; number < len; number++) {
int sorted_index = array->GetSortedKeyIndex(number);
Name entry = array->GetKey(sorted_index);
- uint32_t current_hash = entry->hash_field();
+ uint32_t current_hash = entry.hash_field();
if (current_hash > hash) {
*out_insertion_index = sorted_index;
return T::kNotFound;
@@ -358,10 +358,10 @@
Handle<Object> FixedDoubleArray::get(FixedDoubleArray array, int index,
Isolate* isolate) {
- if (array->is_the_hole(index)) {
+ if (array.is_the_hole(index)) {
return ReadOnlyRoots(isolate).the_hole_value_handle();
} else {
- return isolate->factory()->NewNumber(array->get_scalar(index));
+ return isolate->factory()->NewNumber(array.get_scalar(index));
}
}
@@ -449,7 +449,7 @@
DisallowHeapAllocation no_gc;
MaybeObjectSlot dst_slot(data_start() + dst_index);
- MaybeObjectSlot src_slot(src->data_start() + src_index);
+ MaybeObjectSlot src_slot(src.data_start() + src_index);
isolate->heap()->CopyRange(*this, dst_slot, src_slot, len, mode);
}
@@ -479,14 +479,14 @@
DisallowHeapAllocation no_gc;
MaybeObjectSlot dst_slot(data_start() + dst_index);
- MaybeObjectSlot src_slot(src->data_start() + src_index);
+ MaybeObjectSlot src_slot(src.data_start() + src_index);
isolate->heap()->CopyRange(*this, dst_slot, src_slot, len, mode);
}
HeapObject WeakArrayList::Iterator::Next() {
if (!array_.is_null()) {
- while (index_ < array_->length()) {
- MaybeObject item = array_->Get(index_++);
+ while (index_ < array_.length()) {
+ MaybeObject item = array_.Get(index_++);
DCHECK(item->IsWeakOrCleared());
if (!item->IsCleared()) return item->GetHeapObjectAssumeWeak();
}
@@ -496,16 +496,16 @@
}
int ArrayList::Length() const {
- if (FixedArray::cast(*this)->length() == 0) return 0;
- return Smi::ToInt(FixedArray::cast(*this)->get(kLengthIndex));
+ if (FixedArray::cast(*this).length() == 0) return 0;
+ return Smi::ToInt(FixedArray::cast(*this).get(kLengthIndex));
}
void ArrayList::SetLength(int length) {
- return FixedArray::cast(*this)->set(kLengthIndex, Smi::FromInt(length));
+ return FixedArray::cast(*this).set(kLengthIndex, Smi::FromInt(length));
}
Object ArrayList::Get(int index) const {
- return FixedArray::cast(*this)->get(kFirstIndex + index);
+ return FixedArray::cast(*this).get(kFirstIndex + index);
}
ObjectSlot ArrayList::Slot(int index) {
@@ -513,13 +513,13 @@
}
void ArrayList::Set(int index, Object obj, WriteBarrierMode mode) {
- FixedArray::cast(*this)->set(kFirstIndex + index, obj, mode);
+ FixedArray::cast(*this).set(kFirstIndex + index, obj, mode);
}
void ArrayList::Clear(int index, Object undefined) {
- DCHECK(undefined->IsUndefined());
- FixedArray::cast(*this)->set(kFirstIndex + index, undefined,
- SKIP_WRITE_BARRIER);
+ DCHECK(undefined.IsUndefined());
+ FixedArray::cast(*this).set(kFirstIndex + index, undefined,
+ SKIP_WRITE_BARRIER);
}
int ByteArray::Size() { return RoundUp(length() + kHeaderSize, kTaggedSize); }
@@ -621,7 +621,7 @@
void* FixedTypedArrayBase::DataPtr() {
return reinterpret_cast<void*>(
- base_pointer()->ptr() + reinterpret_cast<intptr_t>(external_pointer()));
+ base_pointer().ptr() + reinterpret_cast<intptr_t>(external_pointer()));
}
int FixedTypedArrayBase::ElementSize(InstanceType type) {
@@ -646,7 +646,7 @@
}
int FixedTypedArrayBase::DataSize() const {
- return DataSize(map()->instance_type());
+ return DataSize(map().instance_type());
}
int FixedTypedArrayBase::size() const {
@@ -860,43 +860,43 @@
return from(Smi::ToInt(*value));
}
DCHECK(value->IsHeapNumber());
- return from(HeapNumber::cast(*value)->value());
+ return from(HeapNumber::cast(*value).value());
}
template <>
inline int64_t FixedTypedArray<BigInt64ArrayTraits>::FromHandle(
Handle<Object> value, bool* lossless) {
DCHECK(value->IsBigInt());
- return BigInt::cast(*value)->AsInt64(lossless);
+ return BigInt::cast(*value).AsInt64(lossless);
}
template <>
inline uint64_t FixedTypedArray<BigUint64ArrayTraits>::FromHandle(
Handle<Object> value, bool* lossless) {
DCHECK(value->IsBigInt());
- return BigInt::cast(*value)->AsUint64(lossless);
+ return BigInt::cast(*value).AsUint64(lossless);
}
template <class Traits>
Handle<Object> FixedTypedArray<Traits>::get(Isolate* isolate,
FixedTypedArray<Traits> array,
int index) {
- return Traits::ToHandle(isolate, array->get_scalar(index));
+ return Traits::ToHandle(isolate, array.get_scalar(index));
}
template <class Traits>
void FixedTypedArray<Traits>::SetValue(uint32_t index, Object value) {
ElementType cast_value = Traits::defaultValue();
- if (value->IsSmi()) {
+ if (value.IsSmi()) {
int int_value = Smi::ToInt(value);
cast_value = from(int_value);
- } else if (value->IsHeapNumber()) {
- double double_value = HeapNumber::cast(value)->value();
+ } else if (value.IsHeapNumber()) {
+ double double_value = HeapNumber::cast(value).value();
cast_value = from(double_value);
} else {
// Clamp undefined to the default value. All other types have been
// converted to a number type further up in the call chain.
- DCHECK(value->IsUndefined());
+ DCHECK(value.IsUndefined());
}
set(index, cast_value);
}
@@ -904,15 +904,15 @@
template <>
inline void FixedTypedArray<BigInt64ArrayTraits>::SetValue(uint32_t index,
Object value) {
- DCHECK(value->IsBigInt());
- set(index, BigInt::cast(value)->AsInt64());
+ DCHECK(value.IsBigInt());
+ set(index, BigInt::cast(value).AsInt64());
}
template <>
inline void FixedTypedArray<BigUint64ArrayTraits>::SetValue(uint32_t index,
Object value) {
- DCHECK(value->IsBigInt());
- set(index, BigInt::cast(value)->AsUint64());
+ DCHECK(value.IsBigInt());
+ set(index, BigInt::cast(value).AsUint64());
}
Handle<Object> Uint8ArrayTraits::ToHandle(Isolate* isolate, uint8_t scalar) {
@@ -969,7 +969,7 @@
template <class Traits>
FixedTypedArray<Traits>::FixedTypedArray(Address ptr)
: FixedTypedArrayBase(ptr) {
- DCHECK(IsHeapObject() && map()->instance_type() == Traits::kInstanceType);
+ DCHECK(IsHeapObject() && map().instance_type() == Traits::kInstanceType);
}
template <class Traits>
@@ -978,15 +978,15 @@
}
int TemplateList::length() const {
- return Smi::ToInt(FixedArray::cast(*this)->get(kLengthIndex));
+ return Smi::ToInt(FixedArray::cast(*this).get(kLengthIndex));
}
Object TemplateList::get(int index) const {
- return FixedArray::cast(*this)->get(kFirstElementIndex + index);
+ return FixedArray::cast(*this).get(kFirstElementIndex + index);
}
void TemplateList::set(int index, Object value) {
- FixedArray::cast(*this)->set(kFirstElementIndex + index, value);
+ FixedArray::cast(*this).set(kFirstElementIndex + index, value);
}
} // namespace internal
diff --git a/src/objects/foreign-inl.h b/src/objects/foreign-inl.h
index 4243fba..e160427 100644
--- a/src/objects/foreign-inl.h
+++ b/src/objects/foreign-inl.h
@@ -23,7 +23,7 @@
// static
bool Foreign::IsNormalized(Object value) {
if (value == Smi::kZero) return true;
- return Foreign::cast(value)->foreign_address() != kNullAddress;
+ return Foreign::cast(value).foreign_address() != kNullAddress;
}
Address Foreign::foreign_address() {
diff --git a/src/objects/frame-array-inl.h b/src/objects/frame-array-inl.h
index 78d08da..5b342c6 100644
--- a/src/objects/frame-array-inl.h
+++ b/src/objects/frame-array-inl.h
@@ -33,17 +33,17 @@
#undef DEFINE_FRAME_ARRAY_ACCESSORS
bool FrameArray::IsWasmFrame(int frame_ix) const {
- const int flags = Flags(frame_ix)->value();
+ const int flags = Flags(frame_ix).value();
return (flags & kIsWasmFrame) != 0;
}
bool FrameArray::IsWasmInterpretedFrame(int frame_ix) const {
- const int flags = Flags(frame_ix)->value();
+ const int flags = Flags(frame_ix).value();
return (flags & kIsWasmInterpretedFrame) != 0;
}
bool FrameArray::IsAsmJsWasmFrame(int frame_ix) const {
- const int flags = Flags(frame_ix)->value();
+ const int flags = Flags(frame_ix).value();
return (flags & kIsAsmJsWasmFrame) != 0;
}
diff --git a/src/objects/free-space-inl.h b/src/objects/free-space-inl.h
index 6f87eb2..32988da 100644
--- a/src/objects/free-space-inl.h
+++ b/src/objects/free-space-inl.h
@@ -30,7 +30,7 @@
Heap* heap = GetHeapFromWritableObject(*this);
Object free_space_map =
Isolate::FromHeap(heap)->root(RootIndex::kFreeSpaceMap);
- DCHECK_IMPLIES(!map_slot().contains_value(free_space_map->ptr()),
+ DCHECK_IMPLIES(!map_slot().contains_value(free_space_map.ptr()),
!heap->deserialization_complete() &&
map_slot().contains_value(kNullAddress));
#endif
@@ -43,7 +43,7 @@
Heap* heap = GetHeapFromWritableObject(*this);
Object free_space_map =
Isolate::FromHeap(heap)->root(RootIndex::kFreeSpaceMap);
- DCHECK_IMPLIES(!map_slot().contains_value(free_space_map->ptr()),
+ DCHECK_IMPLIES(!map_slot().contains_value(free_space_map.ptr()),
!heap->deserialization_complete() &&
map_slot().contains_value(kNullAddress));
#endif
@@ -53,7 +53,7 @@
FreeSpace FreeSpace::cast(HeapObject o) {
SLOW_DCHECK(!GetHeapFromWritableObject(o)->deserialization_complete() ||
- o->IsFreeSpace());
+ o.IsFreeSpace());
return bit_cast<FreeSpace>(o);
}
diff --git a/src/objects/hash-table-inl.h b/src/objects/hash-table-inl.h
index d65d9de..3a279ee 100644
--- a/src/objects/hash-table-inl.h
+++ b/src/objects/hash-table-inl.h
@@ -193,7 +193,7 @@
bool ObjectHashSet::Has(Isolate* isolate, Handle<Object> key) {
Object hash = key->GetHash();
- if (!hash->IsSmi()) return false;
+ if (!hash.IsSmi()) return false;
return FindEntry(ReadOnlyRoots(isolate), key, Smi::ToInt(hash)) != kNotFound;
}
@@ -207,7 +207,7 @@
uint32_t ObjectHashTableShape::HashForObject(ReadOnlyRoots roots,
Object other) {
- return Smi::ToInt(other->GetHash());
+ return Smi::ToInt(other.GetHash());
}
} // namespace internal
diff --git a/src/objects/intl-objects.cc b/src/objects/intl-objects.cc
index 1b66c8e..0b75d1c 100644
--- a/src/objects/intl-objects.cc
+++ b/src/objects/intl-objects.cc
@@ -154,7 +154,7 @@
inline int FindFirstUpperOrNonAscii(String s, int length) {
for (int index = 0; index < length; ++index) {
- uint16_t ch = s->Get(index);
+ uint16_t ch = s.Get(index);
if (V8_UNLIKELY(IsASCIIUpper(ch) || ch & ~0x7F)) {
return index;
}
@@ -253,16 +253,16 @@
// one-byte sliced string with a two-byte parent string.
// Called from TF builtins.
String Intl::ConvertOneByteToLower(String src, String dst) {
- DCHECK_EQ(src->length(), dst->length());
- DCHECK(src->IsOneByteRepresentation());
- DCHECK(src->IsFlat());
- DCHECK(dst->IsSeqOneByteString());
+ DCHECK_EQ(src.length(), dst.length());
+ DCHECK(src.IsOneByteRepresentation());
+ DCHECK(src.IsFlat());
+ DCHECK(dst.IsSeqOneByteString());
DisallowHeapAllocation no_gc;
- const int length = src->length();
- String::FlatContent src_flat = src->GetFlatContent(no_gc);
- uint8_t* dst_data = SeqOneByteString::cast(dst)->GetChars(no_gc);
+ const int length = src.length();
+ String::FlatContent src_flat = src.GetFlatContent(no_gc);
+ uint8_t* dst_data = SeqOneByteString::cast(dst).GetChars(no_gc);
if (src_flat.IsOneByte()) {
const uint8_t* src_data = src_flat.ToOneByteVector().begin();
@@ -967,7 +967,7 @@
Handle<JSFunction> constructor = Handle<JSFunction>(
JSFunction::cast(
- isolate->context()->native_context()->intl_collator_function()),
+ isolate->context().native_context().intl_collator_function()),
isolate);
Handle<JSCollator> collator;
@@ -977,10 +977,9 @@
if (can_cache) {
isolate->set_icu_object_in_cache(
Isolate::ICUObjectCacheType::kDefaultCollator,
- std::static_pointer_cast<icu::UMemory>(
- collator->icu_collator()->get()));
+ std::static_pointer_cast<icu::UMemory>(collator->icu_collator().get()));
}
- icu::Collator* icu_collator = collator->icu_collator()->raw();
+ icu::Collator* icu_collator = collator->icu_collator().raw();
return Intl::CompareStrings(isolate, *icu_collator, string1, string2);
}
@@ -1052,7 +1051,7 @@
Handle<JSFunction> constructor = Handle<JSFunction>(
JSFunction::cast(
- isolate->context()->native_context()->intl_number_format_function()),
+ isolate->context().native_context().intl_number_format_function()),
isolate);
Handle<JSNumberFormat> number_format;
// 2. Let numberFormat be ? Construct(%NumberFormat%, « locales, options »).
@@ -1064,12 +1063,12 @@
isolate->set_icu_object_in_cache(
Isolate::ICUObjectCacheType::kDefaultNumberFormat,
std::static_pointer_cast<icu::UMemory>(
- number_format->icu_number_formatter()->get()));
+ number_format->icu_number_formatter().get()));
}
// Return FormatNumber(numberFormat, x).
icu::number::LocalizedNumberFormatter* icu_number_format =
- number_format->icu_number_formatter()->raw();
+ number_format->icu_number_formatter().raw();
return JSNumberFormat::FormatNumeric(isolate, *icu_number_format,
numeric_obj);
}
diff --git a/src/objects/js-array-buffer-inl.h b/src/objects/js-array-buffer-inl.h
index ceb8fbe..6c932ef 100644
--- a/src/objects/js-array-buffer-inl.h
+++ b/src/objects/js-array-buffer-inl.h
@@ -127,7 +127,7 @@
ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
bool JSArrayBufferView::WasDetached() const {
- return JSArrayBuffer::cast(buffer())->was_detached();
+ return JSArrayBuffer::cast(buffer()).was_detached();
}
size_t JSTypedArray::length() const { return ReadField<size_t>(kLengthOffset); }
@@ -141,7 +141,7 @@
// Checking that buffer()->backing_store() is not nullptr is not sufficient;
// it will be nullptr when byte_length is 0 as well.
FixedTypedArrayBase fta = FixedTypedArrayBase::cast(elements());
- return fta->base_pointer()->ptr() == fta.ptr();
+ return fta.base_pointer().ptr() == fta.ptr();
}
// static
diff --git a/src/objects/js-array-buffer.cc b/src/objects/js-array-buffer.cc
index 22a5111..1f73767 100644
--- a/src/objects/js-array-buffer.cc
+++ b/src/objects/js-array-buffer.cc
@@ -270,7 +270,7 @@
}
ExternalArrayType JSTypedArray::type() {
- switch (elements()->map()->instance_type()) {
+ switch (elements().map().instance_type()) {
#define INSTANCE_TYPE_TO_ARRAY_TYPE(Type, type, TYPE, ctype) \
case FIXED_##TYPE##_ARRAY_TYPE: \
return kExternal##Type##Array;
@@ -284,7 +284,7 @@
}
size_t JSTypedArray::element_size() {
- switch (elements()->map()->instance_type()) {
+ switch (elements().map().instance_type()) {
#define INSTANCE_TYPE_TO_ELEMENT_SIZE(Type, type, TYPE, ctype) \
case FIXED_##TYPE##_ARRAY_TYPE: \
return sizeof(ctype);
diff --git a/src/objects/js-array-inl.h b/src/objects/js-array-inl.h
index 31c8735..f11d907 100644
--- a/src/objects/js-array-inl.h
+++ b/src/objects/js-array-inl.h
@@ -33,7 +33,7 @@
}
bool JSArray::AllowsSetLength() {
- bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
+ bool result = elements().IsFixedArray() || elements().IsFixedDoubleArray();
DCHECK(result == !HasFixedTypedArrayElements());
return result;
}
@@ -55,7 +55,7 @@
}
bool JSArray::HasArrayPrototype(Isolate* isolate) {
- return map()->prototype() == *isolate->initial_array_prototype();
+ return map().prototype() == *isolate->initial_array_prototype();
}
ACCESSORS(JSArrayIterator, iterated_object, Object, kIteratedObjectOffset)
@@ -63,7 +63,7 @@
IterationKind JSArrayIterator::kind() const {
return static_cast<IterationKind>(
- Smi::cast(READ_FIELD(*this, kKindOffset))->value());
+ Smi::cast(READ_FIELD(*this, kKindOffset)).value());
}
void JSArrayIterator::set_kind(IterationKind kind) {
diff --git a/src/objects/js-break-iterator.cc b/src/objects/js-break-iterator.cc
index 4516b34..4879fb4 100644
--- a/src/objects/js-break-iterator.cc
+++ b/src/objects/js-break-iterator.cc
@@ -124,7 +124,7 @@
Isolate* isolate, Handle<JSV8BreakIterator> break_iterator_holder,
Handle<String> text) {
icu::BreakIterator* break_iterator =
- break_iterator_holder->break_iterator()->raw();
+ break_iterator_holder->break_iterator().raw();
CHECK_NOT_NULL(break_iterator);
Managed<icu::UnicodeString> unicode_string =
Intl::SetTextToBreakIterator(isolate, text, break_iterator);
@@ -149,24 +149,24 @@
Handle<Object> JSV8BreakIterator::Current(
Isolate* isolate, Handle<JSV8BreakIterator> break_iterator) {
return isolate->factory()->NewNumberFromInt(
- break_iterator->break_iterator()->raw()->current());
+ break_iterator->break_iterator().raw()->current());
}
Handle<Object> JSV8BreakIterator::First(
Isolate* isolate, Handle<JSV8BreakIterator> break_iterator) {
return isolate->factory()->NewNumberFromInt(
- break_iterator->break_iterator()->raw()->first());
+ break_iterator->break_iterator().raw()->first());
}
Handle<Object> JSV8BreakIterator::Next(
Isolate* isolate, Handle<JSV8BreakIterator> break_iterator) {
return isolate->factory()->NewNumberFromInt(
- break_iterator->break_iterator()->raw()->next());
+ break_iterator->break_iterator().raw()->next());
}
String JSV8BreakIterator::BreakType(Isolate* isolate,
Handle<JSV8BreakIterator> break_iterator) {
- int32_t status = break_iterator->break_iterator()->raw()->getRuleStatus();
+ int32_t status = break_iterator->break_iterator().raw()->getRuleStatus();
// Keep return values in sync with JavaScript BreakType enum.
if (status >= UBRK_WORD_NONE && status < UBRK_WORD_NONE_LIMIT) {
return ReadOnlyRoots(isolate).none_string();
diff --git a/src/objects/js-collator.cc b/src/objects/js-collator.cc
index fac5c32..fd4ddcc 100644
--- a/src/objects/js-collator.cc
+++ b/src/objects/js-collator.cc
@@ -68,7 +68,7 @@
Handle<JSObject> options =
isolate->factory()->NewJSObject(isolate->object_function());
- icu::Collator* icu_collator = collator->icu_collator()->raw();
+ icu::Collator* icu_collator = collator->icu_collator().raw();
CHECK_NOT_NULL(icu_collator);
UErrorCode status = U_ZERO_ERROR;
diff --git a/src/objects/js-collection-inl.h b/src/objects/js-collection-inl.h
index 78b6cc5..7983ddc 100644
--- a/src/objects/js-collection-inl.h
+++ b/src/objects/js-collection-inl.h
@@ -63,8 +63,8 @@
Object JSMapIterator::CurrentValue() {
OrderedHashMap table = OrderedHashMap::cast(this->table());
int index = Smi::ToInt(this->index());
- Object value = table->ValueAt(index);
- DCHECK(!value->IsTheHole());
+ Object value = table.ValueAt(index);
+ DCHECK(!value.IsTheHole());
return value;
}
diff --git a/src/objects/js-date-time-format.cc b/src/objects/js-date-time-format.cc
index c885f81..8730e0a 100644
--- a/src/objects/js-date-time-format.cc
+++ b/src/objects/js-date-time-format.cc
@@ -360,8 +360,8 @@
Handle<Object> resolved_obj;
CHECK(!date_time_format->icu_locale().is_null());
- CHECK_NOT_NULL(date_time_format->icu_locale()->raw());
- icu::Locale* icu_locale = date_time_format->icu_locale()->raw();
+ CHECK_NOT_NULL(date_time_format->icu_locale().raw());
+ icu::Locale* icu_locale = date_time_format->icu_locale().raw();
Maybe<std::string> maybe_locale_str = Intl::ToLanguageTag(*icu_locale);
MAYBE_RETURN(maybe_locale_str, MaybeHandle<JSObject>());
std::string locale_str = maybe_locale_str.FromJust();
@@ -369,7 +369,7 @@
factory->NewStringFromAsciiChecked(locale_str.c_str());
icu::SimpleDateFormat* icu_simple_date_format =
- date_time_format->icu_simple_date_format()->raw();
+ date_time_format->icu_simple_date_format().raw();
// calendar
const icu::Calendar* calendar = icu_simple_date_format->getCalendar();
// getType() returns legacy calendar type name instead of LDML/BCP47 calendar
@@ -580,7 +580,7 @@
}
// 5. Return FormatDateTime(dtf, x).
icu::SimpleDateFormat* format =
- date_time_format->icu_simple_date_format()->raw();
+ date_time_format->icu_simple_date_format().raw();
return FormatDateTime(isolate, *format, x);
}
@@ -612,7 +612,7 @@
String);
}
- double const x = Handle<JSDate>::cast(date)->value()->Number();
+ double const x = Handle<JSDate>::cast(date)->value().Number();
// 2. If x is NaN, return "Invalid Date"
if (std::isnan(x)) {
return factory->Invalid_Date_string();
@@ -640,9 +640,8 @@
// 4. Let dateFormat be ? Construct(%DateTimeFormat%, « locales, options »).
Handle<JSFunction> constructor = Handle<JSFunction>(
- JSFunction::cast(isolate->context()
- ->native_context()
- ->intl_date_time_format_function()),
+ JSFunction::cast(
+ isolate->context().native_context().intl_date_time_format_function()),
isolate);
Handle<JSObject> obj;
ASSIGN_RETURN_ON_EXCEPTION(
@@ -659,11 +658,11 @@
if (can_cache) {
isolate->set_icu_object_in_cache(
cache_type, std::static_pointer_cast<icu::UMemory>(
- date_time_format->icu_simple_date_format()->get()));
+ date_time_format->icu_simple_date_format().get()));
}
// 5. Return FormatDateTime(dateFormat, x).
icu::SimpleDateFormat* format =
- date_time_format->icu_simple_date_format()->raw();
+ date_time_format->icu_simple_date_format().raw();
return FormatDateTime(isolate, *format, x);
}
@@ -779,7 +778,7 @@
MaybeHandle<JSDateTimeFormat> JSDateTimeFormat::UnwrapDateTimeFormat(
Isolate* isolate, Handle<JSReceiver> format_holder) {
Handle<Context> native_context =
- Handle<Context>(isolate->context()->native_context(), isolate);
+ Handle<Context>(isolate->context().native_context(), isolate);
Handle<JSFunction> constructor = Handle<JSFunction>(
JSFunction::cast(native_context->intl_date_time_format_function()),
isolate);
@@ -975,16 +974,16 @@
Isolate* isolate, Handle<JSDateTimeFormat> date_time_format) {
Managed<icu::DateIntervalFormat> managed_format =
date_time_format->icu_date_interval_format();
- if (managed_format->get()) {
- return managed_format->raw();
+ if (managed_format.get()) {
+ return managed_format.raw();
}
icu::SimpleDateFormat* icu_simple_date_format =
- date_time_format->icu_simple_date_format()->raw();
+ date_time_format->icu_simple_date_format().raw();
UErrorCode status = U_ZERO_ERROR;
std::unique_ptr<icu::DateIntervalFormat> date_interval_format(
icu::DateIntervalFormat::createInstance(
SkeletonFromDateFormat(*icu_simple_date_format),
- *(date_time_format->icu_locale()->raw()), status));
+ *(date_time_format->icu_locale().raw()), status));
if (U_FAILURE(status)) {
return nullptr;
}
@@ -993,7 +992,7 @@
Managed<icu::DateIntervalFormat>::FromUniquePtr(
isolate, 0, std::move(date_interval_format));
date_time_format->set_icu_date_interval_format(*managed_interval_format);
- return (*managed_interval_format)->raw();
+ return (*managed_interval_format).raw();
}
Intl::HourCycle HourCycleFromPattern(const icu::UnicodeString pattern) {
@@ -1563,7 +1562,7 @@
double date_value) {
Factory* factory = isolate->factory();
icu::SimpleDateFormat* format =
- date_time_format->icu_simple_date_format()->raw();
+ date_time_format->icu_simple_date_format().raw();
CHECK_NOT_NULL(format);
icu::UnicodeString formatted;
diff --git a/src/objects/js-list-format.cc b/src/objects/js-list-format.cc
index 7f68bb2..bc2e5ce9 100644
--- a/src/objects/js-list-format.cc
+++ b/src/objects/js-list-format.cc
@@ -307,7 +307,7 @@
MAYBE_RETURN(maybe_array, Handle<T>());
std::vector<icu::UnicodeString> array = maybe_array.FromJust();
- icu::ListFormatter* formatter = format->icu_formatter()->raw();
+ icu::ListFormatter* formatter = format->icu_formatter().raw();
CHECK_NOT_NULL(formatter);
UErrorCode status = U_ZERO_ERROR;
diff --git a/src/objects/js-locale.cc b/src/objects/js-locale.cc
index c1e6f20..c091edc 100644
--- a/src/objects/js-locale.cc
+++ b/src/objects/js-locale.cc
@@ -94,7 +94,7 @@
Handle<Object> UnicodeKeywordValue(Isolate* isolate, Handle<JSLocale> locale,
const char* key) {
- icu::Locale* icu_locale = locale->icu_locale()->raw();
+ icu::Locale* icu_locale = locale->icu_locale().raw();
UErrorCode status = U_ZERO_ERROR;
std::string value =
icu_locale->getUnicodeKeywordValue<std::string>(key, status);
@@ -383,28 +383,28 @@
Handle<Object> JSLocale::Language(Isolate* isolate, Handle<JSLocale> locale) {
Factory* factory = isolate->factory();
- const char* language = locale->icu_locale()->raw()->getLanguage();
+ const char* language = locale->icu_locale().raw()->getLanguage();
if (strlen(language) == 0) return factory->undefined_value();
return factory->NewStringFromAsciiChecked(language);
}
Handle<Object> JSLocale::Script(Isolate* isolate, Handle<JSLocale> locale) {
Factory* factory = isolate->factory();
- const char* script = locale->icu_locale()->raw()->getScript();
+ const char* script = locale->icu_locale().raw()->getScript();
if (strlen(script) == 0) return factory->undefined_value();
return factory->NewStringFromAsciiChecked(script);
}
Handle<Object> JSLocale::Region(Isolate* isolate, Handle<JSLocale> locale) {
Factory* factory = isolate->factory();
- const char* region = locale->icu_locale()->raw()->getCountry();
+ const char* region = locale->icu_locale().raw()->getCountry();
if (strlen(region) == 0) return factory->undefined_value();
return factory->NewStringFromAsciiChecked(region);
}
Handle<String> JSLocale::BaseName(Isolate* isolate, Handle<JSLocale> locale) {
icu::Locale icu_locale =
- icu::Locale::createFromName(locale->icu_locale()->raw()->getBaseName());
+ icu::Locale::createFromName(locale->icu_locale().raw()->getBaseName());
std::string base_name = Intl::ToLanguageTag(icu_locale).FromJust();
return isolate->factory()->NewStringFromAsciiChecked(base_name.c_str());
}
@@ -427,7 +427,7 @@
Handle<Object> JSLocale::Numeric(Isolate* isolate, Handle<JSLocale> locale) {
Factory* factory = isolate->factory();
- icu::Locale* icu_locale = locale->icu_locale()->raw();
+ icu::Locale* icu_locale = locale->icu_locale().raw();
UErrorCode status = U_ZERO_ERROR;
std::string numeric =
icu_locale->getUnicodeKeywordValue<std::string>("kn", status);
@@ -440,7 +440,7 @@
}
std::string JSLocale::ToString(Handle<JSLocale> locale) {
- icu::Locale* icu_locale = locale->icu_locale()->raw();
+ icu::Locale* icu_locale = locale->icu_locale().raw();
return Intl::ToLanguageTag(*icu_locale).FromJust();
}
diff --git a/src/objects/js-number-format.cc b/src/objects/js-number-format.cc
index e978da11..eb34d79 100644
--- a/src/objects/js-number-format.cc
+++ b/src/objects/js-number-format.cc
@@ -638,7 +638,7 @@
UErrorCode status = U_ZERO_ERROR;
icu::number::LocalizedNumberFormatter* icu_number_formatter =
- number_format->icu_number_formatter()->raw();
+ number_format->icu_number_formatter().raw();
icu::UnicodeString skeleton = icu_number_formatter->toSkeleton(status);
CHECK(U_SUCCESS(status));
@@ -803,7 +803,7 @@
// old code copy from NumberFormat::Unwrap that has no spec comment and
// compiled but fail unit tests.
Handle<Context> native_context =
- Handle<Context>(isolate->context()->native_context(), isolate);
+ Handle<Context>(isolate->context().native_context(), isolate);
Handle<JSFunction> constructor = Handle<JSFunction>(
JSFunction::cast(native_context->intl_number_format_function()), isolate);
Handle<Object> object;
@@ -1471,7 +1471,7 @@
CHECK(numeric_obj->IsNumeric());
Factory* factory = isolate->factory();
icu::number::LocalizedNumberFormatter* fmt =
- number_format->icu_number_formatter()->raw();
+ number_format->icu_number_formatter().raw();
CHECK_NOT_NULL(fmt);
icu::FieldPositionIterator fp_iter;
diff --git a/src/objects/js-objects-inl.h b/src/objects/js-objects-inl.h
index d0cb98e..7ad0a1b 100644
--- a/src/objects/js-objects-inl.h
+++ b/src/objects/js-objects-inl.h
@@ -109,21 +109,21 @@
bool JSObject::PrototypeHasNoElements(Isolate* isolate, JSObject object) {
DisallowHeapAllocation no_gc;
- HeapObject prototype = HeapObject::cast(object->map()->prototype());
+ HeapObject prototype = HeapObject::cast(object.map().prototype());
ReadOnlyRoots roots(isolate);
HeapObject null = roots.null_value();
FixedArrayBase empty_fixed_array = roots.empty_fixed_array();
FixedArrayBase empty_slow_element_dictionary =
roots.empty_slow_element_dictionary();
while (prototype != null) {
- Map map = prototype->map();
- if (map->IsCustomElementsReceiverMap()) return false;
- FixedArrayBase elements = JSObject::cast(prototype)->elements();
+ Map map = prototype.map();
+ if (map.IsCustomElementsReceiverMap()) return false;
+ FixedArrayBase elements = JSObject::cast(prototype).elements();
if (elements != empty_fixed_array &&
elements != empty_slow_element_dictionary) {
return false;
}
- prototype = HeapObject::cast(map->prototype());
+ prototype = HeapObject::cast(map.prototype());
}
return true;
}
@@ -137,7 +137,7 @@
void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
JSObject::ValidateElements(*object);
- ElementsKind elements_kind = object->map()->elements_kind();
+ ElementsKind elements_kind = object->map().elements_kind();
if (!IsObjectElementsKind(elements_kind)) {
if (IsHoleyElementsKind(elements_kind)) {
TransitionElementsKind(object, HOLEY_ELEMENTS);
@@ -167,8 +167,8 @@
if (current == the_hole) {
is_holey = true;
target_kind = GetHoleyElementsKind(target_kind);
- } else if (!current->IsSmi()) {
- if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
+ } else if (!current.IsSmi()) {
+ if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current.IsNumber()) {
if (IsSmiElementsKind(target_kind)) {
if (is_holey) {
target_kind = HOLEY_DOUBLE_ELEMENTS;
@@ -226,13 +226,13 @@
void JSObject::SetMapAndElements(Handle<JSObject> object, Handle<Map> new_map,
Handle<FixedArrayBase> value) {
JSObject::MigrateToMap(object, new_map);
- DCHECK((object->map()->has_fast_smi_or_object_elements() ||
+ DCHECK((object->map().has_fast_smi_or_object_elements() ||
(*value == object->GetReadOnlyRoots().empty_fixed_array()) ||
- object->map()->has_fast_string_wrapper_elements()) ==
+ object->map().has_fast_string_wrapper_elements()) ==
(value->map() == object->GetReadOnlyRoots().fixed_array_map() ||
value->map() == object->GetReadOnlyRoots().fixed_cow_array_map()));
DCHECK((*value == object->GetReadOnlyRoots().empty_fixed_array()) ||
- (object->map()->has_fast_double_elements() ==
+ (object->map().has_fast_double_elements() ==
value->IsFixedDoubleArray()));
object->set_elements(*value);
}
@@ -243,16 +243,16 @@
}
void JSObject::initialize_elements() {
- FixedArrayBase elements = map()->GetInitialElements();
+ FixedArrayBase elements = map().GetInitialElements();
WRITE_FIELD(*this, kElementsOffset, elements);
}
InterceptorInfo JSObject::GetIndexedInterceptor() {
- return map()->GetIndexedInterceptor();
+ return map().GetIndexedInterceptor();
}
InterceptorInfo JSObject::GetNamedInterceptor() {
- return map()->GetNamedInterceptor();
+ return map().GetNamedInterceptor();
}
int JSObject::GetHeaderSize() const { return GetHeaderSize(map()); }
@@ -261,10 +261,10 @@
// Check for the most common kind of JavaScript object before
// falling into the generic switch. This speeds up the internal
// field operations considerably on average.
- InstanceType instance_type = map->instance_type();
+ InstanceType instance_type = map.instance_type();
return instance_type == JS_OBJECT_TYPE
? JSObject::kHeaderSize
- : GetHeaderSize(instance_type, map->has_prototype_slot());
+ : GetHeaderSize(instance_type, map.has_prototype_slot());
}
// static
@@ -279,7 +279,7 @@
// static
int JSObject::GetEmbedderFieldCount(const Map map) {
- int instance_size = map->instance_size();
+ int instance_size = map.instance_size();
if (instance_size == kVariableSizeSentinel) return 0;
// Embedder fields are located after the object header, whereas in-object
// properties are located at the end of the object. We don't have to round up
@@ -288,7 +288,7 @@
// kSystemPointerSize) anyway.
return (((instance_size - GetEmbedderFieldsStartOffset(map)) >>
kTaggedSizeLog2) -
- map->GetInObjectProperties()) /
+ map.GetInObjectProperties()) /
kEmbedderDataSlotSizeInTaggedSlots;
}
@@ -316,7 +316,7 @@
bool JSObject::IsUnboxedDoubleField(FieldIndex index) {
if (!FLAG_unbox_double_fields) return false;
- return map()->IsUnboxedDoubleField(index);
+ return map().IsUnboxedDoubleField(index);
}
// Access fast-case object properties at index. The use of these routines
@@ -327,7 +327,7 @@
if (index.is_inobject()) {
return READ_FIELD(*this, index.offset());
} else {
- return property_array()->get(index.outobject_array_index());
+ return property_array().get(index.outobject_array_index());
}
}
@@ -355,7 +355,7 @@
RawFastInobjectPropertyAtPut(index, value, mode);
} else {
DCHECK_EQ(UPDATE_WRITE_BARRIER, mode);
- property_array()->set(index.outobject_array_index(), value);
+ property_array().set(index.outobject_array_index(), value);
}
}
@@ -371,10 +371,10 @@
void JSObject::FastPropertyAtPut(FieldIndex index, Object value) {
if (IsUnboxedDoubleField(index)) {
- DCHECK(value->IsMutableHeapNumber());
+ DCHECK(value.IsMutableHeapNumber());
// Ensure that all bits of the double value are preserved.
RawFastDoublePropertyAsBitsAtPut(
- index, MutableHeapNumber::cast(value)->value_as_bits());
+ index, MutableHeapNumber::cast(value).value_as_bits());
} else {
RawFastPropertyAtPut(index, value);
}
@@ -388,7 +388,7 @@
FieldIndex index = FieldIndex::ForDescriptor(map(), descriptor);
if (details.representation().IsDouble()) {
// Nothing more to be done.
- if (value->IsUninitialized()) {
+ if (value.IsUninitialized()) {
return;
}
// Manipulating the signaling NaN used for the hole and uninitialized
@@ -396,17 +396,17 @@
// will change its value on ia32 (the x87 stack is used to return values
// and stores to the stack silently clear the signalling bit).
uint64_t bits;
- if (value->IsSmi()) {
+ if (value.IsSmi()) {
bits = bit_cast<uint64_t>(static_cast<double>(Smi::ToInt(value)));
} else {
- DCHECK(value->IsHeapNumber());
- bits = HeapNumber::cast(value)->value_as_bits();
+ DCHECK(value.IsHeapNumber());
+ bits = HeapNumber::cast(value).value_as_bits();
}
if (IsUnboxedDoubleField(index)) {
RawFastDoublePropertyAsBitsAtPut(index, bits);
} else {
auto box = MutableHeapNumber::cast(RawFastPropertyAt(index));
- box->set_value_as_bits(bits);
+ box.set_value_as_bits(bits);
}
} else {
RawFastPropertyAtPut(index, value);
@@ -414,7 +414,7 @@
}
int JSObject::GetInObjectPropertyOffset(int index) {
- return map()->GetInObjectPropertyOffset(index);
+ return map().GetInObjectPropertyOffset(index);
}
Object JSObject::InObjectPropertyAt(int index) {
@@ -433,15 +433,15 @@
void JSObject::InitializeBody(Map map, int start_offset,
Object pre_allocated_value, Object filler_value) {
- DCHECK_IMPLIES(filler_value->IsHeapObject(),
+ DCHECK_IMPLIES(filler_value.IsHeapObject(),
!ObjectInYoungGeneration(filler_value));
- DCHECK_IMPLIES(pre_allocated_value->IsHeapObject(),
+ DCHECK_IMPLIES(pre_allocated_value.IsHeapObject(),
!ObjectInYoungGeneration(pre_allocated_value));
- int size = map->instance_size();
+ int size = map.instance_size();
int offset = start_offset;
if (filler_value != pre_allocated_value) {
int end_of_pre_allocated_offset =
- size - (map->UnusedPropertyFields() * kTaggedSize);
+ size - (map.UnusedPropertyFields() * kTaggedSize);
DCHECK_LE(kHeaderSize, end_of_pre_allocated_offset);
while (offset < end_of_pre_allocated_offset) {
WRITE_FIELD(*this, offset, pre_allocated_value);
@@ -472,12 +472,12 @@
FeedbackVector JSFunction::feedback_vector() const {
DCHECK(has_feedback_vector());
- return FeedbackVector::cast(raw_feedback_cell()->value());
+ return FeedbackVector::cast(raw_feedback_cell().value());
}
ClosureFeedbackCellArray JSFunction::closure_feedback_cell_array() const {
DCHECK(has_closure_feedback_cell_array());
- return ClosureFeedbackCellArray::cast(raw_feedback_cell()->value());
+ return ClosureFeedbackCellArray::cast(raw_feedback_cell().value());
}
// Code objects that are marked for deoptimization are not considered to be
@@ -487,69 +487,69 @@
// TODO(jupvfranco): rename this function. Maybe RunOptimizedCode,
// or IsValidOptimizedCode.
bool JSFunction::IsOptimized() {
- return is_compiled() && code()->kind() == Code::OPTIMIZED_FUNCTION &&
- !code()->marked_for_deoptimization();
+ return is_compiled() && code().kind() == Code::OPTIMIZED_FUNCTION &&
+ !code().marked_for_deoptimization();
}
bool JSFunction::HasOptimizedCode() {
return IsOptimized() ||
- (has_feedback_vector() && feedback_vector()->has_optimized_code() &&
- !feedback_vector()->optimized_code()->marked_for_deoptimization());
+ (has_feedback_vector() && feedback_vector().has_optimized_code() &&
+ !feedback_vector().optimized_code().marked_for_deoptimization());
}
bool JSFunction::HasOptimizationMarker() {
- return has_feedback_vector() && feedback_vector()->has_optimization_marker();
+ return has_feedback_vector() && feedback_vector().has_optimization_marker();
}
void JSFunction::ClearOptimizationMarker() {
DCHECK(has_feedback_vector());
- feedback_vector()->ClearOptimizationMarker();
+ feedback_vector().ClearOptimizationMarker();
}
// Optimized code marked for deoptimization will tier back down to running
// interpreted on its next activation, and already doesn't count as IsOptimized.
bool JSFunction::IsInterpreted() {
- return is_compiled() && (code()->is_interpreter_trampoline_builtin() ||
- (code()->kind() == Code::OPTIMIZED_FUNCTION &&
- code()->marked_for_deoptimization()));
+ return is_compiled() && (code().is_interpreter_trampoline_builtin() ||
+ (code().kind() == Code::OPTIMIZED_FUNCTION &&
+ code().marked_for_deoptimization()));
}
bool JSFunction::ChecksOptimizationMarker() {
- return code()->checks_optimization_marker();
+ return code().checks_optimization_marker();
}
bool JSFunction::IsMarkedForOptimization() {
- return has_feedback_vector() && feedback_vector()->optimization_marker() ==
+ return has_feedback_vector() && feedback_vector().optimization_marker() ==
OptimizationMarker::kCompileOptimized;
}
bool JSFunction::IsMarkedForConcurrentOptimization() {
return has_feedback_vector() &&
- feedback_vector()->optimization_marker() ==
+ feedback_vector().optimization_marker() ==
OptimizationMarker::kCompileOptimizedConcurrent;
}
bool JSFunction::IsInOptimizationQueue() {
- return has_feedback_vector() && feedback_vector()->optimization_marker() ==
+ return has_feedback_vector() && feedback_vector().optimization_marker() ==
OptimizationMarker::kInOptimizationQueue;
}
void JSFunction::CompleteInobjectSlackTrackingIfActive() {
if (!has_prototype_slot()) return;
- if (has_initial_map() && initial_map()->IsInobjectSlackTrackingInProgress()) {
- initial_map()->CompleteInobjectSlackTracking(GetIsolate());
+ if (has_initial_map() && initial_map().IsInobjectSlackTrackingInProgress()) {
+ initial_map().CompleteInobjectSlackTracking(GetIsolate());
}
}
AbstractCode JSFunction::abstract_code() {
if (IsInterpreted()) {
- return AbstractCode::cast(shared()->GetBytecodeArray());
+ return AbstractCode::cast(shared().GetBytecodeArray());
} else {
return AbstractCode::cast(code());
}
}
-int JSFunction::length() { return shared()->length(); }
+int JSFunction::length() { return shared().length(); }
Code JSFunction::code() const {
return Code::cast(RELAXED_READ_FIELD(*this, kCodeOffset));
@@ -578,14 +578,14 @@
}
void JSFunction::ClearOptimizedCodeSlot(const char* reason) {
- if (has_feedback_vector() && feedback_vector()->has_optimized_code()) {
+ if (has_feedback_vector() && feedback_vector().has_optimized_code()) {
if (FLAG_trace_opt) {
PrintF("[evicting entry from optimizing code feedback slot (%s) for ",
reason);
ShortPrint();
PrintF("]\n");
}
- feedback_vector()->ClearOptimizedCode();
+ feedback_vector().ClearOptimizedCode();
}
}
@@ -594,17 +594,17 @@
DCHECK(ChecksOptimizationMarker());
DCHECK(!HasOptimizedCode());
- feedback_vector()->SetOptimizationMarker(marker);
+ feedback_vector().SetOptimizationMarker(marker);
}
bool JSFunction::has_feedback_vector() const {
- return shared()->is_compiled() &&
- raw_feedback_cell()->value()->IsFeedbackVector();
+ return shared().is_compiled() &&
+ raw_feedback_cell().value().IsFeedbackVector();
}
bool JSFunction::has_closure_feedback_cell_array() const {
- return shared()->is_compiled() &&
- raw_feedback_cell()->value()->IsClosureFeedbackCellArray();
+ return shared().is_compiled() &&
+ raw_feedback_cell().value().IsClosureFeedbackCellArray();
}
Context JSFunction::context() {
@@ -612,57 +612,57 @@
}
bool JSFunction::has_context() const {
- return READ_FIELD(*this, kContextOffset)->IsContext();
+ return READ_FIELD(*this, kContextOffset).IsContext();
}
-JSGlobalProxy JSFunction::global_proxy() { return context()->global_proxy(); }
+JSGlobalProxy JSFunction::global_proxy() { return context().global_proxy(); }
NativeContext JSFunction::native_context() {
- return context()->native_context();
+ return context().native_context();
}
void JSFunction::set_context(Object value) {
- DCHECK(value->IsUndefined() || value->IsContext());
+ DCHECK(value.IsUndefined() || value.IsContext());
WRITE_FIELD(*this, kContextOffset, value);
WRITE_BARRIER(*this, kContextOffset, value);
}
ACCESSORS_CHECKED(JSFunction, prototype_or_initial_map, Object,
- kPrototypeOrInitialMapOffset, map()->has_prototype_slot())
+ kPrototypeOrInitialMapOffset, map().has_prototype_slot())
bool JSFunction::has_prototype_slot() const {
- return map()->has_prototype_slot();
+ return map().has_prototype_slot();
}
Map JSFunction::initial_map() { return Map::cast(prototype_or_initial_map()); }
bool JSFunction::has_initial_map() {
DCHECK(has_prototype_slot());
- return prototype_or_initial_map()->IsMap();
+ return prototype_or_initial_map().IsMap();
}
bool JSFunction::has_instance_prototype() {
DCHECK(has_prototype_slot());
- return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
+ return has_initial_map() || !prototype_or_initial_map().IsTheHole();
}
bool JSFunction::has_prototype() {
DCHECK(has_prototype_slot());
- return map()->has_non_instance_prototype() || has_instance_prototype();
+ return map().has_non_instance_prototype() || has_instance_prototype();
}
bool JSFunction::has_prototype_property() {
return (has_prototype_slot() && IsConstructor()) ||
- IsGeneratorFunction(shared()->kind());
+ IsGeneratorFunction(shared().kind());
}
bool JSFunction::PrototypeRequiresRuntimeLookup() {
- return !has_prototype_property() || map()->has_non_instance_prototype();
+ return !has_prototype_property() || map().has_non_instance_prototype();
}
HeapObject JSFunction::instance_prototype() {
DCHECK(has_instance_prototype());
- if (has_initial_map()) return initial_map()->prototype();
+ if (has_initial_map()) return initial_map().prototype();
// When there is no initial map and the prototype is a JSReceiver, the
// initial map field is used for the prototype field.
return HeapObject::cast(prototype_or_initial_map());
@@ -672,19 +672,19 @@
DCHECK(has_prototype());
// If the function's prototype property has been set to a non-JSReceiver
// value, that value is stored in the constructor field of the map.
- if (map()->has_non_instance_prototype()) {
- Object prototype = map()->GetConstructor();
+ if (map().has_non_instance_prototype()) {
+ Object prototype = map().GetConstructor();
// The map must have a prototype in that field, not a back pointer.
- DCHECK(!prototype->IsMap());
- DCHECK(!prototype->IsFunctionTemplateInfo());
+ DCHECK(!prototype.IsMap());
+ DCHECK(!prototype.IsFunctionTemplateInfo());
return prototype;
}
return instance_prototype();
}
bool JSFunction::is_compiled() const {
- return code()->builtin_index() != Builtins::kCompileLazy &&
- shared()->is_compiled();
+ return code().builtin_index() != Builtins::kCompileLazy &&
+ shared().is_compiled();
}
bool JSFunction::NeedsResetDueToFlushedBytecode() {
@@ -694,14 +694,14 @@
Object maybe_shared = ACQUIRE_READ_FIELD(*this, kSharedFunctionInfoOffset);
Object maybe_code = RELAXED_READ_FIELD(*this, kCodeOffset);
- if (!maybe_shared->IsSharedFunctionInfo() || !maybe_code->IsCode()) {
+ if (!maybe_shared.IsSharedFunctionInfo() || !maybe_code.IsCode()) {
return false;
}
SharedFunctionInfo shared = SharedFunctionInfo::cast(maybe_shared);
Code code = Code::cast(maybe_code);
- return !shared->is_compiled() &&
- code->builtin_index() != Builtins::kCompileLazy;
+ return !shared.is_compiled() &&
+ code.builtin_index() != Builtins::kCompileLazy;
}
void JSFunction::ResetIfBytecodeFlushed() {
@@ -709,7 +709,7 @@
// Bytecode was flushed and function is now uncompiled, reset JSFunction
// by setting code to CompileLazy and clearing the feedback vector.
set_code(GetIsolate()->builtins()->builtin(i::Builtins::kCompileLazy));
- raw_feedback_cell()->reset();
+ raw_feedback_cell().reset();
}
}
@@ -726,7 +726,7 @@
ACCESSORS(JSDate, sec, Object, kSecOffset)
bool JSMessageObject::DidEnsureSourcePositionsAvailable() const {
- return shared_info()->IsUndefined();
+ return shared_info().IsUndefined();
}
int JSMessageObject::GetStartPosition() const {
@@ -758,7 +758,7 @@
SMI_ACCESSORS(JSMessageObject, error_level, kErrorLevelOffset)
ElementsKind JSObject::GetElementsKind() const {
- ElementsKind kind = map()->elements_kind();
+ ElementsKind kind = map().elements_kind();
#if VERIFY_HEAP && DEBUG
FixedArrayBase fixed_array =
FixedArrayBase::unchecked_cast(READ_FIELD(*this, kElementsOffset));
@@ -766,21 +766,21 @@
// If a GC was caused while constructing this object, the elements
// pointer may point to a one pointer filler map.
if (ElementsAreSafeToExamine()) {
- Map map = fixed_array->map();
+ Map map = fixed_array.map();
if (IsSmiOrObjectElementsKind(kind)) {
DCHECK(map == GetReadOnlyRoots().fixed_array_map() ||
map == GetReadOnlyRoots().fixed_cow_array_map());
} else if (IsDoubleElementsKind(kind)) {
- DCHECK(fixed_array->IsFixedDoubleArray() ||
+ DCHECK(fixed_array.IsFixedDoubleArray() ||
fixed_array == GetReadOnlyRoots().empty_fixed_array());
} else if (kind == DICTIONARY_ELEMENTS) {
- DCHECK(fixed_array->IsFixedArray());
- DCHECK(fixed_array->IsDictionary());
+ DCHECK(fixed_array.IsFixedArray());
+ DCHECK(fixed_array.IsDictionary());
} else {
DCHECK(kind > DICTIONARY_ELEMENTS || IsFrozenOrSealedElementsKind(kind));
}
DCHECK(!IsSloppyArgumentsElementsKind(kind) ||
- (elements()->IsFixedArray() && elements()->length() >= 2));
+ (elements().IsFixedArray() && elements().length() >= 2));
}
#endif
return kind;
@@ -850,23 +850,23 @@
bool JSObject::HasFixedTypedArrayElements() {
DCHECK(!elements().is_null());
- return map()->has_fixed_typed_array_elements();
+ return map().has_fixed_typed_array_elements();
}
-#define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype) \
- bool JSObject::HasFixed##Type##Elements() { \
- FixedArrayBase array = elements(); \
- return array->map()->instance_type() == FIXED_##TYPE##_ARRAY_TYPE; \
+#define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype) \
+ bool JSObject::HasFixed##Type##Elements() { \
+ FixedArrayBase array = elements(); \
+ return array.map().instance_type() == FIXED_##TYPE##_ARRAY_TYPE; \
}
TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)
#undef FIXED_TYPED_ELEMENTS_CHECK
-bool JSObject::HasNamedInterceptor() { return map()->has_named_interceptor(); }
+bool JSObject::HasNamedInterceptor() { return map().has_named_interceptor(); }
bool JSObject::HasIndexedInterceptor() {
- return map()->has_indexed_interceptor();
+ return map().has_indexed_interceptor();
}
void JSGlobalObject::set_global_dictionary(GlobalDictionary dictionary) {
@@ -889,7 +889,7 @@
ReadOnlyRoots roots = GetReadOnlyRoots();
DCHECK(!ObjectInYoungGeneration(roots.empty_fixed_array()));
DCHECK(!ObjectInYoungGeneration(roots.empty_property_dictionary()));
- if (map()->is_dictionary_map()) {
+ if (map().is_dictionary_map()) {
WRITE_FIELD(*this, kPropertiesOrHashOffset,
roots.empty_property_dictionary());
} else {
@@ -899,9 +899,9 @@
bool JSReceiver::HasFastProperties() const {
DCHECK(
- raw_properties_or_hash()->IsSmi() ||
- (raw_properties_or_hash()->IsDictionary() == map()->is_dictionary_map()));
- return !map()->is_dictionary_map();
+ raw_properties_or_hash().IsSmi() ||
+ (raw_properties_or_hash().IsDictionary() == map().is_dictionary_map()));
+ return !map().is_dictionary_map();
}
NameDictionary JSReceiver::property_dictionary() const {
@@ -909,7 +909,7 @@
DCHECK(!HasFastProperties());
Object prop = raw_properties_or_hash();
- if (prop->IsSmi()) {
+ if (prop.IsSmi()) {
return GetReadOnlyRoots().empty_property_dictionary();
}
@@ -922,7 +922,7 @@
DCHECK(HasFastProperties());
Object prop = raw_properties_or_hash();
- if (prop->IsSmi() || prop == GetReadOnlyRoots().empty_fixed_array()) {
+ if (prop.IsSmi() || prop == GetReadOnlyRoots().empty_fixed_array()) {
return GetReadOnlyRoots().empty_property_array();
}
@@ -993,7 +993,7 @@
}
bool JSGlobalObject::IsDetached() {
- return global_proxy()->IsDetachedFrom(*this);
+ return global_proxy().IsDetachedFrom(*this);
}
bool JSGlobalProxy::IsDetachedFrom(JSGlobalObject global) const {
@@ -1046,7 +1046,7 @@
ObjectInYoungGeneration(object))) {
return false;
}
- return ShouldConvertToSlowElements(object->GetFastElementsUsage(),
+ return ShouldConvertToSlowElements(object.GetFastElementsUsage(),
*new_capacity);
}
diff --git a/src/objects/js-objects.cc b/src/objects/js-objects.cc
index be72b58..09e24f0 100644
--- a/src/objects/js-objects.cc
+++ b/src/objects/js-objects.cc
@@ -191,19 +191,19 @@
// Non-empty strings are the only non-JSReceivers that need to be handled
// explicitly by Object.assign.
if (!source->IsJSReceiver()) {
- return Just(!source->IsString() || String::cast(*source)->length() == 0);
+ return Just(!source->IsString() || String::cast(*source).length() == 0);
}
// If the target is deprecated, the object will be updated on first store. If
// the source for that store equals the target, this will invalidate the
// cached representation of the source. Preventively upgrade the target.
// Do this on each iteration since any property load could cause deprecation.
- if (target->map()->is_deprecated()) {
+ if (target->map().is_deprecated()) {
JSObject::MigrateInstance(Handle<JSObject>::cast(target));
}
Isolate* isolate = target->GetIsolate();
- Handle<Map> map(JSReceiver::cast(*source)->map(), isolate);
+ Handle<Map> map(JSReceiver::cast(*source).map(), isolate);
if (!map->IsJSObjectMap()) return Just(false);
if (!map->OnlyHasSimpleProperties()) return Just(false);
@@ -348,7 +348,7 @@
if (IsJSArgumentsObject()) return roots.Arguments_string();
if (IsJSArray()) return roots.Array_string();
if (IsJSArrayBuffer()) {
- if (JSArrayBuffer::cast(*this)->is_shared()) {
+ if (JSArrayBuffer::cast(*this).is_shared()) {
return roots.SharedArrayBuffer_string();
}
return roots.ArrayBuffer_string();
@@ -360,45 +360,45 @@
if (IsJSMap()) return roots.Map_string();
if (IsJSMapIterator()) return roots.MapIterator_string();
if (IsJSProxy()) {
- return map()->is_callable() ? roots.Function_string()
- : roots.Object_string();
+ return map().is_callable() ? roots.Function_string()
+ : roots.Object_string();
}
if (IsJSRegExp()) return roots.RegExp_string();
if (IsJSSet()) return roots.Set_string();
if (IsJSSetIterator()) return roots.SetIterator_string();
if (IsJSTypedArray()) {
-#define SWITCH_KIND(Type, type, TYPE, ctype) \
- if (map()->elements_kind() == TYPE##_ELEMENTS) { \
- return roots.Type##Array_string(); \
+#define SWITCH_KIND(Type, type, TYPE, ctype) \
+ if (map().elements_kind() == TYPE##_ELEMENTS) { \
+ return roots.Type##Array_string(); \
}
TYPED_ARRAYS(SWITCH_KIND)
#undef SWITCH_KIND
}
if (IsJSValue()) {
- Object value = JSValue::cast(*this)->value();
- if (value->IsBoolean()) return roots.Boolean_string();
- if (value->IsString()) return roots.String_string();
- if (value->IsNumber()) return roots.Number_string();
- if (value->IsBigInt()) return roots.BigInt_string();
- if (value->IsSymbol()) return roots.Symbol_string();
- if (value->IsScript()) return roots.Script_string();
+ Object value = JSValue::cast(*this).value();
+ if (value.IsBoolean()) return roots.Boolean_string();
+ if (value.IsString()) return roots.String_string();
+ if (value.IsNumber()) return roots.Number_string();
+ if (value.IsBigInt()) return roots.BigInt_string();
+ if (value.IsSymbol()) return roots.Symbol_string();
+ if (value.IsScript()) return roots.Script_string();
UNREACHABLE();
}
if (IsJSWeakMap()) return roots.WeakMap_string();
if (IsJSWeakSet()) return roots.WeakSet_string();
if (IsJSGlobalProxy()) return roots.global_string();
- Object maybe_constructor = map()->GetConstructor();
- if (maybe_constructor->IsJSFunction()) {
+ Object maybe_constructor = map().GetConstructor();
+ if (maybe_constructor.IsJSFunction()) {
JSFunction constructor = JSFunction::cast(maybe_constructor);
- if (constructor->shared()->IsApiFunction()) {
- maybe_constructor = constructor->shared()->get_api_func_data();
+ if (constructor.shared().IsApiFunction()) {
+ maybe_constructor = constructor.shared().get_api_func_data();
}
}
- if (maybe_constructor->IsFunctionTemplateInfo()) {
+ if (maybe_constructor.IsFunctionTemplateInfo()) {
FunctionTemplateInfo info = FunctionTemplateInfo::cast(maybe_constructor);
- if (info->class_name()->IsString()) return String::cast(info->class_name());
+ if (info.class_name().IsString()) return String::cast(info.class_name());
}
return roots.Object_string();
@@ -413,23 +413,22 @@
// constructor on the map provides the most accurate name.
// Don't provide the info for prototypes, since their constructors are
// reclaimed and replaced by Object in OptimizeAsPrototype.
- if (!receiver->IsJSProxy() && receiver->map()->new_target_is_base() &&
- !receiver->map()->is_prototype_map()) {
- Object maybe_constructor = receiver->map()->GetConstructor();
- if (maybe_constructor->IsJSFunction()) {
+ if (!receiver->IsJSProxy() && receiver->map().new_target_is_base() &&
+ !receiver->map().is_prototype_map()) {
+ Object maybe_constructor = receiver->map().GetConstructor();
+ if (maybe_constructor.IsJSFunction()) {
JSFunction constructor = JSFunction::cast(maybe_constructor);
- String name = constructor->shared()->DebugName();
- if (name->length() != 0 &&
- !name->Equals(ReadOnlyRoots(isolate).Object_string())) {
+ String name = constructor.shared().DebugName();
+ if (name.length() != 0 &&
+ !name.Equals(ReadOnlyRoots(isolate).Object_string())) {
return std::make_pair(handle(constructor, isolate),
handle(name, isolate));
}
- } else if (maybe_constructor->IsFunctionTemplateInfo()) {
+ } else if (maybe_constructor.IsFunctionTemplateInfo()) {
FunctionTemplateInfo info = FunctionTemplateInfo::cast(maybe_constructor);
- if (info->class_name()->IsString()) {
- return std::make_pair(
- MaybeHandle<JSFunction>(),
- handle(String::cast(info->class_name()), isolate));
+ if (info.class_name().IsString()) {
+ return std::make_pair(MaybeHandle<JSFunction>(),
+ handle(String::cast(info.class_name()), isolate));
}
}
}
@@ -452,10 +451,10 @@
Handle<Object> maybe_constructor = JSReceiver::GetDataProperty(&it);
if (maybe_constructor->IsJSFunction()) {
JSFunction constructor = JSFunction::cast(*maybe_constructor);
- String name = constructor->shared()->DebugName();
+ String name = constructor.shared().DebugName();
- if (name->length() != 0 &&
- !name->Equals(ReadOnlyRoots(isolate).Object_string())) {
+ if (name.length() != 0 &&
+ !name.Equals(ReadOnlyRoots(isolate).Object_string())) {
return std::make_pair(handle(constructor, isolate),
handle(name, isolate));
}
@@ -480,26 +479,26 @@
Handle<NativeContext> JSReceiver::GetCreationContext() {
JSReceiver receiver = *this;
// Externals are JSObjects with null as a constructor.
- DCHECK(!receiver->IsExternal(GetIsolate()));
- Object constructor = receiver->map()->GetConstructor();
+ DCHECK(!receiver.IsExternal(GetIsolate()));
+ Object constructor = receiver.map().GetConstructor();
JSFunction function;
- if (constructor->IsJSFunction()) {
+ if (constructor.IsJSFunction()) {
function = JSFunction::cast(constructor);
- } else if (constructor->IsFunctionTemplateInfo()) {
+ } else if (constructor.IsFunctionTemplateInfo()) {
// Remote objects don't have a creation context.
return Handle<NativeContext>::null();
- } else if (receiver->IsJSGeneratorObject()) {
- function = JSGeneratorObject::cast(receiver)->function();
+ } else if (receiver.IsJSGeneratorObject()) {
+ function = JSGeneratorObject::cast(receiver).function();
} else {
// Functions have null as a constructor,
// but any JSFunction knows its context immediately.
- CHECK(receiver->IsJSFunction());
+ CHECK(receiver.IsJSFunction());
function = JSFunction::cast(receiver);
}
- return function->has_context()
- ? Handle<NativeContext>(function->context()->native_context(),
- receiver->GetIsolate())
+ return function.has_context()
+ ? Handle<NativeContext>(function.context().native_context(),
+ receiver.GetIsolate())
: Handle<NativeContext>::null();
}
@@ -583,50 +582,50 @@
DCHECK_NE(PropertyArray::kNoHashSentinel, hash);
DCHECK(PropertyArray::HashField::is_valid(hash));
- ReadOnlyRoots roots = properties->GetReadOnlyRoots();
+ ReadOnlyRoots roots = properties.GetReadOnlyRoots();
if (properties == roots.empty_fixed_array() ||
properties == roots.empty_property_array() ||
properties == roots.empty_property_dictionary()) {
return Smi::FromInt(hash);
}
- if (properties->IsPropertyArray()) {
- PropertyArray::cast(properties)->SetHash(hash);
- DCHECK_LT(0, PropertyArray::cast(properties)->length());
+ if (properties.IsPropertyArray()) {
+ PropertyArray::cast(properties).SetHash(hash);
+ DCHECK_LT(0, PropertyArray::cast(properties).length());
return properties;
}
- if (properties->IsGlobalDictionary()) {
- GlobalDictionary::cast(properties)->SetHash(hash);
+ if (properties.IsGlobalDictionary()) {
+ GlobalDictionary::cast(properties).SetHash(hash);
return properties;
}
- DCHECK(properties->IsNameDictionary());
- NameDictionary::cast(properties)->SetHash(hash);
+ DCHECK(properties.IsNameDictionary());
+ NameDictionary::cast(properties).SetHash(hash);
return properties;
}
int GetIdentityHashHelper(JSReceiver object) {
DisallowHeapAllocation no_gc;
- Object properties = object->raw_properties_or_hash();
- if (properties->IsSmi()) {
+ Object properties = object.raw_properties_or_hash();
+ if (properties.IsSmi()) {
return Smi::ToInt(properties);
}
- if (properties->IsPropertyArray()) {
- return PropertyArray::cast(properties)->Hash();
+ if (properties.IsPropertyArray()) {
+ return PropertyArray::cast(properties).Hash();
}
- if (properties->IsNameDictionary()) {
- return NameDictionary::cast(properties)->Hash();
+ if (properties.IsNameDictionary()) {
+ return NameDictionary::cast(properties).Hash();
}
- if (properties->IsGlobalDictionary()) {
- return GlobalDictionary::cast(properties)->Hash();
+ if (properties.IsGlobalDictionary()) {
+ return GlobalDictionary::cast(properties).Hash();
}
#ifdef DEBUG
- ReadOnlyRoots roots = object->GetReadOnlyRoots();
+ ReadOnlyRoots roots = object.GetReadOnlyRoots();
DCHECK(properties == roots.empty_fixed_array() ||
properties == roots.empty_property_dictionary());
#endif
@@ -646,8 +645,8 @@
}
void JSReceiver::SetProperties(HeapObject properties) {
- DCHECK_IMPLIES(properties->IsPropertyArray() &&
- PropertyArray::cast(properties)->length() == 0,
+ DCHECK_IMPLIES(properties.IsPropertyArray() &&
+ PropertyArray::cast(properties).length() == 0,
properties == GetReadOnlyRoots().empty_property_array());
DisallowHeapAllocation no_gc;
int hash = GetIdentityHashHelper(*this);
@@ -679,7 +678,7 @@
int hash = isolate->GenerateIdentityHash(PropertyArray::HashField::kMax);
DCHECK_NE(PropertyArray::kNoHashSentinel, hash);
- key->SetIdentityHash(hash);
+ key.SetIdentityHash(hash);
return Smi::FromInt(hash);
}
@@ -702,7 +701,7 @@
if (object->IsJSGlobalObject()) {
// If we have a global object, invalidate the cell and swap in a new one.
Handle<GlobalDictionary> dictionary(
- JSGlobalObject::cast(*object)->global_dictionary(), isolate);
+ JSGlobalObject::cast(*object).global_dictionary(), isolate);
DCHECK_NE(GlobalDictionary::kNotFound, entry);
auto cell = PropertyCell::InvalidateEntry(isolate, dictionary, entry);
@@ -716,7 +715,7 @@
dictionary = NameDictionary::DeleteEntry(isolate, dictionary, entry);
object->SetProperties(*dictionary);
}
- if (object->map()->is_prototype_map()) {
+ if (object->map().is_prototype_map()) {
// Invalidate prototype validity cell as this may invalidate transitioning
// store IC handlers.
JSObject::InvalidatePrototypeChains(object->map());
@@ -985,7 +984,7 @@
// interceptor calls.
AssertNoContextChange ncc(isolate);
- if (interceptor->getter()->IsUndefined(isolate)) {
+ if (interceptor->getter().IsUndefined(isolate)) {
return isolate->factory()->undefined_value();
}
@@ -1031,7 +1030,7 @@
}
PropertyCallbackArguments args(isolate, interceptor->data(), *receiver,
*holder, Just(kDontThrow));
- if (!interceptor->query()->IsUndefined(isolate)) {
+ if (!interceptor->query().IsUndefined(isolate)) {
Handle<Object> result;
if (it->IsElement()) {
result = args.CallIndexedQuery(interceptor, it->index());
@@ -1043,7 +1042,7 @@
CHECK(result->ToInt32(&value));
return Just(static_cast<PropertyAttributes>(value));
}
- } else if (!interceptor->getter()->IsUndefined(isolate)) {
+ } else if (!interceptor->getter().IsUndefined(isolate)) {
// TODO(verwaest): Use GetPropertyWithInterceptor?
Handle<Object> result;
if (it->IsElement()) {
@@ -1066,7 +1065,7 @@
// interceptor calls.
AssertNoContextChange ncc(isolate);
- if (interceptor->setter()->IsUndefined(isolate)) return Just(false);
+ if (interceptor->setter().IsUndefined(isolate)) return Just(false);
Handle<JSObject> holder = it->GetHolder<JSObject>();
bool result;
@@ -1099,7 +1098,7 @@
// interceptor calls.
AssertNoContextChange ncc(isolate);
- if (interceptor->definer()->IsUndefined(isolate)) return Just(false);
+ if (interceptor->definer().IsUndefined(isolate)) return Just(false);
Handle<JSObject> holder = it->GetHolder<JSObject>();
bool result;
@@ -1521,7 +1520,7 @@
Isolate* isolate = it->isolate();
Handle<InterceptorInfo> interceptor = it->GetInterceptor();
- if (interceptor->descriptor()->IsUndefined(isolate)) return Just(false);
+ if (interceptor->descriptor().IsUndefined(isolate)) return Just(false);
Handle<Object> result;
Handle<JSObject> holder = it->GetHolder<JSObject>();
@@ -1729,7 +1728,7 @@
Maybe<bool> JSReceiver::TestIntegrityLevel(Handle<JSReceiver> receiver,
IntegrityLevel level) {
- if (!receiver->map()->IsCustomElementsReceiverMap()) {
+ if (!receiver->map().IsCustomElementsReceiverMap()) {
return JSObject::TestIntegrityLevel(Handle<JSObject>::cast(receiver),
level);
}
@@ -1817,7 +1816,7 @@
V8_WARN_UNUSED_RESULT Maybe<bool> FastGetOwnValuesOrEntries(
Isolate* isolate, Handle<JSReceiver> receiver, bool get_entries,
Handle<FixedArray>* result) {
- Handle<Map> map(JSReceiver::cast(*receiver)->map(), isolate);
+ Handle<Map> map(JSReceiver::cast(*receiver).map(), isolate);
if (!map->IsJSObjectMap()) return Just(false);
if (!map->OnlyHasSimpleProperties()) return Just(false);
@@ -1990,7 +1989,7 @@
for (PrototypeIterator iter(isolate, *this, kStartAtReceiver,
PrototypeIterator::END_AT_NULL);
!iter.IsAtEnd(); iter.AdvanceIgnoringProxies()) {
- if (iter.GetCurrent()->IsJSProxy()) return true;
+ if (iter.GetCurrent().IsJSProxy()) return true;
}
return false;
}
@@ -2008,7 +2007,7 @@
DCHECK(constructor->IsConstructor());
DCHECK(new_target->IsConstructor());
DCHECK(!constructor->has_initial_map() ||
- constructor->initial_map()->instance_type() != JS_FUNCTION_TYPE);
+ constructor->initial_map().instance_type() != JS_FUNCTION_TYPE);
Handle<Map> initial_map;
ASSIGN_RETURN_ON_EXCEPTION(
@@ -2053,7 +2052,7 @@
object->HasFrozenOrSealedElements());
FixedArray raw_elems = FixedArray::cast(object->elements());
Isolate* isolate = object->GetIsolate();
- if (raw_elems->map() != ReadOnlyRoots(isolate).fixed_cow_array_map()) return;
+ if (raw_elems.map() != ReadOnlyRoots(isolate).fixed_cow_array_map()) return;
Handle<FixedArray> elems(raw_elems, isolate);
Handle<FixedArray> writable_elems = isolate->factory()->CopyFixedArrayWithMap(
elems, isolate->factory()->fixed_array_map());
@@ -2188,7 +2187,7 @@
if (it->state() == LookupIterator::ACCESSOR) {
auto accessors = it->GetAccessors();
if (accessors->IsAccessorInfo()) {
- if (AccessorInfo::cast(*accessors)->all_can_read()) return true;
+ if (AccessorInfo::cast(*accessors).all_can_read()) return true;
}
} else if (it->state() == LookupIterator::INTERCEPTOR) {
if (it->GetInterceptor()->all_can_read()) return true;
@@ -2231,7 +2230,7 @@
// Cross-Origin [[Get]] of Well-Known Symbols does not throw, and returns
// undefined.
Handle<Name> name = it->GetName();
- if (name->IsSymbol() && Symbol::cast(*name)->is_well_known_symbol()) {
+ if (name->IsSymbol() && Symbol::cast(*name).is_well_known_symbol()) {
return it->factory()->undefined_value();
}
@@ -2273,7 +2272,7 @@
if (it->state() == LookupIterator::ACCESSOR) {
Handle<Object> accessors = it->GetAccessors();
if (accessors->IsAccessorInfo()) {
- if (AccessorInfo::cast(*accessors)->all_can_write()) return true;
+ if (AccessorInfo::cast(*accessors).all_can_write()) return true;
}
}
}
@@ -2317,7 +2316,7 @@
int entry = dictionary->FindEntry(ReadOnlyRoots(isolate), name, hash);
if (entry == GlobalDictionary::kNotFound) {
- DCHECK_IMPLIES(global_obj->map()->is_prototype_map(),
+ DCHECK_IMPLIES(global_obj->map().is_prototype_map(),
Map::IsPrototypeChainInvalidated(global_obj->map()));
auto cell = isolate->factory()->NewPropertyCell(name);
cell->set_value(*value);
@@ -2339,7 +2338,7 @@
int entry = dictionary->FindEntry(isolate, name);
if (entry == NameDictionary::kNotFound) {
- DCHECK_IMPLIES(object->map()->is_prototype_map(),
+ DCHECK_IMPLIES(object->map().is_prototype_map(),
Map::IsPrototypeChainInvalidated(object->map()));
dictionary =
NameDictionary::Add(isolate, dictionary, name, value, details);
@@ -2355,11 +2354,11 @@
}
void JSObject::JSObjectShortPrint(StringStream* accumulator) {
- switch (map()->instance_type()) {
+ switch (map().instance_type()) {
case JS_ARRAY_TYPE: {
- double length = JSArray::cast(*this)->length()->IsUndefined()
+ double length = JSArray::cast(*this).length().IsUndefined()
? 0
- : JSArray::cast(*this)->length()->Number();
+ : JSArray::cast(*this).length().Number();
accumulator->Add("<JSArray[%u]>", static_cast<uint32_t>(length));
break;
}
@@ -2368,7 +2367,7 @@
accumulator->Add("<JSBoundFunction");
accumulator->Add(" (BoundTargetFunction %p)>",
reinterpret_cast<void*>(
- bound_function->bound_target_function().ptr()));
+ bound_function.bound_target_function().ptr()));
break;
}
case JS_WEAK_MAP_TYPE: {
@@ -2382,9 +2381,9 @@
case JS_REGEXP_TYPE: {
accumulator->Add("<JSRegExp");
JSRegExp regexp = JSRegExp::cast(*this);
- if (regexp->source()->IsString()) {
+ if (regexp.source().IsString()) {
accumulator->Add(" ");
- String::cast(regexp->source())->StringShortPrint(accumulator);
+ String::cast(regexp.source()).StringShortPrint(accumulator);
}
accumulator->Add(">");
@@ -2392,11 +2391,11 @@
}
case JS_FUNCTION_TYPE: {
JSFunction function = JSFunction::cast(*this);
- Object fun_name = function->shared()->DebugName();
+ Object fun_name = function.shared().DebugName();
bool printed = false;
- if (fun_name->IsString()) {
+ if (fun_name.IsString()) {
String str = String::cast(fun_name);
- if (str->length() > 0) {
+ if (str.length() > 0) {
accumulator->Add("<JSFunction ");
accumulator->Put(str);
printed = true;
@@ -2406,10 +2405,10 @@
accumulator->Add("<JSFunction");
}
if (FLAG_trace_file_names) {
- Object source_name = Script::cast(function->shared()->script())->name();
- if (source_name->IsString()) {
+ Object source_name = Script::cast(function.shared().script()).name();
+ if (source_name.IsString()) {
String str = String::cast(source_name);
- if (str->length() > 0) {
+ if (str.length() > 0) {
accumulator->Add(" <");
accumulator->Put(str);
accumulator->Add(">");
@@ -2417,7 +2416,7 @@
}
}
accumulator->Add(" (sfi = %p)",
- reinterpret_cast<void*>(function->shared().ptr()));
+ reinterpret_cast<void*>(function.shared().ptr()));
accumulator->Put('>');
break;
}
@@ -2439,30 +2438,29 @@
default: {
Map map_of_this = map();
Heap* heap = GetHeap();
- Object constructor = map_of_this->GetConstructor();
+ Object constructor = map_of_this.GetConstructor();
bool printed = false;
- if (constructor->IsHeapObject() &&
+ if (constructor.IsHeapObject() &&
!heap->Contains(HeapObject::cast(constructor))) {
accumulator->Add("!!!INVALID CONSTRUCTOR!!!");
} else {
bool global_object = IsJSGlobalProxy();
- if (constructor->IsJSFunction()) {
- if (!heap->Contains(JSFunction::cast(constructor)->shared())) {
+ if (constructor.IsJSFunction()) {
+ if (!heap->Contains(JSFunction::cast(constructor).shared())) {
accumulator->Add("!!!INVALID SHARED ON CONSTRUCTOR!!!");
} else {
String constructor_name =
- JSFunction::cast(constructor)->shared()->Name();
- if (constructor_name->length() > 0) {
+ JSFunction::cast(constructor).shared().Name();
+ if (constructor_name.length() > 0) {
accumulator->Add(global_object ? "<GlobalObject " : "<");
accumulator->Put(constructor_name);
- accumulator->Add(
- " %smap = %p",
- map_of_this->is_deprecated() ? "deprecated-" : "",
- map_of_this);
+ accumulator->Add(" %smap = %p",
+ map_of_this.is_deprecated() ? "deprecated-" : "",
+ map_of_this);
printed = true;
}
}
- } else if (constructor->IsFunctionTemplateInfo()) {
+ } else if (constructor.IsFunctionTemplateInfo()) {
accumulator->Add(global_object ? "<RemoteObject>" : "<RemoteObject>");
printed = true;
}
@@ -2472,7 +2470,7 @@
}
if (IsJSValue()) {
accumulator->Add(" value = ");
- JSValue::cast(*this)->value()->ShortPrint(accumulator);
+ JSValue::cast(*this).value().ShortPrint(accumulator);
}
accumulator->Put('>');
break;
@@ -2502,52 +2500,52 @@
void JSObject::PrintInstanceMigration(FILE* file, Map original_map,
Map new_map) {
- if (new_map->is_dictionary_map()) {
+ if (new_map.is_dictionary_map()) {
PrintF(file, "[migrating to slow]\n");
return;
}
PrintF(file, "[migrating]");
- DescriptorArray o = original_map->instance_descriptors();
- DescriptorArray n = new_map->instance_descriptors();
- for (int i = 0; i < original_map->NumberOfOwnDescriptors(); i++) {
- Representation o_r = o->GetDetails(i).representation();
- Representation n_r = n->GetDetails(i).representation();
+ DescriptorArray o = original_map.instance_descriptors();
+ DescriptorArray n = new_map.instance_descriptors();
+ for (int i = 0; i < original_map.NumberOfOwnDescriptors(); i++) {
+ Representation o_r = o.GetDetails(i).representation();
+ Representation n_r = n.GetDetails(i).representation();
if (!o_r.Equals(n_r)) {
- String::cast(o->GetKey(i))->PrintOn(file);
+ String::cast(o.GetKey(i)).PrintOn(file);
PrintF(file, ":%s->%s ", o_r.Mnemonic(), n_r.Mnemonic());
- } else if (o->GetDetails(i).location() == kDescriptor &&
- n->GetDetails(i).location() == kField) {
- Name name = o->GetKey(i);
- if (name->IsString()) {
- String::cast(name)->PrintOn(file);
+ } else if (o.GetDetails(i).location() == kDescriptor &&
+ n.GetDetails(i).location() == kField) {
+ Name name = o.GetKey(i);
+ if (name.IsString()) {
+ String::cast(name).PrintOn(file);
} else {
PrintF(file, "{symbol %p}", reinterpret_cast<void*>(name.ptr()));
}
PrintF(file, " ");
}
}
- if (original_map->elements_kind() != new_map->elements_kind()) {
- PrintF(file, "elements_kind[%i->%i]", original_map->elements_kind(),
- new_map->elements_kind());
+ if (original_map.elements_kind() != new_map.elements_kind()) {
+ PrintF(file, "elements_kind[%i->%i]", original_map.elements_kind(),
+ new_map.elements_kind());
}
PrintF(file, "\n");
}
bool JSObject::IsUnmodifiedApiObject(FullObjectSlot o) {
Object object = *o;
- if (object->IsSmi()) return false;
+ if (object.IsSmi()) return false;
HeapObject heap_object = HeapObject::cast(object);
- if (!object->IsJSObject()) return false;
+ if (!object.IsJSObject()) return false;
JSObject js_object = JSObject::cast(object);
- if (!js_object->IsDroppableApiWrapper()) return false;
- Object maybe_constructor = js_object->map()->GetConstructor();
- if (!maybe_constructor->IsJSFunction()) return false;
+ if (!js_object.IsDroppableApiWrapper()) return false;
+ Object maybe_constructor = js_object.map().GetConstructor();
+ if (!maybe_constructor.IsJSFunction()) return false;
JSFunction constructor = JSFunction::cast(maybe_constructor);
- if (js_object->elements()->length() != 0) return false;
+ if (js_object.elements().length() != 0) return false;
// Check that the object is not a key in a WeakMap (over-approximation).
- if (!js_object->GetIdentityHash()->IsUndefined()) return false;
+ if (!js_object.GetIdentityHash().IsUndefined()) return false;
- return constructor->initial_map() == heap_object->map();
+ return constructor.initial_map() == heap_object.map();
}
// static
@@ -2561,16 +2559,16 @@
old_map->set_prototype_info(Smi::kZero);
if (FLAG_trace_prototype_users) {
PrintF("Moving prototype_info %p from map %p to map %p.\n",
- reinterpret_cast<void*>(new_map->prototype_info()->ptr()),
+ reinterpret_cast<void*>(new_map->prototype_info().ptr()),
reinterpret_cast<void*>(old_map->ptr()),
reinterpret_cast<void*>(new_map->ptr()));
}
if (was_registered) {
- if (new_map->prototype_info()->IsPrototypeInfo()) {
+ if (new_map->prototype_info().IsPrototypeInfo()) {
// The new map isn't registered with its prototype yet; reflect this fact
// in the PrototypeInfo it just inherited from the old map.
PrototypeInfo::cast(new_map->prototype_info())
- ->set_registry_slot(PrototypeInfo::UNREGISTERED);
+ .set_registry_slot(PrototypeInfo::UNREGISTERED);
}
JSObject::LazyRegisterPrototypeUser(new_map, isolate);
}
@@ -2621,7 +2619,7 @@
PropertyDetails details = new_map->GetLastDescriptorDetails();
int target_index = details.field_index() - new_map->GetInObjectProperties();
- int property_array_length = object->property_array()->length();
+ int property_array_length = object->property_array().length();
bool have_space = old_map->UnusedPropertyFields() > 0 ||
(details.location() == kField && target_index >= 0 &&
property_array_length > target_index);
@@ -2798,10 +2796,10 @@
// Can't use JSObject::FastPropertyAtPut() because proper map was not set
// yet.
if (new_map->IsUnboxedDoubleField(index)) {
- DCHECK(value->IsMutableHeapNumber());
+ DCHECK(value.IsMutableHeapNumber());
// Ensure that all bits of the double value are preserved.
object->RawFastDoublePropertyAsBitsAtPut(
- index, MutableHeapNumber::cast(value)->value_as_bits());
+ index, MutableHeapNumber::cast(value).value_as_bits());
if (i < old_number_of_fields && !old_map->IsUnboxedDoubleField(index)) {
// Transition from tagged to untagged slot.
heap->ClearRecordedSlot(*object, object->RawField(index.offset()));
@@ -2975,7 +2973,7 @@
// Ensure that no transition was inserted for prototype migrations.
DCHECK_EQ(0, TransitionsAccessor(object->GetIsolate(), old_map)
.NumberOfTransitions());
- DCHECK(new_map->GetBackPointer()->IsUndefined());
+ DCHECK(new_map->GetBackPointer().IsUndefined());
DCHECK(object->map() != *old_map);
}
} else {
@@ -3015,15 +3013,14 @@
// static
MaybeHandle<NativeContext> JSObject::GetFunctionRealm(Handle<JSObject> object) {
- DCHECK(object->map()->is_constructor());
+ DCHECK(object->map().is_constructor());
DCHECK(!object->IsJSFunction());
return object->GetCreationContext();
}
void JSObject::AllocateStorageForMap(Handle<JSObject> object, Handle<Map> map) {
- DCHECK(object->map()->GetInObjectProperties() ==
- map->GetInObjectProperties());
- ElementsKind obj_kind = object->map()->elements_kind();
+ DCHECK(object->map().GetInObjectProperties() == map->GetInObjectProperties());
+ ElementsKind obj_kind = object->map().elements_kind();
ElementsKind map_kind = map->elements_kind();
if (map_kind != obj_kind) {
ElementsKind to_kind = GetMoreGeneralElementsKind(map_kind, obj_kind);
@@ -3133,7 +3130,7 @@
Maybe<PropertyAttributes> maybe = GetPropertyAttributes(&it);
DCHECK(maybe.IsJust());
DCHECK(!it.IsFound());
- DCHECK(object->map()->is_extensible() || name->IsPrivate());
+ DCHECK(object->map().is_extensible() || name->IsPrivate());
#endif
CHECK(Object::AddDataProperty(&it, value, attributes,
Just(ShouldThrow::kThrowOnError),
@@ -3378,7 +3375,7 @@
Name k = dictionary->NameAt(index);
// Dictionary keys are internalized upon insertion.
// TODO(jkummerow): Turn this into a DCHECK if it's not hit in the wild.
- CHECK(k->IsUniqueName());
+ CHECK(k.IsUniqueName());
Handle<Name> key(k, isolate);
// Properly mark the {new_map} if the {key} is an "interesting symbol".
@@ -3450,9 +3447,9 @@
}
void JSObject::RequireSlowElements(NumberDictionary dictionary) {
- if (dictionary->requires_slow_elements()) return;
- dictionary->set_requires_slow_elements();
- if (map()->is_prototype_map()) {
+ if (dictionary.requires_slow_elements()) return;
+ dictionary.set_requires_slow_elements();
+ if (map().is_prototype_map()) {
// If this object is a prototype (the callee will check), invalidate any
// prototype chains involving it.
InvalidatePrototypeChains(map());
@@ -3468,10 +3465,10 @@
FixedArrayBase elements = object->elements();
if (is_sloppy_arguments) {
- elements = SloppyArgumentsElements::cast(elements)->arguments();
+ elements = SloppyArgumentsElements::cast(elements).arguments();
}
- if (elements->IsNumberDictionary()) {
+ if (elements.IsNumberDictionary()) {
return handle(NumberDictionary::cast(elements), isolate);
}
}
@@ -3495,7 +3492,7 @@
if (is_sloppy_arguments) {
SloppyArgumentsElements::cast(object->elements())
- ->set_arguments(*dictionary);
+ .set_arguments(*dictionary);
} else {
object->set_elements(*dictionary);
}
@@ -3525,7 +3522,7 @@
DCHECK_EQ(LookupIterator::INTERCEPTOR, it->state());
Handle<InterceptorInfo> interceptor(it->GetInterceptor());
- if (interceptor->deleter()->IsUndefined(isolate)) return Nothing<bool>();
+ if (interceptor->deleter().IsUndefined(isolate)) return Nothing<bool>();
Handle<JSObject> holder = it->GetHolder<JSObject>();
Handle<Object> receiver = it->GetReceiver();
@@ -3591,12 +3588,12 @@
PropertyAttributes level) {
DCHECK(level == SEALED || level == FROZEN);
- uint32_t capacity = dict->Capacity();
+ uint32_t capacity = dict.Capacity();
for (uint32_t i = 0; i < capacity; i++) {
Object key;
- if (!dict->ToKey(roots, i, &key)) continue;
- if (key->FilterKey(ALL_PROPERTIES)) continue;
- PropertyDetails details = dict->DetailsAt(i);
+ if (!dict.ToKey(roots, i, &key)) continue;
+ if (key.FilterKey(ALL_PROPERTIES)) continue;
+ PropertyDetails details = dict.DetailsAt(i);
if (details.IsConfigurable()) return false;
if (level == FROZEN && details.kind() == kData && !details.IsReadOnly()) {
return false;
@@ -3607,14 +3604,14 @@
bool TestFastPropertiesIntegrityLevel(Map map, PropertyAttributes level) {
DCHECK(level == SEALED || level == FROZEN);
- DCHECK(!map->IsCustomElementsReceiverMap());
- DCHECK(!map->is_dictionary_map());
+ DCHECK(!map.IsCustomElementsReceiverMap());
+ DCHECK(!map.is_dictionary_map());
- DescriptorArray descriptors = map->instance_descriptors();
- int number_of_own_descriptors = map->NumberOfOwnDescriptors();
+ DescriptorArray descriptors = map.instance_descriptors();
+ int number_of_own_descriptors = map.NumberOfOwnDescriptors();
for (int i = 0; i < number_of_own_descriptors; i++) {
- if (descriptors->GetKey(i)->IsPrivate()) continue;
- PropertyDetails details = descriptors->GetDetails(i);
+ if (descriptors.GetKey(i).IsPrivate()) continue;
+ PropertyDetails details = descriptors.GetDetails(i);
if (details.IsConfigurable()) return false;
if (level == FROZEN && details.kind() == kData && !details.IsReadOnly()) {
return false;
@@ -3624,28 +3621,28 @@
}
bool TestPropertiesIntegrityLevel(JSObject object, PropertyAttributes level) {
- DCHECK(!object->map()->IsCustomElementsReceiverMap());
+ DCHECK(!object.map().IsCustomElementsReceiverMap());
- if (object->HasFastProperties()) {
- return TestFastPropertiesIntegrityLevel(object->map(), level);
+ if (object.HasFastProperties()) {
+ return TestFastPropertiesIntegrityLevel(object.map(), level);
}
return TestDictionaryPropertiesIntegrityLevel(
- object->property_dictionary(), object->GetReadOnlyRoots(), level);
+ object.property_dictionary(), object.GetReadOnlyRoots(), level);
}
bool TestElementsIntegrityLevel(JSObject object, PropertyAttributes level) {
- DCHECK(!object->HasSloppyArgumentsElements());
+ DCHECK(!object.HasSloppyArgumentsElements());
- ElementsKind kind = object->GetElementsKind();
+ ElementsKind kind = object.GetElementsKind();
if (IsDictionaryElementsKind(kind)) {
return TestDictionaryPropertiesIntegrityLevel(
- NumberDictionary::cast(object->elements()), object->GetReadOnlyRoots(),
+ NumberDictionary::cast(object.elements()), object.GetReadOnlyRoots(),
level);
}
if (IsFixedTypedArrayElementsKind(kind)) {
- if (level == FROZEN && JSArrayBufferView::cast(object)->byte_length() > 0)
+ if (level == FROZEN && JSArrayBufferView::cast(object).byte_length() > 0)
return false; // TypedArrays with elements can't be frozen.
return TestPropertiesIntegrityLevel(object, level);
}
@@ -3659,9 +3656,9 @@
}
bool FastTestIntegrityLevel(JSObject object, PropertyAttributes level) {
- DCHECK(!object->map()->IsCustomElementsReceiverMap());
+ DCHECK(!object.map().IsCustomElementsReceiverMap());
- return !object->map()->is_extensible() &&
+ return !object.map().is_extensible() &&
TestElementsIntegrityLevel(object, level) &&
TestPropertiesIntegrityLevel(object, level);
}
@@ -3670,7 +3667,7 @@
Maybe<bool> JSObject::TestIntegrityLevel(Handle<JSObject> object,
IntegrityLevel level) {
- if (!object->map()->IsCustomElementsReceiverMap() &&
+ if (!object->map().IsCustomElementsReceiverMap() &&
!object->HasSloppyArgumentsElements()) {
return Just(FastTestIntegrityLevel(*object, level));
}
@@ -3693,7 +3690,7 @@
NewTypeError(MessageTemplate::kNoAccess));
}
- if (!object->map()->is_extensible()) return Just(true);
+ if (!object->map().is_extensible()) return Just(true);
if (object->IsJSGlobalProxy()) {
PrototypeIterator iter(isolate, object);
@@ -3703,8 +3700,8 @@
should_throw);
}
- if (object->map()->has_named_interceptor() ||
- object->map()->has_indexed_interceptor()) {
+ if (object->map().has_named_interceptor() ||
+ object->map().has_indexed_interceptor()) {
RETURN_FAILURE(isolate, should_throw,
NewTypeError(MessageTemplate::kCannotPreventExt));
}
@@ -3727,7 +3724,7 @@
new_map->set_is_extensible(false);
JSObject::MigrateToMap(object, new_map);
- DCHECK(!object->map()->is_extensible());
+ DCHECK(!object->map().is_extensible());
return Just(true);
}
@@ -3741,10 +3738,10 @@
if (object->IsJSGlobalProxy()) {
PrototypeIterator iter(isolate, *object);
if (iter.IsAtEnd()) return false;
- DCHECK(iter.GetCurrent()->IsJSGlobalObject());
- return iter.GetCurrent<JSObject>()->map()->is_extensible();
+ DCHECK(iter.GetCurrent().IsJSGlobalObject());
+ return iter.GetCurrent<JSObject>().map().is_extensible();
}
- return object->map()->is_extensible();
+ return object->map().is_extensible();
}
template <typename Dictionary>
@@ -3755,13 +3752,13 @@
for (int i = 0; i < capacity; i++) {
Object k;
if (!dictionary->ToKey(roots, i, &k)) continue;
- if (k->FilterKey(ALL_PROPERTIES)) continue;
+ if (k.FilterKey(ALL_PROPERTIES)) continue;
PropertyDetails details = dictionary->DetailsAt(i);
int attrs = attributes;
// READ_ONLY is an invalid attribute for JS setters/getters.
if ((attributes & READ_ONLY) && details.kind() == kAccessor) {
Object v = dictionary->ValueAt(i);
- if (v->IsAccessorPair()) attrs &= ~READ_ONLY;
+ if (v.IsAccessorPair()) attrs &= ~READ_ONLY;
}
details = details.CopyAddAttributes(static_cast<PropertyAttributes>(attrs));
dictionary->DetailsAtPut(isolate, i, details);
@@ -3787,8 +3784,8 @@
NewTypeError(MessageTemplate::kNoAccess));
}
- if (attrs == NONE && !object->map()->is_extensible()) return Just(true);
- ElementsKind old_elements_kind = object->map()->elements_kind();
+ if (attrs == NONE && !object->map().is_extensible()) return Just(true);
+ ElementsKind old_elements_kind = object->map().elements_kind();
if (attrs != FROZEN && IsSealedElementsKind(old_elements_kind))
return Just(true);
if (old_elements_kind == PACKED_FROZEN_ELEMENTS) return Just(true);
@@ -3801,8 +3798,8 @@
PrototypeIterator::GetCurrent<JSObject>(iter), should_throw);
}
- if (object->map()->has_named_interceptor() ||
- object->map()->has_indexed_interceptor()) {
+ if (object->map().has_named_interceptor() ||
+ object->map().has_indexed_interceptor()) {
MessageTemplate message = MessageTemplate::kNone;
switch (attrs) {
case NONE:
@@ -3826,7 +3823,7 @@
!object->HasSlowStringWrapperElements()) {
int length = object->IsJSArray()
? Smi::ToInt(Handle<JSArray>::cast(object)->length())
- : object->elements()->length();
+ : object->elements().length();
new_element_dictionary =
length == 0 ? isolate->factory()->empty_slow_element_dictionary()
: object->GetElementsAccessor()->Normalize(object);
@@ -3883,7 +3880,7 @@
ReadOnlyRoots roots(isolate);
if (object->IsJSGlobalObject()) {
Handle<GlobalDictionary> dictionary(
- JSGlobalObject::cast(*object)->global_dictionary(), isolate);
+ JSGlobalObject::cast(*object).global_dictionary(), isolate);
JSObject::ApplyAttributesToDictionary(isolate, roots, dictionary,
attrs);
} else {
@@ -3895,15 +3892,14 @@
}
}
- if (object->map()->has_frozen_or_sealed_elements()) {
+ if (object->map().has_frozen_or_sealed_elements()) {
return Just(true);
}
// Both seal and preventExtensions always go through without modifications to
// typed array elements. Freeze works only if there are no actual elements.
if (object->HasFixedTypedArrayElements()) {
- if (attrs == FROZEN &&
- JSArrayBufferView::cast(*object)->byte_length() > 0) {
+ if (attrs == FROZEN && JSArrayBufferView::cast(*object).byte_length() > 0) {
isolate->Throw(*isolate->factory()->NewTypeError(
MessageTemplate::kCannotFreezeArrayBufferView));
return Nothing<bool>();
@@ -3911,8 +3907,8 @@
return Just(true);
}
- DCHECK(object->map()->has_dictionary_elements() ||
- object->map()->elements_kind() == SLOW_STRING_WRAPPER_ELEMENTS);
+ DCHECK(object->map().has_dictionary_elements() ||
+ object->map().elements_kind() == SLOW_STRING_WRAPPER_ELEMENTS);
if (!new_element_dictionary.is_null()) {
object->set_elements(*new_element_dictionary);
}
@@ -3948,41 +3944,41 @@
bool JSObject::HasEnumerableElements() {
// TODO(cbruni): cleanup
JSObject object = *this;
- switch (object->GetElementsKind()) {
+ switch (object.GetElementsKind()) {
case PACKED_SMI_ELEMENTS:
case PACKED_ELEMENTS:
case PACKED_FROZEN_ELEMENTS:
case PACKED_SEALED_ELEMENTS:
case PACKED_DOUBLE_ELEMENTS: {
- int length = object->IsJSArray()
- ? Smi::ToInt(JSArray::cast(object)->length())
- : object->elements()->length();
+ int length = object.IsJSArray()
+ ? Smi::ToInt(JSArray::cast(object).length())
+ : object.elements().length();
return length > 0;
}
case HOLEY_SMI_ELEMENTS:
case HOLEY_FROZEN_ELEMENTS:
case HOLEY_SEALED_ELEMENTS:
case HOLEY_ELEMENTS: {
- FixedArray elements = FixedArray::cast(object->elements());
- int length = object->IsJSArray()
- ? Smi::ToInt(JSArray::cast(object)->length())
- : elements->length();
+ FixedArray elements = FixedArray::cast(object.elements());
+ int length = object.IsJSArray()
+ ? Smi::ToInt(JSArray::cast(object).length())
+ : elements.length();
Isolate* isolate = GetIsolate();
for (int i = 0; i < length; i++) {
- if (!elements->is_the_hole(isolate, i)) return true;
+ if (!elements.is_the_hole(isolate, i)) return true;
}
return false;
}
case HOLEY_DOUBLE_ELEMENTS: {
- int length = object->IsJSArray()
- ? Smi::ToInt(JSArray::cast(object)->length())
- : object->elements()->length();
+ int length = object.IsJSArray()
+ ? Smi::ToInt(JSArray::cast(object).length())
+ : object.elements().length();
// Zero-length arrays would use the empty FixedArray...
if (length == 0) return false;
// ...so only cast to FixedDoubleArray otherwise.
- FixedDoubleArray elements = FixedDoubleArray::cast(object->elements());
+ FixedDoubleArray elements = FixedDoubleArray::cast(object.elements());
for (int i = 0; i < length; i++) {
- if (!elements->is_the_hole(i)) return true;
+ if (!elements.is_the_hole(i)) return true;
}
return false;
}
@@ -3991,12 +3987,12 @@
TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
{
- size_t length = JSTypedArray::cast(object)->length();
+ size_t length = JSTypedArray::cast(object).length();
return length > 0;
}
case DICTIONARY_ELEMENTS: {
- NumberDictionary elements = NumberDictionary::cast(object->elements());
- return elements->NumberOfEnumerableProperties() > 0;
+ NumberDictionary elements = NumberDictionary::cast(object.elements());
+ return elements.NumberOfEnumerableProperties() > 0;
}
case FAST_SLOPPY_ARGUMENTS_ELEMENTS:
case SLOW_SLOPPY_ARGUMENTS_ELEMENTS:
@@ -4004,10 +4000,10 @@
return true;
case FAST_STRING_WRAPPER_ELEMENTS:
case SLOW_STRING_WRAPPER_ELEMENTS:
- if (String::cast(JSValue::cast(object)->value())->length() > 0) {
+ if (String::cast(JSValue::cast(object).value()).length() > 0) {
return true;
}
- return object->elements()->length() > 0;
+ return object.elements().length() > 0;
case NO_ELEMENTS:
return false;
}
@@ -4101,59 +4097,59 @@
Object JSObject::SlowReverseLookup(Object value) {
if (HasFastProperties()) {
- int number_of_own_descriptors = map()->NumberOfOwnDescriptors();
- DescriptorArray descs = map()->instance_descriptors();
- bool value_is_number = value->IsNumber();
+ int number_of_own_descriptors = map().NumberOfOwnDescriptors();
+ DescriptorArray descs = map().instance_descriptors();
+ bool value_is_number = value.IsNumber();
for (int i = 0; i < number_of_own_descriptors; i++) {
- PropertyDetails details = descs->GetDetails(i);
+ PropertyDetails details = descs.GetDetails(i);
if (details.location() == kField) {
DCHECK_EQ(kData, details.kind());
FieldIndex field_index = FieldIndex::ForDescriptor(map(), i);
if (IsUnboxedDoubleField(field_index)) {
if (value_is_number) {
double property = RawFastDoublePropertyAt(field_index);
- if (property == value->Number()) {
- return descs->GetKey(i);
+ if (property == value.Number()) {
+ return descs.GetKey(i);
}
}
} else {
Object property = RawFastPropertyAt(field_index);
if (field_index.is_double()) {
- DCHECK(property->IsMutableHeapNumber());
- if (value_is_number && property->Number() == value->Number()) {
- return descs->GetKey(i);
+ DCHECK(property.IsMutableHeapNumber());
+ if (value_is_number && property.Number() == value.Number()) {
+ return descs.GetKey(i);
}
} else if (property == value) {
- return descs->GetKey(i);
+ return descs.GetKey(i);
}
}
} else {
DCHECK_EQ(kDescriptor, details.location());
if (details.kind() == kData) {
- if (descs->GetStrongValue(i) == value) {
- return descs->GetKey(i);
+ if (descs.GetStrongValue(i) == value) {
+ return descs.GetKey(i);
}
}
}
}
return GetReadOnlyRoots().undefined_value();
} else if (IsJSGlobalObject()) {
- return JSGlobalObject::cast(*this)->global_dictionary()->SlowReverseLookup(
+ return JSGlobalObject::cast(*this).global_dictionary().SlowReverseLookup(
value);
} else {
- return property_dictionary()->SlowReverseLookup(value);
+ return property_dictionary().SlowReverseLookup(value);
}
}
void JSObject::PrototypeRegistryCompactionCallback(HeapObject value,
int old_index,
int new_index) {
- DCHECK(value->IsMap() && Map::cast(value)->is_prototype_map());
+ DCHECK(value.IsMap() && Map::cast(value).is_prototype_map());
Map map = Map::cast(value);
- DCHECK(map->prototype_info()->IsPrototypeInfo());
- PrototypeInfo proto_info = PrototypeInfo::cast(map->prototype_info());
- DCHECK_EQ(old_index, proto_info->registry_slot());
- proto_info->set_registry_slot(new_index);
+ DCHECK(map.prototype_info().IsPrototypeInfo());
+ PrototypeInfo proto_info = PrototypeInfo::cast(map.prototype_info());
+ DCHECK_EQ(old_index, proto_info.registry_slot());
+ proto_info.set_registry_slot(new_index);
}
// static
@@ -4168,10 +4164,10 @@
if (!current->IsJSObject()) return;
Handle<JSObject> current_obj = Handle<JSObject>::cast(current);
Map current_map = current_obj->map();
- if (current_map->is_prototype_map()) {
+ if (current_map.is_prototype_map()) {
// If the map is already marked as should be fast, we're done. Its
// prototypes will have been marked already as well.
- if (current_map->should_be_fast_prototype_map()) return;
+ if (current_map.should_be_fast_prototype_map()) return;
Handle<Map> map(current_map, isolate);
Map::SetShouldBeFastPrototypeMap(map, true, isolate);
JSObject::OptimizeAsPrototype(current_obj);
@@ -4184,8 +4180,8 @@
if (!object->HasFastProperties()) return false;
if (object->IsJSGlobalProxy()) return false;
if (object->GetIsolate()->bootstrapper()->IsActive()) return false;
- return !object->map()->is_prototype_map() ||
- !object->map()->should_be_fast_prototype_map();
+ return !object->map().is_prototype_map() ||
+ !object->map().should_be_fast_prototype_map();
}
// static
@@ -4197,8 +4193,8 @@
JSObject::NormalizeProperties(object, KEEP_INOBJECT_PROPERTIES, 0,
"NormalizeAsPrototype");
}
- if (object->map()->is_prototype_map()) {
- if (object->map()->should_be_fast_prototype_map() &&
+ if (object->map().is_prototype_map()) {
+ if (object->map().should_be_fast_prototype_map() &&
!object->HasFastProperties()) {
JSObject::MigrateSlowToFast(object, 0, "OptimizeAsPrototype");
}
@@ -4207,18 +4203,18 @@
handle(object->map(), object->GetIsolate()),
"CopyAsPrototype");
JSObject::MigrateToMap(object, new_map);
- object->map()->set_is_prototype_map(true);
+ object->map().set_is_prototype_map(true);
// Replace the pointer to the exact constructor with the Object function
// from the same context if undetectable from JS. This is to avoid keeping
// memory alive unnecessarily.
- Object maybe_constructor = object->map()->GetConstructor();
- if (maybe_constructor->IsJSFunction()) {
+ Object maybe_constructor = object->map().GetConstructor();
+ if (maybe_constructor.IsJSFunction()) {
JSFunction constructor = JSFunction::cast(maybe_constructor);
- if (!constructor->shared()->IsApiFunction()) {
- Context context = constructor->context()->native_context();
- JSFunction object_function = context->object_function();
- object->map()->SetConstructor(object_function);
+ if (!constructor.shared().IsApiFunction()) {
+ Context context = constructor.context().native_context();
+ JSFunction object_function = context.object_function();
+ object->map().SetConstructor(object_function);
}
}
}
@@ -4226,8 +4222,8 @@
// static
void JSObject::ReoptimizeIfPrototype(Handle<JSObject> object) {
- if (!object->map()->is_prototype_map()) return;
- if (!object->map()->should_be_fast_prototype_map()) return;
+ if (!object->map().is_prototype_map()) return;
+ if (!object->map().should_be_fast_prototype_map()) return;
OptimizeAsPrototype(object);
}
@@ -4269,7 +4265,7 @@
PrintF("Registering %p as a user of prototype %p (map=%p).\n",
reinterpret_cast<void*>(current_user->ptr()),
reinterpret_cast<void*>(proto->ptr()),
- reinterpret_cast<void*>(proto->map()->ptr()));
+ reinterpret_cast<void*>(proto->map().ptr()));
}
current_user = handle(proto->map(), isolate);
@@ -4283,23 +4279,23 @@
bool JSObject::UnregisterPrototypeUser(Handle<Map> user, Isolate* isolate) {
DCHECK(user->is_prototype_map());
// If it doesn't have a PrototypeInfo, it was never registered.
- if (!user->prototype_info()->IsPrototypeInfo()) return false;
+ if (!user->prototype_info().IsPrototypeInfo()) return false;
// If it had no prototype before, see if it had users that might expect
// registration.
- if (!user->prototype()->IsJSObject()) {
+ if (!user->prototype().IsJSObject()) {
Object users =
- PrototypeInfo::cast(user->prototype_info())->prototype_users();
- return users->IsWeakArrayList();
+ PrototypeInfo::cast(user->prototype_info()).prototype_users();
+ return users.IsWeakArrayList();
}
Handle<JSObject> prototype(JSObject::cast(user->prototype()), isolate);
Handle<PrototypeInfo> user_info =
Map::GetOrCreatePrototypeInfo(user, isolate);
int slot = user_info->registry_slot();
if (slot == PrototypeInfo::UNREGISTERED) return false;
- DCHECK(prototype->map()->is_prototype_map());
- Object maybe_proto_info = prototype->map()->prototype_info();
+ DCHECK(prototype->map().is_prototype_map());
+ Object maybe_proto_info = prototype->map().prototype_info();
// User knows its registry slot, prototype info and user registry must exist.
- DCHECK(maybe_proto_info->IsPrototypeInfo());
+ DCHECK(maybe_proto_info.IsPrototypeInfo());
Handle<PrototypeInfo> proto_info(PrototypeInfo::cast(maybe_proto_info),
isolate);
Handle<WeakArrayList> prototype_users(
@@ -4320,36 +4316,35 @@
// AccessorAssembler::InvalidateValidityCellIfPrototype() which does pre-checks
// before jumping here.
void InvalidateOnePrototypeValidityCellInternal(Map map) {
- DCHECK(map->is_prototype_map());
+ DCHECK(map.is_prototype_map());
if (FLAG_trace_prototype_users) {
PrintF("Invalidating prototype map %p 's cell\n",
reinterpret_cast<void*>(map.ptr()));
}
- Object maybe_cell = map->prototype_validity_cell();
- if (maybe_cell->IsCell()) {
+ Object maybe_cell = map.prototype_validity_cell();
+ if (maybe_cell.IsCell()) {
// Just set the value; the cell will be replaced lazily.
Cell cell = Cell::cast(maybe_cell);
- cell->set_value(Smi::FromInt(Map::kPrototypeChainInvalid));
+ cell.set_value(Smi::FromInt(Map::kPrototypeChainInvalid));
}
}
void InvalidatePrototypeChainsInternal(Map map) {
InvalidateOnePrototypeValidityCellInternal(map);
- Object maybe_proto_info = map->prototype_info();
- if (!maybe_proto_info->IsPrototypeInfo()) return;
+ Object maybe_proto_info = map.prototype_info();
+ if (!maybe_proto_info.IsPrototypeInfo()) return;
PrototypeInfo proto_info = PrototypeInfo::cast(maybe_proto_info);
- if (!proto_info->prototype_users()->IsWeakArrayList()) {
+ if (!proto_info.prototype_users().IsWeakArrayList()) {
return;
}
WeakArrayList prototype_users =
- WeakArrayList::cast(proto_info->prototype_users());
+ WeakArrayList::cast(proto_info.prototype_users());
// For now, only maps register themselves as users.
- for (int i = PrototypeUsers::kFirstIndex; i < prototype_users->length();
- ++i) {
+ for (int i = PrototypeUsers::kFirstIndex; i < prototype_users.length(); ++i) {
HeapObject heap_object;
- if (prototype_users->Get(i)->GetHeapObjectIfWeak(&heap_object) &&
- heap_object->IsMap()) {
+ if (prototype_users.Get(i)->GetHeapObjectIfWeak(&heap_object) &&
+ heap_object.IsMap()) {
// Walk the prototype chain (backwards, towards leaf objects) if
// necessary.
InvalidatePrototypeChainsInternal(Map::cast(heap_object));
@@ -4376,7 +4371,7 @@
// static
void JSObject::InvalidatePrototypeValidityCell(JSGlobalObject global) {
DisallowHeapAllocation no_gc;
- InvalidateOnePrototypeValidityCellInternal(global->map());
+ InvalidateOnePrototypeValidityCellInternal(global.map());
}
Maybe<bool> JSObject::SetPrototype(Handle<JSObject> object,
@@ -4404,7 +4399,7 @@
// SpiderMonkey behaves this way.
if (!value->IsJSReceiver() && !value->IsNull(isolate)) return Just(true);
- bool all_extensible = object->map()->is_extensible();
+ bool all_extensible = object->map().is_extensible();
Handle<JSObject> real_receiver = object;
if (from_javascript) {
// Find the first object in the chain whose prototype object is not
@@ -4416,7 +4411,7 @@
// JSProxies.
real_receiver = PrototypeIterator::GetCurrent<JSObject>(iter);
iter.Advance();
- all_extensible = all_extensible && real_receiver->map()->is_extensible();
+ all_extensible = all_extensible && real_receiver->map().is_extensible();
}
}
Handle<Map> map(real_receiver->map(), isolate);
@@ -4503,14 +4498,14 @@
void JSObject::ValidateElements(JSObject object) {
#ifdef ENABLE_SLOW_DCHECKS
if (FLAG_enable_slow_asserts) {
- object->GetElementsAccessor()->Validate(object);
+ object.GetElementsAccessor()->Validate(object);
}
#endif
}
bool JSObject::WouldConvertToSlowElements(uint32_t index) {
if (!HasFastElements()) return false;
- uint32_t capacity = static_cast<uint32_t>(elements()->length());
+ uint32_t capacity = static_cast<uint32_t>(elements().length());
uint32_t new_capacity;
return ShouldConvertToSlowElements(*this, capacity, index, &new_capacity);
}
@@ -4521,23 +4516,23 @@
uint32_t* new_capacity) {
// If properties with non-standard attributes or accessors were added, we
// cannot go back to fast elements.
- if (dictionary->requires_slow_elements()) return false;
+ if (dictionary.requires_slow_elements()) return false;
// Adding a property with this index will require slow elements.
if (index >= static_cast<uint32_t>(Smi::kMaxValue)) return false;
- if (object->IsJSArray()) {
- Object length = JSArray::cast(object)->length();
- if (!length->IsSmi()) return false;
+ if (object.IsJSArray()) {
+ Object length = JSArray::cast(object).length();
+ if (!length.IsSmi()) return false;
*new_capacity = static_cast<uint32_t>(Smi::ToInt(length));
- } else if (object->IsJSSloppyArgumentsObject()) {
+ } else if (object.IsJSSloppyArgumentsObject()) {
return false;
} else {
- *new_capacity = dictionary->max_number_key() + 1;
+ *new_capacity = dictionary.max_number_key() + 1;
}
*new_capacity = Max(index + 1, *new_capacity);
- uint32_t dictionary_size = static_cast<uint32_t>(dictionary->Capacity()) *
+ uint32_t dictionary_size = static_cast<uint32_t>(dictionary.Capacity()) *
NumberDictionary::kEntrySize;
// Turn fast if the dictionary only saves 50% space.
@@ -4545,24 +4540,24 @@
}
static ElementsKind BestFittingFastElementsKind(JSObject object) {
- if (!object->map()->CanHaveFastTransitionableElementsKind()) {
+ if (!object.map().CanHaveFastTransitionableElementsKind()) {
return HOLEY_ELEMENTS;
}
- if (object->HasSloppyArgumentsElements()) {
+ if (object.HasSloppyArgumentsElements()) {
return FAST_SLOPPY_ARGUMENTS_ELEMENTS;
}
- if (object->HasStringWrapperElements()) {
+ if (object.HasStringWrapperElements()) {
return FAST_STRING_WRAPPER_ELEMENTS;
}
- DCHECK(object->HasDictionaryElements());
- NumberDictionary dictionary = object->element_dictionary();
+ DCHECK(object.HasDictionaryElements());
+ NumberDictionary dictionary = object.element_dictionary();
ElementsKind kind = HOLEY_SMI_ELEMENTS;
- for (int i = 0; i < dictionary->Capacity(); i++) {
- Object key = dictionary->KeyAt(i);
- if (key->IsNumber()) {
- Object value = dictionary->ValueAt(i);
- if (!value->IsNumber()) return HOLEY_ELEMENTS;
- if (!value->IsSmi()) {
+ for (int i = 0; i < dictionary.Capacity(); i++) {
+ Object key = dictionary.KeyAt(i);
+ if (key.IsNumber()) {
+ Object value = dictionary.ValueAt(i);
+ if (!value.IsNumber()) return HOLEY_ELEMENTS;
+ if (!value.IsSmi()) {
if (!FLAG_unbox_double_arrays) return HOLEY_ELEMENTS;
kind = HOLEY_DOUBLE_ELEMENTS;
}
@@ -4575,7 +4570,7 @@
void JSObject::AddDataElement(Handle<JSObject> object, uint32_t index,
Handle<Object> value,
PropertyAttributes attributes) {
- DCHECK(object->map()->is_extensible());
+ DCHECK(object->map().is_extensible());
Isolate* isolate = object->GetIsolate();
@@ -4583,14 +4578,14 @@
uint32_t new_capacity = 0;
if (object->IsJSArray()) {
- CHECK(JSArray::cast(*object)->length()->ToArrayLength(&old_length));
+ CHECK(JSArray::cast(*object).length().ToArrayLength(&old_length));
}
ElementsKind kind = object->GetElementsKind();
FixedArrayBase elements = object->elements();
ElementsKind dictionary_kind = DICTIONARY_ELEMENTS;
if (IsSloppyArgumentsElementsKind(kind)) {
- elements = SloppyArgumentsElements::cast(elements)->arguments();
+ elements = SloppyArgumentsElements::cast(elements).arguments();
dictionary_kind = SLOW_SLOPPY_ARGUMENTS_ELEMENTS;
} else if (IsStringWrapperElementsKind(kind)) {
dictionary_kind = SLOW_STRING_WRAPPER_ELEMENTS;
@@ -4598,13 +4593,13 @@
if (attributes != NONE) {
kind = dictionary_kind;
- } else if (elements->IsNumberDictionary()) {
+ } else if (elements.IsNumberDictionary()) {
kind = ShouldConvertToFastElements(
*object, NumberDictionary::cast(elements), index, &new_capacity)
? BestFittingFastElementsKind(*object)
: dictionary_kind;
} else if (ShouldConvertToSlowElements(
- *object, static_cast<uint32_t>(elements->length()), index,
+ *object, static_cast<uint32_t>(elements.length()), index,
&new_capacity)) {
kind = dictionary_kind;
}
@@ -4621,7 +4616,7 @@
if (object->IsJSArray() && index >= old_length) {
Handle<Object> new_length =
isolate->factory()->NewNumberFromUint(index + 1);
- JSArray::cast(*object)->set_length(*new_length);
+ JSArray::cast(*object).set_length(*new_length);
}
}
@@ -4644,7 +4639,7 @@
if (memento.is_null()) return false;
// Walk through to the Allocation Site
- site = handle(memento->GetAllocationSite(), heap->isolate());
+ site = handle(memento.GetAllocationSite(), heap->isolate());
}
return AllocationSite::DigestTransitionFeedback<update_or_check>(site,
to_kind);
@@ -4686,19 +4681,19 @@
} else {
DCHECK((IsSmiElementsKind(from_kind) && IsDoubleElementsKind(to_kind)) ||
(IsDoubleElementsKind(from_kind) && IsObjectElementsKind(to_kind)));
- uint32_t c = static_cast<uint32_t>(object->elements()->length());
+ uint32_t c = static_cast<uint32_t>(object->elements().length());
ElementsAccessor::ForKind(to_kind)->GrowCapacityAndConvert(object, c);
}
}
template <typename BackingStore>
static int HoleyElementsUsage(JSObject object, BackingStore store) {
- Isolate* isolate = object->GetIsolate();
- int limit = object->IsJSArray() ? Smi::ToInt(JSArray::cast(object)->length())
- : store->length();
+ Isolate* isolate = object.GetIsolate();
+ int limit = object.IsJSArray() ? Smi::ToInt(JSArray::cast(object).length())
+ : store.length();
int used = 0;
for (int i = 0; i < limit; ++i) {
- if (!store->is_the_hole(isolate, i)) ++used;
+ if (!store.is_the_hole(isolate, i)) ++used;
}
return used;
}
@@ -4711,10 +4706,10 @@
case PACKED_ELEMENTS:
case PACKED_FROZEN_ELEMENTS:
case PACKED_SEALED_ELEMENTS:
- return IsJSArray() ? Smi::ToInt(JSArray::cast(*this)->length())
- : store->length();
+ return IsJSArray() ? Smi::ToInt(JSArray::cast(*this).length())
+ : store.length();
case FAST_SLOPPY_ARGUMENTS_ELEMENTS:
- store = SloppyArgumentsElements::cast(store)->arguments();
+ store = SloppyArgumentsElements::cast(store).arguments();
V8_FALLTHROUGH;
case HOLEY_SMI_ELEMENTS:
case HOLEY_ELEMENTS:
@@ -4723,7 +4718,7 @@
case FAST_STRING_WRAPPER_ELEMENTS:
return HoleyElementsUsage(*this, FixedArray::cast(store));
case HOLEY_DOUBLE_ELEMENTS:
- if (elements()->length() == 0) return 0;
+ if (elements().length() == 0) return 0;
return HoleyElementsUsage(*this, FixedDoubleArray::cast(store));
case SLOW_SLOPPY_ARGUMENTS_ELEMENTS:
@@ -4773,7 +4768,7 @@
// These object types can carry information relevant for embedders. The
// *_API_* types are generated through templates which can have embedder
// fields. The other types have their embedder fields added at compile time.
- auto instance_type = map()->instance_type();
+ auto instance_type = map().instance_type();
return instance_type == JS_API_OBJECT_TYPE ||
instance_type == JS_ARRAY_BUFFER_TYPE ||
instance_type == JS_DATA_VIEW_TYPE ||
@@ -4782,7 +4777,7 @@
}
bool JSObject::IsDroppableApiWrapper() {
- auto instance_type = map()->instance_type();
+ auto instance_type = map().instance_type();
return instance_type == JS_API_OBJECT_TYPE ||
instance_type == JS_SPECIAL_API_OBJECT_TYPE;
}
@@ -4790,7 +4785,7 @@
// static
MaybeHandle<NativeContext> JSBoundFunction::GetFunctionRealm(
Handle<JSBoundFunction> function) {
- DCHECK(function->map()->is_constructor());
+ DCHECK(function->map().is_constructor());
return JSReceiver::GetFunctionRealm(
handle(function->bound_target_function(), function->GetIsolate()));
}
@@ -4802,14 +4797,14 @@
Handle<String> target_name = prefix;
Factory* factory = isolate->factory();
// Concatenate the "bound " up to the last non-bound target.
- while (function->bound_target_function()->IsJSBoundFunction()) {
+ while (function->bound_target_function().IsJSBoundFunction()) {
ASSIGN_RETURN_ON_EXCEPTION(isolate, target_name,
factory->NewConsString(prefix, target_name),
String);
function = handle(JSBoundFunction::cast(function->bound_target_function()),
isolate);
}
- if (function->bound_target_function()->IsJSFunction()) {
+ if (function->bound_target_function().IsJSFunction()) {
Handle<JSFunction> target(
JSFunction::cast(function->bound_target_function()), isolate);
Handle<Object> name = JSFunction::GetName(isolate, target);
@@ -4823,14 +4818,14 @@
// static
Maybe<int> JSBoundFunction::GetLength(Isolate* isolate,
Handle<JSBoundFunction> function) {
- int nof_bound_arguments = function->bound_arguments()->length();
- while (function->bound_target_function()->IsJSBoundFunction()) {
+ int nof_bound_arguments = function->bound_arguments().length();
+ while (function->bound_target_function().IsJSBoundFunction()) {
function = handle(JSBoundFunction::cast(function->bound_target_function()),
isolate);
// Make sure we never overflow {nof_bound_arguments}, the number of
// arguments of a function is strictly limited by the max length of an
// JSAarray, Smi::kMaxValue is thus a reasonably good overestimate.
- int length = function->bound_arguments()->length();
+ int length = function->bound_arguments().length();
if (V8_LIKELY(Smi::kMaxValue - nof_bound_arguments > length)) {
nof_bound_arguments += length;
} else {
@@ -4856,17 +4851,17 @@
// static
Handle<Object> JSFunction::GetName(Isolate* isolate,
Handle<JSFunction> function) {
- if (function->shared()->name_should_print_as_anonymous()) {
+ if (function->shared().name_should_print_as_anonymous()) {
return isolate->factory()->anonymous_string();
}
- return handle(function->shared()->Name(), isolate);
+ return handle(function->shared().Name(), isolate);
}
// static
Handle<NativeContext> JSFunction::GetFunctionRealm(
Handle<JSFunction> function) {
- DCHECK(function->map()->is_constructor());
- return handle(function->context()->native_context(), function->GetIsolate());
+ DCHECK(function->map().is_constructor());
+ return handle(function->context().native_context(), function->GetIsolate());
}
void JSFunction::MarkForOptimization(ConcurrencyMode mode) {
@@ -4877,11 +4872,11 @@
}
DCHECK(!is_compiled() || IsInterpreted());
- DCHECK(shared()->IsInterpreted());
+ DCHECK(shared().IsInterpreted());
DCHECK(!IsOptimized());
DCHECK(!HasOptimizedCode());
- DCHECK(shared()->allows_lazy_compilation() ||
- !shared()->optimization_disabled());
+ DCHECK(shared().allows_lazy_compilation() ||
+ !shared().optimization_disabled());
if (mode == ConcurrencyMode::kConcurrent) {
if (IsInOptimizationQueue()) {
@@ -4907,16 +4902,16 @@
// static
void JSFunction::EnsureClosureFeedbackCellArray(Handle<JSFunction> function) {
Isolate* const isolate = function->GetIsolate();
- DCHECK(function->shared()->is_compiled());
- DCHECK(function->shared()->HasFeedbackMetadata());
+ DCHECK(function->shared().is_compiled());
+ DCHECK(function->shared().HasFeedbackMetadata());
if (function->has_closure_feedback_cell_array() ||
function->has_feedback_vector()) {
return;
}
- if (function->shared()->HasAsmWasmData()) return;
+ if (function->shared().HasAsmWasmData()) return;
Handle<SharedFunctionInfo> shared(function->shared(), isolate);
- DCHECK(function->shared()->HasBytecodeArray());
+ DCHECK(function->shared().HasBytecodeArray());
Handle<HeapObject> feedback_cell_array =
ClosureFeedbackCellArray::New(isolate, shared);
// Many closure cell is used as a way to specify that there is no
@@ -4930,20 +4925,20 @@
isolate->factory()->NewOneClosureCell(feedback_cell_array);
function->set_raw_feedback_cell(*feedback_cell);
} else {
- function->raw_feedback_cell()->set_value(*feedback_cell_array);
+ function->raw_feedback_cell().set_value(*feedback_cell_array);
}
}
// static
void JSFunction::EnsureFeedbackVector(Handle<JSFunction> function) {
Isolate* const isolate = function->GetIsolate();
- DCHECK(function->shared()->is_compiled());
- DCHECK(function->shared()->HasFeedbackMetadata());
+ DCHECK(function->shared().is_compiled());
+ DCHECK(function->shared().HasFeedbackMetadata());
if (function->has_feedback_vector()) return;
- if (function->shared()->HasAsmWasmData()) return;
+ if (function->shared().HasAsmWasmData()) return;
Handle<SharedFunctionInfo> shared(function->shared(), isolate);
- DCHECK(function->shared()->HasBytecodeArray());
+ DCHECK(function->shared().HasBytecodeArray());
EnsureClosureFeedbackCellArray(function);
Handle<ClosureFeedbackCellArray> closure_feedback_cell_array =
@@ -4955,7 +4950,7 @@
// for more details.
DCHECK(function->raw_feedback_cell() !=
isolate->heap()->many_closures_cell());
- function->raw_feedback_cell()->set_value(*feedback_vector);
+ function->raw_feedback_cell().set_value(*feedback_vector);
}
// static
@@ -5004,7 +4999,7 @@
// If the function is used as the global Array function, cache the
// updated initial maps (and transitioned versions) in the native context.
- Handle<Context> native_context(function->context()->native_context(),
+ Handle<Context> native_context(function->context().native_context(),
isolate);
Handle<Object> array_function(
native_context->get(Context::ARRAY_FUNCTION_INDEX), isolate);
@@ -5015,7 +5010,7 @@
}
// Deoptimize all code that embeds the previous initial map.
- initial_map->dependent_code()->DeoptimizeDependentCodeGroup(
+ initial_map->dependent_code().DeoptimizeDependentCodeGroup(
isolate, DependentCode::kInitialMapChangedGroup);
} else {
// Put the value in the initial map field until an initial map is
@@ -5034,7 +5029,7 @@
void JSFunction::SetPrototype(Handle<JSFunction> function,
Handle<Object> value) {
DCHECK(function->IsConstructor() ||
- IsGeneratorFunction(function->shared()->kind()));
+ IsGeneratorFunction(function->shared().kind()));
Isolate* isolate = function->GetIsolate();
Handle<JSReceiver> construct_prototype;
@@ -5053,8 +5048,8 @@
new_map->SetConstructor(*value);
new_map->set_has_non_instance_prototype(true);
- FunctionKind kind = function->shared()->kind();
- Handle<Context> native_context(function->context()->native_context(),
+ FunctionKind kind = function->shared().kind();
+ Handle<Context> native_context(function->context().native_context(),
isolate);
construct_prototype = Handle<JSReceiver>(
@@ -5066,7 +5061,7 @@
isolate);
} else {
construct_prototype = Handle<JSReceiver>::cast(value);
- function->map()->set_has_non_instance_prototype(false);
+ function->map().set_has_non_instance_prototype(false);
}
SetInstancePrototype(isolate, function, construct_prototype);
@@ -5080,22 +5075,22 @@
map->SetConstructor(*function);
if (FLAG_trace_maps) {
LOG(function->GetIsolate(), MapEvent("InitialMap", Map(), *map, "",
- function->shared()->DebugName()));
+ function->shared().DebugName()));
}
}
void JSFunction::EnsureHasInitialMap(Handle<JSFunction> function) {
DCHECK(function->has_prototype_slot());
DCHECK(function->IsConstructor() ||
- IsResumableFunction(function->shared()->kind()));
+ IsResumableFunction(function->shared().kind()));
if (function->has_initial_map()) return;
Isolate* isolate = function->GetIsolate();
// First create a new map with the size and number of in-object properties
// suggested by the function.
InstanceType instance_type;
- if (IsResumableFunction(function->shared()->kind())) {
- instance_type = IsAsyncGeneratorFunction(function->shared()->kind())
+ if (IsResumableFunction(function->shared().kind())) {
+ instance_type = IsAsyncGeneratorFunction(function->shared().kind())
? JS_ASYNC_GENERATOR_OBJECT_TYPE
: JS_GENERATOR_OBJECT_TYPE;
} else {
@@ -5239,8 +5234,8 @@
// Check that |function|'s initial map still in sync with the |constructor|,
// otherwise we must create a new initial map for |function|.
if (new_target->has_initial_map() &&
- new_target->initial_map()->GetConstructor() == *constructor) {
- DCHECK(new_target->instance_prototype()->IsJSReceiver());
+ new_target->initial_map().GetConstructor() == *constructor) {
+ DCHECK(new_target->instance_prototype().IsJSReceiver());
return true;
}
InstanceType instance_type = constructor_initial_map->instance_type();
@@ -5250,7 +5245,7 @@
// Link initial map and constructor function if the new.target is actually a
// subclass constructor.
- if (!IsDerivedConstructor(new_target->shared()->kind())) return false;
+ if (!IsDerivedConstructor(new_target->shared().kind())) return false;
int instance_size;
int in_object_properties;
@@ -5272,7 +5267,7 @@
map->set_new_target_is_base(false);
Handle<HeapObject> prototype(new_target->instance_prototype(), isolate);
JSFunction::SetInitialMap(new_target, map, prototype);
- DCHECK(new_target->instance_prototype()->IsJSReceiver());
+ DCHECK(new_target->instance_prototype().IsJSReceiver());
map->SetConstructor(*constructor);
map->set_construction_counter(Map::kNoSlackTracking);
map->StartInobjectSlackTracking();
@@ -5356,15 +5351,15 @@
int JSFunction::ComputeInstanceSizeWithMinSlack(Isolate* isolate) {
CHECK(has_initial_map());
- if (initial_map()->IsInobjectSlackTrackingInProgress()) {
- int slack = initial_map()->ComputeMinObjectSlack(isolate);
- return initial_map()->InstanceSizeFromSlack(slack);
+ if (initial_map().IsInobjectSlackTrackingInProgress()) {
+ int slack = initial_map().ComputeMinObjectSlack(isolate);
+ return initial_map().InstanceSizeFromSlack(slack);
}
- return initial_map()->instance_size();
+ return initial_map().instance_size();
}
void JSFunction::PrintName(FILE* out) {
- std::unique_ptr<char[]> name = shared()->DebugName()->ToCString();
+ std::unique_ptr<char[]> name = shared().DebugName().ToCString();
PrintF(out, "%s", name.get());
}
@@ -5373,7 +5368,7 @@
Handle<Object> name =
JSReceiver::GetDataProperty(function, isolate->factory()->name_string());
if (name->IsString()) return Handle<String>::cast(name);
- return handle(function->shared()->DebugName(), isolate);
+ return handle(function->shared().DebugName(), isolate);
}
Handle<String> JSFunction::GetDebugName(Handle<JSFunction> function) {
@@ -5437,10 +5432,10 @@
if (maybe_class_positions->IsClassPositions()) {
ClassPositions class_positions =
ClassPositions::cast(*maybe_class_positions);
- int start_position = class_positions->start();
- int end_position = class_positions->end();
+ int start_position = class_positions.start();
+ int end_position = class_positions.end();
Handle<String> script_source(
- String::cast(Script::cast(shared_info->script())->source()), isolate);
+ String::cast(Script::cast(shared_info->script()).source()), isolate);
return isolate->factory()->NewSubString(script_source, start_position,
end_position);
}
@@ -5544,7 +5539,7 @@
if (has_feedback_vector()) {
FeedbackVector vector = feedback_vector();
Isolate* isolate = GetIsolate();
- if (vector->ClearSlots(isolate)) {
+ if (vector.ClearSlots(isolate)) {
IC::OnFeedbackChanged(isolate, vector, FeedbackSlot::Invalid(), *this,
"ClearTypeFeedbackInfo");
}
@@ -5578,7 +5573,7 @@
PropertyCellType original_cell_type = cell->property_details().cell_type();
DCHECK(original_cell_type == PropertyCellType::kInvalidated ||
original_cell_type == PropertyCellType::kUninitialized);
- DCHECK(cell->value()->IsTheHole(isolate));
+ DCHECK(cell->value().IsTheHole(isolate));
if (original_cell_type == PropertyCellType::kInvalidated) {
cell = PropertyCell::InvalidateEntry(isolate, dictionary, entry);
}
@@ -5630,8 +5625,8 @@
Object object(raw_object);
Smi index(smi_index);
return JSDate::cast(object)
- ->DoGetField(static_cast<FieldIndex>(index->value()))
- ->ptr();
+ .DoGetField(static_cast<FieldIndex>(index.value()))
+ .ptr();
}
Object JSDate::DoGetField(FieldIndex index) {
@@ -5641,10 +5636,10 @@
if (index < kFirstUncachedField) {
Object stamp = cache_stamp();
- if (stamp != date_cache->stamp() && stamp->IsSmi()) {
+ if (stamp != date_cache->stamp() && stamp.IsSmi()) {
// Since the stamp is not NaN, the value is also not NaN.
int64_t local_time_ms =
- date_cache->ToLocal(static_cast<int64_t>(value()->Number()));
+ date_cache->ToLocal(static_cast<int64_t>(value().Number()));
SetCachedFields(local_time_ms, date_cache);
}
switch (index) {
@@ -5668,10 +5663,10 @@
}
if (index >= kFirstUTCField) {
- return GetUTCField(index, value()->Number(), date_cache);
+ return GetUTCField(index, value().Number(), date_cache);
}
- double time = value()->Number();
+ double time = value().Number();
if (std::isnan(time)) return GetReadOnlyRoots().nan_value();
int64_t local_time_ms = date_cache->ToLocal(static_cast<int64_t>(time));
@@ -5782,13 +5777,13 @@
Isolate* isolate, Handle<JSMessageObject> message) {
if (!message->DidEnsureSourcePositionsAvailable()) {
DCHECK_EQ(message->start_position(), -1);
- DCHECK_GE(message->bytecode_offset()->value(), 0);
+ DCHECK_GE(message->bytecode_offset().value(), 0);
Handle<SharedFunctionInfo> shared_info(
SharedFunctionInfo::cast(message->shared_info()), isolate);
SharedFunctionInfo::EnsureSourcePositionsAvailable(isolate, shared_info);
DCHECK(shared_info->HasBytecodeArray());
- int position = shared_info->abstract_code()->SourcePosition(
- message->bytecode_offset()->value());
+ int position = shared_info->abstract_code().SourcePosition(
+ message->bytecode_offset().value());
DCHECK_GE(position, 0);
message->set_start_position(position);
message->set_end_position(position + 1);
diff --git a/src/objects/js-plural-rules.cc b/src/objects/js-plural-rules.cc
index c9b4a33..8daf5db 100644
--- a/src/objects/js-plural-rules.cc
+++ b/src/objects/js-plural-rules.cc
@@ -199,11 +199,11 @@
MaybeHandle<String> JSPluralRules::ResolvePlural(
Isolate* isolate, Handle<JSPluralRules> plural_rules, double number) {
- icu::PluralRules* icu_plural_rules = plural_rules->icu_plural_rules()->raw();
+ icu::PluralRules* icu_plural_rules = plural_rules->icu_plural_rules().raw();
CHECK_NOT_NULL(icu_plural_rules);
icu::DecimalFormat* icu_decimal_format =
- plural_rules->icu_decimal_format()->raw();
+ plural_rules->icu_decimal_format().raw();
CHECK_NOT_NULL(icu_decimal_format);
// Currently, PluralRules doesn't implement all the options for rounding that
@@ -262,7 +262,7 @@
"type");
icu::DecimalFormat* icu_decimal_format =
- plural_rules->icu_decimal_format()->raw();
+ plural_rules->icu_decimal_format().raw();
CHECK_NOT_NULL(icu_decimal_format);
// This is a safe upcast as icu::DecimalFormat inherits from
@@ -296,7 +296,7 @@
// 6. Let pluralCategories be a List of Strings representing the
// possible results of PluralRuleSelect for the selected locale pr.
- icu::PluralRules* icu_plural_rules = plural_rules->icu_plural_rules()->raw();
+ icu::PluralRules* icu_plural_rules = plural_rules->icu_plural_rules().raw();
CHECK_NOT_NULL(icu_plural_rules);
UErrorCode status = U_ZERO_ERROR;
diff --git a/src/objects/js-proxy-inl.h b/src/objects/js-proxy-inl.h
index e0d0835..55c9f1c 100644
--- a/src/objects/js-proxy-inl.h
+++ b/src/objects/js-proxy-inl.h
@@ -22,7 +22,7 @@
ACCESSORS(JSProxy, target, Object, kTargetOffset)
ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
-bool JSProxy::IsRevoked() const { return !handler()->IsJSReceiver(); }
+bool JSProxy::IsRevoked() const { return !handler().IsJSReceiver(); }
} // namespace internal
} // namespace v8
diff --git a/src/objects/js-regexp-inl.h b/src/objects/js-regexp-inl.h
index 8322a3c..a82cb09 100644
--- a/src/objects/js-regexp-inl.h
+++ b/src/objects/js-regexp-inl.h
@@ -28,9 +28,9 @@
JSRegExp::Type JSRegExp::TypeTag() const {
Object data = this->data();
- if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
- Smi smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
- return static_cast<JSRegExp::Type>(smi->value());
+ if (data.IsUndefined()) return JSRegExp::NOT_COMPILED;
+ Smi smi = Smi::cast(FixedArray::cast(data).get(kTagIndex));
+ return static_cast<JSRegExp::Type>(smi.value());
}
int JSRegExp::CaptureCount() {
@@ -45,21 +45,21 @@
}
JSRegExp::Flags JSRegExp::GetFlags() {
- DCHECK(this->data()->IsFixedArray());
+ DCHECK(this->data().IsFixedArray());
Object data = this->data();
- Smi smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
- return Flags(smi->value());
+ Smi smi = Smi::cast(FixedArray::cast(data).get(kFlagsIndex));
+ return Flags(smi.value());
}
String JSRegExp::Pattern() {
- DCHECK(this->data()->IsFixedArray());
+ DCHECK(this->data().IsFixedArray());
Object data = this->data();
- String pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
+ String pattern = String::cast(FixedArray::cast(data).get(kSourceIndex));
return pattern;
}
Object JSRegExp::CaptureNameMap() {
- DCHECK(this->data()->IsFixedArray());
+ DCHECK(this->data().IsFixedArray());
DCHECK_EQ(TypeTag(), IRREGEXP);
Object value = DataAt(kIrregexpCaptureNameMapIndex);
DCHECK_NE(value, Smi::FromInt(JSRegExp::kUninitializedValue));
@@ -68,24 +68,24 @@
Object JSRegExp::DataAt(int index) const {
DCHECK(TypeTag() != NOT_COMPILED);
- return FixedArray::cast(data())->get(index);
+ return FixedArray::cast(data()).get(index);
}
void JSRegExp::SetDataAt(int index, Object value) {
DCHECK(TypeTag() != NOT_COMPILED);
DCHECK_GE(index,
kDataIndex); // Only implementation data can be set this way.
- FixedArray::cast(data())->set(index, value);
+ FixedArray::cast(data()).set(index, value);
}
bool JSRegExp::HasCompiledCode() const {
if (TypeTag() != IRREGEXP) return false;
#ifdef DEBUG
- DCHECK(DataAt(kIrregexpLatin1CodeIndex)->IsCode() ||
- DataAt(kIrregexpLatin1CodeIndex)->IsByteArray() ||
+ DCHECK(DataAt(kIrregexpLatin1CodeIndex).IsCode() ||
+ DataAt(kIrregexpLatin1CodeIndex).IsByteArray() ||
DataAt(kIrregexpLatin1CodeIndex) == Smi::FromInt(kUninitializedValue));
- DCHECK(DataAt(kIrregexpUC16CodeIndex)->IsCode() ||
- DataAt(kIrregexpUC16CodeIndex)->IsByteArray() ||
+ DCHECK(DataAt(kIrregexpUC16CodeIndex).IsCode() ||
+ DataAt(kIrregexpUC16CodeIndex).IsByteArray() ||
DataAt(kIrregexpUC16CodeIndex) == Smi::FromInt(kUninitializedValue));
#endif // DEBUG
Smi uninitialized = Smi::FromInt(kUninitializedValue);
diff --git a/src/objects/js-relative-time-format.cc b/src/objects/js-relative-time-format.cc
index 1e446a2..d97d9c2 100644
--- a/src/objects/js-relative-time-format.cc
+++ b/src/objects/js-relative-time-format.cc
@@ -197,7 +197,7 @@
format_holder->StyleAsString(), NONE);
JSObject::AddProperty(isolate, result, factory->numeric_string(),
format_holder->NumericAsString(), NONE);
- std::string locale_str(format_holder->locale()->ToCString().get());
+ std::string locale_str(format_holder->locale().ToCString().get());
icu::Locale icu_locale = Intl::CreateICULocale(locale_str);
std::string numbering_system = Intl::GetNumberingSystem(icu_locale);
JSObject::AddProperty(
@@ -313,7 +313,7 @@
isolate->factory()->NewStringFromAsciiChecked(func_name)),
T);
}
- icu::RelativeDateTimeFormatter* formatter = format->icu_formatter()->raw();
+ icu::RelativeDateTimeFormatter* formatter = format->icu_formatter().raw();
CHECK_NOT_NULL(formatter);
URelativeDateTimeUnit unit_enum;
if (!GetURelativeDateTimeUnit(unit, &unit_enum)) {
diff --git a/src/objects/js-segment-iterator.cc b/src/objects/js-segment-iterator.cc
index 1ac4b34..311618b 100644
--- a/src/objects/js-segment-iterator.cc
+++ b/src/objects/js-segment-iterator.cc
@@ -26,7 +26,7 @@
MaybeHandle<String> JSSegmentIterator::GetSegment(Isolate* isolate,
int32_t start,
int32_t end) const {
- return Intl::ToString(isolate, *(unicode_string()->raw()), start, end);
+ return Intl::ToString(isolate, *(unicode_string().raw()), start, end);
}
Handle<String> JSSegmentIterator::GranularityAsString() const {
@@ -80,7 +80,7 @@
if (!is_break_type_set()) {
return GetReadOnlyRoots().undefined_value_handle();
}
- icu::BreakIterator* break_iterator = icu_break_iterator()->raw();
+ icu::BreakIterator* break_iterator = icu_break_iterator().raw();
int32_t rule_status = break_iterator->getRuleStatus();
switch (granularity()) {
case JSSegmenter::Granularity::GRAPHEME:
@@ -128,7 +128,7 @@
Handle<Object> JSSegmentIterator::Index(
Isolate* isolate, Handle<JSSegmentIterator> segment_iterator) {
icu::BreakIterator* icu_break_iterator =
- segment_iterator->icu_break_iterator()->raw();
+ segment_iterator->icu_break_iterator().raw();
CHECK_NOT_NULL(icu_break_iterator);
return isolate->factory()->NewNumberFromInt(icu_break_iterator->current());
}
@@ -138,7 +138,7 @@
Isolate* isolate, Handle<JSSegmentIterator> segment_iterator) {
Factory* factory = isolate->factory();
icu::BreakIterator* icu_break_iterator =
- segment_iterator->icu_break_iterator()->raw();
+ segment_iterator->icu_break_iterator().raw();
// 3. Let _previousIndex be iterator.[[SegmentIteratorIndex]].
int32_t prev = icu_break_iterator->current();
// 4. Let done be AdvanceSegmentIterator(iterator, forwards).
@@ -192,7 +192,7 @@
Handle<Object> from_obj) {
Factory* factory = isolate->factory();
icu::BreakIterator* icu_break_iterator =
- segment_iterator->icu_break_iterator()->raw();
+ segment_iterator->icu_break_iterator().raw();
// 3. If from is not undefined,
if (!from_obj->IsUndefined()) {
// a. Let from be ? ToIndex(from).
@@ -244,7 +244,7 @@
Handle<Object> from_obj) {
Factory* factory = isolate->factory();
icu::BreakIterator* icu_break_iterator =
- segment_iterator->icu_break_iterator()->raw();
+ segment_iterator->icu_break_iterator().raw();
// 3. If from is not undefined,
if (!from_obj->IsUndefined()) {
// a. Let from be ? ToIndex(from).
diff --git a/src/objects/js-weak-refs-inl.h b/src/objects/js-weak-refs-inl.h
index 862223c..6632a31 100644
--- a/src/objects/js-weak-refs-inl.h
+++ b/src/objects/js-weak-refs-inl.h
@@ -70,14 +70,14 @@
// Add to active_cells.
weak_cell->set_next(finalization_group->active_cells());
- if (finalization_group->active_cells()->IsWeakCell()) {
- WeakCell::cast(finalization_group->active_cells())->set_prev(*weak_cell);
+ if (finalization_group->active_cells().IsWeakCell()) {
+ WeakCell::cast(finalization_group->active_cells()).set_prev(*weak_cell);
}
finalization_group->set_active_cells(*weak_cell);
if (!key->IsUndefined(isolate)) {
Handle<ObjectHashTable> key_map;
- if (finalization_group->key_map()->IsUndefined(isolate)) {
+ if (finalization_group->key_map().IsUndefined(isolate)) {
key_map = ObjectHashTable::New(isolate, 1);
} else {
key_map =
@@ -85,12 +85,12 @@
}
Object value = key_map->Lookup(key);
- if (value->IsWeakCell()) {
+ if (value.IsWeakCell()) {
WeakCell existing_weak_cell = WeakCell::cast(value);
- existing_weak_cell->set_key_list_prev(*weak_cell);
+ existing_weak_cell.set_key_list_prev(*weak_cell);
weak_cell->set_key_list_next(existing_weak_cell);
} else {
- DCHECK(value->IsTheHole(isolate));
+ DCHECK(value.IsTheHole(isolate));
}
key_map = ObjectHashTable::Put(key_map, key, weak_cell);
finalization_group->set_key_map(*key_map);
@@ -103,17 +103,17 @@
// Iterate through the doubly linked list of WeakCells associated with the
// key. Each WeakCell will be in the "active_cells" or "cleared_cells" list of
// its FinalizationGroup; remove it from there.
- if (!finalization_group->key_map()->IsUndefined(isolate)) {
+ if (!finalization_group->key_map().IsUndefined(isolate)) {
Handle<ObjectHashTable> key_map =
handle(ObjectHashTable::cast(finalization_group->key_map()), isolate);
Object value = key_map->Lookup(key);
Object undefined = ReadOnlyRoots(isolate).undefined_value();
- while (value->IsWeakCell()) {
+ while (value.IsWeakCell()) {
WeakCell weak_cell = WeakCell::cast(value);
- weak_cell->RemoveFromFinalizationGroupCells(isolate);
- value = weak_cell->key_list_next();
- weak_cell->set_key_list_prev(undefined);
- weak_cell->set_key_list_next(undefined);
+ weak_cell.RemoveFromFinalizationGroupCells(isolate);
+ value = weak_cell.key_list_next();
+ weak_cell.set_key_list_prev(undefined);
+ weak_cell.set_key_list_next(undefined);
}
bool was_present;
key_map = ObjectHashTable::Remove(isolate, key_map, key, &was_present);
@@ -122,7 +122,7 @@
}
bool JSFinalizationGroup::NeedsCleanup() const {
- return cleared_cells()->IsWeakCell();
+ return cleared_cells().IsWeakCell();
}
bool JSFinalizationGroup::scheduled_for_cleanup() const {
@@ -138,23 +138,23 @@
Handle<JSFinalizationGroup> finalization_group, Isolate* isolate) {
Handle<WeakCell> weak_cell =
handle(WeakCell::cast(finalization_group->cleared_cells()), isolate);
- DCHECK(weak_cell->prev()->IsUndefined(isolate));
+ DCHECK(weak_cell->prev().IsUndefined(isolate));
finalization_group->set_cleared_cells(weak_cell->next());
weak_cell->set_next(ReadOnlyRoots(isolate).undefined_value());
- if (finalization_group->cleared_cells()->IsWeakCell()) {
+ if (finalization_group->cleared_cells().IsWeakCell()) {
WeakCell cleared_cells_head =
WeakCell::cast(finalization_group->cleared_cells());
- DCHECK_EQ(cleared_cells_head->prev(), *weak_cell);
- cleared_cells_head->set_prev(ReadOnlyRoots(isolate).undefined_value());
+ DCHECK_EQ(cleared_cells_head.prev(), *weak_cell);
+ cleared_cells_head.set_prev(ReadOnlyRoots(isolate).undefined_value());
} else {
- DCHECK(finalization_group->cleared_cells()->IsUndefined(isolate));
+ DCHECK(finalization_group->cleared_cells().IsUndefined(isolate));
}
// Also remove the WeakCell from the key_map (if it's there).
- if (!weak_cell->key()->IsUndefined(isolate)) {
- if (weak_cell->key_list_prev()->IsUndefined(isolate) &&
- weak_cell->key_list_next()->IsUndefined(isolate)) {
+ if (!weak_cell->key().IsUndefined(isolate)) {
+ if (weak_cell->key_list_prev().IsUndefined(isolate) &&
+ weak_cell->key_list_next().IsUndefined(isolate)) {
// weak_cell is the only one associated with its key; remove the key
// from the hash table.
Handle<ObjectHashTable> key_map =
@@ -164,7 +164,7 @@
key_map = ObjectHashTable::Remove(isolate, key_map, key, &was_present);
DCHECK(was_present);
finalization_group->set_key_map(*key_map);
- } else if (weak_cell->key_list_prev()->IsUndefined()) {
+ } else if (weak_cell->key_list_prev().IsUndefined()) {
// weak_cell is the list head for its key; we need to change the value of
// the key in the hash table.
Handle<ObjectHashTable> key_map =
@@ -180,10 +180,10 @@
} else {
// weak_cell is somewhere in the middle of its key list.
WeakCell prev = WeakCell::cast(weak_cell->key_list_prev());
- prev->set_key_list_next(weak_cell->key_list_next());
- if (!weak_cell->key_list_next()->IsUndefined()) {
+ prev.set_key_list_next(weak_cell->key_list_next());
+ if (!weak_cell->key_list_next().IsUndefined()) {
WeakCell next = WeakCell::cast(weak_cell->key_list_next());
- next->set_key_list_prev(weak_cell->key_list_prev());
+ next.set_key_list_prev(weak_cell->key_list_prev());
}
}
}
@@ -200,41 +200,41 @@
// only called for WeakCells which haven't been unregistered yet, so they will
// be in the active_cells list. (The caller must guard against calling this
// for unregistered WeakCells by checking that the target is not undefined.)
- DCHECK(target()->IsJSReceiver());
+ DCHECK(target().IsJSReceiver());
set_target(ReadOnlyRoots(isolate).undefined_value());
JSFinalizationGroup fg = JSFinalizationGroup::cast(finalization_group());
- if (prev()->IsWeakCell()) {
- DCHECK_NE(fg->active_cells(), *this);
+ if (prev().IsWeakCell()) {
+ DCHECK_NE(fg.active_cells(), *this);
WeakCell prev_cell = WeakCell::cast(prev());
- prev_cell->set_next(next());
+ prev_cell.set_next(next());
gc_notify_updated_slot(prev_cell, prev_cell.RawField(WeakCell::kNextOffset),
next());
} else {
- DCHECK_EQ(fg->active_cells(), *this);
- fg->set_active_cells(next());
+ DCHECK_EQ(fg.active_cells(), *this);
+ fg.set_active_cells(next());
gc_notify_updated_slot(
fg, fg.RawField(JSFinalizationGroup::kActiveCellsOffset), next());
}
- if (next()->IsWeakCell()) {
+ if (next().IsWeakCell()) {
WeakCell next_cell = WeakCell::cast(next());
- next_cell->set_prev(prev());
+ next_cell.set_prev(prev());
gc_notify_updated_slot(next_cell, next_cell.RawField(WeakCell::kPrevOffset),
prev());
}
set_prev(ReadOnlyRoots(isolate).undefined_value());
- Object cleared_head = fg->cleared_cells();
- if (cleared_head->IsWeakCell()) {
+ Object cleared_head = fg.cleared_cells();
+ if (cleared_head.IsWeakCell()) {
WeakCell cleared_head_cell = WeakCell::cast(cleared_head);
- cleared_head_cell->set_prev(*this);
+ cleared_head_cell.set_prev(*this);
gc_notify_updated_slot(cleared_head_cell,
cleared_head_cell.RawField(WeakCell::kPrevOffset),
*this);
}
- set_next(fg->cleared_cells());
+ set_next(fg.cleared_cells());
gc_notify_updated_slot(*this, RawField(WeakCell::kNextOffset), next());
- fg->set_cleared_cells(*this);
+ fg.set_cleared_cells(*this);
gc_notify_updated_slot(
fg, fg.RawField(JSFinalizationGroup::kClearedCellsOffset), *this);
}
@@ -245,24 +245,24 @@
// It's important to set_target to undefined here. This guards that we won't
// call Nullify (which assumes that the WeakCell is in active_cells).
- DCHECK(target()->IsUndefined() || target()->IsJSReceiver());
+ DCHECK(target().IsUndefined() || target().IsJSReceiver());
set_target(ReadOnlyRoots(isolate).undefined_value());
JSFinalizationGroup fg = JSFinalizationGroup::cast(finalization_group());
- if (fg->active_cells() == *this) {
- DCHECK(prev()->IsUndefined(isolate));
- fg->set_active_cells(next());
- } else if (fg->cleared_cells() == *this) {
- DCHECK(!prev()->IsWeakCell());
- fg->set_cleared_cells(next());
+ if (fg.active_cells() == *this) {
+ DCHECK(prev().IsUndefined(isolate));
+ fg.set_active_cells(next());
+ } else if (fg.cleared_cells() == *this) {
+ DCHECK(!prev().IsWeakCell());
+ fg.set_cleared_cells(next());
} else {
- DCHECK(prev()->IsWeakCell());
+ DCHECK(prev().IsWeakCell());
WeakCell prev_cell = WeakCell::cast(prev());
- prev_cell->set_next(next());
+ prev_cell.set_next(next());
}
- if (next()->IsWeakCell()) {
+ if (next().IsWeakCell()) {
WeakCell next_cell = WeakCell::cast(next());
- next_cell->set_prev(prev());
+ next_cell.set_prev(prev());
}
set_prev(ReadOnlyRoots(isolate).undefined_value());
set_next(ReadOnlyRoots(isolate).undefined_value());
diff --git a/src/objects/layout-descriptor-inl.h b/src/objects/layout-descriptor-inl.h
index bbe008d..7092233 100644
--- a/src/objects/layout-descriptor-inl.h
+++ b/src/objects/layout-descriptor-inl.h
@@ -153,10 +153,10 @@
int LayoutDescriptor::CalculateCapacity(Map map, DescriptorArray descriptors,
int num_descriptors) {
- int inobject_properties = map->GetInObjectProperties();
+ int inobject_properties = map.GetInObjectProperties();
if (inobject_properties == 0) return 0;
- DCHECK_LE(num_descriptors, descriptors->number_of_descriptors());
+ DCHECK_LE(num_descriptors, descriptors.number_of_descriptors());
int layout_descriptor_length;
const int kMaxWordsPerField = kDoubleSize / kTaggedSize;
@@ -170,7 +170,7 @@
layout_descriptor_length = 0;
for (int i = 0; i < num_descriptors; i++) {
- PropertyDetails details = descriptors->GetDetails(i);
+ PropertyDetails details = descriptors.GetDetails(i);
if (!InobjectUnboxedField(inobject_properties, details)) continue;
int field_index = details.field_index();
int field_width_in_words = details.field_width_in_words();
@@ -186,19 +186,19 @@
LayoutDescriptor layout_descriptor, Map map, DescriptorArray descriptors,
int num_descriptors) {
DisallowHeapAllocation no_allocation;
- int inobject_properties = map->GetInObjectProperties();
+ int inobject_properties = map.GetInObjectProperties();
for (int i = 0; i < num_descriptors; i++) {
- PropertyDetails details = descriptors->GetDetails(i);
+ PropertyDetails details = descriptors.GetDetails(i);
if (!InobjectUnboxedField(inobject_properties, details)) {
DCHECK(details.location() != kField ||
- layout_descriptor->IsTagged(details.field_index()));
+ layout_descriptor.IsTagged(details.field_index()));
continue;
}
int field_index = details.field_index();
- layout_descriptor = layout_descriptor->SetRawData(field_index);
+ layout_descriptor = layout_descriptor.SetRawData(field_index);
if (details.field_width_in_words() > 1) {
- layout_descriptor = layout_descriptor->SetRawData(field_index + 1);
+ layout_descriptor = layout_descriptor.SetRawData(field_index + 1);
}
}
return layout_descriptor;
@@ -224,12 +224,12 @@
layout_descriptor_(LayoutDescriptor::FastPointerLayout()) {
if (!FLAG_unbox_double_fields) return;
- layout_descriptor_ = map->layout_descriptor_gc_safe();
- if (layout_descriptor_->IsFastPointerLayout()) {
+ layout_descriptor_ = map.layout_descriptor_gc_safe();
+ if (layout_descriptor_.IsFastPointerLayout()) {
return;
}
- header_size_ = map->GetInObjectPropertiesStartInWords() * kTaggedSize;
+ header_size_ = map.GetInObjectPropertiesStartInWords() * kTaggedSize;
DCHECK_GE(header_size_, 0);
all_fields_tagged_ = false;
@@ -242,7 +242,7 @@
if (offset_in_bytes < header_size_) return true;
int field_index = (offset_in_bytes - header_size_) / kTaggedSize;
- return layout_descriptor_->IsTagged(field_index);
+ return layout_descriptor_.IsTagged(field_index);
}
} // namespace internal
diff --git a/src/objects/layout-descriptor.cc b/src/objects/layout-descriptor.cc
index 266a45a..0199559 100644
--- a/src/objects/layout-descriptor.cc
+++ b/src/objects/layout-descriptor.cc
@@ -54,9 +54,9 @@
DisallowHeapAllocation no_allocation;
LayoutDescriptor layout_desc = *layout_descriptor;
- layout_desc = layout_desc->SetRawData(field_index);
+ layout_desc = layout_desc.SetRawData(field_index);
if (details.field_width_in_words() > 1) {
- layout_desc = layout_desc->SetRawData(field_index + 1);
+ layout_desc = layout_desc.SetRawData(field_index + 1);
}
return handle(layout_desc, isolate);
}
@@ -66,25 +66,25 @@
Handle<LayoutDescriptor> full_layout_descriptor) {
DisallowHeapAllocation no_allocation;
LayoutDescriptor layout_descriptor = map->layout_descriptor();
- if (layout_descriptor->IsSlowLayout()) {
+ if (layout_descriptor.IsSlowLayout()) {
return full_layout_descriptor;
}
if (!InobjectUnboxedField(map->GetInObjectProperties(), details)) {
DCHECK(details.location() != kField ||
- layout_descriptor->IsTagged(details.field_index()));
+ layout_descriptor.IsTagged(details.field_index()));
return handle(layout_descriptor, isolate);
}
int field_index = details.field_index();
int new_capacity = field_index + details.field_width_in_words();
- if (new_capacity > layout_descriptor->capacity()) {
+ if (new_capacity > layout_descriptor.capacity()) {
// Current map's layout descriptor runs out of space, so use the full
// layout descriptor.
return full_layout_descriptor;
}
- layout_descriptor = layout_descriptor->SetRawData(field_index);
+ layout_descriptor = layout_descriptor.SetRawData(field_index);
if (details.field_width_in_words() > 1) {
- layout_descriptor = layout_descriptor->SetRawData(field_index + 1);
+ layout_descriptor = layout_descriptor.SetRawData(field_index + 1);
}
return handle(layout_descriptor, isolate);
}
@@ -202,8 +202,8 @@
int max_sequence_length = (end_offset - offset_in_bytes) / kTaggedSize;
int field_index = Max(0, (offset_in_bytes - header_size_) / kTaggedSize);
int sequence_length;
- bool tagged = layout_descriptor_->IsTagged(field_index, max_sequence_length,
- &sequence_length);
+ bool tagged = layout_descriptor_.IsTagged(field_index, max_sequence_length,
+ &sequence_length);
DCHECK_GT(sequence_length, 0);
if (offset_in_bytes < header_size_) {
// Object headers do not contain non-tagged fields. Check if the contiguous
@@ -257,11 +257,11 @@
bool LayoutDescriptor::IsConsistentWithMap(Map map, bool check_tail) {
if (FLAG_unbox_double_fields) {
- DescriptorArray descriptors = map->instance_descriptors();
- int nof_descriptors = map->NumberOfOwnDescriptors();
+ DescriptorArray descriptors = map.instance_descriptors();
+ int nof_descriptors = map.NumberOfOwnDescriptors();
int last_field_index = 0;
for (int i = 0; i < nof_descriptors; i++) {
- PropertyDetails details = descriptors->GetDetails(i);
+ PropertyDetails details = descriptors.GetDetails(i);
if (details.location() != kField) continue;
FieldIndex field_index = FieldIndex::ForDescriptor(map, i);
bool tagged_expected =
diff --git a/src/objects/literal-objects-inl.h b/src/objects/literal-objects-inl.h
index fafbb17..4489feb 100644
--- a/src/objects/literal-objects-inl.h
+++ b/src/objects/literal-objects-inl.h
@@ -70,7 +70,7 @@
}
bool ArrayBoilerplateDescription::is_empty() const {
- return constant_elements()->length() == 0;
+ return constant_elements().length() == 0;
}
} // namespace internal
diff --git a/src/objects/literal-objects.cc b/src/objects/literal-objects.cc
index cbca776..d3f1424 100644
--- a/src/objects/literal-objects.cc
+++ b/src/objects/literal-objects.cc
@@ -113,7 +113,7 @@
value_kind == ClassBoilerplate::kSetter);
Object raw_accessor = descriptor_array_template->GetStrongValue(entry);
AccessorPair pair;
- if (raw_accessor->IsAccessorPair()) {
+ if (raw_accessor.IsAccessorPair()) {
pair = AccessorPair::cast(raw_accessor);
} else {
Handle<AccessorPair> new_pair = isolate->factory()->NewAccessorPair();
@@ -122,9 +122,9 @@
descriptor_array_template->Set(entry, &d);
pair = *new_pair;
}
- pair->set(value_kind == ClassBoilerplate::kGetter ? ACCESSOR_GETTER
- : ACCESSOR_SETTER,
- *value);
+ pair.set(value_kind == ClassBoilerplate::kGetter ? ACCESSOR_GETTER
+ : ACCESSOR_SETTER,
+ *value);
}
}
}
@@ -165,7 +165,7 @@
}
inline int GetExistingValueIndex(Object value) {
- return value->IsSmi() ? Smi::ToInt(value) : -1;
+ return value.IsSmi() ? Smi::ToInt(value) : -1;
}
template <typename Dictionary, typename Key>
@@ -215,13 +215,13 @@
Object existing_value = dictionary->ValueAt(entry);
if (value_kind == ClassBoilerplate::kData) {
// Computed value is a normal method.
- if (existing_value->IsAccessorPair()) {
+ if (existing_value.IsAccessorPair()) {
AccessorPair current_pair = AccessorPair::cast(existing_value);
int existing_getter_index =
- GetExistingValueIndex(current_pair->getter());
+ GetExistingValueIndex(current_pair.getter());
int existing_setter_index =
- GetExistingValueIndex(current_pair->setter());
+ GetExistingValueIndex(current_pair.setter());
// At least one of the accessors must already be defined.
DCHECK(existing_getter_index >= 0 || existing_setter_index >= 0);
if (existing_getter_index < key_index &&
@@ -243,7 +243,7 @@
// and then it was overwritten by the current computed method which
// in turn was later overwritten by the setter method. So we clear
// the getter.
- current_pair->set_getter(*isolate->factory()->null_value());
+ current_pair.set_getter(*isolate->factory()->null_value());
} else if (existing_setter_index < key_index) {
DCHECK_LT(key_index, existing_getter_index);
@@ -251,19 +251,18 @@
// and then it was overwritten by the current computed method which
// in turn was later overwritten by the getter method. So we clear
// the setter.
- current_pair->set_setter(*isolate->factory()->null_value());
+ current_pair.set_setter(*isolate->factory()->null_value());
}
}
} else {
// Overwrite existing value if it was defined before the computed one
// (AccessorInfo "length" property is always defined before).
- DCHECK_IMPLIES(!existing_value->IsSmi(),
- existing_value->IsAccessorInfo());
- DCHECK_IMPLIES(!existing_value->IsSmi(),
- AccessorInfo::cast(existing_value)->name() ==
+ DCHECK_IMPLIES(!existing_value.IsSmi(),
+ existing_value.IsAccessorInfo());
+ DCHECK_IMPLIES(!existing_value.IsSmi(),
+ AccessorInfo::cast(existing_value).name() ==
*isolate->factory()->length_string());
- if (!existing_value->IsSmi() ||
- Smi::ToInt(existing_value) < key_index) {
+ if (!existing_value.IsSmi() || Smi::ToInt(existing_value) < key_index) {
PropertyDetails details(kData, DONT_ENUM, PropertyCellType::kNoCell,
enum_order);
dictionary->DetailsAtPut(isolate, entry, details);
@@ -274,14 +273,14 @@
AccessorComponent component = value_kind == ClassBoilerplate::kGetter
? ACCESSOR_GETTER
: ACCESSOR_SETTER;
- if (existing_value->IsAccessorPair()) {
+ if (existing_value.IsAccessorPair()) {
// Update respective component of existing AccessorPair.
AccessorPair current_pair = AccessorPair::cast(existing_value);
int existing_component_index =
- GetExistingValueIndex(current_pair->get(component));
+ GetExistingValueIndex(current_pair.get(component));
if (existing_component_index < key_index) {
- current_pair->set(component, value);
+ current_pair.set(component, value);
}
} else {
@@ -380,7 +379,7 @@
AddToDictionaryTemplate(isolate, properties_dictionary_template_, name,
value_index, value_kind, value);
} else {
- *temp_handle_.location() = value->ptr();
+ *temp_handle_.location() = value.ptr();
AddToDescriptorArrayTemplate(isolate, descriptor_array_template_, name,
value_kind, temp_handle_);
}
diff --git a/src/objects/lookup-cache-inl.h b/src/objects/lookup-cache-inl.h
index 7d4878d..677da9e 100644
--- a/src/objects/lookup-cache-inl.h
+++ b/src/objects/lookup-cache-inl.h
@@ -14,10 +14,10 @@
// static
int DescriptorLookupCache::Hash(Map source, Name name) {
- DCHECK(name->IsUniqueName());
+ DCHECK(name.IsUniqueName());
// Uses only lower 32 bits if pointers are larger.
uint32_t source_hash = static_cast<uint32_t>(source.ptr()) >> kTaggedSizeLog2;
- uint32_t name_hash = name->hash_field();
+ uint32_t name_hash = name.hash_field();
return (source_hash ^ name_hash) % kLength;
}
diff --git a/src/objects/lookup-inl.h b/src/objects/lookup-inl.h
index 8de434b..c80f63a 100644
--- a/src/objects/lookup-inl.h
+++ b/src/objects/lookup-inl.h
@@ -111,7 +111,7 @@
bool LookupIterator::ExtendingNonExtensible(Handle<JSReceiver> receiver) {
DCHECK(receiver.is_identical_to(GetStoreTarget<JSReceiver>()));
- return !receiver->map()->is_extensible() &&
+ return !receiver->map().is_extensible() &&
(IsElement() || !name_->IsPrivate());
}
@@ -120,7 +120,7 @@
return transition_->IsPropertyCell() ||
(transition_map()->is_dictionary_map() &&
!GetStoreTarget<JSReceiver>()->HasFastProperties()) ||
- transition_map()->GetBackPointer()->IsMap();
+ transition_map()->GetBackPointer().IsMap();
}
void LookupIterator::UpdateProtector() {
@@ -166,9 +166,9 @@
Handle<T> LookupIterator::GetStoreTarget() const {
DCHECK(receiver_->IsJSReceiver());
if (receiver_->IsJSGlobalProxy()) {
- Map map = JSGlobalProxy::cast(*receiver_)->map();
- if (map->has_hidden_prototype()) {
- return handle(JSGlobalObject::cast(map->prototype()), isolate_);
+ Map map = JSGlobalProxy::cast(*receiver_).map();
+ if (map.has_hidden_prototype()) {
+ return handle(JSGlobalObject::cast(map.prototype()), isolate_);
}
}
return Handle<T>::cast(receiver_);
@@ -176,8 +176,8 @@
template <bool is_element>
InterceptorInfo LookupIterator::GetInterceptor(JSObject holder) {
- return is_element ? holder->GetIndexedInterceptor()
- : holder->GetNamedInterceptor();
+ return is_element ? holder.GetIndexedInterceptor()
+ : holder.GetNamedInterceptor();
}
inline Handle<InterceptorInfo> LookupIterator::GetInterceptor() const {
diff --git a/src/objects/lookup.cc b/src/objects/lookup.cc
index d3956db..25703d8 100644
--- a/src/objects/lookup.cc
+++ b/src/objects/lookup.cc
@@ -118,7 +118,7 @@
PropertyConstness::kConst, value);
// Reload information; this is no-op if nothing changed.
it.property_details_ =
- new_map->instance_descriptors()->GetDetails(descriptor_number);
+ new_map->instance_descriptors().GetDetails(descriptor_number);
it.transition_ = new_map;
}
return it;
@@ -151,7 +151,7 @@
holder_ = initial_holder_;
JSReceiver holder = *holder_;
- Map map = holder->map();
+ Map map = holder.map();
state_ = LookupInHolder<is_element>(map, holder);
if (IsFound()) return;
@@ -169,9 +169,9 @@
has_property_ = false;
JSReceiver holder = *holder_;
- Map map = holder->map();
+ Map map = holder.map();
- if (map->IsSpecialReceiverMap()) {
+ if (map.IsSpecialReceiverMap()) {
state_ = IsElement() ? LookupInSpecialHolder<true>(map, holder)
: LookupInSpecialHolder<false>(map, holder);
if (IsFound()) return;
@@ -195,7 +195,7 @@
return;
}
holder = maybe_holder;
- map = holder->map();
+ map = holder.map();
state_ = LookupInHolder<is_element>(map, holder);
} while (!IsFound());
@@ -219,7 +219,7 @@
// Strings are the only objects with properties (only elements) directly on
// the wrapper. Hence we can skip generating the wrapper for all other cases.
if (receiver->IsString() &&
- index < static_cast<uint32_t>(String::cast(*receiver)->length())) {
+ index < static_cast<uint32_t>(String::cast(*receiver).length())) {
// TODO(verwaest): Speed this up. Perhaps use a cached wrapper on the native
// context, ensuring that we don't leak it into JS?
Handle<JSFunction> constructor = isolate->string_function();
@@ -228,7 +228,7 @@
return result;
}
auto root =
- handle(receiver->GetPrototypeChainRootMap(isolate)->prototype(), isolate);
+ handle(receiver->GetPrototypeChainRootMap(isolate).prototype(), isolate);
if (root->IsNull(isolate)) {
isolate->PushStackTraceAndDie(reinterpret_cast<void*>(receiver->ptr()));
}
@@ -265,7 +265,7 @@
#undef TYPED_ARRAY_CONTEXT_SLOTS
};
- if (!holder->IsJSFunction()) return false;
+ if (!holder.IsJSFunction()) return false;
return std::any_of(
std::begin(context_slots), std::end(context_slots),
@@ -305,7 +305,7 @@
isolate_->InvalidateTypedArraySpeciesProtector();
return;
}
- if (holder_->map()->is_prototype_map()) {
+ if (holder_->map().is_prototype_map()) {
DisallowHeapAllocation no_gc;
// Setting the constructor of any prototype with the @@species protector
// (of any realm) also needs to invalidate the protector.
@@ -327,7 +327,7 @@
if (!isolate_->IsRegExpSpeciesLookupChainIntact()) return;
isolate_->InvalidateRegExpSpeciesProtector();
} else if (isolate_->IsInAnyContext(
- holder_->map()->prototype(),
+ holder_->map().prototype(),
Context::TYPED_ARRAY_PROTOTYPE_INDEX)) {
if (!isolate_->IsTypedArraySpeciesLookupChainIntact()) return;
isolate_->InvalidateTypedArraySpeciesProtector();
@@ -466,7 +466,7 @@
if (holder_obj->IsJSGlobalObject()) {
Handle<GlobalDictionary> dictionary(
- JSGlobalObject::cast(*holder_obj)->global_dictionary(), isolate());
+ JSGlobalObject::cast(*holder_obj).global_dictionary(), isolate());
Handle<PropertyCell> cell(dictionary->CellAt(dictionary_entry()),
isolate());
property_details_ = cell->property_details();
@@ -493,7 +493,7 @@
// Update the property details if the representation was None.
if (constness() != new_constness || representation().IsNone()) {
property_details_ =
- new_map->instance_descriptors()->GetDetails(descriptor_number());
+ new_map->instance_descriptors().GetDetails(descriptor_number());
}
return;
}
@@ -538,7 +538,7 @@
if (!IsElement() && !holder_obj->HasFastProperties()) {
PropertyDetails details(kData, attributes, PropertyCellType::kMutable);
- if (holder_obj->map()->is_prototype_map() &&
+ if (holder_obj->map().is_prototype_map() &&
(property_details_.attributes() & READ_ONLY) == 0 &&
(attributes & READ_ONLY) != 0) {
// Invalidate prototype validity cell when a property is reconfigured
@@ -548,7 +548,7 @@
}
if (holder_obj->IsJSGlobalObject()) {
Handle<GlobalDictionary> dictionary(
- JSGlobalObject::cast(*holder_obj)->global_dictionary(), isolate());
+ JSGlobalObject::cast(*holder_obj).global_dictionary(), isolate());
Handle<PropertyCell> cell = PropertyCell::PrepareForValue(
isolate(), dictionary, dictionary_entry(), value, details);
@@ -593,7 +593,7 @@
DCHECK(state_ != LookupIterator::ACCESSOR ||
(GetAccessors()->IsAccessorInfo() &&
- AccessorInfo::cast(*GetAccessors())->is_special_data_property()));
+ AccessorInfo::cast(*GetAccessors()).is_special_data_property()));
DCHECK_NE(INTEGER_INDEXED_EXOTIC, state_);
DCHECK(state_ == NOT_FOUND || !HolderIsReceiverOrHiddenPrototype());
@@ -610,7 +610,7 @@
global, name(), PropertyCellType::kUninitialized, &entry);
Handle<GlobalDictionary> dictionary(global->global_dictionary(),
isolate_);
- DCHECK(cell->value()->IsTheHole(isolate_));
+ DCHECK(cell->value().IsTheHole(isolate_));
DCHECK(!value->IsTheHole(isolate_));
transition_ = cell;
// Assign an enumeration index to the property and update
@@ -682,11 +682,11 @@
number_ = static_cast<uint32_t>(number);
property_details_ = transition->GetLastDescriptorDetails();
state_ = DATA;
- } else if (receiver->map()->is_dictionary_map()) {
+ } else if (receiver->map().is_dictionary_map()) {
Handle<NameDictionary> dictionary(receiver->property_dictionary(),
isolate_);
int entry;
- if (receiver->map()->is_prototype_map() && receiver->IsJSObject()) {
+ if (receiver->map().is_prototype_map() && receiver->IsJSObject()) {
JSObject::InvalidatePrototypeChains(receiver->map());
}
dictionary = NameDictionary::Add(isolate(), dictionary, name(),
@@ -712,7 +712,7 @@
accessor->Delete(object, number_);
} else {
DCHECK(!name()->IsPrivateName());
- bool is_prototype_map = holder->map()->is_prototype_map();
+ bool is_prototype_map = holder->map().is_prototype_map();
RuntimeCallTimerScope stats_scope(
isolate_, is_prototype_map
? RuntimeCallCounterId::kPrototypeObject_DeleteProperty
@@ -746,7 +746,7 @@
attributes = static_cast<PropertyAttributes>(attributes | DONT_ENUM);
}
- if (!IsElement() && !receiver->map()->is_dictionary_map()) {
+ if (!IsElement() && !receiver->map().is_dictionary_map()) {
Handle<Map> old_map(receiver->map(), isolate_);
if (!holder_.is_identical_to(receiver)) {
@@ -820,11 +820,11 @@
if (receiver->HasSlowArgumentsElements()) {
FixedArray parameter_map = FixedArray::cast(receiver->elements());
- uint32_t length = parameter_map->length() - 2;
+ uint32_t length = parameter_map.length() - 2;
if (number_ < length) {
- parameter_map->set(number_ + 2, ReadOnlyRoots(heap()).the_hole_value());
+ parameter_map.set(number_ + 2, ReadOnlyRoots(heap()).the_hole_value());
}
- FixedArray::cast(receiver->elements())->set(1, *dictionary);
+ FixedArray::cast(receiver->elements()).set(1, *dictionary);
} else {
receiver->set_elements(*dictionary);
}
@@ -832,7 +832,7 @@
ReloadPropertyInformation<true>();
} else {
PropertyNormalizationMode mode = CLEAR_INOBJECT_PROPERTIES;
- if (receiver->map()->is_prototype_map()) {
+ if (receiver->map().is_prototype_map()) {
JSObject::InvalidatePrototypeChains(receiver->map());
mode = KEEP_INOBJECT_PROPERTIES;
}
@@ -864,9 +864,9 @@
if (!receiver_->IsJSReceiver()) return false;
JSReceiver current = JSReceiver::cast(*receiver_);
JSReceiver object = *holder_;
- if (!current->map()->has_hidden_prototype()) return false;
+ if (!current.map().has_hidden_prototype()) return false;
// JSProxy do not occur as hidden prototypes.
- if (object->IsJSProxy()) return false;
+ if (object.IsJSProxy()) return false;
PrototypeIterator iter(isolate(), current, kStartAtPrototype,
PrototypeIterator::END_AT_NON_HIDDEN);
while (!iter.IsAtEnd()) {
@@ -884,9 +884,9 @@
return accessor->Get(holder, number_);
} else if (holder_->IsJSGlobalObject()) {
Handle<JSGlobalObject> holder = GetHolder<JSGlobalObject>();
- result = holder->global_dictionary()->ValueAt(number_);
+ result = holder->global_dictionary().ValueAt(number_);
} else if (!holder_->HasFastProperties()) {
- result = holder_->property_dictionary()->ValueAt(number_);
+ result = holder_->property_dictionary().ValueAt(number_);
} else if (property_details_.location() == kField) {
DCHECK_EQ(kData, property_details_.kind());
Handle<JSObject> holder = GetHolder<JSObject>();
@@ -894,7 +894,7 @@
return JSObject::FastPropertyAt(holder, property_details_.representation(),
field_index);
} else {
- result = holder_->map()->instance_descriptors()->GetStrongValue(number_);
+ result = holder_->map().instance_descriptors().GetStrongValue(number_);
}
return handle(result, isolate_);
}
@@ -907,14 +907,14 @@
Handle<JSObject> holder = GetHolder<JSObject>();
FieldIndex field_index = FieldIndex::ForDescriptor(holder->map(), number_);
if (property_details_.representation().IsDouble()) {
- if (!value->IsNumber()) return false;
+ if (!value.IsNumber()) return false;
uint64_t bits;
if (holder->IsUnboxedDoubleField(field_index)) {
bits = holder->RawFastDoublePropertyAsBitsAt(field_index);
} else {
Object current_value = holder->RawFastPropertyAt(field_index);
- DCHECK(current_value->IsMutableHeapNumber());
- bits = MutableHeapNumber::cast(current_value)->value_as_bits();
+ DCHECK(current_value.IsMutableHeapNumber());
+ bits = MutableHeapNumber::cast(current_value).value_as_bits();
}
// Use bit representation of double to to check for hole double, since
// manipulating the signaling NaN used for the hole in C++, e.g. with
@@ -925,14 +925,14 @@
// Uninitialized double field.
return true;
}
- return Object::SameNumberValue(bit_cast<double>(bits), value->Number());
+ return Object::SameNumberValue(bit_cast<double>(bits), value.Number());
} else {
Object current_value = holder->RawFastPropertyAt(field_index);
- if (current_value->IsUninitialized(isolate()) || current_value == value) {
+ if (current_value.IsUninitialized(isolate()) || current_value == value) {
return true;
}
- return current_value->IsNumber() && value->IsNumber() &&
- Object::SameNumberValue(current_value->Number(), value->Number());
+ return current_value.IsNumber() && value.IsNumber() &&
+ Object::SameNumberValue(current_value.Number(), value.Number());
}
}
@@ -958,7 +958,7 @@
DCHECK_EQ(kField, property_details_.location());
DCHECK(!IsElement());
Map holder_map = holder_->map();
- return handle(holder_map->FindFieldOwner(isolate(), descriptor_number()),
+ return handle(holder_map.FindFieldOwner(isolate(), descriptor_number()),
isolate_);
}
@@ -975,14 +975,14 @@
DCHECK(holder_->HasFastProperties());
DCHECK_EQ(kField, property_details_.location());
return handle(
- holder_->map()->instance_descriptors()->GetFieldType(descriptor_number()),
+ holder_->map().instance_descriptors().GetFieldType(descriptor_number()),
isolate_);
}
Handle<PropertyCell> LookupIterator::GetPropertyCell() const {
DCHECK(!IsElement());
Handle<JSGlobalObject> holder = GetHolder<JSGlobalObject>();
- return handle(holder->global_dictionary()->CellAt(dictionary_entry()),
+ return handle(holder->global_dictionary().CellAt(dictionary_entry()),
isolate_);
}
@@ -1012,30 +1012,30 @@
DCHECK_IMPLIES(!initializing_store && property_details_.constness() ==
PropertyConstness::kConst,
IsConstFieldValueEqualTo(*value));
- JSObject::cast(*holder)->WriteToField(descriptor_number(),
- property_details_, *value);
+ JSObject::cast(*holder).WriteToField(descriptor_number(),
+ property_details_, *value);
} else {
DCHECK_EQ(kDescriptor, property_details_.location());
DCHECK_EQ(PropertyConstness::kConst, property_details_.constness());
}
} else if (holder->IsJSGlobalObject()) {
GlobalDictionary dictionary =
- JSGlobalObject::cast(*holder)->global_dictionary();
- dictionary->CellAt(dictionary_entry())->set_value(*value);
+ JSGlobalObject::cast(*holder).global_dictionary();
+ dictionary.CellAt(dictionary_entry()).set_value(*value);
} else {
DCHECK_IMPLIES(holder->IsJSProxy(), name()->IsPrivate());
NameDictionary dictionary = holder->property_dictionary();
- dictionary->ValueAtPut(dictionary_entry(), *value);
+ dictionary.ValueAtPut(dictionary_entry(), *value);
}
}
template <bool is_element>
bool LookupIterator::SkipInterceptor(JSObject holder) {
auto info = GetInterceptor<is_element>(holder);
- if (!is_element && name_->IsSymbol() && !info->can_intercept_symbols()) {
+ if (!is_element && name_->IsSymbol() && !info.can_intercept_symbols()) {
return true;
}
- if (info->non_masking()) {
+ if (info.non_masking()) {
switch (interceptor_state_) {
case InterceptorState::kUninitialized:
interceptor_state_ = InterceptorState::kSkipNonMasking;
@@ -1051,18 +1051,18 @@
JSReceiver LookupIterator::NextHolder(Map map) {
DisallowHeapAllocation no_gc;
- if (map->prototype() == ReadOnlyRoots(heap()).null_value()) {
+ if (map.prototype() == ReadOnlyRoots(heap()).null_value()) {
return JSReceiver();
}
- if (!check_prototype_chain() && !map->has_hidden_prototype()) {
+ if (!check_prototype_chain() && !map.has_hidden_prototype()) {
return JSReceiver();
}
- return JSReceiver::cast(map->prototype());
+ return JSReceiver::cast(map.prototype());
}
LookupIterator::State LookupIterator::NotFound(JSReceiver const holder) const {
DCHECK(!IsElement());
- if (!holder->IsJSTypedArray() || !name_->IsString()) return NOT_FOUND;
+ if (!holder.IsJSTypedArray() || !name_->IsString()) return NOT_FOUND;
return IsSpecialIndex(String::cast(*name_)) ? INTEGER_INDEXED_EXOTIC
: NOT_FOUND;
}
@@ -1071,8 +1071,8 @@
template <bool is_element>
bool HasInterceptor(Map map) {
- return is_element ? map->has_indexed_interceptor()
- : map->has_named_interceptor();
+ return is_element ? map.has_indexed_interceptor()
+ : map.has_named_interceptor();
}
} // namespace
@@ -1083,10 +1083,10 @@
STATIC_ASSERT(INTERCEPTOR == BEFORE_PROPERTY);
switch (state_) {
case NOT_FOUND:
- if (map->IsJSProxyMap()) {
+ if (map.IsJSProxyMap()) {
if (is_element || !name_->IsPrivate()) return JSPROXY;
}
- if (map->is_access_check_needed()) {
+ if (map.is_access_check_needed()) {
if (is_element || !name_->IsPrivate()) return ACCESS_CHECK;
}
V8_FALLTHROUGH;
@@ -1097,15 +1097,15 @@
}
V8_FALLTHROUGH;
case INTERCEPTOR:
- if (!is_element && map->IsJSGlobalObjectMap()) {
+ if (!is_element && map.IsJSGlobalObjectMap()) {
GlobalDictionary dict =
- JSGlobalObject::cast(holder)->global_dictionary();
- int number = dict->FindEntry(isolate(), name_);
+ JSGlobalObject::cast(holder).global_dictionary();
+ int number = dict.FindEntry(isolate(), name_);
if (number == GlobalDictionary::kNotFound) return NOT_FOUND;
number_ = static_cast<uint32_t>(number);
- PropertyCell cell = dict->CellAt(number_);
- if (cell->value()->IsTheHole(isolate_)) return NOT_FOUND;
- property_details_ = cell->property_details();
+ PropertyCell cell = dict.CellAt(number_);
+ if (cell.value().IsTheHole(isolate_)) return NOT_FOUND;
+ property_details_ = cell.property_details();
has_property_ = true;
switch (property_details_.kind()) {
case v8::internal::kData:
@@ -1136,31 +1136,31 @@
if (is_element) {
JSObject js_object = JSObject::cast(holder);
- ElementsAccessor* accessor = js_object->GetElementsAccessor();
- FixedArrayBase backing_store = js_object->elements();
+ ElementsAccessor* accessor = js_object.GetElementsAccessor();
+ FixedArrayBase backing_store = js_object.elements();
number_ =
accessor->GetEntryForIndex(isolate_, js_object, backing_store, index_);
if (number_ == kMaxUInt32) {
- return holder->IsJSTypedArray() ? INTEGER_INDEXED_EXOTIC : NOT_FOUND;
+ return holder.IsJSTypedArray() ? INTEGER_INDEXED_EXOTIC : NOT_FOUND;
}
property_details_ = accessor->GetDetails(js_object, number_);
- if (map->has_frozen_or_sealed_elements()) {
- PropertyAttributes attrs = map->has_sealed_elements() ? SEALED : FROZEN;
+ if (map.has_frozen_or_sealed_elements()) {
+ PropertyAttributes attrs = map.has_sealed_elements() ? SEALED : FROZEN;
property_details_ = property_details_.CopyAddAttributes(attrs);
}
- } else if (!map->is_dictionary_map()) {
- DescriptorArray descriptors = map->instance_descriptors();
- int number = descriptors->SearchWithCache(isolate_, *name_, map);
+ } else if (!map.is_dictionary_map()) {
+ DescriptorArray descriptors = map.instance_descriptors();
+ int number = descriptors.SearchWithCache(isolate_, *name_, map);
if (number == DescriptorArray::kNotFound) return NotFound(holder);
number_ = static_cast<uint32_t>(number);
- property_details_ = descriptors->GetDetails(number_);
+ property_details_ = descriptors.GetDetails(number_);
} else {
- DCHECK_IMPLIES(holder->IsJSProxy(), name()->IsPrivate());
- NameDictionary dict = holder->property_dictionary();
- int number = dict->FindEntry(isolate(), name_);
+ DCHECK_IMPLIES(holder.IsJSProxy(), name()->IsPrivate());
+ NameDictionary dict = holder.property_dictionary();
+ int number = dict.FindEntry(isolate(), name_);
if (number == NameDictionary::kNotFound) return NotFound(holder);
number_ = static_cast<uint32_t>(number);
- property_details_ = dict->DetailsAt(number_);
+ property_details_ = dict.DetailsAt(number_);
}
has_property_ = true;
switch (property_details_.kind()) {
@@ -1180,8 +1180,8 @@
AccessCheckInfo access_check_info =
AccessCheckInfo::Get(isolate_, Handle<JSObject>::cast(holder_));
if (!access_check_info.is_null()) {
- Object interceptor = IsElement() ? access_check_info->indexed_interceptor()
- : access_check_info->named_interceptor();
+ Object interceptor = IsElement() ? access_check_info.indexed_interceptor()
+ : access_check_info.named_interceptor();
if (interceptor != Object()) {
return handle(InterceptorInfo::cast(interceptor), isolate_);
}
@@ -1199,7 +1199,7 @@
DCHECK(GetAccessors()->IsAccessorPair());
AccessorPair accessor_pair = AccessorPair::cast(*GetAccessors());
- Handle<Object> getter(accessor_pair->getter(), isolate());
+ Handle<Object> getter(accessor_pair.getter(), isolate());
MaybeHandle<Name> maybe_name =
FunctionTemplateInfo::TryGetCachedPropertyName(isolate(), getter);
if (maybe_name.is_null()) return false;
diff --git a/src/objects/lookup.h b/src/objects/lookup.h
index 1ad8ce8..09757cb 100644
--- a/src/objects/lookup.h
+++ b/src/objects/lookup.h
@@ -217,7 +217,7 @@
void NextInternal(Map map, JSReceiver holder);
template <bool is_element>
inline State LookupInHolder(Map map, JSReceiver holder) {
- return map->IsSpecialReceiverMap()
+ return map.IsSpecialReceiverMap()
? LookupInSpecialHolder<is_element>(map, holder)
: LookupInRegularHolder<is_element>(map, holder);
}
diff --git a/src/objects/managed.h b/src/objects/managed.h
index b12127b..9653efa 100644
--- a/src/objects/managed.h
+++ b/src/objects/managed.h
@@ -59,7 +59,7 @@
// Get a reference to the shared pointer to the C++ object.
V8_INLINE const std::shared_ptr<CppType>& get() { return *GetSharedPtrPtr(); }
- static Managed cast(Object obj) { return Managed(obj->ptr()); }
+ static Managed cast(Object obj) { return Managed(obj.ptr()); }
static Managed unchecked_cast(Object obj) { return bit_cast<Managed>(obj); }
// Allocate a new {CppType} and wrap it in a {Managed<CppType>}.
diff --git a/src/objects/map-inl.h b/src/objects/map-inl.h
index 760793f..8b6bfaa 100644
--- a/src/objects/map-inl.h
+++ b/src/objects/map-inl.h
@@ -98,18 +98,18 @@
InterceptorInfo Map::GetNamedInterceptor() {
DCHECK(has_named_interceptor());
FunctionTemplateInfo info = GetFunctionTemplateInfo();
- return InterceptorInfo::cast(info->GetNamedPropertyHandler());
+ return InterceptorInfo::cast(info.GetNamedPropertyHandler());
}
InterceptorInfo Map::GetIndexedInterceptor() {
DCHECK(has_indexed_interceptor());
FunctionTemplateInfo info = GetFunctionTemplateInfo();
- return InterceptorInfo::cast(info->GetIndexedPropertyHandler());
+ return InterceptorInfo::cast(info.GetIndexedPropertyHandler());
}
bool Map::IsMostGeneralFieldType(Representation representation,
FieldType field_type) {
- return !representation.IsHeapObject() || field_type->IsAny();
+ return !representation.IsHeapObject() || field_type.IsAny();
}
bool Map::CanHaveFastTransitionableElementsKind(InstanceType instance_type) {
@@ -139,7 +139,7 @@
bool Map::IsUnboxedDoubleField(FieldIndex index) const {
if (!FLAG_unbox_double_fields) return false;
if (index.is_hidden_field() || !index.is_inobject()) return false;
- return !layout_descriptor()->IsTagged(index.property_index());
+ return !layout_descriptor().IsTagged(index.property_index());
}
bool Map::TooManyFastProperties(StoreOrigin store_origin) const {
@@ -161,7 +161,7 @@
}
PropertyDetails Map::GetLastDescriptorDetails() const {
- return instance_descriptors()->GetDetails(LastAdded());
+ return instance_descriptors().GetDetails(LastAdded());
}
int Map::LastAdded() const {
@@ -175,7 +175,7 @@
}
void Map::SetNumberOfOwnDescriptors(int number) {
- DCHECK_LE(number, instance_descriptors()->number_of_descriptors());
+ DCHECK_LE(number, instance_descriptors().number_of_descriptors());
CHECK_LE(static_cast<unsigned>(number),
static_cast<unsigned>(kMaxNumberOfDescriptors));
set_bit_field3(NumberOfOwnDescriptorsBits::update(bit_field3(), number));
@@ -370,19 +370,19 @@
void Map::CopyUnusedPropertyFields(Map map) {
set_used_or_unused_instance_size_in_words(
- map->used_or_unused_instance_size_in_words());
- DCHECK_EQ(UnusedPropertyFields(), map->UnusedPropertyFields());
+ map.used_or_unused_instance_size_in_words());
+ DCHECK_EQ(UnusedPropertyFields(), map.UnusedPropertyFields());
}
void Map::CopyUnusedPropertyFieldsAdjustedForInstanceSize(Map map) {
- int value = map->used_or_unused_instance_size_in_words();
+ int value = map.used_or_unused_instance_size_in_words();
if (value >= JSValue::kFieldsAdded) {
// Unused in-object fields. Adjust the offset from the object’s start
// so it matches the distance to the object’s end.
- value += instance_size_in_words() - map->instance_size_in_words();
+ value += instance_size_in_words() - map.instance_size_in_words();
}
set_used_or_unused_instance_size_in_words(value);
- DCHECK_EQ(UnusedPropertyFields(), map->UnusedPropertyFields());
+ DCHECK_EQ(UnusedPropertyFields(), map.UnusedPropertyFields());
}
void Map::AccountAddedPropertyField() {
@@ -442,8 +442,8 @@
}
bool Map::should_be_fast_prototype_map() const {
- if (!prototype_info()->IsPrototypeInfo()) return false;
- return PrototypeInfo::cast(prototype_info())->should_be_fast_map();
+ if (!prototype_info().IsPrototypeInfo()) return false;
+ return PrototypeInfo::cast(prototype_info()).should_be_fast_map();
}
void Map::set_elements_kind(ElementsKind elements_kind) {
@@ -526,7 +526,7 @@
bool Map::CanBeDeprecated() const {
int descriptor = LastAdded();
for (int i = 0; i <= descriptor; i++) {
- PropertyDetails details = instance_descriptors()->GetDetails(i);
+ PropertyDetails details = instance_descriptors().GetDetails(i);
if (details.representation().IsNone()) return true;
if (details.representation().IsSmi()) return true;
if (details.representation().IsDouble()) return true;
@@ -541,7 +541,7 @@
void Map::NotifyLeafMapLayoutChange(Isolate* isolate) {
if (is_stable()) {
mark_unstable();
- dependent_code()->DeoptimizeDependentCodeGroup(
+ dependent_code().DeoptimizeDependentCodeGroup(
isolate, DependentCode::kPrototypeCheckGroup);
}
}
@@ -576,7 +576,7 @@
}
void Map::set_prototype(HeapObject value, WriteBarrierMode mode) {
- DCHECK(value->IsNull() || value->IsJSReceiver());
+ DCHECK(value.IsNull() || value.IsJSReceiver());
WRITE_FIELD(*this, kPrototypeOffset, value);
CONDITIONAL_WRITE_BARRIER(*this, kPrototypeOffset, value, mode);
}
@@ -603,13 +603,13 @@
int number_of_own_descriptors) {
SetInstanceDescriptors(isolate, descriptors, number_of_own_descriptors);
if (FLAG_unbox_double_fields) {
- if (layout_descriptor()->IsSlowLayout()) {
+ if (layout_descriptor().IsSlowLayout()) {
set_layout_descriptor(layout_desc);
}
#ifdef VERIFY_HEAP
// TODO(ishell): remove these checks from VERIFY_HEAP mode.
if (FLAG_verify_heap) {
- CHECK(layout_descriptor()->IsConsistentWithMap(*this));
+ CHECK(layout_descriptor().IsConsistentWithMap(*this));
CHECK_EQ(Map::GetVisitorId(*this), visitor_id());
}
#else
@@ -622,14 +622,14 @@
void Map::InitializeDescriptors(Isolate* isolate, DescriptorArray descriptors,
LayoutDescriptor layout_desc) {
SetInstanceDescriptors(isolate, descriptors,
- descriptors->number_of_descriptors());
+ descriptors.number_of_descriptors());
if (FLAG_unbox_double_fields) {
set_layout_descriptor(layout_desc);
#ifdef VERIFY_HEAP
// TODO(ishell): remove these checks from VERIFY_HEAP mode.
if (FLAG_verify_heap) {
- CHECK(layout_descriptor()->IsConsistentWithMap(*this));
+ CHECK(layout_descriptor().IsConsistentWithMap(*this));
}
#else
SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(*this));
@@ -661,11 +661,11 @@
void Map::AppendDescriptor(Isolate* isolate, Descriptor* desc) {
DescriptorArray descriptors = instance_descriptors();
int number_of_own_descriptors = NumberOfOwnDescriptors();
- DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
+ DCHECK(descriptors.number_of_descriptors() == number_of_own_descriptors);
{
// The following two operations need to happen before the marking write
// barrier.
- descriptors->Append(desc);
+ descriptors.Append(desc);
SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
MarkingBarrierForDescriptorArray(isolate->heap(), *this, descriptors,
number_of_own_descriptors + 1);
@@ -689,7 +689,7 @@
HeapObject Map::GetBackPointer() const {
Object object = constructor_or_backpointer();
- if (object->IsMap()) {
+ if (object.IsMap()) {
return Map::cast(object);
}
return GetReadOnlyRoots().undefined_value();
@@ -718,10 +718,10 @@
void Map::SetBackPointer(Object value, WriteBarrierMode mode) {
CHECK_GE(instance_type(), FIRST_JS_RECEIVER_TYPE);
- CHECK(value->IsMap());
- CHECK(GetBackPointer()->IsUndefined());
- CHECK_IMPLIES(value->IsMap(), Map::cast(value)->GetConstructor() ==
- constructor_or_backpointer());
+ CHECK(value.IsMap());
+ CHECK(GetBackPointer().IsUndefined());
+ CHECK_IMPLIES(value.IsMap(), Map::cast(value).GetConstructor() ==
+ constructor_or_backpointer());
set_constructor_or_backpointer(value, mode);
}
@@ -732,34 +732,34 @@
bool Map::IsPrototypeValidityCellValid() const {
Object validity_cell = prototype_validity_cell();
- Object value = validity_cell->IsSmi() ? Smi::cast(validity_cell)
- : Cell::cast(validity_cell)->value();
+ Object value = validity_cell.IsSmi() ? Smi::cast(validity_cell)
+ : Cell::cast(validity_cell).value();
return value == Smi::FromInt(Map::kPrototypeChainValid);
}
Object Map::GetConstructor() const {
Object maybe_constructor = constructor_or_backpointer();
// Follow any back pointers.
- while (maybe_constructor->IsMap()) {
+ while (maybe_constructor.IsMap()) {
maybe_constructor =
- Map::cast(maybe_constructor)->constructor_or_backpointer();
+ Map::cast(maybe_constructor).constructor_or_backpointer();
}
return maybe_constructor;
}
FunctionTemplateInfo Map::GetFunctionTemplateInfo() const {
Object constructor = GetConstructor();
- if (constructor->IsJSFunction()) {
- DCHECK(JSFunction::cast(constructor)->shared()->IsApiFunction());
- return JSFunction::cast(constructor)->shared()->get_api_func_data();
+ if (constructor.IsJSFunction()) {
+ DCHECK(JSFunction::cast(constructor).shared().IsApiFunction());
+ return JSFunction::cast(constructor).shared().get_api_func_data();
}
- DCHECK(constructor->IsFunctionTemplateInfo());
+ DCHECK(constructor.IsFunctionTemplateInfo());
return FunctionTemplateInfo::cast(constructor);
}
void Map::SetConstructor(Object constructor, WriteBarrierMode mode) {
// Never overwrite a back pointer with a constructor.
- CHECK(!constructor_or_backpointer()->IsMap());
+ CHECK(!constructor_or_backpointer().IsMap());
set_constructor_or_backpointer(constructor, mode);
}
@@ -775,7 +775,7 @@
void Map::InobjectSlackTrackingStep(Isolate* isolate) {
// Slack tracking should only be performed on an initial map.
- DCHECK(GetBackPointer()->IsUndefined());
+ DCHECK(GetBackPointer().IsUndefined());
if (!IsInobjectSlackTrackingInProgress()) return;
int counter = construction_counter();
set_construction_counter(counter - 1);
@@ -808,7 +808,7 @@
bool HeapObject::IsNormalizedMapCache() const {
if (!IsWeakFixedArray()) return false;
- if (WeakFixedArray::cast(*this)->length() != NormalizedMapCache::kEntries) {
+ if (WeakFixedArray::cast(*this).length() != NormalizedMapCache::kEntries) {
return false;
}
return true;
diff --git a/src/objects/map-updater.cc b/src/objects/map-updater.cc
index 1332e7b..96f4479 100644
--- a/src/objects/map-updater.cc
+++ b/src/objects/map-updater.cc
@@ -34,9 +34,8 @@
is_transitionable_fast_elements_kind_(
IsTransitionableFastElementsKind(new_elements_kind_)) {
// We shouldn't try to update remote objects.
- DCHECK(!old_map->FindRootMap(isolate)
- ->GetConstructor()
- ->IsFunctionTemplateInfo());
+ DCHECK(
+ !old_map->FindRootMap(isolate).GetConstructor().IsFunctionTemplateInfo());
}
Name MapUpdater::GetKey(int descriptor) const {
@@ -89,7 +88,7 @@
if (location == kField) {
return handle(GetFieldType(descriptor), isolate_);
} else {
- return GetValue(descriptor)->OptimalType(isolate_, representation);
+ return GetValue(descriptor).OptimalType(isolate_, representation);
}
}
@@ -102,7 +101,7 @@
return handle(descriptors->GetFieldType(descriptor), isolate_);
} else {
return descriptors->GetStrongValue(descriptor)
- ->OptimalType(isolate_, representation);
+ .OptimalType(isolate_, representation);
}
}
@@ -243,7 +242,7 @@
.representation()
.Equals(new_representation_));
DCHECK(old_descriptors_->GetFieldType(modified_descriptor_)
- ->NowIs(new_field_type_));
+ .NowIs(new_field_type_));
result_map_ = old_map_;
state_ = kEnd;
@@ -303,7 +302,7 @@
if (root_map_->is_deprecated()) {
state_ = kEnd;
result_map_ = handle(
- JSFunction::cast(root_map_->GetConstructor())->initial_map(), isolate_);
+ JSFunction::cast(root_map_->GetConstructor()).initial_map(), isolate_);
result_map_ = Map::AsElementsKind(isolate_, result_map_, to_kind);
DCHECK(result_map_->is_dictionary_map());
return state_;
@@ -427,7 +426,7 @@
if (modified_descriptor_ >= 0) {
DescriptorArray target_descriptors = target_map_->instance_descriptors();
PropertyDetails details =
- target_descriptors->GetDetails(modified_descriptor_);
+ target_descriptors.GetDetails(modified_descriptor_);
DCHECK_EQ(new_kind_, details.kind());
DCHECK_EQ(GetDetails(modified_descriptor_).attributes(),
details.attributes());
@@ -437,12 +436,12 @@
if (new_location_ == kField) {
DCHECK_EQ(kField, details.location());
DCHECK(new_field_type_->NowIs(
- target_descriptors->GetFieldType(modified_descriptor_)));
+ target_descriptors.GetFieldType(modified_descriptor_)));
} else {
DCHECK(details.location() == kField ||
EqualImmutableValues(
*new_value_,
- target_descriptors->GetStrongValue(modified_descriptor_)));
+ target_descriptors.GetStrongValue(modified_descriptor_)));
}
}
#endif
@@ -673,9 +672,9 @@
TransitionsAccessor(isolate_, current, &no_allocation)
.SearchTransition(name, details.kind(), details.attributes());
if (next.is_null()) break;
- DescriptorArray next_descriptors = next->instance_descriptors();
+ DescriptorArray next_descriptors = next.instance_descriptors();
- PropertyDetails next_details = next_descriptors->GetDetails(i);
+ PropertyDetails next_details = next_descriptors.GetDetails(i);
DCHECK_EQ(details.kind(), next_details.kind());
DCHECK_EQ(details.attributes(), next_details.attributes());
if (details.constness() != next_details.constness()) break;
@@ -683,13 +682,13 @@
if (!details.representation().Equals(next_details.representation())) break;
if (next_details.location() == kField) {
- FieldType next_field_type = next_descriptors->GetFieldType(i);
- if (!descriptors->GetFieldType(i)->NowIs(next_field_type)) {
+ FieldType next_field_type = next_descriptors.GetFieldType(i);
+ if (!descriptors->GetFieldType(i).NowIs(next_field_type)) {
break;
}
} else {
if (!EqualImmutableValues(descriptors->GetStrongValue(i),
- next_descriptors->GetStrongValue(i))) {
+ next_descriptors.GetStrongValue(i))) {
break;
}
}
@@ -716,7 +715,7 @@
Map maybe_transition = transitions.SearchTransition(
GetKey(split_nof), split_details.kind(), split_details.attributes());
if (!maybe_transition.is_null()) {
- maybe_transition->DeprecateTransitionTree(isolate_);
+ maybe_transition.DeprecateTransitionTree(isolate_);
}
// If |maybe_transition| is not nullptr then the transition array already
diff --git a/src/objects/map.cc b/src/objects/map.cc
index fb3bb5e..6839e93 100644
--- a/src/objects/map.cc
+++ b/src/objects/map.cc
@@ -34,12 +34,12 @@
}
int constructor_function_index = GetConstructorFunctionIndex();
if (constructor_function_index != Map::kNoConstructorFunctionIndex) {
- Context native_context = isolate->context()->native_context();
+ Context native_context = isolate->context().native_context();
JSFunction constructor_function =
- JSFunction::cast(native_context->get(constructor_function_index));
- return constructor_function->initial_map();
+ JSFunction::cast(native_context.get(constructor_function_index));
+ return constructor_function.initial_map();
}
- return ReadOnlyRoots(isolate).null_value()->map();
+ return ReadOnlyRoots(isolate).null_value().map();
}
// static
@@ -73,9 +73,9 @@
PropertyAttributes attributes) {
OFStream os(file);
os << "[reconfiguring]";
- Name name = instance_descriptors()->GetKey(modify_index);
- if (name->IsString()) {
- String::cast(name)->PrintOn(file);
+ Name name = instance_descriptors().GetKey(modify_index);
+ if (name.IsString()) {
+ String::cast(name).PrintOn(file);
} else {
os << "{symbol " << reinterpret_cast<void*>(name.ptr()) << "}";
}
@@ -88,7 +88,7 @@
VisitorId Map::GetVisitorId(Map map) {
STATIC_ASSERT(kVisitorIdCount <= 256);
- const int instance_type = map->instance_type();
+ const int instance_type = map.instance_type();
if (instance_type < FIRST_NONSTRING_TYPE) {
switch (instance_type & kStringRepresentationMask) {
@@ -301,7 +301,7 @@
case WASM_TABLE_TYPE:
case JS_BOUND_FUNCTION_TYPE: {
const bool has_raw_data_fields =
- (FLAG_unbox_double_fields && !map->HasFastPointerLayout()) ||
+ (FLAG_unbox_double_fields && !map.HasFastPointerLayout()) ||
(COMPRESS_POINTERS_BOOL && JSObject::GetEmbedderFieldCount(map) > 0);
return has_raw_data_fields ? kVisitJSObject : kVisitJSObjectFast;
}
@@ -368,9 +368,9 @@
MaybeHandle<FieldType> new_field_type, MaybeHandle<Object> new_value) {
OFStream os(file);
os << "[generalizing]";
- Name name = instance_descriptors()->GetKey(modify_index);
- if (name->IsString()) {
- String::cast(name)->PrintOn(file);
+ Name name = instance_descriptors().GetKey(modify_index);
+ if (name.IsString()) {
+ String::cast(name).PrintOn(file);
} else {
os << "{symbol " << reinterpret_cast<void*>(name.ptr()) << "}";
}
@@ -429,9 +429,9 @@
PropertyConstness constness,
Representation representation,
TransitionFlag flag) {
- DCHECK(DescriptorArray::kNotFound ==
- map->instance_descriptors()->Search(*name,
- map->NumberOfOwnDescriptors()));
+ DCHECK(
+ DescriptorArray::kNotFound ==
+ map->instance_descriptors().Search(*name, map->NumberOfOwnDescriptors()));
// Ensure the descriptor array does not get too big.
if (map->NumberOfOwnDescriptors() >= kMaxNumberOfDescriptors) {
@@ -477,7 +477,7 @@
bool Map::TransitionRemovesTaggedField(Map target) const {
int inobject = NumberOfFields();
- int target_inobject = target->NumberOfFields();
+ int target_inobject = target.NumberOfFields();
for (int i = target_inobject; i < inobject; i++) {
FieldIndex index = FieldIndex::ForPropertyIndex(*this, i);
if (!IsUnboxedDoubleField(index)) return true;
@@ -487,11 +487,11 @@
bool Map::TransitionChangesTaggedFieldToUntaggedField(Map target) const {
int inobject = NumberOfFields();
- int target_inobject = target->NumberOfFields();
+ int target_inobject = target.NumberOfFields();
int limit = Min(inobject, target_inobject);
for (int i = 0; i < limit; i++) {
FieldIndex index = FieldIndex::ForPropertyIndex(target, i);
- if (!IsUnboxedDoubleField(index) && target->IsUnboxedDoubleField(index)) {
+ if (!IsUnboxedDoubleField(index) && target.IsUnboxedDoubleField(index)) {
return true;
}
}
@@ -504,9 +504,9 @@
}
bool Map::InstancesNeedRewriting(Map target) const {
- int target_number_of_fields = target->NumberOfFields();
- int target_inobject = target->GetInObjectProperties();
- int target_unused = target->UnusedPropertyFields();
+ int target_number_of_fields = target.NumberOfFields();
+ int target_inobject = target.GetInObjectProperties();
+ int target_unused = target.UnusedPropertyFields();
int old_number_of_fields;
return InstancesNeedRewriting(target, target_number_of_fields,
@@ -524,11 +524,11 @@
// If smi descriptors were replaced by double descriptors, rewrite.
DescriptorArray old_desc = instance_descriptors();
- DescriptorArray new_desc = target->instance_descriptors();
+ DescriptorArray new_desc = target.instance_descriptors();
int limit = NumberOfOwnDescriptors();
for (int i = 0; i < limit; i++) {
- if (new_desc->GetDetails(i).representation().IsDouble() !=
- old_desc->GetDetails(i).representation().IsDouble()) {
+ if (new_desc.GetDetails(i).representation().IsDouble() !=
+ old_desc.GetDetails(i).representation().IsDouble()) {
return true;
}
}
@@ -552,7 +552,7 @@
DescriptorArray descriptors = instance_descriptors();
int result = 0;
for (int i = 0; i < NumberOfOwnDescriptors(); i++) {
- if (descriptors->GetDetails(i).location() == kField) result++;
+ if (descriptors.GetDetails(i).location() == kField) result++;
}
return result;
}
@@ -562,7 +562,7 @@
int mutable_count = 0;
int const_count = 0;
for (int i = 0; i < NumberOfOwnDescriptors(); i++) {
- PropertyDetails details = descriptors->GetDetails(i);
+ PropertyDetails details = descriptors.GetDetails(i);
if (details.location() == kField) {
switch (details.constness()) {
case PropertyConstness::kMutable:
@@ -621,7 +621,7 @@
MaybeHandle<FieldType> field_type = FieldType::None(isolate);
if (details.location() == kField) {
field_type = handle(
- map->instance_descriptors()->GetFieldType(modify_index), isolate);
+ map->instance_descriptors().GetFieldType(modify_index), isolate);
}
map->PrintGeneralization(
isolate, stdout, reason, modify_index,
@@ -641,14 +641,14 @@
TransitionsAccessor transitions(isolate, *this, &no_gc);
int num_transitions = transitions.NumberOfTransitions();
for (int i = 0; i < num_transitions; ++i) {
- transitions.GetTarget(i)->DeprecateTransitionTree(isolate);
+ transitions.GetTarget(i).DeprecateTransitionTree(isolate);
}
- DCHECK(!constructor_or_backpointer()->IsFunctionTemplateInfo());
+ DCHECK(!constructor_or_backpointer().IsFunctionTemplateInfo());
set_is_deprecated(true);
if (FLAG_trace_maps) {
LOG(isolate, MapEvent("Deprecate", *this, Map()));
}
- dependent_code()->DeoptimizeDependentCodeGroup(
+ dependent_code().DeoptimizeDependentCodeGroup(
isolate, DependentCode::kTransitionGroup);
NotifyLeafMapLayoutChange(isolate);
}
@@ -658,7 +658,7 @@
void Map::ReplaceDescriptors(Isolate* isolate, DescriptorArray new_descriptors,
LayoutDescriptor new_layout_descriptor) {
// Don't overwrite the empty descriptor array or initial map's descriptors.
- if (NumberOfOwnDescriptors() == 0 || GetBackPointer()->IsUndefined(isolate)) {
+ if (NumberOfOwnDescriptors() == 0 || GetBackPointer().IsUndefined(isolate)) {
return;
}
@@ -668,13 +668,13 @@
// all its elements.
Map current = *this;
MarkingBarrierForDescriptorArray(isolate->heap(), current, to_replace,
- to_replace->number_of_descriptors());
- while (current->instance_descriptors() == to_replace) {
- Object next = current->GetBackPointer();
- if (next->IsUndefined(isolate)) break; // Stop overwriting at initial map.
- current->SetEnumLength(kInvalidEnumCacheSentinel);
- current->UpdateDescriptors(isolate, new_descriptors, new_layout_descriptor,
- current->NumberOfOwnDescriptors());
+ to_replace.number_of_descriptors());
+ while (current.instance_descriptors() == to_replace) {
+ Object next = current.GetBackPointer();
+ if (next.IsUndefined(isolate)) break; // Stop overwriting at initial map.
+ current.SetEnumLength(kInvalidEnumCacheSentinel);
+ current.UpdateDescriptors(isolate, new_descriptors, new_layout_descriptor,
+ current.NumberOfOwnDescriptors());
current = Map::cast(next);
}
set_owns_descriptors(false);
@@ -683,13 +683,13 @@
Map Map::FindRootMap(Isolate* isolate) const {
Map result = *this;
while (true) {
- Object back = result->GetBackPointer();
- if (back->IsUndefined(isolate)) {
+ Object back = result.GetBackPointer();
+ if (back.IsUndefined(isolate)) {
// Initial map always owns descriptors and doesn't have unused entries
// in the descriptor array.
- DCHECK(result->owns_descriptors());
- DCHECK_EQ(result->NumberOfOwnDescriptors(),
- result->instance_descriptors()->number_of_descriptors());
+ DCHECK(result.owns_descriptors());
+ DCHECK_EQ(result.NumberOfOwnDescriptors(),
+ result.instance_descriptors().number_of_descriptors());
return result;
}
result = Map::cast(back);
@@ -698,13 +698,13 @@
Map Map::FindFieldOwner(Isolate* isolate, int descriptor) const {
DisallowHeapAllocation no_allocation;
- DCHECK_EQ(kField, instance_descriptors()->GetDetails(descriptor).location());
+ DCHECK_EQ(kField, instance_descriptors().GetDetails(descriptor).location());
Map result = *this;
while (true) {
- Object back = result->GetBackPointer();
- if (back->IsUndefined(isolate)) break;
+ Object back = result.GetBackPointer();
+ if (back.IsUndefined(isolate)) break;
const Map parent = Map::cast(back);
- if (parent->NumberOfOwnDescriptors() <= descriptor) break;
+ if (parent.NumberOfOwnDescriptors() <= descriptor) break;
result = parent;
}
return result;
@@ -717,7 +717,7 @@
DCHECK(new_wrapped_type->IsSmi() || new_wrapped_type->IsWeak());
// We store raw pointers in the queue, so no allocations are allowed.
DisallowHeapAllocation no_allocation;
- PropertyDetails details = instance_descriptors()->GetDetails(descriptor);
+ PropertyDetails details = instance_descriptors().GetDetails(descriptor);
if (details.location() != kField) return;
DCHECK_EQ(kData, details.kind());
@@ -735,8 +735,8 @@
Map target = transitions.GetTarget(i);
backlog.push(target);
}
- DescriptorArray descriptors = current->instance_descriptors();
- PropertyDetails details = descriptors->GetDetails(descriptor);
+ DescriptorArray descriptors = current.instance_descriptors();
+ PropertyDetails details = descriptors.GetDetails(descriptor);
// It is allowed to change representation here only from None
// to something or from Smi or HeapObject to Tagged.
@@ -746,17 +746,17 @@
// Skip if already updated the shared descriptor.
if (new_constness != details.constness() ||
!new_representation.Equals(details.representation()) ||
- descriptors->GetFieldType(descriptor) != *new_wrapped_type.object()) {
+ descriptors.GetFieldType(descriptor) != *new_wrapped_type.object()) {
Descriptor d = Descriptor::DataField(
- name, descriptors->GetFieldIndex(descriptor), details.attributes(),
+ name, descriptors.GetFieldIndex(descriptor), details.attributes(),
new_constness, new_representation, new_wrapped_type);
- descriptors->Replace(descriptor, &d);
+ descriptors.Replace(descriptor, &d);
}
}
}
bool FieldTypeIsCleared(Representation rep, FieldType type) {
- return type->IsNone() && rep.IsHeapObject();
+ return type.IsNone() && rep.IsHeapObject();
}
// static
@@ -821,7 +821,7 @@
MaybeObjectHandle wrapped_type(WrapFieldType(isolate, new_field_type));
field_owner->UpdateFieldType(isolate, modify_index, name, new_constness,
new_representation, wrapped_type);
- field_owner->dependent_code()->DeoptimizeDependentCodeGroup(
+ field_owner->dependent_code().DeoptimizeDependentCodeGroup(
isolate, DependentCode::kFieldOwnerGroup);
if (FLAG_trace_generalization) {
@@ -865,7 +865,7 @@
do {
target = TransitionsAccessor(isolate, target, &no_allocation)
.GetMigrationTarget();
- } while (!target.is_null() && target->is_deprecated());
+ } while (!target.is_null() && target.is_deprecated());
if (target.is_null()) return Map();
// TODO(ishell): if this validation ever become a bottleneck consider adding a
@@ -876,12 +876,12 @@
// types instead of old_map's types.
// Go to slow map updating if the old_map has fast properties with cleared
// field types.
- int old_nof = old_map->NumberOfOwnDescriptors();
- DescriptorArray old_descriptors = old_map->instance_descriptors();
+ int old_nof = old_map.NumberOfOwnDescriptors();
+ DescriptorArray old_descriptors = old_map.instance_descriptors();
for (int i = 0; i < old_nof; i++) {
- PropertyDetails old_details = old_descriptors->GetDetails(i);
+ PropertyDetails old_details = old_descriptors.GetDetails(i);
if (old_details.location() == kField && old_details.kind() == kData) {
- FieldType old_type = old_descriptors->GetFieldType(i);
+ FieldType old_type = old_descriptors.GetFieldType(i);
if (FieldTypeIsCleared(old_details.representation(), old_type)) {
return Map();
}
@@ -935,8 +935,8 @@
// Figure out the most restrictive integrity level transition (it should
// be the last one in the transition tree).
- DCHECK(!map->is_extensible());
- Map previous = Map::cast(map->GetBackPointer());
+ DCHECK(!map.is_extensible());
+ Map previous = Map::cast(map.GetBackPointer());
TransitionsAccessor last_transitions(isolate, previous, no_allocation);
if (!last_transitions.HasIntegrityLevelTransitionTo(
map, &(info.integrity_level_symbol), &(info.integrity_level))) {
@@ -953,8 +953,8 @@
// Now walk up the back pointer chain and skip all integrity level
// transitions. If we encounter any non-integrity level transition interleaved
// with integrity level transitions, just bail out.
- while (!source_map->is_extensible()) {
- previous = Map::cast(source_map->GetBackPointer());
+ while (!source_map.is_extensible()) {
+ previous = Map::cast(source_map.GetBackPointer());
TransitionsAccessor transitions(isolate, previous, no_allocation);
if (!transitions.HasIntegrityLevelTransitionTo(source_map)) {
return info;
@@ -963,7 +963,7 @@
}
// Integrity-level transitions never change number of descriptors.
- CHECK_EQ(map->NumberOfOwnDescriptors(), source_map->NumberOfOwnDescriptors());
+ CHECK_EQ(map.NumberOfOwnDescriptors(), source_map.NumberOfOwnDescriptors());
info.has_integrity_level_transition = true;
info.integrity_level_source_map = source_map;
@@ -977,26 +977,25 @@
DisallowDeoptimization no_deoptimization(isolate);
// Check the state of the root map.
- Map root_map = old_map->FindRootMap(isolate);
- if (root_map->is_deprecated()) {
- JSFunction constructor = JSFunction::cast(root_map->GetConstructor());
- DCHECK(constructor->has_initial_map());
- DCHECK(constructor->initial_map()->is_dictionary_map());
- if (constructor->initial_map()->elements_kind() !=
- old_map->elements_kind()) {
+ Map root_map = old_map.FindRootMap(isolate);
+ if (root_map.is_deprecated()) {
+ JSFunction constructor = JSFunction::cast(root_map.GetConstructor());
+ DCHECK(constructor.has_initial_map());
+ DCHECK(constructor.initial_map().is_dictionary_map());
+ if (constructor.initial_map().elements_kind() != old_map.elements_kind()) {
return Map();
}
- return constructor->initial_map();
+ return constructor.initial_map();
}
- if (!old_map->EquivalentToForTransition(root_map)) return Map();
+ if (!old_map.EquivalentToForTransition(root_map)) return Map();
- ElementsKind from_kind = root_map->elements_kind();
- ElementsKind to_kind = old_map->elements_kind();
+ ElementsKind from_kind = root_map.elements_kind();
+ ElementsKind to_kind = old_map.elements_kind();
IntegrityLevelTransitionInfo info(old_map);
- if (root_map->is_extensible() != old_map->is_extensible()) {
- DCHECK(!old_map->is_extensible());
- DCHECK(root_map->is_extensible());
+ if (root_map.is_extensible() != old_map.is_extensible()) {
+ DCHECK(!old_map.is_extensible());
+ DCHECK(root_map.is_extensible());
info = DetectIntegrityLevelTransitions(old_map, isolate, &no_allocation);
// Bail out if there were some private symbol transitions mixed up
// with the integrity level transitions.
@@ -1007,17 +1006,17 @@
to_kind == SLOW_STRING_WRAPPER_ELEMENTS ||
IsFixedTypedArrayElementsKind(to_kind) ||
IsHoleyFrozenOrSealedElementsKind(to_kind));
- to_kind = info.integrity_level_source_map->elements_kind();
+ to_kind = info.integrity_level_source_map.elements_kind();
}
if (from_kind != to_kind) {
// Try to follow existing elements kind transitions.
- root_map = root_map->LookupElementsTransitionMap(isolate, to_kind);
+ root_map = root_map.LookupElementsTransitionMap(isolate, to_kind);
if (root_map.is_null()) return Map();
// From here on, use the map with correct elements kind as root map.
}
// Replay the transitions as they were before the integrity level transition.
- Map result = root_map->TryReplayPropertyTransitions(
+ Map result = root_map.TryReplayPropertyTransitions(
isolate, info.integrity_level_source_map);
if (result.is_null()) return Map();
@@ -1028,9 +1027,9 @@
}
DCHECK_IMPLIES(!result.is_null(),
- old_map->elements_kind() == result->elements_kind());
+ old_map.elements_kind() == result.elements_kind());
DCHECK_IMPLIES(!result.is_null(),
- old_map->instance_type() == result->instance_type());
+ old_map.instance_type() == result.instance_type());
return result;
}
@@ -1040,21 +1039,21 @@
int root_nof = NumberOfOwnDescriptors();
- int old_nof = old_map->NumberOfOwnDescriptors();
- DescriptorArray old_descriptors = old_map->instance_descriptors();
+ int old_nof = old_map.NumberOfOwnDescriptors();
+ DescriptorArray old_descriptors = old_map.instance_descriptors();
Map new_map = *this;
for (int i = root_nof; i < old_nof; ++i) {
- PropertyDetails old_details = old_descriptors->GetDetails(i);
+ PropertyDetails old_details = old_descriptors.GetDetails(i);
Map transition =
TransitionsAccessor(isolate, new_map, &no_allocation)
- .SearchTransition(old_descriptors->GetKey(i), old_details.kind(),
+ .SearchTransition(old_descriptors.GetKey(i), old_details.kind(),
old_details.attributes());
if (transition.is_null()) return Map();
new_map = transition;
- DescriptorArray new_descriptors = new_map->instance_descriptors();
+ DescriptorArray new_descriptors = new_map.instance_descriptors();
- PropertyDetails new_details = new_descriptors->GetDetails(i);
+ PropertyDetails new_details = new_descriptors.GetDetails(i);
DCHECK_EQ(old_details.kind(), new_details.kind());
DCHECK_EQ(old_details.attributes(), new_details.attributes());
if (!IsGeneralizableTo(old_details.constness(), new_details.constness())) {
@@ -1066,7 +1065,7 @@
}
if (new_details.location() == kField) {
if (new_details.kind() == kData) {
- FieldType new_type = new_descriptors->GetFieldType(i);
+ FieldType new_type = new_descriptors.GetFieldType(i);
// Cleared field types need special treatment. They represent lost
// knowledge, so we must first generalize the new_type to "Any".
if (FieldTypeIsCleared(new_details.representation(), new_type)) {
@@ -1074,29 +1073,29 @@
}
DCHECK_EQ(kData, old_details.kind());
DCHECK_EQ(kField, old_details.location());
- FieldType old_type = old_descriptors->GetFieldType(i);
+ FieldType old_type = old_descriptors.GetFieldType(i);
if (FieldTypeIsCleared(old_details.representation(), old_type) ||
- !old_type->NowIs(new_type)) {
+ !old_type.NowIs(new_type)) {
return Map();
}
} else {
DCHECK_EQ(kAccessor, new_details.kind());
#ifdef DEBUG
- FieldType new_type = new_descriptors->GetFieldType(i);
- DCHECK(new_type->IsAny());
+ FieldType new_type = new_descriptors.GetFieldType(i);
+ DCHECK(new_type.IsAny());
#endif
UNREACHABLE();
}
} else {
DCHECK_EQ(kDescriptor, new_details.location());
if (old_details.location() == kField ||
- old_descriptors->GetStrongValue(i) !=
- new_descriptors->GetStrongValue(i)) {
+ old_descriptors.GetStrongValue(i) !=
+ new_descriptors.GetStrongValue(i)) {
return Map();
}
}
}
- if (new_map->NumberOfOwnDescriptors() != old_nof) return Map();
+ if (new_map.NumberOfOwnDescriptors() != old_nof) return Map();
return new_map;
}
@@ -1148,11 +1147,11 @@
descriptors->number_of_descriptors());
Map current = *map;
- while (current->instance_descriptors() == *descriptors) {
- Object next = current->GetBackPointer();
- if (next->IsUndefined(isolate)) break; // Stop overwriting at initial map.
- current->UpdateDescriptors(isolate, *new_descriptors, layout_descriptor,
- current->NumberOfOwnDescriptors());
+ while (current.instance_descriptors() == *descriptors) {
+ Object next = current.GetBackPointer();
+ if (next.IsUndefined(isolate)) break; // Stop overwriting at initial map.
+ current.UpdateDescriptors(isolate, *new_descriptors, layout_descriptor,
+ current.NumberOfOwnDescriptors());
current = Map::cast(next);
}
map->UpdateDescriptors(isolate, *new_descriptors, layout_descriptor,
@@ -1162,7 +1161,7 @@
// static
Handle<Map> Map::GetObjectCreateMap(Isolate* isolate,
Handle<HeapObject> prototype) {
- Handle<Map> map(isolate->native_context()->object_function()->initial_map(),
+ Handle<Map> map(isolate->native_context()->object_function().initial_map(),
isolate);
if (map->prototype() == *prototype) return map;
if (prototype->IsNull(isolate)) {
@@ -1170,7 +1169,7 @@
}
if (prototype->IsJSObject()) {
Handle<JSObject> js_prototype = Handle<JSObject>::cast(prototype);
- if (!js_prototype->map()->is_prototype_map()) {
+ if (!js_prototype->map().is_prototype_map()) {
JSObject::OptimizeAsPrototype(js_prototype);
}
Handle<PrototypeInfo> info =
@@ -1192,7 +1191,7 @@
// static
MaybeHandle<Map> Map::TryGetObjectCreateMap(Isolate* isolate,
Handle<HeapObject> prototype) {
- Handle<Map> map(isolate->native_context()->object_function()->initial_map(),
+ Handle<Map> map(isolate->native_context()->object_function().initial_map(),
isolate);
if (map->prototype() == *prototype) return map;
if (prototype->IsNull(isolate)) {
@@ -1200,7 +1199,7 @@
}
if (!prototype->IsJSObject()) return MaybeHandle<Map>();
Handle<JSObject> js_prototype = Handle<JSObject>::cast(prototype);
- if (!js_prototype->map()->is_prototype_map()) return MaybeHandle<Map>();
+ if (!js_prototype->map().is_prototype_map()) return MaybeHandle<Map>();
Handle<PrototypeInfo> info =
Map::GetOrCreatePrototypeInfo(js_prototype, isolate);
if (!info->HasObjectCreateMap()) return MaybeHandle<Map>();
@@ -1239,25 +1238,25 @@
// Check the state of the root map.
Map root_map = FindRootMap(isolate);
if (!EquivalentToForElementsKindTransition(root_map)) return Map();
- root_map = root_map->LookupElementsTransitionMap(isolate, kind);
+ root_map = root_map.LookupElementsTransitionMap(isolate, kind);
DCHECK(!root_map.is_null());
// Starting from the next existing elements kind transition try to
// replay the property transitions that does not involve instance rewriting
// (ElementsTransitionAndStoreStub does not support that).
- for (root_map = root_map->ElementsTransitionMap();
- !root_map.is_null() && root_map->has_fast_elements();
- root_map = root_map->ElementsTransitionMap()) {
+ for (root_map = root_map.ElementsTransitionMap();
+ !root_map.is_null() && root_map.has_fast_elements();
+ root_map = root_map.ElementsTransitionMap()) {
// If root_map's elements kind doesn't match any of the elements kind in
// the candidates there is no need to do any additional work.
- if (!HasElementsKind(candidates, root_map->elements_kind())) continue;
- Map current = root_map->TryReplayPropertyTransitions(isolate, *this);
+ if (!HasElementsKind(candidates, root_map.elements_kind())) continue;
+ Map current = root_map.TryReplayPropertyTransitions(isolate, *this);
if (current.is_null()) continue;
if (InstancesNeedRewriting(current)) continue;
if (ContainsMap(candidates, current) &&
- (packed || !IsFastPackedElementsKind(current->elements_kind()))) {
+ (packed || !IsFastPackedElementsKind(current.elements_kind()))) {
transition = current;
- packed = packed && IsFastPackedElementsKind(current->elements_kind());
+ packed = packed && IsFastPackedElementsKind(current.elements_kind());
}
}
}
@@ -1267,25 +1266,25 @@
static Map FindClosestElementsTransition(Isolate* isolate, Map map,
ElementsKind to_kind) {
// Ensure we are requested to search elements kind transition "near the root".
- DCHECK_EQ(map->FindRootMap(isolate)->NumberOfOwnDescriptors(),
- map->NumberOfOwnDescriptors());
+ DCHECK_EQ(map.FindRootMap(isolate).NumberOfOwnDescriptors(),
+ map.NumberOfOwnDescriptors());
Map current_map = map;
- ElementsKind kind = map->elements_kind();
+ ElementsKind kind = map.elements_kind();
while (kind != to_kind) {
- Map next_map = current_map->ElementsTransitionMap();
+ Map next_map = current_map.ElementsTransitionMap();
if (next_map.is_null()) return current_map;
- kind = next_map->elements_kind();
+ kind = next_map.elements_kind();
current_map = next_map;
}
- DCHECK_EQ(to_kind, current_map->elements_kind());
+ DCHECK_EQ(to_kind, current_map.elements_kind());
return current_map;
}
Map Map::LookupElementsTransitionMap(Isolate* isolate, ElementsKind to_kind) {
Map to_map = FindClosestElementsTransition(isolate, *this, to_kind);
- if (to_map->elements_kind() == to_kind) return to_map;
+ if (to_map.elements_kind() == to_kind) return to_map;
return Map();
}
@@ -1306,24 +1305,24 @@
ElementsKind from_kind = map->elements_kind();
if (from_kind == to_kind) return map;
- Context native_context = isolate->context()->native_context();
+ Context native_context = isolate->context().native_context();
if (from_kind == FAST_SLOPPY_ARGUMENTS_ELEMENTS) {
- if (*map == native_context->fast_aliased_arguments_map()) {
+ if (*map == native_context.fast_aliased_arguments_map()) {
DCHECK_EQ(SLOW_SLOPPY_ARGUMENTS_ELEMENTS, to_kind);
- return handle(native_context->slow_aliased_arguments_map(), isolate);
+ return handle(native_context.slow_aliased_arguments_map(), isolate);
}
} else if (from_kind == SLOW_SLOPPY_ARGUMENTS_ELEMENTS) {
- if (*map == native_context->slow_aliased_arguments_map()) {
+ if (*map == native_context.slow_aliased_arguments_map()) {
DCHECK_EQ(FAST_SLOPPY_ARGUMENTS_ELEMENTS, to_kind);
- return handle(native_context->fast_aliased_arguments_map(), isolate);
+ return handle(native_context.fast_aliased_arguments_map(), isolate);
}
} else if (IsFastElementsKind(from_kind) && IsFastElementsKind(to_kind)) {
// Reuse map transitions for JSArrays.
DisallowHeapAllocation no_gc;
- if (native_context->GetInitialJSArrayMap(from_kind) == *map) {
+ if (native_context.GetInitialJSArrayMap(from_kind) == *map) {
Object maybe_transitioned_map =
- native_context->get(Context::ArrayMapIndex(to_kind));
- if (maybe_transitioned_map->IsMap()) {
+ native_context.get(Context::ArrayMapIndex(to_kind));
+ if (maybe_transitioned_map.IsMap()) {
return handle(Map::cast(maybe_transitioned_map), isolate);
}
}
@@ -1333,8 +1332,8 @@
// Check if we can go back in the elements kind transition chain.
if (IsHoleyElementsKind(from_kind) &&
to_kind == GetPackedElementsKind(from_kind) &&
- map->GetBackPointer()->IsMap() &&
- Map::cast(map->GetBackPointer())->elements_kind() == to_kind) {
+ map->GetBackPointer().IsMap() &&
+ Map::cast(map->GetBackPointer()).elements_kind() == to_kind) {
return handle(Map::cast(map->GetBackPointer()), isolate);
}
@@ -1402,8 +1401,8 @@
DescriptorArray descs = instance_descriptors();
int limit = NumberOfOwnDescriptors();
for (int i = 0; i < limit; i++) {
- if ((descs->GetDetails(i).attributes() & ONLY_ENUMERABLE) == 0 &&
- !descs->GetKey(i)->FilterKey(ENUMERABLE_STRINGS)) {
+ if ((descs.GetDetails(i).attributes() & ONLY_ENUMERABLE) == 0 &&
+ !descs.GetKey(i).FilterKey(ENUMERABLE_STRINGS)) {
result++;
}
}
@@ -1415,7 +1414,7 @@
int number_of_own_descriptors = NumberOfOwnDescriptors();
DescriptorArray descs = instance_descriptors();
for (int i = 0; i < number_of_own_descriptors; i++) {
- PropertyDetails details = descs->GetDetails(i);
+ PropertyDetails details = descs.GetDetails(i);
if (details.location() == kField) {
int candidate = details.field_index() + details.field_width_in_words();
if (candidate > free_index) free_index = candidate;
@@ -1440,20 +1439,20 @@
for (PrototypeIterator iter(isolate, *this); !iter.IsAtEnd();
iter.Advance()) {
// Be conservative, don't walk into proxies.
- if (iter.GetCurrent()->IsJSProxy()) return true;
+ if (iter.GetCurrent().IsJSProxy()) return true;
// String wrappers have non-configurable, non-writable elements.
- if (iter.GetCurrent()->IsStringWrapper()) return true;
+ if (iter.GetCurrent().IsStringWrapper()) return true;
JSObject current = iter.GetCurrent<JSObject>();
- if (current->HasDictionaryElements() &&
- current->element_dictionary()->requires_slow_elements()) {
+ if (current.HasDictionaryElements() &&
+ current.element_dictionary().requires_slow_elements()) {
return true;
}
- if (current->HasSlowArgumentsElements()) {
- FixedArray parameter_map = FixedArray::cast(current->elements());
- Object arguments = parameter_map->get(1);
- if (NumberDictionary::cast(arguments)->requires_slow_elements()) {
+ if (current.HasSlowArgumentsElements()) {
+ FixedArray parameter_map = FixedArray::cast(current.elements());
+ Object arguments = parameter_map.get(1);
+ if (NumberDictionary::cast(arguments).requires_slow_elements()) {
return true;
}
}
@@ -1596,8 +1595,8 @@
// Function's initial map is a sloppy function map. Same holds for
// GeneratorFunction / AsyncFunction and its initial map.
Object constructor = map->GetConstructor();
- DCHECK(constructor->IsJSFunction());
- DCHECK(*map == JSFunction::cast(constructor)->initial_map() ||
+ DCHECK(constructor.IsJSFunction());
+ DCHECK(*map == JSFunction::cast(constructor).initial_map() ||
*map == *isolate->strict_function_map() ||
*map == *isolate->strict_function_with_name_map() ||
*map == *isolate->generator_function_map() ||
@@ -1613,7 +1612,7 @@
// does not contain descriptors that do not belong to the map.
DCHECK(map->owns_descriptors());
DCHECK_EQ(map->NumberOfOwnDescriptors(),
- map->instance_descriptors()->number_of_descriptors());
+ map->instance_descriptors().number_of_descriptors());
}
} // namespace
@@ -1669,7 +1668,7 @@
// array, implying that its NumberOfOwnDescriptors equals the number of
// descriptors in the descriptor array.
DCHECK_EQ(map->NumberOfOwnDescriptors(),
- map->instance_descriptors()->number_of_descriptors());
+ map->instance_descriptors().number_of_descriptors());
Handle<Map> result = CopyDropDescriptors(isolate, map);
Handle<Name> name = descriptor->GetKey();
@@ -1716,14 +1715,14 @@
child->may_have_interesting_symbols());
DCHECK_IMPLIES(parent->may_have_interesting_symbols(),
child->may_have_interesting_symbols());
- if (!parent->GetBackPointer()->IsUndefined(isolate)) {
+ if (!parent->GetBackPointer().IsUndefined(isolate)) {
parent->set_owns_descriptors(false);
} else {
// |parent| is initial map and it must keep the ownership, there must be no
// descriptors in the descriptors array that do not belong to the map.
DCHECK(parent->owns_descriptors());
DCHECK_EQ(parent->NumberOfOwnDescriptors(),
- parent->instance_descriptors()->number_of_descriptors());
+ parent->instance_descriptors().number_of_descriptors());
}
if (parent->is_prototype_map()) {
DCHECK(child->is_prototype_map());
@@ -1850,7 +1849,7 @@
#ifdef VERIFY_HEAP
// TODO(ishell): remove these checks from VERIFY_HEAP mode.
if (FLAG_verify_heap) {
- CHECK(child->layout_descriptor()->IsConsistentWithMap(*child));
+ CHECK(child->layout_descriptor().IsConsistentWithMap(*child));
}
#else
SLOW_DCHECK(child->layout_descriptor()->IsConsistentWithMap(*child));
@@ -1877,14 +1876,14 @@
Map maybe_elements_transition_map;
if (flag == INSERT_TRANSITION) {
// Ensure we are requested to add elements kind transition "near the root".
- DCHECK_EQ(map->FindRootMap(isolate)->NumberOfOwnDescriptors(),
+ DCHECK_EQ(map->FindRootMap(isolate).NumberOfOwnDescriptors(),
map->NumberOfOwnDescriptors());
maybe_elements_transition_map = map->ElementsTransitionMap();
- DCHECK(maybe_elements_transition_map.is_null() ||
- (maybe_elements_transition_map->elements_kind() ==
- DICTIONARY_ELEMENTS &&
- kind == DICTIONARY_ELEMENTS));
+ DCHECK(
+ maybe_elements_transition_map.is_null() ||
+ (maybe_elements_transition_map.elements_kind() == DICTIONARY_ELEMENTS &&
+ kind == DICTIONARY_ELEMENTS));
DCHECK(!IsFastElementsKind(kind) ||
IsMoreGeneralElementsKindTransition(map->elements_kind(), kind));
DCHECK(kind != map->elements_kind());
@@ -2072,12 +2071,12 @@
bool CanHoldValue(DescriptorArray descriptors, int descriptor,
PropertyConstness constness, Object value) {
- PropertyDetails details = descriptors->GetDetails(descriptor);
+ PropertyDetails details = descriptors.GetDetails(descriptor);
if (details.location() == kField) {
if (details.kind() == kData) {
return IsGeneralizableTo(constness, details.constness()) &&
- value->FitsRepresentation(details.representation()) &&
- descriptors->GetFieldType(descriptor)->NowContains(value);
+ value.FitsRepresentation(details.representation()) &&
+ descriptors.GetFieldType(descriptor).NowContains(value);
} else {
DCHECK_EQ(kAccessor, details.kind());
return false;
@@ -2102,7 +2101,7 @@
}
PropertyAttributes attributes =
- map->instance_descriptors()->GetDetails(descriptor).attributes();
+ map->instance_descriptors().GetDetails(descriptor).attributes();
Representation representation = value->OptimalRepresentation();
Handle<FieldType> type = value->OptimalType(isolate, representation);
@@ -2149,9 +2148,9 @@
Handle<Map> transition(maybe_transition, isolate);
int descriptor = transition->LastAdded();
- DCHECK_EQ(attributes, transition->instance_descriptors()
- ->GetDetails(descriptor)
- .attributes());
+ DCHECK_EQ(
+ attributes,
+ transition->instance_descriptors().GetDetails(descriptor).attributes());
return UpdateDescriptorForValue(isolate, transition, descriptor, constness,
value);
@@ -2184,11 +2183,11 @@
Handle<Object> maybe_constructor(map->GetConstructor(), isolate);
if (FLAG_feedback_normalization && map->new_target_is_base() &&
maybe_constructor->IsJSFunction() &&
- !JSFunction::cast(*maybe_constructor)->shared()->native()) {
+ !JSFunction::cast(*maybe_constructor).shared().native()) {
Handle<JSFunction> constructor =
Handle<JSFunction>::cast(maybe_constructor);
DCHECK_NE(*constructor,
- constructor->context()->native_context()->object_function());
+ constructor->context().native_context().object_function());
Handle<Map> initial_map(constructor->initial_map(), isolate);
result = Map::Normalize(isolate, initial_map, CLEAR_INOBJECT_PROPERTIES,
reason);
@@ -2197,7 +2196,7 @@
JSFunction::SetInitialMap(constructor, result, prototype);
// Deoptimize all code that embeds the previous initial map.
- initial_map->dependent_code()->DeoptimizeDependentCodeGroup(
+ initial_map->dependent_code().DeoptimizeDependentCodeGroup(
isolate, DependentCode::kInitialMapChangedGroup);
if (!result->EquivalentToForNormalization(*map,
CLEAR_INOBJECT_PROPERTIES)) {
@@ -2218,7 +2217,7 @@
// Dictionaries have to be reconfigured in-place.
DCHECK(!map->is_dictionary_map());
- if (!map->GetBackPointer()->IsMap()) {
+ if (!map->GetBackPointer().IsMap()) {
// There is no benefit from reconstructing transition tree for maps without
// back pointers.
return CopyGeneralizeAllFields(isolate, map, map->elements_kind(),
@@ -2269,12 +2268,12 @@
Handle<Map> transition(maybe_transition, isolate);
DescriptorArray descriptors = transition->instance_descriptors();
int descriptor = transition->LastAdded();
- DCHECK(descriptors->GetKey(descriptor)->Equals(*name));
+ DCHECK(descriptors.GetKey(descriptor).Equals(*name));
- DCHECK_EQ(kAccessor, descriptors->GetDetails(descriptor).kind());
- DCHECK_EQ(attributes, descriptors->GetDetails(descriptor).attributes());
+ DCHECK_EQ(kAccessor, descriptors.GetDetails(descriptor).kind());
+ DCHECK_EQ(attributes, descriptors.GetDetails(descriptor).attributes());
- Handle<Object> maybe_pair(descriptors->GetStrongValue(descriptor), isolate);
+ Handle<Object> maybe_pair(descriptors.GetStrongValue(descriptor), isolate);
if (!maybe_pair->IsAccessorPair()) {
return Map::Normalize(isolate, map, mode,
"TransitionToAccessorFromNonPair");
@@ -2295,7 +2294,7 @@
if (descriptor != map->LastAdded()) {
return Map::Normalize(isolate, map, mode, "AccessorsOverwritingNonLast");
}
- PropertyDetails old_details = old_descriptors->GetDetails(descriptor);
+ PropertyDetails old_details = old_descriptors.GetDetails(descriptor);
if (old_details.kind() != kAccessor) {
return Map::Normalize(isolate, map, mode,
"AccessorsOverwritingNonAccessors");
@@ -2305,7 +2304,7 @@
return Map::Normalize(isolate, map, mode, "AccessorsWithAttributes");
}
- Handle<Object> maybe_pair(old_descriptors->GetStrongValue(descriptor),
+ Handle<Object> maybe_pair(old_descriptors.GetStrongValue(descriptor),
isolate);
if (!maybe_pair->IsAccessorPair()) {
return Map::Normalize(isolate, map, mode, "AccessorsOverwritingNonPair");
@@ -2316,12 +2315,12 @@
bool overwriting_accessor = false;
if (!getter->IsNull(isolate) &&
- !current_pair->get(ACCESSOR_GETTER)->IsNull(isolate) &&
+ !current_pair->get(ACCESSOR_GETTER).IsNull(isolate) &&
current_pair->get(ACCESSOR_GETTER) != *getter) {
overwriting_accessor = true;
}
if (!setter->IsNull(isolate) &&
- !current_pair->get(ACCESSOR_SETTER)->IsNull(isolate) &&
+ !current_pair->get(ACCESSOR_SETTER).IsNull(isolate) &&
current_pair->get(ACCESSOR_SETTER) != *setter) {
overwriting_accessor = true;
}
@@ -2355,7 +2354,7 @@
// Share descriptors only if map owns descriptors and it not an initial map.
if (flag == INSERT_TRANSITION && map->owns_descriptors() &&
- !map->GetBackPointer()->IsUndefined(isolate) &&
+ !map->GetBackPointer().IsUndefined(isolate) &&
TransitionsAccessor(isolate, map).CanHaveMoreTransitions()) {
return ShareDescriptor(isolate, map, descriptors, descriptor);
}
@@ -2438,31 +2437,31 @@
namespace {
bool CheckEquivalent(const Map first, const Map second) {
- return first->GetConstructor() == second->GetConstructor() &&
- first->prototype() == second->prototype() &&
- first->instance_type() == second->instance_type() &&
- first->bit_field() == second->bit_field() &&
- first->is_extensible() == second->is_extensible() &&
- first->new_target_is_base() == second->new_target_is_base() &&
- first->has_hidden_prototype() == second->has_hidden_prototype();
+ return first.GetConstructor() == second.GetConstructor() &&
+ first.prototype() == second.prototype() &&
+ first.instance_type() == second.instance_type() &&
+ first.bit_field() == second.bit_field() &&
+ first.is_extensible() == second.is_extensible() &&
+ first.new_target_is_base() == second.new_target_is_base() &&
+ first.has_hidden_prototype() == second.has_hidden_prototype();
}
} // namespace
bool Map::EquivalentToForTransition(const Map other) const {
- CHECK_EQ(GetConstructor(), other->GetConstructor());
- CHECK_EQ(instance_type(), other->instance_type());
- CHECK_EQ(has_hidden_prototype(), other->has_hidden_prototype());
+ CHECK_EQ(GetConstructor(), other.GetConstructor());
+ CHECK_EQ(instance_type(), other.instance_type());
+ CHECK_EQ(has_hidden_prototype(), other.has_hidden_prototype());
- if (bit_field() != other->bit_field()) return false;
- if (new_target_is_base() != other->new_target_is_base()) return false;
- if (prototype() != other->prototype()) return false;
+ if (bit_field() != other.bit_field()) return false;
+ if (new_target_is_base() != other.new_target_is_base()) return false;
+ if (prototype() != other.prototype()) return false;
if (instance_type() == JS_FUNCTION_TYPE) {
// JSFunctions require more checks to ensure that sloppy function is
// not equivalent to strict function.
- int nof = Min(NumberOfOwnDescriptors(), other->NumberOfOwnDescriptors());
- return instance_descriptors()->IsEqualUpTo(other->instance_descriptors(),
- nof);
+ int nof = Min(NumberOfOwnDescriptors(), other.NumberOfOwnDescriptors());
+ return instance_descriptors().IsEqualUpTo(other.instance_descriptors(),
+ nof);
}
return true;
}
@@ -2476,10 +2475,10 @@
DescriptorArray descriptors = instance_descriptors();
int nof = NumberOfOwnDescriptors();
for (int i = 0; i < nof; i++) {
- PropertyDetails details = descriptors->GetDetails(i);
+ PropertyDetails details = descriptors.GetDetails(i);
if (details.location() == kField) {
DCHECK(IsMostGeneralFieldType(details.representation(),
- descriptors->GetFieldType(i)));
+ descriptors.GetFieldType(i)));
}
}
#endif
@@ -2489,15 +2488,15 @@
bool Map::EquivalentToForNormalization(const Map other,
PropertyNormalizationMode mode) const {
int properties =
- mode == CLEAR_INOBJECT_PROPERTIES ? 0 : other->GetInObjectProperties();
- return CheckEquivalent(*this, other) && bit_field2() == other->bit_field2() &&
+ mode == CLEAR_INOBJECT_PROPERTIES ? 0 : other.GetInObjectProperties();
+ return CheckEquivalent(*this, other) && bit_field2() == other.bit_field2() &&
GetInObjectProperties() == properties &&
JSObject::GetEmbedderFieldCount(*this) ==
JSObject::GetEmbedderFieldCount(other);
}
static void GetMinInobjectSlack(Map map, void* data) {
- int slack = map->UnusedPropertyFields();
+ int slack = map.UnusedPropertyFields();
if (*reinterpret_cast<int*>(data) > slack) {
*reinterpret_cast<int*>(data) = slack;
}
@@ -2506,7 +2505,7 @@
int Map::ComputeMinObjectSlack(Isolate* isolate) {
DisallowHeapAllocation no_gc;
// Has to be an initial map.
- DCHECK(GetBackPointer()->IsUndefined(isolate));
+ DCHECK(GetBackPointer().IsUndefined(isolate));
int slack = UnusedPropertyFields();
TransitionsAccessor transitions(isolate, *this, &no_gc);
@@ -2519,22 +2518,22 @@
DCHECK_GE(slack, 0);
#ifdef DEBUG
int old_visitor_id = Map::GetVisitorId(map);
- int new_unused = map->UnusedPropertyFields() - slack;
+ int new_unused = map.UnusedPropertyFields() - slack;
#endif
- map->set_instance_size(map->InstanceSizeFromSlack(slack));
- map->set_construction_counter(Map::kNoSlackTracking);
+ map.set_instance_size(map.InstanceSizeFromSlack(slack));
+ map.set_construction_counter(Map::kNoSlackTracking);
DCHECK_EQ(old_visitor_id, Map::GetVisitorId(map));
- DCHECK_EQ(new_unused, map->UnusedPropertyFields());
+ DCHECK_EQ(new_unused, map.UnusedPropertyFields());
}
static void StopSlackTracking(Map map, void* data) {
- map->set_construction_counter(Map::kNoSlackTracking);
+ map.set_construction_counter(Map::kNoSlackTracking);
}
void Map::CompleteInobjectSlackTracking(Isolate* isolate) {
DisallowHeapAllocation no_gc;
// Has to be an initial map.
- DCHECK(GetBackPointer()->IsUndefined(isolate));
+ DCHECK(GetBackPointer().IsUndefined(isolate));
int slack = ComputeMinObjectSlack(isolate);
TransitionsAccessor transitions(isolate, *this, &no_gc);
@@ -2557,12 +2556,12 @@
// static
Handle<PrototypeInfo> Map::GetOrCreatePrototypeInfo(Handle<JSObject> prototype,
Isolate* isolate) {
- Object maybe_proto_info = prototype->map()->prototype_info();
- if (maybe_proto_info->IsPrototypeInfo()) {
+ Object maybe_proto_info = prototype->map().prototype_info();
+ if (maybe_proto_info.IsPrototypeInfo()) {
return handle(PrototypeInfo::cast(maybe_proto_info), isolate);
}
Handle<PrototypeInfo> proto_info = isolate->factory()->NewPrototypeInfo();
- prototype->map()->set_prototype_info(*proto_info);
+ prototype->map().set_prototype_info(*proto_info);
return proto_info;
}
@@ -2570,7 +2569,7 @@
Handle<PrototypeInfo> Map::GetOrCreatePrototypeInfo(Handle<Map> prototype_map,
Isolate* isolate) {
Object maybe_proto_info = prototype_map->prototype_info();
- if (maybe_proto_info->IsPrototypeInfo()) {
+ if (maybe_proto_info.IsPrototypeInfo()) {
return handle(PrototypeInfo::cast(maybe_proto_info), isolate);
}
Handle<PrototypeInfo> proto_info = isolate->factory()->NewPrototypeInfo();
@@ -2581,7 +2580,7 @@
// static
void Map::SetShouldBeFastPrototypeMap(Handle<Map> map, bool value,
Isolate* isolate) {
- if (value == false && !map->prototype_info()->IsPrototypeInfo()) {
+ if (value == false && !map->prototype_info().IsPrototypeInfo()) {
// "False" is the implicit default value, so there's nothing to do.
return;
}
@@ -2599,7 +2598,7 @@
maybe_prototype = isolate->global_object();
} else {
maybe_prototype =
- handle(map->GetPrototypeChainRootMap(isolate)->prototype(), isolate);
+ handle(map->GetPrototypeChainRootMap(isolate).prototype(), isolate);
}
if (!maybe_prototype->IsJSObject()) {
return handle(Smi::FromInt(Map::kPrototypeChainValid), isolate);
@@ -2610,9 +2609,9 @@
JSObject::LazyRegisterPrototypeUser(handle(prototype->map(), isolate),
isolate);
- Object maybe_cell = prototype->map()->prototype_validity_cell();
+ Object maybe_cell = prototype->map().prototype_validity_cell();
// Return existing cell if it's still valid.
- if (maybe_cell->IsCell()) {
+ if (maybe_cell.IsCell()) {
Handle<Cell> cell(Cell::cast(maybe_cell), isolate);
if (cell->value() == Smi::FromInt(Map::kPrototypeChainValid)) {
return cell;
@@ -2621,17 +2620,17 @@
// Otherwise create a new cell.
Handle<Cell> cell = isolate->factory()->NewCell(
handle(Smi::FromInt(Map::kPrototypeChainValid), isolate));
- prototype->map()->set_prototype_validity_cell(*cell);
+ prototype->map().set_prototype_validity_cell(*cell);
return cell;
}
// static
bool Map::IsPrototypeChainInvalidated(Map map) {
- DCHECK(map->is_prototype_map());
- Object maybe_cell = map->prototype_validity_cell();
- if (maybe_cell->IsCell()) {
+ DCHECK(map.is_prototype_map());
+ Object maybe_cell = map.prototype_validity_cell();
+ if (maybe_cell.IsCell()) {
Cell cell = Cell::cast(maybe_cell);
- return cell->value() != Smi::FromInt(Map::kPrototypeChainValid);
+ return cell.value() != Smi::FromInt(Map::kPrototypeChainValid);
}
return true;
}
@@ -2648,16 +2647,16 @@
Handle<JSObject> prototype_jsobj = Handle<JSObject>::cast(prototype);
JSObject::OptimizeAsPrototype(prototype_jsobj, enable_prototype_setup_mode);
- Object maybe_constructor = prototype_jsobj->map()->GetConstructor();
- if (maybe_constructor->IsJSFunction()) {
+ Object maybe_constructor = prototype_jsobj->map().GetConstructor();
+ if (maybe_constructor.IsJSFunction()) {
JSFunction constructor = JSFunction::cast(maybe_constructor);
- Object data = constructor->shared()->function_data();
- is_hidden = (data->IsFunctionTemplateInfo() &&
- FunctionTemplateInfo::cast(data)->hidden_prototype()) ||
+ Object data = constructor.shared().function_data();
+ is_hidden = (data.IsFunctionTemplateInfo() &&
+ FunctionTemplateInfo::cast(data).hidden_prototype()) ||
prototype->IsJSGlobalObject();
- } else if (maybe_constructor->IsFunctionTemplateInfo()) {
+ } else if (maybe_constructor.IsFunctionTemplateInfo()) {
is_hidden =
- FunctionTemplateInfo::cast(maybe_constructor)->hidden_prototype() ||
+ FunctionTemplateInfo::cast(maybe_constructor).hidden_prototype() ||
prototype->IsJSGlobalObject();
}
} else {
@@ -2705,7 +2704,7 @@
}
Map normalized_map = Map::cast(heap_object);
- if (!normalized_map->EquivalentToForNormalization(*fast_map, mode)) {
+ if (!normalized_map.EquivalentToForNormalization(*fast_map, mode)) {
return MaybeHandle<Map>();
}
return handle(normalized_map, GetIsolate());
diff --git a/src/objects/maybe-object-inl.h b/src/objects/maybe-object-inl.h
index 2ae37ef..02f5b48 100644
--- a/src/objects/maybe-object-inl.h
+++ b/src/objects/maybe-object-inl.h
@@ -22,8 +22,8 @@
// static
MaybeObject MaybeObject::FromSmi(Smi smi) {
- DCHECK(HAS_SMI_TAG(smi->ptr()));
- return MaybeObject(smi->ptr());
+ DCHECK(HAS_SMI_TAG(smi.ptr()));
+ return MaybeObject(smi.ptr());
}
// static
@@ -42,20 +42,20 @@
//
HeapObjectReference::HeapObjectReference(Object object)
- : MaybeObject(object->ptr()) {}
+ : MaybeObject(object.ptr()) {}
// static
HeapObjectReference HeapObjectReference::Strong(Object object) {
- DCHECK(!object->IsSmi());
+ DCHECK(!object.IsSmi());
DCHECK(!HasWeakHeapObjectTag(object));
return HeapObjectReference(object);
}
// static
HeapObjectReference HeapObjectReference::Weak(Object object) {
- DCHECK(!object->IsSmi());
+ DCHECK(!object.IsSmi());
DCHECK(!HasWeakHeapObjectTag(object));
- return HeapObjectReference(object->ptr() | kWeakHeapObjectMask);
+ return HeapObjectReference(object.ptr() | kWeakHeapObjectMask);
}
// static
diff --git a/src/objects/module-inl.h b/src/objects/module-inl.h
index 09e1934..25e7918 100644
--- a/src/objects/module-inl.h
+++ b/src/objects/module-inl.h
@@ -40,7 +40,7 @@
ModuleInfo Module::info() const {
return (status() >= kEvaluating)
? ModuleInfo::cast(code())
- : GetSharedFunctionInfo()->scope_info()->ModuleDescriptorInfo();
+ : GetSharedFunctionInfo().scope_info().ModuleDescriptorInfo();
}
CAST_ACCESSOR(JSModuleNamespace)
@@ -84,12 +84,12 @@
#ifdef DEBUG
bool ModuleInfo::Equals(ModuleInfo other) const {
- return regular_exports() == other->regular_exports() &&
- regular_imports() == other->regular_imports() &&
- special_exports() == other->special_exports() &&
- namespace_imports() == other->namespace_imports() &&
- module_requests() == other->module_requests() &&
- module_request_positions() == other->module_request_positions();
+ return regular_exports() == other.regular_exports() &&
+ regular_imports() == other.regular_imports() &&
+ special_exports() == other.special_exports() &&
+ namespace_imports() == other.namespace_imports() &&
+ module_requests() == other.module_requests() &&
+ module_request_positions() == other.module_request_positions();
}
#endif
diff --git a/src/objects/module.cc b/src/objects/module.cc
index 1797e7c..2de1545 100644
--- a/src/objects/module.cc
+++ b/src/objects/module.cc
@@ -120,7 +120,7 @@
Handle<String> name,
Handle<ModuleInfoEntry> entry) {
Handle<ObjectHashTable> exports(module->exports(), isolate);
- DCHECK(exports->Lookup(name)->IsTheHole(isolate));
+ DCHECK(exports->Lookup(name).IsTheHole(isolate));
exports = ObjectHashTable::Put(exports, name, entry);
module->set_exports(*exports);
}
@@ -130,12 +130,12 @@
DCHECK_LT(0, names->length());
Handle<Cell> cell =
isolate->factory()->NewCell(isolate->factory()->undefined_value());
- module->regular_exports()->set(ExportIndex(cell_index), *cell);
+ module->regular_exports().set(ExportIndex(cell_index), *cell);
Handle<ObjectHashTable> exports(module->exports(), isolate);
for (int i = 0, n = names->length(); i < n; ++i) {
Handle<String> name(String::cast(names->get(i)), isolate);
- DCHECK(exports->Lookup(name)->IsTheHole(isolate));
+ DCHECK(exports->Lookup(name).IsTheHole(isolate));
exports = ObjectHashTable::Put(exports, name, cell);
}
module->set_exports(*exports);
@@ -146,10 +146,10 @@
Object cell;
switch (ModuleDescriptor::GetCellIndexKind(cell_index)) {
case ModuleDescriptor::kImport:
- cell = regular_imports()->get(ImportIndex(cell_index));
+ cell = regular_imports().get(ImportIndex(cell_index));
break;
case ModuleDescriptor::kExport:
- cell = regular_exports()->get(ExportIndex(cell_index));
+ cell = regular_exports().get(ExportIndex(cell_index));
break;
case ModuleDescriptor::kInvalid:
UNREACHABLE();
@@ -159,7 +159,7 @@
Handle<Object> Module::LoadVariable(Isolate* isolate, Handle<Module> module,
int cell_index) {
- return handle(module->GetCell(cell_index)->value(), isolate);
+ return handle(module->GetCell(cell_index).value(), isolate);
}
void Module::StoreVariable(Handle<Module> module, int cell_index,
@@ -167,7 +167,7 @@
DisallowHeapAllocation no_gc;
DCHECK_EQ(ModuleDescriptor::GetCellIndexKind(cell_index),
ModuleDescriptor::kExport);
- module->GetCell(cell_index)->set_value(*value);
+ module->GetCell(cell_index).set_value(*value);
}
#ifdef DEBUG
@@ -176,7 +176,7 @@
StdoutStream os;
os << "Changing module status from " << status() << " to " << new_status
<< " for ";
- script()->GetNameOrSourceURL()->Print(os);
+ script().GetNameOrSourceURL().Print(os);
#ifndef OBJECT_PRINT
os << "\n";
#endif // OBJECT_PRINT
@@ -215,24 +215,24 @@
DCHECK(module->status() == kPreInstantiating ||
module->status() == kInstantiating);
- DCHECK(module->exception()->IsTheHole(isolate));
- DCHECK(module->import_meta()->IsTheHole(isolate));
+ DCHECK(module->exception().IsTheHole(isolate));
+ DCHECK(module->import_meta().IsTheHole(isolate));
// The namespace object cannot exist, because it would have been created
// by RunInitializationCode, which is called only after this module's SCC
// succeeds instantiation.
- DCHECK(!module->module_namespace()->IsJSModuleNamespace());
+ DCHECK(!module->module_namespace().IsJSModuleNamespace());
Handle<ObjectHashTable> exports =
- ObjectHashTable::New(isolate, module->info()->RegularExportCount());
+ ObjectHashTable::New(isolate, module->info().RegularExportCount());
Handle<FixedArray> regular_exports =
- factory->NewFixedArray(module->regular_exports()->length());
+ factory->NewFixedArray(module->regular_exports().length());
Handle<FixedArray> regular_imports =
- factory->NewFixedArray(module->regular_imports()->length());
+ factory->NewFixedArray(module->regular_imports().length());
Handle<FixedArray> requested_modules =
- factory->NewFixedArray(module->requested_modules()->length());
+ factory->NewFixedArray(module->requested_modules().length());
if (module->status() == kInstantiating) {
- module->set_code(JSFunction::cast(module->code())->shared());
+ module->set_code(JSFunction::cast(module->code()).shared());
}
#ifdef DEBUG
module->PrintStatusTransition(kUninstantiated);
@@ -248,9 +248,9 @@
void Module::RecordError(Isolate* isolate) {
DisallowHeapAllocation no_alloc;
- DCHECK(exception()->IsTheHole(isolate));
+ DCHECK(exception().IsTheHole(isolate));
Object the_exception = isolate->pending_exception();
- DCHECK(!the_exception->IsTheHole(isolate));
+ DCHECK(!the_exception.IsTheHole(isolate));
set_code(info());
#ifdef DEBUG
@@ -263,7 +263,7 @@
Object Module::GetException() {
DisallowHeapAllocation no_alloc;
DCHECK_EQ(status(), Module::kErrored);
- DCHECK(!exception()->IsTheHole());
+ DCHECK(!exception().IsTheHole());
return exception();
}
@@ -274,14 +274,14 @@
switch (status()) {
case kUninstantiated:
case kPreInstantiating:
- DCHECK(code()->IsSharedFunctionInfo());
+ DCHECK(code().IsSharedFunctionInfo());
return SharedFunctionInfo::cast(code());
case kInstantiating:
- DCHECK(code()->IsJSFunction());
- return JSFunction::cast(code())->shared();
+ DCHECK(code().IsJSFunction());
+ return JSFunction::cast(code()).shared();
case kInstantiated:
- DCHECK(code()->IsJSGeneratorObject());
- return JSGeneratorObject::cast(code())->function()->shared();
+ DCHECK(code().IsJSGeneratorObject());
+ return JSGeneratorObject::cast(code()).function().shared();
case kEvaluating:
case kEvaluated:
case kErrored:
@@ -296,9 +296,9 @@
MessageLocation loc, bool must_resolve,
Module::ResolveSet* resolve_set) {
Handle<Module> requested_module(
- Module::cast(module->requested_modules()->get(module_request)), isolate);
+ Module::cast(module->requested_modules().get(module_request)), isolate);
Handle<String> specifier(
- String::cast(module->info()->module_requests()->get(module_request)),
+ String::cast(module->info().module_requests().get(module_request)),
isolate);
MaybeHandle<Cell> result =
Module::ResolveExport(isolate, requested_module, specifier, name, loc,
@@ -314,7 +314,7 @@
Module::ResolveSet* resolve_set) {
DCHECK_GE(module->status(), kPreInstantiating);
DCHECK_NE(module->status(), kEvaluating);
- Handle<Object> object(module->exports()->Lookup(export_name), isolate);
+ Handle<Object> object(module->exports().Lookup(export_name), isolate);
if (object->IsCell()) {
// Already resolved (e.g. because it's a local export).
return Handle<Cell>::cast(object);
@@ -362,7 +362,7 @@
// The export table may have changed but the entry in question should be
// unchanged.
Handle<ObjectHashTable> exports(module->exports(), isolate);
- DCHECK(exports->Lookup(export_name)->IsModuleInfoEntry());
+ DCHECK(exports->Lookup(export_name).IsModuleInfoEntry());
exports = ObjectHashTable::Put(exports, export_name, cell);
module->set_exports(*exports);
@@ -383,12 +383,12 @@
// Go through all star exports looking for the given name. If multiple star
// exports provide the name, make sure they all map it to the same cell.
Handle<Cell> unique_cell;
- Handle<FixedArray> special_exports(module->info()->special_exports(),
+ Handle<FixedArray> special_exports(module->info().special_exports(),
isolate);
for (int i = 0, n = special_exports->length(); i < n; ++i) {
i::Handle<i::ModuleInfoEntry> entry(
i::ModuleInfoEntry::cast(special_exports->get(i)), isolate);
- if (!entry->export_name()->IsUndefined(isolate)) {
+ if (!entry->export_name().IsUndefined(isolate)) {
continue; // Indirect export.
}
@@ -414,7 +414,7 @@
if (!unique_cell.is_null()) {
// Found a unique star export for this name.
Handle<ObjectHashTable> exports(module->exports(), isolate);
- DCHECK(exports->Lookup(export_name)->IsTheHole(isolate));
+ DCHECK(exports->Lookup(export_name).IsTheHole(isolate));
exports = ObjectHashTable::Put(exports, export_name, unique_cell);
module->set_exports(*exports);
return unique_cell;
@@ -438,7 +438,7 @@
if (FLAG_trace_module_status) {
StdoutStream os;
os << "Instantiating module ";
- module->script()->GetNameOrSourceURL()->Print(os);
+ module->script().GetNameOrSourceURL().Print(os);
#ifndef OBJECT_PRINT
os << "\n";
#endif // OBJECT_PRINT
@@ -531,7 +531,7 @@
bool Module::RunInitializationCode(Isolate* isolate, Handle<Module> module) {
DCHECK_EQ(module->status(), kInstantiating);
Handle<JSFunction> function(JSFunction::cast(module->code()), isolate);
- DCHECK_EQ(MODULE_SCOPE, function->shared()->scope_info()->scope_type());
+ DCHECK_EQ(MODULE_SCOPE, function->shared().scope_info().scope_type());
Handle<Object> receiver = isolate->factory()->undefined_value();
Handle<Object> argv[] = {module};
MaybeHandle<Object> maybe_generator =
@@ -636,7 +636,7 @@
.ToHandle(&cell)) {
return false;
}
- module->regular_imports()->set(ImportIndex(entry->cell_index()), *cell);
+ module->regular_imports().set(ImportIndex(entry->cell_index()), *cell);
}
// Resolve indirect exports.
@@ -663,7 +663,7 @@
if (FLAG_trace_module_status) {
StdoutStream os;
os << "Evaluating module ";
- module->script()->GetNameOrSourceURL()->Print(os);
+ module->script().GetNameOrSourceURL().Print(os);
#ifndef OBJECT_PRINT
os << "\n";
#endif // OBJECT_PRINT
@@ -709,7 +709,7 @@
Handle<JSGeneratorObject> generator(JSGeneratorObject::cast(module->code()),
isolate);
module->set_code(
- generator->function()->shared()->scope_info()->ModuleDescriptorInfo());
+ generator->function().shared().scope_info().ModuleDescriptorInfo());
module->SetStatus(kEvaluating);
module->set_dfs_index(*dfs_index);
module->set_dfs_ancestor_index(*dfs_index);
@@ -747,10 +747,10 @@
ASSIGN_RETURN_ON_EXCEPTION(
isolate, result, Execution::Call(isolate, resume, generator, 0, nullptr),
Object);
- DCHECK(JSIteratorResult::cast(*result)->done()->BooleanValue(isolate));
+ DCHECK(JSIteratorResult::cast(*result).done().BooleanValue(isolate));
CHECK(MaybeTransitionComponent(isolate, module, stack, kEvaluated));
- return handle(JSIteratorResult::cast(*result)->value(), isolate);
+ return handle(JSIteratorResult::cast(*result).value(), isolate);
}
namespace {
@@ -759,7 +759,7 @@
UnorderedModuleSet* visited) {
DCHECK_GE(module->status(), Module::kInstantiating);
- if (module->module_namespace()->IsJSModuleNamespace()) return; // Shortcut.
+ if (module->module_namespace().IsJSModuleNamespace()) return; // Shortcut.
bool cycle = !visited->insert(module).second;
if (cycle) return;
@@ -770,17 +770,16 @@
// Maybe split special_exports into indirect_exports and star_exports.
ReadOnlyRoots roots(isolate);
- Handle<FixedArray> special_exports(module->info()->special_exports(),
- isolate);
+ Handle<FixedArray> special_exports(module->info().special_exports(), isolate);
for (int i = 0, n = special_exports->length(); i < n; ++i) {
Handle<ModuleInfoEntry> entry(
ModuleInfoEntry::cast(special_exports->get(i)), isolate);
- if (!entry->export_name()->IsUndefined(roots)) {
+ if (!entry->export_name().IsUndefined(roots)) {
continue; // Indirect export.
}
Handle<Module> requested_module(
- Module::cast(module->requested_modules()->get(entry->module_request())),
+ Module::cast(module->requested_modules().get(entry->module_request())),
isolate);
// Recurse.
@@ -798,7 +797,7 @@
Handle<String> name(String::cast(key), isolate);
if (name->Equals(roots.default_string())) continue;
- if (!exports->Lookup(name)->IsTheHole(roots)) continue;
+ if (!exports->Lookup(name).IsTheHole(roots)) continue;
Handle<Cell> cell(Cell::cast(requested_exports->ValueAt(i)), isolate);
auto insert_result = more_exports.insert(std::make_pair(name, cell));
@@ -833,7 +832,7 @@
Handle<Module> module,
int module_request) {
Handle<Module> requested_module(
- Module::cast(module->requested_modules()->get(module_request)), isolate);
+ Module::cast(module->requested_modules().get(module_request)), isolate);
return Module::GetModuleNamespace(isolate, requested_module);
}
@@ -899,7 +898,7 @@
MaybeHandle<Object> JSModuleNamespace::GetExport(Isolate* isolate,
Handle<String> name) {
- Handle<Object> object(module()->exports()->Lookup(name), isolate);
+ Handle<Object> object(module().exports().Lookup(name), isolate);
if (object->IsTheHole(isolate)) {
return isolate->factory()->undefined_value();
}
@@ -921,7 +920,7 @@
Isolate* isolate = it->isolate();
- Handle<Object> lookup(object->module()->exports()->Lookup(name), isolate);
+ Handle<Object> lookup(object->module().exports().Lookup(name), isolate);
if (lookup->IsTheHole(isolate)) {
return Just(ABSENT);
}
diff --git a/src/objects/name-inl.h b/src/objects/name-inl.h
index 2848151..b3e04bb 100644
--- a/src/objects/name-inl.h
+++ b/src/objects/name-inl.h
@@ -45,7 +45,7 @@
}
bool Name::IsUniqueName() const {
- uint32_t type = map()->instance_type();
+ uint32_t type = map().instance_type();
bool result = (type & (kIsNotStringMask | kIsNotInternalizedMask)) !=
(kStringTag | kNotInternalizedTag);
SLOW_DCHECK(result == HeapObject::IsUniqueName());
@@ -60,11 +60,11 @@
bool Name::Equals(Name other) {
if (other == *this) return true;
- if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
- this->IsSymbol() || other->IsSymbol()) {
+ if ((this->IsInternalizedString() && other.IsInternalizedString()) ||
+ this->IsSymbol() || other.IsSymbol()) {
return false;
}
- return String::cast(*this)->SlowEquals(String::cast(other));
+ return String::cast(*this).SlowEquals(String::cast(other));
}
bool Name::Equals(Isolate* isolate, Handle<Name> one, Handle<Name> two) {
@@ -88,26 +88,26 @@
uint32_t field = hash_field();
if (IsHashFieldComputed(field)) return field >> kHashShift;
// Slow case: compute hash code and set it. Has to be a string.
- return String::cast(*this)->ComputeAndSetHash();
+ return String::cast(*this).ComputeAndSetHash();
}
bool Name::IsInterestingSymbol() const {
- return IsSymbol() && Symbol::cast(*this)->is_interesting_symbol();
+ return IsSymbol() && Symbol::cast(*this).is_interesting_symbol();
}
bool Name::IsPrivate() {
- return this->IsSymbol() && Symbol::cast(*this)->is_private();
+ return this->IsSymbol() && Symbol::cast(*this).is_private();
}
bool Name::IsPrivateName() {
bool is_private_name =
- this->IsSymbol() && Symbol::cast(*this)->is_private_name();
+ this->IsSymbol() && Symbol::cast(*this).is_private_name();
DCHECK_IMPLIES(is_private_name, IsPrivate());
return is_private_name;
}
bool Name::AsArrayIndex(uint32_t* index) {
- return IsString() && String::cast(*this)->AsArrayIndex(index);
+ return IsString() && String::cast(*this).AsArrayIndex(index);
}
// static
diff --git a/src/objects/object-macros.h b/src/objects/object-macros.h
index 65e34df..c511584 100644
--- a/src/objects/object-macros.h
+++ b/src/objects/object-macros.h
@@ -225,9 +225,9 @@
return instance_type == forinstancetype; \
}
-#define TYPE_CHECKER(type, ...) \
- bool HeapObject::Is##type() const { \
- return InstanceTypeChecker::Is##type(map()->instance_type()); \
+#define TYPE_CHECKER(type, ...) \
+ bool HeapObject::Is##type() const { \
+ return InstanceTypeChecker::Is##type(map().instance_type()); \
}
#define RELAXED_INT16_ACCESSORS(holder, name, offset) \
@@ -274,50 +274,50 @@
#define RELAXED_WRITE_WEAK_FIELD(p, offset, value) \
MaybeObjectSlot(FIELD_ADDR(p, offset)).Relaxed_Store(value)
-#define WRITE_BARRIER(object, offset, value) \
- do { \
- DCHECK_NOT_NULL(GetHeapFromWritableObject(object)); \
- MarkingBarrier(object, (object)->RawField(offset), value); \
- GenerationalBarrier(object, (object)->RawField(offset), value); \
+#define WRITE_BARRIER(object, offset, value) \
+ do { \
+ DCHECK_NOT_NULL(GetHeapFromWritableObject(object)); \
+ MarkingBarrier(object, (object).RawField(offset), value); \
+ GenerationalBarrier(object, (object).RawField(offset), value); \
} while (false)
-#define WEAK_WRITE_BARRIER(object, offset, value) \
- do { \
- DCHECK_NOT_NULL(GetHeapFromWritableObject(object)); \
- MarkingBarrier(object, (object)->RawMaybeWeakField(offset), value); \
- GenerationalBarrier(object, (object)->RawMaybeWeakField(offset), value); \
+#define WEAK_WRITE_BARRIER(object, offset, value) \
+ do { \
+ DCHECK_NOT_NULL(GetHeapFromWritableObject(object)); \
+ MarkingBarrier(object, (object).RawMaybeWeakField(offset), value); \
+ GenerationalBarrier(object, (object).RawMaybeWeakField(offset), value); \
} while (false)
-#define EPHEMERON_KEY_WRITE_BARRIER(object, offset, value) \
- do { \
- DCHECK_NOT_NULL(GetHeapFromWritableObject(object)); \
- EphemeronHashTable table = EphemeronHashTable::cast(object); \
- MarkingBarrier(object, (object)->RawField(offset), value); \
- GenerationalEphemeronKeyBarrier(table, (object)->RawField(offset), value); \
+#define EPHEMERON_KEY_WRITE_BARRIER(object, offset, value) \
+ do { \
+ DCHECK_NOT_NULL(GetHeapFromWritableObject(object)); \
+ EphemeronHashTable table = EphemeronHashTable::cast(object); \
+ MarkingBarrier(object, (object).RawField(offset), value); \
+ GenerationalEphemeronKeyBarrier(table, (object).RawField(offset), value); \
} while (false)
-#define CONDITIONAL_WRITE_BARRIER(object, offset, value, mode) \
- do { \
- DCHECK_NOT_NULL(GetHeapFromWritableObject(object)); \
- DCHECK_NE(mode, UPDATE_EPHEMERON_KEY_WRITE_BARRIER); \
- if (mode != SKIP_WRITE_BARRIER) { \
- if (mode == UPDATE_WRITE_BARRIER) { \
- MarkingBarrier(object, (object)->RawField(offset), value); \
- } \
- GenerationalBarrier(object, (object)->RawField(offset), value); \
- } \
+#define CONDITIONAL_WRITE_BARRIER(object, offset, value, mode) \
+ do { \
+ DCHECK_NOT_NULL(GetHeapFromWritableObject(object)); \
+ DCHECK_NE(mode, UPDATE_EPHEMERON_KEY_WRITE_BARRIER); \
+ if (mode != SKIP_WRITE_BARRIER) { \
+ if (mode == UPDATE_WRITE_BARRIER) { \
+ MarkingBarrier(object, (object).RawField(offset), value); \
+ } \
+ GenerationalBarrier(object, (object).RawField(offset), value); \
+ } \
} while (false)
-#define CONDITIONAL_WEAK_WRITE_BARRIER(object, offset, value, mode) \
- do { \
- DCHECK_NOT_NULL(GetHeapFromWritableObject(object)); \
- DCHECK_NE(mode, UPDATE_EPHEMERON_KEY_WRITE_BARRIER); \
- if (mode != SKIP_WRITE_BARRIER) { \
- if (mode == UPDATE_WRITE_BARRIER) { \
- MarkingBarrier(object, (object)->RawMaybeWeakField(offset), value); \
- } \
- GenerationalBarrier(object, (object)->RawMaybeWeakField(offset), value); \
- } \
+#define CONDITIONAL_WEAK_WRITE_BARRIER(object, offset, value, mode) \
+ do { \
+ DCHECK_NOT_NULL(GetHeapFromWritableObject(object)); \
+ DCHECK_NE(mode, UPDATE_EPHEMERON_KEY_WRITE_BARRIER); \
+ if (mode != SKIP_WRITE_BARRIER) { \
+ if (mode == UPDATE_WRITE_BARRIER) { \
+ MarkingBarrier(object, (object).RawMaybeWeakField(offset), value); \
+ } \
+ GenerationalBarrier(object, (object).RawMaybeWeakField(offset), value); \
+ } \
} while (false)
#define CONDITIONAL_EPHEMERON_KEY_WRITE_BARRIER(object, offset, value, mode) \
@@ -327,9 +327,9 @@
EphemeronHashTable table = EphemeronHashTable::cast(object); \
if (mode != SKIP_WRITE_BARRIER) { \
if (mode == UPDATE_WRITE_BARRIER) { \
- MarkingBarrier(object, (object)->RawField(offset), value); \
+ MarkingBarrier(object, (object).RawField(offset), value); \
} \
- GenerationalEphemeronKeyBarrier(table, (object)->RawField(offset), \
+ GenerationalEphemeronKeyBarrier(table, (object).RawField(offset), \
value); \
} \
} while (false)
diff --git a/src/objects/oddball-inl.h b/src/objects/oddball-inl.h
index 1263fd9..0b69ef8 100644
--- a/src/objects/oddball-inl.h
+++ b/src/objects/oddball-inl.h
@@ -54,7 +54,7 @@
bool HeapObject::IsBoolean() const {
return IsOddball() &&
- ((Oddball::cast(*this)->kind() & Oddball::kNotBooleanMask) == 0);
+ ((Oddball::cast(*this).kind() & Oddball::kNotBooleanMask) == 0);
}
} // namespace internal
diff --git a/src/objects/ordered-hash-table-inl.h b/src/objects/ordered-hash-table-inl.h
index fb562ec..b3ebf16 100644
--- a/src/objects/ordered-hash-table-inl.h
+++ b/src/objects/ordered-hash-table-inl.h
@@ -185,8 +185,8 @@
Object OrderedHashTableIterator<Derived, TableType>::CurrentKey() {
TableType table = TableType::cast(this->table());
int index = Smi::ToInt(this->index());
- Object key = table->KeyAt(index);
- DCHECK(!key->IsTheHole());
+ Object key = table.KeyAt(index);
+ DCHECK(!key.IsTheHole());
return key;
}
diff --git a/src/objects/ordered-hash-table.cc b/src/objects/ordered-hash-table.cc
index 37089fe..cdfc928 100644
--- a/src/objects/ordered-hash-table.cc
+++ b/src/objects/ordered-hash-table.cc
@@ -86,10 +86,10 @@
template <class Derived, int entrysize>
bool OrderedHashTable<Derived, entrysize>::HasKey(Isolate* isolate,
Derived table, Object key) {
- DCHECK_IMPLIES(entrysize == 1, table->IsOrderedHashSet());
- DCHECK_IMPLIES(entrysize == 2, table->IsOrderedHashMap());
+ DCHECK_IMPLIES(entrysize == 1, table.IsOrderedHashSet());
+ DCHECK_IMPLIES(entrysize == 2, table.IsOrderedHashMap());
DisallowHeapAllocation no_gc;
- int entry = table->FindEntry(isolate, key);
+ int entry = table.FindEntry(isolate, key);
return entry != kNotFound;
}
@@ -99,21 +99,21 @@
int entry;
// This special cases for Smi, so that we avoid the HandleScope
// creation below.
- if (key->IsSmi()) {
+ if (key.IsSmi()) {
uint32_t hash = ComputeUnseededHash(Smi::ToInt(key));
entry = HashToEntry(hash & Smi::kMaxValue);
} else {
HandleScope scope(isolate);
- Object hash = key->GetHash();
+ Object hash = key.GetHash();
// If the object does not have an identity hash, it was never used as a key
- if (hash->IsUndefined(isolate)) return kNotFound;
+ if (hash.IsUndefined(isolate)) return kNotFound;
entry = HashToEntry(Smi::ToInt(hash));
}
// Walk the chain in the bucket to find the key.
while (entry != kNotFound) {
Object candidate_key = KeyAt(entry);
- if (candidate_key->SameValueZero(key)) break;
+ if (candidate_key.SameValueZero(key)) break;
entry = NextChainEntry(entry);
}
@@ -123,13 +123,13 @@
Handle<OrderedHashSet> OrderedHashSet::Add(Isolate* isolate,
Handle<OrderedHashSet> table,
Handle<Object> key) {
- int hash = key->GetOrCreateHash(isolate)->value();
+ int hash = key->GetOrCreateHash(isolate).value();
int entry = table->HashToEntry(hash);
// Walk the chain of the bucket and try finding the key.
while (entry != kNotFound) {
Object candidate_key = table->KeyAt(entry);
// Do not add if we have the key already
- if (candidate_key->SameValueZero(*key)) return table;
+ if (candidate_key.SameValueZero(*key)) return table;
entry = table->NextChainEntry(entry);
}
@@ -164,12 +164,12 @@
Object key = table->get(index);
if (convert == GetKeysConversion::kConvertToString) {
uint32_t index_value;
- if (key->ToArrayIndex(&index_value)) {
+ if (key.ToArrayIndex(&index_value)) {
// Avoid trashing the Number2String cache if indices get very large.
bool use_cache = i < kMaxStringTableEntries;
key = *isolate->factory()->Uint32ToString(index_value, use_cache);
} else {
- CHECK(key->IsName());
+ CHECK(key.IsName());
}
}
result->set(i, key);
@@ -203,12 +203,12 @@
DisallowHeapAllocation no_gc;
for (int old_entry = 0; old_entry < (nof + nod); ++old_entry) {
Object key = table->KeyAt(old_entry);
- if (key->IsTheHole(isolate)) {
+ if (key.IsTheHole(isolate)) {
table->SetRemovedIndexAt(removed_holes_index++, old_entry);
continue;
}
- Object hash = key->GetHash();
+ Object hash = key.GetHash();
int bucket = Smi::ToInt(hash) & (new_buckets - 1);
Object chain_entry = new_table->get(HashTableStartIndex() + bucket);
new_table->set(HashTableStartIndex() + bucket, Smi::FromInt(new_entry));
@@ -257,20 +257,20 @@
bool OrderedHashTable<Derived, entrysize>::Delete(Isolate* isolate,
Derived table, Object key) {
DisallowHeapAllocation no_gc;
- int entry = table->FindEntry(isolate, key);
+ int entry = table.FindEntry(isolate, key);
if (entry == kNotFound) return false;
- int nof = table->NumberOfElements();
- int nod = table->NumberOfDeletedElements();
- int index = table->EntryToIndex(entry);
+ int nof = table.NumberOfElements();
+ int nod = table.NumberOfDeletedElements();
+ int index = table.EntryToIndex(entry);
Object hole = ReadOnlyRoots(isolate).the_hole_value();
for (int i = 0; i < entrysize; ++i) {
- table->set(index + i, hole);
+ table.set(index + i, hole);
}
- table->SetNumberOfElements(nof - 1);
- table->SetNumberOfDeletedElements(nod + 1);
+ table.SetNumberOfElements(nof - 1);
+ table.SetNumberOfDeletedElements(nod + 1);
return true;
}
@@ -278,11 +278,11 @@
Address OrderedHashMap::GetHash(Isolate* isolate, Address raw_key) {
DisallowHeapAllocation no_gc;
Object key(raw_key);
- Object hash = key->GetHash();
+ Object hash = key.GetHash();
// If the object does not have an identity hash, it was never used as a key
- if (hash->IsUndefined(isolate)) return Smi::FromInt(-1).ptr();
- DCHECK(hash->IsSmi());
- DCHECK_GE(Smi::cast(hash)->value(), 0);
+ if (hash.IsUndefined(isolate)) return Smi::FromInt(-1).ptr();
+ DCHECK(hash.IsSmi());
+ DCHECK_GE(Smi::cast(hash).value(), 0);
return hash.ptr();
}
@@ -290,7 +290,7 @@
Handle<OrderedHashMap> table,
Handle<Object> key,
Handle<Object> value) {
- int hash = key->GetOrCreateHash(isolate)->value();
+ int hash = key->GetOrCreateHash(isolate).value();
int entry = table->HashToEntry(hash);
// Walk the chain of the bucket and try finding the key.
{
@@ -299,7 +299,7 @@
while (entry != kNotFound) {
Object candidate_key = table->KeyAt(entry);
// Do not add if we have the key already
- if (candidate_key->SameValueZero(raw_key)) return table;
+ if (candidate_key.SameValueZero(raw_key)) return table;
entry = table->NextChainEntry(entry);
}
}
@@ -326,14 +326,14 @@
Isolate* isolate, Object key) {
DisallowHeapAllocation no_gc;
- DCHECK(key->IsUniqueName());
+ DCHECK(key.IsUniqueName());
Name raw_key = Name::cast(key);
- int entry = HashToEntry(raw_key->Hash());
+ int entry = HashToEntry(raw_key.Hash());
while (entry != kNotFound) {
Object candidate_key = KeyAt(entry);
- DCHECK(candidate_key->IsTheHole() ||
- Name::cast(candidate_key)->IsUniqueName());
+ DCHECK(candidate_key.IsTheHole() ||
+ Name::cast(candidate_key).IsUniqueName());
if (candidate_key == raw_key) return entry;
// TODO(gsathya): This is loading the bucket count from the hash
@@ -377,7 +377,7 @@
void OrderedNameDictionary::SetEntry(Isolate* isolate, int entry, Object key,
Object value, PropertyDetails details) {
DisallowHeapAllocation gc;
- DCHECK_IMPLIES(!key->IsName(), key->IsTheHole(isolate));
+ DCHECK_IMPLIES(!key.IsName(), key.IsTheHole(isolate));
DisallowHeapAllocation no_gc;
int index = EntryToIndex(entry);
this->set(index, key);
@@ -554,7 +554,7 @@
}
}
- int hash = key->GetOrCreateHash(isolate)->value();
+ int hash = key->GetOrCreateHash(isolate).value();
int nof = table->NumberOfElements();
// Read the existing bucket values.
@@ -597,7 +597,7 @@
}
}
- int hash = key->GetOrCreateHash(isolate)->value();
+ int hash = key->GetOrCreateHash(isolate).value();
int nof = table->NumberOfElements();
// Read the existing bucket values.
@@ -633,10 +633,10 @@
SmallOrderedHashTable<SmallOrderedNameDictionary>::FindEntry(Isolate* isolate,
Object key) {
DisallowHeapAllocation no_gc;
- DCHECK(key->IsUniqueName());
+ DCHECK(key.IsUniqueName());
Name raw_key = Name::cast(key);
- int entry = HashToFirstEntry(raw_key->Hash());
+ int entry = HashToFirstEntry(raw_key.Hash());
// Walk the chain in the bucket to find the key.
while (entry != kNotFound) {
@@ -692,7 +692,7 @@
void SmallOrderedNameDictionary::SetEntry(Isolate* isolate, int entry,
Object key, Object value,
PropertyDetails details) {
- DCHECK_IMPLIES(!key->IsName(), key->IsTheHole(isolate));
+ DCHECK_IMPLIES(!key.IsName(), key.IsTheHole(isolate));
SetDataEntry(entry, SmallOrderedNameDictionary::kValueIndex, value);
SetDataEntry(entry, SmallOrderedNameDictionary::kKeyIndex, key);
@@ -713,19 +713,19 @@
bool SmallOrderedHashTable<Derived>::Delete(Isolate* isolate, Derived table,
Object key) {
DisallowHeapAllocation no_gc;
- int entry = table->FindEntry(isolate, key);
+ int entry = table.FindEntry(isolate, key);
if (entry == kNotFound) return false;
- int nof = table->NumberOfElements();
- int nod = table->NumberOfDeletedElements();
+ int nof = table.NumberOfElements();
+ int nod = table.NumberOfDeletedElements();
Object hole = ReadOnlyRoots(isolate).the_hole_value();
for (int j = 0; j < Derived::kEntrySize; j++) {
- table->SetDataEntry(entry, j, hole);
+ table.SetDataEntry(entry, j, hole);
}
- table->SetNumberOfElements(nof - 1);
- table->SetNumberOfDeletedElements(nod + 1);
+ table.SetNumberOfElements(nof - 1);
+ table.SetNumberOfDeletedElements(nod + 1);
return true;
}
@@ -765,9 +765,9 @@
DisallowHeapAllocation no_gc;
for (int old_entry = 0; old_entry < (nof + nod); ++old_entry) {
Object key = table->KeyAt(old_entry);
- if (key->IsTheHole(isolate)) continue;
+ if (key.IsTheHole(isolate)) continue;
- int hash = Smi::ToInt(key->GetHash());
+ int hash = Smi::ToInt(key.GetHash());
int bucket = new_table->HashToBucket(hash);
int chain = new_table->GetFirstEntry(bucket);
@@ -848,15 +848,15 @@
template <class Derived>
int SmallOrderedHashTable<Derived>::FindEntry(Isolate* isolate, Object key) {
DisallowHeapAllocation no_gc;
- Object hash = key->GetHash();
+ Object hash = key.GetHash();
- if (hash->IsUndefined(isolate)) return kNotFound;
+ if (hash.IsUndefined(isolate)) return kNotFound;
int entry = HashToFirstEntry(Smi::ToInt(hash));
// Walk the chain in the bucket to find the key.
while (entry != kNotFound) {
Object candidate_key = KeyAt(entry);
- if (candidate_key->SameValueZero(key)) return entry;
+ if (candidate_key.SameValueZero(key)) return entry;
entry = GetNextEntry(entry);
}
return kNotFound;
@@ -1092,114 +1092,112 @@
int entry, Object key, Object value,
PropertyDetails details) {
DisallowHeapAllocation no_gc;
- if (table->IsSmallOrderedNameDictionary()) {
- return SmallOrderedNameDictionary::cast(table)->SetEntry(
- isolate, entry, key, value, details);
+ if (table.IsSmallOrderedNameDictionary()) {
+ return SmallOrderedNameDictionary::cast(table).SetEntry(isolate, entry, key,
+ value, details);
}
- DCHECK(table->IsOrderedNameDictionary());
- return OrderedNameDictionary::cast(table)->SetEntry(isolate, entry, key,
- value, details);
+ DCHECK(table.IsOrderedNameDictionary());
+ return OrderedNameDictionary::cast(table).SetEntry(isolate, entry, key, value,
+ details);
}
int OrderedNameDictionaryHandler::FindEntry(Isolate* isolate, HeapObject table,
Name key) {
DisallowHeapAllocation no_gc;
- if (table->IsSmallOrderedNameDictionary()) {
- int entry =
- SmallOrderedNameDictionary::cast(table)->FindEntry(isolate, key);
+ if (table.IsSmallOrderedNameDictionary()) {
+ int entry = SmallOrderedNameDictionary::cast(table).FindEntry(isolate, key);
return entry == SmallOrderedNameDictionary::kNotFound
? OrderedNameDictionaryHandler::kNotFound
: entry;
}
- DCHECK(table->IsOrderedNameDictionary());
- int entry = OrderedNameDictionary::cast(table)->FindEntry(isolate, key);
+ DCHECK(table.IsOrderedNameDictionary());
+ int entry = OrderedNameDictionary::cast(table).FindEntry(isolate, key);
return entry == OrderedNameDictionary::kNotFound
? OrderedNameDictionaryHandler::kNotFound
: entry;
}
Object OrderedNameDictionaryHandler::ValueAt(HeapObject table, int entry) {
- if (table->IsSmallOrderedNameDictionary()) {
- return SmallOrderedNameDictionary::cast(table)->ValueAt(entry);
+ if (table.IsSmallOrderedNameDictionary()) {
+ return SmallOrderedNameDictionary::cast(table).ValueAt(entry);
}
- DCHECK(table->IsOrderedNameDictionary());
- return OrderedNameDictionary::cast(table)->ValueAt(entry);
+ DCHECK(table.IsOrderedNameDictionary());
+ return OrderedNameDictionary::cast(table).ValueAt(entry);
}
void OrderedNameDictionaryHandler::ValueAtPut(HeapObject table, int entry,
Object value) {
- if (table->IsSmallOrderedNameDictionary()) {
- return SmallOrderedNameDictionary::cast(table)->ValueAtPut(entry, value);
+ if (table.IsSmallOrderedNameDictionary()) {
+ return SmallOrderedNameDictionary::cast(table).ValueAtPut(entry, value);
}
- DCHECK(table->IsOrderedNameDictionary());
- OrderedNameDictionary::cast(table)->ValueAtPut(entry, value);
+ DCHECK(table.IsOrderedNameDictionary());
+ OrderedNameDictionary::cast(table).ValueAtPut(entry, value);
}
PropertyDetails OrderedNameDictionaryHandler::DetailsAt(HeapObject table,
int entry) {
- if (table->IsSmallOrderedNameDictionary()) {
- return SmallOrderedNameDictionary::cast(table)->DetailsAt(entry);
+ if (table.IsSmallOrderedNameDictionary()) {
+ return SmallOrderedNameDictionary::cast(table).DetailsAt(entry);
}
- DCHECK(table->IsOrderedNameDictionary());
- return OrderedNameDictionary::cast(table)->DetailsAt(entry);
+ DCHECK(table.IsOrderedNameDictionary());
+ return OrderedNameDictionary::cast(table).DetailsAt(entry);
}
void OrderedNameDictionaryHandler::DetailsAtPut(HeapObject table, int entry,
PropertyDetails details) {
- if (table->IsSmallOrderedNameDictionary()) {
- return SmallOrderedNameDictionary::cast(table)->DetailsAtPut(entry,
- details);
+ if (table.IsSmallOrderedNameDictionary()) {
+ return SmallOrderedNameDictionary::cast(table).DetailsAtPut(entry, details);
}
- DCHECK(table->IsOrderedNameDictionary());
- OrderedNameDictionary::cast(table)->DetailsAtPut(entry, details);
+ DCHECK(table.IsOrderedNameDictionary());
+ OrderedNameDictionary::cast(table).DetailsAtPut(entry, details);
}
int OrderedNameDictionaryHandler::Hash(HeapObject table) {
- if (table->IsSmallOrderedNameDictionary()) {
- return SmallOrderedNameDictionary::cast(table)->Hash();
+ if (table.IsSmallOrderedNameDictionary()) {
+ return SmallOrderedNameDictionary::cast(table).Hash();
}
- DCHECK(table->IsOrderedNameDictionary());
- return OrderedNameDictionary::cast(table)->Hash();
+ DCHECK(table.IsOrderedNameDictionary());
+ return OrderedNameDictionary::cast(table).Hash();
}
void OrderedNameDictionaryHandler::SetHash(HeapObject table, int hash) {
- if (table->IsSmallOrderedNameDictionary()) {
- return SmallOrderedNameDictionary::cast(table)->SetHash(hash);
+ if (table.IsSmallOrderedNameDictionary()) {
+ return SmallOrderedNameDictionary::cast(table).SetHash(hash);
}
- DCHECK(table->IsOrderedNameDictionary());
- OrderedNameDictionary::cast(table)->SetHash(hash);
+ DCHECK(table.IsOrderedNameDictionary());
+ OrderedNameDictionary::cast(table).SetHash(hash);
}
Name OrderedNameDictionaryHandler::KeyAt(HeapObject table, int entry) {
- if (table->IsSmallOrderedNameDictionary()) {
- return Name::cast(SmallOrderedNameDictionary::cast(table)->KeyAt(entry));
+ if (table.IsSmallOrderedNameDictionary()) {
+ return Name::cast(SmallOrderedNameDictionary::cast(table).KeyAt(entry));
}
- return Name::cast(OrderedNameDictionary::cast(table)->KeyAt(entry));
+ return Name::cast(OrderedNameDictionary::cast(table).KeyAt(entry));
}
int OrderedNameDictionaryHandler::NumberOfElements(HeapObject table) {
- if (table->IsSmallOrderedNameDictionary()) {
- return SmallOrderedNameDictionary::cast(table)->NumberOfElements();
+ if (table.IsSmallOrderedNameDictionary()) {
+ return SmallOrderedNameDictionary::cast(table).NumberOfElements();
}
- return OrderedNameDictionary::cast(table)->NumberOfElements();
+ return OrderedNameDictionary::cast(table).NumberOfElements();
}
int OrderedNameDictionaryHandler::Capacity(HeapObject table) {
- if (table->IsSmallOrderedNameDictionary()) {
- return SmallOrderedNameDictionary::cast(table)->Capacity();
+ if (table.IsSmallOrderedNameDictionary()) {
+ return SmallOrderedNameDictionary::cast(table).Capacity();
}
- return OrderedNameDictionary::cast(table)->Capacity();
+ return OrderedNameDictionary::cast(table).Capacity();
}
Handle<HeapObject> OrderedNameDictionaryHandler::Shrink(
@@ -1233,21 +1231,21 @@
void OrderedHashTableIterator<Derived, TableType>::Transition() {
DisallowHeapAllocation no_allocation;
TableType table = TableType::cast(this->table());
- if (!table->IsObsolete()) return;
+ if (!table.IsObsolete()) return;
int index = Smi::ToInt(this->index());
- while (table->IsObsolete()) {
- TableType next_table = table->NextTable();
+ while (table.IsObsolete()) {
+ TableType next_table = table.NextTable();
if (index > 0) {
- int nod = table->NumberOfDeletedElements();
+ int nod = table.NumberOfDeletedElements();
if (nod == TableType::kClearedTableSentinel) {
index = 0;
} else {
int old_index = index;
for (int i = 0; i < nod; ++i) {
- int removed_index = table->RemovedIndexAt(i);
+ int removed_index = table.RemovedIndexAt(i);
if (removed_index >= old_index) break;
--index;
}
@@ -1270,9 +1268,9 @@
TableType table = TableType::cast(this->table());
int index = Smi::ToInt(this->index());
- int used_capacity = table->UsedCapacity();
+ int used_capacity = table.UsedCapacity();
- while (index < used_capacity && table->KeyAt(index)->IsTheHole(ro_roots)) {
+ while (index < used_capacity && table.KeyAt(index).IsTheHole(ro_roots)) {
index++;
}
diff --git a/src/objects/ordered-hash-table.h b/src/objects/ordered-hash-table.h
index 949c94f..16db568 100644
--- a/src/objects/ordered-hash-table.h
+++ b/src/objects/ordered-hash-table.h
@@ -126,7 +126,7 @@
return get(EntryToIndex(entry));
}
- bool IsObsolete() { return !get(NextTableIndex())->IsSmi(); }
+ bool IsObsolete() { return !get(NextTableIndex()).IsSmi(); }
// The next newer table. This is only valid if the table is obsolete.
Derived NextTable() { return Derived::cast(get(NextTableIndex())); }
diff --git a/src/objects/property-array-inl.h b/src/objects/property-array-inl.h
index 3e9e2aa..6bdd5e7 100644
--- a/src/objects/property-array-inl.h
+++ b/src/objects/property-array-inl.h
@@ -84,7 +84,7 @@
DisallowHeapAllocation no_gc;
ObjectSlot dst_slot(data_start() + dst_index);
- ObjectSlot src_slot(src->data_start() + src_index);
+ ObjectSlot src_slot(src.data_start() + src_index);
isolate->heap()->CopyRange(*this, dst_slot, src_slot, len, mode);
}
diff --git a/src/objects/property-descriptor.cc b/src/objects/property-descriptor.cc
index cc66471..075d0c2 100644
--- a/src/objects/property-descriptor.cc
+++ b/src/objects/property-descriptor.cc
@@ -44,21 +44,21 @@
PropertyDescriptor* desc) {
if (!obj->IsJSObject()) return false;
Map map = Handle<JSObject>::cast(obj)->map();
- if (map->instance_type() != JS_OBJECT_TYPE) return false;
- if (map->is_access_check_needed()) return false;
- if (map->prototype() != *isolate->initial_object_prototype()) return false;
+ if (map.instance_type() != JS_OBJECT_TYPE) return false;
+ if (map.is_access_check_needed()) return false;
+ if (map.prototype() != *isolate->initial_object_prototype()) return false;
// During bootstrapping, the object_function_prototype_map hasn't been
// set up yet.
if (isolate->bootstrapper()->IsActive()) return false;
- if (JSObject::cast(map->prototype())->map() !=
+ if (JSObject::cast(map.prototype()).map() !=
isolate->native_context()->object_function_prototype_map()) {
return false;
}
// TODO(jkummerow): support dictionary properties?
- if (map->is_dictionary_map()) return false;
+ if (map.is_dictionary_map()) return false;
Handle<DescriptorArray> descs =
- Handle<DescriptorArray>(map->instance_descriptors(), isolate);
- for (int i = 0; i < map->NumberOfOwnDescriptors(); i++) {
+ Handle<DescriptorArray>(map.instance_descriptors(), isolate);
+ for (int i = 0; i < map.NumberOfOwnDescriptors(); i++) {
PropertyDetails details = descs->GetDetails(i);
Name key = descs->GetKey(i);
Handle<Object> value;
diff --git a/src/objects/prototype-info-inl.h b/src/objects/prototype-info-inl.h
index 56104ba..17b4243 100644
--- a/src/objects/prototype-info-inl.h
+++ b/src/objects/prototype-info-inl.h
@@ -48,19 +48,19 @@
void PrototypeUsers::MarkSlotEmpty(WeakArrayList array, int index) {
DCHECK_GT(index, 0);
- DCHECK_LT(index, array->length());
+ DCHECK_LT(index, array.length());
// Chain the empty slots into a linked list (each empty slot contains the
// index of the next empty slot).
- array->Set(index, MaybeObject::FromObject(empty_slot_index(array)));
+ array.Set(index, MaybeObject::FromObject(empty_slot_index(array)));
set_empty_slot_index(array, index);
}
Smi PrototypeUsers::empty_slot_index(WeakArrayList array) {
- return array->Get(kEmptySlotIndex).ToSmi();
+ return array.Get(kEmptySlotIndex).ToSmi();
}
void PrototypeUsers::set_empty_slot_index(WeakArrayList array, int index) {
- array->Set(kEmptySlotIndex, MaybeObject::FromObject(Smi::FromInt(index)));
+ array.Set(kEmptySlotIndex, MaybeObject::FromObject(Smi::FromInt(index)));
}
} // namespace internal
diff --git a/src/objects/prototype-inl.h b/src/objects/prototype-inl.h
index 05cd84d..5f7c3e2 100644
--- a/src/objects/prototype-inl.h
+++ b/src/objects/prototype-inl.h
@@ -41,29 +41,29 @@
PrototypeIterator::PrototypeIterator(Isolate* isolate, Map receiver_map,
WhereToEnd where_to_end)
: isolate_(isolate),
- object_(receiver_map->GetPrototypeChainRootMap(isolate_)->prototype()),
+ object_(receiver_map.GetPrototypeChainRootMap(isolate_).prototype()),
where_to_end_(where_to_end),
- is_at_end_(object_->IsNull(isolate_)),
+ is_at_end_(object_.IsNull(isolate_)),
seen_proxies_(0) {
if (!is_at_end_ && where_to_end_ == END_AT_NON_HIDDEN) {
- DCHECK(object_->IsJSReceiver());
- Map map = JSReceiver::cast(object_)->map();
- is_at_end_ = !map->has_hidden_prototype();
+ DCHECK(object_.IsJSReceiver());
+ Map map = JSReceiver::cast(object_).map();
+ is_at_end_ = !map.has_hidden_prototype();
}
}
PrototypeIterator::PrototypeIterator(Isolate* isolate, Handle<Map> receiver_map,
WhereToEnd where_to_end)
: isolate_(isolate),
- handle_(receiver_map->GetPrototypeChainRootMap(isolate_)->prototype(),
+ handle_(receiver_map->GetPrototypeChainRootMap(isolate_).prototype(),
isolate_),
where_to_end_(where_to_end),
is_at_end_(handle_->IsNull(isolate_)),
seen_proxies_(0) {
if (!is_at_end_ && where_to_end_ == END_AT_NON_HIDDEN) {
DCHECK(handle_->IsJSReceiver());
- Map map = JSReceiver::cast(*handle_)->map();
- is_at_end_ = !map->has_hidden_prototype();
+ Map map = JSReceiver::cast(*handle_).map();
+ is_at_end_ = !map.has_hidden_prototype();
}
}
@@ -79,7 +79,7 @@
}
void PrototypeIterator::Advance() {
- if (handle_.is_null() && object_->IsJSProxy()) {
+ if (handle_.is_null() && object_.IsJSProxy()) {
is_at_end_ = true;
object_ = ReadOnlyRoots(isolate_).null_value();
return;
@@ -93,11 +93,11 @@
void PrototypeIterator::AdvanceIgnoringProxies() {
Object object = handle_.is_null() ? object_ : *handle_;
- Map map = HeapObject::cast(object)->map();
+ Map map = HeapObject::cast(object).map();
- HeapObject prototype = map->prototype();
- is_at_end_ = where_to_end_ == END_AT_NON_HIDDEN ? !map->has_hidden_prototype()
- : prototype->IsNull(isolate_);
+ HeapObject prototype = map.prototype();
+ is_at_end_ = where_to_end_ == END_AT_NON_HIDDEN ? !map.has_hidden_prototype()
+ : prototype.IsNull(isolate_);
if (handle_.is_null()) {
object_ = prototype;
@@ -107,7 +107,7 @@
}
V8_WARN_UNUSED_RESULT bool PrototypeIterator::AdvanceFollowingProxies() {
- DCHECK(!(handle_.is_null() && object_->IsJSProxy()));
+ DCHECK(!(handle_.is_null() && object_.IsJSProxy()));
if (!HasAccess()) {
// Abort the lookup if we do not have access to the current object.
handle_ = isolate_->factory()->null_value();
diff --git a/src/objects/scope-info.cc b/src/objects/scope-info.cc
index 1871a72..b688bd4 100644
--- a/src/objects/scope-info.cc
+++ b/src/objects/scope-info.cc
@@ -26,27 +26,27 @@
#ifdef DEBUG
bool ScopeInfo::Equals(ScopeInfo other) const {
- if (length() != other->length()) return false;
+ if (length() != other.length()) return false;
for (int index = 0; index < length(); ++index) {
Object entry = get(index);
- Object other_entry = other->get(index);
- if (entry->IsSmi()) {
+ Object other_entry = other.get(index);
+ if (entry.IsSmi()) {
if (entry != other_entry) return false;
} else {
- if (HeapObject::cast(entry)->map()->instance_type() !=
- HeapObject::cast(other_entry)->map()->instance_type()) {
+ if (HeapObject::cast(entry).map().instance_type() !=
+ HeapObject::cast(other_entry).map().instance_type()) {
return false;
}
- if (entry->IsString()) {
- if (!String::cast(entry)->Equals(String::cast(other_entry))) {
+ if (entry.IsString()) {
+ if (!String::cast(entry).Equals(String::cast(other_entry))) {
return false;
}
- } else if (entry->IsScopeInfo()) {
- if (!ScopeInfo::cast(entry)->Equals(ScopeInfo::cast(other_entry))) {
+ } else if (entry.IsScopeInfo()) {
+ if (!ScopeInfo::cast(entry).Equals(ScopeInfo::cast(other_entry))) {
return false;
}
- } else if (entry->IsModuleInfo()) {
- if (!ModuleInfo::cast(entry)->Equals(ModuleInfo::cast(other_entry))) {
+ } else if (entry.IsModuleInfo()) {
+ if (!ModuleInfo::cast(entry).Equals(ModuleInfo::cast(other_entry))) {
return false;
}
} else {
@@ -161,7 +161,7 @@
{
DisallowHeapAllocation no_gc;
ScopeInfo scope_info = *scope_info_handle;
- WriteBarrierMode mode = scope_info->GetWriteBarrierMode(no_gc);
+ WriteBarrierMode mode = scope_info.GetWriteBarrierMode(no_gc);
bool has_simple_parameters = false;
bool is_asm_module = false;
@@ -195,16 +195,16 @@
IsDebugEvaluateScopeField::encode(scope->is_debug_evaluate_scope()) |
ForceContextAllocationField::encode(
scope->ForceContextForLanguageMode());
- scope_info->SetFlags(flags);
+ scope_info.SetFlags(flags);
- scope_info->SetParameterCount(parameter_count);
- scope_info->SetContextLocalCount(context_local_count);
+ scope_info.SetParameterCount(parameter_count);
+ scope_info.SetContextLocalCount(context_local_count);
// Add context locals' names and info, module variables' names and info.
// Context locals are added using their index.
int context_local_base = index;
int context_local_info_base = context_local_base + context_local_count;
- int module_var_entry = scope_info->ModuleVariablesIndex();
+ int module_var_entry = scope_info.ModuleVariablesIndex();
for (Variable* var : *scope->locals()) {
switch (var->location()) {
@@ -219,23 +219,23 @@
InitFlagField::encode(var->initialization_flag()) |
MaybeAssignedFlagField::encode(var->maybe_assigned()) |
ParameterNumberField::encode(ParameterNumberField::kMax);
- scope_info->set(context_local_base + local_index, *var->name(), mode);
- scope_info->set(context_local_info_base + local_index,
- Smi::FromInt(info));
+ scope_info.set(context_local_base + local_index, *var->name(), mode);
+ scope_info.set(context_local_info_base + local_index,
+ Smi::FromInt(info));
break;
}
case VariableLocation::MODULE: {
- scope_info->set(module_var_entry + kModuleVariableNameOffset,
- *var->name(), mode);
- scope_info->set(module_var_entry + kModuleVariableIndexOffset,
- Smi::FromInt(var->index()));
+ scope_info.set(module_var_entry + kModuleVariableNameOffset,
+ *var->name(), mode);
+ scope_info.set(module_var_entry + kModuleVariableIndexOffset,
+ Smi::FromInt(var->index()));
uint32_t properties =
VariableModeField::encode(var->mode()) |
InitFlagField::encode(var->initialization_flag()) |
MaybeAssignedFlagField::encode(var->maybe_assigned()) |
ParameterNumberField::encode(ParameterNumberField::kMax);
- scope_info->set(module_var_entry + kModuleVariablePropertiesOffset,
- Smi::FromInt(properties));
+ scope_info.set(module_var_entry + kModuleVariablePropertiesOffset,
+ Smi::FromInt(properties));
module_var_entry += kModuleVariableEntryLength;
break;
}
@@ -257,9 +257,9 @@
if (parameter->location() != VariableLocation::CONTEXT) continue;
int index = parameter->index() - Context::MIN_CONTEXT_SLOTS;
int info_index = context_local_info_base + index;
- int info = Smi::ToInt(scope_info->get(info_index));
+ int info = Smi::ToInt(scope_info.get(info_index));
info = ParameterNumberField::update(info, i);
- scope_info->set(info_index, Smi::FromInt(info));
+ scope_info.set(info_index, Smi::FromInt(info));
}
// TODO(verwaest): Remove this unnecessary entry.
@@ -272,9 +272,9 @@
InitFlagField::encode(var->initialization_flag()) |
MaybeAssignedFlagField::encode(var->maybe_assigned()) |
ParameterNumberField::encode(ParameterNumberField::kMax);
- scope_info->set(context_local_base + local_index, *var->name(), mode);
- scope_info->set(context_local_info_base + local_index,
- Smi::FromInt(info));
+ scope_info.set(context_local_base + local_index, *var->name(), mode);
+ scope_info.set(context_local_info_base + local_index,
+ Smi::FromInt(info));
}
}
}
@@ -282,16 +282,16 @@
index += 2 * context_local_count;
// If the receiver is allocated, add its index.
- DCHECK_EQ(index, scope_info->ReceiverInfoIndex());
+ DCHECK_EQ(index, scope_info.ReceiverInfoIndex());
if (has_receiver) {
int var_index = scope->AsDeclarationScope()->receiver()->index();
- scope_info->set(index++, Smi::FromInt(var_index));
+ scope_info.set(index++, Smi::FromInt(var_index));
// ?? DCHECK(receiver_info != CONTEXT || var_index ==
// scope_info->ContextLength() - 1);
}
// If present, add the function variable name and its index.
- DCHECK_EQ(index, scope_info->FunctionNameInfoIndex());
+ DCHECK_EQ(index, scope_info.FunctionNameInfoIndex());
if (has_function_name) {
Variable* var = scope->AsDeclarationScope()->function_var();
int var_index = -1;
@@ -300,28 +300,28 @@
var_index = var->index();
name = *var->name();
}
- scope_info->set(index++, name, mode);
- scope_info->set(index++, Smi::FromInt(var_index));
+ scope_info.set(index++, name, mode);
+ scope_info.set(index++, Smi::FromInt(var_index));
DCHECK(function_name_info != CONTEXT ||
- var_index == scope_info->ContextLength() - 1);
+ var_index == scope_info.ContextLength() - 1);
}
- DCHECK_EQ(index, scope_info->InferredFunctionNameIndex());
+ DCHECK_EQ(index, scope_info.InferredFunctionNameIndex());
if (has_inferred_function_name) {
// The inferred function name is taken from the SFI.
index++;
}
- DCHECK_EQ(index, scope_info->PositionInfoIndex());
+ DCHECK_EQ(index, scope_info.PositionInfoIndex());
if (has_position_info) {
- scope_info->set(index++, Smi::FromInt(scope->start_position()));
- scope_info->set(index++, Smi::FromInt(scope->end_position()));
+ scope_info.set(index++, Smi::FromInt(scope->start_position()));
+ scope_info.set(index++, Smi::FromInt(scope->end_position()));
}
// If present, add the outer scope info.
- DCHECK(index == scope_info->OuterScopeInfoIndex());
+ DCHECK(index == scope_info.OuterScopeInfoIndex());
if (has_outer_scope_info) {
- scope_info->set(index++, *outer_scope.ToHandleChecked(), mode);
+ scope_info.set(index++, *outer_scope.ToHandleChecked(), mode);
}
}
@@ -575,7 +575,7 @@
void ScopeInfo::SetFunctionName(Object name) {
DCHECK(HasFunctionName());
- DCHECK(name->IsString() || name == SharedFunctionInfo::kNoSharedNameSentinel);
+ DCHECK(name.IsString() || name == SharedFunctionInfo::kNoSharedNameSentinel);
set(FunctionNameInfoIndex(), name);
}
@@ -617,12 +617,12 @@
String ScopeInfo::FunctionDebugName() const {
Object name = FunctionName();
- if (name->IsString() && String::cast(name)->length() > 0) {
+ if (name.IsString() && String::cast(name).length() > 0) {
return String::cast(name);
}
if (HasInferredFunctionName()) {
name = InferredFunctionName();
- if (name->IsString()) return String::cast(name);
+ if (name.IsString()) return String::cast(name);
}
return GetReadOnlyRoots().empty_string();
}
@@ -706,15 +706,15 @@
// variable is a compiler-introduced temporary. However, to avoid conflict
// with user declarations, the current temporaries like .generator_object and
// .result start with a dot, so we can use that as a flag. It's a hack!
- return name->length() == 0 || name->Get(0) == '.' ||
- name->Equals(name->GetReadOnlyRoots().this_string());
+ return name.length() == 0 || name.Get(0) == '.' ||
+ name.Equals(name.GetReadOnlyRoots().this_string());
}
int ScopeInfo::ModuleIndex(String name, VariableMode* mode,
InitializationFlag* init_flag,
MaybeAssignedFlag* maybe_assigned_flag) {
DisallowHeapAllocation no_gc;
- DCHECK(name->IsInternalizedString());
+ DCHECK(name.IsInternalizedString());
DCHECK_EQ(scope_type(), MODULE_SCOPE);
DCHECK_NOT_NULL(mode);
DCHECK_NOT_NULL(init_flag);
@@ -724,7 +724,7 @@
int entry = ModuleVariablesIndex();
for (int i = 0; i < module_vars_count; ++i) {
String var_name = String::cast(get(entry + kModuleVariableNameOffset));
- if (name->Equals(var_name)) {
+ if (name.Equals(var_name)) {
int index;
ModuleVariable(i, nullptr, &index, mode, init_flag, maybe_assigned_flag);
return index;
@@ -741,24 +741,24 @@
InitializationFlag* init_flag,
MaybeAssignedFlag* maybe_assigned_flag) {
DisallowHeapAllocation no_gc;
- DCHECK(name->IsInternalizedString());
+ DCHECK(name.IsInternalizedString());
DCHECK_NOT_NULL(mode);
DCHECK_NOT_NULL(init_flag);
DCHECK_NOT_NULL(maybe_assigned_flag);
- if (scope_info->length() == 0) return -1;
+ if (scope_info.length() == 0) return -1;
- int start = scope_info->ContextLocalNamesIndex();
- int end = start + scope_info->ContextLocalCount();
+ int start = scope_info.ContextLocalNamesIndex();
+ int end = start + scope_info.ContextLocalCount();
for (int i = start; i < end; ++i) {
- if (name != scope_info->get(i)) continue;
+ if (name != scope_info.get(i)) continue;
int var = i - start;
- *mode = scope_info->ContextLocalMode(var);
- *init_flag = scope_info->ContextLocalInitFlag(var);
- *maybe_assigned_flag = scope_info->ContextLocalMaybeAssignedFlag(var);
+ *mode = scope_info.ContextLocalMode(var);
+ *init_flag = scope_info.ContextLocalInitFlag(var);
+ *maybe_assigned_flag = scope_info.ContextLocalMaybeAssignedFlag(var);
int result = Context::MIN_CONTEXT_SLOTS + var;
- DCHECK_LT(result, scope_info->ContextLength());
+ DCHECK_LT(result, scope_info.ContextLength());
return result;
}
@@ -773,7 +773,7 @@
}
int ScopeInfo::FunctionContextSlotIndex(String name) const {
- DCHECK(name->IsInternalizedString());
+ DCHECK(name.IsInternalizedString());
if (length() > 0) {
if (FunctionVariableField::decode(Flags()) == CONTEXT &&
FunctionName() == name) {
@@ -954,22 +954,22 @@
}
int ModuleInfo::RegularExportCount() const {
- DCHECK_EQ(regular_exports()->length() % kRegularExportLength, 0);
- return regular_exports()->length() / kRegularExportLength;
+ DCHECK_EQ(regular_exports().length() % kRegularExportLength, 0);
+ return regular_exports().length() / kRegularExportLength;
}
String ModuleInfo::RegularExportLocalName(int i) const {
- return String::cast(regular_exports()->get(i * kRegularExportLength +
- kRegularExportLocalNameOffset));
+ return String::cast(regular_exports().get(i * kRegularExportLength +
+ kRegularExportLocalNameOffset));
}
int ModuleInfo::RegularExportCellIndex(int i) const {
- return Smi::ToInt(regular_exports()->get(i * kRegularExportLength +
- kRegularExportCellIndexOffset));
+ return Smi::ToInt(regular_exports().get(i * kRegularExportLength +
+ kRegularExportCellIndexOffset));
}
FixedArray ModuleInfo::RegularExportExportNames(int i) const {
- return FixedArray::cast(regular_exports()->get(
+ return FixedArray::cast(regular_exports().get(
i * kRegularExportLength + kRegularExportExportNamesOffset));
}
diff --git a/src/objects/script-inl.h b/src/objects/script-inl.h
index 33c794e..07450c7 100644
--- a/src/objects/script-inl.h
+++ b/src/objects/script-inl.h
@@ -47,11 +47,11 @@
this->type() == TYPE_WASM)
bool Script::is_wrapped() const {
- return eval_from_shared_or_wrapped_arguments()->IsFixedArray();
+ return eval_from_shared_or_wrapped_arguments().IsFixedArray();
}
bool Script::has_eval_from_shared() const {
- return eval_from_shared_or_wrapped_arguments()->IsSharedFunctionInfo();
+ return eval_from_shared_or_wrapped_arguments().IsSharedFunctionInfo();
}
void Script::set_eval_from_shared(SharedFunctionInfo shared,
@@ -104,13 +104,13 @@
bool Script::HasValidSource() {
Object src = this->source();
- if (!src->IsString()) return true;
+ if (!src.IsString()) return true;
String src_str = String::cast(src);
if (!StringShape(src_str).IsExternal()) return true;
- if (src_str->IsOneByteRepresentation()) {
- return ExternalOneByteString::cast(src)->resource() != nullptr;
- } else if (src_str->IsTwoByteRepresentation()) {
- return ExternalTwoByteString::cast(src)->resource() != nullptr;
+ if (src_str.IsOneByteRepresentation()) {
+ return ExternalOneByteString::cast(src).resource() != nullptr;
+ } else if (src_str.IsTwoByteRepresentation()) {
+ return ExternalTwoByteString::cast(src).resource() != nullptr;
}
return true;
}
diff --git a/src/objects/shared-function-info-inl.h b/src/objects/shared-function-info-inl.h
index e523352..0eb1c31 100644
--- a/src/objects/shared-function-info-inl.h
+++ b/src/objects/shared-function-info-inl.h
@@ -142,8 +142,8 @@
bool SharedFunctionInfo::HasSharedName() const {
Object value = name_or_scope_info();
- if (value->IsScopeInfo()) {
- return ScopeInfo::cast(value)->HasSharedFunctionName();
+ if (value.IsScopeInfo()) {
+ return ScopeInfo::cast(value).HasSharedFunctionName();
}
return value != kNoSharedNameSentinel;
}
@@ -151,9 +151,9 @@
String SharedFunctionInfo::Name() const {
if (!HasSharedName()) return GetReadOnlyRoots().empty_string();
Object value = name_or_scope_info();
- if (value->IsScopeInfo()) {
- if (ScopeInfo::cast(value)->HasFunctionName()) {
- return String::cast(ScopeInfo::cast(value)->FunctionName());
+ if (value.IsScopeInfo()) {
+ if (ScopeInfo::cast(value).HasFunctionName()) {
+ return String::cast(ScopeInfo::cast(value).FunctionName());
}
return GetReadOnlyRoots().empty_string();
}
@@ -162,10 +162,10 @@
void SharedFunctionInfo::SetName(String name) {
Object maybe_scope_info = name_or_scope_info();
- if (maybe_scope_info->IsScopeInfo()) {
- ScopeInfo::cast(maybe_scope_info)->SetFunctionName(name);
+ if (maybe_scope_info.IsScopeInfo()) {
+ ScopeInfo::cast(maybe_scope_info).SetFunctionName(name);
} else {
- DCHECK(maybe_scope_info->IsString() ||
+ DCHECK(maybe_scope_info.IsString() ||
maybe_scope_info == kNoSharedNameSentinel);
set_name_or_scope_info(name);
}
@@ -338,7 +338,7 @@
ScopeInfo SharedFunctionInfo::scope_info() const {
Object maybe_scope_info = name_or_scope_info();
- if (maybe_scope_info->IsScopeInfo()) {
+ if (maybe_scope_info.IsScopeInfo()) {
return ScopeInfo::cast(maybe_scope_info);
}
return ScopeInfo::Empty(GetIsolate());
@@ -348,14 +348,14 @@
WriteBarrierMode mode) {
// Move the existing name onto the ScopeInfo.
Object name = name_or_scope_info();
- if (name->IsScopeInfo()) {
- name = ScopeInfo::cast(name)->FunctionName();
+ if (name.IsScopeInfo()) {
+ name = ScopeInfo::cast(name).FunctionName();
}
- DCHECK(name->IsString() || name == kNoSharedNameSentinel);
+ DCHECK(name.IsString() || name == kNoSharedNameSentinel);
// Only set the function name for function scopes.
- scope_info->SetFunctionName(name);
- if (HasInferredName() && inferred_name()->length() != 0) {
- scope_info->SetInferredFunctionName(inferred_name());
+ scope_info.SetFunctionName(name);
+ if (HasInferredName() && inferred_name().length() != 0) {
+ scope_info.SetInferredFunctionName(inferred_name());
}
WRITE_FIELD(*this, kNameOrScopeInfoOffset, scope_info);
CONDITIONAL_WRITE_BARRIER(*this, kNameOrScopeInfoOffset, scope_info, mode);
@@ -373,31 +373,31 @@
bool SharedFunctionInfo::HasOuterScopeInfo() const {
ScopeInfo outer_info;
if (!is_compiled()) {
- if (!outer_scope_info()->IsScopeInfo()) return false;
+ if (!outer_scope_info().IsScopeInfo()) return false;
outer_info = ScopeInfo::cast(outer_scope_info());
} else {
- if (!scope_info()->HasOuterScopeInfo()) return false;
- outer_info = scope_info()->OuterScopeInfo();
+ if (!scope_info().HasOuterScopeInfo()) return false;
+ outer_info = scope_info().OuterScopeInfo();
}
- return outer_info->length() > 0;
+ return outer_info.length() > 0;
}
ScopeInfo SharedFunctionInfo::GetOuterScopeInfo() const {
DCHECK(HasOuterScopeInfo());
if (!is_compiled()) return ScopeInfo::cast(outer_scope_info());
- return scope_info()->OuterScopeInfo();
+ return scope_info().OuterScopeInfo();
}
void SharedFunctionInfo::set_outer_scope_info(HeapObject value,
WriteBarrierMode mode) {
DCHECK(!is_compiled());
- DCHECK(raw_outer_scope_info_or_feedback_metadata()->IsTheHole());
- DCHECK(value->IsScopeInfo() || value->IsTheHole());
+ DCHECK(raw_outer_scope_info_or_feedback_metadata().IsTheHole());
+ DCHECK(value.IsScopeInfo() || value.IsTheHole());
set_raw_outer_scope_info_or_feedback_metadata(value, mode);
}
bool SharedFunctionInfo::HasFeedbackMetadata() const {
- return raw_outer_scope_info_or_feedback_metadata()->IsFeedbackMetadata();
+ return raw_outer_scope_info_or_feedback_metadata().IsFeedbackMetadata();
}
FeedbackMetadata SharedFunctionInfo::feedback_metadata() const {
@@ -408,14 +408,14 @@
void SharedFunctionInfo::set_feedback_metadata(FeedbackMetadata value,
WriteBarrierMode mode) {
DCHECK(!HasFeedbackMetadata());
- DCHECK(value->IsFeedbackMetadata());
+ DCHECK(value.IsFeedbackMetadata());
set_raw_outer_scope_info_or_feedback_metadata(value, mode);
}
bool SharedFunctionInfo::is_compiled() const {
Object data = function_data();
return data != Smi::FromEnum(Builtins::kCompileLazy) &&
- !data->IsUncompiledData();
+ !data.IsUncompiledData();
}
IsCompiledScope SharedFunctionInfo::is_compiled_scope() const {
@@ -424,19 +424,19 @@
IsCompiledScope::IsCompiledScope(const SharedFunctionInfo shared,
Isolate* isolate)
- : retain_bytecode_(shared->HasBytecodeArray()
- ? handle(shared->GetBytecodeArray(), isolate)
+ : retain_bytecode_(shared.HasBytecodeArray()
+ ? handle(shared.GetBytecodeArray(), isolate)
: MaybeHandle<BytecodeArray>()),
- is_compiled_(shared->is_compiled()) {
+ is_compiled_(shared.is_compiled()) {
DCHECK_IMPLIES(!retain_bytecode_.is_null(), is_compiled());
}
bool SharedFunctionInfo::has_simple_parameters() {
- return scope_info()->HasSimpleParameters();
+ return scope_info().HasSimpleParameters();
}
bool SharedFunctionInfo::IsApiFunction() const {
- return function_data()->IsFunctionTemplateInfo();
+ return function_data().IsFunctionTemplateInfo();
}
FunctionTemplateInfo SharedFunctionInfo::get_api_func_data() {
@@ -445,40 +445,40 @@
}
bool SharedFunctionInfo::HasBytecodeArray() const {
- return function_data()->IsBytecodeArray() ||
- function_data()->IsInterpreterData();
+ return function_data().IsBytecodeArray() ||
+ function_data().IsInterpreterData();
}
BytecodeArray SharedFunctionInfo::GetBytecodeArray() const {
DCHECK(HasBytecodeArray());
- if (HasDebugInfo() && GetDebugInfo()->HasInstrumentedBytecodeArray()) {
- return GetDebugInfo()->OriginalBytecodeArray();
- } else if (function_data()->IsBytecodeArray()) {
+ if (HasDebugInfo() && GetDebugInfo().HasInstrumentedBytecodeArray()) {
+ return GetDebugInfo().OriginalBytecodeArray();
+ } else if (function_data().IsBytecodeArray()) {
return BytecodeArray::cast(function_data());
} else {
- DCHECK(function_data()->IsInterpreterData());
- return InterpreterData::cast(function_data())->bytecode_array();
+ DCHECK(function_data().IsInterpreterData());
+ return InterpreterData::cast(function_data()).bytecode_array();
}
}
BytecodeArray SharedFunctionInfo::GetDebugBytecodeArray() const {
DCHECK(HasBytecodeArray());
- DCHECK(HasDebugInfo() && GetDebugInfo()->HasInstrumentedBytecodeArray());
- if (function_data()->IsBytecodeArray()) {
+ DCHECK(HasDebugInfo() && GetDebugInfo().HasInstrumentedBytecodeArray());
+ if (function_data().IsBytecodeArray()) {
return BytecodeArray::cast(function_data());
} else {
- DCHECK(function_data()->IsInterpreterData());
- return InterpreterData::cast(function_data())->bytecode_array();
+ DCHECK(function_data().IsInterpreterData());
+ return InterpreterData::cast(function_data()).bytecode_array();
}
}
void SharedFunctionInfo::SetDebugBytecodeArray(BytecodeArray bytecode) {
DCHECK(HasBytecodeArray());
- if (function_data()->IsBytecodeArray()) {
+ if (function_data().IsBytecodeArray()) {
set_function_data(bytecode);
} else {
- DCHECK(function_data()->IsInterpreterData());
- interpreter_data()->set_bytecode_array(bytecode);
+ DCHECK(function_data().IsInterpreterData());
+ interpreter_data().set_bytecode_array(bytecode);
}
}
@@ -500,22 +500,22 @@
// check if it is old. Note, this is done this way since this function can be
// called by the concurrent marker.
Object data = function_data();
- if (!data->IsBytecodeArray()) return false;
+ if (!data.IsBytecodeArray()) return false;
if (mode == BytecodeFlushMode::kStressFlushBytecode) return true;
BytecodeArray bytecode = BytecodeArray::cast(data);
- return bytecode->IsOld();
+ return bytecode.IsOld();
}
Code SharedFunctionInfo::InterpreterTrampoline() const {
DCHECK(HasInterpreterData());
- return interpreter_data()->interpreter_trampoline();
+ return interpreter_data().interpreter_trampoline();
}
bool SharedFunctionInfo::HasInterpreterData() const {
- return function_data()->IsInterpreterData();
+ return function_data().IsInterpreterData();
}
InterpreterData SharedFunctionInfo::interpreter_data() const {
@@ -530,7 +530,7 @@
}
bool SharedFunctionInfo::HasAsmWasmData() const {
- return function_data()->IsAsmWasmData();
+ return function_data().IsAsmWasmData();
}
AsmWasmData SharedFunctionInfo::asm_wasm_data() const {
@@ -545,7 +545,7 @@
}
bool SharedFunctionInfo::HasBuiltinId() const {
- return function_data()->IsSmi();
+ return function_data().IsSmi();
}
int SharedFunctionInfo::builtin_id() const {
@@ -561,7 +561,7 @@
}
bool SharedFunctionInfo::HasUncompiledData() const {
- return function_data()->IsUncompiledData();
+ return function_data().IsUncompiledData();
}
UncompiledData SharedFunctionInfo::uncompiled_data() const {
@@ -571,12 +571,12 @@
void SharedFunctionInfo::set_uncompiled_data(UncompiledData uncompiled_data) {
DCHECK(function_data() == Smi::FromEnum(Builtins::kCompileLazy));
- DCHECK(uncompiled_data->IsUncompiledData());
+ DCHECK(uncompiled_data.IsUncompiledData());
set_function_data(uncompiled_data);
}
bool SharedFunctionInfo::HasUncompiledDataWithPreparseData() const {
- return function_data()->IsUncompiledDataWithPreparseData();
+ return function_data().IsUncompiledDataWithPreparseData();
}
UncompiledDataWithPreparseData
@@ -588,13 +588,12 @@
void SharedFunctionInfo::set_uncompiled_data_with_preparse_data(
UncompiledDataWithPreparseData uncompiled_data_with_preparse_data) {
DCHECK(function_data() == Smi::FromEnum(Builtins::kCompileLazy));
- DCHECK(
- uncompiled_data_with_preparse_data->IsUncompiledDataWithPreparseData());
+ DCHECK(uncompiled_data_with_preparse_data.IsUncompiledDataWithPreparseData());
set_function_data(uncompiled_data_with_preparse_data);
}
bool SharedFunctionInfo::HasUncompiledDataWithoutPreparseData() const {
- return function_data()->IsUncompiledDataWithoutPreparseData();
+ return function_data().IsUncompiledDataWithoutPreparseData();
}
void SharedFunctionInfo::ClearPreparseData() {
@@ -613,12 +612,12 @@
UncompiledDataWithPreparseData::kSize);
STATIC_ASSERT(UncompiledDataWithoutPreparseData::kSize ==
UncompiledData::kSize);
- data->synchronized_set_map(
+ data.synchronized_set_map(
GetReadOnlyRoots().uncompiled_data_without_preparse_data_map());
// Fill the remaining space with filler.
heap->CreateFillerObjectAt(
- data->address() + UncompiledDataWithoutPreparseData::kSize,
+ data.address() + UncompiledDataWithoutPreparseData::kSize,
UncompiledDataWithPreparseData::kSize -
UncompiledDataWithoutPreparseData::kSize,
ClearRecordedSlots::kNo);
@@ -633,13 +632,13 @@
int end_position, int function_literal_id,
std::function<void(HeapObject object, ObjectSlot slot, HeapObject target)>
gc_notify_updated_slot) {
- data->set_inferred_name(inferred_name);
+ data.set_inferred_name(inferred_name);
gc_notify_updated_slot(
- data, data->RawField(UncompiledData::kInferredNameOffset), inferred_name);
- data->set_start_position(start_position);
- data->set_end_position(end_position);
- data->set_function_literal_id(function_literal_id);
- data->clear_padding();
+ data, data.RawField(UncompiledData::kInferredNameOffset), inferred_name);
+ data.set_start_position(start_position);
+ data.set_end_position(end_position);
+ data.set_function_literal_id(function_literal_id);
+ data.clear_padding();
}
void UncompiledDataWithPreparseData::Initialize(
@@ -650,9 +649,9 @@
gc_notify_updated_slot) {
UncompiledData::Initialize(data, inferred_name, start_position, end_position,
function_literal_id, gc_notify_updated_slot);
- data->set_preparse_data(scope_data);
+ data.set_preparse_data(scope_data);
gc_notify_updated_slot(
- data, data->RawField(UncompiledDataWithPreparseData::kPreparseDataOffset),
+ data, data.RawField(UncompiledDataWithPreparseData::kPreparseDataOffset),
scope_data);
}
@@ -661,32 +660,32 @@
}
bool SharedFunctionInfo::HasWasmExportedFunctionData() const {
- return function_data()->IsWasmExportedFunctionData();
+ return function_data().IsWasmExportedFunctionData();
}
bool SharedFunctionInfo::HasWasmCapiFunctionData() const {
- return function_data()->IsWasmCapiFunctionData();
+ return function_data().IsWasmCapiFunctionData();
}
Object SharedFunctionInfo::script() const {
Object maybe_script = script_or_debug_info();
- if (maybe_script->IsDebugInfo()) {
- return DebugInfo::cast(maybe_script)->script();
+ if (maybe_script.IsDebugInfo()) {
+ return DebugInfo::cast(maybe_script).script();
}
return maybe_script;
}
void SharedFunctionInfo::set_script(Object script) {
Object maybe_debug_info = script_or_debug_info();
- if (maybe_debug_info->IsDebugInfo()) {
- DebugInfo::cast(maybe_debug_info)->set_script(script);
+ if (maybe_debug_info.IsDebugInfo()) {
+ DebugInfo::cast(maybe_debug_info).set_script(script);
} else {
set_script_or_debug_info(script);
}
}
bool SharedFunctionInfo::HasDebugInfo() const {
- return script_or_debug_info()->IsDebugInfo();
+ return script_or_debug_info().IsDebugInfo();
}
DebugInfo SharedFunctionInfo::GetDebugInfo() const {
@@ -696,37 +695,37 @@
void SharedFunctionInfo::SetDebugInfo(DebugInfo debug_info) {
DCHECK(!HasDebugInfo());
- DCHECK_EQ(debug_info->script(), script_or_debug_info());
+ DCHECK_EQ(debug_info.script(), script_or_debug_info());
set_script_or_debug_info(debug_info);
}
bool SharedFunctionInfo::HasInferredName() {
Object scope_info = name_or_scope_info();
- if (scope_info->IsScopeInfo()) {
- return ScopeInfo::cast(scope_info)->HasInferredFunctionName();
+ if (scope_info.IsScopeInfo()) {
+ return ScopeInfo::cast(scope_info).HasInferredFunctionName();
}
return HasUncompiledData();
}
String SharedFunctionInfo::inferred_name() {
Object maybe_scope_info = name_or_scope_info();
- if (maybe_scope_info->IsScopeInfo()) {
+ if (maybe_scope_info.IsScopeInfo()) {
ScopeInfo scope_info = ScopeInfo::cast(maybe_scope_info);
- if (scope_info->HasInferredFunctionName()) {
- Object name = scope_info->InferredFunctionName();
- if (name->IsString()) return String::cast(name);
+ if (scope_info.HasInferredFunctionName()) {
+ Object name = scope_info.InferredFunctionName();
+ if (name.IsString()) return String::cast(name);
}
} else if (HasUncompiledData()) {
- return uncompiled_data()->inferred_name();
+ return uncompiled_data().inferred_name();
}
return GetReadOnlyRoots().empty_string();
}
bool SharedFunctionInfo::IsUserJavaScript() {
Object script_obj = script();
- if (script_obj->IsUndefined()) return false;
+ if (script_obj.IsUndefined()) return false;
Script script = Script::cast(script_obj);
- return script->IsUserJavaScript();
+ return script.IsUserJavaScript();
}
bool SharedFunctionInfo::IsSubjectToDebugging() {
diff --git a/src/objects/slots-inl.h b/src/objects/slots-inl.h
index 1650e47..48e4359 100644
--- a/src/objects/slots-inl.h
+++ b/src/objects/slots-inl.h
@@ -31,7 +31,7 @@
const Object FullObjectSlot::operator*() const { return Object(*location()); }
-void FullObjectSlot::store(Object value) const { *location() = value->ptr(); }
+void FullObjectSlot::store(Object value) const { *location() = value.ptr(); }
Object FullObjectSlot::Acquire_Load() const {
return Object(base::AsAtomicPointer::Acquire_Load(location()));
@@ -42,16 +42,16 @@
}
void FullObjectSlot::Relaxed_Store(Object value) const {
- base::AsAtomicPointer::Relaxed_Store(location(), value->ptr());
+ base::AsAtomicPointer::Relaxed_Store(location(), value.ptr());
}
void FullObjectSlot::Release_Store(Object value) const {
- base::AsAtomicPointer::Release_Store(location(), value->ptr());
+ base::AsAtomicPointer::Release_Store(location(), value.ptr());
}
Object FullObjectSlot::Release_CompareAndSwap(Object old, Object target) const {
Address result = base::AsAtomicPointer::Release_CompareAndSwap(
- location(), old->ptr(), target->ptr());
+ location(), old.ptr(), target.ptr());
return Object(result);
}
@@ -99,7 +99,7 @@
}
void FullHeapObjectSlot::StoreHeapObject(HeapObject value) const {
- *location() = value->ptr();
+ *location() = value.ptr();
}
//
diff --git a/src/objects/stack-frame-info.cc b/src/objects/stack-frame-info.cc
index 21431cd..d8d9770 100644
--- a/src/objects/stack-frame-info.cc
+++ b/src/objects/stack-frame-info.cc
@@ -10,59 +10,59 @@
namespace internal {
int StackTraceFrame::GetLineNumber(Handle<StackTraceFrame> frame) {
- if (frame->frame_info()->IsUndefined()) InitializeFrameInfo(frame);
+ if (frame->frame_info().IsUndefined()) InitializeFrameInfo(frame);
int line = GetFrameInfo(frame)->line_number();
return line != StackFrameBase::kNone ? line : Message::kNoLineNumberInfo;
}
int StackTraceFrame::GetColumnNumber(Handle<StackTraceFrame> frame) {
- if (frame->frame_info()->IsUndefined()) InitializeFrameInfo(frame);
+ if (frame->frame_info().IsUndefined()) InitializeFrameInfo(frame);
int column = GetFrameInfo(frame)->column_number();
return column != StackFrameBase::kNone ? column : Message::kNoColumnInfo;
}
int StackTraceFrame::GetScriptId(Handle<StackTraceFrame> frame) {
- if (frame->frame_info()->IsUndefined()) InitializeFrameInfo(frame);
+ if (frame->frame_info().IsUndefined()) InitializeFrameInfo(frame);
int id = GetFrameInfo(frame)->script_id();
return id != StackFrameBase::kNone ? id : Message::kNoScriptIdInfo;
}
Handle<Object> StackTraceFrame::GetFileName(Handle<StackTraceFrame> frame) {
- if (frame->frame_info()->IsUndefined()) InitializeFrameInfo(frame);
+ if (frame->frame_info().IsUndefined()) InitializeFrameInfo(frame);
auto name = GetFrameInfo(frame)->script_name();
return handle(name, frame->GetIsolate());
}
Handle<Object> StackTraceFrame::GetScriptNameOrSourceUrl(
Handle<StackTraceFrame> frame) {
- if (frame->frame_info()->IsUndefined()) InitializeFrameInfo(frame);
+ if (frame->frame_info().IsUndefined()) InitializeFrameInfo(frame);
auto name = GetFrameInfo(frame)->script_name_or_source_url();
return handle(name, frame->GetIsolate());
}
Handle<Object> StackTraceFrame::GetFunctionName(Handle<StackTraceFrame> frame) {
- if (frame->frame_info()->IsUndefined()) InitializeFrameInfo(frame);
+ if (frame->frame_info().IsUndefined()) InitializeFrameInfo(frame);
auto name = GetFrameInfo(frame)->function_name();
return handle(name, frame->GetIsolate());
}
bool StackTraceFrame::IsEval(Handle<StackTraceFrame> frame) {
- if (frame->frame_info()->IsUndefined()) InitializeFrameInfo(frame);
+ if (frame->frame_info().IsUndefined()) InitializeFrameInfo(frame);
return GetFrameInfo(frame)->is_eval();
}
bool StackTraceFrame::IsConstructor(Handle<StackTraceFrame> frame) {
- if (frame->frame_info()->IsUndefined()) InitializeFrameInfo(frame);
+ if (frame->frame_info().IsUndefined()) InitializeFrameInfo(frame);
return GetFrameInfo(frame)->is_constructor();
}
bool StackTraceFrame::IsWasm(Handle<StackTraceFrame> frame) {
- if (frame->frame_info()->IsUndefined()) InitializeFrameInfo(frame);
+ if (frame->frame_info().IsUndefined()) InitializeFrameInfo(frame);
return GetFrameInfo(frame)->is_wasm();
}
bool StackTraceFrame::IsUserJavaScript(Handle<StackTraceFrame> frame) {
- if (frame->frame_info()->IsUndefined()) InitializeFrameInfo(frame);
+ if (frame->frame_info().IsUndefined()) InitializeFrameInfo(frame);
return GetFrameInfo(frame)->is_user_java_script();
}
diff --git a/src/objects/string-comparator.cc b/src/objects/string-comparator.cc
index b29f9c3..6f517ed 100644
--- a/src/objects/string-comparator.cc
+++ b/src/objects/string-comparator.cc
@@ -40,7 +40,7 @@
}
bool StringComparator::Equals(String string_1, String string_2) {
- int length = string_1->length();
+ int length = string_1.length();
state_1_.Init(string_1);
state_2_.Init(string_2);
while (true) {
diff --git a/src/objects/string-inl.h b/src/objects/string-inl.h
index 5e50ab9..f83d68f 100644
--- a/src/objects/string-inl.h
+++ b/src/objects/string-inl.h
@@ -58,13 +58,12 @@
CAST_ACCESSOR(String)
CAST_ACCESSOR(ThinString)
-StringShape::StringShape(const String str)
- : type_(str->map()->instance_type()) {
+StringShape::StringShape(const String str) : type_(str.map().instance_type()) {
set_valid();
DCHECK_EQ(type_ & kIsNotStringMask, kStringTag);
}
-StringShape::StringShape(Map map) : type_(map->instance_type()) {
+StringShape::StringShape(Map map) : type_(map.instance_type()) {
set_valid();
DCHECK_EQ(type_ & kIsNotStringMask, kStringTag);
}
@@ -149,18 +148,18 @@
STATIC_ASSERT(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);
bool String::IsOneByteRepresentation() const {
- uint32_t type = map()->instance_type();
+ uint32_t type = map().instance_type();
return (type & kStringEncodingMask) == kOneByteStringTag;
}
bool String::IsTwoByteRepresentation() const {
- uint32_t type = map()->instance_type();
+ uint32_t type = map().instance_type();
return (type & kStringEncodingMask) == kTwoByteStringTag;
}
bool String::IsOneByteRepresentationUnderneath(String string) {
while (true) {
- uint32_t type = string.map()->instance_type();
+ uint32_t type = string.map().instance_type();
STATIC_ASSERT(kIsIndirectStringTag != 0);
STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
DCHECK(string.IsFlat());
@@ -211,7 +210,7 @@
bool IsMatch(String s) override {
DisallowHeapAllocation no_gc;
- if (s->IsOneByteRepresentation()) {
+ if (s.IsOneByteRepresentation()) {
const uint8_t* chars = s.GetChars<uint8_t>(no_gc);
return CompareChars(chars, chars_.begin(), chars_.length()) == 0;
}
@@ -310,7 +309,7 @@
bool String::Equals(String other) {
if (other == *this) return true;
- if (this->IsInternalizedString() && other->IsInternalizedString()) {
+ if (this->IsInternalizedString() && other.IsInternalizedString()) {
return false;
}
return SlowEquals(other);
@@ -352,22 +351,22 @@
DCHECK(index >= 0 && index < length());
switch (StringShape(*this).full_representation_tag()) {
case kSeqStringTag | kOneByteStringTag:
- return SeqOneByteString::cast(*this)->Get(index);
+ return SeqOneByteString::cast(*this).Get(index);
case kSeqStringTag | kTwoByteStringTag:
- return SeqTwoByteString::cast(*this)->Get(index);
+ return SeqTwoByteString::cast(*this).Get(index);
case kConsStringTag | kOneByteStringTag:
case kConsStringTag | kTwoByteStringTag:
- return ConsString::cast(*this)->Get(index);
+ return ConsString::cast(*this).Get(index);
case kExternalStringTag | kOneByteStringTag:
- return ExternalOneByteString::cast(*this)->Get(index);
+ return ExternalOneByteString::cast(*this).Get(index);
case kExternalStringTag | kTwoByteStringTag:
- return ExternalTwoByteString::cast(*this)->Get(index);
+ return ExternalTwoByteString::cast(*this).Get(index);
case kSlicedStringTag | kOneByteStringTag:
case kSlicedStringTag | kTwoByteStringTag:
- return SlicedString::cast(*this)->Get(index);
+ return SlicedString::cast(*this).Get(index);
case kThinStringTag | kOneByteStringTag:
case kThinStringTag | kTwoByteStringTag:
- return ThinString::cast(*this)->Get(index);
+ return ThinString::cast(*this).Get(index);
default:
break;
}
@@ -380,13 +379,13 @@
DCHECK(StringShape(*this).IsSequential());
return this->IsOneByteRepresentation()
- ? SeqOneByteString::cast(*this)->SeqOneByteStringSet(index, value)
- : SeqTwoByteString::cast(*this)->SeqTwoByteStringSet(index, value);
+ ? SeqOneByteString::cast(*this).SeqOneByteStringSet(index, value)
+ : SeqTwoByteString::cast(*this).SeqTwoByteStringSet(index, value);
}
bool String::IsFlat() {
if (!StringShape(*this).IsCons()) return true;
- return ConsString::cast(*this)->second()->length() == 0;
+ return ConsString::cast(*this).second().length() == 0;
}
String String::GetUnderlying() {
@@ -407,40 +406,40 @@
const int offset) {
DisallowHeapAllocation no_gc;
int slice_offset = offset;
- const int length = string->length();
+ const int length = string.length();
DCHECK(offset <= length);
while (true) {
- int32_t type = string->map()->instance_type();
+ int32_t type = string.map().instance_type();
switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
case kSeqStringTag | kOneByteStringTag:
visitor->VisitOneByteString(
- SeqOneByteString::cast(string)->GetChars(no_gc) + slice_offset,
+ SeqOneByteString::cast(string).GetChars(no_gc) + slice_offset,
length - offset);
return ConsString();
case kSeqStringTag | kTwoByteStringTag:
visitor->VisitTwoByteString(
- SeqTwoByteString::cast(string)->GetChars(no_gc) + slice_offset,
+ SeqTwoByteString::cast(string).GetChars(no_gc) + slice_offset,
length - offset);
return ConsString();
case kExternalStringTag | kOneByteStringTag:
visitor->VisitOneByteString(
- ExternalOneByteString::cast(string)->GetChars() + slice_offset,
+ ExternalOneByteString::cast(string).GetChars() + slice_offset,
length - offset);
return ConsString();
case kExternalStringTag | kTwoByteStringTag:
visitor->VisitTwoByteString(
- ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
+ ExternalTwoByteString::cast(string).GetChars() + slice_offset,
length - offset);
return ConsString();
case kSlicedStringTag | kOneByteStringTag:
case kSlicedStringTag | kTwoByteStringTag: {
SlicedString slicedString = SlicedString::cast(string);
- slice_offset += slicedString->offset();
- string = slicedString->parent();
+ slice_offset += slicedString.offset();
+ string = slicedString.parent();
continue;
}
@@ -450,7 +449,7 @@
case kThinStringTag | kOneByteStringTag:
case kThinStringTag | kTwoByteStringTag:
- string = ThinString::cast(string)->actual();
+ string = ThinString::cast(string).actual();
continue;
default:
@@ -534,7 +533,7 @@
void SlicedString::set_parent(Isolate* isolate, String parent,
WriteBarrierMode mode) {
- DCHECK(parent->IsSeqString() || parent->IsExternalString());
+ DCHECK(parent.IsSeqString() || parent.IsExternalString());
WRITE_FIELD(*this, kParentOffset, parent);
CONDITIONAL_WRITE_BARRIER(*this, kParentOffset, parent, mode);
}
@@ -574,7 +573,7 @@
}
bool ExternalString::is_uncached() const {
- InstanceType type = map()->instance_type();
+ InstanceType type = map().instance_type();
return (type & kUncachedExternalStringMask) == kUncachedExternalStringTag;
}
@@ -585,9 +584,9 @@
void ExternalString::set_address_as_resource(Address address) {
WriteField<Address>(kResourceOffset, address);
if (IsExternalOneByteString()) {
- ExternalOneByteString::cast(*this)->update_data_cache();
+ ExternalOneByteString::cast(*this).update_data_cache();
} else {
- ExternalTwoByteString::cast(*this)->update_data_cache();
+ ExternalTwoByteString::cast(*this).update_data_cache();
}
}
@@ -767,7 +766,7 @@
int length)
: string_(string),
first_(first),
- length_(length == -1 ? string->length() : length),
+ length_(length == -1 ? string.length() : length),
no_gc_(no_gc) {}
class SubStringRange::iterator final {
@@ -797,7 +796,7 @@
friend class String;
friend class SubStringRange;
iterator(String from, int offset, const DisallowHeapAllocation& no_gc)
- : content_(from->GetFlatContent(no_gc)), offset_(offset) {}
+ : content_(from.GetFlatContent(no_gc)), offset_(offset) {}
String::FlatContent content_;
int offset_;
};
diff --git a/src/objects/string-table-inl.h b/src/objects/string-table-inl.h
index b0e88ae..1b7b7d1 100644
--- a/src/objects/string-table-inl.h
+++ b/src/objects/string-table-inl.h
@@ -28,16 +28,16 @@
}
bool StringSetShape::IsMatch(String key, Object value) {
- DCHECK(value->IsString());
- return key->Equals(String::cast(value));
+ DCHECK(value.IsString());
+ return key.Equals(String::cast(value));
}
uint32_t StringSetShape::Hash(Isolate* isolate, String key) {
- return key->Hash();
+ return key.Hash();
}
uint32_t StringSetShape::HashForObject(ReadOnlyRoots roots, Object object) {
- return String::cast(object)->Hash();
+ return String::cast(object).Hash();
}
bool StringTableShape::IsMatch(Key key, Object value) {
@@ -69,7 +69,7 @@
}
uint32_t StringTableShape::HashForObject(ReadOnlyRoots roots, Object object) {
- return String::cast(object)->Hash();
+ return String::cast(object).Hash();
}
RootIndex StringTableShape::GetMapRootIndex() {
diff --git a/src/objects/string.cc b/src/objects/string.cc
index 673d20e..732dd38 100644
--- a/src/objects/string.cc
+++ b/src/objects/string.cc
@@ -25,14 +25,14 @@
Handle<String> String::SlowFlatten(Isolate* isolate, Handle<ConsString> cons,
AllocationType allocation) {
- DCHECK_NE(cons->second()->length(), 0);
+ DCHECK_NE(cons->second().length(), 0);
// TurboFan can create cons strings with empty first parts.
- while (cons->first()->length() == 0) {
+ while (cons->first().length() == 0) {
// We do not want to call this function recursively. Therefore we call
// String::Flatten only in those cases where String::SlowFlatten is not
// called again.
- if (cons->second()->IsConsString() && !cons->second()->IsFlat()) {
+ if (cons->second().IsConsString() && !cons->second().IsFlat()) {
cons = handle(ConsString::cast(cons->second()), isolate);
} else {
return String::Flatten(isolate, handle(cons->second(), isolate));
@@ -73,16 +73,16 @@
void MigrateExternalStringResource(Isolate* isolate, String from, String to) {
StringClass cast_from = StringClass::cast(from);
StringClass cast_to = StringClass::cast(to);
- const typename StringClass::Resource* to_resource = cast_to->resource();
+ const typename StringClass::Resource* to_resource = cast_to.resource();
if (to_resource == nullptr) {
// |to| is a just-created internalized copy of |from|. Migrate the resource.
- cast_to->SetResource(isolate, cast_from->resource());
+ cast_to.SetResource(isolate, cast_from.resource());
// Zap |from|'s resource pointer to reflect the fact that |from| has
// relinquished ownership of its resource.
isolate->heap()->UpdateExternalString(
- from, ExternalString::cast(from)->ExternalPayloadSize(), 0);
- cast_from->SetResource(isolate, nullptr);
- } else if (to_resource != cast_from->resource()) {
+ from, ExternalString::cast(from).ExternalPayloadSize(), 0);
+ cast_from.SetResource(isolate, nullptr);
+ } else if (to_resource != cast_from.resource()) {
// |to| already existed and has its own resource. Finalize |from|.
isolate->heap()->FinalizeExternalString(from);
}
@@ -93,13 +93,13 @@
void String::MakeThin(Isolate* isolate, String internalized) {
DisallowHeapAllocation no_gc;
DCHECK_NE(*this, internalized);
- DCHECK(internalized->IsInternalizedString());
+ DCHECK(internalized.IsInternalizedString());
if (this->IsExternalString()) {
- if (internalized->IsExternalOneByteString()) {
+ if (internalized.IsExternalOneByteString()) {
MigrateExternalStringResource<ExternalOneByteString>(isolate, *this,
internalized);
- } else if (internalized->IsExternalTwoByteString()) {
+ } else if (internalized.IsExternalTwoByteString()) {
MigrateExternalStringResource<ExternalTwoByteString>(isolate, *this,
internalized);
} else {
@@ -112,14 +112,14 @@
int old_size = this->Size();
isolate->heap()->NotifyObjectLayoutChange(*this, old_size, no_gc);
- bool one_byte = internalized->IsOneByteRepresentation();
+ bool one_byte = internalized.IsOneByteRepresentation();
Handle<Map> map = one_byte ? isolate->factory()->thin_one_byte_string_map()
: isolate->factory()->thin_string_map();
DCHECK_GE(old_size, ThinString::kSize);
this->synchronized_set_map(*map);
ThinString thin = ThinString::cast(*this);
- thin->set_actual(internalized);
- Address thin_end = thin->address() + ThinString::kSize;
+ thin.set_actual(internalized);
+ Address thin_end = thin.address() + ThinString::kSize;
int size_delta = old_size - ThinString::kSize;
if (size_delta != 0) {
Heap* heap = isolate->heap();
@@ -188,9 +188,9 @@
this->synchronized_set_map(new_map);
ExternalTwoByteString self = ExternalTwoByteString::cast(*this);
- self->SetResource(isolate, resource);
+ self.SetResource(isolate, resource);
heap->RegisterExternalString(*this);
- if (is_internalized) self->Hash(); // Force regeneration of the hash value.
+ if (is_internalized) self.Hash(); // Force regeneration of the hash value.
return true;
}
@@ -261,15 +261,15 @@
this->synchronized_set_map(new_map);
ExternalOneByteString self = ExternalOneByteString::cast(*this);
- self->SetResource(isolate, resource);
+ self.SetResource(isolate, resource);
heap->RegisterExternalString(*this);
- if (is_internalized) self->Hash(); // Force regeneration of the hash value.
+ if (is_internalized) self.Hash(); // Force regeneration of the hash value.
return true;
}
bool String::SupportsExternalization() {
if (this->IsThinString()) {
- return i::ThinString::cast(*this)->actual()->SupportsExternalization();
+ return i::ThinString::cast(*this).actual().SupportsExternalization();
}
Isolate* isolate;
@@ -495,22 +495,22 @@
int offset = 0;
if (shape.representation_tag() == kConsStringTag) {
ConsString cons = ConsString::cast(string);
- if (cons->second()->length() != 0) {
+ if (cons.second().length() != 0) {
return FlatContent();
}
- string = cons->first();
+ string = cons.first();
shape = StringShape(string);
} else if (shape.representation_tag() == kSlicedStringTag) {
SlicedString slice = SlicedString::cast(string);
- offset = slice->offset();
- string = slice->parent();
+ offset = slice.offset();
+ string = slice.parent();
shape = StringShape(string);
DCHECK(shape.representation_tag() != kConsStringTag &&
shape.representation_tag() != kSlicedStringTag);
}
if (shape.representation_tag() == kThinStringTag) {
ThinString thin = ThinString::cast(string);
- string = thin->actual();
+ string = thin.actual();
shape = StringShape(string);
DCHECK(!shape.IsCons());
DCHECK(!shape.IsSliced());
@@ -518,18 +518,18 @@
if (shape.encoding_tag() == kOneByteStringTag) {
const uint8_t* start;
if (shape.representation_tag() == kSeqStringTag) {
- start = SeqOneByteString::cast(string)->GetChars(no_gc);
+ start = SeqOneByteString::cast(string).GetChars(no_gc);
} else {
- start = ExternalOneByteString::cast(string)->GetChars();
+ start = ExternalOneByteString::cast(string).GetChars();
}
return FlatContent(start + offset, length);
} else {
DCHECK_EQ(shape.encoding_tag(), kTwoByteStringTag);
const uc16* start;
if (shape.representation_tag() == kSeqStringTag) {
- start = SeqTwoByteString::cast(string)->GetChars(no_gc);
+ start = SeqTwoByteString::cast(string).GetChars(no_gc);
} else {
- start = ExternalTwoByteString::cast(string)->GetChars();
+ start = ExternalTwoByteString::cast(string).GetChars();
}
return FlatContent(start + offset, length);
}
@@ -595,38 +595,38 @@
while (true) {
DCHECK_LE(0, from);
DCHECK_LE(from, to);
- DCHECK_LE(to, source->length());
+ DCHECK_LE(to, source.length());
switch (StringShape(source).full_representation_tag()) {
case kOneByteStringTag | kExternalStringTag: {
- CopyChars(sink, ExternalOneByteString::cast(source)->GetChars() + from,
+ CopyChars(sink, ExternalOneByteString::cast(source).GetChars() + from,
to - from);
return;
}
case kTwoByteStringTag | kExternalStringTag: {
- const uc16* data = ExternalTwoByteString::cast(source)->GetChars();
+ const uc16* data = ExternalTwoByteString::cast(source).GetChars();
CopyChars(sink, data + from, to - from);
return;
}
case kOneByteStringTag | kSeqStringTag: {
- CopyChars(sink, SeqOneByteString::cast(source)->GetChars(no_gc) + from,
+ CopyChars(sink, SeqOneByteString::cast(source).GetChars(no_gc) + from,
to - from);
return;
}
case kTwoByteStringTag | kSeqStringTag: {
- CopyChars(sink, SeqTwoByteString::cast(source)->GetChars(no_gc) + from,
+ CopyChars(sink, SeqTwoByteString::cast(source).GetChars(no_gc) + from,
to - from);
return;
}
case kOneByteStringTag | kConsStringTag:
case kTwoByteStringTag | kConsStringTag: {
ConsString cons_string = ConsString::cast(source);
- String first = cons_string->first();
- int boundary = first->length();
+ String first = cons_string.first();
+ int boundary = first.length();
if (to - boundary >= boundary - from) {
// Right hand side is longer. Recurse over left.
if (from < boundary) {
WriteToFlat(first, sink, from, boundary);
- if (from == 0 && cons_string->second() == first) {
+ if (from == 0 && cons_string.second() == first) {
CopyChars(sink + boundary, sink, boundary);
return;
}
@@ -636,19 +636,19 @@
from -= boundary;
}
to -= boundary;
- source = cons_string->second();
+ source = cons_string.second();
} else {
// Left hand side is longer. Recurse over right.
if (to > boundary) {
- String second = cons_string->second();
+ String second = cons_string.second();
// When repeatedly appending to a string, we get a cons string that
// is unbalanced to the left, a list, essentially. We inline the
// common case of sequential one-byte right child.
if (to - boundary == 1) {
- sink[boundary - from] = static_cast<sinkchar>(second->Get(0));
- } else if (second->IsSeqOneByteString()) {
+ sink[boundary - from] = static_cast<sinkchar>(second.Get(0));
+ } else if (second.IsSeqOneByteString()) {
CopyChars(sink + boundary - from,
- SeqOneByteString::cast(second)->GetChars(no_gc),
+ SeqOneByteString::cast(second).GetChars(no_gc),
to - boundary);
} else {
WriteToFlat(second, sink + boundary - from, 0, to - boundary);
@@ -662,13 +662,13 @@
case kOneByteStringTag | kSlicedStringTag:
case kTwoByteStringTag | kSlicedStringTag: {
SlicedString slice = SlicedString::cast(source);
- unsigned offset = slice->offset();
- WriteToFlat(slice->parent(), sink, from + offset, to + offset);
+ unsigned offset = slice.offset();
+ WriteToFlat(slice.parent(), sink, from + offset, to + offset);
return;
}
case kOneByteStringTag | kThinStringTag:
case kTwoByteStringTag | kThinStringTag:
- source = ThinString::cast(source)->actual();
+ source = ThinString::cast(source).actual();
break;
}
}
@@ -729,15 +729,15 @@
DisallowHeapAllocation no_gc;
// Fast check: negative check with lengths.
int len = length();
- if (len != other->length()) return false;
+ if (len != other.length()) return false;
if (len == 0) return true;
// Fast check: if at least one ThinString is involved, dereference it/them
// and restart.
- if (this->IsThinString() || other->IsThinString()) {
- if (other->IsThinString()) other = ThinString::cast(other)->actual();
+ if (this->IsThinString() || other.IsThinString()) {
+ if (other.IsThinString()) other = ThinString::cast(other).actual();
if (this->IsThinString()) {
- return ThinString::cast(*this)->actual()->Equals(other);
+ return ThinString::cast(*this).actual().Equals(other);
} else {
return this->Equals(other);
}
@@ -745,13 +745,13 @@
// Fast check: if hash code is computed for both strings
// a fast negative check can be performed.
- if (HasHashCode() && other->HasHashCode()) {
+ if (HasHashCode() && other.HasHashCode()) {
#ifdef ENABLE_SLOW_DCHECKS
if (FLAG_enable_slow_asserts) {
- if (Hash() != other->Hash()) {
+ if (Hash() != other.Hash()) {
bool found_difference = false;
for (int i = 0; i < len; i++) {
- if (Get(i) != other->Get(i)) {
+ if (Get(i) != other.Get(i)) {
found_difference = true;
break;
}
@@ -760,16 +760,16 @@
}
}
#endif
- if (Hash() != other->Hash()) return false;
+ if (Hash() != other.Hash()) return false;
}
// We know the strings are both non-empty. Compare the first chars
// before we try to flatten the strings.
- if (this->Get(0) != other->Get(0)) return false;
+ if (this->Get(0) != other.Get(0)) return false;
- if (IsSeqOneByteString() && other->IsSeqOneByteString()) {
- const uint8_t* str1 = SeqOneByteString::cast(*this)->GetChars(no_gc);
- const uint8_t* str2 = SeqOneByteString::cast(other)->GetChars(no_gc);
+ if (IsSeqOneByteString() && other.IsSeqOneByteString()) {
+ const uint8_t* str1 = SeqOneByteString::cast(*this).GetChars(no_gc);
+ const uint8_t* str2 = SeqOneByteString::cast(other).GetChars(no_gc);
return CompareRawStringContents(str1, str2, len);
}
@@ -788,9 +788,9 @@
// and restart.
if (one->IsThinString() || two->IsThinString()) {
if (one->IsThinString())
- one = handle(ThinString::cast(*one)->actual(), isolate);
+ one = handle(ThinString::cast(*one).actual(), isolate);
if (two->IsThinString())
- two = handle(ThinString::cast(*two)->actual(), isolate);
+ two = handle(ThinString::cast(*two).actual(), isolate);
return String::Equals(isolate, one, two);
}
@@ -1430,9 +1430,9 @@
DCHECK(index >= 0 && index < this->length());
// Check for a flattened cons string
- if (second()->length() == 0) {
+ if (second().length() == 0) {
String left = first();
- return left->Get(index);
+ return left.Get(index);
}
String string = String::cast(*this);
@@ -1440,26 +1440,24 @@
while (true) {
if (StringShape(string).IsCons()) {
ConsString cons_string = ConsString::cast(string);
- String left = cons_string->first();
- if (left->length() > index) {
+ String left = cons_string.first();
+ if (left.length() > index) {
string = left;
} else {
- index -= left->length();
- string = cons_string->second();
+ index -= left.length();
+ string = cons_string.second();
}
} else {
- return string->Get(index);
+ return string.Get(index);
}
}
UNREACHABLE();
}
-uint16_t ThinString::Get(int index) { return actual()->Get(index); }
+uint16_t ThinString::Get(int index) { return actual().Get(index); }
-uint16_t SlicedString::Get(int index) {
- return parent()->Get(offset() + index);
-}
+uint16_t SlicedString::Get(int index) { return parent().Get(offset() + index); }
int ExternalString::ExternalPayloadSize() const {
int length_multiplier = IsTwoByteRepresentation() ? i::kShortSize : kCharSize;
@@ -1531,13 +1529,13 @@
int offset = 0;
while (true) {
// Loop until the string is found which contains the target offset.
- String string = cons_string->first();
- int length = string->length();
+ String string = cons_string.first();
+ int length = string.length();
int32_t type;
if (consumed < offset + length) {
// Target offset is in the left branch.
// Keep going if we're still in a ConString.
- type = string->map()->instance_type();
+ type = string.map().instance_type();
if ((type & kStringRepresentationMask) == kConsStringTag) {
cons_string = ConsString::cast(string);
PushLeft(cons_string);
@@ -1550,15 +1548,15 @@
// Update progress through the string.
offset += length;
// Keep going if we're still in a ConString.
- string = cons_string->second();
- type = string->map()->instance_type();
+ string = cons_string.second();
+ type = string.map().instance_type();
if ((type & kStringRepresentationMask) == kConsStringTag) {
cons_string = ConsString::cast(string);
PushRight(cons_string);
continue;
}
// Need this to be updated for the current string.
- length = string->length();
+ length = string.length();
// Account for the possibility of an empty right leaf.
// This happens only if we have asked for an offset outside the string.
if (length == 0) {
@@ -1594,12 +1592,12 @@
}
// Go right.
ConsString cons_string = frames_[OffsetForDepth(depth_ - 1)];
- String string = cons_string->second();
- int32_t type = string->map()->instance_type();
+ String string = cons_string.second();
+ int32_t type = string.map().instance_type();
if ((type & kStringRepresentationMask) != kConsStringTag) {
// Pop stack so next iteration is in correct place.
Pop();
- int length = string->length();
+ int length = string.length();
// Could be a flattened ConsString.
if (length == 0) continue;
consumed_ += length;
@@ -1610,11 +1608,11 @@
// Need to traverse all the way left.
while (true) {
// Continue left.
- string = cons_string->first();
- type = string->map()->instance_type();
+ string = cons_string.first();
+ type = string.map().instance_type();
if ((type & kStringRepresentationMask) != kConsStringTag) {
AdjustMaximumDepth();
- int length = string->length();
+ int length = string.length();
if (length == 0) break; // Skip empty left-hand sides of ConsStrings.
consumed_ += length;
return string;
diff --git a/src/objects/struct-inl.h b/src/objects/struct-inl.h
index 42ec9a0..7609757 100644
--- a/src/objects/struct-inl.h
+++ b/src/objects/struct-inl.h
@@ -55,8 +55,8 @@
}
void AccessorPair::SetComponents(Object getter, Object setter) {
- if (!getter->IsNull()) set_getter(getter);
- if (!setter->IsNull()) set_setter(setter);
+ if (!getter.IsNull()) set_getter(getter);
+ if (!setter.IsNull()) set_setter(setter);
}
bool AccessorPair::Equals(Object getter_value, Object setter_value) {
diff --git a/src/objects/template-objects.cc b/src/objects/template-objects.cc
index 5a83680..7b4a25d 100644
--- a/src/objects/template-objects.cc
+++ b/src/objects/template-objects.cc
@@ -23,20 +23,20 @@
// Check the template weakmap to see if the template object already exists.
Handle<EphemeronHashTable> template_weakmap =
- native_context->template_weakmap()->IsUndefined(isolate)
+ native_context->template_weakmap().IsUndefined(isolate)
? EphemeronHashTable::New(isolate, 0)
: handle(EphemeronHashTable::cast(native_context->template_weakmap()),
isolate);
uint32_t hash = shared_info->Hash();
Object maybe_cached_template = template_weakmap->Lookup(shared_info, hash);
- while (!maybe_cached_template->IsTheHole()) {
+ while (!maybe_cached_template.IsTheHole()) {
CachedTemplateObject cached_template =
CachedTemplateObject::cast(maybe_cached_template);
- if (cached_template->slot_id() == slot_id)
- return handle(cached_template->template_object(), isolate);
+ if (cached_template.slot_id() == slot_id)
+ return handle(cached_template.template_object(), isolate);
- maybe_cached_template = cached_template->next();
+ maybe_cached_template = cached_template.next();
}
// Create the raw object from the {raw_strings}.
diff --git a/src/objects/templates-inl.h b/src/objects/templates-inl.h
index 19739be..4fb3689 100644
--- a/src/objects/templates-inl.h
+++ b/src/objects/templates-inl.h
@@ -58,26 +58,26 @@
FunctionTemplateRareData FunctionTemplateInfo::EnsureFunctionTemplateRareData(
Isolate* isolate, Handle<FunctionTemplateInfo> function_template_info) {
HeapObject extra = function_template_info->rare_data();
- if (extra->IsUndefined(isolate)) {
+ if (extra.IsUndefined(isolate)) {
return AllocateFunctionTemplateRareData(isolate, function_template_info);
} else {
return FunctionTemplateRareData::cast(extra);
}
}
-#define RARE_ACCESSORS(Name, CamelName, Type) \
- Type FunctionTemplateInfo::Get##CamelName() { \
- HeapObject extra = rare_data(); \
- HeapObject undefined = GetReadOnlyRoots().undefined_value(); \
- return extra == undefined ? undefined \
- : FunctionTemplateRareData::cast(extra)->Name(); \
- } \
- inline void FunctionTemplateInfo::Set##CamelName( \
- Isolate* isolate, Handle<FunctionTemplateInfo> function_template_info, \
- Handle<Type> Name) { \
- FunctionTemplateRareData rare_data = \
- EnsureFunctionTemplateRareData(isolate, function_template_info); \
- rare_data->set_##Name(*Name); \
+#define RARE_ACCESSORS(Name, CamelName, Type) \
+ Type FunctionTemplateInfo::Get##CamelName() { \
+ HeapObject extra = rare_data(); \
+ HeapObject undefined = GetReadOnlyRoots().undefined_value(); \
+ return extra == undefined ? undefined \
+ : FunctionTemplateRareData::cast(extra).Name(); \
+ } \
+ inline void FunctionTemplateInfo::Set##CamelName( \
+ Isolate* isolate, Handle<FunctionTemplateInfo> function_template_info, \
+ Handle<Type> Name) { \
+ FunctionTemplateRareData rare_data = \
+ EnsureFunctionTemplateRareData(isolate, function_template_info); \
+ rare_data.set_##Name(*Name); \
}
RARE_ACCESSORS(prototype_template, PrototypeTemplate, Object)
@@ -116,33 +116,33 @@
CAST_ACCESSOR(ObjectTemplateInfo)
bool FunctionTemplateInfo::instantiated() {
- return shared_function_info()->IsSharedFunctionInfo();
+ return shared_function_info().IsSharedFunctionInfo();
}
bool FunctionTemplateInfo::BreakAtEntry() {
Object maybe_shared = shared_function_info();
- if (maybe_shared->IsSharedFunctionInfo()) {
+ if (maybe_shared.IsSharedFunctionInfo()) {
SharedFunctionInfo shared = SharedFunctionInfo::cast(maybe_shared);
- return shared->BreakAtEntry();
+ return shared.BreakAtEntry();
}
return false;
}
FunctionTemplateInfo FunctionTemplateInfo::GetParent(Isolate* isolate) {
Object parent = GetParentTemplate();
- return parent->IsUndefined(isolate) ? FunctionTemplateInfo()
- : FunctionTemplateInfo::cast(parent);
+ return parent.IsUndefined(isolate) ? FunctionTemplateInfo()
+ : FunctionTemplateInfo::cast(parent);
}
ObjectTemplateInfo ObjectTemplateInfo::GetParent(Isolate* isolate) {
Object maybe_ctor = constructor();
- if (maybe_ctor->IsUndefined(isolate)) return ObjectTemplateInfo();
+ if (maybe_ctor.IsUndefined(isolate)) return ObjectTemplateInfo();
FunctionTemplateInfo constructor = FunctionTemplateInfo::cast(maybe_ctor);
while (true) {
- constructor = constructor->GetParent(isolate);
+ constructor = constructor.GetParent(isolate);
if (constructor.is_null()) return ObjectTemplateInfo();
- Object maybe_obj = constructor->GetInstanceTemplate();
- if (!maybe_obj->IsUndefined(isolate)) {
+ Object maybe_obj = constructor.GetInstanceTemplate();
+ if (!maybe_obj.IsUndefined(isolate)) {
return ObjectTemplateInfo::cast(maybe_obj);
}
}
@@ -151,7 +151,7 @@
int ObjectTemplateInfo::embedder_field_count() const {
Object value = data();
- DCHECK(value->IsSmi());
+ DCHECK(value.IsSmi());
return EmbedderFieldCount::decode(Smi::ToInt(value));
}
@@ -163,7 +163,7 @@
bool ObjectTemplateInfo::immutable_proto() const {
Object value = data();
- DCHECK(value->IsSmi());
+ DCHECK(value.IsSmi());
return IsImmutablePrototype::decode(Smi::ToInt(value));
}
@@ -173,7 +173,7 @@
}
bool FunctionTemplateInfo::IsTemplateFor(JSObject object) {
- return IsTemplateFor(object->map());
+ return IsTemplateFor(object.map());
}
} // namespace internal
diff --git a/src/objects/transitions-inl.h b/src/objects/transitions-inl.h
index 7ae5aa2..893de78 100644
--- a/src/objects/transitions-inl.h
+++ b/src/objects/transitions-inl.h
@@ -45,16 +45,16 @@
}
void TransitionArray::SetPrototypeTransitions(WeakFixedArray transitions) {
- DCHECK(transitions->IsWeakFixedArray());
+ DCHECK(transitions.IsWeakFixedArray());
WeakFixedArray::Set(kPrototypeTransitionsIndex,
HeapObjectReference::Strong(transitions));
}
int TransitionArray::NumberOfPrototypeTransitions(
WeakFixedArray proto_transitions) {
- if (proto_transitions->length() == 0) return 0;
+ if (proto_transitions.length() == 0) return 0;
MaybeObject raw =
- proto_transitions->Get(kProtoTransitionNumberOfEntriesOffset);
+ proto_transitions.Get(kProtoTransitionNumberOfEntriesOffset);
return raw.ToSmi().value();
}
@@ -76,7 +76,7 @@
return GetSimpleTransitionKey(map);
}
case kFullTransitionArray:
- return transitions()->GetKey(transition_number);
+ return transitions().GetKey(transition_number);
}
UNREACHABLE();
}
@@ -94,23 +94,23 @@
// static
PropertyDetails TransitionsAccessor::GetTargetDetails(Name name, Map target) {
- DCHECK(!IsSpecialTransition(name->GetReadOnlyRoots(), name));
- int descriptor = target->LastAdded();
- DescriptorArray descriptors = target->instance_descriptors();
+ DCHECK(!IsSpecialTransition(name.GetReadOnlyRoots(), name));
+ int descriptor = target.LastAdded();
+ DescriptorArray descriptors = target.instance_descriptors();
// Transitions are allowed only for the last added property.
- DCHECK(descriptors->GetKey(descriptor)->Equals(name));
- return descriptors->GetDetails(descriptor);
+ DCHECK(descriptors.GetKey(descriptor).Equals(name));
+ return descriptors.GetDetails(descriptor);
}
// static
PropertyDetails TransitionsAccessor::GetSimpleTargetDetails(Map transition) {
- return transition->GetLastDescriptorDetails();
+ return transition.GetLastDescriptorDetails();
}
// static
Name TransitionsAccessor::GetSimpleTransitionKey(Map transition) {
- int descriptor = transition->LastAdded();
- return transition->instance_descriptors()->GetKey(descriptor);
+ int descriptor = transition.LastAdded();
+ return transition.instance_descriptors().GetKey(descriptor);
}
// static
@@ -138,7 +138,7 @@
case kWeakRef:
return Map::cast(raw_transitions_->GetHeapObjectAssumeWeak());
case kFullTransitionArray:
- return transitions()->GetTarget(transition_number);
+ return transitions().GetTarget(transition_number);
}
UNREACHABLE();
}
@@ -146,7 +146,7 @@
void TransitionArray::SetRawTarget(int transition_number, MaybeObject value) {
DCHECK(transition_number < number_of_transitions());
DCHECK(value->IsWeak());
- DCHECK(value->GetHeapObjectAssumeWeak()->IsMap());
+ DCHECK(value->GetHeapObjectAssumeWeak().IsMap());
WeakFixedArray::Set(ToTargetIndex(transition_number), value);
}
@@ -155,7 +155,7 @@
MaybeObject raw = GetRawTarget(transition_number);
HeapObject heap_object;
if (raw->GetHeapObjectIfStrong(&heap_object) &&
- heap_object->IsUndefined(isolate)) {
+ heap_object.IsUndefined(isolate)) {
return false;
}
*target = TransitionsAccessor::GetTargetFromRaw(raw);
@@ -171,7 +171,7 @@
}
int TransitionArray::SearchName(Name name, int* out_insertion_index) {
- DCHECK(name->IsUniqueName());
+ DCHECK(name.IsUniqueName());
return internal::Search<ALL_ENTRIES>(this, name, number_of_entries(),
out_insertion_index);
}
@@ -195,20 +195,20 @@
}
void TransitionsAccessor::Initialize() {
- raw_transitions_ = map_->raw_transitions();
+ raw_transitions_ = map_.raw_transitions();
HeapObject heap_object;
if (raw_transitions_->IsSmi() || raw_transitions_->IsCleared()) {
encoding_ = kUninitialized;
} else if (raw_transitions_->IsWeak()) {
encoding_ = kWeakRef;
} else if (raw_transitions_->GetHeapObjectIfStrong(&heap_object)) {
- if (heap_object->IsTransitionArray()) {
+ if (heap_object.IsTransitionArray()) {
encoding_ = kFullTransitionArray;
- } else if (heap_object->IsPrototypeInfo()) {
+ } else if (heap_object.IsPrototypeInfo()) {
encoding_ = kPrototypeInfo;
} else {
- DCHECK(map_->is_deprecated());
- DCHECK(heap_object->IsMap());
+ DCHECK(map_.is_deprecated());
+ DCHECK(heap_object.IsMap());
encoding_ = kMigrationTarget;
}
} else {
@@ -301,7 +301,7 @@
DCHECK_EQ(kData, details.kind());
if (details.attributes() != NONE) return Handle<String>::null();
Name name = GetSimpleTransitionKey(target);
- if (!name->IsString()) return Handle<String>::null();
+ if (!name.IsString()) return Handle<String>::null();
return handle(String::cast(name), isolate_);
}
}
diff --git a/src/objects/transitions.cc b/src/objects/transitions.cc
index 23a3b23..8bd1f46 100644
--- a/src/objects/transitions.cc
+++ b/src/objects/transitions.cc
@@ -60,7 +60,7 @@
PropertyDetails new_details = is_special_transition
? PropertyDetails::Empty()
: GetTargetDetails(*name, *target);
- if (flag == SIMPLE_PROPERTY_TRANSITION && key->Equals(*name) &&
+ if (flag == SIMPLE_PROPERTY_TRANSITION && key.Equals(*name) &&
old_details.kind() == new_details.kind() &&
old_details.attributes() == new_details.attributes()) {
ReplaceTransitions(HeapObjectReference::Weak(*target));
@@ -103,17 +103,16 @@
{
DisallowHeapAllocation no_gc;
TransitionArray array = transitions();
- number_of_transitions = array->number_of_transitions();
+ number_of_transitions = array.number_of_transitions();
new_nof = number_of_transitions;
- int index =
- is_special_transition
- ? array->SearchSpecial(Symbol::cast(*name), &insertion_index)
- : array->Search(details.kind(), *name, details.attributes(),
- &insertion_index);
+ int index = is_special_transition
+ ? array.SearchSpecial(Symbol::cast(*name), &insertion_index)
+ : array.Search(details.kind(), *name, details.attributes(),
+ &insertion_index);
// If an existing entry was found, overwrite it and return.
if (index != kNotFound) {
- array->SetRawTarget(index, HeapObjectReference::Weak(*target));
+ array.SetRawTarget(index, HeapObjectReference::Weak(*target));
return;
}
@@ -122,15 +121,15 @@
DCHECK(insertion_index >= 0 && insertion_index <= number_of_transitions);
// If there is enough capacity, insert new entry into the existing array.
- if (new_nof <= array->Capacity()) {
- array->SetNumberOfTransitions(new_nof);
+ if (new_nof <= array.Capacity()) {
+ array.SetNumberOfTransitions(new_nof);
for (index = number_of_transitions; index > insertion_index; --index) {
- array->SetKey(index, array->GetKey(index - 1));
- array->SetRawTarget(index, array->GetRawTarget(index - 1));
+ array.SetKey(index, array.GetKey(index - 1));
+ array.SetRawTarget(index, array.GetRawTarget(index - 1));
}
- array->SetKey(index, *name);
- array->SetRawTarget(index, HeapObjectReference::Weak(*target));
- SLOW_DCHECK(array->IsSortedNoDuplicates());
+ array.SetKey(index, *name);
+ array.SetRawTarget(index, HeapObjectReference::Weak(*target));
+ SLOW_DCHECK(array.IsSortedNoDuplicates());
return;
}
}
@@ -146,18 +145,17 @@
Reload();
DisallowHeapAllocation no_gc;
TransitionArray array = transitions();
- if (array->number_of_transitions() != number_of_transitions) {
- DCHECK(array->number_of_transitions() < number_of_transitions);
+ if (array.number_of_transitions() != number_of_transitions) {
+ DCHECK(array.number_of_transitions() < number_of_transitions);
- number_of_transitions = array->number_of_transitions();
+ number_of_transitions = array.number_of_transitions();
new_nof = number_of_transitions;
insertion_index = kNotFound;
- int index =
- is_special_transition
- ? array->SearchSpecial(Symbol::cast(*name), &insertion_index)
- : array->Search(details.kind(), *name, details.attributes(),
- &insertion_index);
+ int index = is_special_transition
+ ? array.SearchSpecial(Symbol::cast(*name), &insertion_index)
+ : array.Search(details.kind(), *name, details.attributes(),
+ &insertion_index);
if (index == kNotFound) {
++new_nof;
} else {
@@ -168,17 +166,17 @@
result->SetNumberOfTransitions(new_nof);
}
- if (array->HasPrototypeTransitions()) {
- result->SetPrototypeTransitions(array->GetPrototypeTransitions());
+ if (array.HasPrototypeTransitions()) {
+ result->SetPrototypeTransitions(array.GetPrototypeTransitions());
}
DCHECK_NE(kNotFound, insertion_index);
for (int i = 0; i < insertion_index; ++i) {
- result->Set(i, array->GetKey(i), array->GetRawTarget(i));
+ result->Set(i, array.GetKey(i), array.GetRawTarget(i));
}
result->Set(insertion_index, *name, HeapObjectReference::Weak(*target));
for (int i = insertion_index; i < number_of_transitions; ++i) {
- result->Set(i + 1, array->GetKey(i), array->GetRawTarget(i));
+ result->Set(i + 1, array.GetKey(i), array.GetRawTarget(i));
}
SLOW_DCHECK(result->IsSortedNoDuplicates());
@@ -187,7 +185,7 @@
Map TransitionsAccessor::SearchTransition(Name name, PropertyKind kind,
PropertyAttributes attributes) {
- DCHECK(name->IsUniqueName());
+ DCHECK(name.IsUniqueName());
switch (encoding()) {
case kPrototypeInfo:
case kUninitialized:
@@ -199,7 +197,7 @@
return map;
}
case kFullTransitionArray: {
- return transitions()->SearchAndGetTarget(kind, name, attributes);
+ return transitions().SearchAndGetTarget(kind, name, attributes);
}
}
UNREACHABLE();
@@ -207,14 +205,14 @@
Map TransitionsAccessor::SearchSpecial(Symbol name) {
if (encoding() != kFullTransitionArray) return Map();
- int transition = transitions()->SearchSpecial(name);
+ int transition = transitions().SearchSpecial(name);
if (transition == kNotFound) return Map();
- return transitions()->GetTarget(transition);
+ return transitions().GetTarget(transition);
}
// static
bool TransitionsAccessor::IsSpecialTransition(ReadOnlyRoots roots, Name name) {
- if (!name->IsSymbol()) return false;
+ if (!name.IsSymbol()) return false;
return name == roots.nonextensible_symbol() ||
name == roots.sealed_symbol() || name == roots.frozen_symbol() ||
name == roots.elements_transition_symbol() ||
@@ -228,7 +226,7 @@
PropertyAttributes attributes = name->IsPrivate() ? DONT_ENUM : NONE;
Map target = SearchTransition(*name, kData, attributes);
if (target.is_null()) return MaybeHandle<Map>();
- PropertyDetails details = target->GetLastDescriptorDetails();
+ PropertyDetails details = target.GetLastDescriptorDetails();
DCHECK_EQ(attributes, details.attributes());
DCHECK_EQ(kData, details.kind());
if (requested_location == kFieldOnly && details.location() != kField) {
@@ -238,9 +236,9 @@
}
bool TransitionsAccessor::CanHaveMoreTransitions() {
- if (map_->is_dictionary_map()) return false;
+ if (map_.is_dictionary_map()) return false;
if (encoding() == kFullTransitionArray) {
- return transitions()->number_of_transitions() < kMaxNumberOfTransitions;
+ return transitions().number_of_transitions() < kMaxNumberOfTransitions;
}
return true;
}
@@ -249,11 +247,11 @@
bool TransitionsAccessor::IsMatchingMap(Map target, Name name,
PropertyKind kind,
PropertyAttributes attributes) {
- int descriptor = target->LastAdded();
- DescriptorArray descriptors = target->instance_descriptors();
- Name key = descriptors->GetKey(descriptor);
+ int descriptor = target.LastAdded();
+ DescriptorArray descriptors = target.instance_descriptors();
+ Name key = descriptors.GetKey(descriptor);
if (key != name) return false;
- return descriptors->GetDetails(descriptor)
+ return descriptors.GetDetails(descriptor)
.HasKindAndAttributes(kind, attributes);
}
@@ -268,12 +266,12 @@
}
int new_number_of_transitions = 0;
for (int i = 0; i < number_of_transitions; i++) {
- MaybeObject target = array->Get(header + i);
+ MaybeObject target = array.Get(header + i);
DCHECK(target->IsCleared() ||
- (target->IsWeak() && target->GetHeapObject()->IsMap()));
+ (target->IsWeak() && target->GetHeapObject().IsMap()));
if (!target->IsCleared()) {
if (new_number_of_transitions != i) {
- array->Set(header + new_number_of_transitions, target);
+ array.Set(header + new_number_of_transitions, target);
}
new_number_of_transitions++;
}
@@ -282,7 +280,7 @@
MaybeObject undefined =
MaybeObject::FromObject(*isolate->factory()->undefined_value());
for (int i = new_number_of_transitions; i < number_of_transitions; i++) {
- array->Set(header + i, undefined);
+ array.Set(header + i, undefined);
}
if (number_of_transitions != new_number_of_transitions) {
SetNumberOfPrototypeTransitions(array, new_number_of_transitions);
@@ -310,11 +308,11 @@
void TransitionsAccessor::PutPrototypeTransition(Handle<Object> prototype,
Handle<Map> target_map) {
- DCHECK(HeapObject::cast(*prototype)->map()->IsMap());
+ DCHECK(HeapObject::cast(*prototype).map().IsMap());
// Don't cache prototype transition if this map is either shared, or a map of
// a prototype.
- if (map_->is_prototype_map()) return;
- if (map_->is_dictionary_map() || !FLAG_cache_prototype_transitions) return;
+ if (map_.is_prototype_map()) return;
+ if (map_.is_dictionary_map() || !FLAG_cache_prototype_transitions) return;
const int header = TransitionArray::kProtoTransitionHeaderSize;
@@ -348,12 +346,12 @@
int length = TransitionArray::NumberOfPrototypeTransitions(cache);
for (int i = 0; i < length; i++) {
MaybeObject target =
- cache->Get(TransitionArray::kProtoTransitionHeaderSize + i);
+ cache.Get(TransitionArray::kProtoTransitionHeaderSize + i);
DCHECK(target->IsWeakOrCleared());
HeapObject heap_object;
if (target->GetHeapObjectIfWeak(&heap_object)) {
Map map = Map::cast(heap_object);
- if (map->prototype() == *prototype) {
+ if (map.prototype() == *prototype) {
return handle(map, isolate_);
}
}
@@ -363,18 +361,18 @@
WeakFixedArray TransitionsAccessor::GetPrototypeTransitions() {
if (encoding() != kFullTransitionArray ||
- !transitions()->HasPrototypeTransitions()) {
+ !transitions().HasPrototypeTransitions()) {
return ReadOnlyRoots(isolate_).empty_weak_fixed_array();
}
- return transitions()->GetPrototypeTransitions();
+ return transitions().GetPrototypeTransitions();
}
// static
void TransitionArray::SetNumberOfPrototypeTransitions(
WeakFixedArray proto_transitions, int value) {
- DCHECK_NE(proto_transitions->length(), 0);
- proto_transitions->Set(kProtoTransitionNumberOfEntriesOffset,
- MaybeObject::FromSmi(Smi::FromInt(value)));
+ DCHECK_NE(proto_transitions.length(), 0);
+ proto_transitions.Set(kProtoTransitionNumberOfEntriesOffset,
+ MaybeObject::FromSmi(Smi::FromInt(value)));
}
int TransitionsAccessor::NumberOfTransitions() {
@@ -386,7 +384,7 @@
case kWeakRef:
return 1;
case kFullTransitionArray:
- return transitions()->number_of_transitions();
+ return transitions().number_of_transitions();
}
UNREACHABLE();
return 0; // Make GCC happy.
@@ -396,14 +394,14 @@
// We only cache the migration target for maps with empty transitions for GC's
// sake.
if (encoding() != kUninitialized) return;
- DCHECK(map_->is_deprecated());
- map_->set_raw_transitions(MaybeObject::FromObject(migration_target));
+ DCHECK(map_.is_deprecated());
+ map_.set_raw_transitions(MaybeObject::FromObject(migration_target));
MarkNeedsReload();
}
Map TransitionsAccessor::GetMigrationTarget() {
if (encoding() == kMigrationTarget) {
- return map_->raw_transitions()->cast<Map>();
+ return map_.raw_transitions()->cast<Map>();
}
return Map();
}
@@ -427,16 +425,16 @@
// keep referenced objects alive, so we zap it.
// When there is another reference to the array somewhere (e.g. a handle),
// not zapping turns from a waste of memory into a source of crashes.
- old_transitions->Zap(isolate_);
+ old_transitions.Zap(isolate_);
}
- map_->set_raw_transitions(new_transitions);
+ map_.set_raw_transitions(new_transitions);
MarkNeedsReload();
}
void TransitionsAccessor::SetPrototypeTransitions(
Handle<WeakFixedArray> proto_transitions) {
EnsureHasFullTransitionArray();
- transitions()->SetPrototypeTransitions(*proto_transitions);
+ transitions().SetPrototypeTransitions(*proto_transitions);
}
void TransitionsAccessor::EnsureHasFullTransitionArray() {
@@ -475,12 +473,12 @@
break;
}
case kFullTransitionArray: {
- if (transitions()->HasPrototypeTransitions()) {
- WeakFixedArray proto_trans = transitions()->GetPrototypeTransitions();
+ if (transitions().HasPrototypeTransitions()) {
+ WeakFixedArray proto_trans = transitions().GetPrototypeTransitions();
int length = TransitionArray::NumberOfPrototypeTransitions(proto_trans);
for (int i = 0; i < length; ++i) {
int index = TransitionArray::kProtoTransitionHeaderSize + i;
- MaybeObject target = proto_trans->Get(index);
+ MaybeObject target = proto_trans.Get(index);
HeapObject heap_object;
if (target->GetHeapObjectIfWeak(&heap_object)) {
TransitionsAccessor(isolate_, Map::cast(heap_object), no_gc)
@@ -490,8 +488,8 @@
}
}
}
- for (int i = 0; i < transitions()->number_of_transitions(); ++i) {
- TransitionsAccessor(isolate_, transitions()->GetTarget(i), no_gc)
+ for (int i = 0; i < transitions().number_of_transitions(); ++i) {
+ TransitionsAccessor(isolate_, transitions().GetTarget(i), no_gc)
.TraverseTransitionTreeInternal(callback, data, no_gc);
}
break;
@@ -506,20 +504,20 @@
// This function only handles full transition arrays.
DCHECK_EQ(kFullTransitionArray, encoding());
TransitionArray new_transitions = TransitionArray::cast(transitions);
- for (int i = 0; i < old_transitions->number_of_transitions(); i++) {
- Map target = old_transitions->GetTarget(i);
- if (target->instance_descriptors() == map_->instance_descriptors()) {
- Name key = old_transitions->GetKey(i);
+ for (int i = 0; i < old_transitions.number_of_transitions(); i++) {
+ Map target = old_transitions.GetTarget(i);
+ if (target.instance_descriptors() == map_.instance_descriptors()) {
+ Name key = old_transitions.GetKey(i);
int new_target_index;
if (IsSpecialTransition(ReadOnlyRoots(isolate_), key)) {
- new_target_index = new_transitions->SearchSpecial(Symbol::cast(key));
+ new_target_index = new_transitions.SearchSpecial(Symbol::cast(key));
} else {
PropertyDetails details = GetTargetDetails(key, target);
new_target_index =
- new_transitions->Search(details.kind(), key, details.attributes());
+ new_transitions.Search(details.kind(), key, details.attributes());
}
DCHECK_NE(TransitionArray::kNotFound, new_target_index);
- DCHECK_EQ(target, new_transitions->GetTarget(new_target_index));
+ DCHECK_EQ(target, new_transitions.GetTarget(new_target_index));
}
}
}
@@ -622,9 +620,8 @@
temp_kind = details.kind();
temp_attributes = details.attributes();
}
- int cmp =
- CompareKeys(temp_key, temp_key->Hash(), temp_kind, temp_attributes,
- key, key->Hash(), kind, attributes);
+ int cmp = CompareKeys(temp_key, temp_key.Hash(), temp_kind,
+ temp_attributes, key, key.Hash(), kind, attributes);
if (cmp > 0) {
SetKey(j + 1, temp_key);
SetRawTarget(j + 1, temp_target);
diff --git a/src/parsing/parse-info.cc b/src/parsing/parse-info.cc
index 868a6f2..12ada46 100644
--- a/src/parsing/parse-info.cc
+++ b/src/parsing/parse-info.cc
@@ -90,7 +90,7 @@
// Do not support re-parsing top-level function of a wrapped script.
// TODO(yangguo): consider whether we need a top-level function in a
// wrapped script at all.
- DCHECK_IMPLIES(is_toplevel(), !Script::cast(shared->script())->is_wrapped());
+ DCHECK_IMPLIES(is_toplevel(), !Script::cast(shared->script()).is_wrapped());
set_allow_lazy_parsing(true);
set_asm_wasm_broken(shared->is_asm_wasm_broken());
@@ -113,7 +113,7 @@
set_collect_type_profile(
isolate->is_collecting_type_profile() &&
(shared->HasFeedbackMetadata()
- ? shared->feedback_metadata()->HasTypeProfileSlot()
+ ? shared->feedback_metadata().HasTypeProfileSlot()
: script->IsUserJavaScript()));
}
diff --git a/src/parsing/parser.cc b/src/parsing/parser.cc
index 8276f11..380920b 100644
--- a/src/parsing/parser.cc
+++ b/src/parsing/parser.cc
@@ -519,10 +519,9 @@
if (!info->is_eval()) {
event_name = "parse-script";
start = 0;
- end = String::cast(script->source())->length();
+ end = String::cast(script.source()).length();
}
- LOG(isolate,
- FunctionEvent(event_name, script->id(), ms, start, end, "", 0));
+ LOG(isolate, FunctionEvent(event_name, script.id(), ms, start, end, "", 0));
}
return result;
}
diff --git a/src/parsing/scanner-character-streams.cc b/src/parsing/scanner-character-streams.cc
index bb4c307..163fed9 100644
--- a/src/parsing/scanner-character-streams.cc
+++ b/src/parsing/scanner-character-streams.cc
@@ -22,11 +22,11 @@
public:
explicit ScopedExternalStringLock(ExternalString string) {
DCHECK(!string.is_null());
- if (string->IsExternalOneByteString()) {
- resource_ = ExternalOneByteString::cast(string)->resource();
+ if (string.IsExternalOneByteString()) {
+ resource_ = ExternalOneByteString::cast(string).resource();
} else {
- DCHECK(string->IsExternalTwoByteString());
- resource_ = ExternalTwoByteString::cast(string)->resource();
+ DCHECK(string.IsExternalTwoByteString());
+ resource_ = ExternalTwoByteString::cast(string).resource();
}
DCHECK(resource_);
resource_->Lock();
@@ -100,7 +100,7 @@
ExternalStringStream(ExternalString string, size_t start_offset,
size_t length)
: lock_(string),
- data_(string->GetChars() + start_offset),
+ data_(string.GetChars() + start_offset),
length_(length) {}
ExternalStringStream(const ExternalStringStream& other) V8_NOEXCEPT
@@ -746,9 +746,9 @@
size_t start_offset = 0;
if (data->IsSlicedString()) {
SlicedString string = SlicedString::cast(*data);
- start_offset = string->offset();
- String parent = string->parent();
- if (parent->IsThinString()) parent = ThinString::cast(parent)->actual();
+ start_offset = string.offset();
+ String parent = string.parent();
+ if (parent.IsThinString()) parent = ThinString::cast(parent).actual();
data = handle(parent, isolate);
} else {
data = String::Flatten(isolate, data);
diff --git a/src/ppc/assembler-ppc-inl.h b/src/ppc/assembler-ppc-inl.h
index b923d01..c52997e 100644
--- a/src/ppc/assembler-ppc-inl.h
+++ b/src/ppc/assembler-ppc-inl.h
@@ -147,7 +147,7 @@
WriteBarrierMode write_barrier_mode,
ICacheFlushMode icache_flush_mode) {
DCHECK(IsCodeTarget(rmode_) || rmode_ == FULL_EMBEDDED_OBJECT);
- Assembler::set_target_address_at(pc_, constant_pool_, target->ptr(),
+ Assembler::set_target_address_at(pc_, constant_pool_, target.ptr(),
icache_flush_mode);
if (write_barrier_mode == UPDATE_WRITE_BARRIER && !host().is_null()) {
WriteBarrierForCode(host(), this, target);
@@ -377,7 +377,7 @@
void Assembler::deserialization_set_special_target_at(
Address instruction_payload, Code code, Address target) {
set_target_address_at(instruction_payload,
- !code.is_null() ? code->constant_pool() : kNullAddress,
+ !code.is_null() ? code.constant_pool() : kNullAddress,
target);
}
diff --git a/src/ppc/simulator-ppc.cc b/src/ppc/simulator-ppc.cc
index d37a34c..6814924 100644
--- a/src/ppc/simulator-ppc.cc
+++ b/src/ppc/simulator-ppc.cc
@@ -408,7 +408,7 @@
if (obj.IsSmi()) {
PrintF("smi %d", Smi::ToInt(obj));
} else {
- obj->ShortPrint();
+ obj.ShortPrint();
}
PrintF(")");
}
diff --git a/src/profiler/allocation-tracker.cc b/src/profiler/allocation-tracker.cc
index 20d45c0..1dd088f 100644
--- a/src/profiler/allocation-tracker.cc
+++ b/src/profiler/allocation-tracker.cc
@@ -212,9 +212,9 @@
JavaScriptFrameIterator it(isolate);
while (!it.done() && length < kMaxAllocationTraceLength) {
JavaScriptFrame* frame = it.frame();
- SharedFunctionInfo shared = frame->function()->shared();
- SnapshotObjectId id = ids_->FindOrAddEntry(
- shared->address(), shared->Size(), false);
+ SharedFunctionInfo shared = frame->function().shared();
+ SnapshotObjectId id =
+ ids_->FindOrAddEntry(shared.address(), shared.Size(), false);
allocation_trace_buffer_[length++] = AddFunctionInfo(shared, id);
it.Advance();
}
@@ -242,19 +242,19 @@
reinterpret_cast<void*>(id), SnapshotObjectIdHash(id));
if (entry->value == nullptr) {
FunctionInfo* info = new FunctionInfo();
- info->name = names_->GetName(shared->DebugName());
+ info->name = names_->GetName(shared.DebugName());
info->function_id = id;
- if (shared->script()->IsScript()) {
- Script script = Script::cast(shared->script());
- if (script->name()->IsName()) {
- Name name = Name::cast(script->name());
+ if (shared.script().IsScript()) {
+ Script script = Script::cast(shared.script());
+ if (script.name().IsName()) {
+ Name name = Name::cast(script.name());
info->script_name = names_->GetName(name);
}
- info->script_id = script->id();
+ info->script_id = script.id();
// Converting start offset into line and column may cause heap
// allocations so we postpone them until snapshot serialization.
unresolved_locations_.push_back(
- new UnresolvedLocation(script, shared->StartPosition(), info));
+ new UnresolvedLocation(script, shared.StartPosition(), info));
}
entry->value = reinterpret_cast<void*>(function_info_list_.size());
function_info_list_.push_back(info);
@@ -278,7 +278,7 @@
int start,
FunctionInfo* info)
: start_position_(start), info_(info) {
- script_ = script->GetIsolate()->global_handles()->Create(script);
+ script_ = script.GetIsolate()->global_handles()->Create(script);
GlobalHandles::MakeWeak(script_.location(), this, &HandleWeakScript,
v8::WeakCallbackType::kParameter);
}
diff --git a/src/profiler/cpu-profiler.cc b/src/profiler/cpu-profiler.cc
index b81b7a7..547807f 100644
--- a/src/profiler/cpu-profiler.cc
+++ b/src/profiler/cpu-profiler.cc
@@ -486,7 +486,7 @@
CodeEventsContainer evt_rec(CodeEventRecord::REPORT_BUILTIN);
ReportBuiltinEventRecord* rec = &evt_rec.ReportBuiltinEventRecord_;
Builtins::Name id = static_cast<Builtins::Name>(i);
- rec->instruction_start = builtins->builtin(id)->InstructionStart();
+ rec->instruction_start = builtins->builtin(id).InstructionStart();
rec->builtin_id = id;
processor_->Enqueue(evt_rec);
}
diff --git a/src/profiler/heap-profiler.cc b/src/profiler/heap-profiler.cc
index 6c83116..a912c2e 100644
--- a/src/profiler/heap-profiler.cc
+++ b/src/profiler/heap-profiler.cc
@@ -148,7 +148,7 @@
SnapshotObjectId HeapProfiler::GetSnapshotObjectId(Handle<Object> obj) {
if (!obj->IsHeapObject())
return v8::HeapProfiler::kUnknownObjectId;
- return ids_->FindEntry(HeapObject::cast(*obj)->address());
+ return ids_->FindEntry(HeapObject::cast(*obj).address());
}
void HeapProfiler::ObjectMoveEvent(Address from, Address to, int size) {
@@ -177,7 +177,7 @@
// Make sure that object with the given id is still reachable.
for (HeapObject obj = iterator.Next(); !obj.is_null();
obj = iterator.Next()) {
- if (ids_->FindEntry(obj->address()) == id) {
+ if (ids_->FindEntry(obj.address()) == id) {
DCHECK(object.is_null());
object = obj;
// Can't break -- kFilterUnreachable requires full heap traversal.
@@ -208,7 +208,7 @@
CombinedHeapIterator heap_iterator(heap());
for (HeapObject heap_obj = heap_iterator.Next(); !heap_obj.is_null();
heap_obj = heap_iterator.Next()) {
- if (!heap_obj->IsJSObject() || heap_obj->IsExternal(isolate())) continue;
+ if (!heap_obj.IsJSObject() || heap_obj.IsExternal(isolate())) continue;
v8::Local<v8::Object> v8_obj(
Utils::ToLocal(handle(JSObject::cast(heap_obj), isolate())));
if (!predicate->Filter(v8_obj)) continue;
diff --git a/src/profiler/heap-snapshot-generator.cc b/src/profiler/heap-snapshot-generator.cc
index b308824..dcd6bd4 100644
--- a/src/profiler/heap-snapshot-generator.cc
+++ b/src/profiler/heap-snapshot-generator.cc
@@ -398,11 +398,11 @@
CombinedHeapIterator iterator(heap_);
for (HeapObject obj = iterator.Next(); !obj.is_null();
obj = iterator.Next()) {
- FindOrAddEntry(obj->address(), obj->Size());
+ FindOrAddEntry(obj.address(), obj.Size());
if (FLAG_heap_profiler_trace_objects) {
PrintF("Update object : %p %6d. Next address is %p\n",
- reinterpret_cast<void*>(obj->address()), obj->Size(),
- reinterpret_cast<void*>(obj->address() + obj->Size()));
+ reinterpret_cast<void*>(obj.address()), obj.Size(),
+ reinterpret_cast<void*>(obj.address() + obj.Size()));
}
}
RemoveDeadEntries();
@@ -508,15 +508,15 @@
}
void V8HeapExplorer::ExtractLocation(HeapEntry* entry, HeapObject object) {
- if (object->IsJSFunction()) {
+ if (object.IsJSFunction()) {
JSFunction func = JSFunction::cast(object);
ExtractLocationForJSFunction(entry, func);
- } else if (object->IsJSGeneratorObject()) {
+ } else if (object.IsJSGeneratorObject()) {
JSGeneratorObject gen = JSGeneratorObject::cast(object);
- ExtractLocationForJSFunction(entry, gen->function());
+ ExtractLocationForJSFunction(entry, gen.function());
- } else if (object->IsJSObject()) {
+ } else if (object.IsJSObject()) {
JSObject obj = JSObject::cast(object);
JSFunction maybe_constructor = GetConstructor(obj);
@@ -528,73 +528,70 @@
void V8HeapExplorer::ExtractLocationForJSFunction(HeapEntry* entry,
JSFunction func) {
- if (!func->shared()->script()->IsScript()) return;
- Script script = Script::cast(func->shared()->script());
- int scriptId = script->id();
- int start = func->shared()->StartPosition();
- int line = script->GetLineNumber(start);
- int col = script->GetColumnNumber(start);
+ if (!func.shared().script().IsScript()) return;
+ Script script = Script::cast(func.shared().script());
+ int scriptId = script.id();
+ int start = func.shared().StartPosition();
+ int line = script.GetLineNumber(start);
+ int col = script.GetColumnNumber(start);
snapshot_->AddLocation(entry, scriptId, line, col);
}
HeapEntry* V8HeapExplorer::AddEntry(HeapObject object) {
- if (object->IsJSFunction()) {
+ if (object.IsJSFunction()) {
JSFunction func = JSFunction::cast(object);
- SharedFunctionInfo shared = func->shared();
- const char* name = names_->GetName(shared->Name());
+ SharedFunctionInfo shared = func.shared();
+ const char* name = names_->GetName(shared.Name());
return AddEntry(object, HeapEntry::kClosure, name);
- } else if (object->IsJSBoundFunction()) {
+ } else if (object.IsJSBoundFunction()) {
return AddEntry(object, HeapEntry::kClosure, "native_bind");
- } else if (object->IsJSRegExp()) {
+ } else if (object.IsJSRegExp()) {
JSRegExp re = JSRegExp::cast(object);
- return AddEntry(object,
- HeapEntry::kRegExp,
- names_->GetName(re->Pattern()));
- } else if (object->IsJSObject()) {
+ return AddEntry(object, HeapEntry::kRegExp, names_->GetName(re.Pattern()));
+ } else if (object.IsJSObject()) {
const char* name = names_->GetName(
GetConstructorName(JSObject::cast(object)));
- if (object->IsJSGlobalObject()) {
+ if (object.IsJSGlobalObject()) {
auto it = objects_tags_.find(JSGlobalObject::cast(object));
if (it != objects_tags_.end()) {
name = names_->GetFormatted("%s / %s", name, it->second);
}
}
return AddEntry(object, HeapEntry::kObject, name);
- } else if (object->IsString()) {
+ } else if (object.IsString()) {
String string = String::cast(object);
- if (string->IsConsString()) {
+ if (string.IsConsString()) {
return AddEntry(object, HeapEntry::kConsString, "(concatenated string)");
- } else if (string->IsSlicedString()) {
+ } else if (string.IsSlicedString()) {
return AddEntry(object, HeapEntry::kSlicedString, "(sliced string)");
} else {
return AddEntry(object, HeapEntry::kString,
names_->GetName(String::cast(object)));
}
- } else if (object->IsSymbol()) {
- if (Symbol::cast(object)->is_private())
+ } else if (object.IsSymbol()) {
+ if (Symbol::cast(object).is_private())
return AddEntry(object, HeapEntry::kHidden, "private symbol");
else
return AddEntry(object, HeapEntry::kSymbol, "symbol");
- } else if (object->IsBigInt()) {
+ } else if (object.IsBigInt()) {
return AddEntry(object, HeapEntry::kBigInt, "bigint");
- } else if (object->IsCode()) {
+ } else if (object.IsCode()) {
return AddEntry(object, HeapEntry::kCode, "");
- } else if (object->IsSharedFunctionInfo()) {
- String name = SharedFunctionInfo::cast(object)->Name();
+ } else if (object.IsSharedFunctionInfo()) {
+ String name = SharedFunctionInfo::cast(object).Name();
return AddEntry(object, HeapEntry::kCode, names_->GetName(name));
- } else if (object->IsScript()) {
- Object name = Script::cast(object)->name();
- return AddEntry(
- object, HeapEntry::kCode,
- name->IsString() ? names_->GetName(String::cast(name)) : "");
- } else if (object->IsNativeContext()) {
+ } else if (object.IsScript()) {
+ Object name = Script::cast(object).name();
+ return AddEntry(object, HeapEntry::kCode,
+ name.IsString() ? names_->GetName(String::cast(name)) : "");
+ } else if (object.IsNativeContext()) {
return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
- } else if (object->IsContext()) {
+ } else if (object.IsContext()) {
return AddEntry(object, HeapEntry::kObject, "system / Context");
- } else if (object->IsFixedArray() || object->IsFixedDoubleArray() ||
- object->IsByteArray()) {
+ } else if (object.IsFixedArray() || object.IsFixedDoubleArray() ||
+ object.IsByteArray()) {
return AddEntry(object, HeapEntry::kArray, "");
- } else if (object->IsHeapNumber()) {
+ } else if (object.IsHeapNumber()) {
return AddEntry(object, HeapEntry::kHeapNumber, "number");
}
return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
@@ -602,7 +599,7 @@
HeapEntry* V8HeapExplorer::AddEntry(HeapObject object, HeapEntry::Type type,
const char* name) {
- return AddEntry(object->address(), type, name, object->Size());
+ return AddEntry(object.address(), type, name, object.Size());
}
HeapEntry* V8HeapExplorer::AddEntry(Address address,
@@ -621,9 +618,9 @@
}
const char* V8HeapExplorer::GetSystemEntryName(HeapObject object) {
- switch (object->map()->instance_type()) {
+ switch (object.map().instance_type()) {
case MAP_TYPE:
- switch (Map::cast(object)->instance_type()) {
+ switch (Map::cast(object).instance_type()) {
#define MAKE_STRING_MAP_CASE(instance_type, size, name, Name) \
case instance_type: return "system / Map (" #Name ")";
STRING_TYPE_LIST(MAKE_STRING_MAP_CASE)
@@ -659,7 +656,7 @@
: generator_(generator),
parent_obj_(parent_obj),
parent_start_(parent_obj_.RawMaybeWeakField(0)),
- parent_end_(parent_obj_.RawMaybeWeakField(parent_obj_->Size())),
+ parent_end_(parent_obj_.RawMaybeWeakField(parent_obj_.Size())),
parent_(parent),
next_index_(0) {}
void VisitPointers(HeapObject host, ObjectSlot start,
@@ -713,74 +710,74 @@
};
void V8HeapExplorer::ExtractReferences(HeapEntry* entry, HeapObject obj) {
- if (obj->IsJSGlobalProxy()) {
+ if (obj.IsJSGlobalProxy()) {
ExtractJSGlobalProxyReferences(entry, JSGlobalProxy::cast(obj));
- } else if (obj->IsJSArrayBuffer()) {
+ } else if (obj.IsJSArrayBuffer()) {
ExtractJSArrayBufferReferences(entry, JSArrayBuffer::cast(obj));
- } else if (obj->IsJSObject()) {
- if (obj->IsJSWeakSet()) {
+ } else if (obj.IsJSObject()) {
+ if (obj.IsJSWeakSet()) {
ExtractJSWeakCollectionReferences(entry, JSWeakSet::cast(obj));
- } else if (obj->IsJSWeakMap()) {
+ } else if (obj.IsJSWeakMap()) {
ExtractJSWeakCollectionReferences(entry, JSWeakMap::cast(obj));
- } else if (obj->IsJSSet()) {
+ } else if (obj.IsJSSet()) {
ExtractJSCollectionReferences(entry, JSSet::cast(obj));
- } else if (obj->IsJSMap()) {
+ } else if (obj.IsJSMap()) {
ExtractJSCollectionReferences(entry, JSMap::cast(obj));
- } else if (obj->IsJSPromise()) {
+ } else if (obj.IsJSPromise()) {
ExtractJSPromiseReferences(entry, JSPromise::cast(obj));
- } else if (obj->IsJSGeneratorObject()) {
+ } else if (obj.IsJSGeneratorObject()) {
ExtractJSGeneratorObjectReferences(entry, JSGeneratorObject::cast(obj));
}
ExtractJSObjectReferences(entry, JSObject::cast(obj));
- } else if (obj->IsString()) {
+ } else if (obj.IsString()) {
ExtractStringReferences(entry, String::cast(obj));
- } else if (obj->IsSymbol()) {
+ } else if (obj.IsSymbol()) {
ExtractSymbolReferences(entry, Symbol::cast(obj));
- } else if (obj->IsMap()) {
+ } else if (obj.IsMap()) {
ExtractMapReferences(entry, Map::cast(obj));
- } else if (obj->IsSharedFunctionInfo()) {
+ } else if (obj.IsSharedFunctionInfo()) {
ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
- } else if (obj->IsScript()) {
+ } else if (obj.IsScript()) {
ExtractScriptReferences(entry, Script::cast(obj));
- } else if (obj->IsAccessorInfo()) {
+ } else if (obj.IsAccessorInfo()) {
ExtractAccessorInfoReferences(entry, AccessorInfo::cast(obj));
- } else if (obj->IsAccessorPair()) {
+ } else if (obj.IsAccessorPair()) {
ExtractAccessorPairReferences(entry, AccessorPair::cast(obj));
- } else if (obj->IsCode()) {
+ } else if (obj.IsCode()) {
ExtractCodeReferences(entry, Code::cast(obj));
- } else if (obj->IsCell()) {
+ } else if (obj.IsCell()) {
ExtractCellReferences(entry, Cell::cast(obj));
- } else if (obj->IsFeedbackCell()) {
+ } else if (obj.IsFeedbackCell()) {
ExtractFeedbackCellReferences(entry, FeedbackCell::cast(obj));
- } else if (obj->IsPropertyCell()) {
+ } else if (obj.IsPropertyCell()) {
ExtractPropertyCellReferences(entry, PropertyCell::cast(obj));
- } else if (obj->IsAllocationSite()) {
+ } else if (obj.IsAllocationSite()) {
ExtractAllocationSiteReferences(entry, AllocationSite::cast(obj));
- } else if (obj->IsArrayBoilerplateDescription()) {
+ } else if (obj.IsArrayBoilerplateDescription()) {
ExtractArrayBoilerplateDescriptionReferences(
entry, ArrayBoilerplateDescription::cast(obj));
- } else if (obj->IsFeedbackVector()) {
+ } else if (obj.IsFeedbackVector()) {
ExtractFeedbackVectorReferences(entry, FeedbackVector::cast(obj));
- } else if (obj->IsDescriptorArray()) {
+ } else if (obj.IsDescriptorArray()) {
ExtractDescriptorArrayReferences(entry, DescriptorArray::cast(obj));
- } else if (obj->IsWeakFixedArray()) {
+ } else if (obj.IsWeakFixedArray()) {
ExtractWeakArrayReferences(WeakFixedArray::kHeaderSize, entry,
WeakFixedArray::cast(obj));
- } else if (obj->IsWeakArrayList()) {
+ } else if (obj.IsWeakArrayList()) {
ExtractWeakArrayReferences(WeakArrayList::kHeaderSize, entry,
WeakArrayList::cast(obj));
- } else if (obj->IsContext()) {
+ } else if (obj.IsContext()) {
ExtractContextReferences(entry, Context::cast(obj));
- } else if (obj->IsEphemeronHashTable()) {
+ } else if (obj.IsEphemeronHashTable()) {
ExtractEphemeronHashTableReferences(entry, EphemeronHashTable::cast(obj));
- } else if (obj->IsFixedArray()) {
+ } else if (obj.IsFixedArray()) {
ExtractFixedArrayReferences(entry, FixedArray::cast(obj));
}
}
void V8HeapExplorer::ExtractJSGlobalProxyReferences(HeapEntry* entry,
JSGlobalProxy proxy) {
- SetInternalReference(entry, "native_context", proxy->native_context(),
+ SetInternalReference(entry, "native_context", proxy.native_context(),
JSGlobalProxy::kNativeContextOffset);
}
@@ -794,118 +791,116 @@
PrototypeIterator iter(isolate, js_obj);
ReadOnlyRoots roots(isolate);
SetPropertyReference(entry, roots.proto_string(), iter.GetCurrent());
- if (obj->IsJSBoundFunction()) {
+ if (obj.IsJSBoundFunction()) {
JSBoundFunction js_fun = JSBoundFunction::cast(obj);
- TagObject(js_fun->bound_arguments(), "(bound arguments)");
- SetInternalReference(entry, "bindings", js_fun->bound_arguments(),
+ TagObject(js_fun.bound_arguments(), "(bound arguments)");
+ SetInternalReference(entry, "bindings", js_fun.bound_arguments(),
JSBoundFunction::kBoundArgumentsOffset);
- SetInternalReference(entry, "bound_this", js_fun->bound_this(),
+ SetInternalReference(entry, "bound_this", js_fun.bound_this(),
JSBoundFunction::kBoundThisOffset);
SetInternalReference(entry, "bound_function",
- js_fun->bound_target_function(),
+ js_fun.bound_target_function(),
JSBoundFunction::kBoundTargetFunctionOffset);
- FixedArray bindings = js_fun->bound_arguments();
- for (int i = 0; i < bindings->length(); i++) {
+ FixedArray bindings = js_fun.bound_arguments();
+ for (int i = 0; i < bindings.length(); i++) {
const char* reference_name = names_->GetFormatted("bound_argument_%d", i);
- SetNativeBindReference(entry, reference_name, bindings->get(i));
+ SetNativeBindReference(entry, reference_name, bindings.get(i));
}
- } else if (obj->IsJSFunction()) {
+ } else if (obj.IsJSFunction()) {
JSFunction js_fun = JSFunction::cast(js_obj);
- if (js_fun->has_prototype_slot()) {
- Object proto_or_map = js_fun->prototype_or_initial_map();
- if (!proto_or_map->IsTheHole(isolate)) {
- if (!proto_or_map->IsMap()) {
+ if (js_fun.has_prototype_slot()) {
+ Object proto_or_map = js_fun.prototype_or_initial_map();
+ if (!proto_or_map.IsTheHole(isolate)) {
+ if (!proto_or_map.IsMap()) {
SetPropertyReference(entry, roots.prototype_string(), proto_or_map,
nullptr,
JSFunction::kPrototypeOrInitialMapOffset);
} else {
SetPropertyReference(entry, roots.prototype_string(),
- js_fun->prototype());
+ js_fun.prototype());
SetInternalReference(entry, "initial_map", proto_or_map,
JSFunction::kPrototypeOrInitialMapOffset);
}
}
}
- SharedFunctionInfo shared_info = js_fun->shared();
- TagObject(js_fun->raw_feedback_cell(), "(function feedback cell)");
- SetInternalReference(entry, "feedback_cell", js_fun->raw_feedback_cell(),
+ SharedFunctionInfo shared_info = js_fun.shared();
+ TagObject(js_fun.raw_feedback_cell(), "(function feedback cell)");
+ SetInternalReference(entry, "feedback_cell", js_fun.raw_feedback_cell(),
JSFunction::kFeedbackCellOffset);
TagObject(shared_info, "(shared function info)");
SetInternalReference(entry, "shared", shared_info,
JSFunction::kSharedFunctionInfoOffset);
- TagObject(js_fun->context(), "(context)");
- SetInternalReference(entry, "context", js_fun->context(),
+ TagObject(js_fun.context(), "(context)");
+ SetInternalReference(entry, "context", js_fun.context(),
JSFunction::kContextOffset);
- SetInternalReference(entry, "code", js_fun->code(),
- JSFunction::kCodeOffset);
- } else if (obj->IsJSGlobalObject()) {
+ SetInternalReference(entry, "code", js_fun.code(), JSFunction::kCodeOffset);
+ } else if (obj.IsJSGlobalObject()) {
JSGlobalObject global_obj = JSGlobalObject::cast(obj);
- SetInternalReference(entry, "native_context", global_obj->native_context(),
+ SetInternalReference(entry, "native_context", global_obj.native_context(),
JSGlobalObject::kNativeContextOffset);
- SetInternalReference(entry, "global_proxy", global_obj->global_proxy(),
+ SetInternalReference(entry, "global_proxy", global_obj.global_proxy(),
JSGlobalObject::kGlobalProxyOffset);
STATIC_ASSERT(JSGlobalObject::kSize - JSObject::kHeaderSize ==
2 * kTaggedSize);
- } else if (obj->IsJSArrayBufferView()) {
+ } else if (obj.IsJSArrayBufferView()) {
JSArrayBufferView view = JSArrayBufferView::cast(obj);
- SetInternalReference(entry, "buffer", view->buffer(),
+ SetInternalReference(entry, "buffer", view.buffer(),
JSArrayBufferView::kBufferOffset);
}
- TagObject(js_obj->raw_properties_or_hash(), "(object properties)");
- SetInternalReference(entry, "properties", js_obj->raw_properties_or_hash(),
+ TagObject(js_obj.raw_properties_or_hash(), "(object properties)");
+ SetInternalReference(entry, "properties", js_obj.raw_properties_or_hash(),
JSObject::kPropertiesOrHashOffset);
- TagObject(js_obj->elements(), "(object elements)");
- SetInternalReference(entry, "elements", js_obj->elements(),
+ TagObject(js_obj.elements(), "(object elements)");
+ SetInternalReference(entry, "elements", js_obj.elements(),
JSObject::kElementsOffset);
}
void V8HeapExplorer::ExtractStringReferences(HeapEntry* entry, String string) {
- if (string->IsConsString()) {
+ if (string.IsConsString()) {
ConsString cs = ConsString::cast(string);
- SetInternalReference(entry, "first", cs->first(), ConsString::kFirstOffset);
- SetInternalReference(entry, "second", cs->second(),
+ SetInternalReference(entry, "first", cs.first(), ConsString::kFirstOffset);
+ SetInternalReference(entry, "second", cs.second(),
ConsString::kSecondOffset);
- } else if (string->IsSlicedString()) {
+ } else if (string.IsSlicedString()) {
SlicedString ss = SlicedString::cast(string);
- SetInternalReference(entry, "parent", ss->parent(),
+ SetInternalReference(entry, "parent", ss.parent(),
SlicedString::kParentOffset);
- } else if (string->IsThinString()) {
+ } else if (string.IsThinString()) {
ThinString ts = ThinString::cast(string);
- SetInternalReference(entry, "actual", ts->actual(),
+ SetInternalReference(entry, "actual", ts.actual(),
ThinString::kActualOffset);
}
}
void V8HeapExplorer::ExtractSymbolReferences(HeapEntry* entry, Symbol symbol) {
- SetInternalReference(entry, "name", symbol->name(), Symbol::kNameOffset);
+ SetInternalReference(entry, "name", symbol.name(), Symbol::kNameOffset);
}
void V8HeapExplorer::ExtractJSCollectionReferences(HeapEntry* entry,
JSCollection collection) {
- SetInternalReference(entry, "table", collection->table(),
+ SetInternalReference(entry, "table", collection.table(),
JSCollection::kTableOffset);
}
void V8HeapExplorer::ExtractJSWeakCollectionReferences(HeapEntry* entry,
JSWeakCollection obj) {
- SetInternalReference(entry, "table", obj->table(),
+ SetInternalReference(entry, "table", obj.table(),
JSWeakCollection::kTableOffset);
}
void V8HeapExplorer::ExtractEphemeronHashTableReferences(
HeapEntry* entry, EphemeronHashTable table) {
- for (int i = 0, capacity = table->Capacity(); i < capacity; ++i) {
+ for (int i = 0, capacity = table.Capacity(); i < capacity; ++i) {
int key_index = EphemeronHashTable::EntryToIndex(i) +
EphemeronHashTable::kEntryKeyIndex;
int value_index = EphemeronHashTable::EntryToValueIndex(i);
- Object key = table->get(key_index);
- Object value = table->get(value_index);
- SetWeakReference(entry, key_index, key,
- table->OffsetOfElementAt(key_index));
+ Object key = table.get(key_index);
+ Object value = table.get(value_index);
+ SetWeakReference(entry, key_index, key, table.OffsetOfElementAt(key_index));
SetWeakReference(entry, value_index, value,
- table->OffsetOfElementAt(value_index));
+ table.OffsetOfElementAt(value_index));
HeapEntry* key_entry = GetEntry(key);
HeapEntry* value_entry = GetEntry(value);
if (key_entry && value_entry) {
@@ -931,55 +926,54 @@
void V8HeapExplorer::ExtractContextReferences(HeapEntry* entry,
Context context) {
- if (!context->IsNativeContext() && context->is_declaration_context()) {
- ScopeInfo scope_info = context->scope_info();
+ if (!context.IsNativeContext() && context.is_declaration_context()) {
+ ScopeInfo scope_info = context.scope_info();
// Add context allocated locals.
- int context_locals = scope_info->ContextLocalCount();
+ int context_locals = scope_info.ContextLocalCount();
for (int i = 0; i < context_locals; ++i) {
- String local_name = scope_info->ContextLocalName(i);
+ String local_name = scope_info.ContextLocalName(i);
int idx = Context::MIN_CONTEXT_SLOTS + i;
- SetContextReference(entry, local_name, context->get(idx),
+ SetContextReference(entry, local_name, context.get(idx),
Context::OffsetOfElementAt(idx));
}
- if (scope_info->HasFunctionName()) {
- String name = String::cast(scope_info->FunctionName());
- int idx = scope_info->FunctionContextSlotIndex(name);
+ if (scope_info.HasFunctionName()) {
+ String name = String::cast(scope_info.FunctionName());
+ int idx = scope_info.FunctionContextSlotIndex(name);
if (idx >= 0) {
- SetContextReference(entry, name, context->get(idx),
+ SetContextReference(entry, name, context.get(idx),
Context::OffsetOfElementAt(idx));
}
}
}
SetInternalReference(
- entry, "scope_info", context->get(Context::SCOPE_INFO_INDEX),
+ entry, "scope_info", context.get(Context::SCOPE_INFO_INDEX),
FixedArray::OffsetOfElementAt(Context::SCOPE_INFO_INDEX));
- SetInternalReference(entry, "previous", context->get(Context::PREVIOUS_INDEX),
+ SetInternalReference(entry, "previous", context.get(Context::PREVIOUS_INDEX),
FixedArray::OffsetOfElementAt(Context::PREVIOUS_INDEX));
SetInternalReference(entry, "extension",
- context->get(Context::EXTENSION_INDEX),
+ context.get(Context::EXTENSION_INDEX),
FixedArray::OffsetOfElementAt(Context::EXTENSION_INDEX));
SetInternalReference(
- entry, "native_context", context->get(Context::NATIVE_CONTEXT_INDEX),
+ entry, "native_context", context.get(Context::NATIVE_CONTEXT_INDEX),
FixedArray::OffsetOfElementAt(Context::NATIVE_CONTEXT_INDEX));
- if (context->IsNativeContext()) {
- TagObject(context->normalized_map_cache(), "(context norm. map cache)");
- TagObject(context->embedder_data(), "(context data)");
+ if (context.IsNativeContext()) {
+ TagObject(context.normalized_map_cache(), "(context norm. map cache)");
+ TagObject(context.embedder_data(), "(context data)");
for (size_t i = 0; i < arraysize(native_context_names); i++) {
int index = native_context_names[i].index;
const char* name = native_context_names[i].name;
- SetInternalReference(entry, name, context->get(index),
+ SetInternalReference(entry, name, context.get(index),
FixedArray::OffsetOfElementAt(index));
}
SetWeakReference(
- entry, "optimized_code_list",
- context->get(Context::OPTIMIZED_CODE_LIST),
+ entry, "optimized_code_list", context.get(Context::OPTIMIZED_CODE_LIST),
FixedArray::OffsetOfElementAt(Context::OPTIMIZED_CODE_LIST));
SetWeakReference(
entry, "deoptimized_code_list",
- context->get(Context::DEOPTIMIZED_CODE_LIST),
+ context.get(Context::DEOPTIMIZED_CODE_LIST),
FixedArray::OffsetOfElementAt(Context::DEOPTIMIZED_CODE_LIST));
STATIC_ASSERT(Context::OPTIMIZED_CODE_LIST == Context::FIRST_WEAK_SLOT);
STATIC_ASSERT(Context::NEXT_CONTEXT_LINK + 1 ==
@@ -990,54 +984,54 @@
}
void V8HeapExplorer::ExtractMapReferences(HeapEntry* entry, Map map) {
- MaybeObject maybe_raw_transitions_or_prototype_info = map->raw_transitions();
+ MaybeObject maybe_raw_transitions_or_prototype_info = map.raw_transitions();
HeapObject raw_transitions_or_prototype_info;
if (maybe_raw_transitions_or_prototype_info->GetHeapObjectIfWeak(
&raw_transitions_or_prototype_info)) {
- DCHECK(raw_transitions_or_prototype_info->IsMap());
+ DCHECK(raw_transitions_or_prototype_info.IsMap());
SetWeakReference(entry, "transition", raw_transitions_or_prototype_info,
Map::kTransitionsOrPrototypeInfoOffset);
} else if (maybe_raw_transitions_or_prototype_info->GetHeapObjectIfStrong(
&raw_transitions_or_prototype_info)) {
- if (raw_transitions_or_prototype_info->IsTransitionArray()) {
+ if (raw_transitions_or_prototype_info.IsTransitionArray()) {
TransitionArray transitions =
TransitionArray::cast(raw_transitions_or_prototype_info);
- if (map->CanTransition() && transitions->HasPrototypeTransitions()) {
- TagObject(transitions->GetPrototypeTransitions(),
+ if (map.CanTransition() && transitions.HasPrototypeTransitions()) {
+ TagObject(transitions.GetPrototypeTransitions(),
"(prototype transitions)");
}
TagObject(transitions, "(transition array)");
SetInternalReference(entry, "transitions", transitions,
Map::kTransitionsOrPrototypeInfoOffset);
- } else if (raw_transitions_or_prototype_info->IsTuple3() ||
- raw_transitions_or_prototype_info->IsFixedArray()) {
+ } else if (raw_transitions_or_prototype_info.IsTuple3() ||
+ raw_transitions_or_prototype_info.IsFixedArray()) {
TagObject(raw_transitions_or_prototype_info, "(transition)");
SetInternalReference(entry, "transition",
raw_transitions_or_prototype_info,
Map::kTransitionsOrPrototypeInfoOffset);
- } else if (map->is_prototype_map()) {
+ } else if (map.is_prototype_map()) {
TagObject(raw_transitions_or_prototype_info, "prototype_info");
SetInternalReference(entry, "prototype_info",
raw_transitions_or_prototype_info,
Map::kTransitionsOrPrototypeInfoOffset);
}
}
- DescriptorArray descriptors = map->instance_descriptors();
+ DescriptorArray descriptors = map.instance_descriptors();
TagObject(descriptors, "(map descriptors)");
SetInternalReference(entry, "descriptors", descriptors,
Map::kInstanceDescriptorsOffset);
- SetInternalReference(entry, "prototype", map->prototype(),
+ SetInternalReference(entry, "prototype", map.prototype(),
Map::kPrototypeOffset);
if (FLAG_unbox_double_fields) {
- SetInternalReference(entry, "layout_descriptor", map->layout_descriptor(),
+ SetInternalReference(entry, "layout_descriptor", map.layout_descriptor(),
Map::kLayoutDescriptorOffset);
}
- Object constructor_or_backpointer = map->constructor_or_backpointer();
- if (constructor_or_backpointer->IsMap()) {
+ Object constructor_or_backpointer = map.constructor_or_backpointer();
+ if (constructor_or_backpointer.IsMap()) {
TagObject(constructor_or_backpointer, "(back pointer)");
SetInternalReference(entry, "back_pointer", constructor_or_backpointer,
Map::kConstructorOrBackPointerOffset);
- } else if (constructor_or_backpointer->IsFunctionTemplateInfo()) {
+ } else if (constructor_or_backpointer.IsFunctionTemplateInfo()) {
TagObject(constructor_or_backpointer, "(constructor function data)");
SetInternalReference(entry, "constructor_function_data",
constructor_or_backpointer,
@@ -1046,72 +1040,70 @@
SetInternalReference(entry, "constructor", constructor_or_backpointer,
Map::kConstructorOrBackPointerOffset);
}
- TagObject(map->dependent_code(), "(dependent code)");
- SetInternalReference(entry, "dependent_code", map->dependent_code(),
+ TagObject(map.dependent_code(), "(dependent code)");
+ SetInternalReference(entry, "dependent_code", map.dependent_code(),
Map::kDependentCodeOffset);
}
void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
HeapEntry* entry, SharedFunctionInfo shared) {
- String shared_name = shared->DebugName();
+ String shared_name = shared.DebugName();
const char* name = nullptr;
if (shared_name != ReadOnlyRoots(heap_).empty_string()) {
name = names_->GetName(shared_name);
- TagObject(shared->GetCode(), names_->GetFormatted("(code for %s)", name));
+ TagObject(shared.GetCode(), names_->GetFormatted("(code for %s)", name));
} else {
- TagObject(shared->GetCode(),
- names_->GetFormatted(
- "(%s code)", Code::Kind2String(shared->GetCode()->kind())));
+ TagObject(shared.GetCode(),
+ names_->GetFormatted("(%s code)",
+ Code::Kind2String(shared.GetCode().kind())));
}
- if (shared->name_or_scope_info()->IsScopeInfo()) {
- TagObject(shared->name_or_scope_info(), "(function scope info)");
+ if (shared.name_or_scope_info().IsScopeInfo()) {
+ TagObject(shared.name_or_scope_info(), "(function scope info)");
}
- SetInternalReference(entry, "name_or_scope_info",
- shared->name_or_scope_info(),
+ SetInternalReference(entry, "name_or_scope_info", shared.name_or_scope_info(),
SharedFunctionInfo::kNameOrScopeInfoOffset);
SetInternalReference(entry, "script_or_debug_info",
- shared->script_or_debug_info(),
+ shared.script_or_debug_info(),
SharedFunctionInfo::kScriptOrDebugInfoOffset);
- SetInternalReference(entry, "function_data", shared->function_data(),
+ SetInternalReference(entry, "function_data", shared.function_data(),
SharedFunctionInfo::kFunctionDataOffset);
SetInternalReference(
entry, "raw_outer_scope_info_or_feedback_metadata",
- shared->raw_outer_scope_info_or_feedback_metadata(),
+ shared.raw_outer_scope_info_or_feedback_metadata(),
SharedFunctionInfo::kOuterScopeInfoOrFeedbackMetadataOffset);
}
void V8HeapExplorer::ExtractScriptReferences(HeapEntry* entry, Script script) {
- SetInternalReference(entry, "source", script->source(),
- Script::kSourceOffset);
- SetInternalReference(entry, "name", script->name(), Script::kNameOffset);
- SetInternalReference(entry, "context_data", script->context_data(),
+ SetInternalReference(entry, "source", script.source(), Script::kSourceOffset);
+ SetInternalReference(entry, "name", script.name(), Script::kNameOffset);
+ SetInternalReference(entry, "context_data", script.context_data(),
Script::kContextOffset);
- TagObject(script->line_ends(), "(script line ends)");
- SetInternalReference(entry, "line_ends", script->line_ends(),
+ TagObject(script.line_ends(), "(script line ends)");
+ SetInternalReference(entry, "line_ends", script.line_ends(),
Script::kLineEndsOffset);
}
void V8HeapExplorer::ExtractAccessorInfoReferences(HeapEntry* entry,
AccessorInfo accessor_info) {
- SetInternalReference(entry, "name", accessor_info->name(),
+ SetInternalReference(entry, "name", accessor_info.name(),
AccessorInfo::kNameOffset);
SetInternalReference(entry, "expected_receiver_type",
- accessor_info->expected_receiver_type(),
+ accessor_info.expected_receiver_type(),
AccessorInfo::kExpectedReceiverTypeOffset);
- SetInternalReference(entry, "getter", accessor_info->getter(),
+ SetInternalReference(entry, "getter", accessor_info.getter(),
AccessorInfo::kGetterOffset);
- SetInternalReference(entry, "setter", accessor_info->setter(),
+ SetInternalReference(entry, "setter", accessor_info.setter(),
AccessorInfo::kSetterOffset);
- SetInternalReference(entry, "data", accessor_info->data(),
+ SetInternalReference(entry, "data", accessor_info.data(),
AccessorInfo::kDataOffset);
}
void V8HeapExplorer::ExtractAccessorPairReferences(HeapEntry* entry,
AccessorPair accessors) {
- SetInternalReference(entry, "getter", accessors->getter(),
+ SetInternalReference(entry, "getter", accessors.getter(),
AccessorPair::kGetterOffset);
- SetInternalReference(entry, "setter", accessors->setter(),
+ SetInternalReference(entry, "setter", accessors.setter(),
AccessorPair::kSetterOffset);
}
@@ -1120,54 +1112,53 @@
}
void V8HeapExplorer::ExtractCodeReferences(HeapEntry* entry, Code code) {
- TagObject(code->relocation_info(), "(code relocation info)");
- SetInternalReference(entry, "relocation_info", code->relocation_info(),
+ TagObject(code.relocation_info(), "(code relocation info)");
+ SetInternalReference(entry, "relocation_info", code.relocation_info(),
Code::kRelocationInfoOffset);
- TagObject(code->deoptimization_data(), "(code deopt data)");
- SetInternalReference(entry, "deoptimization_data",
- code->deoptimization_data(),
+ TagObject(code.deoptimization_data(), "(code deopt data)");
+ SetInternalReference(entry, "deoptimization_data", code.deoptimization_data(),
Code::kDeoptimizationDataOffset);
- TagObject(code->source_position_table(), "(source position table)");
+ TagObject(code.source_position_table(), "(source position table)");
SetInternalReference(entry, "source_position_table",
- code->source_position_table(),
+ code.source_position_table(),
Code::kSourcePositionTableOffset);
}
void V8HeapExplorer::ExtractCellReferences(HeapEntry* entry, Cell cell) {
- SetInternalReference(entry, "value", cell->value(), Cell::kValueOffset);
+ SetInternalReference(entry, "value", cell.value(), Cell::kValueOffset);
}
void V8HeapExplorer::ExtractFeedbackCellReferences(HeapEntry* entry,
FeedbackCell feedback_cell) {
TagObject(feedback_cell, "(feedback cell)");
- SetInternalReference(entry, "value", feedback_cell->value(),
+ SetInternalReference(entry, "value", feedback_cell.value(),
FeedbackCell::kValueOffset);
}
void V8HeapExplorer::ExtractPropertyCellReferences(HeapEntry* entry,
PropertyCell cell) {
- SetInternalReference(entry, "value", cell->value(),
+ SetInternalReference(entry, "value", cell.value(),
PropertyCell::kValueOffset);
- TagObject(cell->dependent_code(), "(dependent code)");
- SetInternalReference(entry, "dependent_code", cell->dependent_code(),
+ TagObject(cell.dependent_code(), "(dependent code)");
+ SetInternalReference(entry, "dependent_code", cell.dependent_code(),
PropertyCell::kDependentCodeOffset);
}
void V8HeapExplorer::ExtractAllocationSiteReferences(HeapEntry* entry,
AllocationSite site) {
SetInternalReference(entry, "transition_info",
- site->transition_info_or_boilerplate(),
+ site.transition_info_or_boilerplate(),
AllocationSite::kTransitionInfoOrBoilerplateOffset);
- SetInternalReference(entry, "nested_site", site->nested_site(),
+ SetInternalReference(entry, "nested_site", site.nested_site(),
AllocationSite::kNestedSiteOffset);
- TagObject(site->dependent_code(), "(dependent code)");
- SetInternalReference(entry, "dependent_code", site->dependent_code(),
+ TagObject(site.dependent_code(), "(dependent code)");
+ SetInternalReference(entry, "dependent_code", site.dependent_code(),
AllocationSite::kDependentCodeOffset);
}
void V8HeapExplorer::ExtractArrayBoilerplateDescriptionReferences(
HeapEntry* entry, ArrayBoilerplateDescription value) {
- SetInternalReference(entry, "constant_elements", value->constant_elements(),
+ SetInternalReference(entry, "constant_elements", value.constant_elements(),
ArrayBoilerplateDescription::kConstantElementsOffset);
}
@@ -1190,11 +1181,11 @@
void V8HeapExplorer::ExtractJSArrayBufferReferences(HeapEntry* entry,
JSArrayBuffer buffer) {
// Setup a reference to a native memory backing_store object.
- if (!buffer->backing_store()) return;
- size_t data_size = buffer->byte_length();
+ if (!buffer.backing_store()) return;
+ size_t data_size = buffer.byte_length();
JSArrayBufferDataEntryAllocator allocator(data_size, this);
HeapEntry* data_entry =
- generator_->FindOrAddEntry(buffer->backing_store(), &allocator);
+ generator_->FindOrAddEntry(buffer.backing_store(), &allocator);
entry->SetNamedReference(HeapGraphEdge::kInternal, "backing_store",
data_entry);
}
@@ -1202,34 +1193,34 @@
void V8HeapExplorer::ExtractJSPromiseReferences(HeapEntry* entry,
JSPromise promise) {
SetInternalReference(entry, "reactions_or_result",
- promise->reactions_or_result(),
+ promise.reactions_or_result(),
JSPromise::kReactionsOrResultOffset);
}
void V8HeapExplorer::ExtractJSGeneratorObjectReferences(
HeapEntry* entry, JSGeneratorObject generator) {
- SetInternalReference(entry, "function", generator->function(),
+ SetInternalReference(entry, "function", generator.function(),
JSGeneratorObject::kFunctionOffset);
- SetInternalReference(entry, "context", generator->context(),
+ SetInternalReference(entry, "context", generator.context(),
JSGeneratorObject::kContextOffset);
- SetInternalReference(entry, "receiver", generator->receiver(),
+ SetInternalReference(entry, "receiver", generator.receiver(),
JSGeneratorObject::kReceiverOffset);
SetInternalReference(entry, "parameters_and_registers",
- generator->parameters_and_registers(),
+ generator.parameters_and_registers(),
JSGeneratorObject::kParametersAndRegistersOffset);
}
void V8HeapExplorer::ExtractFixedArrayReferences(HeapEntry* entry,
FixedArray array) {
- for (int i = 0, l = array->length(); i < l; ++i) {
- DCHECK(!HasWeakHeapObjectTag(array->get(i)));
- SetInternalReference(entry, i, array->get(i), array->OffsetOfElementAt(i));
+ for (int i = 0, l = array.length(); i < l; ++i) {
+ DCHECK(!HasWeakHeapObjectTag(array.get(i)));
+ SetInternalReference(entry, i, array.get(i), array.OffsetOfElementAt(i));
}
}
void V8HeapExplorer::ExtractFeedbackVectorReferences(
HeapEntry* entry, FeedbackVector feedback_vector) {
- MaybeObject code = feedback_vector->optimized_code_weak_or_smi();
+ MaybeObject code = feedback_vector.optimized_code_weak_or_smi();
HeapObject code_heap_object;
if (code->GetHeapObjectIfWeak(&code_heap_object)) {
SetWeakReference(entry, "optimized code", code_heap_object,
@@ -1239,14 +1230,14 @@
void V8HeapExplorer::ExtractDescriptorArrayReferences(HeapEntry* entry,
DescriptorArray array) {
- SetInternalReference(entry, "enum_cache", array->enum_cache(),
+ SetInternalReference(entry, "enum_cache", array.enum_cache(),
DescriptorArray::kEnumCacheOffset);
- MaybeObjectSlot start = MaybeObjectSlot(array->GetDescriptorSlot(0));
+ MaybeObjectSlot start = MaybeObjectSlot(array.GetDescriptorSlot(0));
MaybeObjectSlot end = MaybeObjectSlot(
- array->GetDescriptorSlot(array->number_of_all_descriptors()));
+ array.GetDescriptorSlot(array.number_of_all_descriptors()));
for (int i = 0; start + i < end; ++i) {
MaybeObjectSlot slot = start + i;
- int offset = static_cast<int>(slot.address() - array->address());
+ int offset = static_cast<int>(slot.address() - array.address());
MaybeObject object = *slot;
HeapObject heap_object;
if (object->GetHeapObjectIfWeak(&heap_object)) {
@@ -1260,8 +1251,8 @@
template <typename T>
void V8HeapExplorer::ExtractWeakArrayReferences(int header_size,
HeapEntry* entry, T array) {
- for (int i = 0; i < array->length(); ++i) {
- MaybeObject object = array->Get(i);
+ for (int i = 0; i < array.length(); ++i) {
+ MaybeObject object = array.Get(i);
HeapObject heap_object;
if (object->GetHeapObjectIfWeak(&heap_object)) {
SetWeakReference(entry, i, heap_object, header_size + i * kTaggedSize);
@@ -1274,20 +1265,20 @@
void V8HeapExplorer::ExtractPropertyReferences(JSObject js_obj,
HeapEntry* entry) {
- Isolate* isolate = js_obj->GetIsolate();
- if (js_obj->HasFastProperties()) {
- DescriptorArray descs = js_obj->map()->instance_descriptors();
- int real_size = js_obj->map()->NumberOfOwnDescriptors();
+ Isolate* isolate = js_obj.GetIsolate();
+ if (js_obj.HasFastProperties()) {
+ DescriptorArray descs = js_obj.map().instance_descriptors();
+ int real_size = js_obj.map().NumberOfOwnDescriptors();
for (int i = 0; i < real_size; i++) {
- PropertyDetails details = descs->GetDetails(i);
+ PropertyDetails details = descs.GetDetails(i);
switch (details.location()) {
case kField: {
Representation r = details.representation();
if (r.IsSmi() || r.IsDouble()) break;
- Name k = descs->GetKey(i);
- FieldIndex field_index = FieldIndex::ForDescriptor(js_obj->map(), i);
- Object value = js_obj->RawFastPropertyAt(field_index);
+ Name k = descs.GetKey(i);
+ FieldIndex field_index = FieldIndex::ForDescriptor(js_obj.map(), i);
+ Object value = js_obj.RawFastPropertyAt(field_index);
int field_offset =
field_index.is_inobject() ? field_index.offset() : -1;
@@ -1296,35 +1287,34 @@
break;
}
case kDescriptor:
- SetDataOrAccessorPropertyReference(details.kind(), entry,
- descs->GetKey(i),
- descs->GetStrongValue(i));
+ SetDataOrAccessorPropertyReference(
+ details.kind(), entry, descs.GetKey(i), descs.GetStrongValue(i));
break;
}
}
- } else if (js_obj->IsJSGlobalObject()) {
+ } else if (js_obj.IsJSGlobalObject()) {
// We assume that global objects can only have slow properties.
GlobalDictionary dictionary =
- JSGlobalObject::cast(js_obj)->global_dictionary();
- int length = dictionary->Capacity();
+ JSGlobalObject::cast(js_obj).global_dictionary();
+ int length = dictionary.Capacity();
ReadOnlyRoots roots(isolate);
for (int i = 0; i < length; ++i) {
- if (!dictionary->IsKey(roots, dictionary->KeyAt(i))) continue;
- PropertyCell cell = dictionary->CellAt(i);
- Name name = cell->name();
- Object value = cell->value();
- PropertyDetails details = cell->property_details();
+ if (!dictionary.IsKey(roots, dictionary.KeyAt(i))) continue;
+ PropertyCell cell = dictionary.CellAt(i);
+ Name name = cell.name();
+ Object value = cell.value();
+ PropertyDetails details = cell.property_details();
SetDataOrAccessorPropertyReference(details.kind(), entry, name, value);
}
} else {
- NameDictionary dictionary = js_obj->property_dictionary();
- int length = dictionary->Capacity();
+ NameDictionary dictionary = js_obj.property_dictionary();
+ int length = dictionary.Capacity();
ReadOnlyRoots roots(isolate);
for (int i = 0; i < length; ++i) {
- Object k = dictionary->KeyAt(i);
- if (!dictionary->IsKey(roots, k)) continue;
- Object value = dictionary->ValueAt(i);
- PropertyDetails details = dictionary->DetailsAt(i);
+ Object k = dictionary.KeyAt(i);
+ if (!dictionary.IsKey(roots, k)) continue;
+ Object value = dictionary.ValueAt(i);
+ PropertyDetails details = dictionary.DetailsAt(i);
SetDataOrAccessorPropertyReference(details.kind(), entry, Name::cast(k),
value);
}
@@ -1334,56 +1324,55 @@
void V8HeapExplorer::ExtractAccessorPairProperty(HeapEntry* entry, Name key,
Object callback_obj,
int field_offset) {
- if (!callback_obj->IsAccessorPair()) return;
+ if (!callback_obj.IsAccessorPair()) return;
AccessorPair accessors = AccessorPair::cast(callback_obj);
SetPropertyReference(entry, key, accessors, nullptr, field_offset);
- Object getter = accessors->getter();
- if (!getter->IsOddball()) {
+ Object getter = accessors.getter();
+ if (!getter.IsOddball()) {
SetPropertyReference(entry, key, getter, "get %s");
}
- Object setter = accessors->setter();
- if (!setter->IsOddball()) {
+ Object setter = accessors.setter();
+ if (!setter.IsOddball()) {
SetPropertyReference(entry, key, setter, "set %s");
}
}
void V8HeapExplorer::ExtractElementReferences(JSObject js_obj,
HeapEntry* entry) {
- ReadOnlyRoots roots = js_obj->GetReadOnlyRoots();
- if (js_obj->HasObjectElements()) {
- FixedArray elements = FixedArray::cast(js_obj->elements());
- int length = js_obj->IsJSArray()
- ? Smi::ToInt(JSArray::cast(js_obj)->length())
- : elements->length();
+ ReadOnlyRoots roots = js_obj.GetReadOnlyRoots();
+ if (js_obj.HasObjectElements()) {
+ FixedArray elements = FixedArray::cast(js_obj.elements());
+ int length = js_obj.IsJSArray() ? Smi::ToInt(JSArray::cast(js_obj).length())
+ : elements.length();
for (int i = 0; i < length; ++i) {
- if (!elements->get(i)->IsTheHole(roots)) {
- SetElementReference(entry, i, elements->get(i));
+ if (!elements.get(i).IsTheHole(roots)) {
+ SetElementReference(entry, i, elements.get(i));
}
}
- } else if (js_obj->HasDictionaryElements()) {
- NumberDictionary dictionary = js_obj->element_dictionary();
- int length = dictionary->Capacity();
+ } else if (js_obj.HasDictionaryElements()) {
+ NumberDictionary dictionary = js_obj.element_dictionary();
+ int length = dictionary.Capacity();
for (int i = 0; i < length; ++i) {
- Object k = dictionary->KeyAt(i);
- if (!dictionary->IsKey(roots, k)) continue;
- DCHECK(k->IsNumber());
- uint32_t index = static_cast<uint32_t>(k->Number());
- SetElementReference(entry, index, dictionary->ValueAt(i));
+ Object k = dictionary.KeyAt(i);
+ if (!dictionary.IsKey(roots, k)) continue;
+ DCHECK(k.IsNumber());
+ uint32_t index = static_cast<uint32_t>(k.Number());
+ SetElementReference(entry, index, dictionary.ValueAt(i));
}
}
}
void V8HeapExplorer::ExtractInternalReferences(JSObject js_obj,
HeapEntry* entry) {
- int length = js_obj->GetEmbedderFieldCount();
+ int length = js_obj.GetEmbedderFieldCount();
for (int i = 0; i < length; ++i) {
- Object o = js_obj->GetEmbedderField(i);
- SetInternalReference(entry, i, o, js_obj->GetEmbedderFieldOffset(i));
+ Object o = js_obj.GetEmbedderField(i);
+ SetInternalReference(entry, i, o, js_obj.GetEmbedderFieldOffset(i));
}
}
JSFunction V8HeapExplorer::GetConstructor(JSReceiver receiver) {
- Isolate* isolate = receiver->GetIsolate();
+ Isolate* isolate = receiver.GetIsolate();
DisallowHeapAllocation no_gc;
HandleScope scope(isolate);
MaybeHandle<JSFunction> maybe_constructor =
@@ -1395,17 +1384,17 @@
}
String V8HeapExplorer::GetConstructorName(JSObject object) {
- Isolate* isolate = object->GetIsolate();
- if (object->IsJSFunction()) return ReadOnlyRoots(isolate).closure_string();
+ Isolate* isolate = object.GetIsolate();
+ if (object.IsJSFunction()) return ReadOnlyRoots(isolate).closure_string();
DisallowHeapAllocation no_gc;
HandleScope scope(isolate);
return *JSReceiver::GetConstructorName(handle(object, isolate));
}
HeapEntry* V8HeapExplorer::GetEntry(Object obj) {
- return obj->IsHeapObject() ? generator_->FindOrAddEntry(
- reinterpret_cast<void*>(obj.ptr()), this)
- : nullptr;
+ return obj.IsHeapObject() ? generator_->FindOrAddEntry(
+ reinterpret_cast<void*>(obj.ptr()), this)
+ : nullptr;
}
class RootsReferencesExtractor : public RootVisitor {
@@ -1463,7 +1452,7 @@
obj = iterator.Next(), progress_->ProgressStep()) {
if (interrupted) continue;
- size_t max_pointer = obj->Size() / kTaggedSize;
+ size_t max_pointer = obj.Size() / kTaggedSize;
if (max_pointer > visited_fields_.size()) {
// Clear the current bits.
std::vector<bool>().swap(visited_fields_);
@@ -1473,11 +1462,11 @@
HeapEntry* entry = GetEntry(obj);
ExtractReferences(entry, obj);
- SetInternalReference(entry, "map", obj->map(), HeapObject::kMapOffset);
+ SetInternalReference(entry, "map", obj.map(), HeapObject::kMapOffset);
// Extract unvisited fields as hidden references and restore tags
// of visited fields.
IndexedReferencesExtractor refs_extractor(this, obj, entry);
- obj->Iterate(&refs_extractor);
+ obj.Iterate(&refs_extractor);
// Ensure visited_fields_ doesn't leak to the next object.
for (size_t i = 0; i < max_pointer; ++i) {
@@ -1496,7 +1485,7 @@
bool V8HeapExplorer::IsEssentialObject(Object object) {
ReadOnlyRoots roots(heap_);
- return object->IsHeapObject() && !object->IsOddball() &&
+ return object.IsHeapObject() && !object.IsOddball() &&
object != roots.empty_byte_array() &&
object != roots.empty_fixed_array() &&
object != roots.empty_weak_fixed_array() &&
@@ -1511,13 +1500,13 @@
bool V8HeapExplorer::IsEssentialHiddenReference(Object parent,
int field_offset) {
- if (parent->IsAllocationSite() &&
+ if (parent.IsAllocationSite() &&
field_offset == AllocationSite::kWeakNextOffset)
return false;
- if (parent->IsCodeDataContainer() &&
+ if (parent.IsCodeDataContainer() &&
field_offset == CodeDataContainer::kNextCodeLinkOffset)
return false;
- if (parent->IsContext() &&
+ if (parent.IsContext() &&
field_offset == Context::OffsetOfElementAt(Context::NEXT_CONTEXT_LINK))
return false;
return true;
@@ -1634,15 +1623,15 @@
HeapEntry* child_entry = GetEntry(child_obj);
if (child_entry == nullptr) return;
HeapGraphEdge::Type type =
- reference_name->IsSymbol() || String::cast(reference_name)->length() > 0
+ reference_name.IsSymbol() || String::cast(reference_name).length() > 0
? HeapGraphEdge::kProperty
: HeapGraphEdge::kInternal;
const char* name =
- name_format_string != nullptr && reference_name->IsString()
+ name_format_string != nullptr && reference_name.IsString()
? names_->GetFormatted(
name_format_string,
String::cast(reference_name)
- ->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL)
+ .ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL)
.get())
: names_->GetName(reference_name);
@@ -1689,10 +1678,10 @@
// Add a shortcut to JS global object reference at snapshot root.
// That allows the user to easily find global objects. They are
// also used as starting points in distance calculations.
- if (is_weak || !child_obj->IsNativeContext()) return;
+ if (is_weak || !child_obj.IsNativeContext()) return;
- JSGlobalObject global = Context::cast(child_obj)->global_object();
- if (!global->IsJSGlobalObject()) return;
+ JSGlobalObject global = Context::cast(child_obj).global_object();
+ if (!global.IsJSGlobalObject()) return;
if (!user_roots_.insert(global).second) return;
@@ -1727,13 +1716,13 @@
void VisitRootPointers(Root root, const char* description,
FullObjectSlot start, FullObjectSlot end) override {
for (FullObjectSlot p = start; p < end; ++p) {
- if (!(*p)->IsNativeContext()) continue;
- JSObject proxy = Context::cast(*p)->global_proxy();
- if (!proxy->IsJSGlobalProxy()) continue;
- Object global = proxy->map()->prototype();
- if (!global->IsJSGlobalObject()) continue;
+ if (!(*p).IsNativeContext()) continue;
+ JSObject proxy = Context::cast(*p).global_proxy();
+ if (!proxy.IsJSGlobalProxy()) continue;
+ Object global = proxy.map().prototype();
+ if (!global.IsJSGlobalObject()) continue;
objects_.push_back(Handle<JSGlobalObject>(JSGlobalObject::cast(global),
- proxy->GetIsolate()));
+ proxy.GetIsolate()));
}
}
int count() const { return static_cast<int>(objects_.size()); }
@@ -1891,7 +1880,7 @@
EmbedderGraphImpl::V8NodeImpl* v8_node =
static_cast<EmbedderGraphImpl::V8NodeImpl*>(node);
Object object = v8_node->GetObject();
- if (object->IsSmi()) return nullptr;
+ if (object.IsSmi()) return nullptr;
return generator_->FindEntry(
reinterpret_cast<void*>(Object::cast(object).ptr()));
}
diff --git a/src/profiler/profile-generator.cc b/src/profiler/profile-generator.cc
index 354bd2ff..e869f65 100644
--- a/src/profiler/profile-generator.cc
+++ b/src/profiler/profile-generator.cc
@@ -181,12 +181,12 @@
}
void CodeEntry::FillFunctionInfo(SharedFunctionInfo shared) {
- if (!shared->script()->IsScript()) return;
- Script script = Script::cast(shared->script());
- set_script_id(script->id());
- set_position(shared->StartPosition());
- if (shared->optimization_disabled()) {
- set_bailout_reason(GetBailoutReason(shared->disable_optimization_reason()));
+ if (!shared.script().IsScript()) return;
+ Script script = Script::cast(shared.script());
+ set_script_id(script.id());
+ set_position(shared.StartPosition());
+ if (shared.optimization_disabled()) {
+ set_bailout_reason(GetBailoutReason(shared.disable_optimization_reason()));
}
}
diff --git a/src/profiler/profiler-listener.cc b/src/profiler/profiler-listener.cc
index 9cfee6a..7ecebde 100644
--- a/src/profiler/profiler-listener.cc
+++ b/src/profiler/profiler-listener.cc
@@ -40,12 +40,12 @@
AbstractCode code, const char* name) {
CodeEventsContainer evt_rec(CodeEventRecord::CODE_CREATION);
CodeCreateEventRecord* rec = &evt_rec.CodeCreateEventRecord_;
- rec->instruction_start = code->InstructionStart();
+ rec->instruction_start = code.InstructionStart();
rec->entry = new CodeEntry(tag, GetName(name), CodeEntry::kEmptyResourceName,
CpuProfileNode::kNoLineNumberInfo,
CpuProfileNode::kNoColumnNumberInfo, nullptr,
- code->InstructionStart());
- rec->instruction_size = code->InstructionSize();
+ code.InstructionStart());
+ rec->instruction_size = code.InstructionSize();
DispatchCodeEvent(evt_rec);
}
@@ -53,12 +53,12 @@
AbstractCode code, Name name) {
CodeEventsContainer evt_rec(CodeEventRecord::CODE_CREATION);
CodeCreateEventRecord* rec = &evt_rec.CodeCreateEventRecord_;
- rec->instruction_start = code->InstructionStart();
+ rec->instruction_start = code.InstructionStart();
rec->entry = new CodeEntry(tag, GetName(name), CodeEntry::kEmptyResourceName,
CpuProfileNode::kNoLineNumberInfo,
CpuProfileNode::kNoColumnNumberInfo, nullptr,
- code->InstructionStart());
- rec->instruction_size = code->InstructionSize();
+ code.InstructionStart());
+ rec->instruction_size = code.InstructionSize();
DispatchCodeEvent(evt_rec);
}
@@ -68,15 +68,15 @@
Name script_name) {
CodeEventsContainer evt_rec(CodeEventRecord::CODE_CREATION);
CodeCreateEventRecord* rec = &evt_rec.CodeCreateEventRecord_;
- rec->instruction_start = code->InstructionStart();
- rec->entry = new CodeEntry(tag, GetName(shared->DebugName()),
+ rec->instruction_start = code.InstructionStart();
+ rec->entry = new CodeEntry(tag, GetName(shared.DebugName()),
GetName(InferScriptName(script_name, shared)),
CpuProfileNode::kNoLineNumberInfo,
CpuProfileNode::kNoColumnNumberInfo, nullptr,
- code->InstructionStart());
- DCHECK(!code->IsCode());
+ code.InstructionStart());
+ DCHECK(!code.IsCode());
rec->entry->FillFunctionInfo(shared);
- rec->instruction_size = code->InstructionSize();
+ rec->instruction_size = code.InstructionSize();
DispatchCodeEvent(evt_rec);
}
@@ -101,15 +101,15 @@
Name script_name, int line, int column) {
CodeEventsContainer evt_rec(CodeEventRecord::CODE_CREATION);
CodeCreateEventRecord* rec = &evt_rec.CodeCreateEventRecord_;
- rec->instruction_start = abstract_code->InstructionStart();
+ rec->instruction_start = abstract_code.InstructionStart();
std::unique_ptr<SourcePositionTable> line_table;
std::unordered_map<int, std::vector<CodeEntryAndLineNumber>> inline_stacks;
std::unordered_set<std::unique_ptr<CodeEntry>, CodeEntry::Hasher,
CodeEntry::Equals>
cached_inline_entries;
bool is_shared_cross_origin = false;
- if (shared->script()->IsScript()) {
- Script script = Script::cast(shared->script());
+ if (shared.script().IsScript()) {
+ Script script = Script::cast(shared.script());
line_table.reset(new SourcePositionTable());
HandleScope scope(isolate_);
@@ -120,17 +120,17 @@
// profiler as is stored on the code object, except that we transform source
// positions to line numbers here, because we only care about attributing
// ticks to a given line.
- for (SourcePositionTableIterator it(abstract_code->source_position_table());
+ for (SourcePositionTableIterator it(abstract_code.source_position_table());
!it.done(); it.Advance()) {
int position = it.source_position().ScriptOffset();
int inlining_id = it.source_position().InliningId();
if (inlining_id == SourcePosition::kNotInlined) {
- int line_number = script->GetLineNumber(position) + 1;
+ int line_number = script.GetLineNumber(position) + 1;
line_table->SetPosition(it.code_offset(), line_number, inlining_id);
} else {
- DCHECK(abstract_code->IsCode());
- Code code = abstract_code->GetCode();
+ DCHECK(abstract_code.IsCode());
+ Code code = abstract_code.GetCode();
std::vector<SourcePositionInfo> stack =
it.source_position().InliningStack(handle(code, isolate_));
DCHECK(!stack.empty());
@@ -151,7 +151,7 @@
1;
const char* resource_name =
- (pos_info.script->name()->IsName())
+ (pos_info.script->name().IsName())
? GetName(Name::cast(pos_info.script->name()))
: CodeEntry::kEmptyResourceName;
@@ -169,7 +169,7 @@
base::make_unique<CodeEntry>(
tag, GetFunctionName(*pos_info.shared), resource_name,
start_pos_info.line + 1, start_pos_info.column + 1, nullptr,
- code->InstructionStart(), inline_is_shared_cross_origin);
+ code.InstructionStart(), inline_is_shared_cross_origin);
inline_entry->FillFunctionInfo(*pos_info.shared);
// Create a canonical CodeEntry for each inlined frame and then re-use
@@ -188,7 +188,7 @@
rec->entry =
new CodeEntry(tag, GetFunctionName(shared),
GetName(InferScriptName(script_name, shared)), line, column,
- std::move(line_table), abstract_code->InstructionStart(),
+ std::move(line_table), abstract_code.InstructionStart(),
is_shared_cross_origin);
if (!inline_stacks.empty()) {
rec->entry->SetInlineStacks(std::move(cached_inline_entries),
@@ -196,7 +196,7 @@
}
rec->entry->FillFunctionInfo(shared);
- rec->instruction_size = abstract_code->InstructionSize();
+ rec->instruction_size = abstract_code.InstructionSize();
DispatchCodeEvent(evt_rec);
}
@@ -217,8 +217,8 @@
void ProfilerListener::CodeMoveEvent(AbstractCode from, AbstractCode to) {
CodeEventsContainer evt_rec(CodeEventRecord::CODE_MOVE);
CodeMoveEventRecord* rec = &evt_rec.CodeMoveEventRecord_;
- rec->from_instruction_start = from->InstructionStart();
- rec->to_instruction_start = to->InstructionStart();
+ rec->from_instruction_start = from.InstructionStart();
+ rec->to_instruction_start = to.InstructionStart();
DispatchCodeEvent(evt_rec);
}
@@ -226,8 +226,8 @@
SharedFunctionInfo shared) {
CodeEventsContainer evt_rec(CodeEventRecord::CODE_DISABLE_OPT);
CodeDisableOptEventRecord* rec = &evt_rec.CodeDisableOptEventRecord_;
- rec->instruction_start = code->InstructionStart();
- rec->bailout_reason = GetBailoutReason(shared->disable_optimization_reason());
+ rec->instruction_start = code.InstructionStart();
+ rec->bailout_reason = GetBailoutReason(shared.disable_optimization_reason());
DispatchCodeEvent(evt_rec);
}
@@ -236,7 +236,7 @@
CodeEventsContainer evt_rec(CodeEventRecord::CODE_DEOPT);
CodeDeoptEventRecord* rec = &evt_rec.CodeDeoptEventRecord_;
Deoptimizer::DeoptInfo info = Deoptimizer::GetDeoptInfo(code, pc);
- rec->instruction_start = code->InstructionStart();
+ rec->instruction_start = code.InstructionStart();
rec->deopt_reason = DeoptimizeReasonToString(info.deopt_reason);
rec->deopt_id = info.deopt_id;
rec->pc = pc;
@@ -261,12 +261,12 @@
void ProfilerListener::RegExpCodeCreateEvent(AbstractCode code, String source) {
CodeEventsContainer evt_rec(CodeEventRecord::CODE_CREATION);
CodeCreateEventRecord* rec = &evt_rec.CodeCreateEventRecord_;
- rec->instruction_start = code->InstructionStart();
+ rec->instruction_start = code.InstructionStart();
rec->entry = new CodeEntry(
CodeEventListener::REG_EXP_TAG, GetConsName("RegExp: ", source),
CodeEntry::kEmptyResourceName, CpuProfileNode::kNoLineNumberInfo,
- CpuProfileNode::kNoColumnNumberInfo, nullptr, code->InstructionStart());
- rec->instruction_size = code->InstructionSize();
+ CpuProfileNode::kNoColumnNumberInfo, nullptr, code.InstructionStart());
+ rec->instruction_size = code.InstructionSize();
DispatchCodeEvent(evt_rec);
}
@@ -281,10 +281,10 @@
}
Name ProfilerListener::InferScriptName(Name name, SharedFunctionInfo info) {
- if (name->IsString() && String::cast(name)->length()) return name;
- if (!info->script()->IsScript()) return name;
- Object source_url = Script::cast(info->script())->source_url();
- return source_url->IsName() ? Name::cast(source_url) : name;
+ if (name.IsString() && String::cast(name).length()) return name;
+ if (!info.script().IsScript()) return name;
+ Object source_url = Script::cast(info.script()).source_url();
+ return source_url.IsName() ? Name::cast(source_url) : name;
}
const char* ProfilerListener::GetFunctionName(SharedFunctionInfo shared) {
diff --git a/src/profiler/sampling-heap-profiler.cc b/src/profiler/sampling-heap-profiler.cc
index 2b22e67..de19d39 100644
--- a/src/profiler/sampling-heap-profiler.cc
+++ b/src/profiler/sampling-heap-profiler.cc
@@ -145,8 +145,8 @@
// closure on the stack. Skip over any such frames (they'll be
// in the top frames of the stack). The allocations made in this
// sensitive moment belong to the formerly optimized frame anyway.
- if (frame->unchecked_function()->IsJSFunction()) {
- SharedFunctionInfo shared = frame->function()->shared();
+ if (frame->unchecked_function().IsJSFunction()) {
+ SharedFunctionInfo shared = frame->function().shared();
stack.push_back(shared);
frames_captured++;
} else {
@@ -190,13 +190,13 @@
// the first element in the list.
for (auto it = stack.rbegin(); it != stack.rend(); ++it) {
SharedFunctionInfo shared = *it;
- const char* name = this->names()->GetName(shared->DebugName());
+ const char* name = this->names()->GetName(shared.DebugName());
int script_id = v8::UnboundScript::kNoScriptId;
- if (shared->script()->IsScript()) {
- Script script = Script::cast(shared->script());
- script_id = script->id();
+ if (shared.script().IsScript()) {
+ Script script = Script::cast(shared.script());
+ script_id = script.id();
}
- node = FindOrAddChildNode(node, name, script_id, shared->StartPosition());
+ node = FindOrAddChildNode(node, name, script_id, shared.StartPosition());
}
if (found_arguments_marker_frames) {
@@ -226,7 +226,7 @@
const_cast<std::map<int, Handle<Script>>&>(scripts);
Handle<Script> script = non_const_scripts[node->script_id_];
if (!script.is_null()) {
- if (script->name()->IsName()) {
+ if (script->name().IsName()) {
Name name = Name::cast(script->name());
script_name = ToApiHandle<v8::String>(
isolate_->factory()->InternalizeUtf8String(names_->GetName(name)));
@@ -269,7 +269,7 @@
Script::Iterator iterator(isolate_);
for (Script script = iterator.Next(); !script.is_null();
script = iterator.Next()) {
- scripts[script->id()] = handle(script, isolate_);
+ scripts[script.id()] = handle(script, isolate_);
}
}
auto profile = new v8::internal::AllocationProfile();
diff --git a/src/profiler/strings-storage.cc b/src/profiler/strings-storage.cc
index 27c108e..532bc37 100644
--- a/src/profiler/strings-storage.cc
+++ b/src/profiler/strings-storage.cc
@@ -70,14 +70,14 @@
}
const char* StringsStorage::GetName(Name name) {
- if (name->IsString()) {
+ if (name.IsString()) {
String str = String::cast(name);
- int length = Min(FLAG_heap_snapshot_string_limit, str->length());
+ int length = Min(FLAG_heap_snapshot_string_limit, str.length());
int actual_length = 0;
- std::unique_ptr<char[]> data = str->ToCString(
+ std::unique_ptr<char[]> data = str.ToCString(
DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL, 0, length, &actual_length);
return AddOrDisposeString(data.release(), actual_length);
- } else if (name->IsSymbol()) {
+ } else if (name.IsSymbol()) {
return "<symbol>";
}
return "";
@@ -88,11 +88,11 @@
}
const char* StringsStorage::GetConsName(const char* prefix, Name name) {
- if (name->IsString()) {
+ if (name.IsString()) {
String str = String::cast(name);
- int length = Min(FLAG_heap_snapshot_string_limit, str->length());
+ int length = Min(FLAG_heap_snapshot_string_limit, str.length());
int actual_length = 0;
- std::unique_ptr<char[]> data = str->ToCString(
+ std::unique_ptr<char[]> data = str.ToCString(
DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL, 0, length, &actual_length);
int cons_length = actual_length + static_cast<int>(strlen(prefix)) + 1;
@@ -100,7 +100,7 @@
snprintf(cons_result, cons_length, "%s%s", prefix, data.get());
return AddOrDisposeString(cons_result, cons_length);
- } else if (name->IsSymbol()) {
+ } else if (name.IsSymbol()) {
return "<symbol>";
}
return "";
diff --git a/src/regexp/interpreter-irregexp.cc b/src/regexp/interpreter-irregexp.cc
index b0337aa..89913bc 100644
--- a/src/regexp/interpreter-irregexp.cc
+++ b/src/regexp/interpreter-irregexp.cc
@@ -176,7 +176,7 @@
result = isolate->stack_guard()->HandleInterrupts();
}
- if (result->IsException(isolate)) {
+ if (result.IsException(isolate)) {
return IrregexpInterpreter::EXCEPTION;
}
diff --git a/src/regexp/jsregexp.cc b/src/regexp/jsregexp.cc
index fca5d8d..b4a729d 100644
--- a/src/regexp/jsregexp.cc
+++ b/src/regexp/jsregexp.cc
@@ -176,7 +176,7 @@
if (!has_been_compiled) {
IrregexpInitialize(isolate, re, pattern, flags, parse_result.capture_count);
}
- DCHECK(re->data()->IsFixedArray());
+ DCHECK(re->data().IsFixedArray());
// Compilation succeeded so the data is set on the regexp
// and we can store it in the cache.
Handle<FixedArray> data(FixedArray::cast(re->data()), isolate);
@@ -230,8 +230,8 @@
DisallowHeapAllocation no_gc; // ensure vectors stay valid
String needle = String::cast(regexp->DataAt(JSRegExp::kAtomPatternIndex));
- int needle_len = needle->length();
- DCHECK(needle->IsFlat());
+ int needle_len = needle.length();
+ DCHECK(needle.IsFlat());
DCHECK_LT(0, needle_len);
if (index + needle_len > subject->length()) {
@@ -239,7 +239,7 @@
}
for (int i = 0; i < output_size; i += 2) {
- String::FlatContent needle_content = needle->GetFlatContent(no_gc);
+ String::FlatContent needle_content = needle.GetFlatContent(no_gc);
String::FlatContent subject_content = subject->GetFlatContent(no_gc);
DCHECK(needle_content.IsFlat());
DCHECK(subject_content.IsFlat());
@@ -300,8 +300,8 @@
bool is_one_byte) {
Object compiled_code = re->DataAt(JSRegExp::code_index(is_one_byte));
if (compiled_code != Smi::FromInt(JSRegExp::kUninitializedValue)) {
- DCHECK(FLAG_regexp_interpret_all ? compiled_code->IsByteArray()
- : compiled_code->IsCode());
+ DCHECK(FLAG_regexp_interpret_all ? compiled_code.IsByteArray()
+ : compiled_code.IsCode());
return true;
}
return CompileIrregexp(isolate, re, sample_subject, is_one_byte);
@@ -317,7 +317,7 @@
Object entry = re->DataAt(JSRegExp::code_index(is_one_byte));
// When arriving here entry can only be a smi representing an uncompiled
// regexp.
- DCHECK(entry->IsSmi());
+ DCHECK(entry.IsSmi());
int entry_value = Smi::ToInt(entry);
DCHECK_EQ(JSRegExp::kUninitializedValue, entry_value);
#endif
@@ -363,37 +363,36 @@
}
int RegExpImpl::IrregexpMaxRegisterCount(FixedArray re) {
- return Smi::cast(
- re->get(JSRegExp::kIrregexpMaxRegisterCountIndex))->value();
+ return Smi::cast(re.get(JSRegExp::kIrregexpMaxRegisterCountIndex)).value();
}
void RegExpImpl::SetIrregexpMaxRegisterCount(FixedArray re, int value) {
- re->set(JSRegExp::kIrregexpMaxRegisterCountIndex, Smi::FromInt(value));
+ re.set(JSRegExp::kIrregexpMaxRegisterCountIndex, Smi::FromInt(value));
}
void RegExpImpl::SetIrregexpCaptureNameMap(FixedArray re,
Handle<FixedArray> value) {
if (value.is_null()) {
- re->set(JSRegExp::kIrregexpCaptureNameMapIndex, Smi::kZero);
+ re.set(JSRegExp::kIrregexpCaptureNameMapIndex, Smi::kZero);
} else {
- re->set(JSRegExp::kIrregexpCaptureNameMapIndex, *value);
+ re.set(JSRegExp::kIrregexpCaptureNameMapIndex, *value);
}
}
int RegExpImpl::IrregexpNumberOfCaptures(FixedArray re) {
- return Smi::ToInt(re->get(JSRegExp::kIrregexpCaptureCountIndex));
+ return Smi::ToInt(re.get(JSRegExp::kIrregexpCaptureCountIndex));
}
int RegExpImpl::IrregexpNumberOfRegisters(FixedArray re) {
- return Smi::ToInt(re->get(JSRegExp::kIrregexpMaxRegisterCountIndex));
+ return Smi::ToInt(re.get(JSRegExp::kIrregexpMaxRegisterCountIndex));
}
ByteArray RegExpImpl::IrregexpByteCode(FixedArray re, bool is_one_byte) {
- return ByteArray::cast(re->get(JSRegExp::code_index(is_one_byte)));
+ return ByteArray::cast(re.get(JSRegExp::code_index(is_one_byte)));
}
Code RegExpImpl::IrregexpNativeCode(FixedArray re, bool is_one_byte) {
- return Code::cast(re->get(JSRegExp::code_index(is_one_byte)));
+ return Code::cast(re.get(JSRegExp::code_index(is_one_byte)));
}
void RegExpImpl::IrregexpInitialize(Isolate* isolate, Handle<JSRegExp> re,
@@ -525,7 +524,7 @@
#ifdef DEBUG
if (FLAG_regexp_interpret_all && FLAG_trace_regexp_bytecodes) {
String pattern = regexp->Pattern();
- PrintF("\n\nRegexp match: /%s/\n\n", pattern->ToCString().get());
+ PrintF("\n\nRegexp match: /%s/\n\n", pattern.ToCString().get());
PrintF("\n\nSubject string: '%s'\n\n", subject->ToCString().get());
}
#endif
@@ -6871,32 +6870,32 @@
FixedArray* last_match_cache,
ResultsCacheType type) {
FixedArray cache;
- if (!key_string->IsInternalizedString()) return Smi::kZero;
+ if (!key_string.IsInternalizedString()) return Smi::kZero;
if (type == STRING_SPLIT_SUBSTRINGS) {
- DCHECK(key_pattern->IsString());
- if (!key_pattern->IsInternalizedString()) return Smi::kZero;
+ DCHECK(key_pattern.IsString());
+ if (!key_pattern.IsInternalizedString()) return Smi::kZero;
cache = heap->string_split_cache();
} else {
DCHECK(type == REGEXP_MULTIPLE_INDICES);
- DCHECK(key_pattern->IsFixedArray());
+ DCHECK(key_pattern.IsFixedArray());
cache = heap->regexp_multiple_cache();
}
- uint32_t hash = key_string->Hash();
+ uint32_t hash = key_string.Hash();
uint32_t index = ((hash & (kRegExpResultsCacheSize - 1)) &
~(kArrayEntriesPerCacheEntry - 1));
- if (cache->get(index + kStringOffset) != key_string ||
- cache->get(index + kPatternOffset) != key_pattern) {
+ if (cache.get(index + kStringOffset) != key_string ||
+ cache.get(index + kPatternOffset) != key_pattern) {
index =
((index + kArrayEntriesPerCacheEntry) & (kRegExpResultsCacheSize - 1));
- if (cache->get(index + kStringOffset) != key_string ||
- cache->get(index + kPatternOffset) != key_pattern) {
+ if (cache.get(index + kStringOffset) != key_string ||
+ cache.get(index + kPatternOffset) != key_pattern) {
return Smi::kZero;
}
}
- *last_match_cache = FixedArray::cast(cache->get(index + kLastMatchOffset));
- return cache->get(index + kArrayOffset);
+ *last_match_cache = FixedArray::cast(cache.get(index + kLastMatchOffset));
+ return cache.get(index + kArrayOffset);
}
void RegExpResultsCache::Enter(Isolate* isolate, Handle<String> key_string,
@@ -6960,7 +6959,7 @@
void RegExpResultsCache::Clear(FixedArray cache) {
for (int i = 0; i < kRegExpResultsCacheSize; i++) {
- cache->set(i, Smi::kZero);
+ cache.set(i, Smi::kZero);
}
}
diff --git a/src/regexp/regexp-macro-assembler.cc b/src/regexp/regexp-macro-assembler.cc
index 119a6b5..cfe827e 100644
--- a/src/regexp/regexp-macro-assembler.cc
+++ b/src/regexp/regexp-macro-assembler.cc
@@ -102,30 +102,30 @@
const byte* NativeRegExpMacroAssembler::StringCharacterPosition(
String subject, int start_index, const DisallowHeapAllocation& no_gc) {
- if (subject->IsConsString()) {
- subject = ConsString::cast(subject)->first();
- } else if (subject->IsSlicedString()) {
- start_index += SlicedString::cast(subject)->offset();
- subject = SlicedString::cast(subject)->parent();
+ if (subject.IsConsString()) {
+ subject = ConsString::cast(subject).first();
+ } else if (subject.IsSlicedString()) {
+ start_index += SlicedString::cast(subject).offset();
+ subject = SlicedString::cast(subject).parent();
}
- if (subject->IsThinString()) {
- subject = ThinString::cast(subject)->actual();
+ if (subject.IsThinString()) {
+ subject = ThinString::cast(subject).actual();
}
DCHECK_LE(0, start_index);
- DCHECK_LE(start_index, subject->length());
- if (subject->IsSeqOneByteString()) {
+ DCHECK_LE(start_index, subject.length());
+ if (subject.IsSeqOneByteString()) {
return reinterpret_cast<const byte*>(
- SeqOneByteString::cast(subject)->GetChars(no_gc) + start_index);
- } else if (subject->IsSeqTwoByteString()) {
+ SeqOneByteString::cast(subject).GetChars(no_gc) + start_index);
+ } else if (subject.IsSeqTwoByteString()) {
return reinterpret_cast<const byte*>(
- SeqTwoByteString::cast(subject)->GetChars(no_gc) + start_index);
- } else if (subject->IsExternalOneByteString()) {
+ SeqTwoByteString::cast(subject).GetChars(no_gc) + start_index);
+ } else if (subject.IsExternalOneByteString()) {
return reinterpret_cast<const byte*>(
- ExternalOneByteString::cast(subject)->GetChars() + start_index);
+ ExternalOneByteString::cast(subject).GetChars() + start_index);
} else {
- DCHECK(subject->IsExternalTwoByteString());
+ DCHECK(subject.IsExternalTwoByteString());
return reinterpret_cast<const byte*>(
- ExternalTwoByteString::cast(subject)->GetChars() + start_index);
+ ExternalTwoByteString::cast(subject).GetChars() + start_index);
}
}
@@ -134,8 +134,8 @@
Address* return_address, Code re_code, Address* subject,
const byte** input_start, const byte** input_end) {
AllowHeapAllocation allow_allocation;
- DCHECK(re_code->raw_instruction_start() <= *return_address);
- DCHECK(*return_address <= re_code->raw_instruction_end());
+ DCHECK(re_code.raw_instruction_start() <= *return_address);
+ DCHECK(*return_address <= re_code.raw_instruction_end());
int return_value = 0;
// Prepare for possible GC.
HandleScope handles(isolate);
@@ -158,13 +158,13 @@
return_value = EXCEPTION;
} else {
Object result = isolate->stack_guard()->HandleInterrupts();
- if (result->IsException(isolate)) return_value = EXCEPTION;
+ if (result.IsException(isolate)) return_value = EXCEPTION;
}
DisallowHeapAllocation no_gc;
if (*code_handle != re_code) { // Return address no longer valid
- intptr_t delta = code_handle->address() - re_code->address();
+ intptr_t delta = code_handle->address() - re_code.address();
// Overwrite the return address on the stack.
*return_address += delta;
}
@@ -206,25 +206,25 @@
String subject_ptr = *subject;
// Character offsets into string.
int start_offset = previous_index;
- int char_length = subject_ptr->length() - start_offset;
+ int char_length = subject_ptr.length() - start_offset;
int slice_offset = 0;
// The string has been flattened, so if it is a cons string it contains the
// full string in the first part.
if (StringShape(subject_ptr).IsCons()) {
- DCHECK_EQ(0, ConsString::cast(subject_ptr)->second()->length());
- subject_ptr = ConsString::cast(subject_ptr)->first();
+ DCHECK_EQ(0, ConsString::cast(subject_ptr).second().length());
+ subject_ptr = ConsString::cast(subject_ptr).first();
} else if (StringShape(subject_ptr).IsSliced()) {
SlicedString slice = SlicedString::cast(subject_ptr);
- subject_ptr = slice->parent();
- slice_offset = slice->offset();
+ subject_ptr = slice.parent();
+ slice_offset = slice.offset();
}
if (StringShape(subject_ptr).IsThin()) {
- subject_ptr = ThinString::cast(subject_ptr)->actual();
+ subject_ptr = ThinString::cast(subject_ptr).actual();
}
// Ensure that an underlying string has the same representation.
- bool is_one_byte = subject_ptr->IsOneByteRepresentation();
- DCHECK(subject_ptr->IsExternalString() || subject_ptr->IsSeqString());
+ bool is_one_byte = subject_ptr.IsOneByteRepresentation();
+ DCHECK(subject_ptr.IsExternalString() || subject_ptr.IsSeqString());
// String is now either Sequential or External
int char_size_shift = is_one_byte ? 0 : 1;
diff --git a/src/regexp/regexp-utils.cc b/src/regexp/regexp-utils.cc
index 20076b3..5c1103b 100644
--- a/src/regexp/regexp-utils.cc
+++ b/src/regexp/regexp-utils.cc
@@ -37,7 +37,7 @@
namespace {
V8_INLINE bool HasInitialRegExpMap(Isolate* isolate, JSReceiver recv) {
- return recv->map() == isolate->regexp_function()->initial_map();
+ return recv.map() == isolate->regexp_function()->initial_map();
}
} // namespace
@@ -48,7 +48,7 @@
Handle<Object> value_as_object =
isolate->factory()->NewNumberFromInt64(value);
if (HasInitialRegExpMap(isolate, *recv)) {
- JSRegExp::cast(*recv)->set_last_index(*value_as_object, SKIP_WRITE_BARRIER);
+ JSRegExp::cast(*recv).set_last_index(*value_as_object, SKIP_WRITE_BARRIER);
return recv;
} else {
return Object::SetProperty(
@@ -60,7 +60,7 @@
MaybeHandle<Object> RegExpUtils::GetLastIndex(Isolate* isolate,
Handle<JSReceiver> recv) {
if (HasInitialRegExpMap(isolate, *recv)) {
- return handle(JSRegExp::cast(*recv)->last_index(), isolate);
+ return handle(JSRegExp::cast(*recv).last_index(), isolate);
} else {
return Object::GetProperty(isolate, recv,
isolate->factory()->lastIndex_string());
@@ -158,11 +158,11 @@
if (!HasInitialRegExpMap(isolate, recv)) return false;
// Check the receiver's prototype's map.
- Object proto = recv->map()->prototype();
- if (!proto->IsJSReceiver()) return false;
+ Object proto = recv.map().prototype();
+ if (!proto.IsJSReceiver()) return false;
Handle<Map> initial_proto_initial_map = isolate->regexp_prototype_map();
- Map proto_map = JSReceiver::cast(proto)->map();
+ Map proto_map = JSReceiver::cast(proto).map();
if (proto_map != *initial_proto_initial_map) {
return false;
}
@@ -171,10 +171,10 @@
// Check that the index refers to "exec" method (this has to be consistent
// with the init order in the bootstrapper).
DCHECK_EQ(*(isolate->factory()->exec_string()),
- proto_map->instance_descriptors()->GetKey(
+ proto_map.instance_descriptors().GetKey(
JSRegExp::kExecFunctionDescriptorIndex));
- if (proto_map->instance_descriptors()
- ->GetDetails(JSRegExp::kExecFunctionDescriptorIndex)
+ if (proto_map.instance_descriptors()
+ .GetDetails(JSRegExp::kExecFunctionDescriptorIndex)
.constness() != PropertyConstness::kConst) {
return false;
}
@@ -183,8 +183,8 @@
// The smi check is required to omit ToLength(lastIndex) calls with possible
// user-code execution on the fast path.
- Object last_index = JSRegExp::cast(recv)->last_index();
- return last_index->IsSmi() && Smi::ToInt(last_index) >= 0;
+ Object last_index = JSRegExp::cast(recv).last_index();
+ return last_index.IsSmi() && Smi::ToInt(last_index) >= 0;
}
uint64_t RegExpUtils::AdvanceStringIndex(Handle<String> string, uint64_t index,
diff --git a/src/roots.cc b/src/roots.cc
index fb65d2f..d3976e6 100644
--- a/src/roots.cc
+++ b/src/roots.cc
@@ -73,7 +73,7 @@
switch (index) {
#define CHECKTYPE(Type, name, CamelName) \
case RootIndex::k##CamelName: \
- return root->Is##Type();
+ return root.Is##Type();
READ_ONLY_ROOT_LIST(CHECKTYPE)
#undef CHECKTYPE
diff --git a/src/runtime/runtime-array.cc b/src/runtime/runtime-array.cc
index 535fdcc..fcfddde 100644
--- a/src/runtime/runtime-array.cc
+++ b/src/runtime/runtime-array.cc
@@ -189,12 +189,12 @@
Handle<JSObject> object = Handle<JSObject>::cast(receiver);
if (object->HasStringWrapperElements()) {
- int len = String::cast(Handle<JSValue>::cast(object)->value())->length();
+ int len = String::cast(Handle<JSValue>::cast(object)->value()).length();
DCHECK_LE(len, limit);
return Smi::FromInt(len);
}
- if (object->HasSloppyArgumentsElements() || !object->map()->is_extensible()) {
+ if (object->HasSloppyArgumentsElements() || !object->map().is_extensible()) {
return RemoveArrayHolesGeneric(isolate, receiver, limit);
}
@@ -251,24 +251,24 @@
// Assume most arrays contain no holes and undefined values, so minimize the
// number of stores of non-undefined, non-the-hole values.
for (unsigned int i = 0; i < holes; i++) {
- if (elements->is_the_hole(i)) {
+ if (elements.is_the_hole(i)) {
holes--;
} else {
continue;
}
// Position i needs to be filled.
while (holes > i) {
- if (elements->is_the_hole(holes)) {
+ if (elements.is_the_hole(holes)) {
holes--;
} else {
- elements->set(i, elements->get_scalar(holes));
+ elements.set(i, elements.get_scalar(holes));
break;
}
}
}
result = holes;
while (holes < limit) {
- elements->set_the_hole(holes);
+ elements.set_the_hole(holes);
holes++;
}
} else {
@@ -277,42 +277,42 @@
// Split elements into defined, undefined and the_hole, in that order. Only
// count locations for undefined and the hole, and fill them afterwards.
- WriteBarrierMode write_barrier = elements->GetWriteBarrierMode(no_gc);
+ WriteBarrierMode write_barrier = elements.GetWriteBarrierMode(no_gc);
unsigned int undefs = limit;
unsigned int holes = limit;
// Assume most arrays contain no holes and undefined values, so minimize the
// number of stores of non-undefined, non-the-hole values.
for (unsigned int i = 0; i < undefs; i++) {
- Object current = elements->get(i);
- if (current->IsTheHole(isolate)) {
+ Object current = elements.get(i);
+ if (current.IsTheHole(isolate)) {
holes--;
undefs--;
- } else if (current->IsUndefined(isolate)) {
+ } else if (current.IsUndefined(isolate)) {
undefs--;
} else {
continue;
}
// Position i needs to be filled.
while (undefs > i) {
- current = elements->get(undefs);
- if (current->IsTheHole(isolate)) {
+ current = elements.get(undefs);
+ if (current.IsTheHole(isolate)) {
holes--;
undefs--;
- } else if (current->IsUndefined(isolate)) {
+ } else if (current.IsUndefined(isolate)) {
undefs--;
} else {
- elements->set(i, current, write_barrier);
+ elements.set(i, current, write_barrier);
break;
}
}
}
result = undefs;
while (undefs < holes) {
- elements->set_undefined(isolate, undefs);
+ elements.set_undefined(isolate, undefs);
undefs++;
}
while (holes < limit) {
- elements->set_the_hole(isolate, holes);
+ elements.set_the_hole(isolate, holes);
holes++;
}
}
@@ -406,7 +406,7 @@
if (object->IsJSArray() &&
!Handle<JSArray>::cast(object)->HasFastPackedElements()) {
if (!isolate->IsNoElementsProtectorIntact() ||
- object->map()->prototype() != initial_array_proto) {
+ object->map().prototype() != initial_array_proto) {
isolate->CountUsage(
v8::Isolate::kArrayPrototypeSortJSArrayModifiedPrototype);
}
@@ -415,7 +415,7 @@
// Skip copying from prototype for JSArrays with ElementsProtector intact and
// the original array prototype.
if (!object->IsJSArray() || !isolate->IsNoElementsProtectorIntact() ||
- object->map()->prototype() != initial_array_proto) {
+ object->map().prototype() != initial_array_proto) {
RETURN_FAILURE_ON_EXCEPTION(isolate,
CopyFromPrototype(isolate, object, length));
}
@@ -546,7 +546,7 @@
if (key < 0) return Smi::kZero;
- uint32_t capacity = static_cast<uint32_t>(object->elements()->length());
+ uint32_t capacity = static_cast<uint32_t>(object->elements().length());
uint32_t index = static_cast<uint32_t>(key);
if (index >= capacity) {
@@ -572,7 +572,7 @@
SealHandleScope shs(isolate);
DCHECK_EQ(1, args.length());
CONVERT_ARG_CHECKED(Object, obj, 0);
- return isolate->heap()->ToBoolean(obj->IsJSArray());
+ return isolate->heap()->ToBoolean(obj.IsJSArray());
}
RUNTIME_FUNCTION(Runtime_ArraySpeciesConstructor) {
@@ -599,9 +599,9 @@
// Let len be ? ToLength(? Get(O, "length")).
int64_t len;
{
- if (object->map()->instance_type() == JS_ARRAY_TYPE) {
+ if (object->map().instance_type() == JS_ARRAY_TYPE) {
uint32_t len32 = 0;
- bool success = JSArray::cast(*object)->length()->ToArrayLength(&len32);
+ bool success = JSArray::cast(*object).length().ToArrayLength(&len32);
DCHECK(success);
USE(success);
len = len32;
@@ -653,7 +653,7 @@
// If the receiver is not a special receiver type, and the length is a valid
// element index, perform fast operation tailored to specific ElementsKinds.
- if (!object->map()->IsSpecialReceiverMap() && len < kMaxUInt32 &&
+ if (!object->map().IsSpecialReceiverMap() && len < kMaxUInt32 &&
JSObject::PrototypeHasNoElements(isolate, JSObject::cast(*object))) {
Handle<JSObject> obj = Handle<JSObject>::cast(object);
ElementsAccessor* elements = obj->GetElementsAccessor();
@@ -703,7 +703,7 @@
{
if (object->IsJSArray()) {
uint32_t len32 = 0;
- bool success = JSArray::cast(*object)->length()->ToArrayLength(&len32);
+ bool success = JSArray::cast(*object).length().ToArrayLength(&len32);
DCHECK(success);
USE(success);
len = len32;
@@ -752,7 +752,7 @@
// If the receiver is not a special receiver type, and the length fits
// uint32_t, perform fast operation tailored to specific ElementsKinds.
- if (!object->map()->IsSpecialReceiverMap() && len <= kMaxUInt32 &&
+ if (!object->map().IsSpecialReceiverMap() && len <= kMaxUInt32 &&
JSObject::PrototypeHasNoElements(isolate, JSObject::cast(*object))) {
Handle<JSObject> obj = Handle<JSObject>::cast(object);
ElementsAccessor* elements = obj->GetElementsAccessor();
diff --git a/src/runtime/runtime-classes.cc b/src/runtime/runtime-classes.cc
index 8d343fe..8e6787e 100644
--- a/src/runtime/runtime-classes.cc
+++ b/src/runtime/runtime-classes.cc
@@ -37,7 +37,7 @@
HandleScope scope(isolate);
DCHECK_EQ(1, args.length());
CONVERT_ARG_HANDLE_CHECKED(JSFunction, constructor, 0);
- Handle<String> name(constructor->shared()->Name(), isolate);
+ Handle<String> name(constructor->shared().Name(), isolate);
THROW_NEW_ERROR_RETURN_FAILURE(
isolate, NewTypeError(MessageTemplate::kConstructorNonCallable, name));
}
@@ -70,8 +70,8 @@
Handle<JSFunction> function) {
Handle<String> super_name;
if (constructor->IsJSFunction()) {
- super_name = handle(Handle<JSFunction>::cast(constructor)->shared()->Name(),
- isolate);
+ super_name =
+ handle(Handle<JSFunction>::cast(constructor)->shared().Name(), isolate);
} else if (constructor->IsOddball()) {
DCHECK(constructor->IsNull(isolate));
super_name = isolate->factory()->null_string();
@@ -82,7 +82,7 @@
if (super_name->length() == 0) {
super_name = isolate->factory()->null_string();
}
- Handle<String> function_name(function->shared()->Name(), isolate);
+ Handle<String> function_name(function->shared().Name(), isolate);
// anonymous class
if (function_name->length() == 0) {
THROW_NEW_ERROR_RETURN_FAILURE(
@@ -129,14 +129,14 @@
inline void SetHomeObject(Isolate* isolate, JSFunction method,
JSObject home_object) {
- if (method->shared()->needs_home_object()) {
+ if (method.shared().needs_home_object()) {
const int kPropertyIndex = JSFunction::kMaybeHomeObjectDescriptorIndex;
- CHECK_EQ(method->map()->instance_descriptors()->GetKey(kPropertyIndex),
+ CHECK_EQ(method.map().instance_descriptors().GetKey(kPropertyIndex),
ReadOnlyRoots(isolate).home_object_symbol());
FieldIndex field_index =
- FieldIndex::ForDescriptor(method->map(), kPropertyIndex);
- method->RawFastPropertyAtPut(field_index, home_object);
+ FieldIndex::ForDescriptor(method.map(), kPropertyIndex);
+ method.RawFastPropertyAtPut(field_index, home_object);
}
}
@@ -163,7 +163,7 @@
SetHomeObject(isolate, *method, *home_object);
- if (!method->shared()->HasSharedName()) {
+ if (!method->shared().HasSharedName()) {
// TODO(ishell): method does not have a shared name at this point only if
// the key is a computed property name. However, the bytecode generator
// explicitly generates ToName bytecodes to ensure that the computed
@@ -200,7 +200,7 @@
SetHomeObject(isolate, *method, home_object);
- DCHECK(method->shared()->HasSharedName());
+ DCHECK(method->shared().HasSharedName());
return *method;
}
@@ -215,7 +215,7 @@
int capacity = dictionary->Capacity();
for (int i = 0; i < capacity; i++) {
Object value = dictionary->ValueAt(i);
- if (value->IsAccessorPair()) {
+ if (value.IsAccessorPair()) {
Handle<AccessorPair> pair(AccessorPair::cast(value), isolate);
pair = AccessorPair::Copy(isolate, pair);
dictionary->ValueAtPut(i, *pair);
@@ -245,7 +245,7 @@
if (value->IsAccessorPair()) {
Handle<AccessorPair> pair = Handle<AccessorPair>::cast(value);
Object tmp = pair->getter();
- if (tmp->IsSmi()) {
+ if (tmp.IsSmi()) {
Handle<Object> result;
ASSIGN_RETURN_ON_EXCEPTION_VALUE(
isolate, result,
@@ -256,7 +256,7 @@
pair->set_getter(*result);
}
tmp = pair->setter();
- if (tmp->IsSmi()) {
+ if (tmp.IsSmi()) {
Handle<Object> result;
ASSIGN_RETURN_ON_EXCEPTION_VALUE(
isolate, result,
@@ -314,35 +314,34 @@
int field_index = 0;
for (int i = 0; i < nof_descriptors; i++) {
Object value = descriptors_template->GetStrongValue(i);
- if (value->IsAccessorPair()) {
+ if (value.IsAccessorPair()) {
Handle<AccessorPair> pair = AccessorPair::Copy(
isolate, handle(AccessorPair::cast(value), isolate));
value = *pair;
}
DisallowHeapAllocation no_gc;
Name name = descriptors_template->GetKey(i);
- DCHECK(name->IsUniqueName());
+ DCHECK(name.IsUniqueName());
PropertyDetails details = descriptors_template->GetDetails(i);
if (details.location() == kDescriptor) {
if (details.kind() == kData) {
- if (value->IsSmi()) {
+ if (value.IsSmi()) {
value = GetMethodWithSharedNameAndSetHomeObject(isolate, args, value,
*receiver);
}
- details =
- details.CopyWithRepresentation(value->OptimalRepresentation());
+ details = details.CopyWithRepresentation(value.OptimalRepresentation());
} else {
DCHECK_EQ(kAccessor, details.kind());
- if (value->IsAccessorPair()) {
+ if (value.IsAccessorPair()) {
AccessorPair pair = AccessorPair::cast(value);
- Object tmp = pair->getter();
- if (tmp->IsSmi()) {
- pair->set_getter(GetMethodWithSharedNameAndSetHomeObject(
+ Object tmp = pair.getter();
+ if (tmp.IsSmi()) {
+ pair.set_getter(GetMethodWithSharedNameAndSetHomeObject(
isolate, args, tmp, *receiver));
}
- tmp = pair->setter();
- if (tmp->IsSmi()) {
- pair->set_setter(GetMethodWithSharedNameAndSetHomeObject(
+ tmp = pair.setter();
+ if (tmp.IsSmi()) {
+ pair.set_setter(GetMethodWithSharedNameAndSetHomeObject(
isolate, args, tmp, *receiver));
}
}
@@ -350,7 +349,7 @@
} else {
UNREACHABLE();
}
- DCHECK(value->FitsRepresentation(details.representation()));
+ DCHECK(value.FitsRepresentation(details.representation()));
if (details.location() == kDescriptor && details.kind() == kData) {
details = PropertyDetails(details.kind(), details.attributes(), kField,
PropertyConstness::kConst,
@@ -589,7 +588,7 @@
} else if (super_class->IsConstructor()) {
DCHECK(!super_class->IsJSFunction() ||
!IsResumableFunction(
- Handle<JSFunction>::cast(super_class)->shared()->kind()));
+ Handle<JSFunction>::cast(super_class)->shared().kind()));
ASSIGN_RETURN_ON_EXCEPTION(
isolate, prototype_parent,
Runtime::GetObjectProperty(isolate, super_class,
@@ -629,7 +628,7 @@
if (FLAG_trace_maps) {
LOG(isolate,
MapEvent("InitialMap", Map(), constructor->map(),
- "init class constructor", constructor->shared()->DebugName()));
+ "init class constructor", constructor->shared().DebugName()));
LOG(isolate, MapEvent("InitialMap", Map(), prototype->map(),
"init class prototype"));
}
diff --git a/src/runtime/runtime-compiler.cc b/src/runtime/runtime-compiler.cc
index b431b55..d7f94e5 100644
--- a/src/runtime/runtime-compiler.cc
+++ b/src/runtime/runtime-compiler.cc
@@ -25,7 +25,7 @@
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
#ifdef DEBUG
- if (FLAG_trace_lazy && !function->shared()->is_compiled()) {
+ if (FLAG_trace_lazy && !function->shared().is_compiled()) {
PrintF("[unoptimized: ");
function->PrintName();
PrintF("]\n");
@@ -66,14 +66,14 @@
DCHECK_EQ(1, args.length());
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
- DCHECK_EQ(function->feedback_vector()->optimization_marker(),
+ DCHECK_EQ(function->feedback_vector().optimization_marker(),
OptimizationMarker::kLogFirstExecution);
DCHECK(FLAG_log_function_events);
Handle<SharedFunctionInfo> sfi(function->shared(), isolate);
LOG(isolate, FunctionEvent(
- "first-execution", Script::cast(sfi->script())->id(), 0,
+ "first-execution", Script::cast(sfi->script()).id(), 0,
sfi->StartPosition(), sfi->EndPosition(), sfi->DebugName()));
- function->feedback_vector()->ClearOptimizationMarker();
+ function->feedback_vector().ClearOptimizationMarker();
// Return the code to continue execution, we don't care at this point whether
// this is for lazy compilation or has been eagerly complied.
return function->code();
@@ -99,9 +99,9 @@
DCHECK_EQ(1, args.length());
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
- DCHECK(function->shared()->is_compiled());
+ DCHECK(function->shared().is_compiled());
- function->feedback_vector()->EvictOptimizedCodeMarkedForDeoptimization(
+ function->feedback_vector().EvictOptimizedCodeMarkedForDeoptimization(
function->shared(), "Runtime_EvictOptimizedCodeSlot");
return function->code();
}
@@ -112,18 +112,18 @@
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
Handle<JSReceiver> stdlib;
- if (args[1]->IsJSReceiver()) {
+ if (args[1].IsJSReceiver()) {
stdlib = args.at<JSReceiver>(1);
}
Handle<JSReceiver> foreign;
- if (args[2]->IsJSReceiver()) {
+ if (args[2].IsJSReceiver()) {
foreign = args.at<JSReceiver>(2);
}
Handle<JSArrayBuffer> memory;
- if (args[3]->IsJSArrayBuffer()) {
+ if (args[3].IsJSArrayBuffer()) {
memory = args.at<JSArrayBuffer>(3);
}
- if (function->shared()->HasAsmWasmData()) {
+ if (function->shared().HasAsmWasmData()) {
Handle<SharedFunctionInfo> shared(function->shared(), isolate);
Handle<AsmWasmData> data(shared->asm_wasm_data(), isolate);
MaybeHandle<Object> result = AsmJs::InstantiateAsmWasm(
@@ -134,11 +134,11 @@
}
// Remove wasm data, mark as broken for asm->wasm, replace function code with
// UncompiledData, and return a smi 0 to indicate failure.
- if (function->shared()->HasAsmWasmData()) {
+ if (function->shared().HasAsmWasmData()) {
SharedFunctionInfo::DiscardCompiled(isolate,
handle(function->shared(), isolate));
}
- function->shared()->set_is_asm_wasm_broken(true);
+ function->shared().set_is_asm_wasm_broken(true);
DCHECK(function->code() ==
isolate->builtins()->builtin(Builtins::kInstantiateAsmJs));
function->set_code(isolate->builtins()->builtin(Builtins::kCompileLazy));
@@ -184,7 +184,7 @@
static bool IsSuitableForOnStackReplacement(Isolate* isolate,
Handle<JSFunction> function) {
// Keep track of whether we've succeeded in optimizing.
- if (function->shared()->optimization_disabled()) return false;
+ if (function->shared().optimization_disabled()) return false;
// If we are trying to do OSR when there are already optimized
// activations of the function, it means (a) the function is directly or
// indirectly recursive and (b) an optimized invocation has been
@@ -209,8 +209,8 @@
// representing the entry point will be valid for any copy of the bytecode.
Handle<BytecodeArray> bytecode(iframe->GetBytecodeArray(), iframe->isolate());
- DCHECK(frame->LookupCode()->is_interpreter_trampoline_builtin());
- DCHECK(frame->function()->shared()->HasBytecodeArray());
+ DCHECK(frame->LookupCode().is_interpreter_trampoline_builtin());
+ DCHECK(frame->function().shared().HasBytecodeArray());
DCHECK(frame->is_interpreted());
// Reset the OSR loop nesting depth to disarm back edges.
@@ -258,11 +258,11 @@
DeoptimizationData data =
DeoptimizationData::cast(result->deoptimization_data());
- if (data->OsrPcOffset()->value() >= 0) {
- DCHECK(BailoutId(data->OsrBytecodeOffset()->value()) == ast_id);
+ if (data.OsrPcOffset().value() >= 0) {
+ DCHECK(BailoutId(data.OsrBytecodeOffset().value()) == ast_id);
if (FLAG_trace_osr) {
PrintF("[OSR - Entry at AST id %d, offset %d in optimized code]\n",
- ast_id.ToInt(), data->OsrPcOffset()->value());
+ ast_id.ToInt(), data.OsrPcOffset().value());
}
DCHECK(result->is_turbofanned());
@@ -289,7 +289,7 @@
}
if (!function->IsOptimized()) {
- function->set_code(function->shared()->GetCode());
+ function->set_code(function->shared().GetCode());
}
return Object();
}
@@ -303,7 +303,7 @@
// Check if native context allows code generation from
// strings. Throw an exception if it doesn't.
- if (native_context->allow_code_gen_from_strings()->IsFalse(isolate) &&
+ if (native_context->allow_code_gen_from_strings().IsFalse(isolate) &&
!Compiler::CodeGenerationFromStringsAllowed(isolate, native_context,
source)) {
Handle<Object> error_message =
@@ -340,14 +340,14 @@
// execution default to an indirect call to eval, which will also return
// the first argument without doing anything).
if (*callee != isolate->native_context()->global_eval_fun() ||
- !args[1]->IsString()) {
+ !args[1].IsString()) {
return *callee;
}
- DCHECK(args[3]->IsSmi());
+ DCHECK(args[3].IsSmi());
DCHECK(is_valid_language_mode(args.smi_at(3)));
LanguageMode language_mode = static_cast<LanguageMode>(args.smi_at(3));
- DCHECK(args[4]->IsSmi());
+ DCHECK(args[4].IsSmi());
Handle<SharedFunctionInfo> outer_info(args.at<JSFunction>(2)->shared(),
isolate);
return CompileGlobalEval(isolate, args.at<String>(1), outer_info,
diff --git a/src/runtime/runtime-debug.cc b/src/runtime/runtime-debug.cc
index 6df216e..2383c73 100644
--- a/src/runtime/runtime-debug.cc
+++ b/src/runtime/runtime-debug.cc
@@ -66,10 +66,10 @@
DCHECK(it.frame()->is_interpreted());
InterpretedFrame* interpreted_frame =
reinterpret_cast<InterpretedFrame*>(it.frame());
- SharedFunctionInfo shared = interpreted_frame->function()->shared();
- BytecodeArray bytecode_array = shared->GetBytecodeArray();
+ SharedFunctionInfo shared = interpreted_frame->function().shared();
+ BytecodeArray bytecode_array = shared.GetBytecodeArray();
int bytecode_offset = interpreted_frame->GetBytecodeOffset();
- Bytecode bytecode = Bytecodes::FromByte(bytecode_array->get(bytecode_offset));
+ Bytecode bytecode = Bytecodes::FromByte(bytecode_array.get(bytecode_offset));
bool side_effect_check_failed = false;
if (isolate->debug_execution_mode() == DebugInfo::kSideEffects) {
@@ -98,7 +98,7 @@
Smi::FromInt(static_cast<uint8_t>(bytecode)));
}
Object interrupt_object = isolate->stack_guard()->HandleInterrupts();
- if (interrupt_object->IsException(isolate)) {
+ if (interrupt_object.IsException(isolate)) {
return MakePair(interrupt_object,
Smi::FromInt(static_cast<uint8_t>(bytecode)));
}
@@ -112,8 +112,8 @@
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
USE(function);
- DCHECK(function->shared()->HasDebugInfo());
- DCHECK(function->shared()->GetDebugInfo()->BreakAtEntry());
+ DCHECK(function->shared().HasDebugInfo());
+ DCHECK(function->shared().GetDebugInfo().BreakAtEntry());
// Get the top-most JavaScript frame.
JavaScriptFrameIterator it(isolate);
@@ -147,7 +147,7 @@
Factory* factory = isolate->factory();
Handle<IteratorType> iterator = Handle<IteratorType>::cast(object);
const char* kind = nullptr;
- switch (iterator->map()->instance_type()) {
+ switch (iterator->map().instance_type()) {
case JS_MAP_KEY_ITERATOR_TYPE:
kind = "keys";
break;
@@ -300,7 +300,7 @@
HandleScope scope(isolate);
DCHECK_EQ(1, args.length());
- if (!args[0]->IsJSGeneratorObject()) return Smi::kZero;
+ if (!args[0].IsJSGeneratorObject()) return Smi::kZero;
// Check arguments.
CONVERT_ARG_HANDLE_CHECKED(JSGeneratorObject, gen, 0);
@@ -323,7 +323,7 @@
HandleScope scope(isolate);
DCHECK_EQ(2, args.length());
- if (!args[0]->IsJSGeneratorObject()) {
+ if (!args[0].IsJSGeneratorObject()) {
return ReadOnlyRoots(isolate).undefined_value();
}
@@ -448,8 +448,8 @@
DCHECK_EQ(1, args.length());
CONVERT_ARG_CHECKED(Object, f, 0);
- if (f->IsJSFunction()) {
- return JSFunction::cast(f)->shared()->inferred_name();
+ if (f.IsJSFunction()) {
+ return JSFunction::cast(f).shared().inferred_name();
}
return ReadOnlyRoots(isolate).empty_string();
}
@@ -484,19 +484,19 @@
if (script->type() == Script::TYPE_WASM) {
return WasmModuleObject::cast(script->wasm_module_object())
- ->GetFunctionOffset(line);
+ .GetFunctionOffset(line);
}
Script::InitLineEnds(script);
FixedArray line_ends_array = FixedArray::cast(script->line_ends());
- const int line_count = line_ends_array->length();
+ const int line_count = line_ends_array.length();
DCHECK_LT(0, line_count);
if (line == 0) return 0;
// If line == line_count, we return the first position beyond the last line.
if (line > line_count) return -1;
- return Smi::ToInt(line_ends_array->get(line - 1)) + 1;
+ return Smi::ToInt(line_ends_array.get(line - 1)) + 1;
}
int ScriptLinePositionWithOffset(Handle<Script> script, int line, int offset) {
@@ -578,7 +578,7 @@
Script::Iterator iterator(isolate);
for (Script script = iterator.Next(); !script.is_null();
script = iterator.Next()) {
- if (script->id() == needle) {
+ if (script.id() == needle) {
*result = handle(script, isolate);
return true;
}
@@ -777,7 +777,7 @@
CONVERT_ARG_HANDLE_CHECKED(JSFunction, script_function, 0);
CONVERT_ARG_HANDLE_CHECKED(String, new_source, 1);
- Handle<Script> script(Script::cast(script_function->shared()->script()),
+ Handle<Script> script(Script::cast(script_function->shared().script()),
isolate);
v8::debug::LiveEditResult result;
LiveEdit::PatchScript(isolate, script, new_source, false, &result);
diff --git a/src/runtime/runtime-function.cc b/src/runtime/runtime-function.cc
index 24accdd..0d1879c 100644
--- a/src/runtime/runtime-function.cc
+++ b/src/runtime/runtime-function.cc
@@ -20,8 +20,8 @@
CONVERT_ARG_HANDLE_CHECKED(JSReceiver, function, 0);
if (function->IsJSFunction()) {
- Handle<Object> script(
- Handle<JSFunction>::cast(function)->shared()->script(), isolate);
+ Handle<Object> script(Handle<JSFunction>::cast(function)->shared().script(),
+ isolate);
if (script->IsScript()) return Handle<Script>::cast(script)->source();
}
return ReadOnlyRoots(isolate).undefined_value();
@@ -33,8 +33,8 @@
CONVERT_ARG_HANDLE_CHECKED(JSReceiver, function, 0);
if (function->IsJSFunction()) {
- Handle<Object> script(
- Handle<JSFunction>::cast(function)->shared()->script(), isolate);
+ Handle<Object> script(Handle<JSFunction>::cast(function)->shared().script(),
+ isolate);
if (script->IsScript()) {
return Smi::FromInt(Handle<Script>::cast(script)->id());
}
@@ -60,7 +60,7 @@
DCHECK_EQ(1, args.length());
CONVERT_ARG_CHECKED(JSFunction, fun, 0);
- int pos = fun->shared()->StartPosition();
+ int pos = fun.shared().StartPosition();
return Smi::FromInt(pos);
}
@@ -70,7 +70,7 @@
DCHECK_EQ(1, args.length());
CONVERT_ARG_CHECKED(JSFunction, f, 0);
- return isolate->heap()->ToBoolean(f->shared()->IsApiFunction());
+ return isolate->heap()->ToBoolean(f.shared().IsApiFunction());
}
@@ -93,7 +93,7 @@
SealHandleScope shs(isolate);
DCHECK_EQ(1, args.length());
CONVERT_ARG_CHECKED(Object, object, 0);
- return isolate->heap()->ToBoolean(object->IsFunction());
+ return isolate->heap()->ToBoolean(object.IsFunction());
}
diff --git a/src/runtime/runtime-generator.cc b/src/runtime/runtime-generator.cc
index a69ead2..0312bbd 100644
--- a/src/runtime/runtime-generator.cc
+++ b/src/runtime/runtime-generator.cc
@@ -48,14 +48,14 @@
DCHECK_EQ(2, args.length());
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
CONVERT_ARG_HANDLE_CHECKED(Object, receiver, 1);
- CHECK_IMPLIES(IsAsyncFunction(function->shared()->kind()),
- IsAsyncGeneratorFunction(function->shared()->kind()));
- CHECK(IsResumableFunction(function->shared()->kind()));
+ CHECK_IMPLIES(IsAsyncFunction(function->shared().kind()),
+ IsAsyncGeneratorFunction(function->shared().kind()));
+ CHECK(IsResumableFunction(function->shared().kind()));
// Underlying function needs to have bytecode available.
- DCHECK(function->shared()->HasBytecodeArray());
- int size = function->shared()->internal_formal_parameter_count() +
- function->shared()->GetBytecodeArray()->register_count();
+ DCHECK(function->shared().HasBytecodeArray());
+ int size = function->shared().internal_formal_parameter_count() +
+ function->shared().GetBytecodeArray().register_count();
Handle<FixedArray> parameters_and_registers =
isolate->factory()->NewFixedArray(size);
@@ -129,7 +129,7 @@
DCHECK_EQ(1, args.length());
CONVERT_ARG_CHECKED(JSAsyncGeneratorObject, generator, 0);
- int state = generator->continuation();
+ int state = generator.continuation();
DCHECK_NE(state, JSAsyncGeneratorObject::kGeneratorExecuting);
// If state is 0 ("suspendedStart"), there is guaranteed to be no catch
@@ -137,11 +137,11 @@
// not reach a catch handler.
if (state < 1) return ReadOnlyRoots(isolate).false_value();
- SharedFunctionInfo shared = generator->function()->shared();
- DCHECK(shared->HasBytecodeArray());
- HandlerTable handler_table(shared->GetBytecodeArray());
+ SharedFunctionInfo shared = generator.function().shared();
+ DCHECK(shared.HasBytecodeArray());
+ HandlerTable handler_table(shared.GetBytecodeArray());
- int pc = Smi::cast(generator->input_or_debug_pos())->value();
+ int pc = Smi::cast(generator.input_or_debug_pos()).value();
HandlerTable::CatchPrediction catch_prediction = HandlerTable::ASYNC_AWAIT;
handler_table.LookupRange(pc, nullptr, &catch_prediction);
return isolate->heap()->ToBoolean(catch_prediction == HandlerTable::CAUGHT);
diff --git a/src/runtime/runtime-internal.cc b/src/runtime/runtime-internal.cc
index 964aef6..553e653 100644
--- a/src/runtime/runtime-internal.cc
+++ b/src/runtime/runtime-internal.cc
@@ -269,13 +269,13 @@
HandleScope scope(isolate);
DCHECK_EQ(1, args.length());
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
- function->raw_feedback_cell()->set_interrupt_budget(FLAG_interrupt_budget);
+ function->raw_feedback_cell().set_interrupt_budget(FLAG_interrupt_budget);
if (!function->has_feedback_vector()) {
JSFunction::EnsureFeedbackVector(function);
// Also initialize the invocation count here. This is only really needed for
// OSR. When we OSR functions with lazy feedback allocation we want to have
// a non zero invocation count so we can inline functions.
- function->feedback_vector()->set_invocation_count(1);
+ function->feedback_vector().set_invocation_count(1);
return ReadOnlyRoots(isolate).undefined_value();
}
// Handle interrupts.
@@ -347,7 +347,7 @@
SharedFunctionInfo::EnsureSourcePositionsAvailable(isolate, shared);
int pos = summary.abstract_code()->SourcePosition(summary.code_offset());
if (script->IsScript() &&
- !(Handle<Script>::cast(script)->source()->IsUndefined(isolate))) {
+ !(Handle<Script>::cast(script)->source().IsUndefined(isolate))) {
Handle<Script> casted_script = Handle<Script>::cast(script);
*target = MessageLocation(casted_script, pos, pos + 1, shared);
return true;
@@ -601,7 +601,7 @@
} else {
DCHECK_LE(args.length(), 2);
std::FILE* f;
- if (args[0]->IsString()) {
+ if (args[0].IsString()) {
// With a string argument, the results are appended to that file.
CONVERT_ARG_HANDLE_CHECKED(String, arg0, 0);
DisallowHeapAllocation no_gc;
@@ -626,7 +626,7 @@
OFStream stats_stream(f);
isolate->counters()->runtime_call_stats()->Print(stats_stream);
isolate->counters()->runtime_call_stats()->Reset();
- if (args[0]->IsString())
+ if (args[0].IsString())
std::fclose(f);
else
std::fflush(f);
@@ -687,7 +687,7 @@
CONVERT_ARG_HANDLE_CHECKED(SharedFunctionInfo, shared_info, 1);
CONVERT_SMI_ARG_CHECKED(slot_id, 2);
- Handle<Context> native_context(isolate->context()->native_context(), isolate);
+ Handle<Context> native_context(isolate->context().native_context(), isolate);
return *TemplateObjectDescription::GetTemplateObject(
isolate, native_context, description, shared_info, slot_id);
}
diff --git a/src/runtime/runtime-literals.cc b/src/runtime/runtime-literals.cc
index f3739bc..a6fea38 100644
--- a/src/runtime/runtime-literals.cc
+++ b/src/runtime/runtime-literals.cc
@@ -86,7 +86,7 @@
}
}
- if (object->map()->is_deprecated()) {
+ if (object->map().is_deprecated()) {
JSObject::MigrateInstance(object);
}
@@ -113,23 +113,23 @@
// Deep copy own properties. Arrays only have 1 property "length".
if (!copy->IsJSArray()) {
if (copy->HasFastProperties()) {
- Handle<DescriptorArray> descriptors(copy->map()->instance_descriptors(),
+ Handle<DescriptorArray> descriptors(copy->map().instance_descriptors(),
isolate);
- int limit = copy->map()->NumberOfOwnDescriptors();
+ int limit = copy->map().NumberOfOwnDescriptors();
for (int i = 0; i < limit; i++) {
DCHECK_EQ(kField, descriptors->GetDetails(i).location());
DCHECK_EQ(kData, descriptors->GetDetails(i).kind());
FieldIndex index = FieldIndex::ForDescriptor(copy->map(), i);
if (copy->IsUnboxedDoubleField(index)) continue;
Object raw = copy->RawFastPropertyAt(index);
- if (raw->IsJSObject()) {
+ if (raw.IsJSObject()) {
Handle<JSObject> value(JSObject::cast(raw), isolate);
ASSIGN_RETURN_ON_EXCEPTION(
isolate, value, VisitElementOrProperty(copy, value), JSObject);
if (copying) copy->FastPropertyAtPut(index, *value);
- } else if (copying && raw->IsMutableHeapNumber()) {
+ } else if (copying && raw.IsMutableHeapNumber()) {
DCHECK(descriptors->GetDetails(i).representation().IsDouble());
- uint64_t double_value = MutableHeapNumber::cast(raw)->value_as_bits();
+ uint64_t double_value = MutableHeapNumber::cast(raw).value_as_bits();
auto value =
isolate->factory()->NewMutableHeapNumberFromBits(double_value);
copy->FastPropertyAtPut(index, *value);
@@ -139,8 +139,8 @@
Handle<NameDictionary> dict(copy->property_dictionary(), isolate);
for (int i = 0; i < dict->Capacity(); i++) {
Object raw = dict->ValueAt(i);
- if (!raw->IsJSObject()) continue;
- DCHECK(dict->KeyAt(i)->IsName());
+ if (!raw.IsJSObject()) continue;
+ DCHECK(dict->KeyAt(i).IsName());
Handle<JSObject> value(JSObject::cast(raw), isolate);
ASSIGN_RETURN_ON_EXCEPTION(
isolate, value, VisitElementOrProperty(copy, value), JSObject);
@@ -149,7 +149,7 @@
}
// Assume non-arrays don't end up having elements.
- if (copy->elements()->length() == 0) return copy;
+ if (copy->elements().length() == 0) return copy;
}
// Deep copy own elements.
@@ -164,13 +164,13 @@
if (elements->map() == ReadOnlyRoots(isolate).fixed_cow_array_map()) {
#ifdef DEBUG
for (int i = 0; i < elements->length(); i++) {
- DCHECK(!elements->get(i)->IsJSObject());
+ DCHECK(!elements->get(i).IsJSObject());
}
#endif
} else {
for (int i = 0; i < elements->length(); i++) {
Object raw = elements->get(i);
- if (!raw->IsJSObject()) continue;
+ if (!raw.IsJSObject()) continue;
Handle<JSObject> value(JSObject::cast(raw), isolate);
ASSIGN_RETURN_ON_EXCEPTION(
isolate, value, VisitElementOrProperty(copy, value), JSObject);
@@ -185,7 +185,7 @@
int capacity = element_dictionary->Capacity();
for (int i = 0; i < capacity; i++) {
Object raw = element_dictionary->ValueAt(i);
- if (!raw->IsJSObject()) continue;
+ if (!raw.IsJSObject()) continue;
Handle<JSObject> value(JSObject::cast(raw), isolate);
ASSIGN_RETURN_ON_EXCEPTION(
isolate, value, VisitElementOrProperty(copy, value), JSObject);
@@ -392,7 +392,7 @@
// TODO(cbruni): avoid making the boilerplate fast again, the clone stub
// supports dict-mode objects directly.
JSObject::MigrateSlowToFast(boilerplate,
- boilerplate->map()->UnusedPropertyFields(),
+ boilerplate->map().UnusedPropertyFields(),
"FastLiteral");
}
return boilerplate;
@@ -427,7 +427,7 @@
Handle<FixedArray> fixed_array_values =
Handle<FixedArray>::cast(copied_elements_values);
for (int i = 0; i < fixed_array_values->length(); i++) {
- DCHECK(!fixed_array_values->get(i)->IsFixedArray());
+ DCHECK(!fixed_array_values->get(i).IsFixedArray());
}
#endif
} else {
diff --git a/src/runtime/runtime-module.cc b/src/runtime/runtime-module.cc
index c8047de..91d98bf 100644
--- a/src/runtime/runtime-module.cc
+++ b/src/runtime/runtime-module.cc
@@ -18,11 +18,10 @@
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
CONVERT_ARG_HANDLE_CHECKED(Object, specifier, 1);
- Handle<Script> script(Script::cast(function->shared()->script()), isolate);
+ Handle<Script> script(Script::cast(function->shared().script()), isolate);
while (script->has_eval_from_shared()) {
- script =
- handle(Script::cast(script->eval_from_shared()->script()), isolate);
+ script = handle(Script::cast(script->eval_from_shared().script()), isolate);
}
RETURN_RESULT_OR_FAILURE(
@@ -34,14 +33,14 @@
HandleScope scope(isolate);
DCHECK_EQ(1, args.length());
CONVERT_SMI_ARG_CHECKED(module_request, 0);
- Handle<Module> module(isolate->context()->module(), isolate);
+ Handle<Module> module(isolate->context().module(), isolate);
return *Module::GetModuleNamespace(isolate, module, module_request);
}
RUNTIME_FUNCTION(Runtime_GetImportMetaObject) {
HandleScope scope(isolate);
DCHECK_EQ(0, args.length());
- Handle<Module> module(isolate->context()->module(), isolate);
+ Handle<Module> module(isolate->context().module(), isolate);
return *isolate->RunHostInitializeImportMetaObjectCallback(module);
}
diff --git a/src/runtime/runtime-numbers.cc b/src/runtime/runtime-numbers.cc
index 6a22769..e496880 100644
--- a/src/runtime/runtime-numbers.cc
+++ b/src/runtime/runtime-numbers.cc
@@ -89,7 +89,7 @@
SealHandleScope shs(isolate);
DCHECK_EQ(1, args.length());
CONVERT_ARG_CHECKED(Object, obj, 0);
- return isolate->heap()->ToBoolean(obj->IsSmi());
+ return isolate->heap()->ToBoolean(obj.IsSmi());
}
diff --git a/src/runtime/runtime-object.cc b/src/runtime/runtime-object.cc
index 018da00..8b94d83 100644
--- a/src/runtime/runtime-object.cc
+++ b/src/runtime/runtime-object.cc
@@ -42,8 +42,8 @@
if (is_found_out) *is_found_out = it.IsFound();
if (!it.IsFound() && key->IsSymbol() &&
- Symbol::cast(*key)->is_private_name()) {
- Handle<Object> name_string(Symbol::cast(*key)->name(), isolate);
+ Symbol::cast(*key).is_private_name()) {
+ Handle<Object> name_string(Symbol::cast(*key).name(), isolate);
DCHECK(name_string->IsString());
THROW_NEW_ERROR(isolate,
NewTypeError(MessageTemplate::kInvalidPrivateFieldRead,
@@ -145,7 +145,7 @@
receiver->SetProperties(ReadOnlyRoots(isolate).empty_fixed_array());
} else {
Object filler = ReadOnlyRoots(isolate).one_pointer_filler_map();
- JSObject::cast(*receiver)->RawFastPropertyAtPut(index, filler);
+ JSObject::cast(*receiver).RawFastPropertyAtPut(index, filler);
// We must clear any recorded slot for the deleted property, because
// subsequent object modifications might put a raw double there.
// Slot clearing is the reason why this entire function cannot currently
@@ -165,7 +165,7 @@
receiver->synchronized_set_map(*parent_map);
#if VERIFY_HEAP
receiver->HeapObjectVerify(isolate);
- receiver->property_array()->PropertyArrayVerify(isolate);
+ receiver->property_array().PropertyArrayVerify(isolate);
#endif
return true;
}
@@ -304,9 +304,9 @@
}
Map map = js_obj->map();
- if (!map->has_hidden_prototype() &&
- (key_is_array_index ? !map->has_indexed_interceptor()
- : !map->has_named_interceptor())) {
+ if (!map.has_hidden_prototype() &&
+ (key_is_array_index ? !map.has_indexed_interceptor()
+ : !map.has_named_interceptor())) {
return ReadOnlyRoots(isolate).false_value();
}
@@ -335,7 +335,7 @@
} else if (object->IsString()) {
return isolate->heap()->ToBoolean(
key_is_array_index
- ? index < static_cast<uint32_t>(String::cast(*object)->length())
+ ? index < static_cast<uint32_t>(String::cast(*object).length())
: key->Equals(ReadOnlyRoots(isolate).length_string()));
} else if (object->IsNullOrUndefined(isolate)) {
THROW_NEW_ERROR_RETURN_FAILURE(
@@ -407,8 +407,8 @@
if (!success) return MaybeHandle<Object>();
if (!it.IsFound() && key->IsSymbol() &&
- Symbol::cast(*key)->is_private_name()) {
- Handle<Object> name_string(Symbol::cast(*key)->name(), isolate);
+ Symbol::cast(*key).is_private_name()) {
+ Handle<Object> name_string(Symbol::cast(*key).name(), isolate);
DCHECK(name_string->IsString());
THROW_NEW_ERROR(isolate,
NewTypeError(MessageTemplate::kInvalidPrivateFieldWrite,
@@ -523,7 +523,7 @@
// Convert string-index keys to their number variant to avoid internalization
// below; and speed up subsequent conversion to index.
uint32_t index;
- if (key_obj->IsString() && String::cast(*key_obj)->AsArrayIndex(&index)) {
+ if (key_obj->IsString() && String::cast(*key_obj).AsArrayIndex(&index)) {
key_obj = isolate->factory()->NewNumberFromUint(index);
}
if (receiver_obj->IsJSObject()) {
@@ -537,23 +537,23 @@
if (receiver->IsJSGlobalObject()) {
// Attempt dictionary lookup.
GlobalDictionary dictionary =
- JSGlobalObject::cast(*receiver)->global_dictionary();
- int entry = dictionary->FindEntry(isolate, key);
+ JSGlobalObject::cast(*receiver).global_dictionary();
+ int entry = dictionary.FindEntry(isolate, key);
if (entry != GlobalDictionary::kNotFound) {
- PropertyCell cell = dictionary->CellAt(entry);
- if (cell->property_details().kind() == kData) {
- Object value = cell->value();
- if (!value->IsTheHole(isolate)) return value;
+ PropertyCell cell = dictionary.CellAt(entry);
+ if (cell.property_details().kind() == kData) {
+ Object value = cell.value();
+ if (!value.IsTheHole(isolate)) return value;
// If value is the hole (meaning, absent) do the general lookup.
}
}
} else if (!receiver->HasFastProperties()) {
// Attempt dictionary lookup.
NameDictionary dictionary = receiver->property_dictionary();
- int entry = dictionary->FindEntry(isolate, key);
+ int entry = dictionary.FindEntry(isolate, key);
if ((entry != NameDictionary::kNotFound) &&
- (dictionary->DetailsAt(entry).kind() == kData)) {
- return dictionary->ValueAt(entry);
+ (dictionary.DetailsAt(entry).kind() == kData)) {
+ return dictionary.ValueAt(entry);
}
}
} else if (key_obj->IsSmi()) {
@@ -566,7 +566,7 @@
Handle<JSObject> js_object = Handle<JSObject>::cast(receiver_obj);
ElementsKind elements_kind = js_object->GetElementsKind();
if (IsDoubleElementsKind(elements_kind)) {
- if (Smi::ToInt(*key_obj) >= js_object->elements()->length()) {
+ if (Smi::ToInt(*key_obj) >= js_object->elements().length()) {
elements_kind = IsHoleyElementsKind(elements_kind) ? HOLEY_ELEMENTS
: PACKED_ELEMENTS;
JSObject::TransitionElementsKind(js_object, elements_kind);
@@ -781,7 +781,7 @@
if (!object->IsJSObject()) return Smi::kZero;
Handle<JSObject> js_object = Handle<JSObject>::cast(object);
// It could have been a DCHECK but we call this function directly from tests.
- if (!js_object->map()->is_deprecated()) return Smi::kZero;
+ if (!js_object->map().is_deprecated()) return Smi::kZero;
// This call must not cause lazy deopts, because it's called from deferred
// code where we can't handle lazy deopts for lack of a suitable bailout
// ID. So we just try migration and signal failure if necessary,
@@ -859,14 +859,14 @@
if (flags & DataPropertyInLiteralFlag::kSetFunctionName) {
DCHECK(value->IsJSFunction());
Handle<JSFunction> function = Handle<JSFunction>::cast(value);
- DCHECK(!function->shared()->HasSharedName());
+ DCHECK(!function->shared().HasSharedName());
Handle<Map> function_map(function->map(), isolate);
if (!JSFunction::SetName(function, name,
isolate->factory()->empty_string())) {
return ReadOnlyRoots(isolate).exception();
}
// Class constructors do not reserve in-object space for name field.
- CHECK_IMPLIES(!IsClassConstructor(function->shared()->kind()),
+ CHECK_IMPLIES(!IsClassConstructor(function->shared().kind()),
*function_map == function->map());
}
@@ -897,7 +897,7 @@
type = Handle<String>(ReadOnlyRoots(isolate).null_string(), isolate);
}
- DCHECK(vector->metadata()->HasTypeProfileSlot());
+ DCHECK(vector->metadata().HasTypeProfileSlot());
FeedbackNexus nexus(vector, vector->GetTypeProfileSlot());
nexus.Collect(type, position->value());
@@ -909,7 +909,7 @@
DCHECK_EQ(1, args.length());
CONVERT_ARG_CHECKED(HeapObject, obj, 0);
return isolate->heap()->ToBoolean(
- IsFastPackedElementsKind(obj->map()->elements_kind()));
+ IsFastPackedElementsKind(obj.map().elements_kind()));
}
@@ -917,7 +917,7 @@
SealHandleScope shs(isolate);
DCHECK_EQ(1, args.length());
CONVERT_ARG_CHECKED(Object, obj, 0);
- return isolate->heap()->ToBoolean(obj->IsJSReceiver());
+ return isolate->heap()->ToBoolean(obj.IsJSReceiver());
}
@@ -925,8 +925,8 @@
SealHandleScope shs(isolate);
DCHECK_EQ(1, args.length());
CONVERT_ARG_CHECKED(Object, obj, 0);
- if (!obj->IsJSReceiver()) return ReadOnlyRoots(isolate).null_value();
- return JSReceiver::cast(obj)->class_name();
+ if (!obj.IsJSReceiver()) return ReadOnlyRoots(isolate).null_value();
+ return JSReceiver::cast(obj).class_name();
}
RUNTIME_FUNCTION(Runtime_GetFunctionName) {
@@ -944,7 +944,7 @@
CONVERT_ARG_HANDLE_CHECKED(JSFunction, getter, 2);
CONVERT_PROPERTY_ATTRIBUTES_CHECKED(attrs, 3);
- if (String::cast(getter->shared()->Name())->length() == 0) {
+ if (String::cast(getter->shared().Name()).length() == 0) {
Handle<Map> getter_map(getter->map(), isolate);
if (!JSFunction::SetName(getter, name, isolate->factory()->get_string())) {
return ReadOnlyRoots(isolate).exception();
@@ -1011,7 +1011,7 @@
// instead because of our call to %ToName() in the desugaring for
// computed properties.
if (property->IsString() &&
- String::cast(*property)->AsArrayIndex(&property_num)) {
+ String::cast(*property).AsArrayIndex(&property_num)) {
property = isolate->factory()->NewNumberFromUint(property_num);
}
@@ -1034,7 +1034,7 @@
CONVERT_ARG_HANDLE_CHECKED(JSFunction, setter, 2);
CONVERT_PROPERTY_ATTRIBUTES_CHECKED(attrs, 3);
- if (String::cast(setter->shared()->Name())->length() == 0) {
+ if (String::cast(setter->shared().Name()).length() == 0) {
Handle<Map> setter_map(setter->map(), isolate);
if (!JSFunction::SetName(setter, name, isolate->factory()->set_string())) {
return ReadOnlyRoots(isolate).exception();
diff --git a/src/runtime/runtime-operators.cc b/src/runtime/runtime-operators.cc
index e81c3a3..272502b 100644
--- a/src/runtime/runtime-operators.cc
+++ b/src/runtime/runtime-operators.cc
@@ -45,7 +45,7 @@
DCHECK_EQ(2, args.length());
CONVERT_ARG_CHECKED(Object, x, 0);
CONVERT_ARG_CHECKED(Object, y, 1);
- return isolate->heap()->ToBoolean(x->StrictEquals(y));
+ return isolate->heap()->ToBoolean(x.StrictEquals(y));
}
RUNTIME_FUNCTION(Runtime_StrictNotEqual) {
@@ -53,7 +53,7 @@
DCHECK_EQ(2, args.length());
CONVERT_ARG_CHECKED(Object, x, 0);
CONVERT_ARG_CHECKED(Object, y, 1);
- return isolate->heap()->ToBoolean(!x->StrictEquals(y));
+ return isolate->heap()->ToBoolean(!x.StrictEquals(y));
}
RUNTIME_FUNCTION(Runtime_LessThan) {
diff --git a/src/runtime/runtime-promise.cc b/src/runtime/runtime-promise.cc
index 8f84ebc..2bfc824 100644
--- a/src/runtime/runtime-promise.cc
+++ b/src/runtime/runtime-promise.cc
@@ -80,7 +80,7 @@
Handle<CallableTask> microtask = isolate->factory()->NewCallableTask(
function, handle(function->native_context(), isolate));
MicrotaskQueue* microtask_queue =
- function->native_context()->microtask_queue();
+ function->native_context().microtask_queue();
if (microtask_queue) microtask_queue->EnqueueMicrotask(*microtask);
return ReadOnlyRoots(isolate).undefined_value();
}
@@ -117,7 +117,7 @@
DCHECK_EQ(1, args.length());
CONVERT_ARG_CHECKED(JSPromise, promise, 0);
- promise->set_has_handler(true);
+ promise.set_has_handler(true);
return ReadOnlyRoots(isolate).undefined_value();
}
diff --git a/src/runtime/runtime-regexp.cc b/src/runtime/runtime-regexp.cc
index 3c9d956..85c9ebc 100644
--- a/src/runtime/runtime-regexp.cc
+++ b/src/runtime/runtime-regexp.cc
@@ -45,17 +45,17 @@
// internalized strings.
int maybe_capture_index = -1;
- const int named_capture_count = capture_name_map->length() >> 1;
+ const int named_capture_count = capture_name_map.length() >> 1;
for (int j = 0; j < named_capture_count; j++) {
// The format of {capture_name_map} is documented at
// JSRegExp::kIrregexpCaptureNameMapIndex.
const int name_ix = j * 2;
const int index_ix = j * 2 + 1;
- String capture_name = String::cast(capture_name_map->get(name_ix));
+ String capture_name = String::cast(capture_name_map.get(name_ix));
if (!name_matches(capture_name)) continue;
- maybe_capture_index = Smi::ToInt(capture_name_map->get(index_ix));
+ maybe_capture_index = Smi::ToInt(capture_name_map.get(index_ix));
break;
}
@@ -267,7 +267,7 @@
const int capture_index = LookupNamedCapture(
[=](String capture_name) {
- return capture_name->IsEqualTo(requested_name);
+ return capture_name.IsEqualTo(requested_name);
},
capture_name_map);
@@ -323,7 +323,7 @@
if (capture_count > 0) {
DCHECK_EQ(regexp->TypeTag(), JSRegExp::IRREGEXP);
Object maybe_capture_name_map = regexp->CaptureNameMap();
- if (maybe_capture_name_map->IsFixedArray()) {
+ if (maybe_capture_name_map.IsFixedArray()) {
capture_name_map = FixedArray::cast(maybe_capture_name_map);
}
}
@@ -454,8 +454,8 @@
std::vector<int>* indices, unsigned int limit) {
{
DisallowHeapAllocation no_gc;
- String::FlatContent subject_content = subject->GetFlatContent(no_gc);
- String::FlatContent pattern_content = pattern->GetFlatContent(no_gc);
+ String::FlatContent subject_content = subject.GetFlatContent(no_gc);
+ String::FlatContent pattern_content = pattern.GetFlatContent(no_gc);
DCHECK(subject_content.IsFlat());
DCHECK(pattern_content.IsFlat());
if (subject_content.IsOneByte()) {
@@ -533,7 +533,7 @@
String pattern =
String::cast(pattern_regexp->DataAt(JSRegExp::kAtomPatternIndex));
int subject_len = subject->length();
- int pattern_len = pattern->length();
+ int pattern_len = pattern.length();
int replacement_len = replacement->length();
FindStringIndicesDispatch(isolate, *subject, pattern, indices, 0xFFFFFFFF);
@@ -893,7 +893,7 @@
if (regexp->TypeTag() == JSRegExp::IRREGEXP) {
Object o = regexp->CaptureNameMap();
- has_named_captures_ = o->IsFixedArray();
+ has_named_captures_ = o.IsFixedArray();
if (has_named_captures_) {
capture_name_map_ = handle(FixedArray::cast(o), isolate);
}
@@ -934,7 +934,7 @@
CaptureState* state) override {
DCHECK(has_named_captures_);
const int capture_index = LookupNamedCapture(
- [=](String capture_name) { return capture_name->Equals(*name); },
+ [=](String capture_name) { return capture_name.Equals(*name); },
*capture_name_map_);
if (capture_index == -1) {
@@ -1095,11 +1095,11 @@
Object cached_answer = RegExpResultsCache::Lookup(
isolate->heap(), *subject, regexp->data(), &last_match_cache,
RegExpResultsCache::REGEXP_MULTIPLE_INDICES);
- if (cached_answer->IsFixedArray()) {
+ if (cached_answer.IsFixedArray()) {
int capture_registers = (capture_count + 1) * 2;
int32_t* last_match = NewArray<int32_t>(capture_registers);
for (int i = 0; i < capture_registers; i++) {
- last_match[i] = Smi::ToInt(last_match_cache->get(i));
+ last_match[i] = Smi::ToInt(last_match_cache.get(i));
}
Handle<FixedArray> cached_fixed_array =
Handle<FixedArray>(FixedArray::cast(cached_answer), isolate);
@@ -1339,7 +1339,7 @@
Object result = StringReplaceGlobalRegExpWithString(
isolate, string, regexp, replace, last_match_info);
- if (result->IsString()) {
+ if (result.IsString()) {
return handle(String::cast(result), isolate);
} else {
return MaybeHandle<String>();
@@ -1387,7 +1387,7 @@
CONVERT_ARG_HANDLE_CHECKED(JSReceiver, replace_obj, 2);
DCHECK(RegExpUtils::IsUnmodifiedRegExp(isolate, regexp));
- DCHECK(replace_obj->map()->is_callable());
+ DCHECK(replace_obj->map().is_callable());
Factory* factory = isolate->factory();
Handle<RegExpMatchInfo> last_match_info = isolate->regexp_last_match_info();
@@ -1450,7 +1450,7 @@
DCHECK_EQ(regexp->TypeTag(), JSRegExp::IRREGEXP);
Object maybe_capture_map = regexp->CaptureNameMap();
- if (maybe_capture_map->IsFixedArray()) {
+ if (maybe_capture_map.IsFixedArray()) {
has_named_captures = true;
capture_map = handle(FixedArray::cast(maybe_capture_map), isolate);
}
@@ -1904,7 +1904,7 @@
SealHandleScope shs(isolate);
DCHECK_EQ(1, args.length());
CONVERT_ARG_CHECKED(Object, obj, 0);
- return isolate->heap()->ToBoolean(obj->IsJSRegExp());
+ return isolate->heap()->ToBoolean(obj.IsJSRegExp());
}
} // namespace internal
diff --git a/src/runtime/runtime-scopes.cc b/src/runtime/runtime-scopes.cc
index 85fb761..25d10e3 100644
--- a/src/runtime/runtime-scopes.cc
+++ b/src/runtime/runtime-scopes.cc
@@ -52,7 +52,7 @@
Handle<FeedbackVector> feedback_vector = Handle<FeedbackVector>(),
FeedbackSlot slot = FeedbackSlot::Invalid()) {
Handle<ScriptContextTable> script_contexts(
- global->native_context()->script_context_table(), isolate);
+ global->native_context().script_context_table(), isolate);
ScriptContextTable::LookupResult lookup;
if (ScriptContextTable::Lookup(isolate, *script_contexts, *name, &lookup) &&
IsLexicalVariableMode(lookup.mode)) {
@@ -125,7 +125,7 @@
// Preinitialize the feedback slot if the global object does not have
// named interceptor or the interceptor is not masking.
if (!global->HasNamedInterceptor() ||
- global->GetNamedInterceptor()->non_masking()) {
+ global->GetNamedInterceptor().non_masking()) {
FeedbackNexus nexus(feedback_vector, slot);
nexus.ConfigurePropertyCellMode(it.GetPropertyCell());
}
@@ -221,12 +221,12 @@
// context, or a declaration block scope. Since this is called from eval, the
// context passed is the context of the caller, which may be some nested
// context and not the declaration context.
- Handle<Context> context(isolate->context()->declaration_context(), isolate);
+ Handle<Context> context(isolate->context().declaration_context(), isolate);
DCHECK(context->IsFunctionContext() || context->IsNativeContext() ||
context->IsScriptContext() || context->IsEvalContext() ||
(context->IsBlockContext() &&
- context->scope_info()->is_declaration_scope()));
+ context->scope_info().is_declaration_scope()));
bool is_function = value->IsJSFunction();
bool is_var = !is_function;
@@ -252,13 +252,13 @@
value, NONE, is_var, is_function,
RedeclarationType::kTypeError);
}
- if (context->extension()->IsJSGlobalObject()) {
+ if (context->extension().IsJSGlobalObject()) {
Handle<JSGlobalObject> global(JSGlobalObject::cast(context->extension()),
isolate);
return DeclareGlobal(isolate, global, name, value, NONE, is_var,
is_function, RedeclarationType::kTypeError);
} else if (context->IsScriptContext()) {
- DCHECK(context->global_object()->IsJSGlobalObject());
+ DCHECK(context->global_object().IsJSGlobalObject());
Handle<JSGlobalObject> global(
JSGlobalObject::cast(context->global_object()), isolate);
return DeclareGlobal(isolate, global, name, value, NONE, is_var,
@@ -288,7 +288,7 @@
// yet. Sloppy eval will never have an extension object, as vars are hoisted
// out, and lets are known statically.
DCHECK((context->IsBlockContext() &&
- context->scope_info()->is_declaration_scope()) ||
+ context->scope_info().is_declaration_scope()) ||
context->IsFunctionContext());
object =
isolate->factory()->NewJSObject(isolate->context_extension_function());
@@ -389,13 +389,13 @@
template <typename T>
Handle<JSObject> NewSloppyArguments(Isolate* isolate, Handle<JSFunction> callee,
T parameters, int argument_count) {
- CHECK(!IsDerivedConstructor(callee->shared()->kind()));
- DCHECK(callee->shared()->has_simple_parameters());
+ CHECK(!IsDerivedConstructor(callee->shared().kind()));
+ DCHECK(callee->shared().has_simple_parameters());
Handle<JSObject> result =
isolate->factory()->NewArgumentsObject(callee, argument_count);
// Allocate the elements if needed.
- int parameter_count = callee->shared()->internal_formal_parameter_count();
+ int parameter_count = callee->shared().internal_formal_parameter_count();
if (argument_count > 0) {
if (parameter_count > 0) {
int mapped_count = Min(argument_count, parameter_count);
@@ -423,7 +423,7 @@
--index;
}
- Handle<ScopeInfo> scope_info(callee->shared()->scope_info(), isolate);
+ Handle<ScopeInfo> scope_info(callee->shared().scope_info(), isolate);
// First mark all mappable slots as unmapped and copy the values into the
// arguments object.
@@ -522,7 +522,7 @@
HandleScope scope(isolate);
DCHECK_EQ(1, args.length());
CONVERT_ARG_HANDLE_CHECKED(JSFunction, callee, 0)
- int start_index = callee->shared()->internal_formal_parameter_count();
+ int start_index = callee->shared().internal_formal_parameter_count();
// This generic runtime function can also be used when the caller has been
// inlined, we use the slow but accurate {GetCallerArguments}.
int argument_count = 0;
@@ -535,9 +535,9 @@
{
DisallowHeapAllocation no_gc;
FixedArray elements = FixedArray::cast(result->elements());
- WriteBarrierMode mode = elements->GetWriteBarrierMode(no_gc);
+ WriteBarrierMode mode = elements.GetWriteBarrierMode(no_gc);
for (int i = 0; i < num_elements; i++) {
- elements->set(i, *arguments[i + start_index], mode);
+ elements.set(i, *arguments[i + start_index], mode);
}
}
return *result;
@@ -580,7 +580,7 @@
// Note that args[0] is the address of an array of full object pointers
// (a.k.a. FullObjectSlot), which looks like a Smi because it's aligned.
DCHECK(args[0].IsSmi());
- FullObjectSlot frame(args[0]->ptr());
+ FullObjectSlot frame(args[0].ptr());
CONVERT_SMI_ARG_CHECKED(length, 1);
CONVERT_SMI_ARG_CHECKED(mapped_count, 2);
Handle<FixedArray> result =
@@ -811,7 +811,7 @@
// If the "property" we were looking for is a local variable, the
// receiver is the global object; see ECMA-262, 3rd., 10.1.6 and 10.2.3.
Handle<Object> receiver = isolate->factory()->undefined_value();
- Handle<Object> value = handle(Context::cast(*holder)->get(index), isolate);
+ Handle<Object> value = handle(Context::cast(*holder).get(index), isolate);
// Check for uninitialized bindings.
if (flag == kNeedsInitialization && value->IsTheHole(isolate)) {
THROW_NEW_ERROR(isolate,
@@ -875,7 +875,7 @@
RUNTIME_FUNCTION_RETURN_PAIR(Runtime_LoadLookupSlotForCall) {
HandleScope scope(isolate);
DCHECK_EQ(1, args.length());
- DCHECK(args[0]->IsString());
+ DCHECK(args[0].IsString());
Handle<String> name = args.at<String>(0);
Handle<Object> value;
Handle<Object> receiver;
@@ -915,7 +915,7 @@
// The property was found in a context slot.
if (index != Context::kNotFound) {
if (flag == kNeedsInitialization &&
- Handle<Context>::cast(holder)->get(index)->IsTheHole(isolate)) {
+ Handle<Context>::cast(holder)->get(index).IsTheHole(isolate)) {
THROW_NEW_ERROR(isolate,
NewReferenceError(MessageTemplate::kNotDefined, name),
Object);
@@ -985,7 +985,7 @@
CONVERT_ARG_HANDLE_CHECKED(Object, value, 1);
const ContextLookupFlags lookup_flags =
static_cast<ContextLookupFlags>(DONT_FOLLOW_CHAINS);
- Handle<Context> declaration_context(isolate->context()->declaration_context(),
+ Handle<Context> declaration_context(isolate->context().declaration_context(),
isolate);
RETURN_RESULT_OR_FAILURE(
isolate, StoreLookupSlot(isolate, declaration_context, name, value,
diff --git a/src/runtime/runtime-strings.cc b/src/runtime/runtime-strings.cc
index 99e33f9..ed5115a 100644
--- a/src/runtime/runtime-strings.cc
+++ b/src/runtime/runtime-strings.cc
@@ -77,8 +77,8 @@
recursion_limit--;
if (subject->IsConsString()) {
ConsString cons = ConsString::cast(*subject);
- Handle<String> first = handle(cons->first(), isolate);
- Handle<String> second = handle(cons->second(), isolate);
+ Handle<String> first = handle(cons.first(), isolate);
+ Handle<String> second = handle(cons.second(), isolate);
Handle<String> new_first;
if (!StringReplaceOneCharWithString(isolate, first, search, replace, found,
recursion_limit).ToHandle(&new_first)) {
@@ -276,7 +276,7 @@
DCHECK_EQ(3, args.length());
CONVERT_ARG_HANDLE_CHECKED(JSArray, array, 0);
int32_t array_length;
- if (!args[1]->ToInt32(&array_length)) {
+ if (!args[1].ToInt32(&array_length)) {
THROW_NEW_ERROR_RETURN_FAILURE(isolate, NewInvalidStringLengthError());
}
CONVERT_ARG_HANDLE_CHECKED(String, special, 2);
@@ -303,15 +303,15 @@
{
DisallowHeapAllocation no_gc;
FixedArray fixed_array = FixedArray::cast(array->elements());
- if (fixed_array->length() < array_length) {
- array_length = fixed_array->length();
+ if (fixed_array.length() < array_length) {
+ array_length = fixed_array.length();
}
if (array_length == 0) {
return ReadOnlyRoots(isolate).empty_string();
} else if (array_length == 1) {
- Object first = fixed_array->get(0);
- if (first->IsString()) return first;
+ Object first = fixed_array.get(0);
+ if (first.IsString()) return first;
}
length = StringBuilderConcatLength(special_length, fixed_array,
array_length, &one_byte);
@@ -356,20 +356,20 @@
FixedArray one_byte_cache = heap->single_character_string_cache();
Object undefined = ReadOnlyRoots(heap).undefined_value();
int i;
- WriteBarrierMode mode = elements->GetWriteBarrierMode(no_gc);
+ WriteBarrierMode mode = elements.GetWriteBarrierMode(no_gc);
for (i = 0; i < length; ++i) {
- Object value = one_byte_cache->get(chars[i]);
+ Object value = one_byte_cache.get(chars[i]);
if (value == undefined) break;
- elements->set(i, value, mode);
+ elements.set(i, value, mode);
}
if (i < length) {
- MemsetTagged(elements->RawFieldOfElementAt(i), Smi::kZero, length - i);
+ MemsetTagged(elements.RawFieldOfElementAt(i), Smi::kZero, length - i);
}
#ifdef DEBUG
for (int j = 0; j < length; ++j) {
- Object element = elements->get(j);
+ Object element = elements.get(j);
DCHECK(element == Smi::kZero ||
- (element->IsString() && String::cast(element)->LooksValid()));
+ (element.IsString() && String::cast(element).LooksValid()));
}
#endif
return i;
@@ -415,7 +415,7 @@
#ifdef DEBUG
for (int i = 0; i < length; ++i) {
- DCHECK_EQ(String::cast(elements->get(i))->length(), 1);
+ DCHECK_EQ(String::cast(elements->get(i)).length(), 1);
}
#endif
diff --git a/src/runtime/runtime-symbol.cc b/src/runtime/runtime-symbol.cc
index 1595f16..1d698e6 100644
--- a/src/runtime/runtime-symbol.cc
+++ b/src/runtime/runtime-symbol.cc
@@ -39,7 +39,7 @@
CONVERT_ARG_HANDLE_CHECKED(Symbol, symbol, 0);
IncrementalStringBuilder builder(isolate);
builder.AppendCString("Symbol(");
- if (symbol->name()->IsString()) {
+ if (symbol->name().IsString()) {
builder.AppendString(handle(String::cast(symbol->name()), isolate));
}
builder.AppendCharacter(')');
@@ -51,7 +51,7 @@
SealHandleScope shs(isolate);
DCHECK_EQ(1, args.length());
CONVERT_ARG_CHECKED(Symbol, symbol, 0);
- return isolate->heap()->ToBoolean(symbol->is_private());
+ return isolate->heap()->ToBoolean(symbol.is_private());
}
} // namespace internal
} // namespace v8
diff --git a/src/runtime/runtime-test.cc b/src/runtime/runtime-test.cc
index 40636e2..86817be 100644
--- a/src/runtime/runtime-test.cc
+++ b/src/runtime/runtime-test.cc
@@ -223,7 +223,7 @@
Handle<JSFunction> function) {
// TODO(mythria): Remove the check for undefined, once we fix all tests to
// add PrepareForOptimization when using OptimizeFunctionOnNextCall.
- if (isolate->heap()->pending_optimize_for_test_bytecode()->IsUndefined()) {
+ if (isolate->heap()->pending_optimize_for_test_bytecode().IsUndefined()) {
return;
}
@@ -258,25 +258,25 @@
// The following conditions were lifted (in part) from the DCHECK inside
// JSFunction::MarkForOptimization().
- if (!function->shared()->allows_lazy_compilation()) {
+ if (!function->shared().allows_lazy_compilation()) {
return ReadOnlyRoots(isolate).undefined_value();
}
// If function isn't compiled, compile it now.
- IsCompiledScope is_compiled_scope(function->shared()->is_compiled_scope());
+ IsCompiledScope is_compiled_scope(function->shared().is_compiled_scope());
if (!is_compiled_scope.is_compiled() &&
!Compiler::Compile(function, Compiler::CLEAR_EXCEPTION,
&is_compiled_scope)) {
return ReadOnlyRoots(isolate).undefined_value();
}
- if (function->shared()->optimization_disabled() &&
- function->shared()->disable_optimization_reason() ==
+ if (function->shared().optimization_disabled() &&
+ function->shared().disable_optimization_reason() ==
BailoutReason::kNeverOptimize) {
return ReadOnlyRoots(isolate).undefined_value();
}
- if (function->shared()->HasAsmWasmData()) {
+ if (function->shared().HasAsmWasmData()) {
return ReadOnlyRoots(isolate).undefined_value();
}
@@ -321,7 +321,7 @@
// This function may not have been lazily compiled yet, even though its shared
// function has.
if (!function->is_compiled()) {
- DCHECK(function->shared()->IsInterpreted());
+ DCHECK(function->shared().IsInterpreted());
function->set_code(*BUILTIN_CODE(isolate, InterpreterEntryTrampoline));
}
@@ -335,12 +335,12 @@
bool EnsureFeedbackVector(Handle<JSFunction> function) {
// Check function allows lazy compilation.
- if (!function->shared()->allows_lazy_compilation()) {
+ if (!function->shared().allows_lazy_compilation()) {
return false;
}
// If function isn't compiled, compile it now.
- IsCompiledScope is_compiled_scope(function->shared()->is_compiled_scope());
+ IsCompiledScope is_compiled_scope(function->shared().is_compiled_scope());
if (!is_compiled_scope.is_compiled() &&
!Compiler::Compile(function, Compiler::CLEAR_EXCEPTION,
&is_compiled_scope)) {
@@ -375,28 +375,28 @@
// If optimization is disabled for the function, return without making it
// pending optimize for test.
- if (function->shared()->optimization_disabled() &&
- function->shared()->disable_optimization_reason() ==
+ if (function->shared().optimization_disabled() &&
+ function->shared().disable_optimization_reason() ==
BailoutReason::kNeverOptimize) {
return ReadOnlyRoots(isolate).undefined_value();
}
// We don't optimize Asm/Wasm functions.
- if (function->shared()->HasAsmWasmData()) {
+ if (function->shared().HasAsmWasmData()) {
return ReadOnlyRoots(isolate).undefined_value();
}
// Hold onto the bytecode array between marking and optimization to ensure
// it's not flushed.
Handle<ObjectHashTable> table =
- isolate->heap()->pending_optimize_for_test_bytecode()->IsUndefined()
+ isolate->heap()->pending_optimize_for_test_bytecode().IsUndefined()
? ObjectHashTable::New(isolate, 1)
: handle(ObjectHashTable::cast(
isolate->heap()->pending_optimize_for_test_bytecode()),
isolate);
table = ObjectHashTable::Put(
table, handle(function->shared(), isolate),
- handle(function->shared()->GetBytecodeArray(), isolate));
+ handle(function->shared().GetBytecodeArray(), isolate));
isolate->heap()->SetPendingOptimizeForTestBytecode(*table);
return ReadOnlyRoots(isolate).undefined_value();
@@ -417,8 +417,8 @@
if (!it.done()) function = handle(it.frame()->function(), isolate);
if (function.is_null()) return ReadOnlyRoots(isolate).undefined_value();
- if (function->shared()->optimization_disabled() &&
- function->shared()->disable_optimization_reason() ==
+ if (function->shared().optimization_disabled() &&
+ function->shared().disable_optimization_reason() ==
BailoutReason::kNeverOptimize) {
return ReadOnlyRoots(isolate).undefined_value();
}
@@ -471,7 +471,7 @@
return ReadOnlyRoots(isolate).undefined_value();
}
Handle<JSFunction> function = Handle<JSFunction>::cast(function_object);
- function->shared()->DisableOptimization(BailoutReason::kNeverOptimize);
+ function->shared().DisableOptimization(BailoutReason::kNeverOptimize);
return ReadOnlyRoots(isolate).undefined_value();
}
@@ -533,7 +533,7 @@
if (function->IsOptimized()) {
status |= static_cast<int>(OptimizationStatus::kOptimized);
- if (function->code()->is_turbofanned()) {
+ if (function->code().is_turbofanned()) {
status |= static_cast<int>(OptimizationStatus::kTurboFanned);
}
}
@@ -578,7 +578,7 @@
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
// Functions without a feedback vector have never deoptimized.
if (!function->has_feedback_vector()) return Smi::kZero;
- return Smi::FromInt(function->feedback_vector()->deopt_count());
+ return Smi::FromInt(function->feedback_vector().deopt_count());
}
static void ReturnThis(const v8::FunctionCallbackInfo<v8::Value>& args) {
@@ -705,11 +705,11 @@
bool weak = maybe_object.IsWeak();
#ifdef DEBUG
- if (object->IsString() && !isolate->context().is_null()) {
+ if (object.IsString() && !isolate->context().is_null()) {
DCHECK(!weak);
// If we have a string, assume it's a code "marker"
// and print some interesting cpu debugging info.
- object->Print(os);
+ object.Print(os);
JavaScriptFrameIterator it(isolate);
JavaScriptFrame* frame = it.frame();
os << "fp = " << reinterpret_cast<void*>(frame->fp())
@@ -721,10 +721,10 @@
if (weak) {
os << "[weak] ";
}
- object->Print(os);
+ object.Print(os);
}
- if (object->IsHeapObject()) {
- HeapObject::cast(object)->map()->Print(os);
+ if (object.IsHeapObject()) {
+ HeapObject::cast(object).map().Print(os);
}
#else
if (weak) {
@@ -752,7 +752,7 @@
PrintF("%c", character);
}
PrintF(": ");
- args[1]->ShortPrint();
+ args[1].ShortPrint();
PrintF("\n");
return ReadOnlyRoots(isolate).undefined_value();
@@ -820,10 +820,10 @@
SealHandleScope shs(isolate);
DCHECK_EQ(1, args.length());
CONVERT_ARG_CHECKED(Object, arg, 0);
- if (arg->IsTrue(isolate)) {
+ if (arg.IsTrue(isolate)) {
isolate->set_force_slow_path(true);
} else {
- DCHECK(arg->IsFalse(isolate));
+ DCHECK(arg.IsFalse(isolate));
isolate->set_force_slow_path(false);
}
return ReadOnlyRoots(isolate).undefined_value();
@@ -868,7 +868,7 @@
return ReadOnlyRoots(isolate).exception();
}
StdoutStream os;
- func->code()->Print(os);
+ func->code().Print(os);
os << std::endl;
#endif // DEBUG
return ReadOnlyRoots(isolate).undefined_value();
@@ -910,7 +910,7 @@
CONVERT_ARG_CHECKED(Object, obj, 0);
PrintIndentation(isolate);
PrintF("} -> ");
- obj->ShortPrint();
+ obj.ShortPrint();
PrintF("\n");
return obj; // return TOS
}
@@ -920,7 +920,7 @@
DCHECK_EQ(2, args.length());
CONVERT_ARG_CHECKED(JSObject, obj1, 0);
CONVERT_ARG_CHECKED(JSObject, obj2, 1);
- return isolate->heap()->ToBoolean(obj1->map() == obj2->map());
+ return isolate->heap()->ToBoolean(obj1.map() == obj2.map());
}
@@ -935,12 +935,12 @@
SealHandleScope shs(isolate);
DCHECK_EQ(1, args.length());
CONVERT_ARG_CHECKED(JSFunction, function, 0);
- if (!function->shared()->HasAsmWasmData()) {
+ if (!function.shared().HasAsmWasmData()) {
// Doesn't have wasm data.
return ReadOnlyRoots(isolate).false_value();
}
- if (function->shared()->HasBuiltinId() &&
- function->shared()->builtin_id() == Builtins::kInstantiateAsmJs) {
+ if (function.shared().HasBuiltinId() &&
+ function.shared().builtin_id() == Builtins::kInstantiateAsmJs) {
// Hasn't been compiled yet.
return ReadOnlyRoots(isolate).false_value();
}
@@ -978,7 +978,7 @@
SealHandleScope shs(isolate);
DCHECK_EQ(1, args.length());
CONVERT_ARG_CHECKED(JSFunction, function, 0);
- bool is_js_to_wasm = function->code()->kind() == Code::JS_TO_WASM_FUNCTION;
+ bool is_js_to_wasm = function.code().kind() == Code::JS_TO_WASM_FUNCTION;
return isolate->heap()->ToBoolean(is_js_to_wasm);
}
@@ -1045,10 +1045,10 @@
return ReadOnlyRoots(isolate).undefined_value();
}
-#define ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(Name) \
- RUNTIME_FUNCTION(Runtime_Has##Name) { \
- CONVERT_ARG_CHECKED(JSObject, obj, 0); \
- return isolate->heap()->ToBoolean(obj->Has##Name()); \
+#define ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(Name) \
+ RUNTIME_FUNCTION(Runtime_Has##Name) { \
+ CONVERT_ARG_CHECKED(JSObject, obj, 0); \
+ return isolate->heap()->ToBoolean(obj.Has##Name()); \
}
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(FastElements)
@@ -1068,7 +1068,7 @@
#define FIXED_TYPED_ARRAYS_CHECK_RUNTIME_FUNCTION(Type, type, TYPE, ctype) \
RUNTIME_FUNCTION(Runtime_HasFixed##Type##Elements) { \
CONVERT_ARG_CHECKED(JSObject, obj, 0); \
- return isolate->heap()->ToBoolean(obj->HasFixed##Type##Elements()); \
+ return isolate->heap()->ToBoolean(obj.HasFixed##Type##Elements()); \
}
TYPED_ARRAYS(FIXED_TYPED_ARRAYS_CHECK_RUNTIME_FUNCTION)
@@ -1161,7 +1161,7 @@
#else
CHECK(object->IsObject());
if (object->IsHeapObject()) {
- CHECK(HeapObject::cast(*object)->map()->IsMap());
+ CHECK(HeapObject::cast(*object).map().IsMap());
} else {
CHECK(object->IsSmi());
}
@@ -1175,8 +1175,8 @@
CONVERT_ARG_HANDLE_CHECKED(WasmModuleObject, module_obj, 0);
int instance_count = 0;
WeakArrayList weak_instance_list = module_obj->weak_instance_list();
- for (int i = 0; i < weak_instance_list->length(); ++i) {
- if (weak_instance_list->Get(i)->IsWeak()) instance_count++;
+ for (int i = 0; i < weak_instance_list.length(); ++i) {
+ if (weak_instance_list.Get(i)->IsWeak()) instance_count++;
}
return Smi::FromInt(instance_count);
}
@@ -1186,7 +1186,7 @@
HandleScope scope(isolate);
CONVERT_ARG_HANDLE_CHECKED(WasmInstanceObject, instance, 0);
if (!instance->has_debug_info()) return Object();
- uint64_t num = instance->debug_info()->NumInterpretedCalls();
+ uint64_t num = instance->debug_info().NumInterpretedCalls();
return *isolate->factory()->NewNumberFromSize(static_cast<size_t>(num));
}
@@ -1218,12 +1218,12 @@
WasmCompiledFrame* frame = WasmCompiledFrame::cast(it.frame());
uint8_t* mem_start = reinterpret_cast<uint8_t*>(
- frame->wasm_instance()->memory_object()->array_buffer()->backing_store());
+ frame->wasm_instance().memory_object().array_buffer().backing_store());
int func_index = frame->function_index();
int pos = frame->position();
// TODO(titzer): eliminate dependency on WasmModule definition here.
int func_start =
- frame->wasm_instance()->module()->functions[func_index].code.offset();
+ frame->wasm_instance().module()->functions[func_index].code.offset();
wasm::ExecutionTier tier = frame->wasm_code()->is_liftoff()
? wasm::ExecutionTier::kLiftoff
: wasm::ExecutionTier::kTurbofan;
@@ -1237,7 +1237,7 @@
DCHECK_EQ(2, args.length());
CONVERT_ARG_HANDLE_CHECKED(WasmInstanceObject, instance, 0);
CONVERT_SMI_ARG_CHECKED(function_index, 1);
- auto* native_module = instance->module_object()->native_module();
+ auto* native_module = instance->module_object().native_module();
isolate->wasm_engine()->CompileFunction(
isolate, native_module, function_index, wasm::ExecutionTier::kTurbofan);
CHECK(!native_module->compilation_state()->failed());
@@ -1252,7 +1252,7 @@
Handle<WasmExportedFunction> exp_fun =
Handle<WasmExportedFunction>::cast(function);
wasm::NativeModule* native_module =
- exp_fun->instance()->module_object()->native_module();
+ exp_fun->instance().module_object().native_module();
uint32_t func_index = exp_fun->function_index();
wasm::WasmCodeRefScope code_ref_scope;
wasm::WasmCode* code = native_module->GetCode(func_index);
@@ -1264,7 +1264,7 @@
DCHECK_EQ(1, args.length());
CONVERT_ARG_HANDLE_CHECKED(JSObject, object, 0);
- object->map()->CompleteInobjectSlackTracking(isolate);
+ object->map().CompleteInobjectSlackTracking(isolate);
return ReadOnlyRoots(isolate).undefined_value();
}
@@ -1274,7 +1274,7 @@
DisallowHeapAllocation no_gc;
CONVERT_ARG_CHECKED(WasmInstanceObject, instance, 0);
- instance->module_object()->native_module()->set_lazy_compile_frozen(true);
+ instance.module_object().native_module()->set_lazy_compile_frozen(true);
return ReadOnlyRoots(isolate).undefined_value();
}
diff --git a/src/runtime/runtime-typedarray.cc b/src/runtime/runtime-typedarray.cc
index 6012ae4..ba1e0d3 100644
--- a/src/runtime/runtime-typedarray.cc
+++ b/src/runtime/runtime-typedarray.cc
@@ -108,7 +108,7 @@
// In case of a SAB, the data is copied into temporary memory, as
// std::sort might crash in case the underlying data is concurrently
// modified while sorting.
- CHECK(array->buffer()->IsJSArrayBuffer());
+ CHECK(array->buffer().IsJSArrayBuffer());
Handle<JSArrayBuffer> buffer(JSArrayBuffer::cast(array->buffer()), isolate);
const bool copy_data = buffer->is_shared();
diff --git a/src/runtime/runtime-utils.h b/src/runtime/runtime-utils.h
index 7d35010..13b0955 100644
--- a/src/runtime/runtime-utils.h
+++ b/src/runtime/runtime-utils.h
@@ -17,40 +17,40 @@
// it in a variable with the given name. If the object is not of the
// expected type we crash safely.
#define CONVERT_ARG_CHECKED(Type, name, index) \
- CHECK(args[index]->Is##Type()); \
+ CHECK(args[index].Is##Type()); \
Type name = Type::cast(args[index]);
#define CONVERT_ARG_HANDLE_CHECKED(Type, name, index) \
- CHECK(args[index]->Is##Type()); \
+ CHECK(args[index].Is##Type()); \
Handle<Type> name = args.at<Type>(index);
#define CONVERT_NUMBER_ARG_HANDLE_CHECKED(name, index) \
- CHECK(args[index]->IsNumber()); \
+ CHECK(args[index].IsNumber()); \
Handle<Object> name = args.at(index);
// Cast the given object to a boolean and store it in a variable with
// the given name. If the object is not a boolean we crash safely.
#define CONVERT_BOOLEAN_ARG_CHECKED(name, index) \
- CHECK(args[index]->IsBoolean()); \
- bool name = args[index]->IsTrue(isolate);
+ CHECK(args[index].IsBoolean()); \
+ bool name = args[index].IsTrue(isolate);
// Cast the given argument to a Smi and store its value in an int variable
// with the given name. If the argument is not a Smi we crash safely.
#define CONVERT_SMI_ARG_CHECKED(name, index) \
- CHECK(args[index]->IsSmi()); \
+ CHECK(args[index].IsSmi()); \
int name = args.smi_at(index);
// Cast the given argument to a double and store it in a variable with
// the given name. If the argument is not a number (as opposed to
// the number not-a-number) we crash safely.
#define CONVERT_DOUBLE_ARG_CHECKED(name, index) \
- CHECK(args[index]->IsNumber()); \
+ CHECK(args[index].IsNumber()); \
double name = args.number_at(index);
// Cast the given argument to a size_t and store its value in a variable with
// the given name. If the argument is not a size_t we crash safely.
#define CONVERT_SIZE_ARG_CHECKED(name, index) \
- CHECK(args[index]->IsNumber()); \
+ CHECK(args[index].IsNumber()); \
Handle<Object> name##_object = args.at(index); \
size_t name = 0; \
CHECK(TryNumberToSize(*name##_object, &name));
@@ -59,7 +59,7 @@
// a variable of the specified type with the given name. If the
// object is not a Number we crash safely.
#define CONVERT_NUMBER_CHECKED(type, name, Type, obj) \
- CHECK(obj->IsNumber()); \
+ CHECK(obj.IsNumber()); \
type name = NumberTo##Type(obj);
// Cast the given argument to PropertyDetails and store its value in a
@@ -80,23 +80,23 @@
// Assert that the given argument is a number within the Int32 range
// and convert it to int32_t. If the argument is not an Int32 we crash safely.
#define CONVERT_INT32_ARG_CHECKED(name, index) \
- CHECK(args[index]->IsNumber()); \
+ CHECK(args[index].IsNumber()); \
int32_t name = 0; \
- CHECK(args[index]->ToInt32(&name));
+ CHECK(args[index].ToInt32(&name));
// Assert that the given argument is a number within the Uint32 range
// and convert it to uint32_t. If the argument is not an Uint32 call
// IllegalOperation and return.
#define CONVERT_UINT32_ARG_CHECKED(name, index) \
- CHECK(args[index]->IsNumber()); \
+ CHECK(args[index].IsNumber()); \
uint32_t name = 0; \
- CHECK(args[index]->ToUint32(&name));
+ CHECK(args[index].ToUint32(&name));
// Cast the given argument to PropertyAttributes and store its value in a
// variable with the given name. If the argument is not a Smi or the
// enum value is out of range, we crash safely.
#define CONVERT_PROPERTY_ATTRIBUTES_CHECKED(name, index) \
- CHECK(args[index]->IsSmi()); \
+ CHECK(args[index].IsSmi()); \
CHECK_EQ(args.smi_at(index) & ~(READ_ONLY | DONT_ENUM | DONT_DELETE), 0); \
PropertyAttributes name = static_cast<PropertyAttributes>(args.smi_at(index));
@@ -115,7 +115,7 @@
};
static inline ObjectPair MakePair(Object x, Object y) {
- ObjectPair result = {x->ptr(), y->ptr()};
+ ObjectPair result = {x.ptr(), y.ptr()};
// Pointers x and y returned in rax and rdx, in AMD-x64-abi.
// In Win64 they are assigned to a hidden first argument.
return result;
@@ -124,7 +124,7 @@
typedef uint64_t ObjectPair;
static inline ObjectPair MakePair(Object x, Object y) {
#if defined(V8_TARGET_LITTLE_ENDIAN)
- return x->ptr() | (static_cast<ObjectPair>(y->ptr()) << 32);
+ return x.ptr() | (static_cast<ObjectPair>(y.ptr()) << 32);
#elif defined(V8_TARGET_BIG_ENDIAN)
return y->ptr() | (static_cast<ObjectPair>(x->ptr()) << 32);
#else
diff --git a/src/runtime/runtime-wasm.cc b/src/runtime/runtime-wasm.cc
index e81ae7b..1dddf1a 100644
--- a/src/runtime/runtime-wasm.cc
+++ b/src/runtime/runtime-wasm.cc
@@ -39,7 +39,7 @@
}
Context GetNativeContextFromWasmInstanceOnStackTop(Isolate* isolate) {
- return GetWasmInstanceOnStackTop(isolate)->native_context();
+ return GetWasmInstanceOnStackTop(isolate).native_context();
}
class ClearThreadInWasmScope {
@@ -325,7 +325,7 @@
DCHECK(isolate->context().is_null());
isolate->set_context(instance->native_context());
- auto* native_module = instance->module_object()->native_module();
+ auto* native_module = instance->module_object().native_module();
bool success = wasm::CompileLazy(isolate, native_module, func_index);
if (!success) {
DCHECK(isolate->has_pending_exception());
@@ -341,7 +341,7 @@
Handle<JSArrayBuffer> getSharedArrayBuffer(Handle<WasmInstanceObject> instance,
Isolate* isolate, uint32_t address) {
DCHECK(instance->has_memory_object());
- Handle<JSArrayBuffer> array_buffer(instance->memory_object()->array_buffer(),
+ Handle<JSArrayBuffer> array_buffer(instance->memory_object().array_buffer(),
isolate);
// Validation should have failed if the memory was not shared.
@@ -442,9 +442,9 @@
CONVERT_ARG_HANDLE_CHECKED(WasmInstanceObject, instance, 0);
CONVERT_UINT32_ARG_CHECKED(table_index, 1);
CONVERT_UINT32_ARG_CHECKED(entry_index, 2);
- DCHECK_LT(table_index, instance->tables()->length());
+ DCHECK_LT(table_index, instance->tables().length());
auto table = handle(
- WasmTableObject::cast(instance->tables()->get(table_index)), isolate);
+ WasmTableObject::cast(instance->tables().get(table_index)), isolate);
if (!WasmTableObject::IsInBounds(isolate, table, entry_index)) {
return ThrowWasmError(isolate, MessageTemplate::kWasmTrapTableOutOfBounds);
@@ -465,9 +465,9 @@
CONVERT_ARG_CHECKED(Object, element_raw, 3);
// TODO(mstarzinger): Manually box because parameters are not visited yet.
Handle<Object> element(element_raw, isolate);
- DCHECK_LT(table_index, instance->tables()->length());
+ DCHECK_LT(table_index, instance->tables().length());
auto table = handle(
- WasmTableObject::cast(instance->tables()->get(table_index)), isolate);
+ WasmTableObject::cast(instance->tables().get(table_index)), isolate);
if (!WasmTableObject::IsInBounds(isolate, table, entry_index)) {
return ThrowWasmError(isolate, MessageTemplate::kWasmTrapTableOutOfBounds);
@@ -487,9 +487,9 @@
DCHECK(isolate->context().is_null());
isolate->set_context(instance->native_context());
- DCHECK_LT(table_index, instance->tables()->length());
+ DCHECK_LT(table_index, instance->tables().length());
auto table_obj = handle(
- WasmTableObject::cast(instance->tables()->get(table_index)), isolate);
+ WasmTableObject::cast(instance->tables().get(table_index)), isolate);
// This check is already done in generated code.
DCHECK(WasmTableObject::IsInBounds(isolate, table_obj, entry_index));
@@ -516,7 +516,7 @@
maybe_target_instance.ToHandleChecked();
const wasm::WasmModule* target_module =
- target_instance->module_object()->native_module()->module();
+ target_instance->module_object().native_module()->module();
wasm::FunctionSig* target_sig = target_module->functions[function_index].sig;
@@ -545,9 +545,9 @@
CONVERT_UINT32_ARG_CHECKED(table_index, 0);
CONVERT_UINT32_ARG_CHECKED(entry_index, 1);
- DCHECK_LT(table_index, instance->tables()->length());
+ DCHECK_LT(table_index, instance->tables().length());
auto table_obj = handle(
- WasmTableObject::cast(instance->tables()->get(table_index)), isolate);
+ WasmTableObject::cast(instance->tables().get(table_index)), isolate);
DCHECK(WasmTableObject::IsInBounds(isolate, table_obj, entry_index));
@@ -635,7 +635,7 @@
CONVERT_UINT32_ARG_CHECKED(delta, 2);
Handle<WasmTableObject> table(
- WasmTableObject::cast(instance->tables()->get(table_index)), isolate);
+ WasmTableObject::cast(instance->tables().get(table_index)), isolate);
int result = WasmTableObject::Grow(isolate, table, delta, value);
return Smi::FromInt(result);
@@ -654,9 +654,9 @@
CONVERT_UINT32_ARG_CHECKED(count, 3);
Handle<WasmTableObject> table(
- WasmTableObject::cast(instance->tables()->get(table_index)), isolate);
+ WasmTableObject::cast(instance->tables().get(table_index)), isolate);
- uint32_t table_size = static_cast<uint32_t>(table->entries()->length());
+ uint32_t table_size = static_cast<uint32_t>(table->entries().length());
if (start > table_size) {
return ThrowTableOutOfBounds(isolate, instance);
diff --git a/src/snapshot/code-serializer.cc b/src/snapshot/code-serializer.cc
index c4caed2..1a1294e 100644
--- a/src/snapshot/code-serializer.cc
+++ b/src/snapshot/code-serializer.cc
@@ -50,7 +50,7 @@
Handle<Script> script(Script::cast(info->script()), isolate);
if (FLAG_trace_serializer) {
PrintF("[Serializing from");
- script->name()->ShortPrint();
+ script->name().ShortPrint();
PrintF("]\n");
}
// TODO(7110): Enable serialization of Asm modules once the AsmWasmData is
@@ -102,7 +102,7 @@
// For objects in RO_SPACE, never serialize the object, but instead create a
// back reference that encodes the page number as the chunk_index and the
// offset within the page as the chunk_offset.
- Address address = obj->address();
+ Address address = obj.address();
Page* page = Page::FromAddress(address);
uint32_t chunk_index = 0;
for (Page* p : *read_only_space) {
@@ -112,7 +112,7 @@
uint32_t chunk_offset = static_cast<uint32_t>(page->Offset(address));
SerializerReference back_reference =
SerializerReference::BackReference(RO_SPACE, chunk_index, chunk_offset);
- reference_map()->Add(reinterpret_cast<void*>(obj->ptr()), back_reference);
+ reference_map()->Add(reinterpret_cast<void*>(obj.ptr()), back_reference);
CHECK(SerializeBackReference(obj));
return true;
}
@@ -126,60 +126,60 @@
if (SerializeReadOnlyObject(obj)) return;
- CHECK(!obj->IsCode());
+ CHECK(!obj.IsCode());
ReadOnlyRoots roots(isolate());
if (ElideObject(obj)) {
return SerializeObject(roots.undefined_value());
}
- if (obj->IsScript()) {
+ if (obj.IsScript()) {
Script script_obj = Script::cast(obj);
- DCHECK_NE(script_obj->compilation_type(), Script::COMPILATION_TYPE_EVAL);
+ DCHECK_NE(script_obj.compilation_type(), Script::COMPILATION_TYPE_EVAL);
// We want to differentiate between undefined and uninitialized_symbol for
// context_data for now. It is hack to allow debugging for scripts that are
// included as a part of custom snapshot. (see debug::Script::IsEmbedded())
- Object context_data = script_obj->context_data();
+ Object context_data = script_obj.context_data();
if (context_data != roots.undefined_value() &&
context_data != roots.uninitialized_symbol()) {
- script_obj->set_context_data(roots.undefined_value());
+ script_obj.set_context_data(roots.undefined_value());
}
// We don't want to serialize host options to avoid serializing unnecessary
// object graph.
- FixedArray host_options = script_obj->host_defined_options();
- script_obj->set_host_defined_options(roots.empty_fixed_array());
+ FixedArray host_options = script_obj.host_defined_options();
+ script_obj.set_host_defined_options(roots.empty_fixed_array());
SerializeGeneric(obj);
- script_obj->set_host_defined_options(host_options);
- script_obj->set_context_data(context_data);
+ script_obj.set_host_defined_options(host_options);
+ script_obj.set_context_data(context_data);
return;
}
- if (obj->IsSharedFunctionInfo()) {
+ if (obj.IsSharedFunctionInfo()) {
SharedFunctionInfo sfi = SharedFunctionInfo::cast(obj);
// TODO(7110): Enable serializing of Asm modules once the AsmWasmData
// is context independent.
- DCHECK(!sfi->IsApiFunction() && !sfi->HasAsmWasmData());
+ DCHECK(!sfi.IsApiFunction() && !sfi.HasAsmWasmData());
DebugInfo debug_info;
BytecodeArray debug_bytecode_array;
- if (sfi->HasDebugInfo()) {
+ if (sfi.HasDebugInfo()) {
// Clear debug info.
- debug_info = sfi->GetDebugInfo();
- if (debug_info->HasInstrumentedBytecodeArray()) {
- debug_bytecode_array = debug_info->DebugBytecodeArray();
- sfi->SetDebugBytecodeArray(debug_info->OriginalBytecodeArray());
+ debug_info = sfi.GetDebugInfo();
+ if (debug_info.HasInstrumentedBytecodeArray()) {
+ debug_bytecode_array = debug_info.DebugBytecodeArray();
+ sfi.SetDebugBytecodeArray(debug_info.OriginalBytecodeArray());
}
- sfi->set_script_or_debug_info(debug_info->script());
+ sfi.set_script_or_debug_info(debug_info.script());
}
- DCHECK(!sfi->HasDebugInfo());
+ DCHECK(!sfi.HasDebugInfo());
SerializeGeneric(obj);
// Restore debug info
if (!debug_info.is_null()) {
- sfi->set_script_or_debug_info(debug_info);
+ sfi.set_script_or_debug_info(debug_info);
if (!debug_bytecode_array.is_null()) {
- sfi->SetDebugBytecodeArray(debug_bytecode_array);
+ sfi.SetDebugBytecodeArray(debug_bytecode_array);
}
}
return;
@@ -192,24 +192,24 @@
// --interpreted-frames-native-stack is on. See v8:9122 for more context
#ifndef V8_TARGET_ARCH_ARM
if (V8_UNLIKELY(FLAG_interpreted_frames_native_stack) &&
- obj->IsInterpreterData()) {
- obj = InterpreterData::cast(obj)->bytecode_array();
+ obj.IsInterpreterData()) {
+ obj = InterpreterData::cast(obj).bytecode_array();
}
#endif // V8_TARGET_ARCH_ARM
- if (obj->IsBytecodeArray()) {
+ if (obj.IsBytecodeArray()) {
// Clear the stack frame cache if present
- BytecodeArray::cast(obj)->ClearFrameCacheFromSourcePositionTable();
+ BytecodeArray::cast(obj).ClearFrameCacheFromSourcePositionTable();
}
// Past this point we should not see any (context-specific) maps anymore.
- CHECK(!obj->IsMap());
+ CHECK(!obj.IsMap());
// There should be no references to the global object embedded.
- CHECK(!obj->IsJSGlobalProxy() && !obj->IsJSGlobalObject());
+ CHECK(!obj.IsJSGlobalProxy() && !obj.IsJSGlobalObject());
// Embedded FixedArrays that need rehashing must support rehashing.
- CHECK_IMPLIES(obj->NeedsRehashing(), obj->CanBeRehashed());
+ CHECK_IMPLIES(obj.NeedsRehashing(), obj.CanBeRehashed());
// We expect no instantiated function objects or contexts.
- CHECK(!obj->IsJSFunction() && !obj->IsContext());
+ CHECK(!obj.IsJSFunction() && !obj.IsContext());
SerializeGeneric(obj);
}
@@ -231,13 +231,13 @@
Script script = Script::cast(sfi->script());
Handle<Script> script_handle(script, isolate);
String name = ReadOnlyRoots(isolate).empty_string();
- if (script->name()->IsString()) name = String::cast(script->name());
+ if (script.name().IsString()) name = String::cast(script.name());
Handle<String> name_handle(name, isolate);
SharedFunctionInfo::ScriptIterator iter(isolate, script);
for (SharedFunctionInfo info = iter.Next(); !info.is_null();
info = iter.Next()) {
- if (!info->HasBytecodeArray()) continue;
+ if (!info.HasBytecodeArray()) continue;
Handle<Code> code = isolate->factory()->CopyCode(Handle<Code>::cast(
isolate->factory()->interpreter_entry_trampoline_for_profiling()));
@@ -245,15 +245,15 @@
Handle<InterpreterData>::cast(isolate->factory()->NewStruct(
INTERPRETER_DATA_TYPE, AllocationType::kOld));
- interpreter_data->set_bytecode_array(info->GetBytecodeArray());
+ interpreter_data->set_bytecode_array(info.GetBytecodeArray());
interpreter_data->set_interpreter_trampoline(*code);
- info->set_interpreter_data(*interpreter_data);
+ info.set_interpreter_data(*interpreter_data);
if (!log_code_creation) continue;
Handle<AbstractCode> abstract_code = Handle<AbstractCode>::cast(code);
- int line_num = script->GetLineNumber(info->StartPosition()) + 1;
- int column_num = script->GetColumnNumber(info->StartPosition()) + 1;
+ int line_num = script.GetLineNumber(info.StartPosition()) + 1;
+ int column_num = script.GetColumnNumber(info.StartPosition()) + 1;
PROFILE(isolate,
CodeCreateEvent(CodeEventListener::INTERPRETED_FUNCTION_TAG,
*abstract_code, info, *name_handle, line_num,
@@ -316,10 +316,10 @@
String name = ReadOnlyRoots(isolate).empty_string();
Script script = Script::cast(result->script());
Handle<Script> script_handle(script, isolate);
- if (script->name()->IsString()) name = String::cast(script->name());
+ if (script.name().IsString()) name = String::cast(script.name());
Handle<String> name_handle(name, isolate);
if (FLAG_log_function_events) {
- LOG(isolate, FunctionEvent("deserialize", script->id(),
+ LOG(isolate, FunctionEvent("deserialize", script.id(),
timer.Elapsed().InMillisecondsF(),
result->StartPosition(), result->EndPosition(),
*name_handle));
@@ -330,11 +330,11 @@
SharedFunctionInfo::ScriptIterator iter(isolate, script);
for (i::SharedFunctionInfo info = iter.Next(); !info.is_null();
info = iter.Next()) {
- if (info->is_compiled()) {
- int line_num = script->GetLineNumber(info->StartPosition()) + 1;
- int column_num = script->GetColumnNumber(info->StartPosition()) + 1;
+ if (info.is_compiled()) {
+ int line_num = script.GetLineNumber(info.StartPosition()) + 1;
+ int column_num = script.GetColumnNumber(info.StartPosition()) + 1;
PROFILE(isolate, CodeCreateEvent(CodeEventListener::SCRIPT_TAG,
- info->abstract_code(), info,
+ info.abstract_code(), info,
*name_handle, line_num, column_num));
}
}
diff --git a/src/snapshot/deserializer-allocator.cc b/src/snapshot/deserializer-allocator.cc
index 054f44d..4fb600d 100644
--- a/src/snapshot/deserializer-allocator.cc
+++ b/src/snapshot/deserializer-allocator.cc
@@ -29,7 +29,7 @@
AllocationResult result = lo_space->AllocateRaw(size);
HeapObject obj = result.ToObjectChecked();
deserialized_large_objects_.push_back(obj);
- return obj->address();
+ return obj.address();
} else if (space == MAP_SPACE) {
DCHECK_EQ(Map::kSize, size);
return allocated_maps_[next_map_index_++];
@@ -63,11 +63,11 @@
// If one of the following assertions fails, then we are deserializing an
// aligned object when the filler maps have not been deserialized yet.
// We require filler maps as padding to align the object.
- DCHECK(ReadOnlyRoots(heap_).free_space_map()->IsMap());
- DCHECK(ReadOnlyRoots(heap_).one_pointer_filler_map()->IsMap());
- DCHECK(ReadOnlyRoots(heap_).two_pointer_filler_map()->IsMap());
+ DCHECK(ReadOnlyRoots(heap_).free_space_map().IsMap());
+ DCHECK(ReadOnlyRoots(heap_).one_pointer_filler_map().IsMap());
+ DCHECK(ReadOnlyRoots(heap_).two_pointer_filler_map().IsMap());
obj = heap_->AlignWithFiller(obj, size, reserved, next_alignment_);
- address = obj->address();
+ address = obj.address();
next_alignment_ = kWordAligned;
return address;
} else {
@@ -106,7 +106,7 @@
if (next_alignment_ != kWordAligned) {
int padding = Heap::GetFillToAlign(address, next_alignment_);
next_alignment_ = kWordAligned;
- DCHECK(padding == 0 || HeapObject::FromAddress(address)->IsFiller());
+ DCHECK(padding == 0 || HeapObject::FromAddress(address).IsFiller());
address += padding;
}
return HeapObject::FromAddress(address);
diff --git a/src/snapshot/deserializer.cc b/src/snapshot/deserializer.cc
index b68209a..dfcd727 100644
--- a/src/snapshot/deserializer.cc
+++ b/src/snapshot/deserializer.cc
@@ -71,7 +71,7 @@
void Deserializer::Rehash() {
DCHECK(can_rehash() || deserializing_user_code());
for (HeapObject item : to_rehash_) {
- item->RehashBasedOnMap(ReadOnlyRoots(isolate_));
+ item.RehashBasedOnMap(ReadOnlyRoots(isolate_));
}
}
@@ -117,7 +117,7 @@
DCHECK_EQ(code - space, kNewObject);
HeapObject object = GetBackReferencedObject(space);
int size = source_.GetInt() << kTaggedSizeLog2;
- Address obj_address = object->address();
+ Address obj_address = object.address();
// Object's map is already initialized, now read the rest.
MaybeObjectSlot start(obj_address + kTaggedSize);
MaybeObjectSlot end(obj_address + size);
@@ -154,26 +154,26 @@
void Deserializer::LogScriptEvents(Script script) {
DisallowHeapAllocation no_gc;
LOG(isolate_,
- ScriptEvent(Logger::ScriptEventType::kDeserialize, script->id()));
+ ScriptEvent(Logger::ScriptEventType::kDeserialize, script.id()));
LOG(isolate_, ScriptDetails(script));
TRACE_EVENT_OBJECT_CREATED_WITH_ID(
TRACE_DISABLED_BY_DEFAULT("v8.compile"), "Script",
- TRACE_ID_WITH_SCOPE("v8::internal::Script", script->id()));
+ TRACE_ID_WITH_SCOPE("v8::internal::Script", script.id()));
TRACE_EVENT_OBJECT_SNAPSHOT_WITH_ID(
TRACE_DISABLED_BY_DEFAULT("v8.compile"), "Script",
- TRACE_ID_WITH_SCOPE("v8::internal::Script", script->id()),
- script->ToTracedValue());
+ TRACE_ID_WITH_SCOPE("v8::internal::Script", script.id()),
+ script.ToTracedValue());
}
StringTableInsertionKey::StringTableInsertionKey(String string)
: StringTableKey(ComputeHashField(string), string.length()),
string_(string) {
- DCHECK(string->IsInternalizedString());
+ DCHECK(string.IsInternalizedString());
}
bool StringTableInsertionKey::IsMatch(String string) {
// We want to compare the content of two strings here.
- return string_->SlowEquals(string);
+ return string_.SlowEquals(string);
}
Handle<String> StringTableInsertionKey::AsHandle(Isolate* isolate) {
@@ -182,8 +182,8 @@
uint32_t StringTableInsertionKey::ComputeHashField(String string) {
// Make sure hash_field() is computed.
- string->Hash();
- return string->hash_field();
+ string.Hash();
+ return string.hash_field();
}
namespace {
@@ -193,7 +193,7 @@
int entry = table.FindEntry(isolate, key);
if (entry == kNotFound) return String();
- String canonical = String::cast(table->KeyAt(entry));
+ String canonical = String::cast(table.KeyAt(entry));
DCHECK_NE(canonical, key->string());
key->string().MakeThin(isolate, canonical);
return canonical;
@@ -203,24 +203,24 @@
HeapObject Deserializer::PostProcessNewObject(HeapObject obj, int space) {
if ((FLAG_rehash_snapshot && can_rehash_) || deserializing_user_code()) {
- if (obj->IsString()) {
+ if (obj.IsString()) {
// Uninitialize hash field as we need to recompute the hash.
String string = String::cast(obj);
- string->set_hash_field(String::kEmptyHashField);
+ string.set_hash_field(String::kEmptyHashField);
// Rehash strings before read-only space is sealed. Strings outside
// read-only space are rehashed lazily. (e.g. when rehashing dictionaries)
if (space == RO_SPACE) {
to_rehash_.push_back(obj);
}
- } else if (obj->NeedsRehashing()) {
+ } else if (obj.NeedsRehashing()) {
to_rehash_.push_back(obj);
}
}
if (deserializing_user_code()) {
- if (obj->IsString()) {
+ if (obj.IsString()) {
String string = String::cast(obj);
- if (string->IsInternalizedString()) {
+ if (string.IsInternalizedString()) {
// Canonicalize the internalized string. If it already exists in the
// string table, set it to forward to the existing one.
StringTableInsertionKey key(string);
@@ -231,9 +231,9 @@
new_internalized_strings_.push_back(handle(string, isolate_));
return string;
}
- } else if (obj->IsScript()) {
+ } else if (obj.IsScript()) {
new_scripts_.push_back(handle(Script::cast(obj), isolate_));
- } else if (obj->IsAllocationSite()) {
+ } else if (obj.IsAllocationSite()) {
// We should link new allocation sites, but we can't do this immediately
// because |AllocationSite::HasWeakNext()| internally accesses
// |Heap::roots_| that may not have been initialized yet. So defer this to
@@ -243,99 +243,99 @@
DCHECK(CanBeDeferred(obj));
}
}
- if (obj->IsScript()) {
+ if (obj.IsScript()) {
LogScriptEvents(Script::cast(obj));
- } else if (obj->IsCode()) {
+ } else if (obj.IsCode()) {
// We flush all code pages after deserializing the startup snapshot.
// Hence we only remember each individual code object when deserializing
// user code.
if (deserializing_user_code() || space == LO_SPACE) {
new_code_objects_.push_back(Code::cast(obj));
}
- } else if (FLAG_trace_maps && obj->IsMap()) {
+ } else if (FLAG_trace_maps && obj.IsMap()) {
// Keep track of all seen Maps to log them later since they might be only
// partially initialized at this point.
new_maps_.push_back(Map::cast(obj));
- } else if (obj->IsAccessorInfo()) {
+ } else if (obj.IsAccessorInfo()) {
#ifdef USE_SIMULATOR
accessor_infos_.push_back(AccessorInfo::cast(obj));
#endif
- } else if (obj->IsCallHandlerInfo()) {
+ } else if (obj.IsCallHandlerInfo()) {
#ifdef USE_SIMULATOR
call_handler_infos_.push_back(CallHandlerInfo::cast(obj));
#endif
- } else if (obj->IsExternalString()) {
- if (obj->map() == ReadOnlyRoots(isolate_).native_source_string_map()) {
+ } else if (obj.IsExternalString()) {
+ if (obj.map() == ReadOnlyRoots(isolate_).native_source_string_map()) {
ExternalOneByteString string = ExternalOneByteString::cast(obj);
- DCHECK(string->is_uncached());
- string->SetResource(
+ DCHECK(string.is_uncached());
+ string.SetResource(
isolate_, NativesExternalStringResource::DecodeForDeserialization(
- string->resource()));
+ string.resource()));
} else {
ExternalString string = ExternalString::cast(obj);
- uint32_t index = string->resource_as_uint32();
+ uint32_t index = string.resource_as_uint32();
Address address =
static_cast<Address>(isolate_->api_external_references()[index]);
- string->set_address_as_resource(address);
+ string.set_address_as_resource(address);
isolate_->heap()->UpdateExternalString(string, 0,
- string->ExternalPayloadSize());
+ string.ExternalPayloadSize());
}
isolate_->heap()->RegisterExternalString(String::cast(obj));
- } else if (obj->IsJSDataView()) {
+ } else if (obj.IsJSDataView()) {
JSDataView data_view = JSDataView::cast(obj);
JSArrayBuffer buffer = JSArrayBuffer::cast(data_view.buffer());
data_view.set_data_pointer(
reinterpret_cast<uint8_t*>(buffer.backing_store()) +
data_view.byte_offset());
- } else if (obj->IsJSTypedArray()) {
+ } else if (obj.IsJSTypedArray()) {
JSTypedArray typed_array = JSTypedArray::cast(obj);
- CHECK_LE(typed_array->byte_offset(), Smi::kMaxValue);
- int32_t byte_offset = static_cast<int32_t>(typed_array->byte_offset());
+ CHECK_LE(typed_array.byte_offset(), Smi::kMaxValue);
+ int32_t byte_offset = static_cast<int32_t>(typed_array.byte_offset());
if (byte_offset > 0) {
FixedTypedArrayBase elements =
- FixedTypedArrayBase::cast(typed_array->elements());
+ FixedTypedArrayBase::cast(typed_array.elements());
// Must be off-heap layout.
- DCHECK(!typed_array->is_on_heap());
+ DCHECK(!typed_array.is_on_heap());
void* pointer_with_offset = reinterpret_cast<void*>(
- reinterpret_cast<intptr_t>(elements->external_pointer()) +
+ reinterpret_cast<intptr_t>(elements.external_pointer()) +
byte_offset);
- elements->set_external_pointer(pointer_with_offset);
+ elements.set_external_pointer(pointer_with_offset);
}
- } else if (obj->IsJSArrayBuffer()) {
+ } else if (obj.IsJSArrayBuffer()) {
JSArrayBuffer buffer = JSArrayBuffer::cast(obj);
// Only fixup for the off-heap case.
- if (buffer->backing_store() != nullptr) {
- Smi store_index(reinterpret_cast<Address>(buffer->backing_store()));
- void* backing_store = off_heap_backing_stores_[store_index->value()];
+ if (buffer.backing_store() != nullptr) {
+ Smi store_index(reinterpret_cast<Address>(buffer.backing_store()));
+ void* backing_store = off_heap_backing_stores_[store_index.value()];
- buffer->set_backing_store(backing_store);
+ buffer.set_backing_store(backing_store);
isolate_->heap()->RegisterNewArrayBuffer(buffer);
}
- } else if (obj->IsFixedTypedArrayBase()) {
+ } else if (obj.IsFixedTypedArrayBase()) {
FixedTypedArrayBase fta = FixedTypedArrayBase::cast(obj);
// Only fixup for the off-heap case.
- if (fta->base_pointer() == Smi::kZero) {
- Smi store_index(reinterpret_cast<Address>(fta->external_pointer()));
- void* backing_store = off_heap_backing_stores_[store_index->value()];
- fta->set_external_pointer(backing_store);
+ if (fta.base_pointer() == Smi::kZero) {
+ Smi store_index(reinterpret_cast<Address>(fta.external_pointer()));
+ void* backing_store = off_heap_backing_stores_[store_index.value()];
+ fta.set_external_pointer(backing_store);
}
- } else if (obj->IsBytecodeArray()) {
+ } else if (obj.IsBytecodeArray()) {
// TODO(mythria): Remove these once we store the default values for these
// fields in the serializer.
BytecodeArray bytecode_array = BytecodeArray::cast(obj);
- bytecode_array->set_osr_loop_nesting_level(0);
+ bytecode_array.set_osr_loop_nesting_level(0);
}
#ifdef DEBUG
- if (obj->IsDescriptorArray()) {
+ if (obj.IsDescriptorArray()) {
DescriptorArray descriptor_array = DescriptorArray::cast(obj);
- DCHECK_EQ(0, descriptor_array->raw_number_of_marked_descriptors());
+ DCHECK_EQ(0, descriptor_array.raw_number_of_marked_descriptors());
}
#endif
// Check alignment.
- DCHECK_EQ(0, Heap::GetFillToAlign(obj->address(),
- HeapObject::RequiredAlignment(obj->map())));
+ DCHECK_EQ(0, Heap::GetFillToAlign(obj.address(),
+ HeapObject::RequiredAlignment(obj.map())));
return obj;
}
@@ -374,8 +374,8 @@
}
}
- if (deserializing_user_code() && obj->IsThinString()) {
- obj = ThinString::cast(obj)->actual();
+ if (deserializing_user_code() && obj.IsThinString()) {
+ obj = ThinString::cast(obj).actual();
}
hot_objects_.Add(obj);
@@ -411,7 +411,7 @@
}
#ifdef DEBUG
- if (obj->IsCode()) {
+ if (obj.IsCode()) {
DCHECK(space_number == CODE_SPACE || space_number == CODE_LO_SPACE);
} else {
DCHECK(space_number != CODE_SPACE && space_number != CODE_LO_SPACE);
@@ -442,7 +442,7 @@
void Deserializer::VisitCodeTarget(Code host, RelocInfo* rinfo) {
HeapObject object = ReadObject();
- rinfo->set_target_address(Code::cast(object)->raw_instruction_start());
+ rinfo->set_target_address(Code::cast(object).raw_instruction_start());
}
void Deserializer::VisitEmbeddedPointer(Code host, RelocInfo* rinfo) {
@@ -478,8 +478,8 @@
// Internal reference target is encoded as an offset from code entry.
int target_offset = source_.GetInt();
DCHECK_LT(static_cast<unsigned>(target_offset),
- static_cast<unsigned>(host->raw_instruction_size()));
- Address target = host->entry() + target_offset;
+ static_cast<unsigned>(host.raw_instruction_size()));
+ Address target = host.entry() + target_offset;
Assembler::deserialization_set_target_internal_reference_at(
rinfo->pc(), target, rinfo->rmode());
}
@@ -637,7 +637,7 @@
HeapObject obj = HeapObject::FromAddress(current_object_address);
// If the deferred object is a map, its instance type may be used
// during deserialization. Initialize it with a temporary value.
- if (obj->IsMap()) Map::cast(obj)->set_instance_type(FILLER_TYPE);
+ if (obj.IsMap()) Map::cast(obj).set_instance_type(FILLER_TYPE);
current = limit;
return false;
}
diff --git a/src/snapshot/embedded-data.cc b/src/snapshot/embedded-data.cc
index 32996e6..a8a7ea0 100644
--- a/src/snapshot/embedded-data.cc
+++ b/src/snapshot/embedded-data.cc
@@ -89,8 +89,8 @@
namespace {
bool BuiltinAliasesOffHeapTrampolineRegister(Isolate* isolate, Code code) {
- DCHECK(Builtins::IsIsolateIndependent(code->builtin_index()));
- switch (Builtins::KindOf(code->builtin_index())) {
+ DCHECK(Builtins::IsIsolateIndependent(code.builtin_index()));
+ switch (Builtins::KindOf(code.builtin_index())) {
case Builtins::CPP:
case Builtins::TFC:
case Builtins::TFH:
@@ -107,7 +107,7 @@
}
Callable callable = Builtins::CallableFor(
- isolate, static_cast<Builtins::Name>(code->builtin_index()));
+ isolate, static_cast<Builtins::Name>(code.builtin_index()));
CallInterfaceDescriptor descriptor = callable.descriptor();
if (descriptor.ContextRegister() == kOffHeapTrampolineRegister) {
@@ -151,7 +151,7 @@
// Do not emit write-barrier for off-heap writes.
off_heap_it.rinfo()->set_target_address(
- blob->InstructionStartOfBuiltin(target->builtin_index()),
+ blob->InstructionStartOfBuiltin(target.builtin_index()),
SKIP_WRITE_BARRIER);
on_heap_it.next();
@@ -185,7 +185,7 @@
if (Builtins::IsIsolateIndependent(i)) {
// Sanity-check that the given builtin is isolate-independent and does not
// use the trampoline register in its calling convention.
- if (!code->IsIsolateIndependent(isolate)) {
+ if (!code.IsIsolateIndependent(isolate)) {
saw_unsafe_builtin = true;
fprintf(stderr, "%s is not isolate-independent.\n", Builtins::name(i));
}
@@ -205,7 +205,7 @@
Builtins::name(i));
}
- uint32_t length = static_cast<uint32_t>(code->raw_instruction_size());
+ uint32_t length = static_cast<uint32_t>(code.raw_instruction_size());
DCHECK_EQ(0, raw_data_size % kCodeAlignment);
metadata[i].instructions_offset = raw_data_size;
@@ -248,10 +248,10 @@
Code code = builtins->builtin(i);
uint32_t offset = metadata[i].instructions_offset;
uint8_t* dst = raw_data_start + offset;
- DCHECK_LE(RawDataOffset() + offset + code->raw_instruction_size(),
+ DCHECK_LE(RawDataOffset() + offset + code.raw_instruction_size(),
blob_size);
- std::memcpy(dst, reinterpret_cast<uint8_t*>(code->raw_instruction_start()),
- code->raw_instruction_size());
+ std::memcpy(dst, reinterpret_cast<uint8_t*>(code.raw_instruction_start()),
+ code.raw_instruction_size());
}
EmbeddedData d(blob, blob_size);
diff --git a/src/snapshot/embedded-file-writer.cc b/src/snapshot/embedded-file-writer.cc
index 6c1fa2b..fee4391 100644
--- a/src/snapshot/embedded-file-writer.cc
+++ b/src/snapshot/embedded-file-writer.cc
@@ -261,10 +261,10 @@
Code code = builtins->builtin(i);
// Verify that the code object is still the "real code" and not a
// trampoline (which wouldn't have source positions).
- DCHECK(!code->is_off_heap_trampoline());
+ DCHECK(!code.is_off_heap_trampoline());
std::vector<unsigned char> data(
- code->SourcePositionTable()->GetDataStartAddress(),
- code->SourcePositionTable()->GetDataEndAddress());
+ code.SourcePositionTable().GetDataStartAddress(),
+ code.SourcePositionTable().GetDataEndAddress());
source_positions_[i] = data;
}
}
diff --git a/src/snapshot/object-deserializer.cc b/src/snapshot/object-deserializer.cc
index 222e0bb..7f6eb65 100644
--- a/src/snapshot/object-deserializer.cc
+++ b/src/snapshot/object-deserializer.cc
@@ -61,8 +61,8 @@
for (Code code : new_code_objects()) {
// Record all references to embedded objects in the new code object.
WriteBarrierForCode(code);
- FlushInstructionCache(code->raw_instruction_start(),
- code->raw_instruction_size());
+ FlushInstructionCache(code.raw_instruction_start(),
+ code.raw_instruction_size());
}
}
@@ -96,14 +96,14 @@
// Allocation sites are present in the snapshot, and must be linked into
// a list at deserialization time.
for (AllocationSite site : new_allocation_sites()) {
- if (!site->HasWeakNext()) continue;
+ if (!site.HasWeakNext()) continue;
// TODO(mvstanton): consider treating the heap()->allocation_sites_list()
// as a (weak) root. If this root is relocated correctly, this becomes
// unnecessary.
if (heap->allocation_sites_list() == Smi::kZero) {
- site->set_weak_next(ReadOnlyRoots(heap).undefined_value());
+ site.set_weak_next(ReadOnlyRoots(heap).undefined_value());
} else {
- site->set_weak_next(heap->allocation_sites_list());
+ site.set_weak_next(heap->allocation_sites_list());
}
heap->set_allocation_sites_list(site);
}
diff --git a/src/snapshot/partial-serializer.cc b/src/snapshot/partial-serializer.cc
index 61d9442..895d452 100644
--- a/src/snapshot/partial-serializer.cc
+++ b/src/snapshot/partial-serializer.cc
@@ -32,29 +32,28 @@
void PartialSerializer::Serialize(Context* o, bool include_global_proxy) {
context_ = *o;
- DCHECK(context_->IsNativeContext());
+ DCHECK(context_.IsNativeContext());
reference_map()->AddAttachedReference(
- reinterpret_cast<void*>(context_->global_proxy()->ptr()));
+ reinterpret_cast<void*>(context_.global_proxy().ptr()));
// The bootstrap snapshot has a code-stub context. When serializing the
// partial snapshot, it is chained into the weak context list on the isolate
// and it's next context pointer may point to the code-stub context. Clear
// it before serializing, it will get re-added to the context list
// explicitly when it's loaded.
- context_->set(Context::NEXT_CONTEXT_LINK,
- ReadOnlyRoots(isolate()).undefined_value());
- DCHECK(!context_->global_object()->IsUndefined());
+ context_.set(Context::NEXT_CONTEXT_LINK,
+ ReadOnlyRoots(isolate()).undefined_value());
+ DCHECK(!context_.global_object().IsUndefined());
// Reset math random cache to get fresh random numbers.
MathRandom::ResetContext(context_);
#ifdef DEBUG
- MicrotaskQueue* microtask_queue =
- context_->native_context()->microtask_queue();
+ MicrotaskQueue* microtask_queue = context_.native_context().microtask_queue();
DCHECK_EQ(0, microtask_queue->size());
DCHECK(!microtask_queue->HasMicrotasksSuppressions());
DCHECK_EQ(0, microtask_queue->GetMicrotasksScopeDepth());
DCHECK(microtask_queue->DebugMicrotasksScopeDepthIsZero());
#endif
- context_->native_context()->set_microtask_queue(nullptr);
+ context_.native_context().set_microtask_queue(nullptr);
VisitRootPointer(Root::kPartialSnapshotCache, nullptr, FullObjectSlot(o));
SerializeDeferredObjects();
@@ -93,18 +92,18 @@
DCHECK(!startup_serializer_->ReferenceMapContains(obj));
// All the internalized strings that the partial snapshot needs should be
// either in the root table or in the partial snapshot cache.
- DCHECK(!obj->IsInternalizedString());
+ DCHECK(!obj.IsInternalizedString());
// Function and object templates are not context specific.
- DCHECK(!obj->IsTemplateInfo());
+ DCHECK(!obj.IsTemplateInfo());
// We should not end up at another native context.
- DCHECK_IMPLIES(obj != context_, !obj->IsNativeContext());
+ DCHECK_IMPLIES(obj != context_, !obj.IsNativeContext());
// Clear literal boilerplates and feedback.
- if (obj->IsFeedbackVector()) FeedbackVector::cast(obj)->ClearSlots(isolate());
+ if (obj.IsFeedbackVector()) FeedbackVector::cast(obj).ClearSlots(isolate());
// Clear InterruptBudget when serializing FeedbackCell.
- if (obj->IsFeedbackCell()) {
- FeedbackCell::cast(obj)->set_interrupt_budget(
+ if (obj.IsFeedbackCell()) {
+ FeedbackCell::cast(obj).set_interrupt_budget(
FeedbackCell::GetInitialInterruptBudget());
}
@@ -112,12 +111,12 @@
return;
}
- if (obj->IsJSFunction()) {
+ if (obj.IsJSFunction()) {
// Unconditionally reset the JSFunction to its SFI's code, since we can't
// serialize optimized code anyway.
JSFunction closure = JSFunction::cast(obj);
- closure->ResetIfBytecodeFlushed();
- if (closure->is_compiled()) closure->set_code(closure->shared()->GetCode());
+ closure.ResetIfBytecodeFlushed();
+ if (closure.is_compiled()) closure.set_code(closure.shared().GetCode());
}
CheckRehashability(obj);
@@ -132,12 +131,12 @@
// allow them to be part of the partial snapshot because they contain a
// unique ID, and deserializing several partial snapshots containing script
// would cause dupes.
- DCHECK(!o->IsScript());
- return o->IsName() || o->IsSharedFunctionInfo() || o->IsHeapNumber() ||
- o->IsCode() || o->IsScopeInfo() || o->IsAccessorInfo() ||
- o->IsTemplateInfo() || o->IsClassPositions() ||
- o->map() == ReadOnlyRoots(startup_serializer_->isolate())
- .fixed_cow_array_map();
+ DCHECK(!o.IsScript());
+ return o.IsName() || o.IsSharedFunctionInfo() || o.IsHeapNumber() ||
+ o.IsCode() || o.IsScopeInfo() || o.IsAccessorInfo() ||
+ o.IsTemplateInfo() || o.IsClassPositions() ||
+ o.map() == ReadOnlyRoots(startup_serializer_->isolate())
+ .fixed_cow_array_map();
}
namespace {
@@ -145,12 +144,12 @@
} // anonymous namespace
bool PartialSerializer::SerializeJSObjectWithEmbedderFields(Object obj) {
- if (!obj->IsJSObject()) return false;
+ if (!obj.IsJSObject()) return false;
JSObject js_obj = JSObject::cast(obj);
- int embedder_fields_count = js_obj->GetEmbedderFieldCount();
+ int embedder_fields_count = js_obj.GetEmbedderFieldCount();
if (embedder_fields_count == 0) return false;
CHECK_GT(embedder_fields_count, 0);
- DCHECK(!js_obj->NeedsRehashing());
+ DCHECK(!js_obj.NeedsRehashing());
DisallowHeapAllocation no_gc;
DisallowJavascriptExecution no_js(isolate());
@@ -171,14 +170,13 @@
EmbedderDataSlot embedder_data_slot(js_obj, i);
original_embedder_values.emplace_back(embedder_data_slot.load_raw(no_gc));
Object object = embedder_data_slot.load_tagged();
- if (object->IsHeapObject()) {
+ if (object.IsHeapObject()) {
DCHECK(IsValidHeapObject(isolate()->heap(), HeapObject::cast(object)));
serialized_data.push_back({nullptr, 0});
} else {
// If no serializer is provided and the field was empty, we serialize it
// by default to nullptr.
- if (serialize_embedder_fields_.callback == nullptr &&
- object->ptr() == 0) {
+ if (serialize_embedder_fields_.callback == nullptr && object.ptr() == 0) {
serialized_data.push_back({nullptr, 0});
} else {
DCHECK_NOT_NULL(serialize_embedder_fields_.callback);
@@ -206,7 +204,7 @@
// 4) Obtain back reference for the serialized object.
SerializerReference reference =
- reference_map()->LookupReference(reinterpret_cast<void*>(js_obj->ptr()));
+ reference_map()->LookupReference(reinterpret_cast<void*>(js_obj.ptr()));
DCHECK(reference.is_back_reference());
// 5) Write data returned by the embedder callbacks into a separate sink,
@@ -237,8 +235,8 @@
void PartialSerializer::CheckRehashability(HeapObject obj) {
if (!can_be_rehashed_) return;
- if (!obj->NeedsRehashing()) return;
- if (obj->CanBeRehashed()) return;
+ if (!obj.NeedsRehashing()) return;
+ if (obj.CanBeRehashed()) return;
can_be_rehashed_ = false;
}
diff --git a/src/snapshot/read-only-serializer.cc b/src/snapshot/read-only-serializer.cc
index 489b4a2..159be77 100644
--- a/src/snapshot/read-only-serializer.cc
+++ b/src/snapshot/read-only-serializer.cc
@@ -27,7 +27,7 @@
void ReadOnlySerializer::SerializeObject(HeapObject obj) {
CHECK(ReadOnlyHeap::Contains(obj));
- CHECK_IMPLIES(obj->IsString(), obj->IsInternalizedString());
+ CHECK_IMPLIES(obj.IsString(), obj.IsInternalizedString());
if (SerializeHotObject(obj)) return;
if (IsRootAndHasBeenSerialized(obj) && SerializeRoot(obj)) {
@@ -88,7 +88,7 @@
// not be fulfilled during deserialization until few first root objects are
// serialized. But we must serialize Map objects since deserializer checks
// that these root objects are indeed Maps.
- return !object->IsMap();
+ return !object.IsMap();
}
bool ReadOnlySerializer::SerializeUsingReadOnlyObjectCache(
diff --git a/src/snapshot/roots-serializer.cc b/src/snapshot/roots-serializer.cc
index 2e5bdf57b..633216b 100644
--- a/src/snapshot/roots-serializer.cc
+++ b/src/snapshot/roots-serializer.cc
@@ -58,8 +58,8 @@
void RootsSerializer::CheckRehashability(HeapObject obj) {
if (!can_be_rehashed_) return;
- if (!obj->NeedsRehashing()) return;
- if (obj->CanBeRehashed()) return;
+ if (!obj.NeedsRehashing()) return;
+ if (obj.CanBeRehashed()) return;
can_be_rehashed_ = false;
}
diff --git a/src/snapshot/serializer-common.cc b/src/snapshot/serializer-common.cc
index fa8d19e..c05ce60 100644
--- a/src/snapshot/serializer-common.cc
+++ b/src/snapshot/serializer-common.cc
@@ -120,28 +120,28 @@
// and eventually terminates the cache with undefined.
visitor->VisitRootPointer(Root::kPartialSnapshotCache, nullptr,
FullObjectSlot(&cache->at(i)));
- if (cache->at(i)->IsUndefined(isolate)) break;
+ if (cache->at(i).IsUndefined(isolate)) break;
}
}
bool SerializerDeserializer::CanBeDeferred(HeapObject o) {
- return !o->IsString() && !o->IsScript() && !o->IsJSTypedArray();
+ return !o.IsString() && !o.IsScript() && !o.IsJSTypedArray();
}
void SerializerDeserializer::RestoreExternalReferenceRedirectors(
const std::vector<AccessorInfo>& accessor_infos) {
// Restore wiped accessor infos.
for (AccessorInfo info : accessor_infos) {
- Foreign::cast(info->js_getter())
- ->set_foreign_address(info->redirected_getter());
+ Foreign::cast(info.js_getter())
+ .set_foreign_address(info.redirected_getter());
}
}
void SerializerDeserializer::RestoreExternalReferenceRedirectors(
const std::vector<CallHandlerInfo>& call_handler_infos) {
for (CallHandlerInfo info : call_handler_infos) {
- Foreign::cast(info->js_callback())
- ->set_foreign_address(info->redirected_callback());
+ Foreign::cast(info.js_callback())
+ .set_foreign_address(info.redirected_callback());
}
}
diff --git a/src/snapshot/serializer.cc b/src/snapshot/serializer.cc
index 606c930..4a3d107 100644
--- a/src/snapshot/serializer.cc
+++ b/src/snapshot/serializer.cc
@@ -58,7 +58,7 @@
#ifdef OBJECT_PRINT
void Serializer::CountInstanceType(Map map, int size, AllocationSpace space) {
- int instance_type = map->instance_type();
+ int instance_type = map.instance_type();
instance_type_count_[space][instance_type]++;
instance_type_size_[space][instance_type] += size;
}
@@ -107,7 +107,7 @@
}
void Serializer::SerializeRootObject(Object object) {
- if (object->IsSmi()) {
+ if (object.IsSmi()) {
PutSmi(Smi::cast(object));
} else {
SerializeObject(HeapObject::cast(object));
@@ -117,7 +117,7 @@
#ifdef DEBUG
void Serializer::PrintStack() {
for (const auto o : stack_) {
- o->Print();
+ o.Print();
PrintF("\n");
}
}
@@ -141,7 +141,7 @@
DCHECK(index >= 0 && index < kNumberOfHotObjects);
if (FLAG_trace_serializer) {
PrintF(" Encoding hot object %d:", index);
- obj->ShortPrint();
+ obj.ShortPrint();
PrintF("\n");
}
sink_.Put(kHotObject + index, "HotObject");
@@ -166,7 +166,7 @@
DCHECK(reference.is_back_reference());
if (FLAG_trace_serializer) {
PrintF(" Encoding back reference to: ");
- obj->ShortPrint();
+ obj.ShortPrint();
PrintF("\n");
}
@@ -179,15 +179,15 @@
}
bool Serializer::ObjectIsBytecodeHandler(HeapObject obj) const {
- if (!obj->IsCode()) return false;
- return (Code::cast(obj)->kind() == Code::BYTECODE_HANDLER);
+ if (!obj.IsCode()) return false;
+ return (Code::cast(obj).kind() == Code::BYTECODE_HANDLER);
}
void Serializer::PutRoot(RootIndex root, HeapObject object) {
int root_index = static_cast<int>(root);
if (FLAG_trace_serializer) {
PrintF(" Encoding root %d:", root_index);
- object->ShortPrint();
+ object.ShortPrint();
PrintF("\n");
}
@@ -243,7 +243,7 @@
}
int Serializer::PutAlignmentPrefix(HeapObject object) {
- AllocationAlignment alignment = HeapObject::RequiredAlignment(object->map());
+ AllocationAlignment alignment = HeapObject::RequiredAlignment(object.map());
if (alignment != kWordAligned) {
DCHECK(1 <= alignment && alignment <= 3);
byte prefix = (kAlignmentPrefix - 1) + alignment;
@@ -286,10 +286,10 @@
Code Serializer::CopyCode(Code code) {
code_buffer_.clear(); // Clear buffer without deleting backing store.
- int size = code->CodeSize();
+ int size = code.CodeSize();
code_buffer_.insert(code_buffer_.end(),
- reinterpret_cast<byte*>(code->address()),
- reinterpret_cast<byte*>(code->address() + size));
+ reinterpret_cast<byte*>(code.address()),
+ reinterpret_cast<byte*>(code.address() + size));
// When pointer compression is enabled the checked cast will try to
// decompress map field of off-heap Code object.
return Code::unchecked_cast(HeapObject::FromAddress(
@@ -300,16 +300,16 @@
int size, Map map) {
if (serializer_->code_address_map_) {
const char* code_name =
- serializer_->code_address_map_->Lookup(object_->address());
+ serializer_->code_address_map_->Lookup(object_.address());
LOG(serializer_->isolate_,
- CodeNameEvent(object_->address(), sink_->Position(), code_name));
+ CodeNameEvent(object_.address(), sink_->Position(), code_name));
}
SerializerReference back_reference;
if (space == LO_SPACE) {
sink_->Put(kNewObject + space, "NewLargeObject");
sink_->PutInt(size >> kObjectAlignmentBits, "ObjectSizeInWords");
- CHECK(!object_->IsCode());
+ CHECK(!object_.IsCode());
back_reference = serializer_->allocator()->AllocateLargeObject(size);
} else if (space == MAP_SPACE) {
DCHECK_EQ(Map::kSize, size);
@@ -360,28 +360,28 @@
void Serializer::ObjectSerializer::SerializeJSTypedArray() {
JSTypedArray typed_array = JSTypedArray::cast(object_);
FixedTypedArrayBase elements =
- FixedTypedArrayBase::cast(typed_array->elements());
+ FixedTypedArrayBase::cast(typed_array.elements());
- if (!typed_array->WasDetached()) {
- if (!typed_array->is_on_heap()) {
+ if (!typed_array.WasDetached()) {
+ if (!typed_array.is_on_heap()) {
// Explicitly serialize the backing store now.
- JSArrayBuffer buffer = JSArrayBuffer::cast(typed_array->buffer());
- CHECK_LE(buffer->byte_length(), Smi::kMaxValue);
- CHECK_LE(typed_array->byte_offset(), Smi::kMaxValue);
- int32_t byte_length = static_cast<int32_t>(buffer->byte_length());
- int32_t byte_offset = static_cast<int32_t>(typed_array->byte_offset());
+ JSArrayBuffer buffer = JSArrayBuffer::cast(typed_array.buffer());
+ CHECK_LE(buffer.byte_length(), Smi::kMaxValue);
+ CHECK_LE(typed_array.byte_offset(), Smi::kMaxValue);
+ int32_t byte_length = static_cast<int32_t>(buffer.byte_length());
+ int32_t byte_offset = static_cast<int32_t>(typed_array.byte_offset());
// We need to calculate the backing store from the external pointer
// because the ArrayBuffer may already have been serialized.
void* backing_store = reinterpret_cast<void*>(
- reinterpret_cast<intptr_t>(elements->external_pointer()) -
+ reinterpret_cast<intptr_t>(elements.external_pointer()) -
byte_offset);
int32_t ref = SerializeBackingStore(backing_store, byte_length);
// The external_pointer is the backing_store + typed_array->byte_offset.
// To properly share the buffer, we set the backing store ref here. On
// deserialization we re-add the byte_offset to external_pointer.
- elements->set_external_pointer(
+ elements.set_external_pointer(
reinterpret_cast<void*>(Smi::FromInt(ref).ptr()));
}
} else {
@@ -389,26 +389,26 @@
// same backing store does not know anything about it. This fixup step finds
// detached TypedArrays and clears the values in the FixedTypedArray so that
// we don't try to serialize the now invalid backing store.
- elements->set_external_pointer(reinterpret_cast<void*>(Smi::kZero.ptr()));
- elements->set_number_of_elements_onheap_only(0);
+ elements.set_external_pointer(reinterpret_cast<void*>(Smi::kZero.ptr()));
+ elements.set_number_of_elements_onheap_only(0);
}
SerializeObject();
}
void Serializer::ObjectSerializer::SerializeJSArrayBuffer() {
JSArrayBuffer buffer = JSArrayBuffer::cast(object_);
- void* backing_store = buffer->backing_store();
+ void* backing_store = buffer.backing_store();
// We cannot store byte_length larger than Smi range in the snapshot.
- CHECK_LE(buffer->byte_length(), Smi::kMaxValue);
- int32_t byte_length = static_cast<int32_t>(buffer->byte_length());
+ CHECK_LE(buffer.byte_length(), Smi::kMaxValue);
+ int32_t byte_length = static_cast<int32_t>(buffer.byte_length());
// The embedder-allocated backing store only exists for the off-heap case.
if (backing_store != nullptr) {
int32_t ref = SerializeBackingStore(backing_store, byte_length);
- buffer->set_backing_store(reinterpret_cast<void*>(Smi::FromInt(ref).ptr()));
+ buffer.set_backing_store(reinterpret_cast<void*>(Smi::FromInt(ref).ptr()));
}
SerializeObject();
- buffer->set_backing_store(backing_store);
+ buffer.set_backing_store(backing_store);
}
void Serializer::ObjectSerializer::SerializeExternalString() {
@@ -418,30 +418,30 @@
// for native native source code strings, we replace the resource field
// with the native source id.
// For the rest we serialize them to look like ordinary sequential strings.
- if (object_->map() != ReadOnlyRoots(heap).native_source_string_map()) {
+ if (object_.map() != ReadOnlyRoots(heap).native_source_string_map()) {
ExternalString string = ExternalString::cast(object_);
- Address resource = string->resource_as_address();
+ Address resource = string.resource_as_address();
ExternalReferenceEncoder::Value reference;
if (serializer_->external_reference_encoder_.TryEncode(resource).To(
&reference)) {
DCHECK(reference.is_from_api());
- string->set_uint32_as_resource(reference.index());
+ string.set_uint32_as_resource(reference.index());
SerializeObject();
- string->set_address_as_resource(resource);
+ string.set_address_as_resource(resource);
} else {
SerializeExternalStringAsSequentialString();
}
} else {
ExternalOneByteString string = ExternalOneByteString::cast(object_);
- DCHECK(string->is_uncached());
+ DCHECK(string.is_uncached());
const NativesExternalStringResource* resource =
reinterpret_cast<const NativesExternalStringResource*>(
- string->resource());
+ string.resource());
// Replace the resource field with the type and index of the native source.
- string->set_resource(resource->EncodeForSerialization());
+ string.set_resource(resource->EncodeForSerialization());
SerializeObject();
// Restore the resource field.
- string->set_resource(resource);
+ string.set_resource(resource);
}
}
@@ -449,29 +449,29 @@
// Instead of serializing this as an external string, we serialize
// an imaginary sequential string with the same content.
ReadOnlyRoots roots(serializer_->isolate());
- DCHECK(object_->IsExternalString());
- DCHECK(object_->map() != roots.native_source_string_map());
+ DCHECK(object_.IsExternalString());
+ DCHECK(object_.map() != roots.native_source_string_map());
ExternalString string = ExternalString::cast(object_);
- int length = string->length();
+ int length = string.length();
Map map;
int content_size;
int allocation_size;
const byte* resource;
// Find the map and size for the imaginary sequential string.
- bool internalized = object_->IsInternalizedString();
- if (object_->IsExternalOneByteString()) {
+ bool internalized = object_.IsInternalizedString();
+ if (object_.IsExternalOneByteString()) {
map = internalized ? roots.one_byte_internalized_string_map()
: roots.one_byte_string_map();
allocation_size = SeqOneByteString::SizeFor(length);
content_size = length * kCharSize;
resource = reinterpret_cast<const byte*>(
- ExternalOneByteString::cast(string)->resource()->data());
+ ExternalOneByteString::cast(string).resource()->data());
} else {
map = internalized ? roots.internalized_string_map() : roots.string_map();
allocation_size = SeqTwoByteString::SizeFor(length);
content_size = length * kShortSize;
resource = reinterpret_cast<const byte*>(
- ExternalTwoByteString::cast(string)->resource()->data());
+ ExternalTwoByteString::cast(string).resource()->data());
}
AllocationSpace space =
@@ -487,7 +487,7 @@
sink_->PutInt(bytes_to_output, "length");
// Serialize string header (except for map).
- uint8_t* string_start = reinterpret_cast<uint8_t*>(string->address());
+ uint8_t* string_start = reinterpret_cast<uint8_t*>(string.address());
for (int i = HeapObject::kHeaderSize; i < SeqString::kHeaderSize; i++) {
sink_->PutSection(string_start[i], "StringHeader");
}
@@ -507,19 +507,19 @@
class UnlinkWeakNextScope {
public:
explicit UnlinkWeakNextScope(Heap* heap, HeapObject object) {
- if (object->IsAllocationSite() &&
- AllocationSite::cast(object)->HasWeakNext()) {
+ if (object.IsAllocationSite() &&
+ AllocationSite::cast(object).HasWeakNext()) {
object_ = object;
- next_ = AllocationSite::cast(object)->weak_next();
- AllocationSite::cast(object)->set_weak_next(
+ next_ = AllocationSite::cast(object).weak_next();
+ AllocationSite::cast(object).set_weak_next(
ReadOnlyRoots(heap).undefined_value());
}
}
~UnlinkWeakNextScope() {
if (!object_.is_null()) {
- AllocationSite::cast(object_)->set_weak_next(next_,
- UPDATE_WEAK_WRITE_BARRIER);
+ AllocationSite::cast(object_).set_weak_next(next_,
+ UPDATE_WEAK_WRITE_BARRIER);
}
}
@@ -532,48 +532,48 @@
void Serializer::ObjectSerializer::Serialize() {
if (FLAG_trace_serializer) {
PrintF(" Encoding heap object: ");
- object_->ShortPrint();
+ object_.ShortPrint();
PrintF("\n");
}
- if (object_->IsExternalString()) {
+ if (object_.IsExternalString()) {
SerializeExternalString();
return;
} else if (!ReadOnlyHeap::Contains(object_)) {
// Only clear padding for strings outside RO_SPACE. RO_SPACE should have
// been cleared elsewhere.
- if (object_->IsSeqOneByteString()) {
+ if (object_.IsSeqOneByteString()) {
// Clear padding bytes at the end. Done here to avoid having to do this
// at allocation sites in generated code.
- SeqOneByteString::cast(object_)->clear_padding();
- } else if (object_->IsSeqTwoByteString()) {
- SeqTwoByteString::cast(object_)->clear_padding();
+ SeqOneByteString::cast(object_).clear_padding();
+ } else if (object_.IsSeqTwoByteString()) {
+ SeqTwoByteString::cast(object_).clear_padding();
}
}
- if (object_->IsJSTypedArray()) {
+ if (object_.IsJSTypedArray()) {
SerializeJSTypedArray();
return;
}
- if (object_->IsJSArrayBuffer()) {
+ if (object_.IsJSArrayBuffer()) {
SerializeJSArrayBuffer();
return;
}
// We don't expect fillers.
- DCHECK(!object_->IsFiller());
+ DCHECK(!object_.IsFiller());
- if (object_->IsScript()) {
+ if (object_.IsScript()) {
// Clear cached line ends.
Object undefined = ReadOnlyRoots(serializer_->isolate()).undefined_value();
- Script::cast(object_)->set_line_ends(undefined);
+ Script::cast(object_).set_line_ends(undefined);
}
SerializeObject();
}
void Serializer::ObjectSerializer::SerializeObject() {
- int size = object_->Size();
- Map map = object_->map();
+ int size = object_.Size();
+ Map map = object_.map();
AllocationSpace space =
MemoryChunk::FromHeapObject(object_)->owner()->identity();
// Young generation large objects are tenured.
@@ -602,12 +602,12 @@
void Serializer::ObjectSerializer::SerializeDeferred() {
if (FLAG_trace_serializer) {
PrintF(" Encoding deferred heap object: ");
- object_->ShortPrint();
+ object_.ShortPrint();
PrintF("\n");
}
- int size = object_->Size();
- Map map = object_->map();
+ int size = object_.Size();
+ Map map = object_.map();
SerializerReference back_reference =
serializer_->reference_map()->LookupReference(
reinterpret_cast<void*>(object_.ptr()));
@@ -627,16 +627,16 @@
void Serializer::ObjectSerializer::SerializeContent(Map map, int size) {
UnlinkWeakNextScope unlink_weak_next(serializer_->isolate()->heap(), object_);
- if (object_->IsCode()) {
+ if (object_.IsCode()) {
// For code objects, output raw bytes first.
OutputCode(size);
// Then iterate references via reloc info.
- object_->IterateBody(map, size, this);
+ object_.IterateBody(map, size, this);
} else {
// For other objects, iterate references first.
- object_->IterateBody(map, size, this);
+ object_.IterateBody(map, size, this);
// Then output data payload, if any.
- OutputRawData(object_->address() + size);
+ OutputRawData(object_.address() + size);
}
}
@@ -710,7 +710,7 @@
void Serializer::ObjectSerializer::VisitExternalReference(Foreign host,
Address* p) {
auto encoded_reference =
- serializer_->EncodeExternalReference(host->foreign_address());
+ serializer_->EncodeExternalReference(host.foreign_address());
if (encoded_reference.is_from_api()) {
sink_->Put(kApiReference, "ApiRef");
} else {
@@ -737,10 +737,10 @@
void Serializer::ObjectSerializer::VisitInternalReference(Code host,
RelocInfo* rinfo) {
- Address entry = Code::cast(object_)->entry();
+ Address entry = Code::cast(object_).entry();
DCHECK_GE(rinfo->target_internal_reference(), entry);
uintptr_t target_offset = rinfo->target_internal_reference() - entry;
- DCHECK_LE(target_offset, Code::cast(object_)->raw_instruction_size());
+ DCHECK_LE(target_offset, Code::cast(object_).raw_instruction_size());
sink_->Put(kInternalReference, "InternalRef");
sink_->PutInt(target_offset, "internal ref value");
}
@@ -763,7 +763,7 @@
CHECK(Builtins::IsIsolateIndependentBuiltin(target));
sink_->Put(kOffHeapTarget, "OffHeapTarget");
- sink_->PutInt(target->builtin_index(), "builtin index");
+ sink_->PutInt(target.builtin_index(), "builtin index");
bytes_processed_so_far_ += rinfo->target_address_size();
}
@@ -803,7 +803,7 @@
} // anonymous namespace
void Serializer::ObjectSerializer::OutputRawData(Address up_to) {
- Address object_start = object_->address();
+ Address object_start = object_.address();
int base = bytes_processed_so_far_;
int up_to_offset = static_cast<int>(up_to - object_start);
int to_skip = up_to_offset - bytes_processed_so_far_;
@@ -825,13 +825,13 @@
__msan_check_mem_is_initialized(
reinterpret_cast<void*>(object_start + base), bytes_to_output);
#endif // MEMORY_SANITIZER
- if (object_->IsBytecodeArray()) {
+ if (object_.IsBytecodeArray()) {
// The bytecode age field can be changed by GC concurrently.
byte field_value = BytecodeArray::kNoAgeBytecodeAge;
OutputRawWithCustomField(sink_, object_start, base, bytes_to_output,
BytecodeArray::kBytecodeAgeOffset,
sizeof(field_value), &field_value);
- } else if (object_->IsDescriptorArray()) {
+ } else if (object_.IsDescriptorArray()) {
// The number of marked descriptors field can be changed by GC
// concurrently.
byte field_value[2];
@@ -865,7 +865,7 @@
// With enabled pointer compression normal accessors no longer work for
// off-heap objects, so we have to get the relocation info data via the
// on-heap code object.
- ByteArray relocation_info = on_heap_code->unchecked_relocation_info();
+ ByteArray relocation_info = on_heap_code.unchecked_relocation_info();
for (RelocIterator it(off_heap_code, relocation_info, mode_mask); !it.done();
it.next()) {
RelocInfo* rinfo = it.rinfo();
@@ -873,9 +873,9 @@
}
// We need to wipe out the header fields *after* wiping out the
// relocations, because some of these fields are needed for the latter.
- off_heap_code->WipeOutHeader();
+ off_heap_code.WipeOutHeader();
- Address start = off_heap_code->address() + Code::kDataStart;
+ Address start = off_heap_code.address() + Code::kDataStart;
int bytes_to_output = size - Code::kDataStart;
DCHECK(IsAligned(bytes_to_output, kTaggedSize));
diff --git a/src/snapshot/serializer.h b/src/snapshot/serializer.h
index 27870fa..39a71ea 100644
--- a/src/snapshot/serializer.h
+++ b/src/snapshot/serializer.h
@@ -29,7 +29,7 @@
}
void CodeMoveEvent(AbstractCode from, AbstractCode to) override {
- address_to_name_map_.Move(from->address(), to->address());
+ address_to_name_map_.Move(from.address(), to.address());
}
void CodeDisableOptEvent(AbstractCode code,
@@ -116,7 +116,7 @@
void LogRecordedBuffer(AbstractCode code, SharedFunctionInfo,
const char* name, int length) override {
- address_to_name_map_.Insert(code->address(), name, length);
+ address_to_name_map_.Insert(code.address(), name, length);
}
void LogRecordedBuffer(const wasm::WasmCode* code, const char* name,
@@ -167,7 +167,7 @@
bool ReferenceMapContains(HeapObject o) {
return reference_map()
- ->LookupReference(reinterpret_cast<void*>(o->ptr()))
+ ->LookupReference(reinterpret_cast<void*>(o.ptr()))
.is_valid();
}
@@ -235,7 +235,7 @@
Code CopyCode(Code code);
void QueueDeferredObject(HeapObject obj) {
- DCHECK(reference_map_.LookupReference(reinterpret_cast<void*>(obj->ptr()))
+ DCHECK(reference_map_.LookupReference(reinterpret_cast<void*>(obj.ptr()))
.is_back_reference());
deferred_objects_.push_back(obj);
}
diff --git a/src/snapshot/startup-serializer.cc b/src/snapshot/startup-serializer.cc
index 2d3a4eb..3646e41 100644
--- a/src/snapshot/startup-serializer.cc
+++ b/src/snapshot/startup-serializer.cc
@@ -36,25 +36,25 @@
namespace {
bool IsUnexpectedCodeObject(Isolate* isolate, HeapObject obj) {
- if (!obj->IsCode()) return false;
+ if (!obj.IsCode()) return false;
Code code = Code::cast(obj);
// TODO(v8:8768): Deopt entry code should not be serialized.
- if (code->kind() == Code::STUB && isolate->deoptimizer_data() != nullptr) {
+ if (code.kind() == Code::STUB && isolate->deoptimizer_data() != nullptr) {
if (isolate->deoptimizer_data()->IsDeoptEntryCode(code)) return false;
}
- if (code->kind() == Code::REGEXP) return false;
- if (!code->is_builtin()) return true;
+ if (code.kind() == Code::REGEXP) return false;
+ if (!code.is_builtin()) return true;
if (!FLAG_embedded_builtins) return false;
- if (code->is_off_heap_trampoline()) return false;
+ if (code.is_off_heap_trampoline()) return false;
// An on-heap builtin. We only expect this for the interpreter entry
// trampoline copy stored on the root list and transitively called builtins.
// See Heap::interpreter_entry_trampoline_for_profiling.
- switch (code->builtin_index()) {
+ switch (code.builtin_index()) {
case Builtins::kAbort:
case Builtins::kCEntry_Return1_DontSaveFPRegs_ArgvOnStack_NoBuiltinExit:
case Builtins::kInterpreterEntryTrampoline:
@@ -71,7 +71,7 @@
#endif // DEBUG
void StartupSerializer::SerializeObject(HeapObject obj) {
- DCHECK(!obj->IsJSFunction());
+ DCHECK(!obj.IsJSFunction());
DCHECK(!IsUnexpectedCodeObject(isolate(), obj));
if (SerializeHotObject(obj)) return;
@@ -84,26 +84,25 @@
use_simulator = true;
#endif
- if (use_simulator && obj->IsAccessorInfo()) {
+ if (use_simulator && obj.IsAccessorInfo()) {
// Wipe external reference redirects in the accessor info.
AccessorInfo info = AccessorInfo::cast(obj);
- Address original_address = Foreign::cast(info->getter())->foreign_address();
- Foreign::cast(info->js_getter())->set_foreign_address(original_address);
+ Address original_address = Foreign::cast(info.getter()).foreign_address();
+ Foreign::cast(info.js_getter()).set_foreign_address(original_address);
accessor_infos_.push_back(info);
- } else if (use_simulator && obj->IsCallHandlerInfo()) {
+ } else if (use_simulator && obj.IsCallHandlerInfo()) {
CallHandlerInfo info = CallHandlerInfo::cast(obj);
- Address original_address =
- Foreign::cast(info->callback())->foreign_address();
- Foreign::cast(info->js_callback())->set_foreign_address(original_address);
+ Address original_address = Foreign::cast(info.callback()).foreign_address();
+ Foreign::cast(info.js_callback()).set_foreign_address(original_address);
call_handler_infos_.push_back(info);
- } else if (obj->IsScript() && Script::cast(obj)->IsUserJavaScript()) {
- Script::cast(obj)->set_context_data(
+ } else if (obj.IsScript() && Script::cast(obj).IsUserJavaScript()) {
+ Script::cast(obj).set_context_data(
ReadOnlyRoots(isolate()).uninitialized_symbol());
- } else if (obj->IsSharedFunctionInfo()) {
+ } else if (obj.IsSharedFunctionInfo()) {
// Clear inferred name for native functions.
SharedFunctionInfo shared = SharedFunctionInfo::cast(obj);
- if (!shared->IsSubjectToDebugging() && shared->HasUncompiledData()) {
- shared->uncompiled_data()->set_inferred_name(
+ if (!shared.IsSubjectToDebugging() && shared.HasUncompiledData()) {
+ shared.uncompiled_data().set_inferred_name(
ReadOnlyRoots(isolate()).empty_string());
}
}
@@ -150,7 +149,7 @@
: isolate_(isolate) {
AddToSet(isolate->heap()->serialized_objects());
for (auto const& context : *contexts) {
- AddToSet(context->serialized_objects());
+ AddToSet(context.serialized_objects());
}
}
@@ -167,8 +166,8 @@
}
void SerializedHandleChecker::AddToSet(FixedArray serialized) {
- int length = serialized->length();
- for (int i = 0; i < length; i++) serialized_.insert(serialized->get(i));
+ int length = serialized.length();
+ for (int i = 0; i < length; i++) serialized_.insert(serialized.get(i));
}
void SerializedHandleChecker::VisitRootPointers(Root root,
@@ -179,7 +178,7 @@
if (serialized_.find(*p) != serialized_.end()) continue;
PrintF("%s handle not serialized: ",
root == Root::kGlobalHandles ? "global" : "eternal");
- (*p)->Print();
+ (*p).Print();
ok_ = false;
}
}
diff --git a/src/strings/string-builder-inl.h b/src/strings/string-builder-inl.h
index 6752f34..9f218da 100644
--- a/src/strings/string-builder-inl.h
+++ b/src/strings/string-builder-inl.h
@@ -296,11 +296,11 @@
if (sizeof(DestChar) == 1) {
DCHECK_EQ(String::ONE_BYTE_ENCODING, encoding_);
SeqOneByteString::cast(*current_part_)
- ->SeqOneByteStringSet(current_index_++, c);
+ .SeqOneByteStringSet(current_index_++, c);
} else {
DCHECK_EQ(String::TWO_BYTE_ENCODING, encoding_);
SeqTwoByteString::cast(*current_part_)
- ->SeqTwoByteStringSet(current_index_++, c);
+ .SeqTwoByteStringSet(current_index_++, c);
}
if (current_index_ == part_length_) Extend();
}
diff --git a/src/strings/string-builder.cc b/src/strings/string-builder.cc
index 60ab21f..f647aed 100644
--- a/src/strings/string-builder.cc
+++ b/src/strings/string-builder.cc
@@ -17,8 +17,8 @@
DisallowHeapAllocation no_gc;
int position = 0;
for (int i = 0; i < array_length; i++) {
- Object element = fixed_array->get(i);
- if (element->IsSmi()) {
+ Object element = fixed_array.get(i);
+ if (element.IsSmi()) {
// Smi encoding of position and length.
int encoded_slice = Smi::ToInt(element);
int pos;
@@ -29,8 +29,8 @@
len = StringBuilderSubstringLength::decode(encoded_slice);
} else {
// Position and length encoded in two smis.
- Object obj = fixed_array->get(++i);
- DCHECK(obj->IsSmi());
+ Object obj = fixed_array.get(++i);
+ DCHECK(obj.IsSmi());
pos = Smi::ToInt(obj);
len = -encoded_slice;
}
@@ -38,7 +38,7 @@
position += len;
} else {
String string = String::cast(element);
- int element_length = string->length();
+ int element_length = string.length();
String::WriteToFlat(string, sink + position, 0, element_length);
position += element_length;
}
@@ -59,8 +59,8 @@
int position = 0;
for (int i = 0; i < array_length; i++) {
int increment = 0;
- Object elt = fixed_array->get(i);
- if (elt->IsSmi()) {
+ Object elt = fixed_array.get(i);
+ if (elt.IsSmi()) {
// Smi encoding of position and length.
int smi_value = Smi::ToInt(elt);
int pos;
@@ -75,8 +75,8 @@
// Get the position and check that it is a positive smi.
i++;
if (i >= array_length) return -1;
- Object next_smi = fixed_array->get(i);
- if (!next_smi->IsSmi()) return -1;
+ Object next_smi = fixed_array.get(i);
+ if (!next_smi.IsSmi()) return -1;
pos = Smi::ToInt(next_smi);
if (pos < 0) return -1;
}
@@ -84,11 +84,11 @@
DCHECK_GE(len, 0);
if (pos > special_length || len > special_length - pos) return -1;
increment = len;
- } else if (elt->IsString()) {
+ } else if (elt.IsString()) {
String element = String::cast(elt);
- int element_length = element->length();
+ int element_length = element.length();
increment = element_length;
- if (*one_byte && !element->IsOneByteRepresentation()) {
+ if (*one_byte && !element.IsOneByteRepresentation()) {
*one_byte = false;
}
} else {
@@ -140,14 +140,14 @@
}
void FixedArrayBuilder::Add(Object value) {
- DCHECK(!value->IsSmi());
+ DCHECK(!value.IsSmi());
array_->set(length_, value);
length_++;
has_non_smi_elements_ = true;
}
void FixedArrayBuilder::Add(Smi value) {
- DCHECK(value->IsSmi());
+ DCHECK(value.IsSmi());
array_->set(length_, value);
length_++;
}
diff --git a/src/strings/string-stream.cc b/src/strings/string-stream.cc
index 362351c..ad441de 100644
--- a/src/strings/string-stream.cc
+++ b/src/strings/string-stream.cc
@@ -191,15 +191,15 @@
}
void StringStream::PrintObject(Object o) {
- o->ShortPrint(this);
- if (o->IsString()) {
- if (String::cast(o)->length() <= String::kMaxShortPrintLength) {
+ o.ShortPrint(this);
+ if (o.IsString()) {
+ if (String::cast(o).length() <= String::kMaxShortPrintLength) {
return;
}
- } else if (o->IsNumber() || o->IsOddball()) {
+ } else if (o.IsNumber() || o.IsOddball()) {
return;
}
- if (o->IsHeapObject() && object_print_mode_ == kPrintObjectVerbose) {
+ if (o.IsHeapObject() && object_print_mode_ == kPrintObjectVerbose) {
// TODO(delphick): Consider whether we can get the isolate without using
// TLS.
Isolate* isolate = Isolate::Current();
@@ -267,7 +267,7 @@
}
#endif
-bool StringStream::Put(String str) { return Put(str, 0, str->length()); }
+bool StringStream::Put(String str) { return Put(str, 0, str.length()); }
bool StringStream::Put(String str, int start, int end) {
StringCharacterStream stream(str, start);
@@ -284,9 +284,9 @@
}
void StringStream::PrintName(Object name) {
- if (name->IsString()) {
+ if (name.IsString()) {
String str = String::cast(name);
- if (str->length() > 0) {
+ if (str.length() > 0) {
Put(str);
} else {
Add("/* anonymous */");
@@ -297,32 +297,32 @@
}
void StringStream::PrintUsingMap(JSObject js_object) {
- Map map = js_object->map();
- int real_size = map->NumberOfOwnDescriptors();
- DescriptorArray descs = map->instance_descriptors();
+ Map map = js_object.map();
+ int real_size = map.NumberOfOwnDescriptors();
+ DescriptorArray descs = map.instance_descriptors();
for (int i = 0; i < real_size; i++) {
- PropertyDetails details = descs->GetDetails(i);
+ PropertyDetails details = descs.GetDetails(i);
if (details.location() == kField) {
DCHECK_EQ(kData, details.kind());
- Object key = descs->GetKey(i);
- if (key->IsString() || key->IsNumber()) {
+ Object key = descs.GetKey(i);
+ if (key.IsString() || key.IsNumber()) {
int len = 3;
- if (key->IsString()) {
- len = String::cast(key)->length();
+ if (key.IsString()) {
+ len = String::cast(key).length();
}
for (; len < 18; len++) Put(' ');
- if (key->IsString()) {
+ if (key.IsString()) {
Put(String::cast(key));
} else {
- key->ShortPrint();
+ key.ShortPrint();
}
Add(": ");
FieldIndex index = FieldIndex::ForDescriptor(map, i);
- if (js_object->IsUnboxedDoubleField(index)) {
- double value = js_object->RawFastDoublePropertyAt(index);
+ if (js_object.IsUnboxedDoubleField(index)) {
+ double value = js_object.RawFastDoublePropertyAt(index);
Add("<unboxed double> %.16g\n", FmtElm(value));
} else {
- Object value = js_object->RawFastPropertyAt(index);
+ Object value = js_object.RawFastPropertyAt(index);
Add("%o\n", value);
}
}
@@ -331,14 +331,14 @@
}
void StringStream::PrintFixedArray(FixedArray array, unsigned int limit) {
- ReadOnlyRoots roots = array->GetReadOnlyRoots();
+ ReadOnlyRoots roots = array.GetReadOnlyRoots();
for (unsigned int i = 0; i < 10 && i < limit; i++) {
- Object element = array->get(i);
- if (element->IsTheHole(roots)) continue;
+ Object element = array.get(i);
+ if (element.IsTheHole(roots)) continue;
for (int len = 1; len < 18; len++) {
Put(' ');
}
- Add("%d: %o\n", i, array->get(i));
+ Add("%d: %o\n", i, array.get(i));
}
if (limit >= 10) {
Add(" ...\n");
@@ -346,9 +346,9 @@
}
void StringStream::PrintByteArray(ByteArray byte_array) {
- unsigned int limit = byte_array->length();
+ unsigned int limit = byte_array.length();
for (unsigned int i = 0; i < 10 && i < limit; i++) {
- byte b = byte_array->get(i);
+ byte b = byte_array.get(i);
Add(" %d: %3d 0x%02x", i, b, b);
if (b >= ' ' && b <= '~') {
Add(" '%c'", b);
@@ -374,36 +374,36 @@
for (size_t i = 0; i < debug_object_cache->size(); i++) {
HeapObject printee = *(*debug_object_cache)[i];
Add(" #%d# %p: ", static_cast<int>(i),
- reinterpret_cast<void*>(printee->ptr()));
- printee->ShortPrint(this);
+ reinterpret_cast<void*>(printee.ptr()));
+ printee.ShortPrint(this);
Add("\n");
- if (printee->IsJSObject()) {
- if (printee->IsJSValue()) {
- Add(" value(): %o\n", JSValue::cast(printee)->value());
+ if (printee.IsJSObject()) {
+ if (printee.IsJSValue()) {
+ Add(" value(): %o\n", JSValue::cast(printee).value());
}
PrintUsingMap(JSObject::cast(printee));
- if (printee->IsJSArray()) {
+ if (printee.IsJSArray()) {
JSArray array = JSArray::cast(printee);
- if (array->HasObjectElements()) {
- unsigned int limit = FixedArray::cast(array->elements())->length();
+ if (array.HasObjectElements()) {
+ unsigned int limit = FixedArray::cast(array.elements()).length();
unsigned int length =
- static_cast<uint32_t>(JSArray::cast(array)->length()->Number());
+ static_cast<uint32_t>(JSArray::cast(array).length().Number());
if (length < limit) limit = length;
- PrintFixedArray(FixedArray::cast(array->elements()), limit);
+ PrintFixedArray(FixedArray::cast(array.elements()), limit);
}
}
- } else if (printee->IsByteArray()) {
+ } else if (printee.IsByteArray()) {
PrintByteArray(ByteArray::cast(printee));
- } else if (printee->IsFixedArray()) {
- unsigned int limit = FixedArray::cast(printee)->length();
+ } else if (printee.IsFixedArray()) {
+ unsigned int limit = FixedArray::cast(printee).length();
PrintFixedArray(FixedArray::cast(printee), limit);
}
}
}
void StringStream::PrintSecurityTokenIfChanged(JSFunction fun) {
- Object token = fun->native_context()->security_token();
- Isolate* isolate = fun->GetIsolate();
+ Object token = fun.native_context().security_token();
+ Isolate* isolate = fun.GetIsolate();
if (token != isolate->string_stream_current_security_token()) {
Add("Security context: %o\n", token);
isolate->set_string_stream_current_security_token(token);
@@ -412,32 +412,32 @@
void StringStream::PrintFunction(JSFunction fun, Object receiver, Code* code) {
PrintPrototype(fun, receiver);
- *code = fun->code();
+ *code = fun.code();
}
void StringStream::PrintPrototype(JSFunction fun, Object receiver) {
- Object name = fun->shared()->Name();
+ Object name = fun.shared().Name();
bool print_name = false;
- Isolate* isolate = fun->GetIsolate();
- if (receiver->IsNullOrUndefined(isolate) || receiver->IsTheHole(isolate) ||
- receiver->IsJSProxy()) {
+ Isolate* isolate = fun.GetIsolate();
+ if (receiver.IsNullOrUndefined(isolate) || receiver.IsTheHole(isolate) ||
+ receiver.IsJSProxy()) {
print_name = true;
} else if (!isolate->context().is_null()) {
- if (!receiver->IsJSObject()) {
- receiver = receiver->GetPrototypeChainRootMap(isolate)->prototype();
+ if (!receiver.IsJSObject()) {
+ receiver = receiver.GetPrototypeChainRootMap(isolate).prototype();
}
for (PrototypeIterator iter(isolate, JSObject::cast(receiver),
kStartAtReceiver);
!iter.IsAtEnd(); iter.Advance()) {
- if (iter.GetCurrent()->IsJSProxy()) break;
- Object key = iter.GetCurrent<JSObject>()->SlowReverseLookup(fun);
- if (!key->IsUndefined(isolate)) {
- if (!name->IsString() || !key->IsString() ||
- !String::cast(name)->Equals(String::cast(key))) {
+ if (iter.GetCurrent().IsJSProxy()) break;
+ Object key = iter.GetCurrent<JSObject>().SlowReverseLookup(fun);
+ if (!key.IsUndefined(isolate)) {
+ if (!name.IsString() || !key.IsString() ||
+ !String::cast(name).Equals(String::cast(key))) {
print_name = true;
}
- if (name->IsString() && String::cast(name)->length() == 0) {
+ if (name.IsString() && String::cast(name).length() == 0) {
print_name = false;
}
name = key;
@@ -450,7 +450,7 @@
// which it was looked up.
if (print_name) {
Add("(aka ");
- PrintName(fun->shared()->Name());
+ PrintName(fun.shared().Name());
Put(')');
}
}
diff --git a/src/value-serializer.cc b/src/value-serializer.cc
index f81c051..20819ea 100644
--- a/src/value-serializer.cc
+++ b/src/value-serializer.cc
@@ -268,12 +268,12 @@
}
void ValueSerializer::WriteBigIntContents(BigInt bigint) {
- uint32_t bitfield = bigint->GetBitfieldForSerialization();
+ uint32_t bitfield = bigint.GetBitfieldForSerialization();
int bytelength = BigInt::DigitsByteLengthForBitfield(bitfield);
WriteVarint<uint32_t>(bitfield);
uint8_t* dest;
if (ReserveRawBytes(bytelength).To(&dest)) {
- bigint->SerializeDigits(dest);
+ bigint.SerializeDigits(dest);
}
}
@@ -356,7 +356,7 @@
}
DCHECK(object->IsHeapObject());
- switch (HeapObject::cast(*object)->map()->instance_type()) {
+ switch (HeapObject::cast(*object).map().instance_type()) {
case ODDBALL_TYPE:
WriteOddball(Oddball::cast(*object));
return ThrowIfOutOfMemory();
@@ -401,7 +401,7 @@
void ValueSerializer::WriteOddball(Oddball oddball) {
SerializationTag tag = SerializationTag::kUndefined;
- switch (oddball->kind()) {
+ switch (oddball.kind()) {
case Oddball::kUndefined:
tag = SerializationTag::kUndefined;
break;
@@ -423,17 +423,17 @@
void ValueSerializer::WriteSmi(Smi smi) {
static_assert(kSmiValueSize <= 32, "Expected SMI <= 32 bits.");
WriteTag(SerializationTag::kInt32);
- WriteZigZag<int32_t>(smi->value());
+ WriteZigZag<int32_t>(smi.value());
}
void ValueSerializer::WriteHeapNumber(HeapNumber number) {
WriteTag(SerializationTag::kDouble);
- WriteDouble(number->value());
+ WriteDouble(number.value());
}
void ValueSerializer::WriteMutableHeapNumber(MutableHeapNumber number) {
WriteTag(SerializationTag::kDouble);
- WriteDouble(number->value());
+ WriteDouble(number.value());
}
void ValueSerializer::WriteBigInt(BigInt bigint) {
@@ -477,7 +477,7 @@
*id_map_entry = id + 1;
// Eliminate callable and exotic objects, which should not be serialized.
- InstanceType instance_type = receiver->map()->instance_type();
+ InstanceType instance_type = receiver->map().instance_type();
if (receiver->IsCallable() || (IsSpecialReceiverInstanceType(instance_type) &&
instance_type != JS_SPECIAL_API_OBJECT_TYPE)) {
ThrowDataCloneError(MessageTemplate::kDataCloneError, receiver);
@@ -543,9 +543,9 @@
}
Maybe<bool> ValueSerializer::WriteJSObject(Handle<JSObject> object) {
- DCHECK(!object->map()->IsCustomElementsReceiverMap());
+ DCHECK(!object->map().IsCustomElementsReceiverMap());
const bool can_serialize_fast =
- object->HasFastProperties() && object->elements()->length() == 0;
+ object->HasFastProperties() && object->elements().length() == 0;
if (!can_serialize_fast) return WriteJSObjectSlow(object);
Handle<Map> map(object->map(), isolate_);
@@ -556,9 +556,9 @@
uint32_t properties_written = 0;
bool map_changed = false;
for (int i = 0; i < map->NumberOfOwnDescriptors(); i++) {
- Handle<Name> key(map->instance_descriptors()->GetKey(i), isolate_);
+ Handle<Name> key(map->instance_descriptors().GetKey(i), isolate_);
if (!key->IsString()) continue;
- PropertyDetails details = map->instance_descriptors()->GetDetails(i);
+ PropertyDetails details = map->instance_descriptors().GetDetails(i);
if (details.IsDontEnum()) continue;
Handle<Object> value;
@@ -606,7 +606,7 @@
Maybe<bool> ValueSerializer::WriteJSArray(Handle<JSArray> array) {
uint32_t length = 0;
- bool valid_length = array->length()->ToArrayLength(&length);
+ bool valid_length = array->length().ToArrayLength(&length);
DCHECK(valid_length);
USE(valid_length);
@@ -653,7 +653,7 @@
// Fall back to slow path.
break;
}
- Handle<Object> element(FixedArray::cast(array->elements())->get(i),
+ Handle<Object> element(FixedArray::cast(array->elements()).get(i),
isolate_);
if (!WriteObject(element).FromMaybe(false)) return Nothing<bool>();
}
@@ -717,26 +717,26 @@
void ValueSerializer::WriteJSDate(JSDate date) {
WriteTag(SerializationTag::kDate);
- WriteDouble(date->value()->Number());
+ WriteDouble(date.value().Number());
}
Maybe<bool> ValueSerializer::WriteJSValue(Handle<JSValue> value) {
Object inner_value = value->value();
- if (inner_value->IsTrue(isolate_)) {
+ if (inner_value.IsTrue(isolate_)) {
WriteTag(SerializationTag::kTrueObject);
- } else if (inner_value->IsFalse(isolate_)) {
+ } else if (inner_value.IsFalse(isolate_)) {
WriteTag(SerializationTag::kFalseObject);
- } else if (inner_value->IsNumber()) {
+ } else if (inner_value.IsNumber()) {
WriteTag(SerializationTag::kNumberObject);
- WriteDouble(inner_value->Number());
- } else if (inner_value->IsBigInt()) {
+ WriteDouble(inner_value.Number());
+ } else if (inner_value.IsBigInt()) {
WriteTag(SerializationTag::kBigIntObject);
WriteBigIntContents(BigInt::cast(inner_value));
- } else if (inner_value->IsString()) {
+ } else if (inner_value.IsString()) {
WriteTag(SerializationTag::kStringObject);
WriteString(handle(String::cast(inner_value), isolate_));
} else {
- DCHECK(inner_value->IsSymbol());
+ DCHECK(inner_value.IsSymbol());
ThrowDataCloneError(MessageTemplate::kDataCloneError, value);
return Nothing<bool>();
}
@@ -745,8 +745,8 @@
void ValueSerializer::WriteJSRegExp(JSRegExp regexp) {
WriteTag(SerializationTag::kRegExp);
- WriteString(handle(regexp->Pattern(), isolate_));
- WriteVarint(static_cast<uint32_t>(regexp->GetFlags()));
+ WriteString(handle(regexp.Pattern(), isolate_));
+ WriteVarint(static_cast<uint32_t>(regexp.GetFlags()));
}
Maybe<bool> ValueSerializer::WriteJSMap(Handle<JSMap> map) {
@@ -855,8 +855,8 @@
}
WriteTag(SerializationTag::kArrayBufferView);
ArrayBufferViewTag tag = ArrayBufferViewTag::kInt8Array;
- if (view->IsJSTypedArray()) {
- switch (JSTypedArray::cast(view)->type()) {
+ if (view.IsJSTypedArray()) {
+ switch (JSTypedArray::cast(view).type()) {
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) \
case kExternal##Type##Array: \
tag = ArrayBufferViewTag::k##Type##Array; \
@@ -865,12 +865,12 @@
#undef TYPED_ARRAY_CASE
}
} else {
- DCHECK(view->IsJSDataView());
+ DCHECK(view.IsJSDataView());
tag = ArrayBufferViewTag::kDataView;
}
WriteVarint(static_cast<uint8_t>(tag));
- WriteVarint(static_cast<uint32_t>(view->byte_offset()));
- WriteVarint(static_cast<uint32_t>(view->byte_length()));
+ WriteVarint(static_cast<uint32_t>(view.byte_offset()));
+ WriteVarint(static_cast<uint32_t>(view.byte_length()));
return ThrowIfOutOfMemory();
}
@@ -916,7 +916,7 @@
}
Maybe<bool> ValueSerializer::WriteWasmMemory(Handle<WasmMemoryObject> object) {
- if (!object->array_buffer()->is_shared()) {
+ if (!object->array_buffer().is_shared()) {
ThrowDataCloneError(MessageTemplate::kDataCloneError, object);
return Nothing<bool>();
}
@@ -1896,13 +1896,13 @@
static void CommitProperties(Handle<JSObject> object, Handle<Map> map,
const std::vector<Handle<Object>>& properties) {
JSObject::AllocateStorageForMap(object, map);
- DCHECK(!object->map()->is_dictionary_map());
+ DCHECK(!object->map().is_dictionary_map());
DisallowHeapAllocation no_gc;
- DescriptorArray descriptors = object->map()->instance_descriptors();
+ DescriptorArray descriptors = object->map().instance_descriptors();
for (unsigned i = 0; i < properties.size(); i++) {
// Initializing store.
- object->WriteToField(i, descriptors->GetDetails(i), *properties[i]);
+ object->WriteToField(i, descriptors.GetDetails(i), *properties[i]);
}
}
@@ -1920,7 +1920,7 @@
bool transitioning = true;
Handle<Map> map(object->map(), isolate_);
DCHECK(!map->is_dictionary_map());
- DCHECK_EQ(0, map->instance_descriptors()->number_of_descriptors());
+ DCHECK_EQ(0, map->instance_descriptors().number_of_descriptors());
std::vector<Handle<Object>> properties;
properties.reserve(8);
@@ -1971,13 +1971,13 @@
if (transitioning) {
int descriptor = static_cast<int>(properties.size());
PropertyDetails details =
- target->instance_descriptors()->GetDetails(descriptor);
+ target->instance_descriptors().GetDetails(descriptor);
Representation expected_representation = details.representation();
if (value->FitsRepresentation(expected_representation)) {
if (expected_representation.IsHeapObject() &&
!target->instance_descriptors()
- ->GetFieldType(descriptor)
- ->NowContains(value)) {
+ .GetFieldType(descriptor)
+ .NowContains(value)) {
Handle<FieldType> value_type =
value->OptimalType(isolate_, expected_representation);
Map::GeneralizeField(isolate_, target, descriptor,
@@ -1985,8 +1985,8 @@
value_type);
}
DCHECK(target->instance_descriptors()
- ->GetFieldType(descriptor)
- ->NowContains(value));
+ .GetFieldType(descriptor)
+ .NowContains(value));
properties.push_back(value);
map = target;
continue;
@@ -2049,7 +2049,7 @@
bool ValueDeserializer::HasObjectWithID(uint32_t id) {
return id < static_cast<unsigned>(id_map_->length()) &&
- !id_map_->get(id)->IsTheHole(isolate_);
+ !id_map_->get(id).IsTheHole(isolate_);
}
MaybeHandle<JSReceiver> ValueDeserializer::GetObjectWithID(uint32_t id) {
@@ -2057,8 +2057,8 @@
return MaybeHandle<JSReceiver>();
}
Object value = id_map_->get(id);
- if (value->IsTheHole(isolate_)) return MaybeHandle<JSReceiver>();
- DCHECK(value->IsJSReceiver());
+ if (value.IsTheHole(isolate_)) return MaybeHandle<JSReceiver>();
+ DCHECK(value.IsJSReceiver());
return Handle<JSReceiver>(JSReceiver::cast(value), isolate_);
}
diff --git a/src/wasm/c-api.cc b/src/wasm/c-api.cc
index 2fa0f0e..229c614 100644
--- a/src/wasm/c-api.cc
+++ b/src/wasm/c-api.cc
@@ -1162,14 +1162,14 @@
do {
if (value->IsSmi()) return Val(i::Smi::ToInt(*value));
if (value->IsHeapNumber()) {
- return Val(i::DoubleToInt32(i::HeapNumber::cast(*value)->value()));
+ return Val(i::DoubleToInt32(i::HeapNumber::cast(*value).value()));
}
value = i::Object::ToInt32(isolate, value).ToHandleChecked();
// This will loop back at most once.
} while (true);
UNREACHABLE();
case I64:
- if (value->IsBigInt()) return Val(i::BigInt::cast(*value)->AsInt64());
+ if (value->IsBigInt()) return Val(i::BigInt::cast(*value).AsInt64());
return Val(
i::BigInt::FromObject(isolate, value).ToHandleChecked()->AsInt64());
case F32:
@@ -1178,7 +1178,7 @@
return Val(static_cast<float32_t>(i::Smi::ToInt(*value)));
}
if (value->IsHeapNumber()) {
- return Val(i::DoubleToFloat32(i::HeapNumber::cast(*value)->value()));
+ return Val(i::DoubleToFloat32(i::HeapNumber::cast(*value).value()));
}
value = i::Object::ToNumber(isolate, value).ToHandleChecked();
// This will loop back at most once.
@@ -1190,7 +1190,7 @@
return Val(static_cast<float64_t>(i::Smi::ToInt(*value)));
}
if (value->IsHeapNumber()) {
- return Val(i::HeapNumber::cast(*value)->value());
+ return Val(i::HeapNumber::cast(*value).value());
}
value = i::Object::ToNumber(isolate, value).ToHandleChecked();
// This will loop back at most once.
@@ -1638,36 +1638,36 @@
static own<FuncType*> Deserialize(i::PodArray<i::wasm::ValueType> sig) {
int result_arity = ResultArity(sig);
- int param_arity = sig->length() - result_arity - 1;
+ int param_arity = sig.length() - result_arity - 1;
vec<ValType*> results = vec<ValType*>::make_uninitialized(result_arity);
vec<ValType*> params = vec<ValType*>::make_uninitialized(param_arity);
int i = 0;
- for (; i < sig->length(); ++i) {
- results[i] = ValType::make(v8::wasm::v8_valtype_to_wasm(sig->get(i)));
+ for (; i < sig.length(); ++i) {
+ results[i] = ValType::make(v8::wasm::v8_valtype_to_wasm(sig.get(i)));
}
i++;
for (; i < param_arity; ++i) {
- params[i] = ValType::make(v8::wasm::v8_valtype_to_wasm(sig->get(i)));
+ params[i] = ValType::make(v8::wasm::v8_valtype_to_wasm(sig.get(i)));
}
return FuncType::make(std::move(params), std::move(results));
}
static int ResultArity(i::PodArray<i::wasm::ValueType> sig) {
int count = 0;
- for (; count < sig->length(); count++) {
- if (sig->get(count) == kMarker) return count;
+ for (; count < sig.length(); count++) {
+ if (sig.get(count) == kMarker) return count;
}
UNREACHABLE();
}
static int ParamArity(i::PodArray<i::wasm::ValueType> sig) {
- return sig->length() - ResultArity(sig) - 1;
+ return sig.length() - ResultArity(sig) - 1;
}
static i::PodArray<i::wasm::ValueType> GetSig(
i::Handle<i::JSFunction> function) {
- return i::WasmCapiFunction::cast(*function)->GetSerializedSignature();
+ return i::WasmCapiFunction::cast(*function).GetSerializedSignature();
}
};
@@ -1710,7 +1710,7 @@
i::Handle<i::WasmExportedFunction> function =
i::Handle<i::WasmExportedFunction>::cast(func);
i::wasm::FunctionSig* sig =
- function->instance()->module()->functions[function->function_index()].sig;
+ function->instance().module()->functions[function->function_index()].sig;
uint32_t param_arity = static_cast<uint32_t>(sig->parameter_count());
uint32_t result_arity = static_cast<uint32_t>(sig->return_count());
auto params = vec<ValType*>::make_uninitialized(param_arity);
@@ -1736,7 +1736,7 @@
i::Handle<i::WasmExportedFunction> function =
i::Handle<i::WasmExportedFunction>::cast(func);
i::wasm::FunctionSig* sig =
- function->instance()->module()->functions[function->function_index()].sig;
+ function->instance().module()->functions[function->function_index()].sig;
return sig->parameter_count();
}
@@ -1749,7 +1749,7 @@
i::Handle<i::WasmExportedFunction> function =
i::Handle<i::WasmExportedFunction>::cast(func);
i::wasm::FunctionSig* sig =
- function->instance()->module()->functions[function->function_index()].sig;
+ function->instance().module()->functions[function->function_index()].sig;
return sig->return_count();
}
@@ -1767,7 +1767,7 @@
if (i::WasmExportedFunction::IsWasmExportedFunction(*v8_func)) {
i::WasmExportedFunction wef = i::WasmExportedFunction::cast(*v8_func);
i::wasm::FunctionSig* sig =
- wef->instance()->module()->functions[wef->function_index()].sig;
+ wef.instance().module()->functions[wef.function_index()].sig;
num_params = static_cast<int>(sig->parameter_count());
num_results = static_cast<int>(sig->return_count());
if (num_results > 0) {
@@ -1862,7 +1862,7 @@
params[i] = Val(nullptr);
} else {
i::JSReceiver raw_obj = i::JSReceiver::cast(i::Object(raw));
- i::Handle<i::JSReceiver> obj(raw_obj, raw_obj->GetIsolate());
+ i::Handle<i::JSReceiver> obj(raw_obj, raw_obj.GetIsolate());
params[i] = Val(implement<Ref>::type::make(impl(self->store), obj));
}
break;
@@ -1882,7 +1882,7 @@
isolate->Throw(*impl(trap.get())->v8_object());
i::Object ex = isolate->pending_exception();
isolate->clear_pending_exception();
- return ex->ptr();
+ return ex.ptr();
}
p = argv;
@@ -2062,7 +2062,7 @@
// This doesn't call WasmTableObject::Set because the table has
// just been created, so it can't be imported by any instances
// yet that might require updating.
- DCHECK_EQ(table_obj->dispatch_tables()->length(), 0);
+ DCHECK_EQ(table_obj->dispatch_tables().length(), 0);
backing_store->set(i, *init);
}
}
@@ -2073,7 +2073,7 @@
i::Handle<i::WasmTableObject> table = impl(this)->v8_object();
uint32_t min = table->current_length();
uint32_t max;
- if (!table->maximum_length()->ToUint32(&max)) max = 0xFFFFFFFFu;
+ if (!table->maximum_length().ToUint32(&max)) max = 0xFFFFFFFFu;
// TODO(wasm+): support new element types.
return TableType::make(ValType::make(FUNCREF), Limits(min, max));
}
@@ -2162,7 +2162,7 @@
auto Memory::type() const -> own<MemoryType*> {
i::Handle<i::WasmMemoryObject> memory = impl(this)->v8_object();
- uint32_t min = static_cast<uint32_t>(memory->array_buffer()->byte_length() /
+ uint32_t min = static_cast<uint32_t>(memory->array_buffer().byte_length() /
i::wasm::kWasmPageSize);
uint32_t max =
memory->has_maximum_pages() ? memory->maximum_pages() : 0xFFFFFFFFu;
@@ -2171,16 +2171,16 @@
auto Memory::data() const -> byte_t* {
return reinterpret_cast<byte_t*>(
- impl(this)->v8_object()->array_buffer()->backing_store());
+ impl(this)->v8_object()->array_buffer().backing_store());
}
auto Memory::data_size() const -> size_t {
- return impl(this)->v8_object()->array_buffer()->byte_length();
+ return impl(this)->v8_object()->array_buffer().byte_length();
}
auto Memory::size() const -> pages_t {
return static_cast<pages_t>(
- impl(this)->v8_object()->array_buffer()->byte_length() /
+ impl(this)->v8_object()->array_buffer().byte_length() /
i::wasm::kWasmPageSize);
}
diff --git a/src/wasm/module-compiler.cc b/src/wasm/module-compiler.cc
index b022733..c98b7fa 100644
--- a/src/wasm/module-compiler.cc
+++ b/src/wasm/module-compiler.cc
@@ -868,8 +868,8 @@
namespace {
void RecordStats(const Code code, Counters* counters) {
- counters->wasm_generated_code_size()->Increment(code->body_size());
- counters->wasm_reloc_size()->Increment(code->relocation_info()->length());
+ counters->wasm_generated_code_size()->Increment(code.body_size());
+ counters->wasm_reloc_size()->Increment(code.relocation_info().length());
}
constexpr int kMainThreadTaskId = -1;
diff --git a/src/wasm/module-instantiate.cc b/src/wasm/module-instantiate.cc
index e88dfd0..2cf3f74 100644
--- a/src/wasm/module-instantiate.cc
+++ b/src/wasm/module-instantiate.cc
@@ -38,10 +38,9 @@
case WasmInitExpr::kGlobalIndex: {
uint32_t offset =
instance->module()->globals[expr.val.global_index].offset;
- auto raw_addr =
- reinterpret_cast<Address>(
- instance->untagged_globals_buffer()->backing_store()) +
- offset;
+ auto raw_addr = reinterpret_cast<Address>(
+ instance->untagged_globals_buffer().backing_store()) +
+ offset;
return ReadLittleEndianValue<uint32_t>(raw_addr);
}
default:
@@ -429,10 +428,10 @@
uint32_t base = EvalUint32InitExpr(instance, elem_segment.offset);
// Because of imported tables, {table_size} has to come from the table
// object itself.
- auto table_object = handle(WasmTableObject::cast(instance->tables()->get(
+ auto table_object = handle(WasmTableObject::cast(instance->tables().get(
elem_segment.table_index)),
isolate_);
- size_t table_size = table_object->entries()->length();
+ size_t table_size = table_object->entries().length();
if (!IsInBounds(base, elem_segment.entries.size(), table_size)) {
thrower_->LinkError("table initializer is out of bounds");
return {};
@@ -802,9 +801,8 @@
break;
}
case compiler::WasmImportCallKind::kWasmToCapi: {
- NativeModule* native_module = instance->module_object()->native_module();
- Address host_address =
- WasmCapiFunction::cast(*value)->GetHostCallTarget();
+ NativeModule* native_module = instance->module_object().native_module();
+ Address host_address = WasmCapiFunction::cast(*value).GetHostCallTarget();
WasmCodeRefScope code_ref_scope;
WasmCode* wasm_code = compiler::CompileWasmCapiCallWrapper(
isolate_->wasm_engine(), native_module, expected_sig, host_address);
@@ -821,7 +819,7 @@
}
default: {
// The imported function is a callable.
- NativeModule* native_module = instance->module_object()->native_module();
+ NativeModule* native_module = instance->module_object().native_module();
WasmCode* wasm_code = native_module->import_wrapper_cache()->GetOrCompile(
isolate_->wasm_engine(), isolate_->counters(), kind, expected_sig);
ImportedFunctionEntry entry(instance, func_index);
@@ -869,7 +867,7 @@
Handle<WasmInstanceObject> target_instance =
maybe_target_instance.ToHandleChecked();
FunctionSig* sig = target_instance->module_object()
- ->module()
+ .module()
->functions[function_index]
.sig;
@@ -895,7 +893,7 @@
}
const WasmTable& table = module_->tables[table_index];
- instance->tables()->set(table_index, *value);
+ instance->tables().set(table_index, *value);
auto table_object = Handle<WasmTableObject>::cast(value);
int imported_table_size = table_object->entries().length();
@@ -906,12 +904,12 @@
}
if (table.has_maximum_size) {
- if (table_object->maximum_length()->IsUndefined(isolate_)) {
+ if (table_object->maximum_length().IsUndefined(isolate_)) {
thrower_->LinkError("table import %d has no maximum length, expected %d",
import_index, table.maximum_size);
return false;
}
- int64_t imported_maximum_size = table_object->maximum_length()->Number();
+ int64_t imported_maximum_size = table_object->maximum_length().Number();
if (imported_maximum_size < 0) {
thrower_->LinkError("table import %d has no maximum length, expected %d",
import_index, table.maximum_size);
@@ -1031,7 +1029,7 @@
address_or_offset = reinterpret_cast<Address>(raw_buffer_ptr(
Handle<JSArrayBuffer>::cast(buffer), global_object->offset()));
}
- instance->imported_mutable_globals_buffers()->set(global.index, *buffer);
+ instance->imported_mutable_globals_buffers().set(global.index, *buffer);
instance->imported_mutable_globals()[global.index] = address_or_offset;
return true;
}
@@ -1196,8 +1194,8 @@
return -1;
}
Object exception_tag = imported_exception->exception_tag();
- DCHECK(instance->exceptions_table()->get(import.index)->IsUndefined());
- instance->exceptions_table()->set(import.index, exception_tag);
+ DCHECK(instance->exceptions_table().get(import.index).IsUndefined());
+ instance->exceptions_table().set(import.index, exception_tag);
exception_wrappers_[import.index] = imported_exception;
break;
}
@@ -1387,7 +1385,7 @@
break;
}
case kExternalTable: {
- desc.set_value(handle(instance->tables()->get(exp.index), isolate_));
+ desc.set_value(handle(instance->tables().get(exp.index), isolate_));
break;
}
case kExternalMemory: {
@@ -1455,7 +1453,7 @@
Handle<WasmExceptionObject> wrapper = exception_wrappers_[exp.index];
if (wrapper.is_null()) {
Handle<HeapObject> exception_tag(
- HeapObject::cast(instance->exceptions_table()->get(exp.index)),
+ HeapObject::cast(instance->exceptions_table().get(exp.index)),
isolate_);
wrapper =
WasmExceptionObject::New(isolate_, exception.sig, exception_tag);
@@ -1509,7 +1507,7 @@
// for both instantiation and in the implementation of the table.init
// instruction.
bool ok =
- ClampToBounds<size_t>(dst, &count, table_object->entries()->length());
+ ClampToBounds<size_t>(dst, &count, table_object->entries().length());
// Use & instead of && so the clamp is not short-circuited.
ok &= ClampToBounds<size_t>(src, &count, elem_segment.entries.size());
@@ -1547,8 +1545,8 @@
WasmTableObject::SetFunctionTablePlaceholder(
isolate, table_object, entry_index, instance, func_index);
} else {
- table_object->entries()->set(entry_index,
- *wasm_exported_function.ToHandleChecked());
+ table_object->entries().set(entry_index,
+ *wasm_exported_function.ToHandleChecked());
}
// UpdateDispatchTables() updates all other dispatch tables, since
// we have not yet added the dispatch table we are currently building.
@@ -1570,7 +1568,7 @@
bool success = LoadElemSegmentImpl(
isolate_, instance,
handle(WasmTableObject::cast(
- instance->tables()->get(elem_segment.table_index)),
+ instance->tables().get(elem_segment.table_index)),
isolate_),
elem_segment, dst, src, count);
if (enabled_.bulk_memory) {
@@ -1590,7 +1588,7 @@
for (int index = 0; index < table_count; ++index) {
if (module_->tables[index].type == kWasmAnyFunc) {
auto table_object = handle(
- WasmTableObject::cast(instance->tables()->get(index)), isolate_);
+ WasmTableObject::cast(instance->tables().get(index)), isolate_);
// Add the new dispatch table at the end to avoid redundant lookups.
WasmTableObject::AddDispatchTable(isolate_, table_object, instance,
@@ -1603,7 +1601,7 @@
Handle<WasmInstanceObject> instance) {
Handle<FixedArray> exceptions_table(instance->exceptions_table(), isolate_);
for (int index = 0; index < exceptions_table->length(); ++index) {
- if (!exceptions_table->get(index)->IsUndefined(isolate_)) continue;
+ if (!exceptions_table->get(index).IsUndefined(isolate_)) continue;
Handle<WasmExceptionTag> exception_tag =
WasmExceptionTag::New(isolate_, index);
exceptions_table->set(index, *exception_tag);
@@ -1623,7 +1621,7 @@
auto& elem_segment = instance->module()->elem_segments[segment_index];
return LoadElemSegmentImpl(
isolate, instance,
- handle(WasmTableObject::cast(instance->tables()->get(table_index)),
+ handle(WasmTableObject::cast(instance->tables().get(table_index)),
isolate),
elem_segment, dst, src, count);
}
diff --git a/src/wasm/wasm-debug.cc b/src/wasm/wasm-debug.cc
index 40460bd..5090351 100644
--- a/src/wasm/wasm-debug.cc
+++ b/src/wasm/wasm-debug.cc
@@ -75,21 +75,21 @@
DCHECK_LE(0, local_index);
if (!debug_info->has_locals_names()) {
Handle<WasmModuleObject> module_object(
- debug_info->wasm_instance()->module_object(), isolate);
+ debug_info->wasm_instance().module_object(), isolate);
Handle<FixedArray> locals_names = DecodeLocalNames(isolate, module_object);
debug_info->set_locals_names(*locals_names);
}
Handle<FixedArray> locals_names(debug_info->locals_names(), isolate);
if (func_index >= locals_names->length() ||
- locals_names->get(func_index)->IsUndefined(isolate)) {
+ locals_names->get(func_index).IsUndefined(isolate)) {
return {};
}
Handle<FixedArray> func_locals_names(
FixedArray::cast(locals_names->get(func_index)), isolate);
if (local_index >= func_locals_names->length() ||
- func_locals_names->get(local_index)->IsUndefined(isolate)) {
+ func_locals_names->get(local_index).IsUndefined(isolate)) {
return {};
}
return handle(String::cast(func_locals_names->get(local_index)), isolate);
@@ -137,14 +137,14 @@
// Return raw pointer into heap. The WasmInterpreter will make its own copy
// of this data anyway, and there is no heap allocation in-between.
NativeModule* native_module =
- debug_info->wasm_instance()->module_object()->native_module();
+ debug_info.wasm_instance().module_object().native_module();
return ModuleWireBytes{native_module->wire_bytes()};
}
public:
InterpreterHandle(Isolate* isolate, Handle<WasmDebugInfo> debug_info)
: isolate_(isolate),
- module_(debug_info->wasm_instance()->module_object()->module()),
+ module_(debug_info->wasm_instance().module_object().module()),
interpreter_(isolate, module_, GetBytes(*debug_info),
handle(debug_info->wasm_instance(), isolate)) {}
@@ -263,8 +263,8 @@
// Check that this is indeed the instance which is connected to this
// interpreter.
DCHECK_EQ(this, Managed<InterpreterHandle>::cast(
- instance_obj->debug_info()->interpreter_handle())
- ->raw());
+ instance_obj->debug_info().interpreter_handle())
+ .raw());
return instance_obj;
}
@@ -367,7 +367,7 @@
Handle<String> name = isolate_->factory()->InternalizeOneByteString(
StaticCharVector("memory"));
Handle<JSArrayBuffer> memory_buffer(
- instance->memory_object()->array_buffer(), isolate_);
+ instance->memory_object().array_buffer(), isolate_);
Handle<JSTypedArray> uint8_array = isolate_->factory()->NewJSTypedArray(
kExternalUint8Array, memory_buffer, 0, memory_buffer->byte_length());
JSObject::SetOwnPropertyIgnoreAttributes(global_scope_object, name,
@@ -487,15 +487,15 @@
}
wasm::InterpreterHandle* GetInterpreterHandle(WasmDebugInfo debug_info) {
- Object handle_obj = debug_info->interpreter_handle();
- DCHECK(!handle_obj->IsUndefined());
- return Managed<wasm::InterpreterHandle>::cast(handle_obj)->raw();
+ Object handle_obj = debug_info.interpreter_handle();
+ DCHECK(!handle_obj.IsUndefined());
+ return Managed<wasm::InterpreterHandle>::cast(handle_obj).raw();
}
wasm::InterpreterHandle* GetInterpreterHandleOrNull(WasmDebugInfo debug_info) {
- Object handle_obj = debug_info->interpreter_handle();
- if (handle_obj->IsUndefined()) return nullptr;
- return Managed<wasm::InterpreterHandle>::cast(handle_obj)->raw();
+ Object handle_obj = debug_info.interpreter_handle();
+ if (handle_obj.IsUndefined()) return nullptr;
+ return Managed<wasm::InterpreterHandle>::cast(handle_obj).raw();
}
} // namespace
@@ -540,8 +540,7 @@
// Ensure that the interpreter is instantiated.
GetOrCreateInterpreterHandle(isolate, debug_info);
Handle<WasmInstanceObject> instance(debug_info->wasm_instance(), isolate);
- wasm::NativeModule* native_module =
- instance->module_object()->native_module();
+ wasm::NativeModule* native_module = instance->module_object().native_module();
const wasm::WasmModule* module = instance->module();
// We may modify the wasm jump table.
@@ -632,7 +631,7 @@
debug_info->set_c_wasm_entry_map(*managed_map);
}
Handle<FixedArray> entries(debug_info->c_wasm_entries(), isolate);
- wasm::SignatureMap* map = debug_info->c_wasm_entry_map()->raw();
+ wasm::SignatureMap* map = debug_info->c_wasm_entry_map().raw();
int32_t index = map->Find(*sig);
if (index == -1) {
index = static_cast<int32_t>(map->FindOrInsert(*sig));
@@ -641,7 +640,7 @@
entries, entries->length(), AllocationType::kOld);
debug_info->set_c_wasm_entries(*entries);
}
- DCHECK(entries->get(index)->IsUndefined(isolate));
+ DCHECK(entries->get(index).IsUndefined(isolate));
Handle<Code> new_entry_code =
compiler::CompileCWasmEntry(isolate, sig).ToHandleChecked();
Handle<WasmExportedFunctionData> function_data =
@@ -656,8 +655,8 @@
NewFunctionArgs args = NewFunctionArgs::ForWasm(
name, function_data, isolate->sloppy_function_map());
Handle<JSFunction> new_entry = isolate->factory()->NewFunction(args);
- new_entry->set_context(debug_info->wasm_instance()->native_context());
- new_entry->shared()->set_internal_formal_parameter_count(
+ new_entry->set_context(debug_info->wasm_instance().native_context());
+ new_entry->shared().set_internal_formal_parameter_count(
compiler::CWasmEntryParameters::kNumParameters);
entries->set(index, *new_entry);
}
diff --git a/src/wasm/wasm-engine.cc b/src/wasm/wasm-engine.cc
index 9f59e47..eadd80c 100644
--- a/src/wasm/wasm-engine.cc
+++ b/src/wasm/wasm-engine.cc
@@ -262,7 +262,7 @@
Isolate* isolate, Handle<AsmWasmData> asm_wasm_data,
Handle<Script> script) {
std::shared_ptr<NativeModule> native_module =
- asm_wasm_data->managed_native_module()->get();
+ asm_wasm_data->managed_native_module().get();
Handle<FixedArray> export_wrappers =
handle(asm_wasm_data->export_wrappers(), isolate);
size_t code_size_estimate =
diff --git a/src/wasm/wasm-interpreter.cc b/src/wasm/wasm-interpreter.cc
index f7c6d23..997d587 100644
--- a/src/wasm/wasm-interpreter.cc
+++ b/src/wasm/wasm-interpreter.cc
@@ -1351,7 +1351,7 @@
if (IsReferenceValue()) {
value_ = WasmValue(Handle<Object>::null());
int ref_index = static_cast<int>(index);
- thread->reference_stack()->set(ref_index, *v.to_anyref());
+ thread->reference_stack().set(ref_index, *v.to_anyref());
}
}
@@ -1360,7 +1360,7 @@
DCHECK(value_.to_anyref().is_null());
int ref_index = static_cast<int>(index);
Isolate* isolate = thread->isolate_;
- Handle<Object> ref(thread->reference_stack()->get(ref_index), isolate);
+ Handle<Object> ref(thread->reference_stack().get(ref_index), isolate);
DCHECK(!ref->IsTheHole(isolate));
return WasmValue(ref);
}
@@ -1371,18 +1371,18 @@
if (!IsReferenceValue()) return;
int ref_index = static_cast<int>(index);
Isolate* isolate = thread->isolate_;
- thread->reference_stack()->set_the_hole(isolate, ref_index);
+ thread->reference_stack().set_the_hole(isolate, ref_index);
}
static void ClearValues(ThreadImpl* thread, sp_t index, int count) {
int ref_index = static_cast<int>(index);
- thread->reference_stack()->FillWithHoles(ref_index, ref_index + count);
+ thread->reference_stack().FillWithHoles(ref_index, ref_index + count);
}
static bool IsClearedValue(ThreadImpl* thread, sp_t index) {
int ref_index = static_cast<int>(index);
Isolate* isolate = thread->isolate_;
- return thread->reference_stack()->is_the_hole(isolate, ref_index);
+ return thread->reference_stack().is_the_hole(isolate, ref_index);
}
private:
@@ -2105,7 +2105,7 @@
if (global->mutability && global->imported) {
*buffer =
handle(FixedArray::cast(
- instance_object_->imported_mutable_globals_buffers()->get(
+ instance_object_->imported_mutable_globals_buffers().get(
global->index)),
isolate_);
Address idx = instance_object_->imported_mutable_globals()[global->index];
@@ -2538,8 +2538,7 @@
uint32_t index) V8_WARN_UNUSED_RESULT {
HandleScope handle_scope(isolate_); // Avoid leaking handles.
Handle<WasmExceptionTag> exception_tag(
- WasmExceptionTag::cast(
- instance_object_->exceptions_table()->get(index)),
+ WasmExceptionTag::cast(instance_object_->exceptions_table().get(index)),
isolate_);
uint32_t encoded_size = WasmExceptionPackage::GetEncodedSize(exception);
Handle<Object> exception_object =
@@ -2614,7 +2613,7 @@
Handle<Object> caught_tag =
WasmExceptionPackage::GetExceptionTag(isolate_, exception_object);
Handle<Object> expected_tag =
- handle(instance_object_->exceptions_table()->get(index), isolate_);
+ handle(instance_object_->exceptions_table().get(index), isolate_);
DCHECK(expected_tag->IsWasmExceptionTag());
return expected_tag.is_identical_to(caught_tag);
}
diff --git a/src/wasm/wasm-js.cc b/src/wasm/wasm-js.cc
index 7a1f040..b29d45a 100644
--- a/src/wasm/wasm-js.cc
+++ b/src/wasm/wasm-js.cc
@@ -1624,8 +1624,8 @@
return;
}
- if (!table->maximum_length()->IsUndefined()) {
- uint64_t max_size = table->maximum_length()->Number();
+ if (!table->maximum_length().IsUndefined()) {
+ uint64_t max_size = table->maximum_length().Number();
DCHECK_LE(max_size, std::numeric_limits<uint32_t>::max());
if (!ret->CreateDataProperty(isolate->GetCurrentContext(),
v8_str(isolate, "maximum"),
@@ -1945,7 +1945,7 @@
Handle<FunctionTemplateInfo> temp = NewFunctionTemplate(isolate, func);
Handle<JSFunction> function =
ApiNatives::InstantiateFunction(temp, name).ToHandleChecked();
- DCHECK(function->shared()->HasSharedName());
+ DCHECK(function->shared().HasSharedName());
return function;
}
@@ -1955,7 +1955,7 @@
PropertyAttributes attributes = NONE) {
Handle<String> name = v8_str(isolate, str);
Handle<JSFunction> function = CreateFunc(isolate, name, func);
- function->shared()->set_length(length);
+ function->shared().set_length(length);
JSObject::AddProperty(isolate, object, name, function, attributes);
return function;
}
@@ -1996,7 +1996,7 @@
CreateFunc(isolate, GetterName(isolate, name), getter);
Handle<JSFunction> setter_func =
CreateFunc(isolate, SetterName(isolate, name), setter);
- setter_func->shared()->set_length(1);
+ setter_func->shared().set_length(1);
v8::PropertyAttribute attributes = v8::None;
@@ -2012,7 +2012,7 @@
void SetDummyInstanceTemplate(Isolate* isolate, Handle<JSFunction> fun) {
Handle<ObjectTemplateInfo> instance_template = NewObjectTemplate(isolate);
FunctionTemplateInfo::SetInstanceTemplate(
- isolate, handle(fun->shared()->get_api_func_data(), isolate),
+ isolate, handle(fun->shared().get_api_func_data(), isolate),
instance_template);
}
@@ -2022,8 +2022,8 @@
Handle<Context> context(global->native_context(), isolate);
// Install the JS API once only.
Object prev = context->get(Context::WASM_MODULE_CONSTRUCTOR_INDEX);
- if (!prev->IsUndefined(isolate)) {
- DCHECK(prev->IsJSFunction());
+ if (!prev.IsUndefined(isolate)) {
+ DCHECK(prev.IsJSFunction());
return;
}
@@ -2193,7 +2193,7 @@
FUNCTION_WITHOUT_PROTOTYPE, MaybeHandle<JSFunction>());
CHECK(JSObject::SetPrototype(
function_proto,
- handle(context->function_function()->prototype(), isolate), false,
+ handle(context->function_function().prototype(), isolate), false,
kDontThrow)
.FromJust());
JSFunction::SetInitialMap(function_constructor, function_map,
diff --git a/src/wasm/wasm-memory.cc b/src/wasm/wasm-memory.cc
index 69deb90..599cc50 100644
--- a/src/wasm/wasm-memory.cc
+++ b/src/wasm/wasm-memory.cc
@@ -280,7 +280,7 @@
void WasmMemoryTracker::RegisterWasmMemoryAsShared(
Handle<WasmMemoryObject> object, Isolate* isolate) {
- const void* backing_store = object->array_buffer()->backing_store();
+ const void* backing_store = object->array_buffer().backing_store();
// TODO(V8:8810): This should be a DCHECK, currently some tests do not
// use a full WebAssembly.Memory, and fail on registering so return early.
if (!IsWasmMemory(backing_store)) return;
@@ -323,9 +323,9 @@
void WasmMemoryTracker::RegisterSharedWasmMemory_Locked(
Handle<WasmMemoryObject> object, Isolate* isolate) {
- DCHECK(object->array_buffer()->is_shared());
+ DCHECK(object->array_buffer().is_shared());
- void* backing_store = object->array_buffer()->backing_store();
+ void* backing_store = object->array_buffer().backing_store();
// The allocation of a WasmMemoryObject should always be registered with the
// WasmMemoryTracker.
const auto& result = allocations_.find(backing_store);
@@ -426,11 +426,11 @@
HandleScope scope(isolate);
Handle<WasmMemoryObject> memory_object = memory_obj_state.memory_object;
DCHECK(memory_object->IsWasmMemoryObject());
- DCHECK(memory_object->array_buffer()->is_shared());
+ DCHECK(memory_object->array_buffer().is_shared());
// Permissions adjusted, but create a new buffer with new size
// and old attributes. Buffer has already been allocated,
// just create a new buffer with same backing store.
- bool is_external = memory_object->array_buffer()->is_external();
+ bool is_external = memory_object->array_buffer().is_external();
Handle<JSArrayBuffer> new_buffer = SetupArrayBuffer(
isolate, backing_store, new_size, is_external, SharedFlag::kShared);
memory_obj_state.memory_object->update_instances(isolate, new_buffer);
diff --git a/src/wasm/wasm-objects-inl.h b/src/wasm/wasm-objects-inl.h
index d9c42d1..9cc8fc9 100644
--- a/src/wasm/wasm-objects-inl.h
+++ b/src/wasm/wasm-objects-inl.h
@@ -53,7 +53,7 @@
#define OPTIONAL_ACCESSORS(holder, name, type, offset) \
bool holder::has_##name() { \
- return !READ_FIELD(*this, offset)->IsUndefined(); \
+ return !READ_FIELD(*this, offset).IsUndefined(); \
} \
ACCESSORS(holder, name, type, offset)
@@ -95,11 +95,11 @@
OPTIONAL_ACCESSORS(WasmModuleObject, breakpoint_infos, FixedArray,
kBreakPointInfosOffset)
wasm::NativeModule* WasmModuleObject::native_module() const {
- return managed_native_module()->raw();
+ return managed_native_module().raw();
}
const std::shared_ptr<wasm::NativeModule>&
WasmModuleObject::shared_native_module() const {
- return managed_native_module()->get();
+ return managed_native_module().get();
}
const wasm::WasmModule* WasmModuleObject::module() const {
// TODO(clemensh): Remove this helper (inline in callers).
@@ -111,7 +111,7 @@
}
bool WasmModuleObject::is_asm_js() {
bool asm_js = module()->origin == wasm::kAsmJsOrigin;
- DCHECK_EQ(asm_js, script()->IsUserJavaScript());
+ DCHECK_EQ(asm_js, script().IsUserJavaScript());
DCHECK_EQ(asm_js, has_asm_js_offset_table());
return asm_js;
}
@@ -143,8 +143,8 @@
Address WasmGlobalObject::address() const {
DCHECK_NE(type(), wasm::kWasmAnyRef);
- DCHECK_LE(offset() + type_size(), untagged_buffer()->byte_length());
- return Address(untagged_buffer()->backing_store()) + offset();
+ DCHECK_LE(offset() + type_size(), untagged_buffer().byte_length());
+ return Address(untagged_buffer().backing_store()) + offset();
}
int32_t WasmGlobalObject::GetI32() {
@@ -166,7 +166,7 @@
Handle<Object> WasmGlobalObject::GetRef() {
// We use this getter for anyref, anyfunc, and except_ref.
DCHECK(wasm::ValueTypes::IsReferenceType(type()));
- return handle(tagged_buffer()->get(offset()), GetIsolate());
+ return handle(tagged_buffer().get(offset()), GetIsolate());
}
void WasmGlobalObject::SetI32(int32_t value) {
@@ -188,7 +188,7 @@
void WasmGlobalObject::SetAnyRef(Handle<Object> value) {
// We use this getter anyref and except_ref.
DCHECK(type() == wasm::kWasmAnyRef || type() == wasm::kWasmExceptRef);
- tagged_buffer()->set(offset(), *value);
+ tagged_buffer().set(offset(), *value);
}
bool WasmGlobalObject::SetAnyFunc(Isolate* isolate, Handle<Object> value) {
@@ -197,7 +197,7 @@
!WasmExportedFunction::IsWasmExportedFunction(*value)) {
return false;
}
- tagged_buffer()->set(offset(), *value);
+ tagged_buffer().set(offset(), *value);
return true;
}
@@ -339,7 +339,7 @@
#undef WRITE_PRIMITIVE_FIELD
#undef PRIMITIVE_ACCESSORS
-uint32_t WasmTableObject::current_length() { return entries()->length(); }
+uint32_t WasmTableObject::current_length() { return entries().length(); }
wasm::ValueType WasmTableObject::type() {
return static_cast<wasm::ValueType>(raw_type());
diff --git a/src/wasm/wasm-objects.cc b/src/wasm/wasm-objects.cc
index 8bede4b..784f231 100644
--- a/src/wasm/wasm-objects.cc
+++ b/src/wasm/wasm-objects.cc
@@ -169,8 +169,8 @@
WasmInstanceNativeAllocations* GetNativeAllocations(
WasmInstanceObject instance) {
return Managed<WasmInstanceNativeAllocations>::cast(
- instance->managed_native_allocations())
- ->raw();
+ instance.managed_native_allocations())
+ .raw();
}
#ifdef DEBUG
@@ -309,8 +309,8 @@
namespace {
int GetBreakpointPos(Isolate* isolate, Object break_point_info_or_undef) {
- if (break_point_info_or_undef->IsUndefined(isolate)) return kMaxInt;
- return BreakPointInfo::cast(break_point_info_or_undef)->source_position();
+ if (break_point_info_or_undef.IsUndefined(isolate)) return kMaxInt;
+ return BreakPointInfo::cast(break_point_info_or_undef).source_position();
}
int FindBreakpointInfoInsertPos(Isolate* isolate,
@@ -367,7 +367,7 @@
// Enlarge break positions array if necessary.
bool need_realloc = !breakpoint_infos->get(breakpoint_infos->length() - 1)
- ->IsUndefined(isolate);
+ .IsUndefined(isolate);
Handle<FixedArray> new_breakpoint_infos = breakpoint_infos;
if (need_realloc) {
new_breakpoint_infos = isolate->factory()->NewFixedArray(
@@ -381,7 +381,7 @@
// Move elements [insert_pos, ...] up by one.
for (int i = breakpoint_infos->length() - 1; i >= insert_pos; --i) {
Object entry = breakpoint_infos->get(i);
- if (entry->IsUndefined(isolate)) continue;
+ if (entry.IsUndefined(isolate)) continue;
new_breakpoint_infos->set(i + 1, entry);
}
@@ -411,7 +411,7 @@
Handle<Object> obj(breakpoint_infos->get(i), isolate);
if (obj->IsUndefined(isolate)) {
for (; i < e; ++i) {
- DCHECK(breakpoint_infos->get(i)->IsUndefined(isolate));
+ DCHECK(breakpoint_infos->get(i).IsUndefined(isolate));
}
break;
}
@@ -758,7 +758,7 @@
bool WasmModuleObject::GetPositionInfo(uint32_t position,
Script::PositionInfo* info) {
- if (script()->source_mapping_url()->IsString()) {
+ if (script().source_mapping_url().IsString()) {
if (module()->functions.size() == 0) return false;
info->line = 0;
info->column = position;
@@ -842,7 +842,7 @@
// Check if growing by {count} is valid.
uint32_t max_size;
- if (!table->maximum_length()->ToUint32(&max_size)) {
+ if (!table->maximum_length().ToUint32(&max_size)) {
max_size = FLAG_wasm_max_table_size;
}
DCHECK_LE(old_size, max_size);
@@ -864,7 +864,7 @@
for (int i = 0; i < dispatch_tables->length();
i += kDispatchTableNumElements) {
int table_index =
- Smi::cast(dispatch_tables->get(i + kDispatchTableIndexOffset))->value();
+ Smi::cast(dispatch_tables->get(i + kDispatchTableIndexOffset)).value();
if (table_index > 0) {
continue;
}
@@ -887,7 +887,7 @@
uint32_t entry_index) {
return (entry_index <
static_cast<uint32_t>(std::numeric_limits<int>::max()) &&
- static_cast<int>(entry_index) < table->entries()->length());
+ static_cast<int>(entry_index) < table->entries().length());
}
bool WasmTableObject::IsValidElement(Isolate* isolate,
@@ -983,9 +983,9 @@
uint32_t start, Handle<Object> entry,
uint32_t count) {
// Bounds checks must be done by the caller.
- DCHECK_LE(start, table->entries()->length());
- DCHECK_LE(count, table->entries()->length());
- DCHECK_LE(start + count, table->entries()->length());
+ DCHECK_LE(start, table->entries().length());
+ DCHECK_LE(count, table->entries().length());
+ DCHECK_LE(start + count, table->entries().length());
for (uint32_t i = 0; i < count; i++) {
WasmTableObject::Set(isolate, table, start + i, entry);
@@ -1004,7 +1004,7 @@
for (int i = 0; i < dispatch_tables->length();
i += kDispatchTableNumElements) {
int table_index =
- Smi::cast(dispatch_tables->get(i + kDispatchTableIndexOffset))->value();
+ Smi::cast(dispatch_tables->get(i + kDispatchTableIndexOffset)).value();
if (table_index > 0) {
// Only table 0 has a dispatch table in the instance at the moment.
// TODO(ahaas): Introduce dispatch tables for the other tables as well.
@@ -1034,16 +1034,16 @@
// TODO(jkummerow): Unify with "SignatureHelper" in c-api.cc.
PodArray<wasm::ValueType> serialized_sig =
capi_function->GetSerializedSignature();
- int total_count = serialized_sig->length() - 1;
+ int total_count = serialized_sig.length() - 1;
std::unique_ptr<wasm::ValueType[]> reps(new wasm::ValueType[total_count]);
int result_count;
static const wasm::ValueType kMarker = wasm::kWasmStmt;
for (int i = 0, j = 0; i <= total_count; i++) {
- if (serialized_sig->get(i) == kMarker) {
+ if (serialized_sig.get(i) == kMarker) {
result_count = i;
continue;
}
- reps[j++] = serialized_sig->get(i);
+ reps[j++] = serialized_sig.get(i);
}
int param_count = total_count - result_count;
wasm::FunctionSig sig(result_count, param_count, reps.get());
@@ -1051,7 +1051,7 @@
for (int i = 0; i < dispatch_tables->length();
i += kDispatchTableNumElements) {
int table_index =
- Smi::cast(dispatch_tables->get(i + kDispatchTableIndexOffset))->value();
+ Smi::cast(dispatch_tables->get(i + kDispatchTableIndexOffset)).value();
if (table_index > 0) {
// Only table 0 has a dispatch table in the instance at the moment.
// TODO(ahaas): Introduce dispatch tables for the other tables as well.
@@ -1063,7 +1063,7 @@
isolate);
// TODO(jkummerow): Find a way to avoid recompiling wrappers.
wasm::NativeModule* native_module =
- instance->module_object()->native_module();
+ instance->module_object().native_module();
Address host_address = capi_function->GetHostCallTarget();
wasm::WasmCodeRefScope code_ref_scope;
wasm::WasmCode* wasm_code = compiler::CompileWasmCapiCallWrapper(
@@ -1090,7 +1090,7 @@
for (int i = 0; i < dispatch_tables->length();
i += kDispatchTableNumElements) {
int table_index =
- Smi::cast(dispatch_tables->get(i + kDispatchTableIndexOffset))->value();
+ Smi::cast(dispatch_tables->get(i + kDispatchTableIndexOffset)).value();
if (table_index > 0) {
// Only table 0 has a dispatch table in the instance at the moment.
continue;
@@ -1112,7 +1112,7 @@
Handle<Tuple2> tuple = isolate->factory()->NewTuple2(
instance, Handle<Smi>(Smi::FromInt(func_index), isolate),
AllocationType::kYoung);
- table->entries()->set(entry_index, *tuple);
+ table->entries().set(entry_index, *tuple);
}
void WasmTableObject::GetFunctionTableEntry(
@@ -1120,10 +1120,10 @@
bool* is_valid, bool* is_null, MaybeHandle<WasmInstanceObject>* instance,
int* function_index) {
DCHECK_EQ(table->type(), wasm::kWasmAnyFunc);
- DCHECK_LT(entry_index, table->entries()->length());
+ DCHECK_LT(entry_index, table->entries().length());
// We initialize {is_valid} with {true}. We may change it later.
*is_valid = true;
- Handle<Object> element(table->entries()->get(entry_index), isolate);
+ Handle<Object> element(table->entries().get(entry_index), isolate);
*is_null = element->IsNull(isolate);
if (*is_null) return;
@@ -1423,7 +1423,7 @@
void IndirectFunctionTableEntry::clear() {
instance_->indirect_function_table_sig_ids()[index_] = -1;
instance_->indirect_function_table_targets()[index_] = 0;
- instance_->indirect_function_table_refs()->set(
+ instance_->indirect_function_table_refs().set(
index_, ReadOnlyRoots(instance_->GetIsolate()).undefined_value());
}
@@ -1457,11 +1457,11 @@
Object ref) {
instance_->indirect_function_table_sig_ids()[index_] = sig_id;
instance_->indirect_function_table_targets()[index_] = call_target;
- instance_->indirect_function_table_refs()->set(index_, ref);
+ instance_->indirect_function_table_refs().set(index_, ref);
}
Object IndirectFunctionTableEntry::object_ref() {
- return instance_->indirect_function_table_refs()->get(index_);
+ return instance_->indirect_function_table_refs().get(index_);
}
int IndirectFunctionTableEntry::sig_id() {
@@ -1478,8 +1478,8 @@
that.instance_->indirect_function_table_sig_ids()[that.index_];
instance_->indirect_function_table_targets()[index_] =
that.instance_->indirect_function_table_targets()[that.index_];
- instance_->indirect_function_table_refs()->set(
- index_, that.instance_->indirect_function_table_refs()->get(that.index_));
+ instance_->indirect_function_table_refs().set(
+ index_, that.instance_->indirect_function_table_refs().get(that.index_));
}
void ImportedFunctionEntry::SetWasmToJs(
@@ -1493,7 +1493,7 @@
wasm_to_js_wrapper->kind() == wasm::WasmCode::kWasmToCapiWrapper);
Handle<Tuple2> tuple =
isolate->factory()->NewTuple2(instance_, callable, AllocationType::kOld);
- instance_->imported_function_refs()->set(index_, *tuple);
+ instance_->imported_function_refs().set(index_, *tuple);
instance_->imported_function_targets()[index_] =
wasm_to_js_wrapper->instruction_start();
}
@@ -1502,28 +1502,28 @@
Address call_target) {
TRACE_IFT("Import WASM %p[%d] = {instance=%p, target=%" PRIuPTR "}\n",
reinterpret_cast<void*>(instance_->ptr()), index_,
- reinterpret_cast<void*>(instance->ptr()), call_target);
- instance_->imported_function_refs()->set(index_, instance);
+ reinterpret_cast<void*>(instance.ptr()), call_target);
+ instance_->imported_function_refs().set(index_, instance);
instance_->imported_function_targets()[index_] = call_target;
}
WasmInstanceObject ImportedFunctionEntry::instance() {
// The imported reference entry is either a target instance or a tuple
// of this instance and the target callable.
- Object value = instance_->imported_function_refs()->get(index_);
- if (value->IsWasmInstanceObject()) {
+ Object value = instance_->imported_function_refs().get(index_);
+ if (value.IsWasmInstanceObject()) {
return WasmInstanceObject::cast(value);
}
Tuple2 tuple = Tuple2::cast(value);
- return WasmInstanceObject::cast(tuple->value1());
+ return WasmInstanceObject::cast(tuple.value1());
}
JSReceiver ImportedFunctionEntry::callable() {
- return JSReceiver::cast(Tuple2::cast(object_ref())->value2());
+ return JSReceiver::cast(Tuple2::cast(object_ref()).value2());
}
Object ImportedFunctionEntry::object_ref() {
- return instance_->imported_function_refs()->get(index_);
+ return instance_->imported_function_refs().get(index_);
}
Address ImportedFunctionEntry::target() {
@@ -1569,7 +1569,7 @@
}
const WasmModule* WasmInstanceObject::module() {
- return module_object()->module();
+ return module_object().module();
}
Handle<WasmDebugInfo> WasmInstanceObject::GetOrCreateDebugInfo(
@@ -1688,7 +1688,7 @@
}
Address WasmInstanceObject::GetCallTarget(uint32_t func_index) {
- wasm::NativeModule* native_module = module_object()->native_module();
+ wasm::NativeModule* native_module = module_object().native_module();
if (func_index < native_module->num_imported_functions()) {
return imported_function_targets()[func_index];
}
@@ -1722,10 +1722,10 @@
uint32_t table_dst_index,
uint32_t dst, uint32_t src,
uint32_t count) {
- if (static_cast<int>(table_dst_index) >= instance->tables()->length()) {
+ if (static_cast<int>(table_dst_index) >= instance->tables().length()) {
return false;
}
- if (static_cast<int>(table_src_index) >= instance->tables()->length()) {
+ if (static_cast<int>(table_src_index) >= instance->tables().length()) {
return false;
}
@@ -1746,7 +1746,7 @@
// TODO(titzer): multiple tables in TableCopy
auto table = handle(
- WasmTableObject::cast(instance->tables()->get(table_src_index)), isolate);
+ WasmTableObject::cast(instance->tables().get(table_src_index)), isolate);
// Broadcast table copy operation to all instances that import this table.
Handle<FixedArray> dispatch_tables(table->dispatch_tables(), isolate);
for (int i = 0; i < dispatch_tables->length();
@@ -1760,17 +1760,17 @@
// Copy the function entries.
auto dst_table = handle(
- WasmTableObject::cast(instance->tables()->get(table_dst_index)), isolate);
+ WasmTableObject::cast(instance->tables().get(table_dst_index)), isolate);
auto src_table = handle(
- WasmTableObject::cast(instance->tables()->get(table_src_index)), isolate);
+ WasmTableObject::cast(instance->tables().get(table_src_index)), isolate);
if (copy_backward) {
for (uint32_t i = count; i > 0; i--) {
- dst_table->entries()->set(dst + i - 1,
- src_table->entries()->get(src + i - 1));
+ dst_table->entries().set(dst + i - 1,
+ src_table->entries().get(src + i - 1));
}
} else {
for (uint32_t i = 0; i < count; i++) {
- dst_table->entries()->set(dst + i, src_table->entries()->get(src + i));
+ dst_table->entries().set(dst + i, src_table->entries().get(src + i));
}
}
return ok;
@@ -1793,8 +1793,8 @@
Isolate* isolate, Handle<WasmInstanceObject> instance, int index) {
MaybeHandle<WasmExportedFunction> result;
if (instance->has_wasm_exported_functions()) {
- Object val = instance->wasm_exported_functions()->get(index);
- if (!val->IsUndefined(isolate)) {
+ Object val = instance->wasm_exported_functions().get(index);
+ if (!val.IsUndefined(isolate)) {
result = Handle<WasmExportedFunction>(WasmExportedFunction::cast(val),
isolate);
}
@@ -1833,7 +1833,7 @@
wrapper = compiler::CompileJSToWasmWrapper(isolate, function.sig,
function.imported)
.ToHandleChecked();
- module_object->export_wrappers()->set(wrapper_index, *wrapper);
+ module_object->export_wrappers().set(wrapper_index, *wrapper);
}
result = WasmExportedFunction::New(
isolate, instance, function_index,
@@ -1892,9 +1892,9 @@
DCHECK_EQ(0, sig->return_count());
DCHECK_LE(sig->parameter_count(), std::numeric_limits<int>::max());
int sig_size = static_cast<int>(sig->parameter_count());
- if (sig_size != serialized_signature()->length()) return false;
+ if (sig_size != serialized_signature().length()) return false;
for (int index = 0; index < sig_size; ++index) {
- if (sig->GetParam(index) != serialized_signature()->get(index)) {
+ if (sig->GetParam(index) != serialized_signature().get(index)) {
return false;
}
}
@@ -1906,18 +1906,18 @@
int param_count = static_cast<int>(sig->parameter_count());
int result_count = static_cast<int>(sig->return_count());
PodArray<wasm::ValueType> serialized_sig =
- shared()->wasm_capi_function_data()->serialized_signature();
- if (param_count + result_count + 1 != serialized_sig->length()) return false;
+ shared().wasm_capi_function_data().serialized_signature();
+ if (param_count + result_count + 1 != serialized_sig.length()) return false;
int serialized_index = 0;
for (int i = 0; i < result_count; i++, serialized_index++) {
- if (sig->GetReturn(i) != serialized_sig->get(serialized_index)) {
+ if (sig->GetReturn(i) != serialized_sig.get(serialized_index)) {
return false;
}
}
- if (serialized_sig->get(serialized_index) != wasm::kWasmStmt) return false;
+ if (serialized_sig.get(serialized_index) != wasm::kWasmStmt) return false;
serialized_index++;
for (int i = 0; i < param_count; i++, serialized_index++) {
- if (sig->GetParam(i) != serialized_sig->get(serialized_index)) return false;
+ if (sig->GetParam(i) != serialized_sig.get(serialized_index)) return false;
}
return true;
}
@@ -2025,15 +2025,15 @@
}
bool WasmExportedFunction::IsWasmExportedFunction(Object object) {
- if (!object->IsJSFunction()) return false;
+ if (!object.IsJSFunction()) return false;
JSFunction js_function = JSFunction::cast(object);
- if (Code::JS_TO_WASM_FUNCTION != js_function->code()->kind()) return false;
- DCHECK(js_function->shared()->HasWasmExportedFunctionData());
+ if (Code::JS_TO_WASM_FUNCTION != js_function.code().kind()) return false;
+ DCHECK(js_function.shared().HasWasmExportedFunctionData());
return true;
}
bool WasmCapiFunction::IsWasmCapiFunction(Object object) {
- if (!object->IsJSFunction()) return false;
+ if (!object.IsJSFunction()) return false;
JSFunction js_function = JSFunction::cast(object);
// TODO(jkummerow): Enable this when there is a JavaScript wrapper
// able to call this function.
@@ -2042,7 +2042,7 @@
// }
// DCHECK(js_function->shared()->HasWasmCapiFunctionData());
// return true;
- return js_function->shared()->HasWasmCapiFunctionData();
+ return js_function.shared().HasWasmCapiFunctionData();
}
Handle<WasmCapiFunction> WasmCapiFunction::New(
@@ -2066,11 +2066,11 @@
}
WasmInstanceObject WasmExportedFunction::instance() {
- return shared()->wasm_exported_function_data()->instance();
+ return shared().wasm_exported_function_data().instance();
}
int WasmExportedFunction::function_index() {
- return shared()->wasm_exported_function_data()->function_index();
+ return shared().wasm_exported_function_data().function_index();
}
Handle<WasmExportedFunction> WasmExportedFunction::New(
@@ -2081,7 +2081,7 @@
int jump_table_offset = -1;
if (func_index >= num_imported_functions) {
ptrdiff_t jump_table_diff =
- instance->module_object()->native_module()->jump_table_offset(
+ instance->module_object().native_module()->jump_table_offset(
func_index);
DCHECK(jump_table_diff >= 0 && jump_table_diff <= INT_MAX);
jump_table_offset = static_cast<int>(jump_table_diff);
@@ -2110,7 +2110,7 @@
Vector<uint8_t>::cast(buffer.SubVector(0, length)))
.ToHandleChecked();
}
- bool is_asm_js_module = instance->module_object()->is_asm_js();
+ bool is_asm_js_module = instance->module_object().is_asm_js();
Handle<Map> function_map = is_asm_js_module
? isolate->sloppy_function_map()
: isolate->wasm_exported_function_map();
@@ -2120,25 +2120,25 @@
// According to the spec, exported functions should not have a [[Construct]]
// method. This does not apply to functions exported from asm.js however.
DCHECK_EQ(is_asm_js_module, js_function->IsConstructor());
- js_function->shared()->set_length(arity);
- js_function->shared()->set_internal_formal_parameter_count(arity);
+ js_function->shared().set_length(arity);
+ js_function->shared().set_internal_formal_parameter_count(arity);
return Handle<WasmExportedFunction>::cast(js_function);
}
Address WasmExportedFunction::GetWasmCallTarget() {
- return instance()->GetCallTarget(function_index());
+ return instance().GetCallTarget(function_index());
}
wasm::FunctionSig* WasmExportedFunction::sig() {
- return instance()->module()->functions[function_index()].sig;
+ return instance().module()->functions[function_index()].sig;
}
Address WasmCapiFunction::GetHostCallTarget() const {
- return shared()->wasm_capi_function_data()->call_target();
+ return shared().wasm_capi_function_data().call_target();
}
PodArray<wasm::ValueType> WasmCapiFunction::GetSerializedSignature() const {
- return shared()->wasm_capi_function_data()->serialized_signature();
+ return shared().wasm_capi_function_data().serialized_signature();
}
Handle<WasmExceptionTag> WasmExceptionTag::New(Isolate* isolate, int index) {
diff --git a/src/x64/assembler-x64-inl.h b/src/x64/assembler-x64-inl.h
index 6f4a823..f71fddc 100644
--- a/src/x64/assembler-x64-inl.h
+++ b/src/x64/assembler-x64-inl.h
@@ -248,7 +248,7 @@
void Assembler::deserialization_set_special_target_at(
Address instruction_payload, Code code, Address target) {
set_target_address_at(instruction_payload,
- !code.is_null() ? code->constant_pool() : kNullAddress,
+ !code.is_null() ? code.constant_pool() : kNullAddress,
target);
}
@@ -382,7 +382,7 @@
WriteUnalignedValue(pc_, tagged);
#endif // V8_COMPRESS_POINTERS
} else {
- WriteUnalignedValue(pc_, target->ptr());
+ WriteUnalignedValue(pc_, target.ptr());
}
if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
FlushInstructionCache(pc_, sizeof(Address));
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index 27be342..cfcd4ff 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -1109,7 +1109,7 @@
Register TurboAssembler::GetSmiConstant(Smi source) {
STATIC_ASSERT(kSmiTag == 0);
- int value = source->value();
+ int value = source.value();
if (value == 0) {
xorl(kScratchRegister, kScratchRegister);
return kScratchRegister;
@@ -1120,7 +1120,7 @@
void TurboAssembler::Move(Register dst, Smi source) {
STATIC_ASSERT(kSmiTag == 0);
- int value = source->value();
+ int value = source.value();
if (value == 0) {
xorl(dst, dst);
} else {
@@ -1185,7 +1185,7 @@
void MacroAssembler::Cmp(Register dst, Smi src) {
DCHECK_NE(dst, kScratchRegister);
- if (src->value() == 0) {
+ if (src.value() == 0) {
test_tagged(dst, dst);
} else {
Register constant_reg = GetSmiConstant(src);
@@ -1208,7 +1208,7 @@
void MacroAssembler::SmiCompare(Operand dst, Smi src) {
AssertSmi(dst);
if (SmiValuesAre32Bits()) {
- cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
+ cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src.value()));
} else {
DCHECK(SmiValuesAre31Bits());
cmpl(dst, Immediate(src));
@@ -1255,10 +1255,9 @@
}
void MacroAssembler::SmiAddConstant(Operand dst, Smi constant) {
- if (constant->value() != 0) {
+ if (constant.value() != 0) {
if (SmiValuesAre32Bits()) {
- addl(Operand(dst, kSmiShift / kBitsPerByte),
- Immediate(constant->value()));
+ addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant.value()));
} else {
DCHECK(SmiValuesAre31Bits());
if (kTaggedSize == kInt64Size) {
diff --git a/test/cctest/cctest.cc b/test/cctest/cctest.cc
index ebfb13e..de05b17 100644
--- a/test/cctest/cctest.cc
+++ b/test/cctest/cctest.cc
@@ -249,7 +249,7 @@
i::Handle<i::Code> code =
i::compiler::Pipeline::GenerateCodeForTesting(&info, isolate, out_broker)
.ToHandleChecked();
- info.native_context()->AddOptimizedCode(*code);
+ info.native_context().AddOptimizedCode(*code);
function->set_code(*code);
return function;
diff --git a/test/cctest/compiler/function-tester.cc b/test/cctest/compiler/function-tester.cc
index 3aebed0..7bb8d61 100644
--- a/test/cctest/compiler/function-tester.cc
+++ b/test/cctest/compiler/function-tester.cc
@@ -137,7 +137,7 @@
p = *f.function;
}
return Handle<JSFunction>(
- p, p->GetIsolate()); // allocated in outer handle scope.
+ p, p.GetIsolate()); // allocated in outer handle scope.
}
Handle<JSFunction> FunctionTester::Compile(Handle<JSFunction> function) {
diff --git a/test/cctest/compiler/test-code-assembler.cc b/test/cctest/compiler/test-code-assembler.cc
index d8dbc22..fd9b52f 100644
--- a/test/cctest/compiler/test-code-assembler.cc
+++ b/test/cctest/compiler/test-code-assembler.cc
@@ -529,7 +529,7 @@
result = ft.Call(isolate->factory()->undefined_value(),
isolate->factory()->to_string_tag_symbol())
.ToHandleChecked();
- CHECK(String::cast(*result)->IsOneByteEqualTo(OneByteVector("undefined")));
+ CHECK(String::cast(*result).IsOneByteEqualTo(OneByteVector("undefined")));
// First handler returns a number.
result = ft.Call(isolate->factory()->to_string_tag_symbol(),
diff --git a/test/cctest/compiler/test-code-generator.cc b/test/cctest/compiler/test-code-generator.cc
index 26522ae..dbc7b0c 100644
--- a/test/cctest/compiler/test-code-generator.cc
+++ b/test/cctest/compiler/test-code-generator.cc
@@ -250,7 +250,7 @@
switch (operand.representation()) {
case MachineRepresentation::kTagged:
if (value->IsSmi()) {
- os << Smi::cast(*value)->value();
+ os << Smi::cast(*value).value();
} else {
os << value->Number();
}
@@ -263,7 +263,7 @@
FixedArray vector = FixedArray::cast(*value);
os << "[";
for (int lane = 0; lane < 4; lane++) {
- os << Smi::cast(vector->get(lane))->value();
+ os << Smi::cast(vector.get(lane)).value();
if (lane < 3) {
os << ", ";
}
@@ -808,9 +808,9 @@
case MachineRepresentation::kSimd128:
for (int lane = 0; lane < 4; lane++) {
int actual_lane =
- Smi::cast(FixedArray::cast(*actual)->get(lane))->value();
+ Smi::cast(FixedArray::cast(*actual).get(lane)).value();
int expected_lane =
- Smi::cast(FixedArray::cast(*expected)->get(lane))->value();
+ Smi::cast(FixedArray::cast(*expected).get(lane)).value();
if (actual_lane != expected_lane) {
return false;
}
diff --git a/test/cctest/compiler/test-run-bytecode-graph-builder.cc b/test/cctest/compiler/test-run-bytecode-graph-builder.cc
index bc1f6c1..f2fac48 100644
--- a/test/cctest/compiler/test-run-bytecode-graph-builder.cc
+++ b/test/cctest/compiler/test-run-bytecode-graph-builder.cc
@@ -116,7 +116,7 @@
Handle<JSFunction> function =
Handle<JSFunction>::cast(v8::Utils::OpenHandle(*api_function));
JSFunction::EnsureFeedbackVector(function);
- CHECK(function->shared()->HasBytecodeArray());
+ CHECK(function->shared().HasBytecodeArray());
Zone zone(isolate_->allocator(), ZONE_NAME);
Handle<SharedFunctionInfo> shared(function->shared(), isolate_);
diff --git a/test/cctest/compiler/test-run-jscalls.cc b/test/cctest/compiler/test-run-jscalls.cc
index b1e9ddf..04cf6d2 100644
--- a/test/cctest/compiler/test-run-jscalls.cc
+++ b/test/cctest/compiler/test-run-jscalls.cc
@@ -165,7 +165,7 @@
TEST(EvalCall) {
FunctionTester T("(function(a,b) { return eval(a); })");
- Handle<JSObject> g(T.function->context()->global_object()->global_proxy(),
+ Handle<JSObject> g(T.function->context().global_object().global_proxy(),
T.isolate);
T.CheckCall(T.Val(23), T.Val("17 + 6"), T.undefined());
@@ -190,7 +190,7 @@
// patches an undefined receiver to the global receiver. If this starts to
// fail once we fix the calling protocol, just remove this test.
FunctionTester T("(function(a) { return this; })");
- Handle<JSObject> g(T.function->context()->global_object()->global_proxy(),
+ Handle<JSObject> g(T.function->context().global_object().global_proxy(),
T.isolate);
T.CheckCall(g, T.undefined());
}
diff --git a/test/cctest/compiler/test-run-jsobjects.cc b/test/cctest/compiler/test-run-jsobjects.cc
index 5b0fe84..d31af54 100644
--- a/test/cctest/compiler/test-run-jsobjects.cc
+++ b/test/cctest/compiler/test-run-jsobjects.cc
@@ -17,7 +17,7 @@
Handle<Object> arguments;
T.Call(T.Val(19), T.Val(23), T.Val(42), T.Val(65)).ToHandle(&arguments);
CHECK(arguments->IsJSObject() && !arguments->IsJSArray());
- CHECK(JSObject::cast(*arguments)->HasSloppyArgumentsElements());
+ CHECK(JSObject::cast(*arguments).HasSloppyArgumentsElements());
Handle<String> l = T.isolate->factory()->length_string();
Handle<Object> length =
Object::GetProperty(T.isolate, arguments, l).ToHandleChecked();
@@ -31,7 +31,7 @@
Handle<Object> arguments;
T.Call(T.Val(19), T.Val(23), T.Val(42), T.Val(65)).ToHandle(&arguments);
CHECK(arguments->IsJSObject() && !arguments->IsJSArray());
- CHECK(!JSObject::cast(*arguments)->HasSloppyArgumentsElements());
+ CHECK(!JSObject::cast(*arguments).HasSloppyArgumentsElements());
Handle<String> l = T.isolate->factory()->length_string();
Handle<Object> length =
Object::GetProperty(T.isolate, arguments, l).ToHandleChecked();
@@ -45,7 +45,7 @@
Handle<Object> arguments;
T.Call(T.Val(19), T.Val(23), T.Val(42), T.Val(65)).ToHandle(&arguments);
CHECK(arguments->IsJSObject() && arguments->IsJSArray());
- CHECK(!JSObject::cast(*arguments)->HasSloppyArgumentsElements());
+ CHECK(!JSObject::cast(*arguments).HasSloppyArgumentsElements());
Handle<String> l = T.isolate->factory()->length_string();
Handle<Object> length =
Object::GetProperty(T.isolate, arguments, l).ToHandleChecked();
diff --git a/test/cctest/compiler/test-run-unwinding-info.cc b/test/cctest/compiler/test-run-unwinding-info.cc
index 5ecc501..a127c5f 100644
--- a/test/cctest/compiler/test-run-unwinding-info.cc
+++ b/test/cctest/compiler/test-run-unwinding-info.cc
@@ -27,7 +27,7 @@
tester.Call(tester.Val(-1));
- CHECK(tester.function->code()->has_unwinding_info());
+ CHECK(tester.function->code().has_unwinding_info());
}
// TODO(ssanfilippo) Build low-level graph and check that state is correctly
diff --git a/test/cctest/heap/test-alloc.cc b/test/cctest/heap/test-alloc.cc
index 1bbb5cb..d05fdf5 100644
--- a/test/cctest/heap/test-alloc.cc
+++ b/test/cctest/heap/test-alloc.cc
@@ -54,12 +54,12 @@
heap->AllocateRaw(size, AllocationType::kYoung).ToObjectChecked();
// In order to pass heap verification on Isolate teardown, mark the
// allocated area as a filler.
- heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
+ heap->CreateFillerObjectAt(obj.address(), size, ClearRecordedSlots::kNo);
// Old generation.
heap::SimulateFullSpace(heap->old_space());
obj = heap->AllocateRaw(size, AllocationType::kOld).ToObjectChecked();
- heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
+ heap->CreateFillerObjectAt(obj.address(), size, ClearRecordedSlots::kNo);
// Large object space.
static const size_t kLargeObjectSpaceFillerLength =
@@ -71,23 +71,23 @@
while (heap->OldGenerationSpaceAvailable() > kLargeObjectSpaceFillerSize) {
obj = heap->AllocateRaw(kLargeObjectSpaceFillerSize, AllocationType::kOld)
.ToObjectChecked();
- heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
+ heap->CreateFillerObjectAt(obj.address(), size, ClearRecordedSlots::kNo);
}
obj = heap->AllocateRaw(kLargeObjectSpaceFillerSize, AllocationType::kOld)
.ToObjectChecked();
- heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
+ heap->CreateFillerObjectAt(obj.address(), size, ClearRecordedSlots::kNo);
// Map space.
heap::SimulateFullSpace(heap->map_space());
obj = heap->AllocateRaw(Map::kSize, AllocationType::kMap).ToObjectChecked();
- heap->CreateFillerObjectAt(obj->address(), Map::kSize,
+ heap->CreateFillerObjectAt(obj.address(), Map::kSize,
ClearRecordedSlots::kNo);
// Code space.
heap::SimulateFullSpace(heap->code_space());
- size = CcTest::i_isolate()->builtins()->builtin(Builtins::kIllegal)->Size();
+ size = CcTest::i_isolate()->builtins()->builtin(Builtins::kIllegal).Size();
obj = heap->AllocateRaw(size, AllocationType::kCode).ToObjectChecked();
- heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
+ heap->CreateFillerObjectAt(obj.address(), size, ClearRecordedSlots::kNo);
return CcTest::i_isolate()->factory()->true_value();
}
@@ -135,7 +135,7 @@
factory->function_string(), isolate->sloppy_function_map(),
Builtins::kEmptyFunction);
Handle<JSFunction> function = factory->NewFunction(args);
- CHECK(!function->shared()->construct_as_builtin());
+ CHECK(!function->shared().construct_as_builtin());
// Force the creation of an initial map.
factory->NewJSObject(function);
diff --git a/test/cctest/heap/test-embedder-tracing.cc b/test/cctest/heap/test-embedder-tracing.cc
index 022d605..08f093b 100644
--- a/test/cctest/heap/test-embedder-tracing.cc
+++ b/test/cctest/heap/test-embedder-tracing.cc
@@ -36,7 +36,7 @@
instance->SetAlignedPointerInInternalField(1, second_field);
CHECK(!instance.IsEmpty());
i::Handle<i::JSReceiver> js_obj = v8::Utils::OpenHandle(*instance);
- CHECK_EQ(i::JS_API_OBJECT_TYPE, js_obj->map()->instance_type());
+ CHECK_EQ(i::JS_API_OBJECT_TYPE, js_obj->map().instance_type());
return scope.Escape(instance);
}
@@ -211,7 +211,7 @@
Handle<JSObject> js_key =
handle(JSObject::cast(*v8::Utils::OpenHandle(*key)), i_isolate);
Handle<JSReceiver> js_api_object = v8::Utils::OpenHandle(*api_object);
- int32_t hash = js_key->GetOrCreateHash(i_isolate)->value();
+ int32_t hash = js_key->GetOrCreateHash(i_isolate).value();
JSWeakCollection::Set(weak_map, js_key, js_api_object, hash);
}
CcTest::CollectGarbage(i::OLD_SPACE);
diff --git a/test/cctest/heap/test-heap.cc b/test/cctest/heap/test-heap.cc
index 96f4e79..9694be5 100644
--- a/test/cctest/heap/test-heap.cc
+++ b/test/cctest/heap/test-heap.cc
@@ -74,11 +74,11 @@
AllocationSite::kPretenureMinimumCreated + 1;
static void CheckMap(Map map, int type, int instance_size) {
- CHECK(map->IsHeapObject());
+ CHECK(map.IsHeapObject());
DCHECK(IsValidHeapObject(CcTest::heap(), map));
- CHECK_EQ(ReadOnlyRoots(CcTest::heap()).meta_map(), map->map());
- CHECK_EQ(type, map->instance_type());
- CHECK_EQ(instance_size, map->instance_size());
+ CHECK_EQ(ReadOnlyRoots(CcTest::heap()).meta_map(), map.map());
+ CHECK_EQ(type, map.instance_type());
+ CHECK_EQ(instance_size, map.instance_size());
}
@@ -102,7 +102,7 @@
Handle<JSFunction> fun(
JSFunction::cast(context->get(stored_ctor_context_index)), isolate);
- Handle<JSObject> proto(JSObject::cast(fun->initial_map()->prototype()),
+ Handle<JSObject> proto(JSObject::cast(fun->initial_map().prototype()),
isolate);
Handle<Map> that_map(proto->map(), isolate);
@@ -149,16 +149,16 @@
}
static void CheckOddball(Isolate* isolate, Object obj, const char* string) {
- CHECK(obj->IsOddball());
+ CHECK(obj.IsOddball());
Handle<Object> handle(obj, isolate);
Object print_string = *Object::ToString(isolate, handle).ToHandleChecked();
- CHECK(String::cast(print_string)->IsOneByteEqualTo(CStrVector(string)));
+ CHECK(String::cast(print_string).IsOneByteEqualTo(CStrVector(string)));
}
static void CheckSmi(Isolate* isolate, int value, const char* string) {
Handle<Object> handle(Smi::FromInt(value), isolate);
Object print_string = *Object::ToString(isolate, handle).ToHandleChecked();
- CHECK(String::cast(print_string)->IsOneByteEqualTo(CStrVector(string)));
+ CHECK(String::cast(print_string).IsOneByteEqualTo(CStrVector(string)));
}
@@ -167,7 +167,7 @@
CHECK(number->IsNumber());
Handle<Object> print_string =
Object::ToString(isolate, number).ToHandleChecked();
- CHECK(String::cast(*print_string)->IsOneByteEqualTo(CStrVector(string)));
+ CHECK(String::cast(*print_string).IsOneByteEqualTo(CStrVector(string)));
}
void CheckEmbeddedObjectsAreEqual(Handle<Code> lhs, Handle<Code> rhs) {
@@ -227,9 +227,9 @@
CHECK(code->IsCode());
HeapObject obj = HeapObject::cast(*code);
- Address obj_addr = obj->address();
+ Address obj_addr = obj.address();
- for (int i = 0; i < obj->Size(); i += kTaggedSize) {
+ for (int i = 0; i < obj.Size(); i += kTaggedSize) {
Object found = isolate->FindCodeObject(obj_addr + i);
CHECK_EQ(*code, found);
}
@@ -237,7 +237,7 @@
Handle<Code> copy = Factory::CodeBuilder(isolate, desc, Code::STUB).Build();
HeapObject obj_copy = HeapObject::cast(*copy);
Object not_right =
- isolate->FindCodeObject(obj_copy->address() + obj_copy->Size() / 2);
+ isolate->FindCodeObject(obj_copy.address() + obj_copy.Size() / 2);
CHECK(not_right != *code);
}
@@ -313,7 +313,7 @@
CHECK_EQ(10, s->length());
Handle<String> object_string = Handle<String>::cast(factory->Object_string());
- Handle<JSGlobalObject> global(CcTest::i_isolate()->context()->global_object(),
+ Handle<JSGlobalObject> global(CcTest::i_isolate()->context().global_object(),
isolate);
CHECK(Just(true) == JSReceiver::HasOwnProperty(global, object_string));
@@ -339,9 +339,9 @@
CcTest::InitializeVM();
int request = 24;
CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
- CHECK(Smi::FromInt(42)->IsSmi());
- CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
- CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
+ CHECK(Smi::FromInt(42).IsSmi());
+ CHECK(Smi::FromInt(Smi::kMinValue).IsSmi());
+ CHECK(Smi::FromInt(Smi::kMaxValue).IsSmi());
}
@@ -354,7 +354,7 @@
// Check GC.
CcTest::CollectGarbage(NEW_SPACE);
- Handle<JSGlobalObject> global(CcTest::i_isolate()->context()->global_object(),
+ Handle<JSGlobalObject> global(CcTest::i_isolate()->context().global_object(),
isolate);
Handle<String> name = factory->InternalizeUtf8String("theFunction");
Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
@@ -470,10 +470,10 @@
// after gc, it should survive
CcTest::CollectGarbage(NEW_SPACE);
- CHECK((*h1)->IsString());
- CHECK((*h2)->IsHeapNumber());
- CHECK((*h3)->IsString());
- CHECK((*h4)->IsHeapNumber());
+ CHECK((*h1).IsString());
+ CHECK((*h2).IsHeapNumber());
+ CHECK((*h3).IsString());
+ CHECK((*h4).IsHeapNumber());
CHECK_EQ(*h3, *h1);
GlobalHandles::Destroy(h1.location());
@@ -529,7 +529,7 @@
&TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);
CcTest::CollectGarbage(NEW_SPACE);
- CHECK((*h1)->IsHeapNumber());
+ CHECK((*h1).IsHeapNumber());
CHECK(WeakPointerCleared);
GlobalHandles::Destroy(h1.location());
}
@@ -568,7 +568,7 @@
// Incremental marking potentially marked handles before they turned weak.
CcTest::CollectAllGarbage();
- CHECK((*h1)->IsString());
+ CHECK((*h1).IsString());
CHECK(WeakPointerCleared);
GlobalHandles::Destroy(h1.location());
}
@@ -939,7 +939,7 @@
JSArray::SetLength(array, static_cast<uint32_t>(Smi::kMaxValue) + 1);
uint32_t int_length = 0;
- CHECK(array->length()->ToArrayIndex(&int_length));
+ CHECK(array->length().ToArrayIndex(&int_length));
CHECK_EQ(static_cast<uint32_t>(Smi::kMaxValue) + 1, int_length);
CHECK(array->HasDictionaryElements()); // Must be in slow mode.
@@ -947,7 +947,7 @@
Object::SetElement(isolate, array, int_length, name, ShouldThrow::kDontThrow)
.Check();
uint32_t new_int_length = 0;
- CHECK(array->length()->ToArrayIndex(&new_int_length));
+ CHECK(array->length().ToArrayIndex(&new_int_length));
CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
element = Object::GetElement(isolate, array, int_length).ToHandleChecked();
CHECK_EQ(*element, *name);
@@ -1113,7 +1113,7 @@
// Add a Map object to look for.
objs[next_objs_index++] =
- Handle<Map>(HeapObject::cast(*objs[0])->map(), isolate);
+ Handle<Map>(HeapObject::cast(*objs[0]).map(), isolate);
CHECK_EQ(objs_count, next_objs_index);
CHECK_EQ(objs_count, ObjectsFoundInHeap(CcTest::heap(), objs, objs_count));
@@ -1157,12 +1157,12 @@
.ToHandleChecked();
CHECK(func_value->IsJSFunction());
Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
- CHECK(function->shared()->is_compiled());
+ CHECK(function->shared().is_compiled());
// The code will survive at least two GCs.
CcTest::CollectAllGarbage();
CcTest::CollectAllGarbage();
- CHECK(function->shared()->is_compiled());
+ CHECK(function->shared().is_compiled());
// Simulate several GCs that use full marking.
const int kAgingThreshold = 6;
@@ -1171,11 +1171,11 @@
}
// foo should no longer be in the compilation cache
- CHECK(!function->shared()->is_compiled());
+ CHECK(!function->shared().is_compiled());
CHECK(!function->is_compiled());
// Call foo to get it recompiled.
CompileRun("foo()");
- CHECK(function->shared()->is_compiled());
+ CHECK(function->shared().is_compiled());
CHECK(function->is_compiled());
}
}
@@ -1215,12 +1215,12 @@
.ToHandleChecked();
CHECK(func_value->IsJSFunction());
Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
- CHECK(function->shared()->is_compiled());
+ CHECK(function->shared().is_compiled());
// The code will survive at least two GCs.
CcTest::CollectAllGarbage();
CcTest::CollectAllGarbage();
- CHECK(function->shared()->is_compiled());
+ CHECK(function->shared().is_compiled());
// Simulate several GCs that use incremental marking.
const int kAgingThreshold = 6;
@@ -1228,7 +1228,7 @@
heap::SimulateIncrementalMarking(CcTest::heap());
CcTest::CollectAllGarbage();
}
- CHECK(!function->shared()->is_compiled());
+ CHECK(!function->shared().is_compiled());
CHECK(!function->is_compiled());
// This compile will compile the function again.
@@ -1241,7 +1241,7 @@
// the loop breaks once the function is enqueued as a candidate.
for (int i = 0; i < kAgingThreshold; i++) {
heap::SimulateIncrementalMarking(CcTest::heap());
- if (function->shared()->GetBytecodeArray()->IsOld()) break;
+ if (function->shared().GetBytecodeArray().IsOld()) break;
CcTest::CollectAllGarbage();
}
@@ -1256,7 +1256,7 @@
// Simulate one final GC and make sure the candidate wasn't flushed.
CcTest::CollectAllGarbage();
- CHECK(function->shared()->is_compiled());
+ CHECK(function->shared().is_compiled());
CHECK(function->is_compiled());
}
@@ -1360,7 +1360,7 @@
CHECK(shared->HasBytecodeArray());
const int kAgingThreshold = 6;
for (int i = 0; i < kAgingThreshold; i++) {
- shared->GetBytecodeArray()->MakeOlder();
+ shared->GetBytecodeArray().MakeOlder();
}
}
@@ -1396,9 +1396,9 @@
int CountNativeContexts() {
int count = 0;
Object object = CcTest::heap()->native_contexts_list();
- while (!object->IsUndefined(CcTest::i_isolate())) {
+ while (!object.IsUndefined(CcTest::i_isolate())) {
count++;
- object = Context::cast(object)->next_context_link();
+ object = Context::cast(object).next_context_link();
}
return count;
}
@@ -1481,7 +1481,7 @@
for (int i = 0; i < kNumTestContexts; i++) {
// TODO(dcarney): is there a better way to do this?
i::Address* unsafe = reinterpret_cast<i::Address*>(*ctx[i]);
- *unsafe = ReadOnlyRoots(CcTest::heap()).undefined_value()->ptr();
+ *unsafe = ReadOnlyRoots(CcTest::heap()).undefined_value().ptr();
ctx[i].Clear();
// Scavenge treats these references as strong.
@@ -1637,7 +1637,7 @@
heap->new_space()->AllocateRawAligned(size, alignment);
HeapObject obj;
allocation.To(&obj);
- heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
+ heap->CreateFillerObjectAt(obj.address(), size, ClearRecordedSlots::kNo);
return obj;
}
@@ -1666,7 +1666,7 @@
// aligned address.
start = AlignNewSpace(kDoubleAligned, 0);
obj = NewSpaceAllocateAligned(kTaggedSize, kDoubleAligned);
- CHECK(IsAligned(obj->address(), kDoubleAlignment));
+ CHECK(IsAligned(obj.address(), kDoubleAlignment));
// There is no filler.
CHECK_EQ(kTaggedSize, *top_addr - start);
@@ -1674,23 +1674,23 @@
// unaligned address.
start = AlignNewSpace(kDoubleAligned, kTaggedSize);
obj = NewSpaceAllocateAligned(kTaggedSize, kDoubleAligned);
- CHECK(IsAligned(obj->address(), kDoubleAlignment));
+ CHECK(IsAligned(obj.address(), kDoubleAlignment));
// There is a filler object before the object.
filler = HeapObject::FromAddress(start);
- CHECK(obj != filler && filler->IsFiller() && filler->Size() == kTaggedSize);
+ CHECK(obj != filler && filler.IsFiller() && filler.Size() == kTaggedSize);
CHECK_EQ(kTaggedSize + double_misalignment, *top_addr - start);
// Similarly for kDoubleUnaligned.
start = AlignNewSpace(kDoubleUnaligned, 0);
obj = NewSpaceAllocateAligned(kTaggedSize, kDoubleUnaligned);
- CHECK(IsAligned(obj->address() + kTaggedSize, kDoubleAlignment));
+ CHECK(IsAligned(obj.address() + kTaggedSize, kDoubleAlignment));
CHECK_EQ(kTaggedSize, *top_addr - start);
start = AlignNewSpace(kDoubleUnaligned, kTaggedSize);
obj = NewSpaceAllocateAligned(kTaggedSize, kDoubleUnaligned);
- CHECK(IsAligned(obj->address() + kTaggedSize, kDoubleAlignment));
+ CHECK(IsAligned(obj.address() + kTaggedSize, kDoubleAlignment));
// There is a filler object before the object.
filler = HeapObject::FromAddress(start);
- CHECK(obj != filler && filler->IsFiller() && filler->Size() == kTaggedSize);
+ CHECK(obj != filler && filler.IsFiller() && filler.Size() == kTaggedSize);
CHECK_EQ(kTaggedSize + double_misalignment, *top_addr - start);
}
}
@@ -1702,7 +1702,7 @@
heap->old_space()->AllocateRawAligned(size, alignment);
HeapObject obj;
allocation.To(&obj);
- heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
+ heap->CreateFillerObjectAt(obj.address(), size, ClearRecordedSlots::kNo);
return obj;
}
@@ -1731,7 +1731,7 @@
// Allocate a dummy object to properly set up the linear allocation info.
AllocationResult dummy = heap->old_space()->AllocateRawUnaligned(kTaggedSize);
CHECK(!dummy.IsRetry());
- heap->CreateFillerObjectAt(dummy.ToObjectChecked()->address(), kTaggedSize,
+ heap->CreateFillerObjectAt(dummy.ToObjectChecked().address(), kTaggedSize,
ClearRecordedSlots::kNo);
// Double misalignment is 4 on 32-bit platforms or when pointer compression
@@ -1744,28 +1744,28 @@
start = AlignOldSpace(kDoubleAligned, 0);
obj = OldSpaceAllocateAligned(kTaggedSize, kDoubleAligned);
// The object is aligned.
- CHECK(IsAligned(obj->address(), kDoubleAlignment));
+ CHECK(IsAligned(obj.address(), kDoubleAlignment));
// Try the opposite alignment case.
start = AlignOldSpace(kDoubleAligned, kTaggedSize);
obj = OldSpaceAllocateAligned(kTaggedSize, kDoubleAligned);
- CHECK(IsAligned(obj->address(), kDoubleAlignment));
+ CHECK(IsAligned(obj.address(), kDoubleAlignment));
filler = HeapObject::FromAddress(start);
CHECK(obj != filler);
- CHECK(filler->IsFiller());
- CHECK_EQ(kTaggedSize, filler->Size());
- CHECK(obj != filler && filler->IsFiller() && filler->Size() == kTaggedSize);
+ CHECK(filler.IsFiller());
+ CHECK_EQ(kTaggedSize, filler.Size());
+ CHECK(obj != filler && filler.IsFiller() && filler.Size() == kTaggedSize);
// Similarly for kDoubleUnaligned.
start = AlignOldSpace(kDoubleUnaligned, 0);
obj = OldSpaceAllocateAligned(kTaggedSize, kDoubleUnaligned);
// The object is aligned.
- CHECK(IsAligned(obj->address() + kTaggedSize, kDoubleAlignment));
+ CHECK(IsAligned(obj.address() + kTaggedSize, kDoubleAlignment));
// Try the opposite alignment case.
start = AlignOldSpace(kDoubleUnaligned, kTaggedSize);
obj = OldSpaceAllocateAligned(kTaggedSize, kDoubleUnaligned);
- CHECK(IsAligned(obj->address() + kTaggedSize, kDoubleAlignment));
+ CHECK(IsAligned(obj.address() + kTaggedSize, kDoubleAlignment));
filler = HeapObject::FromAddress(start);
- CHECK(obj != filler && filler->IsFiller() && filler->Size() == kTaggedSize);
+ CHECK(obj != filler && filler.IsFiller() && filler.Size() == kTaggedSize);
}
}
@@ -1786,7 +1786,7 @@
Handle<Object> number_new = factory->NewNumber(1.000123);
CHECK(number_new->IsHeapNumber());
CHECK(Heap::InYoungGeneration(*number_new));
- CHECK_EQ(0, Heap::GetFillToAlign(HeapObject::cast(*number_new)->address(),
+ CHECK_EQ(0, Heap::GetFillToAlign(HeapObject::cast(*number_new).address(),
required_alignment));
AlignOldSpace(required_alignment, offset);
@@ -1794,7 +1794,7 @@
factory->NewNumber(1.000321, AllocationType::kOld);
CHECK(number_old->IsHeapNumber());
CHECK(heap->InOldSpace(*number_old));
- CHECK_EQ(0, Heap::GetFillToAlign(HeapObject::cast(*number_old)->address(),
+ CHECK_EQ(0, Heap::GetFillToAlign(HeapObject::cast(*number_old).address(),
required_alignment));
}
}
@@ -1816,7 +1816,7 @@
Handle<Object> number_new = factory->NewMutableHeapNumber(1.000123);
CHECK(number_new->IsMutableHeapNumber());
CHECK(Heap::InYoungGeneration(*number_new));
- CHECK_EQ(0, Heap::GetFillToAlign(HeapObject::cast(*number_new)->address(),
+ CHECK_EQ(0, Heap::GetFillToAlign(HeapObject::cast(*number_new).address(),
required_alignment));
AlignOldSpace(required_alignment, offset);
@@ -1824,7 +1824,7 @@
factory->NewMutableHeapNumber(1.000321, AllocationType::kOld);
CHECK(number_old->IsMutableHeapNumber());
CHECK(heap->InOldSpace(*number_old));
- CHECK_EQ(0, Heap::GetFillToAlign(HeapObject::cast(*number_old)->address(),
+ CHECK_EQ(0, Heap::GetFillToAlign(HeapObject::cast(*number_old).address(),
required_alignment));
}
}
@@ -1836,8 +1836,8 @@
intptr_t size_of_objects_2 = 0;
for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) {
- if (!obj->IsFreeSpace()) {
- size_of_objects_2 += obj->Size();
+ if (!obj.IsFreeSpace()) {
+ size_of_objects_2 += obj.Size();
}
}
// Delta must be within 5% of the larger result.
@@ -1951,7 +1951,7 @@
HeapIterator iterator(CcTest::heap());
for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) {
- if (obj->IsJSGlobalObject()) count++;
+ if (obj.IsJSGlobalObject()) count++;
}
return count;
}
@@ -2540,12 +2540,12 @@
JSObject inner_object = JSObject::cast(o->RawFastPropertyAt(idx1));
CHECK(CcTest::heap()->InOldSpace(inner_object));
- if (!inner_object->IsUnboxedDoubleField(idx1)) {
- CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx1)));
+ if (!inner_object.IsUnboxedDoubleField(idx1)) {
+ CHECK(CcTest::heap()->InOldSpace(inner_object.RawFastPropertyAt(idx1)));
} else {
- CHECK_EQ(2.2, inner_object->RawFastDoublePropertyAt(idx1));
+ CHECK_EQ(2.2, inner_object.RawFastDoublePropertyAt(idx1));
}
- CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx2)));
+ CHECK(CcTest::heap()->InOldSpace(inner_object.RawFastPropertyAt(idx2)));
}
@@ -2937,7 +2937,7 @@
// Count number of live transitions after marking. Note that one transition
// is left, because 'o' still holds an instance of one transition target.
int transitions_after =
- CountMapTransitions(i_isolate, Map::cast(root->map()->GetBackPointer()));
+ CountMapTransitions(i_isolate, Map::cast(root->map().GetBackPointer()));
CHECK_EQ(1, transitions_after);
}
@@ -2966,7 +2966,7 @@
// Count number of live transitions after marking. Note that one transition
// is left, because 'o' still holds an instance of one transition target.
int transitions_after =
- CountMapTransitions(i_isolate, Map::cast(root->map()->GetBackPointer()));
+ CountMapTransitions(i_isolate, Map::cast(root->map().GetBackPointer()));
CHECK_EQ(2, transitions_after);
}
@@ -2995,7 +2995,7 @@
// Count number of live transitions after marking. Note that one transition
// is left, because 'o' still holds an instance of one transition target.
int transitions_after =
- CountMapTransitions(i_isolate, Map::cast(root->map()->GetBackPointer()));
+ CountMapTransitions(i_isolate, Map::cast(root->map().GetBackPointer()));
CHECK_EQ(1, transitions_after);
}
#endif // DEBUG
@@ -3105,7 +3105,7 @@
CcTest::global()->Get(ctx, v8_str("g")).ToLocalChecked())));
StdoutStream os;
- g->shared()->Print(os);
+ g->shared().Print(os);
os << std::endl;
}
#endif // OBJECT_PRINT
@@ -3469,23 +3469,23 @@
DetailedErrorStackTraceTest(source, [](Handle<FrameArray> stack_trace) {
FixedArray foo_parameters = stack_trace->Parameters(0);
- CHECK_EQ(foo_parameters->length(), 1);
- CHECK(foo_parameters->get(0)->IsSmi());
- CHECK_EQ(Smi::ToInt(foo_parameters->get(0)), 42);
+ CHECK_EQ(foo_parameters.length(), 1);
+ CHECK(foo_parameters.get(0).IsSmi());
+ CHECK_EQ(Smi::ToInt(foo_parameters.get(0)), 42);
FixedArray bar_parameters = stack_trace->Parameters(1);
- CHECK_EQ(bar_parameters->length(), 2);
- CHECK(bar_parameters->get(0)->IsJSObject());
- CHECK(bar_parameters->get(1)->IsBoolean());
+ CHECK_EQ(bar_parameters.length(), 2);
+ CHECK(bar_parameters.get(0).IsJSObject());
+ CHECK(bar_parameters.get(1).IsBoolean());
Handle<Object> foo = Handle<Object>::cast(GetByName("foo"));
- CHECK_EQ(bar_parameters->get(0), *foo);
- CHECK(!bar_parameters->get(1)->BooleanValue(CcTest::i_isolate()));
+ CHECK_EQ(bar_parameters.get(0), *foo);
+ CHECK(!bar_parameters.get(1).BooleanValue(CcTest::i_isolate()));
FixedArray main_parameters = stack_trace->Parameters(2);
- CHECK_EQ(main_parameters->length(), 2);
- CHECK(main_parameters->get(0)->IsJSObject());
- CHECK(main_parameters->get(1)->IsUndefined());
- CHECK_EQ(main_parameters->get(0), *foo);
+ CHECK_EQ(main_parameters.length(), 2);
+ CHECK(main_parameters.get(0).IsJSObject());
+ CHECK(main_parameters.get(1).IsUndefined());
+ CHECK_EQ(main_parameters.get(0), *foo);
});
}
@@ -3510,14 +3510,14 @@
DetailedErrorStackTraceTest(source, [](Handle<FrameArray> stack_trace) {
FixedArray parameters_add = stack_trace->Parameters(0);
- CHECK_EQ(parameters_add->length(), 1);
- CHECK(parameters_add->get(0)->IsSmi());
- CHECK_EQ(Smi::ToInt(parameters_add->get(0)), 42);
+ CHECK_EQ(parameters_add.length(), 1);
+ CHECK(parameters_add.get(0).IsSmi());
+ CHECK_EQ(Smi::ToInt(parameters_add.get(0)), 42);
FixedArray parameters_foo = stack_trace->Parameters(1);
- CHECK_EQ(parameters_foo->length(), 1);
- CHECK(parameters_foo->get(0)->IsSmi());
- CHECK_EQ(Smi::ToInt(parameters_foo->get(0)), 41);
+ CHECK_EQ(parameters_foo.length(), 1);
+ CHECK(parameters_foo.get(0).IsSmi());
+ CHECK_EQ(Smi::ToInt(parameters_foo.get(0)), 41);
});
}
@@ -3532,9 +3532,9 @@
DetailedErrorStackTraceTest(source, [](Handle<FrameArray> stack_trace) {
FixedArray parameters = stack_trace->Parameters(0);
- CHECK_EQ(parameters->length(), 2);
- CHECK(parameters->get(0)->IsSmi());
- CHECK_EQ(Smi::ToInt(parameters->get(0)), 9999);
+ CHECK_EQ(parameters.length(), 2);
+ CHECK(parameters.get(0).IsSmi());
+ CHECK_EQ(Smi::ToInt(parameters.get(0)), 9999);
});
}
@@ -3603,7 +3603,7 @@
CcTest::heap()->new_space()->AllocateRawUnaligned(
AllocationMemento::kSize + kTaggedSize);
CHECK(allocation.To(&obj));
- Address addr_obj = obj->address();
+ Address addr_obj = obj.address();
CcTest::heap()->CreateFillerObjectAt(addr_obj,
AllocationMemento::kSize + kTaggedSize,
ClearRecordedSlots::kNo);
@@ -3751,10 +3751,10 @@
static int AllocationSitesCount(Heap* heap) {
int count = 0;
- for (Object site = heap->allocation_sites_list(); site->IsAllocationSite();) {
+ for (Object site = heap->allocation_sites_list(); site.IsAllocationSite();) {
AllocationSite cur = AllocationSite::cast(site);
- CHECK(cur->HasWeakNext());
- site = cur->weak_next();
+ CHECK(cur.HasWeakNext());
+ site = cur.weak_next();
count++;
}
return count;
@@ -3763,15 +3763,15 @@
static int SlimAllocationSiteCount(Heap* heap) {
int count = 0;
for (Object weak_list = heap->allocation_sites_list();
- weak_list->IsAllocationSite();) {
+ weak_list.IsAllocationSite();) {
AllocationSite weak_cur = AllocationSite::cast(weak_list);
- for (Object site = weak_cur->nested_site(); site->IsAllocationSite();) {
+ for (Object site = weak_cur.nested_site(); site.IsAllocationSite();) {
AllocationSite cur = AllocationSite::cast(site);
- CHECK(!cur->HasWeakNext());
- site = cur->nested_site();
+ CHECK(!cur.HasWeakNext());
+ site = cur.nested_site();
count++;
}
- weak_list = weak_cur->weak_next();
+ weak_list = weak_cur.weak_next();
}
return count;
}
@@ -3818,16 +3818,16 @@
int dependency_group_count = 0;
DependentCode dependency = site->dependent_code();
while (dependency != ReadOnlyRoots(heap).empty_weak_fixed_array()) {
- CHECK(dependency->group() ==
+ CHECK(dependency.group() ==
DependentCode::kAllocationSiteTransitionChangedGroup ||
- dependency->group() ==
+ dependency.group() ==
DependentCode::kAllocationSiteTenuringChangedGroup);
- CHECK_EQ(1, dependency->count());
- CHECK(dependency->object_at(0)->IsWeak());
+ CHECK_EQ(1, dependency.count());
+ CHECK(dependency.object_at(0)->IsWeak());
Code function_bar =
- Code::cast(dependency->object_at(0)->GetHeapObjectAssumeWeak());
+ Code::cast(dependency.object_at(0)->GetHeapObjectAssumeWeak());
CHECK_EQ(bar_handle->code(), function_bar);
- dependency = dependency->next_link();
+ dependency = dependency.next_link();
dependency_group_count++;
}
// Expect a dependent code object for transitioning and pretenuring.
@@ -3842,7 +3842,7 @@
// The site still exists because of our global handle, but the code is no
// longer referred to by dependent_code().
- CHECK(site->dependent_code()->object_at(0)->IsCleared());
+ CHECK(site->dependent_code().object_at(0)->IsCleared());
}
void CheckNumberOfAllocations(Heap* heap, const char* source,
@@ -4173,7 +4173,7 @@
#ifdef VERIFY_HEAP
CcTest::heap()->Verify();
#endif
- CHECK(!bar->code()->marked_for_deoptimization());
+ CHECK(!bar->code().marked_for_deoptimization());
code = scope.CloseAndEscape(Handle<Code>(bar->code(), isolate));
}
@@ -4253,9 +4253,9 @@
static int GetCodeChainLength(Code code) {
int result = 0;
- while (code->next_code_link()->IsCode()) {
+ while (code.next_code_link().IsCode()) {
result++;
- code = Code::cast(code->next_code_link());
+ code = Code::cast(code.next_code_link());
}
return result;
}
@@ -4279,7 +4279,7 @@
OptimizeDummyFunction(CcTest::isolate(), "mortal");
Handle<JSFunction> immortal =
OptimizeDummyFunction(CcTest::isolate(), "immortal");
- CHECK_EQ(immortal->code()->next_code_link(), mortal->code());
+ CHECK_EQ(immortal->code().next_code_link(), mortal->code());
code_chain_length_before = GetCodeChainLength(immortal->code());
// Keep the immortal code and let the mortal code die.
code = scope.CloseAndEscape(Handle<Code>(immortal->code(), isolate));
@@ -4307,13 +4307,13 @@
OptimizeDummyFunction(CcTest::isolate(), "mortal1");
Handle<JSFunction> mortal2 =
OptimizeDummyFunction(CcTest::isolate(), "mortal2");
- CHECK_EQ(mortal2->code()->next_code_link(), mortal1->code());
+ CHECK_EQ(mortal2->code().next_code_link(), mortal1->code());
code_data_container = scope.CloseAndEscape(Handle<CodeDataContainer>(
- mortal2->code()->code_data_container(), isolate));
+ mortal2->code().code_data_container(), isolate));
CompileRun("mortal1 = null; mortal2 = null;");
}
CcTest::CollectAllAvailableGarbage();
- CHECK(code_data_container->next_code_link()->IsUndefined(isolate));
+ CHECK(code_data_container->next_code_link().IsUndefined(isolate));
}
static Handle<Code> DummyOptimizedCode(Isolate* isolate) {
@@ -4872,7 +4872,7 @@
for (HeapObject obj = it.next(); !obj.is_null(); obj = it.next()) {
// Let's not optimize the loop away.
- CHECK_NE(obj->address(), kNullAddress);
+ CHECK_NE(obj.address(), kNullAddress);
}
}
@@ -4972,7 +4972,7 @@
Factory* factory = isolate->factory();
HandleScope sc(isolate);
- Handle<JSGlobalObject> global(CcTest::i_isolate()->context()->global_object(),
+ Handle<JSGlobalObject> global(CcTest::i_isolate()->context().global_object(),
isolate);
Handle<JSArray> array = factory->NewJSArray(2);
@@ -4990,7 +4990,7 @@
if (!isolate->snapshot_available()) return;
Heap* heap = isolate->heap();
CHECK_EQ(Heap::kInitialNumberStringCacheSize * 2,
- heap->number_string_cache()->length());
+ heap->number_string_cache().length());
}
@@ -5184,7 +5184,7 @@
Isolate* isolate = CcTest::i_isolate();
Object message(
*reinterpret_cast<Address*>(isolate->pending_message_obj_address()));
- CHECK(message->IsTheHole(isolate));
+ CHECK(message.IsTheHole(isolate));
}
@@ -5283,7 +5283,7 @@
{
HeapIterator it(heap);
for (HeapObject obj = it.next(); !obj.is_null(); obj = it.next()) {
- if (obj->IsScript()) script_count++;
+ if (obj.IsScript()) script_count++;
}
}
@@ -5313,7 +5313,7 @@
{
HeapIterator it(heap);
for (HeapObject obj = it.next(); !obj.is_null(); obj = it.next()) {
- if (!obj->IsSharedFunctionInfo()) continue;
+ if (!obj.IsSharedFunctionInfo()) continue;
sfi_count++;
}
}
@@ -5338,10 +5338,10 @@
if (!allocation.To(&result)) return allocation;
}
- result->set_map_after_allocation(ReadOnlyRoots(heap).byte_array_map(),
- SKIP_WRITE_BARRIER);
- ByteArray::cast(result)->set_length(length);
- ByteArray::cast(result)->clear_padding();
+ result.set_map_after_allocation(ReadOnlyRoots(heap).byte_array_map(),
+ SKIP_WRITE_BARRIER);
+ ByteArray::cast(result).set_length(length);
+ ByteArray::cast(result).clear_padding();
return result;
}
@@ -5376,7 +5376,7 @@
while (
AllocateByteArrayForTest(heap, M, AllocationType::kOld).To(&byte_array)) {
for (int j = 0; j < M; j++) {
- byte_array->set(j, 0x31);
+ byte_array.set(j, 0x31);
}
}
// Re-enable old space expansion to avoid OOM crash.
@@ -5404,7 +5404,7 @@
while (AllocateByteArrayForTest(heap, M, AllocationType::kYoung)
.To(&byte_array)) {
for (int j = 0; j < M; j++) {
- byte_array->set(j, 0x31);
+ byte_array.set(j, 0x31);
}
// Add the array in root set.
handle(byte_array, isolate);
@@ -5450,7 +5450,7 @@
for (size_t j = 0; j < arrays.size(); j++) {
array = arrays[j];
for (int i = 0; i < N; i++) {
- array->set(i, *ec_obj);
+ array.set(i, *ec_obj);
}
}
}
@@ -5487,9 +5487,9 @@
Handle<FixedArray> tmp = isolate->factory()->NewFixedArray(
number_of_objects, AllocationType::kOld);
root->set(0, *tmp);
- for (int i = 0; i < get()->length(); i++) {
+ for (int i = 0; i < get().length(); i++) {
tmp = isolate->factory()->NewFixedArray(100, AllocationType::kOld);
- get()->set(i, *tmp);
+ get().set(i, *tmp);
}
}
}
@@ -5499,7 +5499,7 @@
Handle<FixedArray> root;
} arr(isolate, kNumberOfObjects);
- CHECK_EQ(arr.get()->length(), kNumberOfObjects);
+ CHECK_EQ(arr.get().length(), kNumberOfObjects);
CHECK(heap->lo_space()->Contains(arr.get()));
LargePage* page = LargePage::FromHeapObject(arr.get());
CHECK_NOT_NULL(page);
@@ -5515,8 +5515,8 @@
IncrementalMarking* marking = heap->incremental_marking();
IncrementalMarking::MarkingState* marking_state = marking->marking_state();
CHECK(marking_state->IsWhite(arr.get()));
- for (int i = 0; i < arr.get()->length(); i++) {
- HeapObject arr_value = HeapObject::cast(arr.get()->get(i));
+ for (int i = 0; i < arr.get().length(); i++) {
+ HeapObject arr_value = HeapObject::cast(arr.get().get(i));
CHECK(marking_state->IsWhite(arr_value));
}
@@ -5529,8 +5529,8 @@
CHECK(marking->IsMarking());
// Check that we have not marked the interesting array during root scanning.
- for (int i = 0; i < arr.get()->length(); i++) {
- HeapObject arr_value = HeapObject::cast(arr.get()->get(i));
+ for (int i = 0; i < arr.get().length(); i++) {
+ HeapObject arr_value = HeapObject::cast(arr.get().get(i));
CHECK(marking_state->IsWhite(arr_value));
}
@@ -5542,7 +5542,7 @@
i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
StepOrigin::kV8);
if (page->IsFlagSet(Page::HAS_PROGRESS_BAR) && page->ProgressBar() > 0) {
- CHECK_NE(page->ProgressBar(), arr.get()->Size());
+ CHECK_NE(page->ProgressBar(), arr.get().Size());
{
// Shift by 1, effectively moving one white object across the progress
// bar, meaning that we will miss marking it.
@@ -5569,8 +5569,8 @@
// All objects need to be black after marking. If a white object crossed the
// progress bar, we would fail here.
- for (int i = 0; i < arr.get()->length(); i++) {
- HeapObject arr_value = HeapObject::cast(arr.get()->get(i));
+ for (int i = 0; i < arr.get().length(); i++) {
+ HeapObject arr_value = HeapObject::cast(arr.get().get(i));
CHECK(marking_state->IsBlack(arr_value));
}
}
@@ -5808,8 +5808,8 @@
// First trim in one word steps.
for (int i = 0; i < 10; i++) {
trimmed = heap->LeftTrimFixedArray(previous, 1);
- HeapObject filler = HeapObject::FromAddress(previous->address());
- CHECK(filler->IsFiller());
+ HeapObject filler = HeapObject::FromAddress(previous.address());
+ CHECK(filler.IsFiller());
CHECK(marking_state->IsBlack(trimmed));
CHECK(marking_state->IsBlack(previous));
previous = trimmed;
@@ -5819,8 +5819,8 @@
for (int i = 2; i <= 3; i++) {
for (int j = 0; j < 10; j++) {
trimmed = heap->LeftTrimFixedArray(previous, i);
- HeapObject filler = HeapObject::FromAddress(previous->address());
- CHECK(filler->IsFiller());
+ HeapObject filler = HeapObject::FromAddress(previous.address());
+ CHECK(filler.IsFiller());
CHECK(marking_state->IsBlack(trimmed));
CHECK(marking_state->IsBlack(previous));
previous = trimmed;
@@ -5876,7 +5876,7 @@
isolate->heap()->RightTrimFixedArray(*array, 1);
HeapObject filler = HeapObject::FromAddress(previous);
- CHECK(filler->IsFiller());
+ CHECK(filler.IsFiller());
CHECK(marking_state->IsImpossible(filler));
// Trim 10 times by one, two, and three word.
@@ -5885,7 +5885,7 @@
previous -= kTaggedSize * i;
isolate->heap()->RightTrimFixedArray(*array, i);
HeapObject filler = HeapObject::FromAddress(previous);
- CHECK(filler->IsFiller());
+ CHECK(filler.IsFiller());
CHECK(marking_state->IsWhite(filler));
}
}
@@ -6269,7 +6269,7 @@
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted();
}
- CHECK(object->map()->IsMap());
+ CHECK(object->map().IsMap());
}
HEAP_TEST(MarkCompactEpochCounter) {
diff --git a/test/cctest/heap/test-invalidated-slots.cc b/test/cctest/heap/test-invalidated-slots.cc
index 3f94f71..9bd3249 100644
--- a/test/cctest/heap/test-invalidated-slots.cc
+++ b/test/cctest/heap/test-invalidated-slots.cc
@@ -55,8 +55,8 @@
Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
InvalidatedSlotsFilter filter(page);
for (ByteArray byte_array : byte_arrays) {
- Address start = byte_array->address() + ByteArray::kHeaderSize;
- Address end = byte_array->address() + byte_array->Size();
+ Address start = byte_array.address() + ByteArray::kHeaderSize;
+ Address end = byte_array.address() + byte_array.Size();
for (Address addr = start; addr < end; addr += kTaggedSize) {
CHECK(filter.IsValid(addr));
}
@@ -71,13 +71,13 @@
// Register every second byte arrays as invalidated.
for (size_t i = 0; i < byte_arrays.size(); i += 2) {
page->RegisterObjectWithInvalidatedSlots(byte_arrays[i],
- byte_arrays[i]->Size());
+ byte_arrays[i].Size());
}
InvalidatedSlotsFilter filter(page);
for (size_t i = 0; i < byte_arrays.size(); i++) {
ByteArray byte_array = byte_arrays[i];
- Address start = byte_array->address() + ByteArray::kHeaderSize;
- Address end = byte_array->address() + byte_array->Size();
+ Address start = byte_array.address() + ByteArray::kHeaderSize;
+ Address end = byte_array.address() + byte_array.Size();
for (Address addr = start; addr < end; addr += kTaggedSize) {
if (i % 2 == 0) {
CHECK(!filter.IsValid(addr));
@@ -96,13 +96,13 @@
// Register the all byte arrays as invalidated.
for (size_t i = 0; i < byte_arrays.size(); i++) {
page->RegisterObjectWithInvalidatedSlots(byte_arrays[i],
- byte_arrays[i]->Size());
+ byte_arrays[i].Size());
}
InvalidatedSlotsFilter filter(page);
for (size_t i = 0; i < byte_arrays.size(); i++) {
ByteArray byte_array = byte_arrays[i];
- Address start = byte_array->address() + ByteArray::kHeaderSize;
- Address end = byte_array->address() + byte_array->Size();
+ Address start = byte_array.address() + ByteArray::kHeaderSize;
+ Address end = byte_array.address() + byte_array.Size();
for (Address addr = start; addr < end; addr += kTaggedSize) {
CHECK(!filter.IsValid(addr));
}
@@ -118,16 +118,16 @@
// Register the all byte arrays as invalidated.
for (size_t i = 0; i < byte_arrays.size(); i++) {
page->RegisterObjectWithInvalidatedSlots(byte_arrays[i],
- byte_arrays[i]->Size());
+ byte_arrays[i].Size());
}
// Trim byte arrays and check that the slots outside the byte arrays are
// considered invalid if the old space page was swept.
InvalidatedSlotsFilter filter(page);
for (size_t i = 0; i < byte_arrays.size(); i++) {
ByteArray byte_array = byte_arrays[i];
- Address start = byte_array->address() + ByteArray::kHeaderSize;
- Address end = byte_array->address() + byte_array->Size();
- heap->RightTrimFixedArray(byte_array, byte_array->length());
+ Address start = byte_array.address() + ByteArray::kHeaderSize;
+ Address end = byte_array.address() + byte_array.Size();
+ heap->RightTrimFixedArray(byte_array, byte_array.length());
for (Address addr = start; addr < end; addr += kTaggedSize) {
CHECK_EQ(filter.IsValid(addr), page->SweepingDone());
}
@@ -146,14 +146,14 @@
// candidate.
for (size_t i = 0; i < byte_arrays.size(); i++) {
page->RegisterObjectWithInvalidatedSlots(byte_arrays[i],
- byte_arrays[i]->Size());
+ byte_arrays[i].Size());
}
// All slots must still be valid.
InvalidatedSlotsFilter filter(page);
for (size_t i = 0; i < byte_arrays.size(); i++) {
ByteArray byte_array = byte_arrays[i];
- Address start = byte_array->address() + ByteArray::kHeaderSize;
- Address end = byte_array->address() + byte_array->Size();
+ Address start = byte_array.address() + ByteArray::kHeaderSize;
+ Address end = byte_array.address() + byte_array.Size();
for (Address addr = start; addr < end; addr += kTaggedSize) {
CHECK(filter.IsValid(addr));
}
@@ -166,18 +166,18 @@
std::vector<ByteArray> byte_arrays;
Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
// Ensure that the first array has smaller size then the rest.
- heap->RightTrimFixedArray(byte_arrays[0], byte_arrays[0]->length() - 8);
+ heap->RightTrimFixedArray(byte_arrays[0], byte_arrays[0].length() - 8);
// Register the all byte arrays as invalidated.
for (size_t i = 0; i < byte_arrays.size(); i++) {
page->RegisterObjectWithInvalidatedSlots(byte_arrays[i],
- byte_arrays[i]->Size());
+ byte_arrays[i].Size());
}
// All slots must still be invalid.
InvalidatedSlotsFilter filter(page);
for (size_t i = 0; i < byte_arrays.size(); i++) {
ByteArray byte_array = byte_arrays[i];
- Address start = byte_array->address() + ByteArray::kHeaderSize;
- Address end = byte_array->address() + byte_array->Size();
+ Address start = byte_array.address() + ByteArray::kHeaderSize;
+ Address end = byte_array.address() + byte_array.Size();
for (Address addr = start; addr < end; addr += kTaggedSize) {
CHECK(!filter.IsValid(addr));
}
@@ -326,7 +326,7 @@
{
AlwaysAllocateScope always_allocate(isolate);
Handle<JSFunction> function = factory->NewFunctionForTest(name);
- function->shared()->set_expected_nof_properties(3);
+ function->shared().set_expected_nof_properties(3);
obj = factory->NewJSObject(function, AllocationType::kOld);
}
// Start incremental marking.
diff --git a/test/cctest/heap/test-lab.cc b/test/cctest/heap/test-lab.cc
index 94f652e..1a3db87 100644
--- a/test/cctest/heap/test-lab.cc
+++ b/test/cctest/heap/test-lab.cc
@@ -17,7 +17,7 @@
static Address AllocateLabBackingStore(Heap* heap, intptr_t size_in_bytes) {
AllocationResult result = heap->old_space()->AllocateRaw(
static_cast<int>(size_in_bytes), kDoubleAligned);
- Address adr = result.ToObjectChecked()->address();
+ Address adr = result.ToObjectChecked().address();
return adr;
}
@@ -30,10 +30,10 @@
size_t counter = 0;
while (base < limit) {
object = HeapObject::FromAddress(base);
- CHECK(object->IsFiller());
+ CHECK(object.IsFiller());
CHECK_LT(counter, expected_size.size());
- CHECK_EQ(expected_size[counter], object->Size());
- base += object->Size();
+ CHECK_EQ(expected_size[counter], object.Size());
+ base += object.Size();
counter++;
}
}
@@ -46,7 +46,7 @@
AllocationResult result =
lab->AllocateRawAligned(static_cast<int>(size_in_bytes), alignment);
if (result.To(&obj)) {
- heap->CreateFillerObjectAt(obj->address(), static_cast<int>(size_in_bytes),
+ heap->CreateFillerObjectAt(obj.address(), static_cast<int>(size_in_bytes),
ClearRecordedSlots::kNo);
return true;
}
diff --git a/test/cctest/heap/test-mark-compact.cc b/test/cctest/heap/test-mark-compact.cc
index 698dd93..5d53ec8 100644
--- a/test/cctest/heap/test-mark-compact.cc
+++ b/test/cctest/heap/test-mark-compact.cc
@@ -103,8 +103,8 @@
HeapObject obj;
AllocationResult alloc = heap->AllocateRaw(Map::kSize, AllocationType::kMap);
if (!alloc.To(&obj)) return alloc;
- obj->set_map_after_allocation(ReadOnlyRoots(heap).meta_map(),
- SKIP_WRITE_BARRIER);
+ obj.set_map_after_allocation(ReadOnlyRoots(heap).meta_map(),
+ SKIP_WRITE_BARRIER);
return isolate->factory()->InitializeMap(Map::cast(obj), JS_OBJECT_TYPE,
JSObject::kHeaderSize,
TERMINAL_FAST_ELEMENTS_KIND, 0);
@@ -121,11 +121,11 @@
AllocationResult result = heap->AllocateRaw(size, allocation);
if (!result.To(&obj)) return result;
}
- obj->set_map_after_allocation(ReadOnlyRoots(heap).fixed_array_map(),
- SKIP_WRITE_BARRIER);
+ obj.set_map_after_allocation(ReadOnlyRoots(heap).fixed_array_map(),
+ SKIP_WRITE_BARRIER);
FixedArray array = FixedArray::cast(obj);
- array->set_length(length);
- MemsetTagged(array->data_start(), ReadOnlyRoots(heap).undefined_value(),
+ array.set_length(length);
+ MemsetTagged(array.data_start(), ReadOnlyRoots(heap).undefined_value(),
length);
return array;
}
@@ -139,7 +139,7 @@
Factory* factory = isolate->factory();
v8::HandleScope sc(CcTest::isolate());
- Handle<JSGlobalObject> global(isolate->context()->global_object(), isolate);
+ Handle<JSGlobalObject> global(isolate->context().global_object(), isolate);
// call mark-compact when heap is empty
CcTest::CollectGarbage(OLD_SPACE);
@@ -370,7 +370,7 @@
IncrementalMarking::MarkingState* marking_state = marking->marking_state();
for (auto object_and_size :
LiveObjectRange<kGreyObjects>(page, marking_state->bitmap(page))) {
- CHECK(!object_and_size.first->IsFiller());
+ CHECK(!object_and_size.first.IsFiller());
}
}
diff --git a/test/cctest/heap/test-spaces.cc b/test/cctest/heap/test-spaces.cc
index 24a882f..9530190 100644
--- a/test/cctest/heap/test-spaces.cc
+++ b/test/cctest/heap/test-spaces.cc
@@ -301,7 +301,7 @@
int lo_size = Page::kPageSize;
Object obj = lo->AllocateRaw(lo_size).ToObjectChecked();
- CHECK(obj->IsHeapObject());
+ CHECK(obj.IsHeapObject());
HeapObject ho = HeapObject::cast(obj);
@@ -390,7 +390,7 @@
CHECK(!allocation.IsRetry());
HeapObject filler;
CHECK(allocation.To(&filler));
- space->heap()->CreateFillerObjectAt(filler->address(), size,
+ space->heap()->CreateFillerObjectAt(filler.address(), size,
ClearRecordedSlots::kNo);
return filler;
}
@@ -400,7 +400,7 @@
CHECK(!allocation.IsRetry());
HeapObject filler;
CHECK(allocation.To(&filler));
- space->heap()->CreateFillerObjectAt(filler->address(), size,
+ space->heap()->CreateFillerObjectAt(filler.address(), size,
ClearRecordedSlots::kNo);
return filler;
}
@@ -571,7 +571,7 @@
heap::SimulateFullSpace(old_space);
AllocationResult result = old_space->AllocateRaw(filler_size, kWordAligned);
HeapObject obj = result.ToObjectChecked();
- heap->CreateFillerObjectAt(obj->address(), filler_size,
+ heap->CreateFillerObjectAt(obj.address(), filler_size,
ClearRecordedSlots::kNo);
}
@@ -582,14 +582,14 @@
old_space->AllocateRaw(max_object_size, kWordAligned);
HeapObject obj = result.ToObjectChecked();
// Simulate allocation folding moving the top pointer back.
- old_space->SetTopAndLimit(obj->address(), old_space->limit());
+ old_space->SetTopAndLimit(obj.address(), old_space->limit());
}
{
// This triggers assert in crbug.com/777177.
AllocationResult result = old_space->AllocateRaw(filler_size, kWordAligned);
HeapObject obj = result.ToObjectChecked();
- heap->CreateFillerObjectAt(obj->address(), filler_size,
+ heap->CreateFillerObjectAt(obj.address(), filler_size,
ClearRecordedSlots::kNo);
}
old_space->RemoveAllocationObserver(&observer);
@@ -621,17 +621,17 @@
AllocationResult result =
new_space->AllocateRaw(until_page_end, kWordAligned);
HeapObject obj = result.ToObjectChecked();
- heap->CreateFillerObjectAt(obj->address(), until_page_end,
+ heap->CreateFillerObjectAt(obj.address(), until_page_end,
ClearRecordedSlots::kNo);
// Simulate allocation folding moving the top pointer back.
- *new_space->allocation_top_address() = obj->address();
+ *new_space->allocation_top_address() = obj.address();
}
{
// This triggers assert in crbug.com/791582
AllocationResult result = new_space->AllocateRaw(256, kWordAligned);
HeapObject obj = result.ToObjectChecked();
- heap->CreateFillerObjectAt(obj->address(), 256, ClearRecordedSlots::kNo);
+ heap->CreateFillerObjectAt(obj.address(), 256, ClearRecordedSlots::kNo);
}
new_space->RemoveAllocationObserver(&observer);
}
@@ -656,7 +656,7 @@
old_space->ResetFreeList();
HeapObject filler = HeapObject::FromAddress(array->address() + array->Size());
- CHECK(filler->IsFreeSpace());
+ CHECK(filler.IsFreeSpace());
size_t shrunk = old_space->ShrinkPageToHighWaterMark(page);
size_t should_have_shrunk = RoundDown(
static_cast<size_t>(MemoryChunkLayout::AllocatableMemoryInDataPage() -
@@ -707,7 +707,7 @@
old_space->ResetFreeList();
HeapObject filler = HeapObject::FromAddress(array->address() + array->Size());
- CHECK_EQ(filler->map(),
+ CHECK_EQ(filler.map(),
ReadOnlyRoots(CcTest::heap()).one_pointer_filler_map());
size_t shrunk = old_space->ShrinkPageToHighWaterMark(page);
@@ -734,7 +734,7 @@
old_space->ResetFreeList();
HeapObject filler = HeapObject::FromAddress(array->address() + array->Size());
- CHECK_EQ(filler->map(),
+ CHECK_EQ(filler.map(),
ReadOnlyRoots(CcTest::heap()).two_pointer_filler_map());
size_t shrunk = old_space->ShrinkPageToHighWaterMark(page);
diff --git a/test/cctest/heap/test-weak-references.cc b/test/cctest/heap/test-weak-references.cc
index 33c396e..18dca8e 100644
--- a/test/cctest/heap/test-weak-references.cc
+++ b/test/cctest/heap/test-weak-references.cc
@@ -28,7 +28,7 @@
.ToLocalChecked();
Handle<Object> obj = v8::Utils::OpenHandle(*script);
Handle<SharedFunctionInfo> shared_function =
- Handle<SharedFunctionInfo>(JSFunction::cast(*obj)->shared(), i_isolate);
+ Handle<SharedFunctionInfo>(JSFunction::cast(*obj).shared(), i_isolate);
Handle<ClosureFeedbackCellArray> closure_cell_array =
ClosureFeedbackCellArray::New(i_isolate, shared_function);
Handle<FeedbackVector> fv = factory->NewFeedbackVector(
@@ -414,21 +414,21 @@
CcTest::CollectGarbage(NEW_SPACE);
HeapObject heap_object;
CHECK(array->Get(0)->GetHeapObjectIfWeak(&heap_object));
- CHECK_EQ(Smi::cast(FixedArray::cast(heap_object)->get(0))->value(), 2016);
+ CHECK_EQ(Smi::cast(FixedArray::cast(heap_object).get(0)).value(), 2016);
CHECK(array->Get(1)->GetHeapObjectIfWeak(&heap_object));
- CHECK_EQ(Smi::cast(FixedArray::cast(heap_object)->get(0))->value(), 2017);
+ CHECK_EQ(Smi::cast(FixedArray::cast(heap_object).get(0)).value(), 2017);
CHECK(array->Get(2)->GetHeapObjectIfStrong(&heap_object));
- CHECK_EQ(Smi::cast(FixedArray::cast(heap_object)->get(0))->value(), 2018);
+ CHECK_EQ(Smi::cast(FixedArray::cast(heap_object).get(0)).value(), 2018);
CHECK(array->Get(3)->GetHeapObjectIfWeak(&heap_object));
- CHECK_EQ(Smi::cast(FixedArray::cast(heap_object)->get(0))->value(), 2019);
+ CHECK_EQ(Smi::cast(FixedArray::cast(heap_object).get(0)).value(), 2019);
CcTest::CollectAllGarbage();
CHECK(heap->InOldSpace(*array));
CHECK(array->Get(0)->IsCleared());
CHECK(array->Get(1)->GetHeapObjectIfWeak(&heap_object));
- CHECK_EQ(Smi::cast(FixedArray::cast(heap_object)->get(0))->value(), 2017);
+ CHECK_EQ(Smi::cast(FixedArray::cast(heap_object).get(0)).value(), 2017);
CHECK(array->Get(2)->GetHeapObjectIfStrong(&heap_object));
- CHECK_EQ(Smi::cast(FixedArray::cast(heap_object)->get(0))->value(), 2018);
+ CHECK_EQ(Smi::cast(FixedArray::cast(heap_object).get(0)).value(), 2018);
CHECK(array->Get(3)->IsCleared());
}
@@ -509,19 +509,19 @@
HeapObject heap_object;
CHECK_EQ(array->length(), 8);
CHECK(array->Get(0)->GetHeapObjectIfWeak(&heap_object));
- CHECK_EQ(Smi::cast(FixedArray::cast(heap_object)->get(0))->value(), 2016);
+ CHECK_EQ(Smi::cast(FixedArray::cast(heap_object).get(0)).value(), 2016);
CHECK_EQ(array->Get(1).ToSmi().value(), 1);
CHECK(array->Get(2)->GetHeapObjectIfWeak(&heap_object));
- CHECK_EQ(Smi::cast(FixedArray::cast(heap_object)->get(0))->value(), 2017);
+ CHECK_EQ(Smi::cast(FixedArray::cast(heap_object).get(0)).value(), 2017);
CHECK_EQ(array->Get(3).ToSmi().value(), 3);
CHECK(array->Get(4)->GetHeapObjectIfWeak(&heap_object));
- CHECK_EQ(Smi::cast(FixedArray::cast(heap_object)->get(0))->value(), 2018);
+ CHECK_EQ(Smi::cast(FixedArray::cast(heap_object).get(0)).value(), 2018);
CHECK_EQ(array->Get(5).ToSmi().value(), 5);
CHECK(array->Get(6)->GetHeapObjectIfWeak(&heap_object));
- CHECK_EQ(Smi::cast(FixedArray::cast(heap_object)->get(0))->value(), 2019);
+ CHECK_EQ(Smi::cast(FixedArray::cast(heap_object).get(0)).value(), 2019);
CHECK_EQ(array->Get(7).ToSmi().value(), 7);
CcTest::CollectAllGarbage();
@@ -531,7 +531,7 @@
CHECK_EQ(array->Get(1).ToSmi().value(), 1);
CHECK(array->Get(2)->GetHeapObjectIfWeak(&heap_object));
- CHECK_EQ(Smi::cast(FixedArray::cast(heap_object)->get(0))->value(), 2017);
+ CHECK_EQ(Smi::cast(FixedArray::cast(heap_object).get(0)).value(), 2017);
CHECK_EQ(array->Get(3).ToSmi().value(), 3);
CHECK(array->Get(4)->IsCleared());
@@ -765,7 +765,7 @@
CHECK_EQ(array->length(), 3 + PrototypeUsers::kFirstIndex);
WeakArrayList new_array =
PrototypeUsers::Compact(array, heap, TestCompactCallback);
- CHECK_EQ(new_array->length(), 1 + PrototypeUsers::kFirstIndex);
+ CHECK_EQ(new_array.length(), 1 + PrototypeUsers::kFirstIndex);
CHECK_EQ(saved_heap_object, *live_map);
}
diff --git a/test/cctest/interpreter/bytecode-expectations-printer.cc b/test/cctest/interpreter/bytecode-expectations-printer.cc
index 9011bc5..20a23a7 100644
--- a/test/cctest/interpreter/bytecode-expectations-printer.cc
+++ b/test/cctest/interpreter/bytecode-expectations-printer.cc
@@ -98,7 +98,7 @@
i::Handle<i::JSFunction>::cast(v8::Utils::OpenHandle(*function));
i::Handle<i::BytecodeArray> bytecodes =
- i::handle(js_function->shared()->GetBytecodeArray(), i_isolate());
+ i::handle(js_function->shared().GetBytecodeArray(), i_isolate());
return bytecodes;
}
@@ -108,7 +108,7 @@
v8::Local<v8::Module> module) const {
i::Handle<i::Module> i_module = v8::Utils::OpenHandle(*module);
return i::handle(
- SharedFunctionInfo::cast(i_module->code())->GetBytecodeArray(),
+ SharedFunctionInfo::cast(i_module->code()).GetBytecodeArray(),
i_isolate());
}
@@ -116,7 +116,7 @@
BytecodeExpectationsPrinter::GetBytecodeArrayForScript(
v8::Local<v8::Script> script) const {
i::Handle<i::JSFunction> js_function = v8::Utils::OpenHandle(*script);
- return i::handle(js_function->shared()->GetBytecodeArray(), i_isolate());
+ return i::handle(js_function->shared().GetBytecodeArray(), i_isolate());
}
i::Handle<i::BytecodeArray>
@@ -126,8 +126,8 @@
v8::Utils::OpenHandle(*CompileRun(source_code));
i::Handle<i::JSFunction> js_function =
i::Handle<i::JSFunction>::cast(i_object);
- CHECK(js_function->shared()->HasBytecodeArray());
- return i::handle(js_function->shared()->GetBytecodeArray(), i_isolate());
+ CHECK(js_function->shared().HasBytecodeArray());
+ return i::handle(js_function->shared().GetBytecodeArray(), i_isolate());
}
void BytecodeExpectationsPrinter::PrintEscapedString(
@@ -278,8 +278,8 @@
void BytecodeExpectationsPrinter::PrintV8String(std::ostream& stream,
i::String string) const {
stream << '"';
- for (int i = 0, length = string->length(); i < length; ++i) {
- stream << i::AsEscapedUC16ForJSON(string->Get(i));
+ for (int i = 0, length = string.length(); i < length; ++i) {
+ stream << i::AsEscapedUC16ForJSON(string.Get(i));
}
stream << '"';
}
@@ -288,13 +288,13 @@
std::ostream& stream, i::Handle<i::Object> constant) const {
if (constant->IsSmi()) {
stream << "Smi [";
- i::Smi::cast(*constant)->SmiPrint(stream);
+ i::Smi::cast(*constant).SmiPrint(stream);
stream << "]";
} else {
- stream << i::HeapObject::cast(*constant)->map()->instance_type();
+ stream << i::HeapObject::cast(*constant).map().instance_type();
if (constant->IsHeapNumber()) {
stream << " [";
- i::HeapNumber::cast(*constant)->HeapNumberPrint(stream);
+ i::HeapNumber::cast(*constant).HeapNumberPrint(stream);
stream << "]";
} else if (constant->IsString()) {
stream << " [";
@@ -334,7 +334,7 @@
void BytecodeExpectationsPrinter::PrintConstantPool(
std::ostream& stream, i::FixedArray constant_pool) const {
stream << "constant pool: [\n";
- int num_constants = constant_pool->length();
+ int num_constants = constant_pool.length();
if (num_constants > 0) {
for (int i = 0; i < num_constants; ++i) {
stream << kIndent;
diff --git a/test/cctest/interpreter/interpreter-tester.h b/test/cctest/interpreter/interpreter-tester.h
index e67025a..d6f9a5c 100644
--- a/test/cctest/interpreter/interpreter-tester.h
+++ b/test/cctest/interpreter/interpreter-tester.h
@@ -122,13 +122,13 @@
}
if (!bytecode_.is_null()) {
- function->shared()->set_function_data(*bytecode_.ToHandleChecked());
+ function->shared().set_function_data(*bytecode_.ToHandleChecked());
}
if (HasFeedbackMetadata()) {
function->set_raw_feedback_cell(isolate_->heap()->many_closures_cell());
// Set the raw feedback metadata to circumvent checks that we are not
// overwriting existing metadata.
- function->shared()->set_raw_outer_scope_info_or_feedback_metadata(
+ function->shared().set_raw_outer_scope_info_or_feedback_metadata(
*feedback_metadata_.ToHandleChecked());
JSFunction::EnsureFeedbackVector(function);
}
diff --git a/test/cctest/interpreter/test-interpreter.cc b/test/cctest/interpreter/test-interpreter.cc
index 32fcfa5..dd1438d 100644
--- a/test/cctest/interpreter/test-interpreter.cc
+++ b/test/cctest/interpreter/test-interpreter.cc
@@ -176,7 +176,7 @@
InterpreterTester tester(isolate, bytecode_array);
auto callable = tester.GetCallable<>();
Handle<Object> return_val = callable().ToHandleChecked();
- CHECK_EQ(i::HeapNumber::cast(*return_val)->value(), -2.1e19);
+ CHECK_EQ(i::HeapNumber::cast(*return_val).value(), -2.1e19);
}
// Strings.
@@ -195,7 +195,7 @@
InterpreterTester tester(isolate, bytecode_array);
auto callable = tester.GetCallable<>();
Handle<Object> return_val = callable().ToHandleChecked();
- CHECK(i::String::cast(*return_val)->Equals(*raw_string->string()));
+ CHECK(i::String::cast(*return_val).Equals(*raw_string->string()));
}
}
@@ -424,7 +424,7 @@
Handle<Object> return_value = callable().ToHandleChecked();
CHECK(return_value->IsBigInt());
if (tester.HasFeedbackMetadata()) {
- MaybeObject feedback = callable.vector()->Get(slot);
+ MaybeObject feedback = callable.vector().Get(slot);
CHECK(feedback->IsSmi());
CHECK_EQ(BinaryOperationFeedback::kBigInt, feedback->ToSmi().value());
}
@@ -546,7 +546,7 @@
CHECK(return_value->SameValue(*test_cases[i].expected_value));
if (tester.HasFeedbackMetadata()) {
- MaybeObject feedback = callable.vector()->Get(slot);
+ MaybeObject feedback = callable.vector().Get(slot);
CHECK(feedback->IsSmi());
CHECK_EQ(test_cases[i].expected_feedback, feedback->ToSmi().value());
}
@@ -752,7 +752,7 @@
auto callable = tester.GetCallable<>();
Handle<Object> return_val = callable().ToHandleChecked();
- MaybeObject feedback0 = callable.vector()->Get(slot0);
+ MaybeObject feedback0 = callable.vector().Get(slot0);
CHECK(feedback0->IsSmi());
CHECK_EQ(test_case.feedback, feedback0->ToSmi().value());
CHECK(Object::Equals(isolate, test_case.result, return_val).ToChecked());
@@ -858,7 +858,7 @@
auto callable = tester.GetCallable<>();
Handle<Object> return_val = callable().ToHandleChecked();
- MaybeObject feedback0 = callable.vector()->Get(slot0);
+ MaybeObject feedback0 = callable.vector().Get(slot0);
CHECK(feedback0->IsSmi());
CHECK_EQ(test_case.feedback, feedback0->ToSmi().value());
CHECK(Object::Equals(isolate, test_case.result, return_val).ToChecked());
@@ -929,23 +929,23 @@
test_case.bigint_feedback_value, test_case.any_feedback_value)
.ToHandleChecked();
USE(return_val);
- MaybeObject feedback0 = callable.vector()->Get(slot0);
+ MaybeObject feedback0 = callable.vector().Get(slot0);
CHECK(feedback0->IsSmi());
CHECK_EQ(BinaryOperationFeedback::kSignedSmall, feedback0->ToSmi().value());
- MaybeObject feedback1 = callable.vector()->Get(slot1);
+ MaybeObject feedback1 = callable.vector().Get(slot1);
CHECK(feedback1->IsSmi());
CHECK_EQ(BinaryOperationFeedback::kNumber, feedback1->ToSmi().value());
- MaybeObject feedback2 = callable.vector()->Get(slot2);
+ MaybeObject feedback2 = callable.vector().Get(slot2);
CHECK(feedback2->IsSmi());
CHECK_EQ(BinaryOperationFeedback::kNumber, feedback2->ToSmi().value());
- MaybeObject feedback3 = callable.vector()->Get(slot3);
+ MaybeObject feedback3 = callable.vector().Get(slot3);
CHECK(feedback3->IsSmi());
CHECK_EQ(BinaryOperationFeedback::kBigInt, feedback3->ToSmi().value());
- MaybeObject feedback4 = callable.vector()->Get(slot4);
+ MaybeObject feedback4 = callable.vector().Get(slot4);
CHECK(feedback4->IsSmi());
CHECK_EQ(BinaryOperationFeedback::kAny, feedback4->ToSmi().value());
}
@@ -990,15 +990,15 @@
Handle<Object> return_val =
callable(arg1, arg2, arg3, arg4).ToHandleChecked();
USE(return_val);
- MaybeObject feedback0 = callable.vector()->Get(slot0);
+ MaybeObject feedback0 = callable.vector().Get(slot0);
CHECK(feedback0->IsSmi());
CHECK_EQ(BinaryOperationFeedback::kSignedSmall, feedback0->ToSmi().value());
- MaybeObject feedback1 = callable.vector()->Get(slot1);
+ MaybeObject feedback1 = callable.vector().Get(slot1);
CHECK(feedback1->IsSmi());
CHECK_EQ(BinaryOperationFeedback::kNumber, feedback1->ToSmi().value());
- MaybeObject feedback2 = callable.vector()->Get(slot2);
+ MaybeObject feedback2 = callable.vector().Get(slot2);
CHECK(feedback2->IsSmi());
CHECK_EQ(BinaryOperationFeedback::kAny, feedback2->ToSmi().value());
}
@@ -1481,7 +1481,7 @@
Handle<Object> return_val = callable(object).ToHandleChecked();
Handle<i::String> expected =
factory->NewStringFromAsciiChecked("prefix_abcdefghij");
- CHECK(i::String::cast(*return_val)->Equals(*expected));
+ CHECK(i::String::cast(*return_val).Equals(*expected));
}
}
@@ -1823,7 +1823,7 @@
CHECK_EQ(return_value->BooleanValue(isolate),
CompareC(comparison, inputs[i], inputs[j]));
if (tester.HasFeedbackMetadata()) {
- MaybeObject feedback = callable.vector()->Get(slot);
+ MaybeObject feedback = callable.vector().Get(slot);
CHECK(feedback->IsSmi());
CHECK_EQ(CompareOperationFeedback::kSignedSmall,
feedback->ToSmi().value());
@@ -1874,7 +1874,7 @@
CHECK_EQ(return_value->BooleanValue(isolate),
CompareC(comparison, inputs[i], inputs[j]));
if (tester.HasFeedbackMetadata()) {
- MaybeObject feedback = callable.vector()->Get(slot);
+ MaybeObject feedback = callable.vector().Get(slot);
CHECK(feedback->IsSmi());
CHECK_EQ(CompareOperationFeedback::kNumber,
feedback->ToSmi().value());
@@ -1919,7 +1919,7 @@
Handle<Object> return_value = callable().ToHandleChecked();
CHECK(return_value->IsBoolean());
if (tester.HasFeedbackMetadata()) {
- MaybeObject feedback = callable.vector()->Get(slot);
+ MaybeObject feedback = callable.vector().Get(slot);
CHECK(feedback->IsSmi());
CHECK_EQ(CompareOperationFeedback::kBigInt,
feedback->ToSmi().value());
@@ -1969,7 +1969,7 @@
CHECK_EQ(return_value->BooleanValue(isolate),
CompareC(comparison, inputs[i], inputs[j]));
if (tester.HasFeedbackMetadata()) {
- MaybeObject feedback = callable.vector()->Get(slot);
+ MaybeObject feedback = callable.vector().Get(slot);
CHECK(feedback->IsSmi());
int const expected_feedback =
Token::IsOrderedRelationalCompareOp(comparison)
@@ -2082,7 +2082,7 @@
CHECK_EQ(return_value->BooleanValue(isolate),
CompareC(comparison, lhs, rhs, true));
if (tester.HasFeedbackMetadata()) {
- MaybeObject feedback = callable.vector()->Get(slot);
+ MaybeObject feedback = callable.vector().Get(slot);
CHECK(feedback->IsSmi());
// Comparison with a number and string collects kAny feedback.
CHECK_EQ(CompareOperationFeedback::kAny,
@@ -5022,14 +5022,14 @@
i::Handle<i::Object> o = v8::Utils::OpenHandle(*v8_compile(source_text));
i::Handle<i::JSFunction> f = i::Handle<i::JSFunction>::cast(o);
- CHECK(f->shared()->HasBytecodeArray());
- i::Code code = f->shared()->GetCode();
+ CHECK(f->shared().HasBytecodeArray());
+ i::Code code = f->shared().GetCode();
i::Handle<i::Code> interpreter_entry_trampoline =
BUILTIN_CODE(isolate, InterpreterEntryTrampoline);
- CHECK(code->IsCode());
- CHECK(code->is_interpreter_trampoline_builtin());
- CHECK_NE(code->address(), interpreter_entry_trampoline->address());
+ CHECK(code.IsCode());
+ CHECK(code.is_interpreter_trampoline_builtin());
+ CHECK_NE(code.address(), interpreter_entry_trampoline->address());
}
#endif // V8_TARGET_ARCH_ARM
@@ -5042,24 +5042,24 @@
Code wide_handler =
interpreter->GetBytecodeHandler(Bytecode::kWide, OperandScale::kSingle);
- CHECK_EQ(wide_handler->builtin_index(), Builtins::kWideHandler);
+ CHECK_EQ(wide_handler.builtin_index(), Builtins::kWideHandler);
Code add_handler =
interpreter->GetBytecodeHandler(Bytecode::kAdd, OperandScale::kSingle);
- CHECK_EQ(add_handler->builtin_index(), Builtins::kAddHandler);
+ CHECK_EQ(add_handler.builtin_index(), Builtins::kAddHandler);
// Test that double-width bytecode handlers deserializer correctly, including
// an illegal bytecode handler since there is no Wide.Wide handler.
Code wide_wide_handler =
interpreter->GetBytecodeHandler(Bytecode::kWide, OperandScale::kDouble);
- CHECK_EQ(wide_wide_handler->builtin_index(), Builtins::kIllegalHandler);
+ CHECK_EQ(wide_wide_handler.builtin_index(), Builtins::kIllegalHandler);
Code add_wide_handler =
interpreter->GetBytecodeHandler(Bytecode::kAdd, OperandScale::kDouble);
- CHECK_EQ(add_wide_handler->builtin_index(), Builtins::kAddWideHandler);
+ CHECK_EQ(add_wide_handler.builtin_index(), Builtins::kAddWideHandler);
}
TEST(InterpreterCollectSourcePositions) {
@@ -5084,7 +5084,7 @@
ByteArray source_position_table = bytecode_array->SourcePositionTable();
CHECK(bytecode_array->HasSourcePositionTable());
- CHECK_GT(source_position_table->length(), 0);
+ CHECK_GT(source_position_table.length(), 0);
}
TEST(InterpreterCollectSourcePositions_StackOverflow) {
@@ -5113,14 +5113,14 @@
// Stack overflowed so source position table can be returned but is empty.
ByteArray source_position_table = bytecode_array->SourcePositionTable();
CHECK(!bytecode_array->HasSourcePositionTable());
- CHECK_EQ(source_position_table->length(), 0);
+ CHECK_EQ(source_position_table.length(), 0);
// Reset the stack limit and try again.
isolate->stack_guard()->SetStackLimit(previous_limit);
Compiler::CollectSourcePositions(isolate, sfi);
source_position_table = bytecode_array->SourcePositionTable();
CHECK(bytecode_array->HasSourcePositionTable());
- CHECK_GT(source_position_table->length(), 0);
+ CHECK_GT(source_position_table.length(), 0);
}
TEST(InterpreterCollectSourcePositions_ThrowFrom1stFrame) {
@@ -5243,7 +5243,7 @@
CHECK(bytecode_array->HasSourcePositionTable());
ByteArray source_position_table = bytecode_array->SourcePositionTable();
- CHECK_GT(source_position_table->length(), 0);
+ CHECK_GT(source_position_table.length(), 0);
}
} // namespace interpreter
diff --git a/test/cctest/interpreter/test-source-positions.cc b/test/cctest/interpreter/test-source-positions.cc
index 04a25c9..61d0ae5 100644
--- a/test/cctest/interpreter/test-source-positions.cc
+++ b/test/cctest/interpreter/test-source-positions.cc
@@ -172,7 +172,7 @@
.ToLocalChecked());
Handle<JSFunction> function =
Handle<JSFunction>::cast(v8::Utils::OpenHandle(*api_function));
- return handle(function->shared()->GetBytecodeArray(), isolate_);
+ return handle(function->shared().GetBytecodeArray(), isolate_);
}
void OptimizedBytecodeSourcePositionTester::SetOptimizationFlags(
diff --git a/test/cctest/parsing/test-parse-decision.cc b/test/cctest/parsing/test-parse-decision.cc
index 72a6cf6..37ce657 100644
--- a/test/cctest/parsing/test-parse-decision.cc
+++ b/test/cctest/parsing/test-parse-decision.cc
@@ -32,12 +32,12 @@
// The API object 'wraps' the compiled top-level function, not the i::Script.
Handle<JSFunction> toplevel_fn = v8::Utils::OpenHandle(*script);
SharedFunctionInfo::ScriptIterator iterator(
- toplevel_fn->GetIsolate(), Script::cast(toplevel_fn->shared()->script()));
+ toplevel_fn->GetIsolate(), Script::cast(toplevel_fn->shared().script()));
for (SharedFunctionInfo shared = iterator.Next(); !shared.is_null();
shared = iterator.Next()) {
- std::unique_ptr<char[]> name = String::cast(shared->Name())->ToCString();
- is_compiled->insert(std::make_pair(name.get(), shared->is_compiled()));
+ std::unique_ptr<char[]> name = String::cast(shared.Name()).ToCString();
+ is_compiled->insert(std::make_pair(name.get(), shared.is_compiled()));
}
}
diff --git a/test/cctest/parsing/test-preparser.cc b/test/cctest/parsing/test-preparser.cc
index ed66c84..e7f222e 100644
--- a/test/cctest/parsing/test-preparser.cc
+++ b/test/cctest/parsing/test-preparser.cc
@@ -704,7 +704,7 @@
CHECK(shared->HasUncompiledDataWithPreparseData());
i::Handle<i::PreparseData> produced_data_on_heap(
- shared->uncompiled_data_with_preparse_data()->preparse_data(),
+ shared->uncompiled_data_with_preparse_data().preparse_data(),
isolate);
// Parse the lazy function using the scope data.
diff --git a/test/cctest/test-accessors.cc b/test/cctest/test-accessors.cc
index 15054b8..63b6a86 100644
--- a/test/cctest/test-accessors.cc
+++ b/test/cctest/test-accessors.cc
@@ -539,8 +539,8 @@
i::StackFrame* frame = iter.frame();
CHECK(i != 0 || (frame->type() == i::StackFrame::EXIT));
i::Code code = frame->LookupCode();
- CHECK(code->IsCode());
- CHECK(code->contains(frame->pc()));
+ CHECK(code.IsCode());
+ CHECK(code.contains(frame->pc()));
iter.Advance();
}
}
diff --git a/test/cctest/test-api-interceptors.cc b/test/cctest/test-api-interceptors.cc
index 4efeb03..d3acb86 100644
--- a/test/cctest/test-api-interceptors.cc
+++ b/test/cctest/test-api-interceptors.cc
@@ -1339,9 +1339,9 @@
v8::Utils::OpenHandle<Object, i::JSReceiver>(context->Global());
CHECK(global_proxy->IsJSGlobalProxy());
i::Handle<i::JSGlobalObject> global(
- i::JSGlobalObject::cast(global_proxy->map()->prototype()),
+ i::JSGlobalObject::cast(global_proxy->map().prototype()),
global_proxy->GetIsolate());
- CHECK(global->map()->has_named_interceptor());
+ CHECK(global->map().has_named_interceptor());
v8::Local<Value> value = CompileRun(
"var f = function() { "
@@ -1403,9 +1403,9 @@
v8::Utils::OpenHandle<Object, i::JSReceiver>(context->Global());
CHECK(global_proxy->IsJSGlobalProxy());
i::Handle<i::JSGlobalObject> global(
- i::JSGlobalObject::cast(global_proxy->map()->prototype()),
+ i::JSGlobalObject::cast(global_proxy->map().prototype()),
global_proxy->GetIsolate());
- CHECK(global->map()->has_named_interceptor());
+ CHECK(global->map().has_named_interceptor());
ExpectInt32(
"(function() {"
diff --git a/test/cctest/test-api.cc b/test/cctest/test-api.cc
index 42b8d6f..5c02242 100644
--- a/test/cctest/test-api.cc
+++ b/test/cctest/test-api.cc
@@ -2585,9 +2585,9 @@
LocalContext env;
v8::Local<v8::Value> res = CompileRun("var a = []; a;");
i::Handle<i::JSReceiver> a(v8::Utils::OpenHandle(v8::Object::Cast(*res)));
- CHECK_EQ(1, a->map()->instance_descriptors()->number_of_descriptors());
+ CHECK_EQ(1, a->map().instance_descriptors().number_of_descriptors());
CompileRun("Object.defineProperty(a, 'length', { writable: false });");
- CHECK_EQ(0, a->map()->instance_descriptors()->number_of_descriptors());
+ CHECK_EQ(0, a->map().instance_descriptors().number_of_descriptors());
// But we should still have an AccessorInfo.
i::Handle<i::String> name(v8::Utils::OpenHandle(*v8_str("length")));
i::LookupIterator it(CcTest::i_isolate(), a, name,
@@ -2768,7 +2768,7 @@
i::Handle<i::JSObject> i_obj =
i::Handle<i::JSObject>::cast(v8::Utils::OpenHandle(*obj));
CHECK_EQ(nof_embedder_fields, obj->InternalFieldCount());
- CHECK_EQ(0, i_obj->map()->GetInObjectProperties());
+ CHECK_EQ(0, i_obj->map().GetInObjectProperties());
// Check writing and reading internal fields.
for (int j = 0; j < nof_embedder_fields; j++) {
CHECK(obj->GetInternalField(j)->IsUndefined());
@@ -2831,25 +2831,25 @@
i::Handle<i::JSObject>::cast(v8::Utils::OpenHandle(*value));
#ifdef VERIFY_HEAP
i_value->HeapObjectVerify(i_isolate);
- i_value->map()->HeapObjectVerify(i_isolate);
- i_value->map()->FindRootMap(i_isolate)->HeapObjectVerify(i_isolate);
+ i_value->map().HeapObjectVerify(i_isolate);
+ i_value->map().FindRootMap(i_isolate).HeapObjectVerify(i_isolate);
#endif
CHECK_EQ(nof_embedder_fields, value->InternalFieldCount());
if (in_object_only) {
- CHECK_LE(nof_properties, i_value->map()->GetInObjectProperties());
+ CHECK_LE(nof_properties, i_value->map().GetInObjectProperties());
} else {
- CHECK_LE(i_value->map()->GetInObjectProperties(), kMaxNofProperties);
+ CHECK_LE(i_value->map().GetInObjectProperties(), kMaxNofProperties);
}
// Make Sure we get the precise property count.
- i_value->map()->FindRootMap(i_isolate)->CompleteInobjectSlackTracking(
+ i_value->map().FindRootMap(i_isolate).CompleteInobjectSlackTracking(
i_isolate);
// TODO(cbruni): fix accounting to make this condition true.
// CHECK_EQ(0, i_value->map()->UnusedPropertyFields());
if (in_object_only) {
- CHECK_EQ(nof_properties, i_value->map()->GetInObjectProperties());
+ CHECK_EQ(nof_properties, i_value->map().GetInObjectProperties());
} else {
- CHECK_LE(i_value->map()->GetInObjectProperties(), kMaxNofProperties);
+ CHECK_LE(i_value->map().GetInObjectProperties(), kMaxNofProperties);
}
}
}
@@ -3096,10 +3096,10 @@
if (set_in_js) {
CompileRun("var m = new Set(); m.add(global);");
i::Object original_hash = i_global_proxy->GetHash();
- CHECK(original_hash->IsSmi());
+ CHECK(original_hash.IsSmi());
hash1 = i::Smi::ToInt(original_hash);
} else {
- hash1 = i_global_proxy->GetOrCreateHash(i_isolate)->value();
+ hash1 = i_global_proxy->GetOrCreateHash(i_isolate).value();
}
// Hash should be retained after being detached.
env->DetachGlobal();
@@ -4247,10 +4247,10 @@
{
i::Handle<i::Object> obj = v8::Utils::OpenHandle(*ext);
- CHECK_EQ(i::HeapObject::cast(*obj)->map(), CcTest::heap()->external_map());
+ CHECK_EQ(i::HeapObject::cast(*obj).map(), CcTest::heap()->external_map());
CHECK(ext->IsExternal());
CHECK(!CompileRun("new Set().add(this.ext)").IsEmpty());
- CHECK_EQ(i::HeapObject::cast(*obj)->map(), CcTest::heap()->external_map());
+ CHECK_EQ(i::HeapObject::cast(*obj).map(), CcTest::heap()->external_map());
CHECK(ext->IsExternal());
}
@@ -8076,8 +8076,8 @@
// Verify that we created an array where the space was reserved up front.
big_array_size =
v8::internal::JSArray::cast(*v8::Utils::OpenHandle(*big_array))
- ->elements()
- ->Size();
+ .elements()
+ .Size();
CHECK_LE(20000, big_array_size);
a->Set(context, v8_str("y"), big_array).FromJust();
big_heap_size = CcTest::heap()->SizeOfObjects();
@@ -11037,7 +11037,7 @@
// compiler downstream.
i::HeapObject heap_object;
CHECK(nexus.GetFeedback().GetHeapObject(&heap_object));
- CHECK(heap_object->IsPropertyCell());
+ CHECK(heap_object.IsPropertyCell());
}
THREADED_TEST(ShadowObjectAndDataPropertyTurbo) {
@@ -11082,7 +11082,7 @@
CHECK_EQ(i::MONOMORPHIC, nexus.ic_state());
i::HeapObject heap_object;
CHECK(nexus.GetFeedback().GetHeapObject(&heap_object));
- CHECK(heap_object->IsPropertyCell());
+ CHECK(heap_object.IsPropertyCell());
}
THREADED_TEST(SetPrototype) {
@@ -12913,9 +12913,9 @@
v8::Utils::OpenHandle(*templ)->constructor());
i::Heap* heap = reinterpret_cast<i::Isolate*>(isolate)->heap();
i::CallHandlerInfo handler_info =
- i::CallHandlerInfo::cast(cons->GetInstanceCallHandler());
- CHECK(!handler_info->IsSideEffectFreeCallHandlerInfo());
- handler_info->set_map(
+ i::CallHandlerInfo::cast(cons.GetInstanceCallHandler());
+ CHECK(!handler_info.IsSideEffectFreeCallHandlerInfo());
+ handler_info.set_map(
i::ReadOnlyRoots(heap).side_effect_free_call_handler_info_map());
CHECK(v8::debug::EvaluateGlobal(isolate, v8_str("obj()"), true).IsEmpty());
}
@@ -13613,10 +13613,10 @@
i::HeapIterator it(CcTest::heap());
for (i::HeapObject object = it.next(); !object.is_null();
object = it.next()) {
- if (object->IsJSGlobalObject()) {
+ if (object.IsJSGlobalObject()) {
i::JSGlobalObject g = i::JSGlobalObject::cast(object);
// Skip dummy global object.
- if (g->global_dictionary()->NumberOfElements() != 0) {
+ if (g.global_dictionary().NumberOfElements() != 0) {
count++;
}
}
@@ -14925,22 +14925,22 @@
i::Isolate* isolate = CcTest::i_isolate();
CHECK(i::StringShape(string).IsExternal());
i::ReadOnlyRoots roots(CcTest::heap());
- if (string->IsOneByteRepresentation()) {
+ if (string.IsOneByteRepresentation()) {
// Check old map is not internalized or long.
- CHECK(string->map() == roots.external_one_byte_string_map());
+ CHECK(string.map() == roots.external_one_byte_string_map());
// Morph external string to be TwoByte string.
- string->set_map(roots.external_string_map());
+ string.set_map(roots.external_string_map());
i::ExternalTwoByteString morphed = i::ExternalTwoByteString::cast(string);
- CcTest::heap()->UpdateExternalString(morphed, string->length(), 0);
- morphed->SetResource(isolate, uc16_resource);
+ CcTest::heap()->UpdateExternalString(morphed, string.length(), 0);
+ morphed.SetResource(isolate, uc16_resource);
} else {
// Check old map is not internalized or long.
- CHECK(string->map() == roots.external_string_map());
+ CHECK(string.map() == roots.external_string_map());
// Morph external string to be one-byte string.
- string->set_map(roots.external_one_byte_string_map());
+ string.set_map(roots.external_one_byte_string_map());
i::ExternalOneByteString morphed = i::ExternalOneByteString::cast(string);
- CcTest::heap()->UpdateExternalString(morphed, string->length(), 0);
- morphed->SetResource(isolate, one_byte_resource);
+ CcTest::heap()->UpdateExternalString(morphed, string.length(), 0);
+ morphed.SetResource(isolate, one_byte_resource);
}
}
@@ -15028,14 +15028,14 @@
.FromJust());
// This avoids the GC from trying to free a stack allocated resource.
- if (ilhs->IsExternalOneByteString())
- i::ExternalOneByteString::cast(ilhs)->SetResource(i_isolate, nullptr);
+ if (ilhs.IsExternalOneByteString())
+ i::ExternalOneByteString::cast(ilhs).SetResource(i_isolate, nullptr);
else
- i::ExternalTwoByteString::cast(ilhs)->SetResource(i_isolate, nullptr);
- if (irhs->IsExternalOneByteString())
- i::ExternalOneByteString::cast(irhs)->SetResource(i_isolate, nullptr);
+ i::ExternalTwoByteString::cast(ilhs).SetResource(i_isolate, nullptr);
+ if (irhs.IsExternalOneByteString())
+ i::ExternalOneByteString::cast(irhs).SetResource(i_isolate, nullptr);
else
- i::ExternalTwoByteString::cast(irhs)->SetResource(i_isolate, nullptr);
+ i::ExternalTwoByteString::cast(irhs).SetResource(i_isolate, nullptr);
}
i::DeleteArray(two_byte_string);
}
@@ -16000,7 +16000,7 @@
i::Handle<FixedTypedArrayClass> fixed_array(
FixedTypedArrayClass::cast(jsobj->elements()), isolate);
CHECK_EQ(FixedTypedArrayClass::kInstanceType,
- fixed_array->map()->instance_type());
+ fixed_array->map().instance_type());
CHECK_EQ(kElementCount, jsobj->length());
CHECK_EQ(kElementCount, fixed_array->number_of_elements_onheap_only());
CcTest::CollectAllGarbage();
@@ -16449,7 +16449,7 @@
v8::Local<v8::Script> lscript = CompileWithOrigin(source, "test", false);
i::Handle<i::SharedFunctionInfo> obj = i::Handle<i::SharedFunctionInfo>::cast(
v8::Utils::OpenHandle(*lscript->GetUnboundScript()));
- CHECK(obj->script()->IsScript());
+ CHECK(obj->script().IsScript());
i::Handle<i::Script> script(i::Script::cast(obj->script()), i_isolate);
int levels[] = {
@@ -18986,8 +18986,8 @@
// happen in real pages.
CHECK(string->IsOneByteRepresentation());
i::ConsString cons = i::ConsString::cast(*string);
- CHECK_EQ(0, cons->second()->length());
- CHECK(cons->first()->IsTwoByteRepresentation());
+ CHECK_EQ(0, cons.second().length());
+ CHECK(cons.first().IsTwoByteRepresentation());
}
// Check that some string operations work.
@@ -20437,11 +20437,11 @@
}
static int CountLiveMapsInMapCache(i::Context context) {
- i::WeakFixedArray map_cache = i::WeakFixedArray::cast(context->map_cache());
- int length = map_cache->length();
+ i::WeakFixedArray map_cache = i::WeakFixedArray::cast(context.map_cache());
+ int length = map_cache.length();
int count = 0;
for (int i = 0; i < length; i++) {
- if (map_cache->Get(i)->IsWeak()) count++;
+ if (map_cache.Get(i)->IsWeak()) count++;
}
return count;
}
@@ -21159,7 +21159,7 @@
Local<Value> object = CompileRun(source);
i::Handle<i::JSObject> array =
i::Handle<i::JSObject>::cast(v8::Utils::OpenHandle(*object.As<Object>()));
- CHECK_EQ(expected, array->elements()->IsCowArray());
+ CHECK_EQ(expected, array->elements().IsCowArray());
}
} // namespace
@@ -22601,9 +22601,9 @@
auto serial_number =
i::Smi::cast(i::Handle<i::JSFunction>::cast(v8::Utils::OpenHandle(*func))
->shared()
- ->get_api_func_data()
- ->serial_number())
- ->value();
+ .get_api_func_data()
+ .serial_number())
+ .value();
CHECK_EQ(i::FunctionTemplateInfo::kInvalidSerialNumber, serial_number);
// Verify that each Function::New creates a new function instance
@@ -23812,7 +23812,7 @@
i::Handle<i::SharedFunctionInfo> obj = i::Handle<i::SharedFunctionInfo>::cast(
v8::Utils::OpenHandle(*script->GetUnboundScript()));
- CHECK(obj->script()->IsScript());
+ CHECK(obj->script().IsScript());
i::Handle<i::Script> script1(i::Script::cast(obj->script()), i_isolate);
@@ -27106,8 +27106,7 @@
->Set(env.local(), v8_str("func"), v8::Utils::ToLocal(func))
.FromJust());
Handle<JSArrayBuffer> memory(
- r.builder().instance_object()->memory_object()->array_buffer(),
- i_isolate);
+ r.builder().instance_object()->memory_object().array_buffer(), i_isolate);
CHECK(env->Global()
->Set(env.local(), v8_str("sab"), v8::Utils::ToLocal(memory))
.FromJust());
@@ -27143,8 +27142,7 @@
->Set(env.local(), v8_str("func"), v8::Utils::ToLocal(func))
.FromJust());
Handle<JSArrayBuffer> memory(
- r.builder().instance_object()->memory_object()->array_buffer(),
- i_isolate);
+ r.builder().instance_object()->memory_object().array_buffer(), i_isolate);
CHECK(env->Global()
->Set(env.local(), v8_str("sab"), v8::Utils::ToLocal(memory))
.FromJust());
@@ -27353,8 +27351,8 @@
for (int id = 0; id < i::Builtins::builtin_count; id++) {
if (!i::Builtins::IsIsolateIndependent(id)) continue;
i::Code builtin = i_isolate->builtins()->builtin(id);
- i::Address start = builtin->InstructionStart();
- i::Address end = start + builtin->InstructionSize();
+ i::Address start = builtin.InstructionStart();
+ i::Address end = start + builtin.InstructionSize();
i::Address builtins_start =
reinterpret_cast<i::Address>(builtins_range.start);
@@ -27368,9 +27366,8 @@
v8::JSEntryStub js_entry_stub = unwind_state.js_entry_stub;
- CHECK_EQ(
- i_isolate->heap()->builtin(i::Builtins::kJSEntry)->InstructionStart(),
- reinterpret_cast<i::Address>(js_entry_stub.code.start));
+ CHECK_EQ(i_isolate->heap()->builtin(i::Builtins::kJSEntry).InstructionStart(),
+ reinterpret_cast<i::Address>(js_entry_stub.code.start));
}
TEST(MicrotaskContextShouldBeNativeContext) {
diff --git a/test/cctest/test-api.h b/test/cctest/test-api.h
index b091ce4..5de611f 100644
--- a/test/cctest/test-api.h
+++ b/test/cctest/test-api.h
@@ -19,9 +19,9 @@
CHECK_EQ(CcTest::isolate(), t.GetIsolate());
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(t.GetIsolate());
CHECK_EQ(t.GetIsolate(), rv.GetIsolate());
- CHECK((*o)->IsTheHole(isolate) || (*o)->IsUndefined(isolate));
+ CHECK((*o).IsTheHole(isolate) || (*o).IsUndefined(isolate));
// Verify reset
- bool is_runtime = (*o)->IsTheHole(isolate);
+ bool is_runtime = (*o).IsTheHole(isolate);
if (is_runtime) {
CHECK(rv.Get()->IsUndefined());
} else {
@@ -29,10 +29,10 @@
CHECK_EQ(*v, *o);
}
rv.Set(true);
- CHECK(!(*o)->IsTheHole(isolate) && !(*o)->IsUndefined(isolate));
+ CHECK(!(*o).IsTheHole(isolate) && !(*o).IsUndefined(isolate));
rv.Set(v8::Local<v8::Object>());
- CHECK((*o)->IsTheHole(isolate) || (*o)->IsUndefined(isolate));
- CHECK_EQ(is_runtime, (*o)->IsTheHole(isolate));
+ CHECK((*o).IsTheHole(isolate) || (*o).IsUndefined(isolate));
+ CHECK_EQ(is_runtime, (*o).IsTheHole(isolate));
// If CPU profiler is active check that when API callback is invoked
// VMState is set to EXTERNAL.
if (isolate->is_profiling()) {
diff --git a/test/cctest/test-assembler-mips.cc b/test/cctest/test-assembler-mips.cc
index adc5da8..23b61db 100644
--- a/test/cctest/test-assembler-mips.cc
+++ b/test/cctest/test-assembler-mips.cc
@@ -3224,7 +3224,7 @@
for (int i = 0; i < kNumCases; ++i) {
__ bind(&labels[i]);
obj = *values[i];
- imm32 = obj->ptr();
+ imm32 = obj.ptr();
__ lui(v0, (imm32 >> 16) & 0xFFFF);
__ ori(v0, v0, imm32 & 0xFFFF);
__ b(&done);
diff --git a/test/cctest/test-assembler-mips64.cc b/test/cctest/test-assembler-mips64.cc
index ff0f2d2..b1753c5 100644
--- a/test/cctest/test-assembler-mips64.cc
+++ b/test/cctest/test-assembler-mips64.cc
@@ -3351,7 +3351,7 @@
for (int i = 0; i < kNumCases; ++i) {
__ bind(&labels[i]);
obj = *values[i];
- imm64 = obj->ptr();
+ imm64 = obj.ptr();
__ lui(v0, (imm64 >> 32) & kImm16Mask);
__ ori(v0, v0, (imm64 >> 16) & kImm16Mask);
__ dsll(v0, v0, 16);
diff --git a/test/cctest/test-code-stub-assembler.cc b/test/cctest/test-code-stub-assembler.cc
index 558e3a6..e83c63a 100644
--- a/test/cctest/test-code-stub-assembler.cc
+++ b/test/cctest/test-code-stub-assembler.cc
@@ -922,7 +922,7 @@
CHECK(root_map->raw_transitions()
->GetHeapObjectAssumeStrong()
- ->IsTransitionArray());
+ .IsTransitionArray());
Handle<TransitionArray> transitions(
TransitionArray::cast(
root_map->raw_transitions()->GetHeapObjectAssumeStrong()),
@@ -1073,9 +1073,9 @@
Handle<Map> map = Map::Create(isolate, inobject_properties);
Handle<JSObject> object = factory->NewJSObjectFromMap(map);
AddProperties(object, names, arraysize(names));
- CHECK_EQ(JS_OBJECT_TYPE, object->map()->instance_type());
- CHECK_EQ(inobject_properties, object->map()->GetInObjectProperties());
- CHECK(!object->map()->is_dictionary_map());
+ CHECK_EQ(JS_OBJECT_TYPE, object->map().instance_type());
+ CHECK_EQ(inobject_properties, object->map().GetInObjectProperties());
+ CHECK(!object->map().is_dictionary_map());
objects.push_back(object);
}
@@ -1085,9 +1085,9 @@
Handle<Map> map = Map::Create(isolate, inobject_properties);
Handle<JSObject> object = factory->NewJSObjectFromMap(map);
AddProperties(object, names, arraysize(names));
- CHECK_EQ(JS_OBJECT_TYPE, object->map()->instance_type());
- CHECK_EQ(inobject_properties, object->map()->GetInObjectProperties());
- CHECK(!object->map()->is_dictionary_map());
+ CHECK_EQ(JS_OBJECT_TYPE, object->map().instance_type());
+ CHECK_EQ(inobject_properties, object->map().GetInObjectProperties());
+ CHECK(!object->map().is_dictionary_map());
objects.push_back(object);
}
@@ -1097,9 +1097,9 @@
Handle<Map> map = Map::Create(isolate, inobject_properties);
Handle<JSObject> object = factory->NewJSObjectFromMap(map);
AddProperties(object, names, arraysize(names));
- CHECK_EQ(JS_OBJECT_TYPE, object->map()->instance_type());
- CHECK_EQ(inobject_properties, object->map()->GetInObjectProperties());
- CHECK(!object->map()->is_dictionary_map());
+ CHECK_EQ(JS_OBJECT_TYPE, object->map().instance_type());
+ CHECK_EQ(inobject_properties, object->map().GetInObjectProperties());
+ CHECK(!object->map().is_dictionary_map());
objects.push_back(object);
}
@@ -1116,8 +1116,8 @@
LanguageMode::kSloppy)
.FromJust());
- CHECK_EQ(JS_OBJECT_TYPE, object->map()->instance_type());
- CHECK(object->map()->is_dictionary_map());
+ CHECK_EQ(JS_OBJECT_TYPE, object->map().instance_type());
+ CHECK(object->map().is_dictionary_map());
objects.push_back(object);
}
@@ -1126,10 +1126,10 @@
Handle<JSFunction> function =
factory->NewFunctionForTest(factory->empty_string());
JSFunction::EnsureHasInitialMap(function);
- function->initial_map()->set_instance_type(JS_GLOBAL_OBJECT_TYPE);
- function->initial_map()->set_is_prototype_map(true);
- function->initial_map()->set_is_dictionary_map(true);
- function->initial_map()->set_may_have_interesting_symbols(true);
+ function->initial_map().set_instance_type(JS_GLOBAL_OBJECT_TYPE);
+ function->initial_map().set_is_prototype_map(true);
+ function->initial_map().set_is_dictionary_map(true);
+ function->initial_map().set_may_have_interesting_symbols(true);
Handle<JSObject> object = factory->NewJSGlobalObject(function);
AddProperties(object, names, arraysize(names));
@@ -1138,8 +1138,8 @@
LanguageMode::kSloppy)
.FromJust());
- CHECK_EQ(JS_GLOBAL_OBJECT_TYPE, object->map()->instance_type());
- CHECK(object->map()->is_dictionary_map());
+ CHECK_EQ(JS_GLOBAL_OBJECT_TYPE, object->map().instance_type());
+ CHECK(object->map().is_dictionary_map());
objects.push_back(object);
}
@@ -1177,13 +1177,13 @@
Handle<JSFunction> function =
factory->NewFunctionForTest(factory->empty_string());
Handle<JSProxy> object = factory->NewJSProxy(function, objects[0]);
- CHECK_EQ(JS_PROXY_TYPE, object->map()->instance_type());
+ CHECK_EQ(JS_PROXY_TYPE, object->map().instance_type());
ft.CheckTrue(object, names[0], expect_bailout);
}
{
Handle<JSObject> object = isolate->global_proxy();
- CHECK_EQ(JS_GLOBAL_PROXY_TYPE, object->map()->instance_type());
+ CHECK_EQ(JS_GLOBAL_PROXY_TYPE, object->map().instance_type());
ft.CheckTrue(object, names[0], expect_bailout);
}
}
@@ -1277,9 +1277,9 @@
Handle<JSObject> object = factory->NewJSObjectFromMap(map);
AddProperties(object, names, arraysize(names), values, arraysize(values),
rand_gen.NextInt());
- CHECK_EQ(JS_OBJECT_TYPE, object->map()->instance_type());
- CHECK_EQ(inobject_properties, object->map()->GetInObjectProperties());
- CHECK(!object->map()->is_dictionary_map());
+ CHECK_EQ(JS_OBJECT_TYPE, object->map().instance_type());
+ CHECK_EQ(inobject_properties, object->map().GetInObjectProperties());
+ CHECK(!object->map().is_dictionary_map());
objects.push_back(object);
}
@@ -1290,9 +1290,9 @@
Handle<JSObject> object = factory->NewJSObjectFromMap(map);
AddProperties(object, names, arraysize(names), values, arraysize(values),
rand_gen.NextInt());
- CHECK_EQ(JS_OBJECT_TYPE, object->map()->instance_type());
- CHECK_EQ(inobject_properties, object->map()->GetInObjectProperties());
- CHECK(!object->map()->is_dictionary_map());
+ CHECK_EQ(JS_OBJECT_TYPE, object->map().instance_type());
+ CHECK_EQ(inobject_properties, object->map().GetInObjectProperties());
+ CHECK(!object->map().is_dictionary_map());
objects.push_back(object);
}
@@ -1303,9 +1303,9 @@
Handle<JSObject> object = factory->NewJSObjectFromMap(map);
AddProperties(object, names, arraysize(names), values, arraysize(values),
rand_gen.NextInt());
- CHECK_EQ(JS_OBJECT_TYPE, object->map()->instance_type());
- CHECK_EQ(inobject_properties, object->map()->GetInObjectProperties());
- CHECK(!object->map()->is_dictionary_map());
+ CHECK_EQ(JS_OBJECT_TYPE, object->map().instance_type());
+ CHECK_EQ(inobject_properties, object->map().GetInObjectProperties());
+ CHECK(!object->map().is_dictionary_map());
objects.push_back(object);
}
@@ -1323,8 +1323,8 @@
LanguageMode::kSloppy)
.FromJust());
- CHECK_EQ(JS_OBJECT_TYPE, object->map()->instance_type());
- CHECK(object->map()->is_dictionary_map());
+ CHECK_EQ(JS_OBJECT_TYPE, object->map().instance_type());
+ CHECK(object->map().is_dictionary_map());
objects.push_back(object);
}
@@ -1339,8 +1339,8 @@
LanguageMode::kSloppy)
.FromJust());
- CHECK_EQ(JS_GLOBAL_OBJECT_TYPE, object->map()->instance_type());
- CHECK(object->map()->is_dictionary_map());
+ CHECK_EQ(JS_GLOBAL_OBJECT_TYPE, object->map().instance_type());
+ CHECK(object->map().is_dictionary_map());
objects.push_back(object);
}
@@ -1385,7 +1385,7 @@
Handle<JSFunction> function =
factory->NewFunctionForTest(factory->empty_string());
Handle<JSProxy> object = factory->NewJSProxy(function, objects[0]);
- CHECK_EQ(JS_PROXY_TYPE, object->map()->instance_type());
+ CHECK_EQ(JS_PROXY_TYPE, object->map().instance_type());
Handle<Object> value = ft.Call(object, names[0]).ToHandleChecked();
// Proxies are not supported yet.
CHECK_EQ(*bailout_symbol, *value);
@@ -1393,7 +1393,7 @@
{
Handle<JSObject> object = isolate->global_proxy();
- CHECK_EQ(JS_GLOBAL_PROXY_TYPE, object->map()->instance_type());
+ CHECK_EQ(JS_GLOBAL_PROXY_TYPE, object->map().instance_type());
// Global proxies are not supported yet.
Handle<Object> value = ft.Call(object, names[0]).ToHandleChecked();
CHECK_EQ(*bailout_symbol, *value);
@@ -1493,7 +1493,7 @@
Handle<JSArray> object = factory->NewJSArray(0, PACKED_SMI_ELEMENTS);
AddElement(object, 0, smi0);
AddElement(object, 1, smi0);
- CHECK_EQ(PACKED_SMI_ELEMENTS, object->map()->elements_kind());
+ CHECK_EQ(PACKED_SMI_ELEMENTS, object->map().elements_kind());
CHECK_FOUND(object, 0);
CHECK_FOUND(object, 1);
@@ -1506,7 +1506,7 @@
Handle<JSArray> object = factory->NewJSArray(0, HOLEY_SMI_ELEMENTS);
AddElement(object, 0, smi0);
AddElement(object, 13, smi0);
- CHECK_EQ(HOLEY_SMI_ELEMENTS, object->map()->elements_kind());
+ CHECK_EQ(HOLEY_SMI_ELEMENTS, object->map().elements_kind());
CHECK_FOUND(object, 0);
CHECK_NOT_FOUND(object, 1);
@@ -1519,7 +1519,7 @@
Handle<JSArray> object = factory->NewJSArray(0, PACKED_ELEMENTS);
AddElement(object, 0, smi0);
AddElement(object, 1, smi0);
- CHECK_EQ(PACKED_ELEMENTS, object->map()->elements_kind());
+ CHECK_EQ(PACKED_ELEMENTS, object->map().elements_kind());
CHECK_FOUND(object, 0);
CHECK_FOUND(object, 1);
@@ -1532,7 +1532,7 @@
Handle<JSArray> object = factory->NewJSArray(0, HOLEY_ELEMENTS);
AddElement(object, 0, smi0);
AddElement(object, 13, smi0);
- CHECK_EQ(HOLEY_ELEMENTS, object->map()->elements_kind());
+ CHECK_EQ(HOLEY_ELEMENTS, object->map().elements_kind());
CHECK_FOUND(object, 0);
CHECK_NOT_FOUND(object, 1);
@@ -1545,7 +1545,7 @@
Handle<JSTypedArray> object = factory->NewJSTypedArray(INT32_ELEMENTS, 2);
Local<v8::ArrayBuffer> buffer = Utils::ToLocal(object->GetBuffer());
- CHECK_EQ(INT32_ELEMENTS, object->map()->elements_kind());
+ CHECK_EQ(INT32_ELEMENTS, object->map().elements_kind());
CHECK_FOUND(object, 0);
CHECK_FOUND(object, 1);
@@ -1571,7 +1571,7 @@
Handle<String> str = factory->InternalizeUtf8String("ab");
Handle<JSValue>::cast(object)->set_value(*str);
AddElement(object, 13, smi0);
- CHECK_EQ(FAST_STRING_WRAPPER_ELEMENTS, object->map()->elements_kind());
+ CHECK_EQ(FAST_STRING_WRAPPER_ELEMENTS, object->map().elements_kind());
CHECK_FOUND(object, 0);
CHECK_FOUND(object, 1);
@@ -1587,7 +1587,7 @@
Handle<JSValue>::cast(object)->set_value(*str);
AddElement(object, 13, smi0);
JSObject::NormalizeElements(object);
- CHECK_EQ(SLOW_STRING_WRAPPER_ELEMENTS, object->map()->elements_kind());
+ CHECK_EQ(SLOW_STRING_WRAPPER_ELEMENTS, object->map().elements_kind());
CHECK_FOUND(object, 0);
CHECK_FOUND(object, 1);
@@ -1618,19 +1618,19 @@
Handle<JSFunction> function =
factory->NewFunctionForTest(factory->empty_string());
Handle<JSProxy> object = factory->NewJSProxy(function, handler);
- CHECK_EQ(JS_PROXY_TYPE, object->map()->instance_type());
+ CHECK_EQ(JS_PROXY_TYPE, object->map().instance_type());
ft.CheckTrue(object, smi0, expect_bailout);
}
{
Handle<JSObject> object = isolate->global_object();
- CHECK_EQ(JS_GLOBAL_OBJECT_TYPE, object->map()->instance_type());
+ CHECK_EQ(JS_GLOBAL_OBJECT_TYPE, object->map().instance_type());
ft.CheckTrue(object, smi0, expect_bailout);
}
{
Handle<JSObject> object = isolate->global_proxy();
- CHECK_EQ(JS_GLOBAL_PROXY_TYPE, object->map()->instance_type());
+ CHECK_EQ(JS_GLOBAL_PROXY_TYPE, object->map().instance_type());
ft.CheckTrue(object, smi0, expect_bailout);
}
}
@@ -2370,7 +2370,7 @@
CHECK_EQ(isolate->native_context()->scope_info(), context_js->scope_info());
CHECK_EQ(ReadOnlyRoots(isolate).the_hole_value(), context_js->extension());
CHECK_EQ(*isolate->native_context(), context_js->native_context());
- CHECK(context_js->get(PromiseBuiltins::kPromiseSlot)->IsJSPromise());
+ CHECK(context_js->get(PromiseBuiltins::kPromiseSlot).IsJSPromise());
CHECK_EQ(ReadOnlyRoots(isolate).false_value(),
context_js->get(PromiseBuiltins::kDebugEventSlot));
}
@@ -2401,8 +2401,8 @@
ft.Call(isolate->factory()->undefined_value()).ToHandleChecked();
CHECK(result_obj->IsFixedArray());
Handle<FixedArray> result_arr = Handle<FixedArray>::cast(result_obj);
- CHECK(result_arr->get(0)->IsJSFunction());
- CHECK(result_arr->get(1)->IsJSFunction());
+ CHECK(result_arr->get(0).IsJSFunction());
+ CHECK(result_arr->get(1).IsJSFunction());
}
TEST(NewElementsCapacity) {
@@ -2535,7 +2535,7 @@
CHECK_EQ(ReadOnlyRoots(isolate).the_hole_value(), context_js->extension());
CHECK_EQ(*isolate->native_context(), context_js->native_context());
CHECK(
- context_js->get(PromiseBuiltins::kCapabilitySlot)->IsPromiseCapability());
+ context_js->get(PromiseBuiltins::kCapabilitySlot).IsPromiseCapability());
}
TEST(NewPromiseCapability) {
@@ -2565,13 +2565,13 @@
Handle<PromiseCapability> result =
Handle<PromiseCapability>::cast(result_obj);
- CHECK(result->promise()->IsJSPromise());
- CHECK(result->resolve()->IsJSFunction());
- CHECK(result->reject()->IsJSFunction());
+ CHECK(result->promise().IsJSPromise());
+ CHECK(result->resolve().IsJSFunction());
+ CHECK(result->reject().IsJSFunction());
CHECK_EQ(*isolate->promise_capability_default_reject_shared_fun(),
- JSFunction::cast(result->reject())->shared());
+ JSFunction::cast(result->reject()).shared());
CHECK_EQ(*isolate->promise_capability_default_resolve_shared_fun(),
- JSFunction::cast(result->resolve())->shared());
+ JSFunction::cast(result->resolve()).shared());
Handle<JSFunction> callbacks[] = {
handle(JSFunction::cast(result->resolve()), isolate),
@@ -2618,11 +2618,11 @@
Handle<PromiseCapability> result =
Handle<PromiseCapability>::cast(result_obj);
- CHECK(result->promise()->IsJSObject());
+ CHECK(result->promise().IsJSObject());
Handle<JSObject> promise(JSObject::cast(result->promise()), isolate);
CHECK_EQ(constructor_fn->prototype_or_initial_map(), promise->map());
- CHECK(result->resolve()->IsJSFunction());
- CHECK(result->reject()->IsJSFunction());
+ CHECK(result->resolve().IsJSFunction());
+ CHECK(result->reject().IsJSFunction());
Handle<String> resolved_str =
isolate->factory()->NewStringFromAsciiChecked("resolvedStr");
@@ -3100,7 +3100,7 @@
Handle<FixedArray> source(isolate->factory()->empty_fixed_array());
Handle<Object> result_raw = ft.Call(source).ToHandleChecked();
FixedArray result(FixedArray::cast(*result_raw));
- CHECK_EQ(0, result->length());
+ CHECK_EQ(0, result.length());
CHECK_EQ(*(isolate->factory()->empty_fixed_array()), result);
}
@@ -3118,12 +3118,12 @@
source->set(1, Smi::FromInt(1234));
Handle<Object> result_raw = ft.Call(source).ToHandleChecked();
FixedArray result(FixedArray::cast(*result_raw));
- CHECK_EQ(5, result->length());
- CHECK(result->get(0)->IsTheHole(isolate));
- CHECK_EQ(Smi::cast(result->get(1))->value(), 1234);
- CHECK(result->get(2)->IsTheHole(isolate));
- CHECK(result->get(3)->IsTheHole(isolate));
- CHECK(result->get(4)->IsTheHole(isolate));
+ CHECK_EQ(5, result.length());
+ CHECK(result.get(0).IsTheHole(isolate));
+ CHECK_EQ(Smi::cast(result.get(1)).value(), 1234);
+ CHECK(result.get(2).IsTheHole(isolate));
+ CHECK(result.get(3).IsTheHole(isolate));
+ CHECK(result.get(4).IsTheHole(isolate));
}
TEST(CloneFixedArrayCOW) {
@@ -3164,12 +3164,12 @@
Handle<Object> result_raw = ft.Call(source).ToHandleChecked();
FixedArray result(FixedArray::cast(*result_raw));
CHECK_NE(*source, result);
- CHECK_EQ(5, result->length());
- CHECK(result->get(0)->IsTheHole(isolate));
- CHECK_EQ(Smi::cast(result->get(1))->value(), 1234);
- CHECK(result->get(2)->IsTheHole(isolate));
- CHECK(result->get(3)->IsTheHole(isolate));
- CHECK(result->get(4)->IsTheHole(isolate));
+ CHECK_EQ(5, result.length());
+ CHECK(result.get(0).IsTheHole(isolate));
+ CHECK_EQ(Smi::cast(result.get(1)).value(), 1234);
+ CHECK(result.get(2).IsTheHole(isolate));
+ CHECK(result.get(3).IsTheHole(isolate));
+ CHECK(result.get(4).IsTheHole(isolate));
}
TEST(ExtractFixedArraySimple) {
@@ -3194,9 +3194,9 @@
Handle<Smi>(Smi::FromInt(2), isolate))
.ToHandleChecked();
FixedArray result(FixedArray::cast(*result_raw));
- CHECK_EQ(2, result->length());
- CHECK_EQ(Smi::cast(result->get(0))->value(), 1234);
- CHECK(result->get(1)->IsTheHole(isolate));
+ CHECK_EQ(2, result.length());
+ CHECK_EQ(Smi::cast(result.get(0)).value(), 1234);
+ CHECK(result.get(1).IsTheHole(isolate));
}
TEST(ExtractFixedArraySimpleSmiConstant) {
@@ -3218,9 +3218,9 @@
source->set(1, Smi::FromInt(1234));
Handle<Object> result_raw = ft.Call(source).ToHandleChecked();
FixedArray result(FixedArray::cast(*result_raw));
- CHECK_EQ(2, result->length());
- CHECK_EQ(Smi::cast(result->get(0))->value(), 1234);
- CHECK(result->get(1)->IsTheHole(isolate));
+ CHECK_EQ(2, result.length());
+ CHECK_EQ(Smi::cast(result.get(0)).value(), 1234);
+ CHECK(result.get(1).IsTheHole(isolate));
}
TEST(ExtractFixedArraySimpleIntPtrConstant) {
@@ -3242,9 +3242,9 @@
source->set(1, Smi::FromInt(1234));
Handle<Object> result_raw = ft.Call(source).ToHandleChecked();
FixedArray result(FixedArray::cast(*result_raw));
- CHECK_EQ(2, result->length());
- CHECK_EQ(Smi::cast(result->get(0))->value(), 1234);
- CHECK(result->get(1)->IsTheHole(isolate));
+ CHECK_EQ(2, result.length());
+ CHECK_EQ(Smi::cast(result.get(0)).value(), 1234);
+ CHECK(result.get(1).IsTheHole(isolate));
}
TEST(ExtractFixedArraySimpleIntPtrConstantNoDoubles) {
@@ -3264,9 +3264,9 @@
source->set(1, Smi::FromInt(1234));
Handle<Object> result_raw = ft.Call(source).ToHandleChecked();
FixedArray result(FixedArray::cast(*result_raw));
- CHECK_EQ(2, result->length());
- CHECK_EQ(Smi::cast(result->get(0))->value(), 1234);
- CHECK(result->get(1)->IsTheHole(isolate));
+ CHECK_EQ(2, result.length());
+ CHECK_EQ(Smi::cast(result.get(0)).value(), 1234);
+ CHECK(result.get(1).IsTheHole(isolate));
}
TEST(ExtractFixedArraySimpleIntPtrParameters) {
@@ -3288,9 +3288,9 @@
Handle<Smi>(Smi::FromInt(2), isolate))
.ToHandleChecked();
FixedArray result(FixedArray::cast(*result_raw));
- CHECK_EQ(2, result->length());
- CHECK_EQ(Smi::cast(result->get(0))->value(), 1234);
- CHECK(result->get(1)->IsTheHole(isolate));
+ CHECK_EQ(2, result.length());
+ CHECK_EQ(Smi::cast(result.get(0)).value(), 1234);
+ CHECK(result.get(1).IsTheHole(isolate));
Handle<FixedDoubleArray> source_double = Handle<FixedDoubleArray>::cast(
isolate->factory()->NewFixedDoubleArray(5));
@@ -3304,9 +3304,9 @@
Handle<Smi>(Smi::FromInt(2), isolate))
.ToHandleChecked();
FixedDoubleArray double_result = FixedDoubleArray::cast(*double_result_raw);
- CHECK_EQ(2, double_result->length());
- CHECK_EQ(double_result->get_scalar(0), 11);
- CHECK_EQ(double_result->get_scalar(1), 12);
+ CHECK_EQ(2, double_result.length());
+ CHECK_EQ(double_result.get_scalar(0), 11);
+ CHECK_EQ(double_result.get_scalar(1), 12);
}
TEST(SingleInputPhiElimination) {
@@ -3427,38 +3427,38 @@
(*Handle<Smi>::cast(
ft.Call(Handle<Smi>(Smi::FromInt(PACKED_DOUBLE_ELEMENTS), isolate))
.ToHandleChecked()))
- ->value(),
+ .value(),
1);
CHECK_EQ(
(*Handle<Smi>::cast(
ft.Call(Handle<Smi>(Smi::FromInt(HOLEY_DOUBLE_ELEMENTS), isolate))
.ToHandleChecked()))
- ->value(),
+ .value(),
1);
CHECK_EQ((*Handle<Smi>::cast(
ft.Call(Handle<Smi>(Smi::FromInt(HOLEY_ELEMENTS), isolate))
.ToHandleChecked()))
- ->value(),
+ .value(),
0);
CHECK_EQ((*Handle<Smi>::cast(
ft.Call(Handle<Smi>(Smi::FromInt(PACKED_ELEMENTS), isolate))
.ToHandleChecked()))
- ->value(),
+ .value(),
0);
CHECK_EQ((*Handle<Smi>::cast(
ft.Call(Handle<Smi>(Smi::FromInt(PACKED_SMI_ELEMENTS), isolate))
.ToHandleChecked()))
- ->value(),
+ .value(),
0);
CHECK_EQ((*Handle<Smi>::cast(
ft.Call(Handle<Smi>(Smi::FromInt(HOLEY_SMI_ELEMENTS), isolate))
.ToHandleChecked()))
- ->value(),
+ .value(),
0);
CHECK_EQ((*Handle<Smi>::cast(
ft.Call(Handle<Smi>(Smi::FromInt(DICTIONARY_ELEMENTS), isolate))
.ToHandleChecked()))
- ->value(),
+ .value(),
0);
}
diff --git a/test/cctest/test-compiler.cc b/test/cctest/test-compiler.cc
index 3494bf9..a6545d3 100644
--- a/test/cctest/test-compiler.cc
+++ b/test/cctest/test-compiler.cc
@@ -55,7 +55,7 @@
Handle<Object> object(value, isolate);
Handle<String> internalized_name =
isolate->factory()->InternalizeUtf8String(name);
- Handle<JSObject> global(isolate->context()->global_object(), isolate);
+ Handle<JSObject> global(isolate->context().global_object(), isolate);
Runtime::SetObjectProperty(isolate, global, internalized_name, object,
StoreOrigin::kMaybeKeyed, Just(kDontThrow))
.Check();
@@ -85,7 +85,7 @@
Handle<JSFunction> fun = Compile(buffer.begin());
if (fun.is_null()) return -1;
- Handle<JSObject> global(isolate->context()->global_object(), isolate);
+ Handle<JSObject> global(isolate->context().global_object(), isolate);
Execution::Call(isolate, fun, global, 0, nullptr).Check();
return GetGlobalProperty("result")->Number();
}
@@ -104,7 +104,7 @@
SetGlobalProperty("x", Smi::FromInt(x));
SetGlobalProperty("y", Smi::FromInt(y));
- Handle<JSObject> global(isolate->context()->global_object(), isolate);
+ Handle<JSObject> global(isolate->context().global_object(), isolate);
Execution::Call(isolate, fun, global, 0, nullptr).Check();
return GetGlobalProperty("result")->Number();
}
@@ -122,7 +122,7 @@
if (fun.is_null()) return -1;
SetGlobalProperty("x", Smi::FromInt(x));
- Handle<JSObject> global(isolate->context()->global_object(), isolate);
+ Handle<JSObject> global(isolate->context().global_object(), isolate);
Execution::Call(isolate, fun, global, 0, nullptr).Check();
return GetGlobalProperty("result")->Number();
}
@@ -141,7 +141,7 @@
if (fun.is_null()) return -1;
SetGlobalProperty("n", Smi::FromInt(n));
- Handle<JSObject> global(isolate->context()->global_object(), isolate);
+ Handle<JSObject> global(isolate->context().global_object(), isolate);
Execution::Call(isolate, fun, global, 0, nullptr).Check();
return GetGlobalProperty("result")->Number();
}
@@ -161,7 +161,7 @@
const char* source = "for (n = 0; n < 100; ++n) print(n, 1, 2);";
Handle<JSFunction> fun = Compile(source);
if (fun.is_null()) return;
- Handle<JSObject> global(CcTest::i_isolate()->context()->global_object(),
+ Handle<JSObject> global(CcTest::i_isolate()->context().global_object(),
fun->GetIsolate());
Execution::Call(CcTest::i_isolate(), fun, global, 0, nullptr).Check();
}
@@ -193,7 +193,7 @@
Handle<JSFunction> fun = Compile(source);
CHECK(!fun.is_null());
- Handle<JSObject> global(CcTest::i_isolate()->context()->global_object(),
+ Handle<JSObject> global(CcTest::i_isolate()->context().global_object(),
fun->GetIsolate());
Execution::Call(CcTest::i_isolate(), fun, global, 0, nullptr).Check();
CHECK_EQ(511.0, GetGlobalProperty("r")->Number());
@@ -208,9 +208,9 @@
Handle<JSFunction> fun = Compile(source);
CHECK(!fun.is_null());
Isolate* isolate = fun->GetIsolate();
- Handle<JSObject> global(isolate->context()->global_object(), isolate);
+ Handle<JSObject> global(isolate->context().global_object(), isolate);
CHECK(Execution::Call(isolate, fun, global, 0, nullptr).is_null());
- CHECK_EQ(42.0, isolate->pending_exception()->Number());
+ CHECK_EQ(42.0, isolate->pending_exception().Number());
}
@@ -234,7 +234,7 @@
Isolate* isolate = fun0->GetIsolate();
// Run the generated code to populate the global object with 'foo'.
- Handle<JSObject> global(isolate->context()->global_object(), isolate);
+ Handle<JSObject> global(isolate->context().global_object(), isolate);
Execution::Call(isolate, fun0, global, 0, nullptr).Check();
Handle<Object> fun1 =
@@ -322,7 +322,7 @@
{
HeapObject heap_object;
CHECK(object->GetHeapObjectIfWeak(&heap_object));
- CHECK(heap_object->IsJSFunction());
+ CHECK(heap_object.IsJSFunction());
}
CompileRun("%OptimizeFunctionOnNextCall(f); f(fun1);");
@@ -330,11 +330,11 @@
// Verify that the feedback is still "gathered" despite a recompilation
// of the full code.
CHECK(f->IsOptimized());
- object = f->feedback_vector()->Get(slot_for_a);
+ object = f->feedback_vector().Get(slot_for_a);
{
HeapObject heap_object;
CHECK(object->GetHeapObjectIfWeak(&heap_object));
- CHECK(heap_object->IsJSFunction());
+ CHECK(heap_object.IsJSFunction());
}
}
@@ -366,12 +366,12 @@
// If we are compiling lazily then it should not be compiled, and so no
// feedback vector allocated yet.
- CHECK(!f->shared()->is_compiled());
+ CHECK(!f->shared().is_compiled());
CompileRun("morphing_call();");
// Now a feedback vector / closure feedback cell array is allocated.
- CHECK(f->shared()->is_compiled());
+ CHECK(f->shared().is_compiled());
CHECK(f->has_feedback_vector() || f->has_closure_feedback_cell_array());
}
@@ -788,13 +788,13 @@
"%EnsureFeedbackVectorForFunction(foo);"
"foo();");
Handle<JSFunction> foo = Handle<JSFunction>::cast(GetGlobalProperty("foo"));
- CHECK_EQ(1, foo->feedback_vector()->invocation_count());
+ CHECK_EQ(1, foo->feedback_vector().invocation_count());
CompileRun("foo()");
- CHECK_EQ(2, foo->feedback_vector()->invocation_count());
+ CHECK_EQ(2, foo->feedback_vector().invocation_count());
CompileRun("bar()");
- CHECK_EQ(2, foo->feedback_vector()->invocation_count());
+ CHECK_EQ(2, foo->feedback_vector().invocation_count());
CompileRun("foo(); foo()");
- CHECK_EQ(4, foo->feedback_vector()->invocation_count());
+ CHECK_EQ(4, foo->feedback_vector().invocation_count());
}
TEST(SafeToSkipArgumentsAdaptor) {
@@ -808,17 +808,17 @@
"function e() { \"use strict\"; return eval(\"\"); }; e();"
"function f(x, y) { \"use strict\"; return x + y; }; f(1, 2);");
Handle<JSFunction> a = Handle<JSFunction>::cast(GetGlobalProperty("a"));
- CHECK(a->shared()->is_safe_to_skip_arguments_adaptor());
+ CHECK(a->shared().is_safe_to_skip_arguments_adaptor());
Handle<JSFunction> b = Handle<JSFunction>::cast(GetGlobalProperty("b"));
- CHECK(!b->shared()->is_safe_to_skip_arguments_adaptor());
+ CHECK(!b->shared().is_safe_to_skip_arguments_adaptor());
Handle<JSFunction> c = Handle<JSFunction>::cast(GetGlobalProperty("c"));
- CHECK(!c->shared()->is_safe_to_skip_arguments_adaptor());
+ CHECK(!c->shared().is_safe_to_skip_arguments_adaptor());
Handle<JSFunction> d = Handle<JSFunction>::cast(GetGlobalProperty("d"));
- CHECK(!d->shared()->is_safe_to_skip_arguments_adaptor());
+ CHECK(!d->shared().is_safe_to_skip_arguments_adaptor());
Handle<JSFunction> e = Handle<JSFunction>::cast(GetGlobalProperty("e"));
- CHECK(!e->shared()->is_safe_to_skip_arguments_adaptor());
+ CHECK(!e->shared().is_safe_to_skip_arguments_adaptor());
Handle<JSFunction> f = Handle<JSFunction>::cast(GetGlobalProperty("f"));
- CHECK(f->shared()->is_safe_to_skip_arguments_adaptor());
+ CHECK(f->shared().is_safe_to_skip_arguments_adaptor());
}
TEST(ShallowEagerCompilation) {
@@ -944,10 +944,10 @@
// TODO(mslekova): Remove the duplication with test-heap.cc
static int AllocationSitesCount(Heap* heap) {
int count = 0;
- for (Object site = heap->allocation_sites_list(); site->IsAllocationSite();) {
+ for (Object site = heap->allocation_sites_list(); site.IsAllocationSite();) {
AllocationSite cur = AllocationSite::cast(site);
- CHECK(cur->HasWeakNext());
- site = cur->weak_next();
+ CHECK(cur.HasWeakNext());
+ site = cur.weak_next();
count++;
}
return count;
diff --git a/test/cctest/test-cpu-profiler.cc b/test/cctest/test-cpu-profiler.cc
index 1eb4c80..798be0f 100644
--- a/test/cctest/test-cpu-profiler.cc
+++ b/test/cctest/test-cpu-profiler.cc
@@ -182,26 +182,26 @@
profiler_listener.CodeMoveEvent(comment2_code, moved_code);
// Enqueue a tick event to enable code events processing.
- EnqueueTickSampleEvent(processor, aaa_code->InstructionStart());
+ EnqueueTickSampleEvent(processor, aaa_code.InstructionStart());
isolate->logger()->RemoveCodeEventListener(&profiler_listener);
processor->StopSynchronously();
// Check the state of profile generator.
CodeEntry* aaa =
- generator->code_map()->FindEntry(aaa_code->InstructionStart());
+ generator->code_map()->FindEntry(aaa_code.InstructionStart());
CHECK(aaa);
CHECK_EQ(0, strcmp(aaa_str, aaa->name()));
CodeEntry* comment =
- generator->code_map()->FindEntry(comment_code->InstructionStart());
+ generator->code_map()->FindEntry(comment_code.InstructionStart());
CHECK(comment);
CHECK_EQ(0, strcmp("comment", comment->name()));
- CHECK(!generator->code_map()->FindEntry(comment2_code->InstructionStart()));
+ CHECK(!generator->code_map()->FindEntry(comment2_code.InstructionStart()));
CodeEntry* comment2 =
- generator->code_map()->FindEntry(moved_code->InstructionStart());
+ generator->code_map()->FindEntry(moved_code.InstructionStart());
CHECK(comment2);
CHECK_EQ(0, strcmp("comment2", comment2->name()));
}
@@ -236,14 +236,14 @@
profiler_listener.CodeCreateEvent(i::Logger::STUB_TAG, frame2_code, "ccc");
profiler_listener.CodeCreateEvent(i::Logger::BUILTIN_TAG, frame3_code, "ddd");
- EnqueueTickSampleEvent(processor, frame1_code->raw_instruction_start());
+ EnqueueTickSampleEvent(processor, frame1_code.raw_instruction_start());
EnqueueTickSampleEvent(
processor,
- frame2_code->raw_instruction_start() + frame2_code->ExecutableSize() / 2,
- frame1_code->raw_instruction_start() + frame1_code->ExecutableSize() / 2);
- EnqueueTickSampleEvent(processor, frame3_code->raw_instruction_end() - 1,
- frame2_code->raw_instruction_end() - 1,
- frame1_code->raw_instruction_end() - 1);
+ frame2_code.raw_instruction_start() + frame2_code.ExecutableSize() / 2,
+ frame1_code.raw_instruction_start() + frame1_code.ExecutableSize() / 2);
+ EnqueueTickSampleEvent(processor, frame3_code.raw_instruction_end() - 1,
+ frame2_code.raw_instruction_end() - 1,
+ frame1_code.raw_instruction_end() - 1);
isolate->logger()->RemoveCodeEventListener(&profiler_listener);
processor->StopSynchronously();
@@ -303,11 +303,11 @@
profiler_listener.CodeCreateEvent(i::Logger::BUILTIN_TAG, code, "bbb");
v8::internal::TickSample sample;
- sample.pc = reinterpret_cast<void*>(code->InstructionStart());
+ sample.pc = reinterpret_cast<void*>(code.InstructionStart());
sample.tos = nullptr;
sample.frames_count = v8::TickSample::kMaxFramesCount;
for (unsigned i = 0; i < sample.frames_count; ++i) {
- sample.stack[i] = reinterpret_cast<void*>(code->InstructionStart());
+ sample.stack[i] = reinterpret_cast<void*>(code.InstructionStart());
}
sample.timestamp = base::TimeTicks::HighResolutionNow();
processor->AddSample(sample);
@@ -1145,12 +1145,12 @@
i::Handle<i::JSFunction> func = i::Handle<i::JSFunction>::cast(
v8::Utils::OpenHandle(*GetFunction(env.local(), func_name)));
CHECK(!func->shared().is_null());
- CHECK(!func->shared()->abstract_code().is_null());
+ CHECK(!func->shared().abstract_code().is_null());
CHECK(!optimize || func->IsOptimized() ||
!CcTest::i_isolate()->use_optimizer());
i::AbstractCode code = func->abstract_code();
CHECK(!code.is_null());
- i::Address code_address = code->raw_instruction_start();
+ i::Address code_address = code.raw_instruction_start();
CHECK_NE(code_address, kNullAddress);
CpuProfilesCollection* profiles = new CpuProfilesCollection(isolate);
diff --git a/test/cctest/test-debug.cc b/test/cctest/test-debug.cc
index d87569d..b084df9 100644
--- a/test/cctest/test-debug.cc
+++ b/test/cctest/test-debug.cc
@@ -84,7 +84,7 @@
const char* condition = nullptr) {
i::Handle<i::JSFunction> function =
i::Handle<i::JSFunction>::cast(v8::Utils::OpenHandle(*fun));
- position += function->shared()->StartPosition();
+ position += function->shared().StartPosition();
static int break_point_index = 0;
i::Isolate* isolate = function->GetIsolate();
i::Handle<i::String> condition_string =
@@ -168,7 +168,7 @@
HeapIterator iterator(CcTest::heap());
for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) {
- CHECK(!obj->IsDebugInfo());
+ CHECK(!obj.IsDebugInfo());
}
}
@@ -2709,7 +2709,7 @@
// Set breakpoint in the script.
i::Handle<i::Script> i_script(
- i::Script::cast(v8::Utils::OpenHandle(*script)->shared()->script()),
+ i::Script::cast(v8::Utils::OpenHandle(*script)->shared().script()),
isolate);
i::Handle<i::String> condition = isolate->factory()->empty_string();
int position = 0;
@@ -3091,11 +3091,11 @@
v8::internal::Script::InitLineEnds(script);
v8::internal::FixedArray ends =
v8::internal::FixedArray::cast(script->line_ends());
- CHECK_GT(ends->length(), 0);
+ CHECK_GT(ends.length(), 0);
int prev_end = -1;
- for (int j = 0; j < ends->length(); j++) {
- const int curr_end = v8::internal::Smi::ToInt(ends->get(j));
+ for (int j = 0; j < ends.length(); j++) {
+ const int curr_end = v8::internal::Smi::ToInt(ends.get(j));
CHECK_GT(curr_end, prev_end);
prev_end = curr_end;
}
@@ -4221,8 +4221,8 @@
bool fail = false;
for (int i = 0; i < i::Builtins::builtin_count; i++) {
i::Code builtin = builtins->builtin(i);
- if (builtin->kind() != i::Code::BUILTIN) continue;
- auto prediction = builtin->GetBuiltinCatchPrediction();
+ if (builtin.kind() != i::Code::BUILTIN) continue;
+ auto prediction = builtin.GetBuiltinCatchPrediction();
USE(prediction);
}
CHECK(!fail);
@@ -4267,7 +4267,7 @@
i::HeapIterator iterator(isolate->heap());
for (i::HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) {
- if (!obj->IsJSFunction()) continue;
+ if (!obj.IsJSFunction()) continue;
i::JSFunction fun = i::JSFunction::cast(obj);
all_functions.emplace_back(fun, isolate);
}
@@ -4293,7 +4293,7 @@
Handle<i::String> i_name =
isolate->factory()->NewStringFromAsciiChecked(name);
for (const auto& script : scripts) {
- if (!script->name()->IsString()) continue;
+ if (!script->name().IsString()) continue;
if (i_name->Equals(i::String::cast(script->name()))) return script;
}
return i::MaybeHandle<i::Script>();
@@ -4321,11 +4321,11 @@
i::Script::Iterator iterator(i_isolate);
for (i::Script script = iterator.Next(); !script.is_null();
script = iterator.Next()) {
- if (script->type() == i::Script::TYPE_NATIVE &&
- script->name()->IsUndefined(i_isolate)) {
+ if (script.type() == i::Script::TYPE_NATIVE &&
+ script.name().IsUndefined(i_isolate)) {
continue;
}
- ++count_by_type[script->type()];
+ ++count_by_type[script.type()];
scripts.emplace_back(script, i_isolate);
}
}
@@ -4382,7 +4382,7 @@
v8::Local<v8::Script> v8_script =
v8::Script::Compile(env.local(), v8_str(source)).ToLocalChecked();
i::Handle<i::Script> i_script(
- i::Script::cast(v8::Utils::OpenHandle(*v8_script)->shared()->script()),
+ i::Script::cast(v8::Utils::OpenHandle(*v8_script)->shared().script()),
CcTest::i_isolate());
v8::Local<v8::debug::Script> script =
v8::ToApiHandle<v8::debug::Script>(i_script);
diff --git a/test/cctest/test-dictionary.cc b/test/cctest/test-dictionary.cc
index 3351d93..3a2cc9e 100644
--- a/test/cctest/test-dictionary.cc
+++ b/test/cctest/test-dictionary.cc
@@ -86,17 +86,17 @@
CHECK_EQ(table->NumberOfElements(), i + 1);
CHECK_NE(table->FindEntry(isolate, key), HashMap::kNotFound);
CHECK_EQ(table->Lookup(key), *value);
- CHECK(key->GetIdentityHash()->IsSmi());
+ CHECK(key->GetIdentityHash().IsSmi());
}
// Keys never added to the map which already have an identity hash
// code should not be found.
for (int i = 0; i < 100; i++) {
Handle<JSReceiver> key = factory->NewJSArray(7);
- CHECK(key->GetOrCreateIdentityHash(isolate)->IsSmi());
+ CHECK(key->GetOrCreateIdentityHash(isolate).IsSmi());
CHECK_EQ(table->FindEntry(isolate, key), HashMap::kNotFound);
CHECK_EQ(table->Lookup(key), roots.the_hole_value());
- CHECK(key->GetIdentityHash()->IsSmi());
+ CHECK(key->GetIdentityHash().IsSmi());
}
// Keys that don't have an identity hash should not be found and also
@@ -157,16 +157,16 @@
table = HashSet::Add(isolate, table, key);
CHECK_EQ(table->NumberOfElements(), i + 2);
CHECK(table->Has(isolate, key));
- CHECK(key->GetIdentityHash()->IsSmi());
+ CHECK(key->GetIdentityHash().IsSmi());
}
// Keys never added to the map which already have an identity hash
// code should not be found.
for (int i = 0; i < 100; i++) {
Handle<JSReceiver> key = factory->NewJSArray(7);
- CHECK(key->GetOrCreateIdentityHash(isolate)->IsSmi());
+ CHECK(key->GetOrCreateIdentityHash(isolate).IsSmi());
CHECK(!table->Has(isolate, key));
- CHECK(key->GetIdentityHash()->IsSmi());
+ CHECK(key->GetIdentityHash().IsSmi());
}
// Keys that don't have an identity hash should not be found and also
@@ -215,26 +215,26 @@
{
Handle<ObjectHashTable> table = ObjectHashTable::New(isolate, 100);
ObjectHashTableTest t(*table);
- int capacity = t->capacity();
+ int capacity = t.capacity();
for (int i = 0; i < capacity - 1; i++) {
- t->insert(i, i * i, i);
+ t.insert(i, i * i, i);
}
- t->Rehash(ReadOnlyRoots(isolate));
+ t.Rehash(ReadOnlyRoots(isolate));
for (int i = 0; i < capacity - 1; i++) {
- CHECK_EQ(i, t->lookup(i * i));
+ CHECK_EQ(i, t.lookup(i * i));
}
}
// Test half-filled table.
{
Handle<ObjectHashTable> table = ObjectHashTable::New(isolate, 100);
ObjectHashTableTest t(*table);
- int capacity = t->capacity();
+ int capacity = t.capacity();
for (int i = 0; i < capacity / 2; i++) {
- t->insert(i, i * i, i);
+ t.insert(i, i * i, i);
}
- t->Rehash(ReadOnlyRoots(isolate));
+ t.Rehash(ReadOnlyRoots(isolate));
for (int i = 0; i < capacity / 2; i++) {
- CHECK_EQ(i, t->lookup(i * i));
+ CHECK_EQ(i, t.lookup(i * i));
}
}
}
@@ -285,7 +285,7 @@
heap::SimulateFullSpace(CcTest::heap()->old_space());
// Calling Lookup() should not cause GC ever.
- CHECK(table->Lookup(key)->IsTheHole(isolate));
+ CHECK(table->Lookup(key).IsTheHole(isolate));
// Calling Put() should request GC by returning a failure.
int gc_count = isolate->heap()->gc_count();
diff --git a/test/cctest/test-elements-kind.cc b/test/cctest/test-elements-kind.cc
index c75832e..6b98100 100644
--- a/test/cctest/test-elements-kind.cc
+++ b/test/cctest/test-elements-kind.cc
@@ -130,8 +130,8 @@
JSObject::DefinePropertyOrElementIgnoreAttributes(object, name, value, NONE)
.Check();
CHECK_NE(object->map(), *previous_map);
- CHECK_EQ(HOLEY_ELEMENTS, object->map()->elements_kind());
- CHECK_LE(1, object->property_array()->length());
+ CHECK_EQ(HOLEY_ELEMENTS, object->map().elements_kind());
+ CHECK_LE(1, object->property_array().length());
CHECK(EQUALS(isolate, object->elements(), empty_fixed_array));
}
@@ -149,7 +149,7 @@
int nof_inobject_properties = 10;
// force in object properties by changing the expected_nof_properties
// (we always reserve 8 inobject properties slack on top).
- function->shared()->set_expected_nof_properties(nof_inobject_properties - 8);
+ function->shared().set_expected_nof_properties(nof_inobject_properties - 8);
Handle<Object> value(Smi::FromInt(42), isolate);
Handle<JSObject> object = factory->NewJSObject(function);
@@ -166,7 +166,7 @@
.Check();
}
CHECK_NE(object->map(), *previous_map);
- CHECK_EQ(HOLEY_ELEMENTS, object->map()->elements_kind());
+ CHECK_EQ(HOLEY_ELEMENTS, object->map().elements_kind());
CHECK(EQUALS(isolate, object->property_array(), empty_property_array));
CHECK(EQUALS(isolate, object->elements(), empty_fixed_array));
@@ -177,9 +177,9 @@
JSObject::DefinePropertyOrElementIgnoreAttributes(object, name, value, NONE)
.Check();
CHECK_NE(object->map(), *previous_map);
- CHECK_EQ(HOLEY_ELEMENTS, object->map()->elements_kind());
+ CHECK_EQ(HOLEY_ELEMENTS, object->map().elements_kind());
// there must be at least 1 element in the properies store
- CHECK_LE(1, object->property_array()->length());
+ CHECK_LE(1, object->property_array().length());
CHECK(EQUALS(isolate, object->elements(), empty_fixed_array));
}
@@ -209,9 +209,9 @@
.Check();
// no change in elements_kind => no map transition
CHECK_EQ(object->map(), *previous_map);
- CHECK_EQ(HOLEY_ELEMENTS, object->map()->elements_kind());
+ CHECK_EQ(HOLEY_ELEMENTS, object->map().elements_kind());
CHECK(EQUALS(isolate, object->property_array(), empty_property_array));
- CHECK_LE(1, object->elements()->length());
+ CHECK_LE(1, object->elements().length());
// Adding more consecutive elements without a change in the backing store
int non_dict_backing_store_limit = 100;
@@ -222,9 +222,9 @@
}
// no change in elements_kind => no map transition
CHECK_EQ(object->map(), *previous_map);
- CHECK_EQ(HOLEY_ELEMENTS, object->map()->elements_kind());
+ CHECK_EQ(HOLEY_ELEMENTS, object->map().elements_kind());
CHECK(EQUALS(isolate, object->property_array(), empty_property_array));
- CHECK_LE(non_dict_backing_store_limit, object->elements()->length());
+ CHECK_LE(non_dict_backing_store_limit, object->elements().length());
// Adding an element at an very large index causes a change to
// DICTIONARY_ELEMENTS
@@ -233,9 +233,9 @@
.Check();
// change in elements_kind => map transition
CHECK_NE(object->map(), *previous_map);
- CHECK_EQ(DICTIONARY_ELEMENTS, object->map()->elements_kind());
+ CHECK_EQ(DICTIONARY_ELEMENTS, object->map().elements_kind());
CHECK(EQUALS(isolate, object->property_array(), empty_property_array));
- CHECK_LE(non_dict_backing_store_limit, object->elements()->length());
+ CHECK_LE(non_dict_backing_store_limit, object->elements().length());
}
@@ -264,8 +264,8 @@
.Check();
// No change in elements_kind but added property => new map
CHECK_NE(array->map(), *previous_map);
- CHECK_EQ(PACKED_SMI_ELEMENTS, array->map()->elements_kind());
- CHECK_LE(1, array->property_array()->length());
+ CHECK_EQ(PACKED_SMI_ELEMENTS, array->map().elements_kind());
+ CHECK_LE(1, array->property_array().length());
CHECK(EQUALS(isolate, array->elements(), empty_fixed_array));
CHECK_EQ(0, Smi::ToInt(array->length()));
}
@@ -296,9 +296,9 @@
.Check();
// no change in elements_kind => no map transition
CHECK_EQ(array->map(), *previous_map);
- CHECK_EQ(PACKED_SMI_ELEMENTS, array->map()->elements_kind());
+ CHECK_EQ(PACKED_SMI_ELEMENTS, array->map().elements_kind());
CHECK(EQUALS(isolate, array->property_array(), empty_property_array));
- CHECK_LE(1, array->elements()->length());
+ CHECK_LE(1, array->elements().length());
CHECK_EQ(1, Smi::ToInt(array->length()));
// Adding more consecutive elements without a change in the backing store
@@ -310,9 +310,9 @@
}
// no change in elements_kind => no map transition
CHECK_EQ(array->map(), *previous_map);
- CHECK_EQ(PACKED_SMI_ELEMENTS, array->map()->elements_kind());
+ CHECK_EQ(PACKED_SMI_ELEMENTS, array->map().elements_kind());
CHECK(EQUALS(isolate, array->property_array(), empty_property_array));
- CHECK_LE(non_dict_backing_store_limit, array->elements()->length());
+ CHECK_LE(non_dict_backing_store_limit, array->elements().length());
CHECK_EQ(non_dict_backing_store_limit, Smi::ToInt(array->length()));
// Adding an element at an very large index causes a change to
@@ -323,10 +323,10 @@
.Check();
// change in elements_kind => map transition
CHECK_NE(array->map(), *previous_map);
- CHECK_EQ(DICTIONARY_ELEMENTS, array->map()->elements_kind());
+ CHECK_EQ(DICTIONARY_ELEMENTS, array->map().elements_kind());
CHECK(EQUALS(isolate, array->property_array(), empty_property_array));
- CHECK_LE(non_dict_backing_store_limit, array->elements()->length());
- CHECK_LE(array->elements()->length(), index);
+ CHECK_LE(non_dict_backing_store_limit, array->elements().length());
+ CHECK_LE(array->elements().length(), index);
CHECK_EQ(index + 1, Smi::ToInt(array->length()));
}
@@ -355,14 +355,14 @@
.Check();
// no change in elements_kind => no map transition
CHECK_EQ(array->map(), *previous_map);
- CHECK_EQ(PACKED_SMI_ELEMENTS, array->map()->elements_kind());
+ CHECK_EQ(PACKED_SMI_ELEMENTS, array->map().elements_kind());
CHECK_EQ(1, Smi::ToInt(array->length()));
// `delete array[0]` does not alter length, but changes the elments_kind
name = MakeString("0");
CHECK(JSReceiver::DeletePropertyOrElement(array, name).FromMaybe(false));
CHECK_NE(array->map(), *previous_map);
- CHECK_EQ(HOLEY_SMI_ELEMENTS, array->map()->elements_kind());
+ CHECK_EQ(HOLEY_SMI_ELEMENTS, array->map().elements_kind());
CHECK_EQ(1, Smi::ToInt(array->length()));
previous_map = handle(array->map(), isolate);
@@ -376,7 +376,7 @@
NONE)
.Check();
CHECK_EQ(array->map(), *previous_map);
- CHECK_EQ(HOLEY_SMI_ELEMENTS, array->map()->elements_kind());
+ CHECK_EQ(HOLEY_SMI_ELEMENTS, array->map().elements_kind());
CHECK_EQ(2, Smi::ToInt(array->length()));
// Adding a string to the array changes from FAST_HOLEY_SMI to FAST_HOLEY
@@ -385,7 +385,7 @@
NONE)
.Check();
CHECK_NE(array->map(), *previous_map);
- CHECK_EQ(HOLEY_ELEMENTS, array->map()->elements_kind());
+ CHECK_EQ(HOLEY_ELEMENTS, array->map().elements_kind());
CHECK_EQ(2, Smi::ToInt(array->length()));
previous_map = handle(array->map(), isolate);
@@ -428,14 +428,14 @@
.Check();
// no change in elements_kind => no map transition
CHECK_EQ(array->map(), *previous_map);
- CHECK_EQ(PACKED_ELEMENTS, array->map()->elements_kind());
+ CHECK_EQ(PACKED_ELEMENTS, array->map().elements_kind());
CHECK_EQ(1, Smi::ToInt(array->length()));
// `delete array[0]` does not alter length, but changes the elments_kind
name = MakeString("0");
CHECK(JSReceiver::DeletePropertyOrElement(array, name).FromMaybe(false));
CHECK_NE(array->map(), *previous_map);
- CHECK_EQ(HOLEY_ELEMENTS, array->map()->elements_kind());
+ CHECK_EQ(HOLEY_ELEMENTS, array->map().elements_kind());
CHECK_EQ(1, Smi::ToInt(array->length()));
previous_map = handle(array->map(), isolate);
@@ -449,7 +449,7 @@
NONE)
.Check();
CHECK_EQ(array->map(), *previous_map);
- CHECK_EQ(HOLEY_ELEMENTS, array->map()->elements_kind());
+ CHECK_EQ(HOLEY_ELEMENTS, array->map().elements_kind());
CHECK_EQ(2, Smi::ToInt(array->length()));
}
@@ -475,7 +475,7 @@
NONE)
.Check();
CHECK_NE(array->map(), *previous_map);
- CHECK_EQ(PACKED_DOUBLE_ELEMENTS, array->map()->elements_kind());
+ CHECK_EQ(PACKED_DOUBLE_ELEMENTS, array->map().elements_kind());
CHECK_EQ(1, Smi::ToInt(array->length()));
previous_map = handle(array->map(), isolate);
@@ -485,14 +485,14 @@
NONE)
.Check();
CHECK_EQ(array->map(), *previous_map);
- CHECK_EQ(PACKED_DOUBLE_ELEMENTS, array->map()->elements_kind());
+ CHECK_EQ(PACKED_DOUBLE_ELEMENTS, array->map().elements_kind());
CHECK_EQ(2, Smi::ToInt(array->length()));
// `delete array[0]` does not alter length, but changes the elments_kind
name = MakeString("0");
CHECK(JSReceiver::DeletePropertyOrElement(array, name).FromMaybe(false));
CHECK_NE(array->map(), *previous_map);
- CHECK_EQ(HOLEY_DOUBLE_ELEMENTS, array->map()->elements_kind());
+ CHECK_EQ(HOLEY_DOUBLE_ELEMENTS, array->map().elements_kind());
CHECK_EQ(2, Smi::ToInt(array->length()));
previous_map = handle(array->map(), isolate);
@@ -502,7 +502,7 @@
NONE)
.Check();
CHECK_EQ(array->map(), *previous_map);
- CHECK_EQ(HOLEY_DOUBLE_ELEMENTS, array->map()->elements_kind());
+ CHECK_EQ(HOLEY_DOUBLE_ELEMENTS, array->map().elements_kind());
CHECK_EQ(2, Smi::ToInt(array->length()));
// Adding a string to the array changes to elements_kind PACKED_ELEMENTS
@@ -511,7 +511,7 @@
NONE)
.Check();
CHECK_NE(array->map(), *previous_map);
- CHECK_EQ(HOLEY_ELEMENTS, array->map()->elements_kind());
+ CHECK_EQ(HOLEY_ELEMENTS, array->map().elements_kind());
CHECK_EQ(2, Smi::ToInt(array->length()));
previous_map = handle(array->map(), isolate);
diff --git a/test/cctest/test-feedback-vector.cc b/test/cctest/test-feedback-vector.cc
index a9d78f8..1685436 100644
--- a/test/cctest/test-feedback-vector.cc
+++ b/test/cctest/test-feedback-vector.cc
@@ -94,7 +94,7 @@
vector = NewFeedbackVector(isolate, &spec);
FeedbackVectorHelper helper(vector);
FeedbackCell cell = *vector->GetClosureFeedbackCell(0);
- CHECK_EQ(cell->value(), *factory->undefined_value());
+ CHECK_EQ(cell.value(), *factory->undefined_value());
}
}
diff --git a/test/cctest/test-field-type-tracking.cc b/test/cctest/test-field-type-tracking.cc
index 69dac81..32dc32a 100644
--- a/test/cctest/test-field-type-tracking.cc
+++ b/test/cctest/test-field-type-tracking.cc
@@ -110,7 +110,7 @@
explicit Expectations(Isolate* isolate)
: Expectations(
isolate,
- isolate->object_function()->initial_map()->elements_kind()) {}
+ isolate->object_function()->initial_map().elements_kind()) {}
void Init(int index, PropertyKind kind, PropertyAttributes attributes,
PropertyConstness constness, PropertyLocation location,
@@ -250,7 +250,7 @@
}
bool Check(DescriptorArray descriptors, int descriptor) const {
- PropertyDetails details = descriptors->GetDetails(descriptor);
+ PropertyDetails details = descriptors.GetDetails(descriptor);
if (details.kind() != kinds_[descriptor]) return false;
if (details.location() != locations_[descriptor]) return false;
@@ -266,7 +266,7 @@
Object expected_value = *values_[descriptor];
if (details.location() == kField) {
if (details.kind() == kData) {
- FieldType type = descriptors->GetFieldType(descriptor);
+ FieldType type = descriptors.GetFieldType(descriptor);
return FieldType::cast(expected_value) == type;
} else {
// kAccessor
@@ -274,28 +274,28 @@
}
} else {
CHECK_EQ(kAccessor, details.kind());
- Object value = descriptors->GetStrongValue(descriptor);
+ Object value = descriptors.GetStrongValue(descriptor);
if (value == expected_value) return true;
- if (!value->IsAccessorPair()) return false;
+ if (!value.IsAccessorPair()) return false;
AccessorPair pair = AccessorPair::cast(value);
- return pair->Equals(expected_value, *setter_values_[descriptor]);
+ return pair.Equals(expected_value, *setter_values_[descriptor]);
}
UNREACHABLE();
}
bool Check(Map map, int expected_nof) const {
- CHECK_EQ(elements_kind_, map->elements_kind());
+ CHECK_EQ(elements_kind_, map.elements_kind());
CHECK(number_of_properties_ <= MAX_PROPERTIES);
- CHECK_EQ(expected_nof, map->NumberOfOwnDescriptors());
- CHECK(!map->is_dictionary_map());
+ CHECK_EQ(expected_nof, map.NumberOfOwnDescriptors());
+ CHECK(!map.is_dictionary_map());
- DescriptorArray descriptors = map->instance_descriptors();
+ DescriptorArray descriptors = map.instance_descriptors();
CHECK(expected_nof <= number_of_properties_);
for (int i = 0; i < expected_nof; i++) {
if (!Check(descriptors, i)) {
Print();
#ifdef OBJECT_PRINT
- descriptors->Print();
+ descriptors.Print();
#endif
Check(descriptors, i);
return false;
@@ -452,7 +452,7 @@
Handle<Object> setter(pair->setter(), isolate);
int descriptor =
- map->instance_descriptors()->SearchWithCache(isolate, *name, *map);
+ map->instance_descriptors().SearchWithCache(isolate, *name, *map);
map = Map::TransitionToAccessorProperty(isolate, map, name, descriptor,
getter, setter, attributes);
CHECK(!map->is_deprecated());
@@ -523,7 +523,7 @@
Handle<JSObject> obj = factory->NewJSObjectFromMap(map);
JSObject::MigrateToMap(obj, prepared_map);
FieldIndex index = FieldIndex::ForDescriptor(*prepared_map, 0);
- CHECK(obj->RawFastPropertyAt(index)->IsUninitialized(isolate));
+ CHECK(obj->RawFastPropertyAt(index).IsUninitialized(isolate));
#ifdef VERIFY_HEAP
obj->ObjectVerify(isolate);
#endif
@@ -556,18 +556,17 @@
CHECK(obj_value->IsJSObject());
Handle<JSObject> obj = Handle<JSObject>::cast(obj_value);
- CHECK_EQ(1, obj->map()->NumberOfOwnDescriptors());
- CHECK(
- obj->map()->instance_descriptors()->GetStrongValue(0)->IsAccessorPair());
+ CHECK_EQ(1, obj->map().NumberOfOwnDescriptors());
+ CHECK(obj->map().instance_descriptors().GetStrongValue(0).IsAccessorPair());
Handle<Object> value(Smi::FromInt(42), isolate);
JSObject::SetOwnPropertyIgnoreAttributes(obj, foo_str, value, NONE).Check();
// Check that the property contains |value|.
- CHECK_EQ(1, obj->map()->NumberOfOwnDescriptors());
+ CHECK_EQ(1, obj->map().NumberOfOwnDescriptors());
FieldIndex index = FieldIndex::ForDescriptor(obj->map(), 0);
Object the_value = obj->RawFastPropertyAt(index);
- CHECK(the_value->IsSmi());
+ CHECK(the_value.IsSmi());
CHECK_EQ(42, Smi::ToInt(the_value));
}
@@ -703,10 +702,10 @@
// Check that all previous maps are not stable.
Map tmp = *new_map;
while (true) {
- Object back = tmp->GetBackPointer();
- if (back->IsUndefined(isolate)) break;
+ Object back = tmp.GetBackPointer();
+ if (back.IsUndefined(isolate)) break;
tmp = Map::cast(back);
- CHECK(!tmp->is_stable());
+ CHECK(!tmp.is_stable());
}
}
@@ -1409,7 +1408,7 @@
CHECK(!map->is_deprecated());
CHECK_NE(*map, *new_map);
- CHECK(new_map->GetBackPointer()->IsUndefined(isolate));
+ CHECK(new_map->GetBackPointer().IsUndefined(isolate));
for (int i = 0; i < kPropCount; i++) {
expectations.GeneralizeField(i);
}
@@ -2086,7 +2085,7 @@
// Try to update |map|, since there is no place for propX transition at |map2|
// |map| should become "copy-generalized".
Handle<Map> updated_map = Map::Update(isolate, map);
- CHECK(updated_map->GetBackPointer()->IsUndefined(isolate));
+ CHECK(updated_map->GetBackPointer().IsUndefined(isolate));
for (int i = 0; i < kPropCount; i++) {
expectations.SetDataField(i, PropertyConstness::kMutable,
@@ -2191,13 +2190,13 @@
for (int i = 0; i < kPropCount; i++) {
expectations2.GeneralizeField(i);
}
- CHECK(new_map2->GetBackPointer()->IsUndefined(isolate));
+ CHECK(new_map2->GetBackPointer().IsUndefined(isolate));
CHECK(expectations2.Check(*new_map2));
} else {
expectations2.SetDataField(i, expected.constness,
expected.representation, expected.type);
- CHECK(!new_map2->GetBackPointer()->IsUndefined(isolate));
+ CHECK(!new_map2->GetBackPointer().IsUndefined(isolate));
CHECK(expectations2.Check(*new_map2));
}
} else {
@@ -2751,11 +2750,11 @@
Object::NewStorageFor(isolate, isolate->factory()->uninitialized_value(),
Representation::Double());
CHECK(obj->IsMutableHeapNumber());
- CHECK_EQ(kHoleNanInt64, MutableHeapNumber::cast(*obj)->value_as_bits());
+ CHECK_EQ(kHoleNanInt64, MutableHeapNumber::cast(*obj).value_as_bits());
obj = Object::NewStorageFor(isolate, mhn, Representation::Double());
CHECK(obj->IsMutableHeapNumber());
- CHECK_EQ(kHoleNanInt64, MutableHeapNumber::cast(*obj)->value_as_bits());
+ CHECK_EQ(kHoleNanInt64, MutableHeapNumber::cast(*obj).value_as_bits());
}
namespace {
@@ -2793,10 +2792,10 @@
CHECK(!map->is_deprecated());
CHECK_EQ(1, map->NumberOfOwnDescriptors());
- CHECK(map->instance_descriptors()->GetDetails(0).representation().Equals(
+ CHECK(map->instance_descriptors().GetDetails(0).representation().Equals(
expected_rep));
CHECK_EQ(PropertyConstness::kConst,
- map->instance_descriptors()->GetDetails(0).constness());
+ map->instance_descriptors().GetDetails(0).constness());
// Store value2 to obj2 and check that it got same map and property details
// did not change.
@@ -2808,10 +2807,10 @@
CHECK(!map->is_deprecated());
CHECK_EQ(1, map->NumberOfOwnDescriptors());
- CHECK(map->instance_descriptors()->GetDetails(0).representation().Equals(
+ CHECK(map->instance_descriptors().GetDetails(0).representation().Equals(
expected_rep));
CHECK_EQ(PropertyConstness::kConst,
- map->instance_descriptors()->GetDetails(0).constness());
+ map->instance_descriptors().GetDetails(0).constness());
// Store value2 to obj1 and check that property became mutable.
Call(isolate, store_func, obj1, value2).Check();
@@ -2821,10 +2820,10 @@
CHECK(!map->is_deprecated());
CHECK_EQ(1, map->NumberOfOwnDescriptors());
- CHECK(map->instance_descriptors()->GetDetails(0).representation().Equals(
+ CHECK(map->instance_descriptors().GetDetails(0).representation().Equals(
expected_rep));
CHECK_EQ(expected_constness,
- map->instance_descriptors()->GetDetails(0).constness());
+ map->instance_descriptors().GetDetails(0).constness());
}
void TestStoreToConstantField_PlusMinusZero(const char* store_func_source,
diff --git a/test/cctest/test-func-name-inference.cc b/test/cctest/test-func-name-inference.cc
index afe6fef..24f4431 100644
--- a/test/cctest/test-func-name-inference.cc
+++ b/test/cctest/test-func-name-inference.cc
@@ -59,11 +59,11 @@
Handle<SharedFunctionInfo>(SharedFunctionInfo::cast(*obj), isolate);
} else {
shared_function =
- Handle<SharedFunctionInfo>(JSFunction::cast(*obj)->shared(), isolate);
+ Handle<SharedFunctionInfo>(JSFunction::cast(*obj).shared(), isolate);
}
Handle<i::Script> i_script(i::Script::cast(shared_function->script()),
isolate);
- CHECK(i_script->source()->IsString());
+ CHECK(i_script->source().IsString());
Handle<i::String> script_src(i::String::cast(i_script->source()), isolate);
// Find the position of a given func source substring in the source.
@@ -84,7 +84,7 @@
// Verify inferred function name.
std::unique_ptr<char[]> inferred_name =
- shared_func_info->inferred_name()->ToCString();
+ shared_func_info->inferred_name().ToCString();
i::PrintF("expected: %s, found: %s\n", ref_inferred_name,
inferred_name.get());
CHECK_EQ(0, strcmp(ref_inferred_name, inferred_name.get()));
diff --git a/test/cctest/test-global-handles.cc b/test/cctest/test-global-handles.cc
index d76c5ad..22570bc 100644
--- a/test/cctest/test-global-handles.cc
+++ b/test/cctest/test-global-handles.cc
@@ -388,7 +388,7 @@
Handle<JSReceiver> key =
Utils::OpenHandle(*fp->handle.Get(CcTest::isolate()));
Handle<Smi> smi(Smi::FromInt(23), i_isolate);
- int32_t hash = key->GetOrCreateHash(i_isolate)->value();
+ int32_t hash = key->GetOrCreateHash(i_isolate).value();
JSWeakCollection::Set(weakmap, key, smi, hash);
},
[]() { InvokeScavenge(); }, SurvivalMode::kSurvives);
diff --git a/test/cctest/test-hashcode.cc b/test/cctest/test-hashcode.cc
index 1dc4149..56dc72f 100644
--- a/test/cctest/test-hashcode.cc
+++ b/test/cctest/test-hashcode.cc
@@ -30,16 +30,16 @@
void CheckFastObject(Handle<JSObject> obj, int hash) {
CHECK(obj->HasFastProperties());
- CHECK(obj->raw_properties_or_hash()->IsPropertyArray());
+ CHECK(obj->raw_properties_or_hash().IsPropertyArray());
CHECK_EQ(Smi::FromInt(hash), obj->GetHash());
- CHECK_EQ(hash, obj->property_array()->Hash());
+ CHECK_EQ(hash, obj->property_array().Hash());
}
void CheckDictionaryObject(Handle<JSObject> obj, int hash) {
CHECK(!obj->HasFastProperties());
- CHECK(obj->raw_properties_or_hash()->IsDictionary());
+ CHECK(obj->raw_properties_or_hash().IsDictionary());
CHECK_EQ(Smi::FromInt(hash), obj->GetHash());
- CHECK_EQ(hash, obj->property_dictionary()->Hash());
+ CHECK_EQ(hash, obj->property_dictionary().Hash());
}
TEST(AddHashCodeToFastObjectWithoutProperties) {
@@ -98,7 +98,7 @@
CHECK(obj->HasFastProperties());
JSObject::NormalizeProperties(obj, CLEAR_INOBJECT_PROPERTIES, 0,
"cctest/test-hashcode");
- CHECK(obj->raw_properties_or_hash()->IsDictionary());
+ CHECK(obj->raw_properties_or_hash().IsDictionary());
int hash = AddToSetAndGetHash(isolate, obj, false);
CheckDictionaryObject(obj, hash);
@@ -120,9 +120,9 @@
int hash = AddToSetAndGetHash(isolate, obj, true);
CHECK_EQ(Smi::FromInt(hash), obj->raw_properties_or_hash());
- int length = obj->property_array()->length();
+ int length = obj->property_array().length();
CompileRun("x.e = 5;");
- CHECK(obj->property_array()->length() > length);
+ CHECK(obj->property_array().length() > length);
CheckFastObject(obj, hash);
}
@@ -137,14 +137,14 @@
CompileRun(source);
Handle<JSObject> obj = GetGlobal<JSObject>("x");
- CHECK(obj->raw_properties_or_hash()->IsPropertyArray());
+ CHECK(obj->raw_properties_or_hash().IsPropertyArray());
int hash = AddToSetAndGetHash(isolate, obj, true);
- CHECK_EQ(hash, obj->property_array()->Hash());
+ CHECK_EQ(hash, obj->property_array().Hash());
- int length = obj->property_array()->length();
+ int length = obj->property_array().length();
CompileRun("x.f = 2; x.g = 5; x.h = 2");
- CHECK(obj->property_array()->length() > length);
+ CHECK(obj->property_array().length() > length);
CheckFastObject(obj, hash);
}
@@ -159,11 +159,11 @@
CompileRun(source);
Handle<JSObject> obj = GetGlobal<JSObject>("x");
- CHECK(obj->raw_properties_or_hash()->IsPropertyArray());
+ CHECK(obj->raw_properties_or_hash().IsPropertyArray());
int hash = AddToSetAndGetHash(isolate, obj, true);
- CHECK(obj->raw_properties_or_hash()->IsPropertyArray());
- CHECK_EQ(hash, obj->property_array()->Hash());
+ CHECK(obj->raw_properties_or_hash().IsPropertyArray());
+ CHECK_EQ(hash, obj->property_array().Hash());
JSObject::NormalizeProperties(obj, KEEP_INOBJECT_PROPERTIES, 0,
"cctest/test-hashcode");
@@ -181,14 +181,14 @@
Handle<JSObject> obj = GetGlobal<JSObject>("x");
JSObject::NormalizeProperties(obj, CLEAR_INOBJECT_PROPERTIES, 0,
"cctest/test-hashcode");
- CHECK(obj->raw_properties_or_hash()->IsDictionary());
+ CHECK(obj->raw_properties_or_hash().IsDictionary());
int hash = AddToSetAndGetHash(isolate, obj, false);
- CHECK_EQ(hash, obj->property_dictionary()->Hash());
+ CHECK_EQ(hash, obj->property_dictionary().Hash());
- int length = obj->property_dictionary()->length();
+ int length = obj->property_dictionary().length();
CompileRun("for(var i = 0; i < 10; i++) { x['f'+i] = i };");
- CHECK(obj->property_dictionary()->length() > length);
+ CHECK(obj->property_dictionary().length() > length);
CheckDictionaryObject(obj, hash);
}
@@ -201,10 +201,10 @@
isolate->factory()->NewJSObject(isolate->object_function());
JSObject::NormalizeProperties(obj, CLEAR_INOBJECT_PROPERTIES, 0,
"cctest/test-hashcode");
- CHECK(obj->raw_properties_or_hash()->IsDictionary());
+ CHECK(obj->raw_properties_or_hash().IsDictionary());
int hash = AddToSetAndGetHash(isolate, obj, false);
- CHECK_EQ(hash, obj->property_dictionary()->Hash());
+ CHECK_EQ(hash, obj->property_dictionary().Hash());
JSObject::MigrateSlowToFast(obj, 0, "cctest/test-hashcode");
CHECK_EQ(Smi::FromInt(hash), obj->GetHash());
@@ -221,10 +221,10 @@
CompileRun(source);
Handle<JSObject> obj = GetGlobal<JSObject>("x");
- CHECK(obj->raw_properties_or_hash()->IsDictionary());
+ CHECK(obj->raw_properties_or_hash().IsDictionary());
int hash = AddToSetAndGetHash(isolate, obj, false);
- CHECK_EQ(hash, obj->property_dictionary()->Hash());
+ CHECK_EQ(hash, obj->property_dictionary().Hash());
JSObject::MigrateSlowToFast(obj, 0, "cctest/test-hashcode");
CheckFastObject(obj, hash);
diff --git a/test/cctest/test-heap-profiler.cc b/test/cctest/test-heap-profiler.cc
index e7c09b4..f89a18d 100644
--- a/test/cctest/test-heap-profiler.cc
+++ b/test/cctest/test-heap-profiler.cc
@@ -1899,7 +1899,7 @@
}
static int StringCmp(const char* ref, i::String act) {
- std::unique_ptr<char[]> s_act = act->ToCString();
+ std::unique_ptr<char[]> s_act = act.ToCString();
int result = strcmp(ref, s_act.get());
if (result != 0)
fprintf(stderr, "Expected: \"%s\", Actual: \"%s\"\n", ref, s_act.get());
@@ -3095,7 +3095,7 @@
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(env->GetIsolate());
v8::Local<v8::Value> global_object =
v8::Utils::ToLocal(i::Handle<i::JSObject>(
- (isolate->context()->native_context()->global_object()), isolate));
+ (isolate->context().native_context().global_object()), isolate));
global_object_pointer = &global_object;
v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler();
heap_profiler->AddBuildEmbedderGraphCallback(BuildEmbedderGraph, nullptr);
@@ -3159,7 +3159,7 @@
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(env->GetIsolate());
v8::Local<v8::Value> global_object =
v8::Utils::ToLocal(i::Handle<i::JSObject>(
- (isolate->context()->native_context()->global_object()), isolate));
+ (isolate->context().native_context().global_object()), isolate));
global_object_pointer = &global_object;
v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler();
heap_profiler->AddBuildEmbedderGraphCallback(BuildEmbedderGraphWithNamedEdges,
@@ -3225,7 +3225,7 @@
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(env->GetIsolate());
v8::Local<v8::Value> global_object =
v8::Utils::ToLocal(i::Handle<i::JSObject>(
- (isolate->context()->native_context()->global_object()), isolate));
+ (isolate->context().native_context().global_object()), isolate));
global_object_pointer = &global_object;
v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler();
GraphBuildingContext context;
@@ -3302,7 +3302,7 @@
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(env->GetIsolate());
v8::Local<v8::Value> global_object =
v8::Utils::ToLocal(i::Handle<i::JSObject>(
- (isolate->context()->native_context()->global_object()), isolate));
+ (isolate->context().native_context().global_object()), isolate));
global_object_pointer = &global_object;
v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler();
heap_profiler->AddBuildEmbedderGraphCallback(
@@ -3359,7 +3359,7 @@
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(env->GetIsolate());
v8::Local<v8::Value> global_object =
v8::Utils::ToLocal(i::Handle<i::JSObject>(
- (isolate->context()->native_context()->global_object()), isolate));
+ (isolate->context().native_context().global_object()), isolate));
global_object_pointer = &global_object;
v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler();
heap_profiler->AddBuildEmbedderGraphCallback(BuildEmbedderGraphWithPrefix,
@@ -3908,7 +3908,7 @@
i::Handle<i::Object> obj = v8::Utils::OpenHandle(*script);
i::Handle<i::SharedFunctionInfo> shared_function =
- i::Handle<i::SharedFunctionInfo>(i::JSFunction::cast(*obj)->shared(),
+ i::Handle<i::SharedFunctionInfo>(i::JSFunction::cast(*obj).shared(),
i_isolate);
i::Handle<i::ClosureFeedbackCellArray> feedback_cell_array =
i::ClosureFeedbackCellArray::New(i_isolate, shared_function);
diff --git a/test/cctest/test-inobject-slack-tracking.cc b/test/cctest/test-inobject-slack-tracking.cc
index 6f6b5f4..ef03905 100644
--- a/test/cctest/test-inobject-slack-tracking.cc
+++ b/test/cctest/test-inobject-slack-tracking.cc
@@ -76,25 +76,25 @@
}
static Object GetFieldValue(JSObject obj, int property_index) {
- FieldIndex index = FieldIndex::ForPropertyIndex(obj->map(), property_index);
- return obj->RawFastPropertyAt(index);
+ FieldIndex index = FieldIndex::ForPropertyIndex(obj.map(), property_index);
+ return obj.RawFastPropertyAt(index);
}
static double GetDoubleFieldValue(JSObject obj, FieldIndex field_index) {
- if (obj->IsUnboxedDoubleField(field_index)) {
- return obj->RawFastDoublePropertyAt(field_index);
+ if (obj.IsUnboxedDoubleField(field_index)) {
+ return obj.RawFastDoublePropertyAt(field_index);
} else {
- Object value = obj->RawFastPropertyAt(field_index);
- if (value->IsMutableHeapNumber()) {
- return MutableHeapNumber::cast(value)->value();
+ Object value = obj.RawFastPropertyAt(field_index);
+ if (value.IsMutableHeapNumber()) {
+ return MutableHeapNumber::cast(value).value();
} else {
- return value->Number();
+ return value.Number();
}
}
}
static double GetDoubleFieldValue(JSObject obj, int property_index) {
- FieldIndex index = FieldIndex::ForPropertyIndex(obj->map(), property_index);
+ FieldIndex index = FieldIndex::ForPropertyIndex(obj.map(), property_index);
return GetDoubleFieldValue(obj, index);
}
@@ -102,8 +102,8 @@
Handle<Map> filler_map =
CcTest::i_isolate()->factory()->one_pointer_filler_map();
- int inobject_properties = obj->map()->GetInObjectProperties();
- int unused = obj->map()->UnusedPropertyFields();
+ int inobject_properties = obj.map().GetInObjectProperties();
+ int unused = obj.map().UnusedPropertyFields();
if (unused == 0) return false;
for (int i = inobject_properties - unused; i < inobject_properties; i++) {
@@ -145,7 +145,7 @@
CHECK(initial_map->IsInobjectSlackTrackingInProgress());
// There must be at least some slack.
- CHECK_LT(5, obj->map()->GetInObjectProperties());
+ CHECK_LT(5, obj->map().GetInObjectProperties());
CHECK_EQ(Smi::FromInt(42), GetFieldValue(*obj, 0));
CHECK_EQ(4.2, GetDoubleFieldValue(*obj, 1));
CHECK_EQ(*obj, GetFieldValue(*obj, 2));
@@ -162,7 +162,7 @@
CHECK(!IsObjectShrinkable(*obj));
// No slack left.
- CHECK_EQ(3, obj->map()->GetInObjectProperties());
+ CHECK_EQ(3, obj->map().GetInObjectProperties());
}
@@ -206,7 +206,7 @@
CHECK(initial_map->IsInobjectSlackTrackingInProgress());
// There must be at least some slack.
- CHECK_LT(5, obj3->map()->GetInObjectProperties());
+ CHECK_LT(5, obj3->map().GetInObjectProperties());
CHECK_EQ(Smi::FromInt(42), GetFieldValue(*obj3, 0));
CHECK_EQ(4.2, GetDoubleFieldValue(*obj3, 1));
CHECK_EQ(*obj3, GetFieldValue(*obj3, 2));
@@ -226,14 +226,14 @@
CHECK(IsObjectShrinkable(*obj3));
CHECK(!IsObjectShrinkable(*obj5));
- CHECK_EQ(5, obj1->map()->GetInObjectProperties());
- CHECK_EQ(4, obj1->map()->UnusedPropertyFields());
+ CHECK_EQ(5, obj1->map().GetInObjectProperties());
+ CHECK_EQ(4, obj1->map().UnusedPropertyFields());
- CHECK_EQ(5, obj3->map()->GetInObjectProperties());
- CHECK_EQ(2, obj3->map()->UnusedPropertyFields());
+ CHECK_EQ(5, obj3->map().GetInObjectProperties());
+ CHECK_EQ(2, obj3->map().UnusedPropertyFields());
- CHECK_EQ(5, obj5->map()->GetInObjectProperties());
- CHECK_EQ(0, obj5->map()->UnusedPropertyFields());
+ CHECK_EQ(5, obj5->map().GetInObjectProperties());
+ CHECK_EQ(0, obj5->map().UnusedPropertyFields());
// Since slack tracking is complete, the new objects should not be shrinkable.
obj1 = CompileRunI<JSObject>("new A(1);");
@@ -291,7 +291,7 @@
CHECK(initial_map->IsInobjectSlackTrackingInProgress());
// There must be at least some slack.
- CHECK_LT(5, obj->map()->GetInObjectProperties());
+ CHECK_LT(5, obj->map().GetInObjectProperties());
CHECK_EQ(Smi::FromInt(42), GetFieldValue(*obj, 0));
CHECK_EQ(4.2, GetDoubleFieldValue(*obj, 1));
CHECK_EQ(*obj, GetFieldValue(*obj, 2));
@@ -308,7 +308,7 @@
CHECK(!IsObjectShrinkable(*obj));
// No slack left.
- CHECK_EQ(3, obj->map()->GetInObjectProperties());
+ CHECK_EQ(3, obj->map().GetInObjectProperties());
}
@@ -375,7 +375,7 @@
CHECK(b_initial_map->IsInobjectSlackTrackingInProgress());
// There must be at least some slack.
- CHECK_LT(10, obj->map()->GetInObjectProperties());
+ CHECK_LT(10, obj->map().GetInObjectProperties());
CHECK_EQ(Smi::FromInt(42), GetFieldValue(*obj, 0));
CHECK_EQ(4.2, GetDoubleFieldValue(*obj, 1));
CHECK_EQ(*obj, GetFieldValue(*obj, 2));
@@ -400,7 +400,7 @@
CHECK(a_initial_map->IsInobjectSlackTrackingInProgress());
// No slack left.
- CHECK_EQ(6, obj->map()->GetInObjectProperties());
+ CHECK_EQ(6, obj->map().GetInObjectProperties());
}
@@ -480,10 +480,10 @@
CHECK(!IsObjectShrinkable(*a_obj));
// No slack left.
- CHECK_EQ(3, a_obj->map()->GetInObjectProperties());
+ CHECK_EQ(3, a_obj->map().GetInObjectProperties());
// There must be at least some slack.
- CHECK_LT(10, b_obj->map()->GetInObjectProperties());
+ CHECK_LT(10, b_obj->map().GetInObjectProperties());
CHECK_EQ(Smi::FromInt(42), GetFieldValue(*b_obj, 0));
CHECK_EQ(4.2, GetDoubleFieldValue(*b_obj, 1));
CHECK_EQ(*b_obj, GetFieldValue(*b_obj, 2));
@@ -503,7 +503,7 @@
CHECK(!IsObjectShrinkable(*b_obj));
// No slack left.
- CHECK_EQ(6, b_obj->map()->GetInObjectProperties());
+ CHECK_EQ(6, b_obj->map().GetInObjectProperties());
}
@@ -580,10 +580,10 @@
Handle<Map> initial_map(func->initial_map(), func->GetIsolate());
// If the object is slow-mode already, bail out.
- if (obj->map()->is_dictionary_map()) continue;
+ if (obj->map().is_dictionary_map()) continue;
// There must be at least some slack.
- CHECK_LT(fields_count, obj->map()->GetInObjectProperties());
+ CHECK_LT(fields_count, obj->map().GetInObjectProperties());
// One instance was created.
CHECK_EQ(Map::kSlackTrackingCounterStart - 1,
@@ -607,7 +607,7 @@
CHECK(!IsObjectShrinkable(*obj));
// No slack left.
- CHECK_EQ(fields_count, obj->map()->GetInObjectProperties());
+ CHECK_EQ(fields_count, obj->map().GetInObjectProperties());
}
}
@@ -690,8 +690,8 @@
Handle<Map> initial_map(func->initial_map(), func->GetIsolate());
// There must be no slack left.
- CHECK_EQ(JSObject::kMaxInstanceSize, obj->map()->instance_size());
- CHECK_EQ(kMaxInobjectProperties, obj->map()->GetInObjectProperties());
+ CHECK_EQ(JSObject::kMaxInstanceSize, obj->map().instance_size());
+ CHECK_EQ(kMaxInobjectProperties, obj->map().GetInObjectProperties());
// One instance was created.
CHECK_EQ(Map::kSlackTrackingCounterStart - 1,
@@ -708,7 +708,7 @@
CHECK(!IsObjectShrinkable(*obj));
// No slack left.
- CHECK_EQ(kMaxInobjectProperties, obj->map()->GetInObjectProperties());
+ CHECK_EQ(kMaxInobjectProperties, obj->map().GetInObjectProperties());
}
// The other classes in the hierarchy are not affected.
@@ -718,7 +718,7 @@
static void CheckExpectedProperties(int expected, std::ostringstream& os) {
Handle<HeapObject> obj = Handle<HeapObject>::cast(
v8::Utils::OpenHandle(*CompileRun(os.str().c_str())));
- CHECK_EQ(expected, obj->map()->GetInObjectProperties());
+ CHECK_EQ(expected, obj->map().GetInObjectProperties());
}
TEST(ObjectLiteralPropertyBackingStoreSize) {
@@ -874,8 +874,8 @@
Handle<Map> initial_map(func->initial_map(), func->GetIsolate());
// Object should go dictionary mode.
- CHECK_EQ(JSObject::kHeaderSize, obj->map()->instance_size());
- CHECK(obj->map()->is_dictionary_map());
+ CHECK_EQ(JSObject::kHeaderSize, obj->map().instance_size());
+ CHECK(obj->map().is_dictionary_map());
// One instance was created.
CHECK_EQ(Map::kSlackTrackingCounterStart - 1,
@@ -892,8 +892,8 @@
CHECK(!IsObjectShrinkable(*obj));
// Object should stay in dictionary mode.
- CHECK_EQ(JSObject::kHeaderSize, obj->map()->instance_size());
- CHECK(obj->map()->is_dictionary_map());
+ CHECK_EQ(JSObject::kHeaderSize, obj->map().instance_size());
+ CHECK(obj->map().is_dictionary_map());
}
// The other classes in the hierarchy are not affected.
@@ -956,7 +956,7 @@
CHECK(initial_map->IsInobjectSlackTrackingInProgress());
// There must be at least some slack.
- CHECK_LT(builtin_properties_count + 5, obj->map()->GetInObjectProperties());
+ CHECK_LT(builtin_properties_count + 5, obj->map().GetInObjectProperties());
CHECK_EQ(Smi::FromInt(42), GetFieldValue(*obj, builtin_properties_count + 0));
CHECK_EQ(4.2, GetDoubleFieldValue(*obj, builtin_properties_count + 1));
CHECK_EQ(*obj, GetFieldValue(*obj, builtin_properties_count + 2));
@@ -973,9 +973,9 @@
CHECK(!IsObjectShrinkable(*obj));
// No slack left.
- CHECK_EQ(builtin_properties_count + 3, obj->map()->GetInObjectProperties());
+ CHECK_EQ(builtin_properties_count + 3, obj->map().GetInObjectProperties());
- CHECK_EQ(instance_type, obj->map()->instance_type());
+ CHECK_EQ(instance_type, obj->map().instance_type());
}
@@ -1286,8 +1286,8 @@
CHECK(!IsObjectShrinkable(*obj));
// No slack left.
- CHECK_EQ(21, obj->map()->GetInObjectProperties());
- CHECK_EQ(JS_OBJECT_TYPE, obj->map()->instance_type());
+ CHECK_EQ(21, obj->map().GetInObjectProperties());
+ CHECK_EQ(JS_OBJECT_TYPE, obj->map().instance_type());
}
TEST(Regress8853_ClassConstructor) {
@@ -1297,9 +1297,9 @@
// For classes without any this.prop assignments in their
// constructors we start out with 10 inobject properties.
Handle<JSObject> obj = CompileRunI<JSObject>("new (class {});\n");
- CHECK(obj->map()->IsInobjectSlackTrackingInProgress());
+ CHECK(obj->map().IsInobjectSlackTrackingInProgress());
CHECK(IsObjectShrinkable(*obj));
- CHECK_EQ(10, obj->map()->GetInObjectProperties());
+ CHECK_EQ(10, obj->map().GetInObjectProperties());
// For classes with N explicit this.prop assignments in their
// constructors we start out with N+8 inobject properties.
@@ -1311,9 +1311,9 @@
" this.z = 3;\n"
" }\n"
"});\n");
- CHECK(obj->map()->IsInobjectSlackTrackingInProgress());
+ CHECK(obj->map().IsInobjectSlackTrackingInProgress());
CHECK(IsObjectShrinkable(*obj));
- CHECK_EQ(3 + 8, obj->map()->GetInObjectProperties());
+ CHECK_EQ(3 + 8, obj->map().GetInObjectProperties());
}
TEST(Regress8853_ClassHierarchy) {
@@ -1327,9 +1327,9 @@
for (int i = 1; i < 10; ++i) {
std::string script = "new " + base + ";\n";
Handle<JSObject> obj = CompileRunI<JSObject>(script.c_str());
- CHECK(obj->map()->IsInobjectSlackTrackingInProgress());
+ CHECK(obj->map().IsInobjectSlackTrackingInProgress());
CHECK(IsObjectShrinkable(*obj));
- CHECK_EQ(8 + 2 * i, obj->map()->GetInObjectProperties());
+ CHECK_EQ(8 + 2 * i, obj->map().GetInObjectProperties());
base = "(class extends " + base + " {})";
}
}
@@ -1341,9 +1341,9 @@
// For constructor functions without any this.prop assignments in
// them we start out with 10 inobject properties.
Handle<JSObject> obj = CompileRunI<JSObject>("new (function() {});\n");
- CHECK(obj->map()->IsInobjectSlackTrackingInProgress());
+ CHECK(obj->map().IsInobjectSlackTrackingInProgress());
CHECK(IsObjectShrinkable(*obj));
- CHECK_EQ(10, obj->map()->GetInObjectProperties());
+ CHECK_EQ(10, obj->map().GetInObjectProperties());
// For constructor functions with N explicit this.prop assignments
// in them we start out with N+8 inobject properties.
@@ -1356,9 +1356,9 @@
" this.c = 3;\n"
" this.f = 3;\n"
"});\n");
- CHECK(obj->map()->IsInobjectSlackTrackingInProgress());
+ CHECK(obj->map().IsInobjectSlackTrackingInProgress());
CHECK(IsObjectShrinkable(*obj));
- CHECK_EQ(6 + 8, obj->map()->GetInObjectProperties());
+ CHECK_EQ(6 + 8, obj->map().GetInObjectProperties());
}
TEST(InstanceFieldsArePropertiesDefaultConstructorLazy) {
@@ -1379,7 +1379,7 @@
" x09 = null;\n"
" x10 = null;\n"
"});\n");
- CHECK_EQ(11 + 8, obj->map()->GetInObjectProperties());
+ CHECK_EQ(11 + 8, obj->map().GetInObjectProperties());
}
TEST(InstanceFieldsArePropertiesFieldsAndConstructorLazy) {
@@ -1412,7 +1412,7 @@
" this.x20 = null;\n"
" }\n"
"});\n");
- CHECK_EQ(21 + 8, obj->map()->GetInObjectProperties());
+ CHECK_EQ(21 + 8, obj->map().GetInObjectProperties());
}
TEST(InstanceFieldsArePropertiesDefaultConstructorEager) {
@@ -1434,7 +1434,7 @@
" x09 = null;\n"
" x10 = null;\n"
"});\n");
- CHECK_EQ(11 + 8, obj->map()->GetInObjectProperties());
+ CHECK_EQ(11 + 8, obj->map().GetInObjectProperties());
}
TEST(InstanceFieldsArePropertiesFieldsAndConstructorEager) {
@@ -1468,7 +1468,7 @@
" this.x20 = null;\n"
" }\n"
"});\n");
- CHECK_EQ(21 + 8, obj->map()->GetInObjectProperties());
+ CHECK_EQ(21 + 8, obj->map().GetInObjectProperties());
}
} // namespace test_inobject_slack_tracking
diff --git a/test/cctest/test-js-weak-refs.cc b/test/cctest/test-js-weak-refs.cc
index 393d8fa..858c9f5 100644
--- a/test/cctest/test-js-weak-refs.cc
+++ b/test/cctest/test-js-weak-refs.cc
@@ -69,7 +69,7 @@
Handle<Object> holdings, Handle<Object> key, Isolate* isolate) {
JSFinalizationGroup::Register(finalization_group, target, holdings, key,
isolate);
- CHECK(finalization_group->active_cells()->IsWeakCell());
+ CHECK(finalization_group->active_cells().IsWeakCell());
Handle<WeakCell> weak_cell =
handle(WeakCell::cast(finalization_group->active_cells()), isolate);
#ifdef VERIFY_HEAP
@@ -105,19 +105,19 @@
if (n_args == 0) {
// Verify empty list
- CHECK(list_head->IsUndefined(isolate));
+ CHECK(list_head.IsUndefined(isolate));
} else {
WeakCell current = WeakCell::cast(Object(va_arg(args, Address)));
CHECK_EQ(current, list_head);
- CHECK(current->prev()->IsUndefined(isolate));
+ CHECK(current.prev().IsUndefined(isolate));
for (int i = 1; i < n_args; i++) {
WeakCell next = WeakCell::cast(Object(va_arg(args, Address)));
- CHECK_EQ(current->next(), next);
- CHECK_EQ(next->prev(), current);
+ CHECK_EQ(current.next(), next);
+ CHECK_EQ(next.prev(), current);
current = next;
}
- CHECK(current->next()->IsUndefined(isolate));
+ CHECK(current.next().IsUndefined(isolate));
}
va_end(args);
}
@@ -133,19 +133,19 @@
if (n_args == 0) {
// Verify empty list
- CHECK(list_head->IsTheHole(isolate));
+ CHECK(list_head.IsTheHole(isolate));
} else {
WeakCell current = WeakCell::cast(Object(va_arg(args, Address)));
CHECK_EQ(current, list_head);
- CHECK(current->key_list_prev()->IsUndefined(isolate));
+ CHECK(current.key_list_prev().IsUndefined(isolate));
for (int i = 1; i < n_args; i++) {
WeakCell next = WeakCell::cast(Object(va_arg(args, Address)));
- CHECK_EQ(current->key_list_next(), next);
- CHECK_EQ(next->key_list_prev(), current);
+ CHECK_EQ(current.key_list_next(), next);
+ CHECK_EQ(next.key_list_prev(), current);
current = next;
}
- CHECK(current->key_list_next()->IsUndefined(isolate));
+ CHECK(current.key_list_next().IsUndefined(isolate));
}
va_end(args);
}
@@ -169,13 +169,13 @@
VerifyWeakCellChain(isolate, finalization_group->active_cells(), 1,
*weak_cell1);
- CHECK(weak_cell1->key_list_prev()->IsUndefined(isolate));
- CHECK(weak_cell1->key_list_next()->IsUndefined(isolate));
+ CHECK(weak_cell1->key_list_prev().IsUndefined(isolate));
+ CHECK(weak_cell1->key_list_next().IsUndefined(isolate));
- CHECK(finalization_group->cleared_cells()->IsUndefined(isolate));
+ CHECK(finalization_group->cleared_cells().IsUndefined(isolate));
// No key was used during registration, key-based map stays uninitialized.
- CHECK(finalization_group->key_map()->IsUndefined(isolate));
+ CHECK(finalization_group->key_map().IsUndefined(isolate));
// Register another weak reference and verify internal data structures.
Handle<WeakCell> weak_cell2 =
@@ -183,11 +183,11 @@
VerifyWeakCellChain(isolate, finalization_group->active_cells(), 2,
*weak_cell2, *weak_cell1);
- CHECK(weak_cell2->key_list_prev()->IsUndefined(isolate));
- CHECK(weak_cell2->key_list_next()->IsUndefined(isolate));
+ CHECK(weak_cell2->key_list_prev().IsUndefined(isolate));
+ CHECK(weak_cell2->key_list_next().IsUndefined(isolate));
- CHECK(finalization_group->cleared_cells()->IsUndefined(isolate));
- CHECK(finalization_group->key_map()->IsUndefined(isolate));
+ CHECK(finalization_group->cleared_cells().IsUndefined(isolate));
+ CHECK(finalization_group->key_map().IsUndefined(isolate));
}
TEST(TestRegisterWithKey) {
@@ -211,7 +211,7 @@
finalization_group, js_object, undefined, key1, isolate);
{
- CHECK(finalization_group->key_map()->IsObjectHashTable());
+ CHECK(finalization_group->key_map().IsObjectHashTable());
Handle<ObjectHashTable> key_map =
handle(ObjectHashTable::cast(finalization_group->key_map()), isolate);
VerifyWeakCellKeyChain(isolate, key_map->Lookup(key1), 1, *weak_cell1);
@@ -224,7 +224,7 @@
finalization_group, js_object, undefined, key2, isolate);
{
- CHECK(finalization_group->key_map()->IsObjectHashTable());
+ CHECK(finalization_group->key_map().IsObjectHashTable());
Handle<ObjectHashTable> key_map =
handle(ObjectHashTable::cast(finalization_group->key_map()), isolate);
VerifyWeakCellKeyChain(isolate, key_map->Lookup(key1), 1, *weak_cell1);
@@ -237,7 +237,7 @@
finalization_group, js_object, undefined, key1, isolate);
{
- CHECK(finalization_group->key_map()->IsObjectHashTable());
+ CHECK(finalization_group->key_map().IsObjectHashTable());
Handle<ObjectHashTable> key_map =
handle(ObjectHashTable::cast(finalization_group->key_map()), isolate);
VerifyWeakCellKeyChain(isolate, key_map->Lookup(key1), 2, *weak_cell3,
@@ -265,20 +265,20 @@
// Nullify the first WeakCell and verify internal data structures.
NullifyWeakCell(weak_cell1, isolate);
CHECK_EQ(finalization_group->active_cells(), *weak_cell2);
- CHECK(weak_cell2->prev()->IsUndefined(isolate));
- CHECK(weak_cell2->next()->IsUndefined(isolate));
+ CHECK(weak_cell2->prev().IsUndefined(isolate));
+ CHECK(weak_cell2->next().IsUndefined(isolate));
CHECK_EQ(finalization_group->cleared_cells(), *weak_cell1);
- CHECK(weak_cell1->prev()->IsUndefined(isolate));
- CHECK(weak_cell1->next()->IsUndefined(isolate));
+ CHECK(weak_cell1->prev().IsUndefined(isolate));
+ CHECK(weak_cell1->next().IsUndefined(isolate));
// Nullify the second WeakCell and verify internal data structures.
NullifyWeakCell(weak_cell2, isolate);
- CHECK(finalization_group->active_cells()->IsUndefined(isolate));
+ CHECK(finalization_group->active_cells().IsUndefined(isolate));
CHECK_EQ(finalization_group->cleared_cells(), *weak_cell2);
CHECK_EQ(weak_cell2->next(), *weak_cell1);
- CHECK(weak_cell2->prev()->IsUndefined(isolate));
+ CHECK(weak_cell2->prev().IsUndefined(isolate));
CHECK_EQ(weak_cell1->prev(), *weak_cell2);
- CHECK(weak_cell1->next()->IsUndefined(isolate));
+ CHECK(weak_cell1->next().IsUndefined(isolate));
}
TEST(TestWeakCellNullify2) {
@@ -300,19 +300,19 @@
// Like TestWeakCellNullify1 but nullify the WeakCells in opposite order.
NullifyWeakCell(weak_cell2, isolate);
CHECK_EQ(finalization_group->active_cells(), *weak_cell1);
- CHECK(weak_cell1->prev()->IsUndefined(isolate));
- CHECK(weak_cell1->next()->IsUndefined(isolate));
+ CHECK(weak_cell1->prev().IsUndefined(isolate));
+ CHECK(weak_cell1->next().IsUndefined(isolate));
CHECK_EQ(finalization_group->cleared_cells(), *weak_cell2);
- CHECK(weak_cell2->prev()->IsUndefined(isolate));
- CHECK(weak_cell2->next()->IsUndefined(isolate));
+ CHECK(weak_cell2->prev().IsUndefined(isolate));
+ CHECK(weak_cell2->next().IsUndefined(isolate));
NullifyWeakCell(weak_cell1, isolate);
- CHECK(finalization_group->active_cells()->IsUndefined(isolate));
+ CHECK(finalization_group->active_cells().IsUndefined(isolate));
CHECK_EQ(finalization_group->cleared_cells(), *weak_cell1);
CHECK_EQ(weak_cell1->next(), *weak_cell2);
- CHECK(weak_cell1->prev()->IsUndefined(isolate));
+ CHECK(weak_cell1->prev().IsUndefined(isolate));
CHECK_EQ(weak_cell2->prev(), *weak_cell1);
- CHECK(weak_cell2->next()->IsUndefined(isolate));
+ CHECK(weak_cell2->next().IsUndefined(isolate));
}
TEST(TestJSFinalizationGroupPopClearedCellHoldings1) {
@@ -346,15 +346,15 @@
Object cleared1 =
JSFinalizationGroup::PopClearedCellHoldings(finalization_group, isolate);
CHECK_EQ(cleared1, *holdings3);
- CHECK(weak_cell3->prev()->IsUndefined(isolate));
- CHECK(weak_cell3->next()->IsUndefined(isolate));
+ CHECK(weak_cell3->prev().IsUndefined(isolate));
+ CHECK(weak_cell3->next().IsUndefined(isolate));
CHECK(finalization_group->NeedsCleanup());
Object cleared2 =
JSFinalizationGroup::PopClearedCellHoldings(finalization_group, isolate);
CHECK_EQ(cleared2, *holdings2);
- CHECK(weak_cell2->prev()->IsUndefined(isolate));
- CHECK(weak_cell2->next()->IsUndefined(isolate));
+ CHECK(weak_cell2->prev().IsUndefined(isolate));
+ CHECK(weak_cell2->next().IsUndefined(isolate));
CHECK(!finalization_group->NeedsCleanup());
@@ -364,12 +364,12 @@
Object cleared3 =
JSFinalizationGroup::PopClearedCellHoldings(finalization_group, isolate);
CHECK_EQ(cleared3, *holdings1);
- CHECK(weak_cell1->prev()->IsUndefined(isolate));
- CHECK(weak_cell1->next()->IsUndefined(isolate));
+ CHECK(weak_cell1->prev().IsUndefined(isolate));
+ CHECK(weak_cell1->next().IsUndefined(isolate));
CHECK(!finalization_group->NeedsCleanup());
- CHECK(finalization_group->active_cells()->IsUndefined(isolate));
- CHECK(finalization_group->cleared_cells()->IsUndefined(isolate));
+ CHECK(finalization_group->active_cells().IsUndefined(isolate));
+ CHECK(finalization_group->cleared_cells().IsUndefined(isolate));
}
TEST(TestJSFinalizationGroupPopClearedCellHoldings2) {
@@ -656,16 +656,16 @@
Handle<JSWeakRef> inner_weak_ref = ConstructJSWeakRef(js_object, isolate);
CcTest::CollectAllGarbage();
- CHECK(!inner_weak_ref->target()->IsUndefined(isolate));
+ CHECK(!inner_weak_ref->target().IsUndefined(isolate));
weak_ref = inner_scope.CloseAndEscape(inner_weak_ref);
}
- CHECK(!weak_ref->target()->IsUndefined(isolate));
+ CHECK(!weak_ref->target().IsUndefined(isolate));
CcTest::CollectAllGarbage();
- CHECK(weak_ref->target()->IsUndefined(isolate));
+ CHECK(weak_ref->target().IsUndefined(isolate));
}
TEST(TestJSWeakRefIncrementalMarking) {
@@ -691,17 +691,17 @@
heap::SimulateIncrementalMarking(heap, true);
CcTest::CollectAllGarbage();
- CHECK(!inner_weak_ref->target()->IsUndefined(isolate));
+ CHECK(!inner_weak_ref->target().IsUndefined(isolate));
weak_ref = inner_scope.CloseAndEscape(inner_weak_ref);
}
- CHECK(!weak_ref->target()->IsUndefined(isolate));
+ CHECK(!weak_ref->target().IsUndefined(isolate));
heap::SimulateIncrementalMarking(heap, true);
CcTest::CollectAllGarbage();
- CHECK(weak_ref->target()->IsUndefined(isolate));
+ CHECK(weak_ref->target().IsUndefined(isolate));
}
TEST(TestJSWeakRefKeepDuringJob) {
@@ -724,17 +724,17 @@
weak_ref = inner_scope.CloseAndEscape(inner_weak_ref);
}
- CHECK(!weak_ref->target()->IsUndefined(isolate));
+ CHECK(!weak_ref->target().IsUndefined(isolate));
CcTest::CollectAllGarbage();
- CHECK(!weak_ref->target()->IsUndefined(isolate));
+ CHECK(!weak_ref->target().IsUndefined(isolate));
// Clears the KeepDuringJob set.
isolate->default_microtask_queue()->RunMicrotasks(isolate);
CcTest::CollectAllGarbage();
- CHECK(weak_ref->target()->IsUndefined(isolate));
+ CHECK(weak_ref->target().IsUndefined(isolate));
}
TEST(TestJSWeakRefKeepDuringJobIncrementalMarking) {
@@ -761,19 +761,19 @@
weak_ref = inner_scope.CloseAndEscape(inner_weak_ref);
}
- CHECK(!weak_ref->target()->IsUndefined(isolate));
+ CHECK(!weak_ref->target().IsUndefined(isolate));
heap::SimulateIncrementalMarking(heap, true);
CcTest::CollectAllGarbage();
- CHECK(!weak_ref->target()->IsUndefined(isolate));
+ CHECK(!weak_ref->target().IsUndefined(isolate));
// Clears the KeepDuringJob set.
isolate->default_microtask_queue()->RunMicrotasks(isolate);
heap::SimulateIncrementalMarking(heap, true);
CcTest::CollectAllGarbage();
- CHECK(weak_ref->target()->IsUndefined(isolate));
+ CHECK(weak_ref->target().IsUndefined(isolate));
}
} // namespace internal
diff --git a/test/cctest/test-liveedit.cc b/test/cctest/test-liveedit.cc
index b3f8b06..0ae9cca 100644
--- a/test/cctest/test-liveedit.cc
+++ b/test/cctest/test-liveedit.cc
@@ -208,7 +208,7 @@
v8::Script::Compile(context, v8_str(isolate, source_a)).ToLocalChecked();
script_a->Run(context).ToLocalChecked();
i::Handle<i::Script> i_script_a(
- i::Script::cast(v8::Utils::OpenHandle(*script_a)->shared()->script()),
+ i::Script::cast(v8::Utils::OpenHandle(*script_a)->shared().script()),
i_isolate);
if (result) {
@@ -541,7 +541,7 @@
v8::Local<v8::Function> f =
script->Run(context).ToLocalChecked().As<v8::Function>();
i::Handle<i::Script> i_script(
- i::Script::cast(v8::Utils::OpenHandle(*script)->shared()->script()),
+ i::Script::cast(v8::Utils::OpenHandle(*script)->shared().script()),
i_isolate);
debug::LiveEditResult result;
LiveEdit::PatchScript(
diff --git a/test/cctest/test-log-stack-tracer.cc b/test/cctest/test-log-stack-tracer.cc
index 19cb785..f7f343a 100644
--- a/test/cctest/test-log-stack-tracer.cc
+++ b/test/cctest/test-log-stack-tracer.cc
@@ -43,8 +43,8 @@
namespace internal {
static bool IsAddressWithinFuncCode(JSFunction function, void* addr) {
- i::AbstractCode code = function->abstract_code();
- return code->contains(reinterpret_cast<Address>(addr));
+ i::AbstractCode code = function.abstract_code();
+ return code.contains(reinterpret_cast<Address>(addr));
}
static bool IsAddressWithinFuncCode(v8::Local<v8::Context> context,
diff --git a/test/cctest/test-log.cc b/test/cctest/test-log.cc
index c54ac37..0ac2a65 100644
--- a/test/cctest/test-log.cc
+++ b/test/cctest/test-log.cc
@@ -934,28 +934,28 @@
size_t i = 0;
for (i::HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) {
- if (!obj->IsMap()) continue;
+ if (!obj.IsMap()) continue;
i++;
- uintptr_t address = obj->ptr();
+ uintptr_t address = obj.ptr();
if (map_create_addresses.find(address) == map_create_addresses.end()) {
// logger->PrintLog();
- i::Map::cast(obj)->Print();
+ i::Map::cast(obj).Print();
FATAL(
"Map (%p, #%zu) creation not logged during startup with "
"--trace-maps!"
"\n# Expected Log Line: map-create, ... %p",
- reinterpret_cast<void*>(obj->ptr()), i,
- reinterpret_cast<void*>(obj->ptr()));
+ reinterpret_cast<void*>(obj.ptr()), i,
+ reinterpret_cast<void*>(obj.ptr()));
} else if (map_details_addresses.find(address) ==
map_details_addresses.end()) {
// logger->PrintLog();
- i::Map::cast(obj)->Print();
+ i::Map::cast(obj).Print();
FATAL(
"Map (%p, #%zu) details not logged during startup with "
"--trace-maps!"
"\n# Expected Log Line: map-details, ... %p",
- reinterpret_cast<void*>(obj->ptr()), i,
- reinterpret_cast<void*>(obj->ptr()));
+ reinterpret_cast<void*>(obj.ptr()), i,
+ reinterpret_cast<void*>(obj.ptr()));
}
}
}
diff --git a/test/cctest/test-macro-assembler-mips.cc b/test/cctest/test-macro-assembler-mips.cc
index f065f6c..448a429 100644
--- a/test/cctest/test-macro-assembler-mips.cc
+++ b/test/cctest/test-macro-assembler-mips.cc
@@ -122,7 +122,7 @@
i::Handle<i::JSReceiver> o = v8::Utils::OpenHandle(*result);
i::Handle<i::JSArray> array1(i::JSArray::cast(*o), o->GetIsolate());
i::FixedDoubleArray a = i::FixedDoubleArray::cast(array1->elements());
- double value = a->get_scalar(0);
+ double value = a.get_scalar(0);
CHECK(std::isnan(value) &&
bit_cast<uint64_t>(value) ==
bit_cast<uint64_t>(std::numeric_limits<double>::quiet_NaN()));
diff --git a/test/cctest/test-macro-assembler-x64.cc b/test/cctest/test-macro-assembler-x64.cc
index 12aa19a..d5537f9 100644
--- a/test/cctest/test-macro-assembler-x64.cc
+++ b/test/cctest/test-macro-assembler-x64.cc
@@ -84,7 +84,7 @@
Smi smi_from_int = Smi::FromInt(static_cast<int32_t>(number));
CHECK_EQ(smi_from_int, smi_from_intptr);
}
- int64_t smi_value = smi_from_intptr->value();
+ int64_t smi_value = smi_from_intptr.value();
CHECK_EQ(number, smi_value);
}
}
diff --git a/test/cctest/test-mementos.cc b/test/cctest/test-mementos.cc
index 25fb64e..59c5c36 100644
--- a/test/cctest/test-mementos.cc
+++ b/test/cctest/test-mementos.cc
@@ -51,9 +51,9 @@
// site pointer.
AllocationMemento memento = AllocationMemento::unchecked_cast(
Object(new_space->top() + kHeapObjectTag));
- memento->set_map_after_allocation(
- ReadOnlyRoots(heap).allocation_memento_map(), SKIP_WRITE_BARRIER);
- memento->set_allocation_site(
+ memento.set_map_after_allocation(ReadOnlyRoots(heap).allocation_memento_map(),
+ SKIP_WRITE_BARRIER);
+ memento.set_allocation_site(
AllocationSite::unchecked_cast(Object(kHeapObjectTag)),
SKIP_WRITE_BARRIER);
}
diff --git a/test/cctest/test-object.cc b/test/cctest/test-object.cc
index 52af8cd..de36ac8 100644
--- a/test/cctest/test-object.cc
+++ b/test/cctest/test-object.cc
@@ -115,143 +115,143 @@
*env->Global()->Get(env.local(), v8_str("cc")).ToLocalChecked()));
// Check the transition tree.
- CHECK_EQ(a->map()->instance_descriptors(), b->map()->instance_descriptors());
- CHECK_EQ(b->map()->instance_descriptors(), c->map()->instance_descriptors());
- CHECK_NE(c->map()->instance_descriptors(), cc->map()->instance_descriptors());
- CHECK_NE(b->map()->instance_descriptors(), cc->map()->instance_descriptors());
+ CHECK_EQ(a->map().instance_descriptors(), b->map().instance_descriptors());
+ CHECK_EQ(b->map().instance_descriptors(), c->map().instance_descriptors());
+ CHECK_NE(c->map().instance_descriptors(), cc->map().instance_descriptors());
+ CHECK_NE(b->map().instance_descriptors(), cc->map().instance_descriptors());
// Check that the EnumLength is unset.
- CHECK_EQ(a->map()->EnumLength(), kInvalidEnumCacheSentinel);
- CHECK_EQ(b->map()->EnumLength(), kInvalidEnumCacheSentinel);
- CHECK_EQ(c->map()->EnumLength(), kInvalidEnumCacheSentinel);
- CHECK_EQ(cc->map()->EnumLength(), kInvalidEnumCacheSentinel);
+ CHECK_EQ(a->map().EnumLength(), kInvalidEnumCacheSentinel);
+ CHECK_EQ(b->map().EnumLength(), kInvalidEnumCacheSentinel);
+ CHECK_EQ(c->map().EnumLength(), kInvalidEnumCacheSentinel);
+ CHECK_EQ(cc->map().EnumLength(), kInvalidEnumCacheSentinel);
// Check that the EnumCache is empty.
- CHECK_EQ(a->map()->instance_descriptors()->enum_cache(),
+ CHECK_EQ(a->map().instance_descriptors().enum_cache(),
*factory->empty_enum_cache());
- CHECK_EQ(b->map()->instance_descriptors()->enum_cache(),
+ CHECK_EQ(b->map().instance_descriptors().enum_cache(),
*factory->empty_enum_cache());
- CHECK_EQ(c->map()->instance_descriptors()->enum_cache(),
+ CHECK_EQ(c->map().instance_descriptors().enum_cache(),
*factory->empty_enum_cache());
- CHECK_EQ(cc->map()->instance_descriptors()->enum_cache(),
+ CHECK_EQ(cc->map().instance_descriptors().enum_cache(),
*factory->empty_enum_cache());
// The EnumCache is shared on the DescriptorArray, creating it on {cc} has no
// effect on the other maps.
CompileRun("var s = 0; for (let key in cc) { s += cc[key] };");
{
- CHECK_EQ(a->map()->EnumLength(), kInvalidEnumCacheSentinel);
- CHECK_EQ(b->map()->EnumLength(), kInvalidEnumCacheSentinel);
- CHECK_EQ(c->map()->EnumLength(), kInvalidEnumCacheSentinel);
- CHECK_EQ(cc->map()->EnumLength(), 3);
+ CHECK_EQ(a->map().EnumLength(), kInvalidEnumCacheSentinel);
+ CHECK_EQ(b->map().EnumLength(), kInvalidEnumCacheSentinel);
+ CHECK_EQ(c->map().EnumLength(), kInvalidEnumCacheSentinel);
+ CHECK_EQ(cc->map().EnumLength(), 3);
- CHECK_EQ(a->map()->instance_descriptors()->enum_cache(),
+ CHECK_EQ(a->map().instance_descriptors().enum_cache(),
*factory->empty_enum_cache());
- CHECK_EQ(b->map()->instance_descriptors()->enum_cache(),
+ CHECK_EQ(b->map().instance_descriptors().enum_cache(),
*factory->empty_enum_cache());
- CHECK_EQ(c->map()->instance_descriptors()->enum_cache(),
+ CHECK_EQ(c->map().instance_descriptors().enum_cache(),
*factory->empty_enum_cache());
- EnumCache enum_cache = cc->map()->instance_descriptors()->enum_cache();
+ EnumCache enum_cache = cc->map().instance_descriptors().enum_cache();
CHECK_NE(enum_cache, *factory->empty_enum_cache());
- CHECK_EQ(enum_cache->keys()->length(), 3);
- CHECK_EQ(enum_cache->indices()->length(), 3);
+ CHECK_EQ(enum_cache.keys().length(), 3);
+ CHECK_EQ(enum_cache.indices().length(), 3);
}
// Initializing the EnumCache for the the topmost map {a} will not create the
// cache for the other maps.
CompileRun("var s = 0; for (let key in a) { s += a[key] };");
{
- CHECK_EQ(a->map()->EnumLength(), 1);
- CHECK_EQ(b->map()->EnumLength(), kInvalidEnumCacheSentinel);
- CHECK_EQ(c->map()->EnumLength(), kInvalidEnumCacheSentinel);
- CHECK_EQ(cc->map()->EnumLength(), 3);
+ CHECK_EQ(a->map().EnumLength(), 1);
+ CHECK_EQ(b->map().EnumLength(), kInvalidEnumCacheSentinel);
+ CHECK_EQ(c->map().EnumLength(), kInvalidEnumCacheSentinel);
+ CHECK_EQ(cc->map().EnumLength(), 3);
// The enum cache is shared on the descriptor array of maps {a}, {b} and
// {c} only.
- EnumCache enum_cache = a->map()->instance_descriptors()->enum_cache();
+ EnumCache enum_cache = a->map().instance_descriptors().enum_cache();
CHECK_NE(enum_cache, *factory->empty_enum_cache());
- CHECK_NE(cc->map()->instance_descriptors()->enum_cache(),
+ CHECK_NE(cc->map().instance_descriptors().enum_cache(),
*factory->empty_enum_cache());
- CHECK_NE(cc->map()->instance_descriptors()->enum_cache(), enum_cache);
- CHECK_EQ(a->map()->instance_descriptors()->enum_cache(), enum_cache);
- CHECK_EQ(b->map()->instance_descriptors()->enum_cache(), enum_cache);
- CHECK_EQ(c->map()->instance_descriptors()->enum_cache(), enum_cache);
+ CHECK_NE(cc->map().instance_descriptors().enum_cache(), enum_cache);
+ CHECK_EQ(a->map().instance_descriptors().enum_cache(), enum_cache);
+ CHECK_EQ(b->map().instance_descriptors().enum_cache(), enum_cache);
+ CHECK_EQ(c->map().instance_descriptors().enum_cache(), enum_cache);
- CHECK_EQ(enum_cache->keys()->length(), 1);
- CHECK_EQ(enum_cache->indices()->length(), 1);
+ CHECK_EQ(enum_cache.keys().length(), 1);
+ CHECK_EQ(enum_cache.indices().length(), 1);
}
// Creating the EnumCache for {c} will create a new EnumCache on the shared
// DescriptorArray.
Handle<EnumCache> previous_enum_cache(
- a->map()->instance_descriptors()->enum_cache(), a->GetIsolate());
+ a->map().instance_descriptors().enum_cache(), a->GetIsolate());
Handle<FixedArray> previous_keys(previous_enum_cache->keys(),
a->GetIsolate());
Handle<FixedArray> previous_indices(previous_enum_cache->indices(),
a->GetIsolate());
CompileRun("var s = 0; for (let key in c) { s += c[key] };");
{
- CHECK_EQ(a->map()->EnumLength(), 1);
- CHECK_EQ(b->map()->EnumLength(), kInvalidEnumCacheSentinel);
- CHECK_EQ(c->map()->EnumLength(), 3);
- CHECK_EQ(cc->map()->EnumLength(), 3);
+ CHECK_EQ(a->map().EnumLength(), 1);
+ CHECK_EQ(b->map().EnumLength(), kInvalidEnumCacheSentinel);
+ CHECK_EQ(c->map().EnumLength(), 3);
+ CHECK_EQ(cc->map().EnumLength(), 3);
- EnumCache enum_cache = c->map()->instance_descriptors()->enum_cache();
+ EnumCache enum_cache = c->map().instance_descriptors().enum_cache();
CHECK_NE(enum_cache, *factory->empty_enum_cache());
// The keys and indices caches are updated.
CHECK_EQ(enum_cache, *previous_enum_cache);
- CHECK_NE(enum_cache->keys(), *previous_keys);
- CHECK_NE(enum_cache->indices(), *previous_indices);
+ CHECK_NE(enum_cache.keys(), *previous_keys);
+ CHECK_NE(enum_cache.indices(), *previous_indices);
CHECK_EQ(previous_keys->length(), 1);
CHECK_EQ(previous_indices->length(), 1);
- CHECK_EQ(enum_cache->keys()->length(), 3);
- CHECK_EQ(enum_cache->indices()->length(), 3);
+ CHECK_EQ(enum_cache.keys().length(), 3);
+ CHECK_EQ(enum_cache.indices().length(), 3);
// The enum cache is shared on the descriptor array of maps {a}, {b} and
// {c} only.
- CHECK_NE(cc->map()->instance_descriptors()->enum_cache(),
+ CHECK_NE(cc->map().instance_descriptors().enum_cache(),
*factory->empty_enum_cache());
- CHECK_NE(cc->map()->instance_descriptors()->enum_cache(), enum_cache);
- CHECK_NE(cc->map()->instance_descriptors()->enum_cache(),
+ CHECK_NE(cc->map().instance_descriptors().enum_cache(), enum_cache);
+ CHECK_NE(cc->map().instance_descriptors().enum_cache(),
*previous_enum_cache);
- CHECK_EQ(a->map()->instance_descriptors()->enum_cache(), enum_cache);
- CHECK_EQ(b->map()->instance_descriptors()->enum_cache(), enum_cache);
- CHECK_EQ(c->map()->instance_descriptors()->enum_cache(), enum_cache);
+ CHECK_EQ(a->map().instance_descriptors().enum_cache(), enum_cache);
+ CHECK_EQ(b->map().instance_descriptors().enum_cache(), enum_cache);
+ CHECK_EQ(c->map().instance_descriptors().enum_cache(), enum_cache);
}
// {b} can reuse the existing EnumCache, hence we only need to set the correct
// EnumLength on the map without modifying the cache itself.
previous_enum_cache =
- handle(a->map()->instance_descriptors()->enum_cache(), a->GetIsolate());
+ handle(a->map().instance_descriptors().enum_cache(), a->GetIsolate());
previous_keys = handle(previous_enum_cache->keys(), a->GetIsolate());
previous_indices = handle(previous_enum_cache->indices(), a->GetIsolate());
CompileRun("var s = 0; for (let key in b) { s += b[key] };");
{
- CHECK_EQ(a->map()->EnumLength(), 1);
- CHECK_EQ(b->map()->EnumLength(), 2);
- CHECK_EQ(c->map()->EnumLength(), 3);
- CHECK_EQ(cc->map()->EnumLength(), 3);
+ CHECK_EQ(a->map().EnumLength(), 1);
+ CHECK_EQ(b->map().EnumLength(), 2);
+ CHECK_EQ(c->map().EnumLength(), 3);
+ CHECK_EQ(cc->map().EnumLength(), 3);
- EnumCache enum_cache = c->map()->instance_descriptors()->enum_cache();
+ EnumCache enum_cache = c->map().instance_descriptors().enum_cache();
CHECK_NE(enum_cache, *factory->empty_enum_cache());
// The keys and indices caches are not updated.
CHECK_EQ(enum_cache, *previous_enum_cache);
- CHECK_EQ(enum_cache->keys(), *previous_keys);
- CHECK_EQ(enum_cache->indices(), *previous_indices);
- CHECK_EQ(enum_cache->keys()->length(), 3);
- CHECK_EQ(enum_cache->indices()->length(), 3);
+ CHECK_EQ(enum_cache.keys(), *previous_keys);
+ CHECK_EQ(enum_cache.indices(), *previous_indices);
+ CHECK_EQ(enum_cache.keys().length(), 3);
+ CHECK_EQ(enum_cache.indices().length(), 3);
// The enum cache is shared on the descriptor array of maps {a}, {b} and
// {c} only.
- CHECK_NE(cc->map()->instance_descriptors()->enum_cache(),
+ CHECK_NE(cc->map().instance_descriptors().enum_cache(),
*factory->empty_enum_cache());
- CHECK_NE(cc->map()->instance_descriptors()->enum_cache(), enum_cache);
- CHECK_NE(cc->map()->instance_descriptors()->enum_cache(),
+ CHECK_NE(cc->map().instance_descriptors().enum_cache(), enum_cache);
+ CHECK_NE(cc->map().instance_descriptors().enum_cache(),
*previous_enum_cache);
- CHECK_EQ(a->map()->instance_descriptors()->enum_cache(), enum_cache);
- CHECK_EQ(b->map()->instance_descriptors()->enum_cache(), enum_cache);
- CHECK_EQ(c->map()->instance_descriptors()->enum_cache(), enum_cache);
+ CHECK_EQ(a->map().instance_descriptors().enum_cache(), enum_cache);
+ CHECK_EQ(b->map().instance_descriptors().enum_cache(), enum_cache);
+ CHECK_EQ(c->map().instance_descriptors().enum_cache(), enum_cache);
}
}
diff --git a/test/cctest/test-parsing.cc b/test/cctest/test-parsing.cc
index 1ff0514..595e4d9 100644
--- a/test/cctest/test-parsing.cc
+++ b/test/cctest/test-parsing.cc
@@ -3222,7 +3222,7 @@
i::DeclarationScope* script_scope =
new (&zone) i::DeclarationScope(&zone, &avf);
i::Scope* s = i::Scope::DeserializeScopeChain(
- isolate, &zone, context->scope_info(), script_scope, &avf,
+ isolate, &zone, context.scope_info(), script_scope, &avf,
i::Scope::DeserializationMode::kIncludingVariables);
CHECK(s != script_scope);
CHECK_NOT_NULL(name);
@@ -3271,7 +3271,7 @@
i::DeclarationScope* script_scope =
new (&zone) i::DeclarationScope(&zone, &avf);
i::Scope* s = i::Scope::DeserializeScopeChain(
- isolate, &zone, context->scope_info(), script_scope, &avf,
+ isolate, &zone, context.scope_info(), script_scope, &avf,
i::Scope::DeserializationMode::kIncludingVariables);
CHECK(s != script_scope);
@@ -4185,7 +4185,7 @@
i::DeclarationScope* script_scope =
new (zone) i::DeclarationScope(zone, &avf);
i::Scope* s = i::Scope::DeserializeScopeChain(
- isolate, zone, f->context()->scope_info(), script_scope, &avf,
+ isolate, zone, f->context().scope_info(), script_scope, &avf,
i::Scope::DeserializationMode::kIncludingVariables);
return s;
}
diff --git a/test/cctest/test-poison-disasm-arm.cc b/test/cctest/test-poison-disasm-arm.cc
index 989a0d0..3b4983a 100644
--- a/test/cctest/test-poison-disasm-arm.cc
+++ b/test/cctest/test-poison-disasm-arm.cc
@@ -20,8 +20,8 @@
v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
CcTest::global()->Get(context, v8_str(function)).ToLocalChecked())));
- Address begin = f->code()->raw_instruction_start();
- Address end = f->code()->raw_instruction_end();
+ Address begin = f->code().raw_instruction_start();
+ Address end = f->code().raw_instruction_end();
Isolate* isolate = CcTest::i_isolate();
std::ostringstream os;
Disassembler::Decode(isolate, &os, reinterpret_cast<byte*>(begin),
diff --git a/test/cctest/test-profile-generator.cc b/test/cctest/test-profile-generator.cc
index d0a34d4..bf28e20 100644
--- a/test/cctest/test-profile-generator.cc
+++ b/test/cctest/test-profile-generator.cc
@@ -678,7 +678,7 @@
v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
env->Global()->Get(env.local(), v8_str(name)).ToLocalChecked())));
CodeEntry* func_entry =
- code_map->FindEntry(func->abstract_code()->InstructionStart());
+ code_map->FindEntry(func->abstract_code().InstructionStart());
if (!func_entry) FATAL("%s", name);
return func_entry->line_number();
}
diff --git a/test/cctest/test-roots.cc b/test/cctest/test-roots.cc
index a0ad1f0..65acbb7 100644
--- a/test/cctest/test-roots.cc
+++ b/test/cctest/test-roots.cc
@@ -14,7 +14,7 @@
namespace {
AllocationSpace GetSpaceFromObject(Object object) {
- DCHECK(object->IsHeapObject());
+ DCHECK(object.IsHeapObject());
return MemoryChunk::FromHeapObject(HeapObject::cast(object))
->owner()
->identity();
diff --git a/test/cctest/test-serialize.cc b/test/cctest/test-serialize.cc
index ba28960..0e3cd48 100644
--- a/test/cctest/test-serialize.cc
+++ b/test/cctest/test-serialize.cc
@@ -785,7 +785,7 @@
v8::Local<v8::Value> result = CompileRun("f()").As<v8::Value>();
CHECK(result->IsString());
i::String str = *v8::Utils::OpenHandle(*result.As<v8::String>());
- CHECK_EQ(std::string(str->ToCString().get()), "A");
+ CHECK_EQ(std::string(str.ToCString().get()), "A");
CHECK(!str.IsInternalizedString());
CHECK(!i::ReadOnlyHeap::Contains(str));
}
@@ -1105,8 +1105,7 @@
i::Handle<i::JSTypedArray> array =
i::Handle<i::JSTypedArray>::cast(v8::Utils::OpenHandle(*x));
CHECK(array->WasDetached());
- CHECK_NULL(
- FixedTypedArrayBase::cast(array->elements())->external_pointer());
+ CHECK_NULL(FixedTypedArrayBase::cast(array->elements()).external_pointer());
}
isolate->Dispose();
delete[] blob.data; // We can dispose of the snapshot blob now.
@@ -1400,7 +1399,7 @@
return i::Handle<i::JSFunction>::cast(
v8::Utils::OpenHandle(*CompileRun(name)))
->shared()
- ->is_compiled();
+ .is_compiled();
}
UNINITIALIZED_TEST(SnapshotDataBlobWithWarmup) {
@@ -1525,7 +1524,7 @@
int counter = 0;
for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) {
- if (obj->IsCode() && Code::cast(obj)->kind() == Code::BUILTIN) counter++;
+ if (obj.IsCode() && Code::cast(obj).kind() == Code::BUILTIN) counter++;
}
return counter;
}
@@ -1592,12 +1591,12 @@
}
CHECK_NE(*orig, *copy);
- CHECK(Script::cast(copy->script())->source() == *copy_source);
+ CHECK(Script::cast(copy->script()).source() == *copy_source);
Handle<JSFunction> copy_fun =
isolate->factory()->NewFunctionFromSharedFunctionInfo(
copy, isolate->native_context());
- Handle<JSObject> global(isolate->context()->global_object(), isolate);
+ Handle<JSObject> global(isolate->context().global_object(), isolate);
Handle<Object> copy_result =
Execution::Call(isolate, copy_fun, global, 0, nullptr).ToHandleChecked();
CHECK_EQ(2, Handle<Smi>::cast(copy_result)->value());
@@ -1689,7 +1688,7 @@
CHECK(!orig_source.is_identical_to(copy_source));
CHECK(orig_source->Equals(*copy_source));
- Handle<JSObject> global(isolate->context()->global_object(), isolate);
+ Handle<JSObject> global(isolate->context().global_object(), isolate);
i::ScriptData* script_data = nullptr;
Handle<SharedFunctionInfo> orig = CompileScriptAndProduceCache(
@@ -1711,7 +1710,7 @@
v8::ScriptCompiler::kConsumeCodeCache);
}
CHECK_NE(*orig, *copy);
- CHECK(Script::cast(copy->script())->source() == *copy_source);
+ CHECK(Script::cast(copy->script()).source() == *copy_source);
Handle<JSFunction> copy_fun =
isolate->factory()->NewFunctionFromSharedFunctionInfo(
@@ -1748,7 +1747,7 @@
Handle<String> source_str =
isolate->factory()->NewStringFromOneByte(source).ToHandleChecked();
- Handle<JSObject> global(isolate->context()->global_object(), isolate);
+ Handle<JSObject> global(isolate->context().global_object(), isolate);
ScriptData* cache = nullptr;
Handle<SharedFunctionInfo> orig = CompileScriptAndProduceCache(
@@ -1813,7 +1812,7 @@
ec_page = Page::FromHeapObject(*moving_object);
}
- Handle<JSObject> global(isolate->context()->global_object(), isolate);
+ Handle<JSObject> global(isolate->context().global_object(), isolate);
ScriptData* cache = nullptr;
Handle<SharedFunctionInfo> orig = CompileScriptAndProduceCache(
@@ -1876,7 +1875,7 @@
f->NewStringFromOneByte(source_t).ToHandleChecked())
.ToHandleChecked();
- Handle<JSObject> global(isolate->context()->global_object(), isolate);
+ Handle<JSObject> global(isolate->context().global_object(), isolate);
ScriptData* cache = nullptr;
Handle<SharedFunctionInfo> orig = CompileScriptAndProduceCache(
@@ -1948,7 +1947,7 @@
f->NewConsString(source_a_str, source_b_str).ToHandleChecked(),
source_c_str).ToHandleChecked();
- Handle<JSObject> global(isolate->context()->global_object(), isolate);
+ Handle<JSObject> global(isolate->context().global_object(), isolate);
ScriptData* cache = nullptr;
Handle<SharedFunctionInfo> orig = CompileScriptAndProduceCache(
@@ -2067,7 +2066,7 @@
->NewStringFromUtf8(CStrVector(source))
.ToHandleChecked();
- Handle<JSObject> global(isolate->context()->global_object(), isolate);
+ Handle<JSObject> global(isolate->context().global_object(), isolate);
ScriptData* cache = nullptr;
Handle<SharedFunctionInfo> orig = CompileScriptAndProduceCache(
@@ -2129,7 +2128,7 @@
f->NewConsString(f->NewStringFromAsciiChecked(" = 42; "), name)
.ToHandleChecked()).ToHandleChecked();
- Handle<JSObject> global(isolate->context()->global_object(), isolate);
+ Handle<JSObject> global(isolate->context().global_object(), isolate);
ScriptData* cache = nullptr;
Handle<SharedFunctionInfo> orig = CompileScriptAndProduceCache(
@@ -2181,7 +2180,7 @@
CHECK(name->IsExternalOneByteString());
CHECK(!name->IsInternalizedString());
- Handle<JSObject> global(isolate->context()->global_object(), isolate);
+ Handle<JSObject> global(isolate->context().global_object(), isolate);
ScriptData* cache = nullptr;
Handle<SharedFunctionInfo> orig =
@@ -2394,7 +2393,7 @@
Handle<SharedFunctionInfo> sfi = v8::Utils::OpenHandle(*script);
CHECK(sfi->HasBytecodeArray());
BytecodeArray bytecode = sfi->GetBytecodeArray();
- CHECK_EQ(bytecode->osr_loop_nesting_level(), 0);
+ CHECK_EQ(bytecode.osr_loop_nesting_level(), 0);
{
DisallowCompilation no_compile_expected(
@@ -3765,14 +3764,14 @@
}
void CheckSFIsAreWeak(WeakFixedArray sfis, Isolate* isolate) {
- CHECK_GT(sfis->length(), 0);
+ CHECK_GT(sfis.length(), 0);
int no_of_weak = 0;
- for (int i = 0; i < sfis->length(); ++i) {
- MaybeObject maybe_object = sfis->Get(i);
+ for (int i = 0; i < sfis.length(); ++i) {
+ MaybeObject maybe_object = sfis.Get(i);
HeapObject heap_object;
CHECK(maybe_object->IsWeakOrCleared() ||
(maybe_object->GetHeapObjectIfStrong(&heap_object) &&
- heap_object->IsUndefined(isolate)));
+ heap_object.IsUndefined(isolate)));
if (maybe_object->IsWeak()) {
++no_of_weak;
}
@@ -3825,7 +3824,7 @@
// Verify that the pointers in shared_function_infos are weak.
WeakFixedArray sfis =
- Script::cast(function->shared()->script())->shared_function_infos();
+ Script::cast(function->shared().script()).shared_function_infos();
CheckSFIsAreWeak(sfis, reinterpret_cast<i::Isolate*>(isolate));
}
isolate->Dispose();
@@ -3855,7 +3854,7 @@
isolate, src, src, cache, v8::ScriptCompiler::kConsumeCodeCache);
// Verify that the pointers in shared_function_infos are weak.
- WeakFixedArray sfis = Script::cast(copy->script())->shared_function_infos();
+ WeakFixedArray sfis = Script::cast(copy->script()).shared_function_infos();
CheckSFIsAreWeak(sfis, isolate);
delete cache;
diff --git a/test/cctest/test-smi-lexicographic-compare.cc b/test/cctest/test-smi-lexicographic-compare.cc
index 7e4f766..7a71e43 100644
--- a/test/cctest/test-smi-lexicographic-compare.cc
+++ b/test/cctest/test-smi-lexicographic-compare.cc
@@ -23,8 +23,8 @@
// Uses std::lexicographical_compare twice to convert the result to -1, 0 or 1.
int ExpectedCompareResult(Smi a, Smi b) {
- std::string str_a = std::to_string(a->value());
- std::string str_b = std::to_string(b->value());
+ std::string str_a = std::to_string(a.value());
+ std::string str_b = std::to_string(b.value());
bool expected_a_lt_b = std::lexicographical_compare(
str_a.begin(), str_a.end(), str_b.begin(), str_b.end());
bool expected_b_lt_a = std::lexicographical_compare(
diff --git a/test/cctest/test-strings.cc b/test/cctest/test-strings.cc
index ee5f84a..ec897e5 100644
--- a/test/cctest/test-strings.cc
+++ b/test/cctest/test-strings.cc
@@ -321,23 +321,23 @@
}
void AccumulateStats(ConsString cons_string, ConsStringStats* stats) {
- int left_length = cons_string->first()->length();
- int right_length = cons_string->second()->length();
- CHECK(cons_string->length() == left_length + right_length);
+ int left_length = cons_string.first().length();
+ int right_length = cons_string.second().length();
+ CHECK(cons_string.length() == left_length + right_length);
// Check left side.
- bool left_is_cons = cons_string->first()->IsConsString();
+ bool left_is_cons = cons_string.first().IsConsString();
if (left_is_cons) {
stats->left_traversals_++;
- AccumulateStats(ConsString::cast(cons_string->first()), stats);
+ AccumulateStats(ConsString::cast(cons_string.first()), stats);
} else {
CHECK_NE(left_length, 0);
stats->leaves_++;
stats->chars_ += left_length;
}
// Check right side.
- if (cons_string->second()->IsConsString()) {
+ if (cons_string.second().IsConsString()) {
stats->right_traversals_++;
- AccumulateStats(ConsString::cast(cons_string->second()), stats);
+ AccumulateStats(ConsString::cast(cons_string.second()), stats);
} else {
if (right_length == 0) {
stats->empty_leaves_++;
@@ -366,7 +366,7 @@
// Accumulate stats.
CHECK_EQ(0, offset);
stats->leaves_++;
- stats->chars_ += string->length();
+ stats->chars_ += string.length();
}
}
@@ -640,10 +640,10 @@
static void VerifyCharacterStream(String flat_string, String cons_string) {
// Do not want to test ConString traversal on flat string.
- CHECK(flat_string->IsFlat() && !flat_string->IsConsString());
- CHECK(cons_string->IsConsString());
+ CHECK(flat_string.IsFlat() && !flat_string.IsConsString());
+ CHECK(cons_string.IsConsString());
// TODO(dcarney) Test stream reset as well.
- int length = flat_string->length();
+ int length = flat_string.length();
// Iterate start search in multiple places in the string.
int outer_iterations = length > 20 ? 20 : length;
for (int j = 0; j <= outer_iterations; j++) {
@@ -654,7 +654,7 @@
StringCharacterStream flat_stream(flat_string, offset);
StringCharacterStream cons_stream(cons_string, offset);
for (int i = offset; i < length; i++) {
- uint16_t c = flat_string->Get(i);
+ uint16_t c = flat_string.Get(i);
CHECK(flat_stream.HasMore());
CHECK(cons_stream.HasMore());
CHECK_EQ(c, flat_stream.GetNext());
@@ -704,7 +704,7 @@
cons_string_stats.VerifyEqual(data.stats_);
VerifyConsString(cons_string, &data);
String flat_string_ptr = flat_string->IsConsString()
- ? ConsString::cast(*flat_string)->first()
+ ? ConsString::cast(*flat_string).first()
: *flat_string;
VerifyCharacterStream(flat_string_ptr, *cons_string);
}
@@ -1381,11 +1381,11 @@
// After slicing, the original string becomes a flat cons.
CHECK(parent->IsFlat());
CHECK(slice->IsSlicedString());
- CHECK_EQ(SlicedString::cast(*slice)->parent(),
- // Parent could have been short-circuited.
- parent->IsConsString() ? ConsString::cast(*parent)->first()
- : *parent);
- CHECK(SlicedString::cast(*slice)->parent()->IsSeqString());
+ CHECK_EQ(
+ SlicedString::cast(*slice).parent(),
+ // Parent could have been short-circuited.
+ parent->IsConsString() ? ConsString::cast(*parent).first() : *parent);
+ CHECK(SlicedString::cast(*slice).parent().IsSeqString());
CHECK(slice->IsFlat());
}
@@ -1451,8 +1451,8 @@
Handle<String> slice = factory->NewSubString(string, 1, 25);
CHECK(slice->IsSlicedString());
CHECK(string->IsExternalString());
- CHECK_EQ(SlicedString::cast(*slice)->parent(), *string);
- CHECK(SlicedString::cast(*slice)->parent()->IsExternalString());
+ CHECK_EQ(SlicedString::cast(*slice).parent(), *string);
+ CHECK(SlicedString::cast(*slice).parent().IsExternalString());
CHECK(slice->IsFlat());
// This avoids the GC from trying to free stack allocated resources.
i::Handle<i::ExternalOneByteString>::cast(string)->SetResource(
@@ -1507,14 +1507,14 @@
CHECK(result->IsString());
string = v8::Utils::OpenHandle(v8::String::Cast(*result));
CHECK(string->IsSlicedString());
- CHECK(SlicedString::cast(*string)->parent()->IsSeqString());
+ CHECK(SlicedString::cast(*string).parent().IsSeqString());
CHECK_EQ(0, strcmp("bcdefghijklmnopqrstuvwxy", string->ToCString().get()));
result = CompileRun(slice_from_slice);
CHECK(result->IsString());
string = v8::Utils::OpenHandle(v8::String::Cast(*result));
CHECK(string->IsSlicedString());
- CHECK(SlicedString::cast(*string)->parent()->IsSeqString());
+ CHECK(SlicedString::cast(*string).parent().IsSeqString());
CHECK_EQ(0, strcmp("cdefghijklmnopqrstuvwx", string->ToCString().get()));
}
@@ -1945,8 +1945,8 @@
factory->InternalizeString(parent);
CHECK(parent->IsThinString());
Handle<String> grandparent =
- handle(ThinString::cast(*parent)->actual(), isolate);
- CHECK_EQ(*parent, SlicedString::cast(*sliced)->parent());
+ handle(ThinString::cast(*parent).actual(), isolate);
+ CHECK_EQ(*parent, SlicedString::cast(*sliced).parent());
OneByteStringResource* resource =
new OneByteStringResource(external_one_byte_buf, kLength);
grandparent->MakeExternal(resource);
diff --git a/test/cctest/test-transitions.cc b/test/cctest/test-transitions.cc
index af868ba..7c57081 100644
--- a/test/cctest/test-transitions.cc
+++ b/test/cctest/test-transitions.cc
@@ -289,7 +289,7 @@
if (key == *name) {
// Attributes transition.
PropertyAttributes attributes =
- target->GetLastDescriptorDetails().attributes();
+ target.GetLastDescriptorDetails().attributes();
CHECK_EQ(*attr_maps[static_cast<int>(attributes)], target);
} else {
for (int j = 0; j < PROPS_COUNT; j++) {
diff --git a/test/cctest/test-unboxed-doubles.cc b/test/cctest/test-unboxed-doubles.cc
index 2406007..776351e 100644
--- a/test/cctest/test-unboxed-doubles.cc
+++ b/test/cctest/test-unboxed-doubles.cc
@@ -41,8 +41,8 @@
static void InitializeVerifiedMapDescriptors(
Isolate* isolate, Map map, DescriptorArray descriptors,
LayoutDescriptor layout_descriptor) {
- map->InitializeDescriptors(isolate, descriptors, layout_descriptor);
- CHECK(layout_descriptor->IsConsistentWithMap(map, true));
+ map.InitializeDescriptors(isolate, descriptors, layout_descriptor);
+ CHECK(layout_descriptor.IsConsistentWithMap(map, true));
}
static Handle<String> MakeString(const char* str) {
@@ -69,19 +69,19 @@
}
static double GetDoubleFieldValue(JSObject obj, FieldIndex field_index) {
- if (obj->IsUnboxedDoubleField(field_index)) {
- return obj->RawFastDoublePropertyAt(field_index);
+ if (obj.IsUnboxedDoubleField(field_index)) {
+ return obj.RawFastDoublePropertyAt(field_index);
} else {
- Object value = obj->RawFastPropertyAt(field_index);
- CHECK(value->IsMutableHeapNumber());
- return MutableHeapNumber::cast(value)->value();
+ Object value = obj.RawFastPropertyAt(field_index);
+ CHECK(value.IsMutableHeapNumber());
+ return MutableHeapNumber::cast(value).value();
}
}
void WriteToField(JSObject object, int descriptor, Object value) {
- DescriptorArray descriptors = object->map()->instance_descriptors();
- PropertyDetails details = descriptors->GetDetails(descriptor);
- object->WriteToField(descriptor, details, value);
+ DescriptorArray descriptors = object.map().instance_descriptors();
+ PropertyDetails details = descriptors.GetDetails(descriptor);
+ object.WriteToField(descriptor, details, value);
}
const int kNumberOfBits = 32;
@@ -142,32 +142,32 @@
LayoutDescriptor layout_desc = LayoutDescriptor::FastPointerLayout();
- CHECK(!layout_desc->IsSlowLayout());
- CHECK(layout_desc->IsFastPointerLayout());
- CHECK_EQ(kBitsInSmiLayout, layout_desc->capacity());
+ CHECK(!layout_desc.IsSlowLayout());
+ CHECK(layout_desc.IsFastPointerLayout());
+ CHECK_EQ(kBitsInSmiLayout, layout_desc.capacity());
for (int i = 0; i < kBitsInSmiLayout + 13; i++) {
- CHECK(layout_desc->IsTagged(i));
+ CHECK(layout_desc.IsTagged(i));
}
- CHECK(layout_desc->IsTagged(-1));
- CHECK(layout_desc->IsTagged(-12347));
- CHECK(layout_desc->IsTagged(15635));
- CHECK(layout_desc->IsFastPointerLayout());
+ CHECK(layout_desc.IsTagged(-1));
+ CHECK(layout_desc.IsTagged(-12347));
+ CHECK(layout_desc.IsTagged(15635));
+ CHECK(layout_desc.IsFastPointerLayout());
for (int i = 0; i < kBitsInSmiLayout; i++) {
- layout_desc = layout_desc->SetTaggedForTesting(i, false);
- CHECK(!layout_desc->IsTagged(i));
- layout_desc = layout_desc->SetTaggedForTesting(i, true);
- CHECK(layout_desc->IsTagged(i));
+ layout_desc = layout_desc.SetTaggedForTesting(i, false);
+ CHECK(!layout_desc.IsTagged(i));
+ layout_desc = layout_desc.SetTaggedForTesting(i, true);
+ CHECK(layout_desc.IsTagged(i));
}
- CHECK(layout_desc->IsFastPointerLayout());
+ CHECK(layout_desc.IsFastPointerLayout());
int sequence_length;
- CHECK_EQ(true, layout_desc->IsTagged(0, std::numeric_limits<int>::max(),
- &sequence_length));
+ CHECK_EQ(true, layout_desc.IsTagged(0, std::numeric_limits<int>::max(),
+ &sequence_length));
CHECK_EQ(std::numeric_limits<int>::max(), sequence_length);
- CHECK(layout_desc->IsTagged(0, 7, &sequence_length));
+ CHECK(layout_desc.IsTagged(0, 7, &sequence_length));
CHECK_EQ(7, sequence_length);
}
@@ -252,13 +252,13 @@
LayoutDescriptor layout_desc = *layout_descriptor;
// Play with the bits but leave it in consistent state with map at the end.
for (int i = 1; i < kPropsCount - 1; i++) {
- layout_desc = layout_desc->SetTaggedForTesting(i, false);
- CHECK(!layout_desc->IsTagged(i));
- layout_desc = layout_desc->SetTaggedForTesting(i, true);
- CHECK(layout_desc->IsTagged(i));
+ layout_desc = layout_desc.SetTaggedForTesting(i, false);
+ CHECK(!layout_desc.IsTagged(i));
+ layout_desc = layout_desc.SetTaggedForTesting(i, true);
+ CHECK(layout_desc.IsTagged(i));
}
- CHECK(layout_desc->IsSlowLayout());
- CHECK(!layout_desc->IsFastPointerLayout());
+ CHECK(layout_desc.IsSlowLayout());
+ CHECK(!layout_desc.IsFastPointerLayout());
CHECK(layout_descriptor->IsConsistentWithMap(*map, true));
}
}
@@ -282,11 +282,11 @@
++cur_bit_flip_index;
CHECK(i < bit_flip_positions[cur_bit_flip_index]); // check test data
}
- layout_desc = layout_desc->SetTaggedForTesting(i, tagged);
+ layout_desc = layout_desc.SetTaggedForTesting(i, tagged);
}
}
- if (layout_desc->IsFastPointerLayout()) {
+ if (layout_desc.IsFastPointerLayout()) {
return;
}
@@ -299,30 +299,29 @@
tagged = !tagged;
++cur_bit_flip_index;
}
- CHECK_EQ(tagged, layout_desc->IsTagged(i));
+ CHECK_EQ(tagged, layout_desc.IsTagged(i));
int next_bit_flip_position = bit_flip_positions[cur_bit_flip_index];
int expected_sequence_length;
- if (next_bit_flip_position < layout_desc->capacity()) {
+ if (next_bit_flip_position < layout_desc.capacity()) {
expected_sequence_length = next_bit_flip_position - i;
} else {
expected_sequence_length = tagged ? std::numeric_limits<int>::max()
- : (layout_desc->capacity() - i);
+ : (layout_desc.capacity() - i);
}
expected_sequence_length =
Min(expected_sequence_length, max_sequence_length);
int sequence_length;
CHECK_EQ(tagged,
- layout_desc->IsTagged(i, max_sequence_length, &sequence_length));
+ layout_desc.IsTagged(i, max_sequence_length, &sequence_length));
CHECK_GT(sequence_length, 0);
CHECK_EQ(expected_sequence_length, sequence_length);
}
int sequence_length;
- CHECK_EQ(true,
- layout_desc->IsTagged(layout_descriptor_length,
- max_sequence_length, &sequence_length));
+ CHECK_EQ(true, layout_desc.IsTagged(layout_descriptor_length,
+ max_sequence_length, &sequence_length));
CHECK_EQ(max_sequence_length, sequence_length);
}
}
@@ -334,7 +333,7 @@
int sequence_length;
for (int i = 0; i < kNumberOfBits; i++) {
CHECK_EQ(true,
- layout_desc->IsTagged(i, max_sequence_length, &sequence_length));
+ layout_desc.IsTagged(i, max_sequence_length, &sequence_length));
CHECK_GT(sequence_length, 0);
CHECK_EQ(max_sequence_length, sequence_length);
}
@@ -624,17 +623,17 @@
LayoutDescriptor layout_desc = *layout_descriptor;
CHECK_EQ(layout_desc, LayoutDescriptor::cast(layout_desc));
CHECK_EQ(layout_desc, LayoutDescriptor::cast_gc_safe(layout_desc));
- CHECK(layout_desc->IsSlowLayout());
+ CHECK(layout_desc.IsSlowLayout());
// Now make it look like a forwarding pointer to layout_descriptor_copy.
- MapWord map_word = layout_desc->map_word();
+ MapWord map_word = layout_desc.map_word();
CHECK(!map_word.IsForwardingAddress());
- layout_desc->set_map_word(
+ layout_desc.set_map_word(
MapWord::FromForwardingAddress(*layout_descriptor_copy));
- CHECK(layout_desc->map_word().IsForwardingAddress());
+ CHECK(layout_desc.map_word().IsForwardingAddress());
CHECK_EQ(layout_desc, LayoutDescriptor::cast_gc_safe(layout_desc));
// Restore it back.
- layout_desc->set_map_word(map_word);
+ layout_desc.set_map_word(map_word);
CHECK_EQ(layout_desc, LayoutDescriptor::cast(layout_desc));
}
}
@@ -801,10 +800,10 @@
Map map = *last_map;
for (int i = 0; i < descriptors_length; i++) {
maps[descriptors_length - 1 - i] = handle(map, isolate);
- Object maybe_map = map->GetBackPointer();
- CHECK(maybe_map->IsMap());
+ Object maybe_map = map.GetBackPointer();
+ CHECK(maybe_map.IsMap());
map = Map::cast(maybe_map);
- CHECK(!map->is_stable());
+ CHECK(!map.is_stable());
}
CHECK_EQ(1, maps[0]->NumberOfOwnDescriptors());
}
@@ -816,7 +815,7 @@
map = maps[i];
LayoutDescriptor layout_desc = map->layout_descriptor();
- if (layout_desc->IsSlowLayout()) {
+ if (layout_desc.IsSlowLayout()) {
switched_to_slow_mode = true;
CHECK_EQ(*full_layout_descriptor, layout_desc);
} else {
@@ -829,12 +828,12 @@
bool is_inobject = field_index < map->GetInObjectProperties();
for (int bit = 0; bit < field_width_in_words; bit++) {
CHECK_EQ(is_inobject && details.representation().IsDouble(),
- !layout_desc->IsTagged(field_index + bit));
+ !layout_desc.IsTagged(field_index + bit));
}
- CHECK(layout_desc->IsTagged(field_index + field_width_in_words));
+ CHECK(layout_desc.IsTagged(field_index + field_width_in_words));
}
}
- CHECK(map->layout_descriptor()->IsConsistentWithMap(*map));
+ CHECK(map->layout_descriptor().IsConsistentWithMap(*map));
}
Handle<LayoutDescriptor> layout_descriptor(map->GetLayoutDescriptor(),
@@ -960,7 +959,7 @@
Address fake_address = static_cast<Address>(~kHeapObjectTagMask);
HeapObject fake_object = HeapObject::FromAddress(fake_address);
- CHECK(fake_object->IsHeapObject());
+ CHECK(fake_object.IsHeapObject());
uint64_t boom_value = bit_cast<uint64_t>(fake_object);
for (int i = 0; i < kPropsCount; i++) {
@@ -969,13 +968,13 @@
object->RawFastDoublePropertyAsBitsAtPut(index, boom_value);
}
CHECK(object->HasFastProperties());
- CHECK(!object->map()->HasFastPointerLayout());
+ CHECK(!object->map().HasFastPointerLayout());
Handle<Map> normalized_map =
Map::Normalize(isolate, map, KEEP_INOBJECT_PROPERTIES, "testing");
JSObject::MigrateToMap(object, normalized_map);
CHECK(!object->HasFastProperties());
- CHECK(object->map()->HasFastPointerLayout());
+ CHECK(object->map().HasFastPointerLayout());
// Trigger GCs and heap verification.
CcTest::CollectAllGarbage();
@@ -1004,10 +1003,10 @@
any_type, NONE, PropertyConstness::kMutable,
Representation::Double(), INSERT_TRANSITION)
.ToHandleChecked();
- CHECK(map->layout_descriptor()->IsConsistentWithMap(*map, true));
- CHECK(map->layout_descriptor()->IsSlowLayout());
+ CHECK(map->layout_descriptor().IsConsistentWithMap(*map, true));
+ CHECK(map->layout_descriptor().IsSlowLayout());
CHECK(map->owns_descriptors());
- CHECK_EQ(8, map->layout_descriptor()->length());
+ CHECK_EQ(8, map->layout_descriptor().length());
{
// Add transitions to double fields.
@@ -1019,35 +1018,35 @@
any_type, NONE, PropertyConstness::kMutable,
Representation::Double(), INSERT_TRANSITION)
.ToHandleChecked();
- CHECK(tmp_map->layout_descriptor()->IsConsistentWithMap(*tmp_map, true));
+ CHECK(tmp_map->layout_descriptor().IsConsistentWithMap(*tmp_map, true));
}
// Check that descriptors are shared.
CHECK(tmp_map->owns_descriptors());
CHECK_EQ(map->instance_descriptors(), tmp_map->instance_descriptors());
CHECK_EQ(map->layout_descriptor(), tmp_map->layout_descriptor());
}
- CHECK(map->layout_descriptor()->IsSlowLayout());
- CHECK_EQ(16, map->layout_descriptor()->length());
+ CHECK(map->layout_descriptor().IsSlowLayout());
+ CHECK_EQ(16, map->layout_descriptor().length());
// The unused tail of the layout descriptor is now "durty" because of sharing.
- CHECK(map->layout_descriptor()->IsConsistentWithMap(*map));
+ CHECK(map->layout_descriptor().IsConsistentWithMap(*map));
for (int i = kSplitFieldIndex + 1; i < kTrimmedLayoutDescriptorLength; i++) {
- CHECK(!map->layout_descriptor()->IsTagged(i));
+ CHECK(!map->layout_descriptor().IsTagged(i));
}
CHECK_LT(map->NumberOfOwnDescriptors(),
- map->instance_descriptors()->number_of_descriptors());
+ map->instance_descriptors().number_of_descriptors());
// Call GC that should trim both |map|'s descriptor array and layout
// descriptor.
CcTest::CollectAllGarbage();
// The unused tail of the layout descriptor is now "clean" again.
- CHECK(map->layout_descriptor()->IsConsistentWithMap(*map, true));
+ CHECK(map->layout_descriptor().IsConsistentWithMap(*map, true));
CHECK(map->owns_descriptors());
CHECK_EQ(map->NumberOfOwnDescriptors(),
- map->instance_descriptors()->number_of_descriptors());
- CHECK(map->layout_descriptor()->IsSlowLayout());
- CHECK_EQ(8, map->layout_descriptor()->length());
+ map->instance_descriptors().number_of_descriptors());
+ CHECK(map->layout_descriptor().IsSlowLayout());
+ CHECK_EQ(8, map->layout_descriptor().length());
{
// Add transitions to tagged fields.
@@ -1059,18 +1058,18 @@
any_type, NONE, PropertyConstness::kMutable,
Representation::Tagged(), INSERT_TRANSITION)
.ToHandleChecked();
- CHECK(tmp_map->layout_descriptor()->IsConsistentWithMap(*tmp_map, true));
+ CHECK(tmp_map->layout_descriptor().IsConsistentWithMap(*tmp_map, true));
}
tmp_map = Map::CopyWithField(isolate, tmp_map, MakeString("dbl"), any_type,
NONE, PropertyConstness::kMutable,
Representation::Double(), INSERT_TRANSITION)
.ToHandleChecked();
- CHECK(tmp_map->layout_descriptor()->IsConsistentWithMap(*tmp_map, true));
+ CHECK(tmp_map->layout_descriptor().IsConsistentWithMap(*tmp_map, true));
// Check that descriptors are shared.
CHECK(tmp_map->owns_descriptors());
CHECK_EQ(map->instance_descriptors(), tmp_map->instance_descriptors());
}
- CHECK(map->layout_descriptor()->IsSlowLayout());
+ CHECK(map->layout_descriptor().IsSlowLayout());
}
@@ -1417,7 +1416,7 @@
// Layout descriptors should be shared with |split_map|.
CHECK(map1->owns_descriptors());
CHECK_EQ(*split_layout_descriptor, map1->layout_descriptor());
- CHECK(map1->layout_descriptor()->IsConsistentWithMap(*map1, true));
+ CHECK(map1->layout_descriptor().IsConsistentWithMap(*map1, true));
Handle<Map> map2 =
Map::CopyWithField(isolate, split_map, MakeString("bar"), any_type, NONE,
@@ -1428,7 +1427,7 @@
// Layout descriptors should not be shared with |split_map|.
CHECK(map2->owns_descriptors());
CHECK_NE(*split_layout_descriptor, map2->layout_descriptor());
- CHECK(map2->layout_descriptor()->IsConsistentWithMap(*map2, true));
+ CHECK(map2->layout_descriptor().IsConsistentWithMap(*map2, true));
}
diff --git a/test/cctest/test-unwinder.cc b/test/cctest/test-unwinder.cc
index af3388d..c452db3 100644
--- a/test/cctest/test-unwinder.cc
+++ b/test/cctest/test-unwinder.cc
@@ -50,9 +50,9 @@
// Put the current PC inside of a valid builtin.
Code builtin = i_isolate->builtins()->builtin(Builtins::kStringEqual);
const uintptr_t offset = 40;
- CHECK_LT(offset, builtin->InstructionSize());
+ CHECK_LT(offset, builtin.InstructionSize());
register_state.pc =
- reinterpret_cast<void*>(builtin->InstructionStart() + offset);
+ reinterpret_cast<void*>(builtin.InstructionStart() + offset);
  bool unwound = v8::Unwinder::TryUnwindV8Frames(unwind_state, &register_state,
stack_base);
@@ -97,7 +97,7 @@
// Put the current PC at the start of a valid builtin, so that we are setting
// up the frame.
Code builtin = i_isolate->builtins()->builtin(Builtins::kStringEqual);
- register_state.pc = reinterpret_cast<void*>(builtin->InstructionStart());
+ register_state.pc = reinterpret_cast<void*>(builtin.InstructionStart());
  bool unwound = v8::Unwinder::TryUnwindV8Frames(unwind_state, &register_state,
stack_base);
@@ -154,16 +154,16 @@
// Put the current PC inside of the created code object.
AbstractCode abstract_code = foo->abstract_code();
// We don't produce optimized code when run with --no-opt.
- if (!abstract_code->IsCode() && FLAG_opt == false) return;
- CHECK(abstract_code->IsCode());
+ if (!abstract_code.IsCode() && FLAG_opt == false) return;
+ CHECK(abstract_code.IsCode());
- Code code = abstract_code->GetCode();
+ Code code = abstract_code.GetCode();
// We don't want the offset too early or it could be the `push rbp`
// instruction (which is not at the start of generated code, because the lazy
// deopt check happens before frame setup).
- const uintptr_t offset = code->InstructionSize() - 20;
- CHECK_LT(offset, code->InstructionSize());
- Address pc = code->InstructionStart() + offset;
+ const uintptr_t offset = code.InstructionSize() - 20;
+ CHECK_LT(offset, code.InstructionSize());
+ Address pc = code.InstructionStart() + offset;
register_state.pc = reinterpret_cast<void*>(pc);
// Check that the created code is within the code range that we get from the
@@ -337,7 +337,7 @@
RegisterState register_state;
Code js_entry = i_isolate->heap()->builtin(Builtins::kJSEntry);
- byte* start = reinterpret_cast<byte*>(js_entry->InstructionStart());
+ byte* start = reinterpret_cast<byte*>(js_entry.InstructionStart());
register_state.pc = start + 10;
  bool unwound = v8::Unwinder::TryUnwindV8Frames(unwind_state, &register_state,
@@ -495,8 +495,8 @@
UnwindState unwind_state = isolate->GetUnwindState();
Code js_entry = i_isolate->heap()->builtin(Builtins::kJSEntry);
- byte* start = reinterpret_cast<byte*>(js_entry->InstructionStart());
- size_t length = js_entry->InstructionSize();
+ byte* start = reinterpret_cast<byte*>(js_entry.InstructionStart());
+ size_t length = js_entry.InstructionSize();
void* pc = start;
CHECK(v8::Unwinder::PCIsInV8(unwind_state, pc));
diff --git a/test/cctest/test-weakmaps.cc b/test/cctest/test-weakmaps.cc
index 0419044..2f3d570 100644
--- a/test/cctest/test-weakmaps.cc
+++ b/test/cctest/test-weakmaps.cc
@@ -81,19 +81,19 @@
Handle<Map> map = factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
Handle<JSObject> object = factory->NewJSObjectFromMap(map);
Handle<Smi> smi(Smi::FromInt(23), isolate);
- int32_t hash = key->GetOrCreateHash(isolate)->value();
+ int32_t hash = key->GetOrCreateHash(isolate).value();
JSWeakCollection::Set(weakmap, key, object, hash);
- int32_t object_hash = object->GetOrCreateHash(isolate)->value();
+ int32_t object_hash = object->GetOrCreateHash(isolate).value();
JSWeakCollection::Set(weakmap, object, smi, object_hash);
}
- CHECK_EQ(2, EphemeronHashTable::cast(weakmap->table())->NumberOfElements());
+ CHECK_EQ(2, EphemeronHashTable::cast(weakmap->table()).NumberOfElements());
// Force a full GC.
CcTest::PreciseCollectAllGarbage();
CHECK_EQ(0, NumberOfWeakCalls);
- CHECK_EQ(2, EphemeronHashTable::cast(weakmap->table())->NumberOfElements());
+ CHECK_EQ(2, EphemeronHashTable::cast(weakmap->table()).NumberOfElements());
CHECK_EQ(
- 0, EphemeronHashTable::cast(weakmap->table())->NumberOfDeletedElements());
+ 0, EphemeronHashTable::cast(weakmap->table()).NumberOfDeletedElements());
// Make the global reference to the key weak.
std::pair<Handle<Object>*, int> handle_and_id(&key, 1234);
@@ -104,9 +104,9 @@
CcTest::PreciseCollectAllGarbage();
CHECK_EQ(1, NumberOfWeakCalls);
- CHECK_EQ(0, EphemeronHashTable::cast(weakmap->table())->NumberOfElements());
+ CHECK_EQ(0, EphemeronHashTable::cast(weakmap->table()).NumberOfElements());
CHECK_EQ(
- 2, EphemeronHashTable::cast(weakmap->table())->NumberOfDeletedElements());
+ 2, EphemeronHashTable::cast(weakmap->table()).NumberOfDeletedElements());
}
@@ -118,7 +118,7 @@
Handle<JSWeakMap> weakmap = isolate->factory()->NewJSWeakMap();
// Check initial capacity.
- CHECK_EQ(32, EphemeronHashTable::cast(weakmap->table())->Capacity());
+ CHECK_EQ(32, EphemeronHashTable::cast(weakmap->table()).Capacity());
// Fill up weak map to trigger capacity change.
{
@@ -127,32 +127,31 @@
for (int i = 0; i < 32; i++) {
Handle<JSObject> object = factory->NewJSObjectFromMap(map);
Handle<Smi> smi(Smi::FromInt(i), isolate);
- int32_t object_hash = object->GetOrCreateHash(isolate)->value();
+ int32_t object_hash = object->GetOrCreateHash(isolate).value();
JSWeakCollection::Set(weakmap, object, smi, object_hash);
}
}
// Check increased capacity.
- CHECK_EQ(128, EphemeronHashTable::cast(weakmap->table())->Capacity());
+ CHECK_EQ(128, EphemeronHashTable::cast(weakmap->table()).Capacity());
// Force a full GC.
- CHECK_EQ(32, EphemeronHashTable::cast(weakmap->table())->NumberOfElements());
+ CHECK_EQ(32, EphemeronHashTable::cast(weakmap->table()).NumberOfElements());
CHECK_EQ(
- 0, EphemeronHashTable::cast(weakmap->table())->NumberOfDeletedElements());
+ 0, EphemeronHashTable::cast(weakmap->table()).NumberOfDeletedElements());
CcTest::PreciseCollectAllGarbage();
- CHECK_EQ(0, EphemeronHashTable::cast(weakmap->table())->NumberOfElements());
+ CHECK_EQ(0, EphemeronHashTable::cast(weakmap->table()).NumberOfElements());
CHECK_EQ(
- 32,
- EphemeronHashTable::cast(weakmap->table())->NumberOfDeletedElements());
+ 32, EphemeronHashTable::cast(weakmap->table()).NumberOfDeletedElements());
// Check shrunk capacity.
- CHECK_EQ(32, EphemeronHashTable::cast(weakmap->table())->Capacity());
+ CHECK_EQ(32, EphemeronHashTable::cast(weakmap->table()).Capacity());
}
namespace {
bool EphemeronHashTableContainsKey(EphemeronHashTable table, HeapObject key) {
for (int i = 0; i < table.Capacity(); ++i) {
- if (table->KeyAt(i) == key) return true;
+ if (table.KeyAt(i) == key) return true;
}
return false;
}
@@ -171,7 +170,7 @@
Handle<Map> map = factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
Handle<JSObject> object = factory->NewJSObjectFromMap(map);
Handle<Smi> smi(Smi::FromInt(1), isolate);
- int32_t object_hash = object->GetOrCreateHash(isolate)->value();
+ int32_t object_hash = object->GetOrCreateHash(isolate).value();
JSWeakCollection::Set(weakmap, object, smi, object_hash);
CHECK(EphemeronHashTableContainsKey(
@@ -203,7 +202,7 @@
Handle<Map> map = factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
Handle<JSObject> object = factory->NewJSObjectFromMap(map);
Handle<Smi> smi(Smi::FromInt(1), isolate);
- int32_t object_hash = object->GetOrCreateHash(isolate)->value();
+ int32_t object_hash = object->GetOrCreateHash(isolate).value();
JSWeakCollection::Set(weakmap, object, smi, object_hash);
CHECK(EphemeronHashTableContainsKey(
@@ -249,7 +248,7 @@
factory->NewJSObject(function, AllocationType::kOld);
CHECK(!Heap::InYoungGeneration(*object));
CHECK(!first_page->Contains(object->address()));
- int32_t hash = key->GetOrCreateHash(isolate)->value();
+ int32_t hash = key->GetOrCreateHash(isolate).value();
JSWeakCollection::Set(weakmap, key, object, hash);
}
}
@@ -291,7 +290,7 @@
Handle<JSWeakMap> weakmap = isolate->factory()->NewJSWeakMap();
for (int i = 0; i < 32; i++) {
Handle<Smi> smi(Smi::FromInt(i), isolate);
- int32_t hash = keys[i]->GetOrCreateHash(isolate)->value();
+ int32_t hash = keys[i]->GetOrCreateHash(isolate).value();
JSWeakCollection::Set(weakmap, keys[i], smi, hash);
}
@@ -343,8 +342,8 @@
g2.SetWeak();
Handle<Object> i_o1 = v8::Utils::OpenHandle(*o1);
Handle<Object> i_o2 = v8::Utils::OpenHandle(*o2);
- int32_t hash1 = i_o1->GetOrCreateHash(i_isolate)->value();
- int32_t hash2 = i_o2->GetOrCreateHash(i_isolate)->value();
+ int32_t hash1 = i_o1->GetOrCreateHash(i_isolate).value();
+ int32_t hash2 = i_o2->GetOrCreateHash(i_isolate).value();
JSWeakCollection::Set(weakmap1, i_o1, i_o2, hash1);
JSWeakCollection::Set(weakmap2, i_o2, i_o1, hash2);
}
diff --git a/test/cctest/test-weaksets.cc b/test/cctest/test-weaksets.cc
index b76519d..6dc32a5 100644
--- a/test/cctest/test-weaksets.cc
+++ b/test/cctest/test-weaksets.cc
@@ -94,17 +94,17 @@
{
HandleScope scope(isolate);
Handle<Smi> smi(Smi::FromInt(23), isolate);
- int32_t hash = key->GetOrCreateHash(isolate)->value();
+ int32_t hash = key->GetOrCreateHash(isolate).value();
JSWeakCollection::Set(weakset, key, smi, hash);
}
- CHECK_EQ(1, EphemeronHashTable::cast(weakset->table())->NumberOfElements());
+ CHECK_EQ(1, EphemeronHashTable::cast(weakset->table()).NumberOfElements());
// Force a full GC.
CcTest::PreciseCollectAllGarbage();
CHECK_EQ(0, NumberOfWeakCalls);
- CHECK_EQ(1, EphemeronHashTable::cast(weakset->table())->NumberOfElements());
+ CHECK_EQ(1, EphemeronHashTable::cast(weakset->table()).NumberOfElements());
CHECK_EQ(
- 0, EphemeronHashTable::cast(weakset->table())->NumberOfDeletedElements());
+ 0, EphemeronHashTable::cast(weakset->table()).NumberOfDeletedElements());
// Make the global reference to the key weak.
std::pair<Handle<Object>*, int> handle_and_id(&key, 1234);
@@ -115,9 +115,9 @@
CcTest::PreciseCollectAllGarbage();
CHECK_EQ(1, NumberOfWeakCalls);
- CHECK_EQ(0, EphemeronHashTable::cast(weakset->table())->NumberOfElements());
+ CHECK_EQ(0, EphemeronHashTable::cast(weakset->table()).NumberOfElements());
CHECK_EQ(
- 1, EphemeronHashTable::cast(weakset->table())->NumberOfDeletedElements());
+ 1, EphemeronHashTable::cast(weakset->table()).NumberOfDeletedElements());
}
@@ -129,7 +129,7 @@
Handle<JSWeakSet> weakset = AllocateJSWeakSet(isolate);
// Check initial capacity.
- CHECK_EQ(32, EphemeronHashTable::cast(weakset->table())->Capacity());
+ CHECK_EQ(32, EphemeronHashTable::cast(weakset->table()).Capacity());
// Fill up weak set to trigger capacity change.
{
@@ -138,26 +138,25 @@
for (int i = 0; i < 32; i++) {
Handle<JSObject> object = factory->NewJSObjectFromMap(map);
Handle<Smi> smi(Smi::FromInt(i), isolate);
- int32_t hash = object->GetOrCreateHash(isolate)->value();
+ int32_t hash = object->GetOrCreateHash(isolate).value();
JSWeakCollection::Set(weakset, object, smi, hash);
}
}
// Check increased capacity.
- CHECK_EQ(128, EphemeronHashTable::cast(weakset->table())->Capacity());
+ CHECK_EQ(128, EphemeronHashTable::cast(weakset->table()).Capacity());
// Force a full GC.
- CHECK_EQ(32, EphemeronHashTable::cast(weakset->table())->NumberOfElements());
+ CHECK_EQ(32, EphemeronHashTable::cast(weakset->table()).NumberOfElements());
CHECK_EQ(
- 0, EphemeronHashTable::cast(weakset->table())->NumberOfDeletedElements());
+ 0, EphemeronHashTable::cast(weakset->table()).NumberOfDeletedElements());
CcTest::PreciseCollectAllGarbage();
- CHECK_EQ(0, EphemeronHashTable::cast(weakset->table())->NumberOfElements());
+ CHECK_EQ(0, EphemeronHashTable::cast(weakset->table()).NumberOfElements());
CHECK_EQ(
- 32,
- EphemeronHashTable::cast(weakset->table())->NumberOfDeletedElements());
+ 32, EphemeronHashTable::cast(weakset->table()).NumberOfDeletedElements());
// Check shrunk capacity.
- CHECK_EQ(32, EphemeronHashTable::cast(weakset->table())->Capacity());
+ CHECK_EQ(32, EphemeronHashTable::cast(weakset->table()).Capacity());
}
@@ -188,7 +187,7 @@
factory->NewJSObject(function, AllocationType::kOld);
CHECK(!Heap::InYoungGeneration(*object));
CHECK(!first_page->Contains(object->address()));
- int32_t hash = key->GetOrCreateHash(isolate)->value();
+ int32_t hash = key->GetOrCreateHash(isolate).value();
JSWeakCollection::Set(weakset, key, object, hash);
}
}
@@ -230,7 +229,7 @@
Handle<JSWeakSet> weakset = AllocateJSWeakSet(isolate);
for (int i = 0; i < 32; i++) {
Handle<Smi> smi(Smi::FromInt(i), isolate);
- int32_t hash = keys[i]->GetOrCreateHash(isolate)->value();
+ int32_t hash = keys[i]->GetOrCreateHash(isolate).value();
JSWeakCollection::Set(weakset, keys[i], smi, hash);
}
diff --git a/test/cctest/trace-extension.cc b/test/cctest/trace-extension.cc
index 3fddf21..08cc024 100644
--- a/test/cctest/trace-extension.cc
+++ b/test/cctest/trace-extension.cc
@@ -77,9 +77,9 @@
uint64_t kSmiValueMask =
(static_cast<uintptr_t>(1) << (kSmiValueSize - 1)) - 1;
uint64_t low_bits =
- Smi(*reinterpret_cast<Address*>(*args[0]))->value() & kSmiValueMask;
+ Smi(*reinterpret_cast<Address*>(*args[0])).value() & kSmiValueMask;
uint64_t high_bits =
- Smi(*reinterpret_cast<Address*>(*args[1]))->value() & kSmiValueMask;
+ Smi(*reinterpret_cast<Address*>(*args[1])).value() & kSmiValueMask;
Address fp =
static_cast<Address>((high_bits << (kSmiValueSize - 1)) | low_bits);
#else
diff --git a/test/cctest/wasm/test-run-wasm-module.cc b/test/cctest/wasm/test-run-wasm-module.cc
index cc8d329..795fa76 100644
--- a/test/cctest/wasm/test-run-wasm-module.cc
+++ b/test/cctest/wasm/test-run-wasm-module.cc
@@ -576,7 +576,7 @@
isolate, &thrower, ModuleWireBytes(buffer.begin(), buffer.end()))
.ToHandleChecked();
- Handle<JSArrayBuffer> memory(instance->memory_object()->array_buffer(),
+ Handle<JSArrayBuffer> memory(instance->memory_object().array_buffer(),
isolate);
int32_t* memory_array = reinterpret_cast<int32_t*>(memory->backing_store());
@@ -952,12 +952,12 @@
ManuallyExternalizedBuffer(JSArrayBuffer buffer, Isolate* isolate)
: isolate_(isolate),
buffer_(buffer, isolate),
- allocation_base_(buffer->allocation_base()),
- allocation_length_(buffer->allocation_length()),
+ allocation_base_(buffer.allocation_base()),
+ allocation_length_(buffer.allocation_length()),
should_free_(!isolate_->wasm_engine()->memory_tracker()->IsWasmMemory(
- buffer->backing_store())) {
+ buffer.backing_store())) {
if (!isolate_->wasm_engine()->memory_tracker()->IsWasmMemory(
- buffer->backing_store())) {
+ buffer.backing_store())) {
v8::Utils::ToLocal(buffer_)->Externalize();
}
}
@@ -1072,7 +1072,7 @@
contents.Data(), is_wasm_memory));
// Make sure we can write to the buffer without crashing
uint32_t* int_buffer =
- reinterpret_cast<uint32_t*>(mem->array_buffer()->backing_store());
+ reinterpret_cast<uint32_t*>(mem->array_buffer().backing_store());
int_buffer[0] = 0;
}
diff --git a/test/cctest/wasm/test-run-wasm.cc b/test/cctest/wasm/test-run-wasm.cc
index b5fe34a..5dd88ac 100644
--- a/test/cctest/wasm/test-run-wasm.cc
+++ b/test/cctest/wasm/test-run-wasm.cc
@@ -3703,7 +3703,7 @@
WASM_GET_LOCAL(1)));
NativeModule* native_module =
- r.builder().instance_object()->module_object()->native_module();
+ r.builder().instance_object()->module_object().native_module();
// This test only works if we managed to compile with Liftoff.
if (native_module->GetCode(add.function_index())->is_liftoff()) {
diff --git a/test/cctest/wasm/test-wasm-breakpoints.cc b/test/cctest/wasm/test-wasm-breakpoints.cc
index 51f3eca..bd3cce6 100644
--- a/test/cctest/wasm/test-wasm-breakpoints.cc
+++ b/test/cctest/wasm/test-wasm-breakpoints.cc
@@ -25,7 +25,7 @@
WasmModuleObject module_object, debug::Location start, debug::Location end,
std::initializer_list<debug::Location> expected_locations_init) {
std::vector<debug::BreakLocation> locations;
- bool success = module_object->GetPossibleBreakpoints(start, end, &locations);
+ bool success = module_object.GetPossibleBreakpoints(start, end, &locations);
CHECK(success);
printf("got %d locations: ", static_cast<int>(locations.size()));
@@ -48,7 +48,7 @@
void CheckLocationsFail(WasmModuleObject module_object, debug::Location start,
debug::Location end) {
std::vector<debug::BreakLocation> locations;
- bool success = module_object->GetPossibleBreakpoints(start, end, &locations);
+ bool success = module_object.GetPossibleBreakpoints(start, end, &locations);
CHECK(!success);
}
@@ -205,7 +205,7 @@
Handle<WasmInstanceObject> instance = summ.wasm_instance();
auto frame =
- instance->debug_info()->GetInterpretedFrame(frame_it.frame()->fp(), 0);
+ instance->debug_info().GetInterpretedFrame(frame_it.frame()->fp(), 0);
CHECK_EQ(expected.locals.size(), frame->GetLocalCount());
for (int i = 0; i < frame->GetLocalCount(); ++i) {
CHECK_EQ(WasmValWrapper{expected.locals[i]},
@@ -247,7 +247,7 @@
BUILD(runner, WASM_NOP, WASM_I32_ADD(WASM_ZERO, WASM_ONE));
WasmInstanceObject instance = *runner.builder().instance_object();
- WasmModuleObject module_object = instance->module_object();
+ WasmModuleObject module_object = instance.module_object();
std::vector<debug::Location> locations;
// Check all locations for function 0.
@@ -280,7 +280,7 @@
BreakHandler count_breaks(isolate, {{4, BreakHandler::Continue}});
- Handle<Object> global(isolate->context()->global_object(), isolate);
+ Handle<Object> global(isolate->context().global_object(), isolate);
MaybeHandle<Object> retval =
Execution::Call(isolate, main_fun_wrapper, global, 0, nullptr);
CHECK(!retval.is_null());
@@ -307,7 +307,7 @@
{5, BreakHandler::Continue} // I32Add
});
- Handle<Object> global(isolate->context()->global_object(), isolate);
+ Handle<Object> global(isolate->context().global_object(), isolate);
MaybeHandle<Object> retval =
Execution::Call(isolate, main_fun_wrapper, global, 0, nullptr);
CHECK(!retval.is_null());
@@ -351,7 +351,7 @@
{23, BreakHandler::Continue} // After Call
});
- Handle<Object> global(isolate->context()->global_object(), isolate);
+ Handle<Object> global(isolate->context().global_object(), isolate);
CHECK(!Execution::Call(isolate, main_fun_wrapper, global, 0, nullptr)
.is_null());
}
@@ -396,7 +396,7 @@
{wasmVec(7, 17L, 7.f, 8.5), wasmVec()}, // 10: end
});
- Handle<Object> global(isolate->context()->global_object(), isolate);
+ Handle<Object> global(isolate->context().global_object(), isolate);
Handle<Object> args[]{handle(Smi::FromInt(7), isolate)};
CHECK(!Execution::Call(isolate, main_fun_wrapper, global, 1, args).is_null());
}
diff --git a/test/cctest/wasm/test-wasm-interpreter-entry.cc b/test/cctest/wasm/test-wasm-interpreter-entry.cc
index 15e5077..792c7c5 100644
--- a/test/cctest/wasm/test-wasm-interpreter-entry.cc
+++ b/test/cctest/wasm/test-wasm-interpreter-entry.cc
@@ -59,7 +59,7 @@
Handle<Object> arg_objs[] = {isolate_->factory()->NewNumber(args)...};
uint64_t num_interpreted_before = debug_info_->NumInterpretedCalls();
- Handle<Object> global(isolate_->context()->global_object(), isolate_);
+ Handle<Object> global(isolate_->context().global_object(), isolate_);
MaybeHandle<Object> retval = Execution::Call(
isolate_, main_fun_wrapper_, global, arraysize(arg_objs), arg_objs);
uint64_t num_interpreted_after = debug_info_->NumInterpretedCalls();
diff --git a/test/cctest/wasm/test-wasm-shared-engine.cc b/test/cctest/wasm/test-wasm-shared-engine.cc
index 3aa6ffa..5fc245e 100644
--- a/test/cctest/wasm/test-wasm-shared-engine.cc
+++ b/test/cctest/wasm/test-wasm-shared-engine.cc
@@ -95,7 +95,7 @@
}
SharedModule ExportInstance(Handle<WasmInstanceObject> instance) {
- return instance->module_object()->shared_native_module();
+ return instance->module_object().shared_native_module();
}
int32_t Run(Handle<WasmInstanceObject> instance) {
diff --git a/test/cctest/wasm/test-wasm-stack.cc b/test/cctest/wasm/test-wasm-stack.cc
index d755901..3fc9614 100644
--- a/test/cctest/wasm/test-wasm-stack.cc
+++ b/test/cctest/wasm/test-wasm-stack.cc
@@ -129,7 +129,7 @@
Isolate* isolate = js_wasm_wrapper->GetIsolate();
isolate->SetCaptureStackTraceForUncaughtExceptions(true, 10,
v8::StackTrace::kOverview);
- Handle<Object> global(isolate->context()->global_object(), isolate);
+ Handle<Object> global(isolate->context().global_object(), isolate);
MaybeHandle<Object> maybe_exc;
Handle<Object> args[] = {js_wasm_wrapper};
MaybeHandle<Object> returnObjMaybe =
@@ -178,7 +178,7 @@
Isolate* isolate = js_wasm_wrapper->GetIsolate();
isolate->SetCaptureStackTraceForUncaughtExceptions(
true, 10, v8::StackTrace::kOverview);
- Handle<Object> global(isolate->context()->global_object(), isolate);
+ Handle<Object> global(isolate->context().global_object(), isolate);
MaybeHandle<Object> maybe_exc;
Handle<Object> args[] = {js_wasm_wrapper};
MaybeHandle<Object> maybe_return_obj =
diff --git a/test/cctest/wasm/test-wasm-trap-position.cc b/test/cctest/wasm/test-wasm-trap-position.cc
index b12c3b0..31e661a 100644
--- a/test/cctest/wasm/test-wasm-trap-position.cc
+++ b/test/cctest/wasm/test-wasm-trap-position.cc
@@ -83,7 +83,7 @@
Isolate* isolate = js_wasm_wrapper->GetIsolate();
isolate->SetCaptureStackTraceForUncaughtExceptions(true, 10,
v8::StackTrace::kOverview);
- Handle<Object> global(isolate->context()->global_object(), isolate);
+ Handle<Object> global(isolate->context().global_object(), isolate);
MaybeHandle<Object> maybe_exc;
Handle<Object> args[] = {js_wasm_wrapper};
MaybeHandle<Object> returnObjMaybe =
@@ -126,7 +126,7 @@
Isolate* isolate = js_wasm_wrapper->GetIsolate();
isolate->SetCaptureStackTraceForUncaughtExceptions(true, 10,
v8::StackTrace::kOverview);
- Handle<Object> global(isolate->context()->global_object(), isolate);
+ Handle<Object> global(isolate->context().global_object(), isolate);
MaybeHandle<Object> maybe_exc;
Handle<Object> args[] = {js_wasm_wrapper};
MaybeHandle<Object> returnObjMaybe =
diff --git a/test/cctest/wasm/wasm-run-utils.cc b/test/cctest/wasm/wasm-run-utils.cc
index 50c5dd0..1727aac 100644
--- a/test/cctest/wasm/wasm-run-utils.cc
+++ b/test/cctest/wasm/wasm-run-utils.cc
@@ -143,7 +143,7 @@
size_t max_num_sigs = MaxNumExportWrappers(test_module_.get());
Handle<FixedArray> export_wrappers =
isolate_->factory()->NewFixedArray(static_cast<int>(max_num_sigs));
- instance_object_->module_object()->set_export_wrappers(*export_wrappers);
+ instance_object_->module_object().set_export_wrappers(*export_wrappers);
}
Handle<JSFunction> TestingModuleBuilder::WrapCode(uint32_t index) {
@@ -492,7 +492,7 @@
Vector<const uint8_t> wire_bytes = builder_->instance_object()
->module_object()
- ->native_module()
+ .native_module()
->wire_bytes();
CompilationEnv env = builder_->CreateCompilationEnv();
@@ -503,7 +503,7 @@
FunctionBody func_body{function_->sig, function_->code.offset(),
func_wire_bytes.begin(), func_wire_bytes.end()};
NativeModule* native_module =
- builder_->instance_object()->module_object()->native_module();
+ builder_->instance_object()->module_object().native_module();
WasmCompilationUnit unit(function_->func_index, builder_->execution_tier());
WasmFeatures unused_detected_features;
WasmCompilationResult result = unit.ExecuteCompilation(
diff --git a/test/cctest/wasm/wasm-run-utils.h b/test/cctest/wasm/wasm-run-utils.h
index 8d06253..98ec5e1 100644
--- a/test/cctest/wasm/wasm-run-utils.h
+++ b/test/cctest/wasm/wasm-run-utils.h
@@ -170,7 +170,7 @@
void SetMaxMemPages(uint32_t maximum_pages) {
test_module_->maximum_pages = maximum_pages;
if (instance_object()->has_memory_object()) {
- instance_object()->memory_object()->set_maximum_pages(maximum_pages);
+ instance_object()->memory_object().set_maximum_pages(maximum_pages);
}
}
@@ -527,7 +527,7 @@
jsfuncs_[function_index] = builder_.WrapCode(function_index);
}
Handle<JSFunction> jsfunc = jsfuncs_[function_index];
- Handle<Object> global(isolate->context()->global_object(), isolate);
+ Handle<Object> global(isolate->context().global_object(), isolate);
MaybeHandle<Object> retval =
Execution::TryCall(isolate, jsfunc, global, count, buffer,
Execution::MessageHandling::kReport, nullptr);
@@ -540,7 +540,7 @@
CHECK_EQ(expected, Smi::ToInt(*result));
} else {
CHECK(result->IsHeapNumber());
- CHECK_DOUBLE_EQ(expected, HeapNumber::cast(*result)->value());
+ CHECK_DOUBLE_EQ(expected, HeapNumber::cast(*result).value());
}
}
diff --git a/test/common/wasm/wasm-module-runner.cc b/test/common/wasm/wasm-module-runner.cc
index f2fb53e..377cc5c 100644
--- a/test/common/wasm/wasm-module-runner.cc
+++ b/test/common/wasm/wasm-module-runner.cc
@@ -261,7 +261,7 @@
return Smi::ToInt(*result);
}
if (result->IsHeapNumber()) {
- return static_cast<int32_t>(HeapNumber::cast(*result)->value());
+ return static_cast<int32_t>(HeapNumber::cast(*result).value());
}
thrower->RuntimeError(
"Calling exported wasm function failed: Return value should be number");
diff --git a/test/mkgrokdump/mkgrokdump.cc b/test/mkgrokdump/mkgrokdump.cc
index d6f27e4..88c447e 100644
--- a/test/mkgrokdump/mkgrokdump.cc
+++ b/test/mkgrokdump/mkgrokdump.cc
@@ -60,7 +60,7 @@
if (root_name == nullptr) return;
i::PrintF(" (\"%s\", 0x%05" V8PRIxPTR "): (%d, \"%s\"),\n", space_name,
- root_ptr, map->instance_type(), root_name);
+ root_ptr, map.instance_type(), root_name);
#undef MUTABLE_ROOT_LIST_CASE
#undef RO_ROOT_LIST_CASE
@@ -124,13 +124,13 @@
i::ReadOnlyHeapIterator ro_iterator(heap->read_only_heap());
for (i::HeapObject object = ro_iterator.Next(); !object.is_null();
object = ro_iterator.Next()) {
- if (!object->IsMap()) continue;
+ if (!object.IsMap()) continue;
DumpKnownMap(heap, i::Heap::GetSpaceName(i::RO_SPACE), object);
}
i::HeapObjectIterator iterator(heap->map_space());
for (i::HeapObject object = iterator.Next(); !object.is_null();
object = iterator.Next()) {
- if (!object->IsMap()) continue;
+ if (!object.IsMap()) continue;
DumpKnownMap(heap, i::Heap::GetSpaceName(i::MAP_SPACE), object);
}
i::PrintF("}\n");
@@ -144,7 +144,7 @@
for (i::HeapObject object = ro_iterator.Next(); !object.is_null();
object = ro_iterator.Next()) {
// Skip read-only heap maps, they will be reported elsewhere.
- if (object->IsMap()) continue;
+ if (object.IsMap()) continue;
DumpKnownObject(heap, i::Heap::GetSpaceName(i::RO_SPACE), object);
}
diff --git a/test/unittests/background-compile-task-unittest.cc b/test/unittests/background-compile-task-unittest.cc
index 6f23442..b1a093e 100644
--- a/test/unittests/background-compile-task-unittest.cc
+++ b/test/unittests/background-compile-task-unittest.cc
@@ -233,7 +233,7 @@
Handle<JSFunction> e = RunJS<JSFunction>("f();");
- ASSERT_TRUE(e->shared()->is_compiled());
+ ASSERT_TRUE(e->shared().is_compiled());
}
TEST_F(BackgroundCompileTaskTest, LazyInnerFunctions) {
@@ -261,7 +261,7 @@
Handle<JSFunction> e = RunJS<JSFunction>("f();");
- ASSERT_FALSE(e->shared()->is_compiled());
+ ASSERT_FALSE(e->shared().is_compiled());
}
} // namespace internal
diff --git a/test/unittests/compiler/js-create-lowering-unittest.cc b/test/unittests/compiler/js-create-lowering-unittest.cc
index 29f3201..e416122 100644
--- a/test/unittests/compiler/js-create-lowering-unittest.cc
+++ b/test/unittests/compiler/js-create-lowering-unittest.cc
@@ -85,7 +85,7 @@
EXPECT_THAT(
r.replacement(),
IsFinishRegion(
- IsAllocate(IsNumberConstant(function->initial_map()->instance_size()),
+ IsAllocate(IsNumberConstant(function->initial_map().instance_size()),
IsBeginRegion(effect), control),
_));
}
diff --git a/test/unittests/heap/spaces-unittest.cc b/test/unittests/heap/spaces-unittest.cc
index f803c3f..140d3d4 100644
--- a/test/unittests/heap/spaces-unittest.cc
+++ b/test/unittests/heap/spaces-unittest.cc
@@ -39,7 +39,7 @@
HeapObject object =
compaction_space->AllocateRawUnaligned(kMaxRegularHeapObjectSize)
.ToObjectChecked();
- heap->CreateFillerObjectAt(object->address(), kMaxRegularHeapObjectSize,
+ heap->CreateFillerObjectAt(object.address(), kMaxRegularHeapObjectSize,
ClearRecordedSlots::kNo);
}
int pages_in_old_space = old_space->CountTotalPages();
diff --git a/test/unittests/interpreter/bytecode-array-builder-unittest.cc b/test/unittests/interpreter/bytecode-array-builder-unittest.cc
index a77e074..6595eea 100644
--- a/test/unittests/interpreter/bytecode-array-builder-unittest.cc
+++ b/test/unittests/interpreter/bytecode-array-builder-unittest.cc
@@ -560,7 +560,7 @@
ast_factory.Internalize(isolate());
Handle<BytecodeArray> array = builder.ToBytecodeArray(isolate());
// Should only have one entry for each identical constant.
- EXPECT_EQ(4, array->constant_pool()->length());
+ EXPECT_EQ(4, array->constant_pool().length());
}
TEST_F(BytecodeArrayBuilderTest, ForwardJumps) {
diff --git a/test/unittests/interpreter/bytecode-array-iterator-unittest.cc b/test/unittests/interpreter/bytecode-array-iterator-unittest.cc
index 0e72e2e..46620be 100644
--- a/test/unittests/interpreter/bytecode-array-iterator-unittest.cc
+++ b/test/unittests/interpreter/bytecode-array-iterator-unittest.cc
@@ -81,7 +81,7 @@
EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaConstant);
EXPECT_EQ(iterator.current_offset(), offset);
EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
- EXPECT_EQ(iterator.GetConstantForIndexOperand(0)->Number(), heap_num_0);
+ EXPECT_EQ(iterator.GetConstantForIndexOperand(0).Number(), heap_num_0);
CHECK(!iterator.done());
offset += Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
iterator.Advance();
@@ -98,7 +98,7 @@
EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaConstant);
EXPECT_EQ(iterator.current_offset(), offset);
EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
- EXPECT_EQ(iterator.GetConstantForIndexOperand(0)->Number(), heap_num_1);
+ EXPECT_EQ(iterator.GetConstantForIndexOperand(0).Number(), heap_num_1);
CHECK(!iterator.done());
offset += Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
iterator.Advance();
diff --git a/test/unittests/interpreter/bytecode-array-random-iterator-unittest.cc b/test/unittests/interpreter/bytecode-array-random-iterator-unittest.cc
index 6ec19fb..fa44165 100644
--- a/test/unittests/interpreter/bytecode-array-random-iterator-unittest.cc
+++ b/test/unittests/interpreter/bytecode-array-random-iterator-unittest.cc
@@ -184,7 +184,7 @@
EXPECT_EQ(iterator.current_index(), 0);
EXPECT_EQ(iterator.current_offset(), 0);
EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
- EXPECT_EQ(iterator.GetConstantForIndexOperand(0)->Number(), heap_num_0);
+ EXPECT_EQ(iterator.GetConstantForIndexOperand(0).Number(), heap_num_0);
ASSERT_TRUE(iterator.IsValid());
}
@@ -331,7 +331,7 @@
EXPECT_EQ(iterator.current_index(), 2);
EXPECT_EQ(iterator.current_offset(), offset);
EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
- EXPECT_EQ(iterator.GetConstantForIndexOperand(0)->Number(), heap_num_1);
+ EXPECT_EQ(iterator.GetConstantForIndexOperand(0).Number(), heap_num_1);
ASSERT_TRUE(iterator.IsValid());
iterator.GoToIndex(18);
@@ -488,7 +488,7 @@
EXPECT_EQ(iterator.current_index(), 0);
EXPECT_EQ(iterator.current_offset(), offset);
EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
- EXPECT_EQ(iterator.GetConstantForIndexOperand(0)->Number(), heap_num_0);
+ EXPECT_EQ(iterator.GetConstantForIndexOperand(0).Number(), heap_num_0);
ASSERT_TRUE(iterator.IsValid());
offset += Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
++iterator;
@@ -507,7 +507,7 @@
EXPECT_EQ(iterator.current_index(), 2);
EXPECT_EQ(iterator.current_offset(), offset);
EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
- EXPECT_EQ(iterator.GetConstantForIndexOperand(0)->Number(), heap_num_1);
+ EXPECT_EQ(iterator.GetConstantForIndexOperand(0).Number(), heap_num_1);
ASSERT_TRUE(iterator.IsValid());
offset += Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
++iterator;
@@ -968,7 +968,7 @@
EXPECT_EQ(iterator.current_index(), 2);
EXPECT_EQ(iterator.current_offset(), offset);
EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
- EXPECT_EQ(iterator.GetConstantForIndexOperand(0)->Number(), heap_num_1);
+ EXPECT_EQ(iterator.GetConstantForIndexOperand(0).Number(), heap_num_1);
ASSERT_TRUE(iterator.IsValid());
--iterator;
@@ -987,7 +987,7 @@
EXPECT_EQ(iterator.current_index(), 0);
EXPECT_EQ(iterator.current_offset(), offset);
EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
- EXPECT_EQ(iterator.GetConstantForIndexOperand(0)->Number(), heap_num_0);
+ EXPECT_EQ(iterator.GetConstantForIndexOperand(0).Number(), heap_num_0);
ASSERT_TRUE(iterator.IsValid());
--iterator;
ASSERT_FALSE(iterator.IsValid());
diff --git a/test/unittests/interpreter/constant-array-builder-unittest.cc b/test/unittests/interpreter/constant-array-builder-unittest.cc
index 1993d6f..4a6847a 100644
--- a/test/unittests/interpreter/constant-array-builder-unittest.cc
+++ b/test/unittests/interpreter/constant-array-builder-unittest.cc
@@ -158,12 +158,12 @@
for (size_t i = 0; i < k8BitCapacity - reserved; i++) {
Object value = constant_array->get(static_cast<int>(i));
Smi smi = Smi::FromInt(static_cast<int>(i));
- CHECK(value->SameValue(smi));
+ CHECK(value.SameValue(smi));
}
for (size_t i = k8BitCapacity; i < 2 * k8BitCapacity + reserved; i++) {
Object value = constant_array->get(static_cast<int>(i));
Smi smi = Smi::FromInt(static_cast<int>(i - reserved));
- CHECK(value->SameValue(smi));
+ CHECK(value.SameValue(smi));
}
}
}
@@ -209,7 +209,7 @@
static_cast<int>(k8BitCapacity + reserved));
for (size_t i = 0; i < k8BitCapacity + reserved; i++) {
Object value = constant_array->get(static_cast<int>(i));
- CHECK(value->SameValue(*isolate()->factory()->NewNumberFromSize(i)));
+ CHECK(value.SameValue(*isolate()->factory()->NewNumberFromSize(i)));
}
}
}
@@ -240,9 +240,9 @@
for (size_t i = 0; i < k8BitCapacity; i++) {
Object original = constant_array->get(static_cast<int>(k8BitCapacity + i));
Object duplicate = constant_array->get(static_cast<int>(i));
- CHECK(original->SameValue(duplicate));
+ CHECK(original.SameValue(duplicate));
Handle<Object> reference = isolate()->factory()->NewNumberFromSize(i);
- CHECK(original->SameValue(*reference));
+ CHECK(original.SameValue(*reference));
}
}
@@ -304,13 +304,13 @@
Handle<FixedArray> constant_array = builder.ToFixedArray(isolate());
CHECK_EQ(constant_array->length(), k8BitCapacity + 1);
for (int i = kNumberOfHoles; i < k8BitCapacity; i++) {
- CHECK(constant_array->get(i)->SameValue(
+ CHECK(constant_array->get(i).SameValue(
*isolate()->factory()->the_hole_value()));
}
CHECK(!constant_array->get(kNumberOfHoles - 1)
- ->SameValue(*isolate()->factory()->the_hole_value()));
+ .SameValue(*isolate()->factory()->the_hole_value()));
CHECK(!constant_array->get(k8BitCapacity)
- ->SameValue(*isolate()->factory()->the_hole_value()));
+ .SameValue(*isolate()->factory()->the_hole_value()));
}
TEST_F(ConstantArrayBuilderTest, ReservationsAtAllScales) {
@@ -354,7 +354,7 @@
} else {
expected = isolate()->factory()->the_hole_value();
}
- CHECK(constant_array->get(i)->SameValue(*expected));
+ CHECK(constant_array->get(i).SameValue(*expected));
}
}
diff --git a/test/unittests/microtask-queue-unittest.cc b/test/unittests/microtask-queue-unittest.cc
index 2841c48..e20661c 100644
--- a/test/unittests/microtask-queue-unittest.cc
+++ b/test/unittests/microtask-queue-unittest.cc
@@ -240,9 +240,9 @@
Handle<Context> context2 = Utils::OpenHandle(*v8_context2, isolate());
Handle<Context> context3 = Utils::OpenHandle(*v8_context3, isolate());
Handle<Context> context4 = Utils::OpenHandle(*v8_context3, isolate());
- context2->native_context()->set_microtask_queue(microtask_queue());
- context3->native_context()->set_microtask_queue(microtask_queue());
- context4->native_context()->set_microtask_queue(microtask_queue());
+ context2->native_context().set_microtask_queue(microtask_queue());
+ context3->native_context().set_microtask_queue(microtask_queue());
+ context4->native_context().set_microtask_queue(microtask_queue());
Handle<JSFunction> handler;
Handle<JSProxy> proxy;
@@ -546,7 +546,7 @@
Local<v8::Context> sub_context = v8::Context::New(v8_isolate());
Utils::OpenHandle(*sub_context)
->native_context()
- ->set_microtask_queue(microtask_queue());
+ .set_microtask_queue(microtask_queue());
Handle<JSArray> result;
Handle<JSFunction> stale_handler;
diff --git a/test/unittests/object-unittest.cc b/test/unittests/object-unittest.cc
index 6d6b84d..2f38fd4 100644
--- a/test/unittests/object-unittest.cc
+++ b/test/unittests/object-unittest.cc
@@ -156,10 +156,10 @@
// Check that the empty_function has a properly set up ScopeInfo.
Handle<JSFunction> function = RunJS<JSFunction>("(function(){})");
- Handle<ScopeInfo> scope_info(function->shared()->scope_info(),
+ Handle<ScopeInfo> scope_info(function->shared().scope_info(),
function->GetIsolate());
Handle<ScopeInfo> empty_function_scope_info(
- isolate()->empty_function()->shared()->scope_info(),
+ isolate()->empty_function()->shared().scope_info(),
function->GetIsolate());
EXPECT_EQ(scope_info->length(), empty_function_scope_info->length());