[heap] Scavenger fixes
Various Scavenger fixes for smaller issues that have accumulated over
the last few years.
Bug: chromium:738865
Change-Id: I7573e438eba030944b99c65807944c662526a171
Reviewed-on: https://chromium-review.googlesource.com/567190
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#46578}
diff --git a/src/heap/scavenger-inl.h b/src/heap/scavenger-inl.h
index 72d70c2..2b22851 100644
--- a/src/heap/scavenger-inl.h
+++ b/src/heap/scavenger-inl.h
@@ -34,11 +34,8 @@
} // namespace
-// Helper function used by CopyObject to copy a source object to an
-// allocated target object and update the forwarding pointer in the source
-// object. Returns the target object.
-HeapObject* Scavenger::MigrateObject(HeapObject* source, HeapObject* target,
- int size) {
+void Scavenger::MigrateObject(HeapObject* source, HeapObject* target,
+ int size) {
// If we migrate into to-space, then the to-space top pointer should be
// right after the target object. Incorporate double alignment
// over-allocation.
@@ -61,7 +58,6 @@
if (is_incremental_marking_) {
heap()->incremental_marking()->TransferColor(source, target);
}
- return target;
}
bool Scavenger::SemiSpaceCopyObject(Map* map, HeapObject** slot,
@@ -71,11 +67,11 @@
AllocationResult allocation =
heap()->new_space()->AllocateRaw(object_size, alignment);
- HeapObject* target = NULL; // Initialization to please compiler.
+ HeapObject* target = nullptr;
if (allocation.To(&target)) {
+ DCHECK(ObjectMarking::IsWhite(
+ target, heap()->mark_compact_collector()->marking_state(target)));
MigrateObject(object, target, object_size);
-
- // Update slot to new target.
*slot = target;
copied_list_.Insert(target, object_size);
@@ -91,18 +87,12 @@
AllocationResult allocation =
heap()->old_space()->AllocateRaw(object_size, alignment);
- HeapObject* target = NULL; // Initialization to please compiler.
+ HeapObject* target = nullptr;
if (allocation.To(&target)) {
DCHECK(ObjectMarking::IsWhite(
target, heap()->mark_compact_collector()->marking_state(target)));
MigrateObject(object, target, object_size);
-
- // Update slot to new target using CAS. A concurrent sweeper thread my
- // filter the slot concurrently.
- HeapObject* old = *slot;
- base::Release_CompareAndSwap(reinterpret_cast<base::AtomicWord*>(slot),
- reinterpret_cast<base::AtomicWord>(old),
- reinterpret_cast<base::AtomicWord>(target));
+ *slot = target;
if (!ContainsOnlyData(static_cast<VisitorId>(map->visitor_id()))) {
promotion_list_.Push(ObjectAndSize(target, object_size));
@@ -248,10 +238,11 @@
heap()->UpdateAllocationSite<Heap::kCached>(object,
&local_pretenuring_feedback_);
+ Map* map = first_word.ToMap();
// AllocationMementos are unrooted and shouldn't survive a scavenge
- DCHECK_NE(heap()->allocation_memento_map(), object->map());
+ DCHECK_NE(heap()->allocation_memento_map(), map);
// Call the slow part of scavenge object.
- EvacuateObject(p, first_word.ToMap(), object);
+ EvacuateObject(p, map, object);
}
SlotCallbackResult Scavenger::CheckAndScavengeObject(Heap* heap,
diff --git a/src/heap/scavenger.cc b/src/heap/scavenger.cc
index ca07f82..551d004 100644
--- a/src/heap/scavenger.cc
+++ b/src/heap/scavenger.cc
@@ -21,10 +21,9 @@
inline void VisitPointers(HeapObject* host, Object** start,
Object** end) final {
- Address slot_address = reinterpret_cast<Address>(start);
- Page* page = Page::FromAddress(slot_address);
-
- while (slot_address < reinterpret_cast<Address>(end)) {
+ for (Address slot_address = reinterpret_cast<Address>(start);
+ slot_address < reinterpret_cast<Address>(end);
+ slot_address += kPointerSize) {
Object** slot = reinterpret_cast<Object**>(slot_address);
Object* target = *slot;
@@ -36,7 +35,8 @@
if (heap_->InNewSpace(target)) {
SLOW_DCHECK(target->IsHeapObject());
SLOW_DCHECK(heap_->InToSpace(target));
- RememberedSet<OLD_TO_NEW>::Insert(page, slot_address);
+ RememberedSet<OLD_TO_NEW>::Insert(Page::FromAddress(slot_address),
+ slot_address);
}
SLOW_DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(
HeapObject::cast(target)));
@@ -46,8 +46,6 @@
heap_->mark_compact_collector()->RecordSlot(host, slot, target);
}
}
-
- slot_address += kPointerSize;
}
}
@@ -63,7 +61,7 @@
private:
Heap* const heap_;
Scavenger* const scavenger_;
- bool record_slots_;
+ const bool record_slots_;
};
void Scavenger::IterateAndScavengePromotedObject(HeapObject* target, int size) {
diff --git a/src/heap/scavenger.h b/src/heap/scavenger.h
index 956ffb2..aeb4bf9 100644
--- a/src/heap/scavenger.h
+++ b/src/heap/scavenger.h
@@ -100,8 +100,9 @@
inline Heap* heap() { return heap_; }
- V8_INLINE HeapObject* MigrateObject(HeapObject* source, HeapObject* target,
- int size);
+ // Copies |source| to |target| and sets the forwarding pointer in |source|.
+ V8_INLINE void MigrateObject(HeapObject* source, HeapObject* target,
+ int size);
V8_INLINE bool SemiSpaceCopyObject(Map* map, HeapObject** slot,
HeapObject* object, int object_size);