Allow Oilpan heap objects to account for their external allocations.

The GC heuristics take into account the amount of memory allocated since
the last GC, measured in Oilpan heap bytes.

They do not, however, consider the amount of external memory owned by an
Oilpan object. Allow objects to register such external allocations and
have the GC heuristics take them into account -- if the heap isn't
otherwise considered worth GCing but its objects keep references to a
large amount of external memory, scheduling a GC may lessen the overall
memory pressure on the process.
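
As an illustration, a heap object owning an off-heap buffer would
register the allocation when created and re-report the still-live
amount from its trace method -- a minimal sketch only, along the lines
of the Text node changes below (ExternalBufferHolder is hypothetical):

    // Hypothetical Oilpan heap object owning an off-heap buffer.
    class ExternalBufferHolder
        : public GarbageCollectedFinalized<ExternalBufferHolder> {
    public:
        explicit ExternalBufferHolder(size_t size)
            : m_buffer(new char[size])
            , m_size(size)
        {
            // Tell the GC heuristics about memory they cannot see.
            Heap::increaseExternallyAllocatedBytes(m_size);
        }
        ~ExternalBufferHolder() { delete[] m_buffer; }

        void trace(Visitor*)
        {
            // During marking, re-report the external memory as still live.
            Heap::increaseExternallyAllocatedBytesAlive(m_size);
        }

    private:
        char* m_buffer;
        size_t m_size;
    };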

Along with this, also add support for letting the outside world notify
Oilpan that a GC is now really worth considering. This is intended for
use by other allocators when they run into near-OOM conditions.
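
For example, another allocator's slow path might poke Oilpan along
these lines (a sketch; nearOutOfMemory() is a hypothetical predicate):

    // In some other allocator's slow path (hypothetical OOM check.)
    if (nearOutOfMemory()) {
        // Have Oilpan GC at the next safe point; collecting heap
        // objects may release the external memory they own.
        Heap::requestUrgentGC();
    }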

R=haraken
BUG=456498

Review URL: https://codereview.chromium.org/875503003

git-svn-id: svn://svn.chromium.org/blink/trunk@190702 bbb929c8-8fbe-4397-9dbb-9b2b20218538
diff --git a/Source/core/dom/Text.cpp b/Source/core/dom/Text.cpp
index c983fbd..ab887a5 100644
--- a/Source/core/dom/Text.cpp
+++ b/Source/core/dom/Text.cpp
@@ -42,13 +42,48 @@
 
 namespace blink {
 
+#if ENABLE(OILPAN)
+namespace {
+// If the external string kept by a Text node exceeds this threshold length,
+// Oilpan is informed. External allocation amounts owned by heap objects are
+// taken into account when scheduling urgent Oilpan GCs.
+//
+// FIXME: having only Text nodes with strings above an ad-hoc local threshold
+// influence Oilpan's GC behavior isn't a satisfactory long-term solution.
+// But for code that allocates a lot of Text nodes in tight loops, and not much
+// else, we do have to trigger Oilpan GCs to avoid PartitionAlloc OOMs. The
+// accounting adds overhead to the allocation of every Text node, however, so
+// for now only strings above the given threshold are registered. TBC.
+const size_t stringLengthThreshold = 256;
+
+void increaseExternallyAllocatedBytesIfNeeded(size_t length)
+{
+    if (length > stringLengthThreshold)
+        Heap::increaseExternallyAllocatedBytes(length);
+}
+
+void increaseExternallyAllocatedBytesAliveIfNeeded(size_t length)
+{
+    if (length > stringLengthThreshold)
+        Heap::increaseExternallyAllocatedBytesAlive(length);
+}
+
+} // namespace
+#endif
+
 PassRefPtrWillBeRawPtr<Text> Text::create(Document& document, const String& data)
 {
+#if ENABLE(OILPAN)
+    increaseExternallyAllocatedBytesIfNeeded(data.length());
+#endif
     return adoptRefWillBeNoop(new Text(document, data, CreateText));
 }
 
 PassRefPtrWillBeRawPtr<Text> Text::createEditingText(Document& document, const String& data)
 {
+#if ENABLE(OILPAN)
+    increaseExternallyAllocatedBytesIfNeeded(data.length());
+#endif
     return adoptRefWillBeNoop(new Text(document, data, CreateEditingText));
 }
 
@@ -408,6 +443,14 @@
     return create(document(), data);
 }
 
+void Text::trace(Visitor* visitor)
+{
+#if ENABLE(OILPAN)
+    increaseExternallyAllocatedBytesAliveIfNeeded(m_data.length());
+#endif
+    CharacterData::trace(visitor);
+}
+
 #ifndef NDEBUG
 void Text::formatForDebugger(char *buffer, unsigned length) const
 {
diff --git a/Source/core/dom/Text.h b/Source/core/dom/Text.h
index 1b4c0bf..b7b205a 100644
--- a/Source/core/dom/Text.h
+++ b/Source/core/dom/Text.h
@@ -61,6 +61,8 @@
     virtual bool canContainRangeEndPoint() const override final { return true; }
     virtual NodeType nodeType() const override;
 
+    virtual void trace(Visitor*) override;
+
 protected:
     Text(TreeScope& treeScope, const String& data, ConstructionType type)
         : CharacterData(treeScope, data, type) { }
diff --git a/Source/platform/heap/Heap.cpp b/Source/platform/heap/Heap.cpp
index e21fdf6..3edbf62 100644
--- a/Source/platform/heap/Heap.cpp
+++ b/Source/platform/heap/Heap.cpp
@@ -2475,8 +2475,7 @@
     s_markingVisitor->configureEagerTraceLimit();
     ASSERT(s_markingVisitor->canTraceEagerly());
 
-    Heap::resetMarkedObjectSize();
-    Heap::resetAllocatedObjectSize();
+    Heap::resetHeapCounters();
 
     // 1. Trace persistent roots.
     ThreadState::visitPersistentRoots(s_markingVisitor);
@@ -2840,6 +2839,32 @@
     delete current;
 }
 
+void Heap::resetHeapCounters()
+{
+    ASSERT(ThreadState::current()->isInGC());
+
+    s_allocatedObjectSize = 0;
+    s_markedObjectSize = 0;
+
+    // Similarly, reset the amount of externally allocated memory.
+    s_externallyAllocatedBytes = 0;
+    s_externallyAllocatedBytesAlive = 0;
+
+    s_requestedUrgentGC = false;
+}
+
+void Heap::requestUrgentGC()
+{
+    // The urgent-GC flag will be considered the next time an out-of-line
+    // allocation is made. Bump allocation from the current block will
+    // proceed until the block can no longer service an allocation request.
+    //
+    // FIXME: if that delays urgently needed GCs for too long, consider
+    // flushing out per-heap "allocation points" to trigger the GC
+    // right away.
+    releaseStore(&s_requestedUrgentGC, 1);
+}
+
 Visitor* Heap::s_markingVisitor;
 CallbackStack* Heap::s_markingStack;
 CallbackStack* Heap::s_postMarkingCallbackStack;
@@ -2855,4 +2880,8 @@
 size_t Heap::s_allocatedSpace = 0;
 size_t Heap::s_markedObjectSize = 0;
 
+size_t Heap::s_externallyAllocatedBytes = 0;
+size_t Heap::s_externallyAllocatedBytesAlive = 0;
+unsigned Heap::s_requestedUrgentGC = 0;
+
 } // namespace blink
diff --git a/Source/platform/heap/Heap.h b/Source/platform/heap/Heap.h
index bda9917..2dc38fc 100644
--- a/Source/platform/heap/Heap.h
+++ b/Source/platform/heap/Heap.h
@@ -999,8 +999,25 @@
     static void increaseAllocatedSpace(size_t delta) { atomicAdd(&s_allocatedSpace, static_cast<long>(delta)); }
     static void decreaseAllocatedSpace(size_t delta) { atomicSubtract(&s_allocatedSpace, static_cast<long>(delta)); }
     static size_t allocatedSpace() { return acquireLoad(&s_allocatedSpace); }
+
     static double estimatedMarkingTime();
 
+    // On object allocation, register the object's externally allocated memory.
+    static inline void increaseExternallyAllocatedBytes(size_t);
+    static size_t externallyAllocatedBytes() { return acquireLoad(&s_externallyAllocatedBytes); }
+
+    // On object tracing, register the object's externally allocated memory (as still live).
+    static void increaseExternallyAllocatedBytesAlive(size_t delta)
+    {
+        ASSERT(ThreadState::current()->isInGC());
+        s_externallyAllocatedBytesAlive += delta;
+    }
+    static size_t externallyAllocatedBytesAlive() { return s_externallyAllocatedBytesAlive; }
+
+    static void requestUrgentGC();
+    static void clearUrgentGC() { releaseStore(&s_requestedUrgentGC, 0); }
+    static bool isUrgentGCRequested() { return acquireLoad(&s_requestedUrgentGC); }
+
 private:
     // A RegionTree is a simple binary search tree of PageMemoryRegions sorted
     // by base addresses.
@@ -1021,8 +1038,8 @@
         RegionTree* m_right;
     };
 
-    static void resetAllocatedObjectSize() { ASSERT(ThreadState::current()->isInGC()); s_allocatedObjectSize = 0; }
-    static void resetMarkedObjectSize() { ASSERT(ThreadState::current()->isInGC()); s_markedObjectSize = 0; }
+    // Reset counters that track live and allocated-since-last-GC sizes.
+    static void resetHeapCounters();
 
     static Visitor* s_markingVisitor;
     static CallbackStack* s_markingStack;
@@ -1038,6 +1055,10 @@
     static size_t s_allocatedSpace;
     static size_t s_allocatedObjectSize;
     static size_t s_markedObjectSize;
+    static size_t s_externallyAllocatedBytes;
+    static size_t s_externallyAllocatedBytesAlive;
+    static unsigned s_requestedUrgentGC;
+
     friend class ThreadState;
 };
 
@@ -1505,6 +1526,28 @@
     return address;
 }
 
+void Heap::increaseExternallyAllocatedBytes(size_t delta)
+{
+    // Flag GC urgency once external allocations since the last GC exceed
+    // half of what was externally alive at the last GC, and at least 100MB.
+    //
+    // FIXME: consider other, 'better' policies (e.g., have the count of
+    // heap objects with external allocations be taken into
+    // account, ...) The overall goal here is to trigger a
+    // GC such that it considerably lessens memory pressure
+    // for a renderer process, when absolutely needed.
+    size_t externalBytesAllocatedSinceLastGC = atomicAdd(&s_externallyAllocatedBytes, static_cast<long>(delta));
+    if (LIKELY(externalBytesAllocatedSinceLastGC < 100 * 1024 * 1024))
+        return;
+
+    if (UNLIKELY(isUrgentGCRequested()))
+        return;
+
+    size_t externalBytesAliveAtLastGC = externallyAllocatedBytesAlive();
+    if (UNLIKELY(externalBytesAllocatedSinceLastGC > externalBytesAliveAtLastGC / 2))
+        Heap::requestUrgentGC();
+}
+
 class HeapAllocatorQuantizer {
 public:
     template<typename T>
diff --git a/Source/platform/heap/ThreadState.cpp b/Source/platform/heap/ThreadState.cpp
index 2be4900..08cae49e 100644
--- a/Source/platform/heap/ThreadState.cpp
+++ b/Source/platform/heap/ThreadState.cpp
@@ -673,6 +673,9 @@
     if (UNLIKELY(m_gcForbiddenCount))
         return false;
 
+    if (Heap::isUrgentGCRequested())
+        return true;
+
     size_t newSize = Heap::allocatedObjectSize();
     if (newSize >= 300 * 1024 * 1024) {
         // If we consume too much memory, trigger a conservative GC
@@ -697,14 +700,18 @@
 {
     checkThread();
     // Allocation is allowed during sweeping, but those allocations should not
-    // trigger nested GCs
-    if (isSweepingInProgress())
+    // trigger nested GCs. Does not apply if an urgent GC has been requested.
+    if (isSweepingInProgress() && UNLIKELY(!Heap::isUrgentGCRequested()))
         return;
     ASSERT(!sweepForbidden());
 
-    if (shouldForceConservativeGC())
-        Heap::collectGarbage(ThreadState::HeapPointersOnStack, ThreadState::GCWithoutSweep);
-    else if (shouldSchedulePreciseGC())
+    if (shouldForceConservativeGC()) {
+        // If GC is deemed urgent, eagerly sweep and finalize any external allocations right away.
+        GCType gcType = Heap::isUrgentGCRequested() ? GCWithSweep : GCWithoutSweep;
+        Heap::collectGarbage(HeapPointersOnStack, gcType);
+        return;
+    }
+    if (shouldSchedulePreciseGC())
         schedulePreciseGC();
     else if (shouldScheduleIdleGC())
         scheduleIdleGC();