author | Fabian Kosmale <[email protected]> | 2024-01-17 16:19:54 +0100
committer | Fabian Kosmale <[email protected]> | 2024-03-05 14:06:30 +0100
commit | a32bcfe03db015f0b0e8c2e217dd603ad23edbba (patch)
tree | 6a7ab7b959845a275ccb79641672cdbdc5fe9205 /src/qml/memory/qv4mm.cpp
parent | 2b7e1b11a105af928a8d2999e60bb1110af3edae (diff)
qv4mm: Update non-managed memory limits at correct point in time
With the incremental gc, we might not have freed any memory at all after
the gc call, as we might only be in the marking phase.
Instead, update the limits once we know that we're done, after sweeping
has completed.
Additionally, move the assertion that the gc does not lose memory
there as well, so that it is checked on every sweep.
Change-Id: I444ff8817f39153f6f81bb6d21b6b87ae253a5f9
Reviewed-by: Ulf Hermann <[email protected]>
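For context, the heuristic that now runs after sweeping grows the unmanaged-heap GC limit when the heap uses more than 75% of the limit and shrinks it when usage drops below 25%. Below is a minimal standalone sketch of that heuristic, with simplified names, `std::max` in place of `qMax`, and an assumed placeholder value for `MinUnmanagedHeapSizeGCLimit`; it is illustrative only, not the MemoryManager code itself.

```cpp
#include <algorithm>
#include <cstddef>
#include <iostream>

// Assumed placeholder for Qt's MinUnmanagedHeapSizeGCLimit.
constexpr std::size_t MinLimit = 128 * 1024;

// Recompute the GC trigger limit for non-managed (malloc'd) memory based on
// how full the unmanaged heap is *after* sweeping, i.e. once freed memory
// has actually been returned.
std::size_t updatedLimit(std::size_t heapSize, std::size_t limit)
{
    if (3 * limit <= 4 * heapSize) {
        // more than 75% of the limit in use: double the limit so the next
        // allocation does not immediately trigger another GC
        limit = std::max(limit, heapSize) * 2;
    } else if (heapSize * 4 <= limit) {
        // less than 25% in use: halve the limit, but never below the floor
        limit = std::max(MinLimit, limit / 2);
    }
    return limit;
}

int main()
{
    std::size_t limit = MinLimit;
    // Simulate heap sizes measured after successive sweeps: growing, then shrinking.
    for (std::size_t heap : {100 * 1024, 200 * 1024, 400 * 1024, 50 * 1024}) {
        limit = updatedLimit(heap, limit);
        std::cout << "heap=" << heap << " -> limit=" << limit << '\n';
    }
}
```

The 75%/25% thresholds give the limit some hysteresis, so it does not oscillate when the unmanaged heap hovers near a single boundary.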
Diffstat (limited to 'src/qml/memory/qv4mm.cpp')
-rw-r--r-- | src/qml/memory/qv4mm.cpp | 37
1 file changed, 28 insertions, 9 deletions
```diff
diff --git a/src/qml/memory/qv4mm.cpp b/src/qml/memory/qv4mm.cpp
index efaf15dcf9..a47a62de10 100644
--- a/src/qml/memory/qv4mm.cpp
+++ b/src/qml/memory/qv4mm.cpp
@@ -883,6 +883,9 @@ GCState doSweep(GCStateMachine *that, ExtraData &)
     mm->gcBlocked = MemoryManager::Unblocked;
     mm->m_markStack.reset();
     mm->engine->isGCOngoing = false;
+
+    mm->updateUnmanagedHeapSizeGCLimit();
+
     return Invalid;
 }
 
@@ -1134,7 +1137,8 @@ void MemoryManager::sweep(bool lastSweep, ClassDestroyStatsCallback classCountPt
     icAllocator.resetBlackBits();
     usedSlotsAfterLastFullSweep = blockAllocator.usedSlotsAfterLastSweep
             + icAllocator.usedSlotsAfterLastSweep;
-    gcBlocked = false;
+    updateUnmanagedHeapSizeGCLimit();
+    gcBlocked = MemoryManager::Unblocked;
 }
 
 /*
@@ -1328,14 +1332,6 @@ void MemoryManager::runGC()
 
     if (gcStats)
         statistics.maxUsedMem = qMax(statistics.maxUsedMem, getUsedMem() + getLargeItemsMem());
-
-    if (aggressiveGC) {
-        // ensure we don't 'loose' any memory
-        Q_ASSERT(blockAllocator.allocatedMem()
-                 == blockAllocator.usedMem() + dumpBins(&blockAllocator, nullptr));
-        Q_ASSERT(icAllocator.allocatedMem()
-                 == icAllocator.usedMem() + dumpBins(&icAllocator, nullptr));
-    }
 }
 
 size_t MemoryManager::getUsedMem() const
@@ -1353,6 +1349,29 @@ size_t MemoryManager::getLargeItemsMem() const
     return hugeItemAllocator.usedMem();
 }
 
+void MemoryManager::updateUnmanagedHeapSizeGCLimit()
+{
+    if (3*unmanagedHeapSizeGCLimit <= 4 * unmanagedHeapSize) {
+        // more than 75% full, raise limit
+        unmanagedHeapSizeGCLimit = std::max(unmanagedHeapSizeGCLimit,
+                                            unmanagedHeapSize) * 2;
+    } else if (unmanagedHeapSize * 4 <= unmanagedHeapSizeGCLimit) {
+        // less than 25% full, lower limit
+        unmanagedHeapSizeGCLimit = qMax(std::size_t(MinUnmanagedHeapSizeGCLimit),
+                                        unmanagedHeapSizeGCLimit/2);
+    }
+
+    if (aggressiveGC && !engine->inShutdown) {
+        // ensure we don't 'loose' any memory
+        // but not during shutdown, because than we skip parts of sweep
+        // and use freeAll instead
+        Q_ASSERT(blockAllocator.allocatedMem()
+                 == blockAllocator.usedMem() + dumpBins(&blockAllocator, nullptr));
+        Q_ASSERT(icAllocator.allocatedMem()
+                 == icAllocator.usedMem() + dumpBins(&icAllocator, nullptr));
+    }
+}
+
 void MemoryManager::registerWeakMap(Heap::MapObject *map)
 {
     map->nextWeakMap = weakMaps;
```
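The reason the call site moved into doSweep rather than runGC is visible in the state-machine shape of the incremental collector: a single GC invocation may return after a mark increment, before anything has been freed. The toy model below is not Qt's GCStateMachine and its names are invented for illustration; it only shows the ordering constraint that motivates the patch, namely that only the step which completes sweeping knows how much memory was actually released and is therefore the safe place to recompute limits.

```cpp
#include <iostream>

enum class GCState { MarkStart, MarkSome, Sweep, Invalid };

// One incremental GC step; limits may only be touched in the sweep step.
GCState runOneStep(GCState s, bool &limitsUpdated)
{
    switch (s) {
    case GCState::MarkStart:
        return GCState::MarkSome;   // nothing has been freed yet
    case GCState::MarkSome:
        return GCState::Sweep;      // still nothing freed
    case GCState::Sweep:
        limitsUpdated = true;       // sweep finished: memory really released,
                                    // so recomputing limits is meaningful now
        return GCState::Invalid;
    case GCState::Invalid:
        break;
    }
    return GCState::Invalid;
}

int main()
{
    bool limitsUpdated = false;
    GCState s = GCState::MarkStart;
    // The engine drives the collector one increment at a time; an early
    // return after a mark step must not resize any limits.
    while (s != GCState::Invalid)
        s = runOneStep(s, limitsUpdated);
    std::cout << std::boolalpha
              << "limits updated after sweep: " << limitsUpdated << '\n';
}
```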