Bug 989390 - Start background sweeping as soon as arenas are available to sweep r=terrence

This commit is contained in:
Jon Coppeard 2014-11-25 10:19:25 +00:00
parent e083d47e73
commit e3f043936c
8 changed files with 250 additions and 78 deletions

View File

@ -383,6 +383,14 @@ class LifoAlloc
return n;
}
// Get the total size of the arena chunks (including unused space).
size_t computedSizeOfExcludingThis() const {
    // Walk every chunk in the list and sum its computed size (used and
    // unused space alike); no MallocSizeOf callback is needed.
    size_t total = 0;
    for (BumpChunk *c = first; c; c = c->next())
        total += c->computedSizeOfIncludingThis();
    return total;
}
// Like sizeOfExcludingThis(), but includes the size of the LifoAlloc itself.
size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
return mallocSizeOf(this) + sizeOfExcludingThis(mallocSizeOf);

View File

@ -147,7 +147,6 @@ struct MovingTracer : JSTracer {
};
#endif
} /* namespace gc */
} /* namespace js */

View File

@ -599,7 +599,8 @@ class GCRuntime
void decommitAllWithoutUnlocking(const AutoLockGC &lock);
void decommitArenas(AutoLockGC &lock);
void expireChunksAndArenas(bool shouldShrink, AutoLockGC &lock);
void sweepBackgroundThings();
void queueZonesForBackgroundSweep(js::gc::ZoneList& zones);
void sweepBackgroundThings(js::gc::ZoneList &zones, ThreadType threadType);
void assertBackgroundSweepingFinished();
bool shouldCompact();
#ifdef JSGC_COMPACTING
@ -758,9 +759,8 @@ class GCRuntime
/* Whether any black->gray edges were found during marking. */
bool foundBlackGrayEdges;
/* List head of zones to be swept in the background. */
JS::Zone *sweepingZones;
/* Singly linked list of zones to be swept in the background. */
js::gc::ZoneList backgroundSweepZones;
/*
* Free LIFO blocks are transferred to this allocator before being freed on
* the background GC thread.

View File

@ -19,6 +19,8 @@
using namespace js;
using namespace js::gc;
Zone * const Zone::NotOnList = reinterpret_cast<Zone *>(1);
JS::Zone::Zone(JSRuntime *rt)
: JS::shadow::Zone(rt, &rt->gc.marker),
allocator(this),
@ -37,7 +39,8 @@ JS::Zone::Zone(JSRuntime *rt)
gcState_(NoGC),
gcScheduled_(false),
gcPreserveCode_(false),
jitUsingBarriers_(false)
jitUsingBarriers_(false),
listNext_(NotOnList)
{
/* Ensure that there are no vtables to mess us up here. */
MOZ_ASSERT(reinterpret_cast<JS::shadow::Zone *>(this) ==
@ -265,3 +268,95 @@ js::ZonesIter::atAtomsZone(JSRuntime *rt)
{
return rt->isAtomsZone(*it);
}
// Whether this zone is currently linked into a ZoneList. NotOnList is a
// sentinel value (not a valid Zone pointer) that distinguishes "not on any
// list" from "last element of a list" (whose listNext_ is nullptr).
bool Zone::isOnList()
{
return listNext_ != NotOnList;
}
// Construct an empty list: both head and tail are null.
ZoneList::ZoneList()
: head(nullptr), tail(nullptr)
{}
// Construct a list containing exactly one zone. The zone must not already
// be linked into another list.
ZoneList::ZoneList(Zone *zone)
: head(zone), tail(zone)
{
MOZ_ASSERT(!zone->isOnList());
zone->listNext_ = nullptr; // nullptr terminates the list (see isOnList()).
}
// Debug-only validation of the list invariants: head and tail are either
// both null (empty list) or both non-null, tail is reachable from head via
// listNext_, and the list is terminated by a null listNext_ at the tail.
void
ZoneList::check() const
{
#ifdef DEBUG
MOZ_ASSERT((head == nullptr) == (tail == nullptr));
if (head) {
Zone *zone = head;
// Walk from head until we reach tail; a null link before tail would
// crash/assert here, catching a broken chain.
while (zone != tail) {
zone = zone->listNext_;
MOZ_ASSERT(zone);
}
MOZ_ASSERT(!zone->listNext_);
}
#endif
}
bool ZoneList::isEmpty() const
{
    // An empty list has no head (and, per check(), no tail either).
    return !head;
}
// Return the first zone in the list without removing it. The list must be
// non-empty.
Zone *
ZoneList::front() const
{
MOZ_ASSERT(!isEmpty());
return head;
}
// Append a single zone to the tail of this list. The zone must not already
// be on a list; it is wrapped in a temporary single-element list and spliced
// on via the list-append overload.
void
ZoneList::append(Zone *zone)
{
ZoneList singleZone(zone);
append(singleZone);
}
// Splice the contents of |other| onto the tail of this list.
//
// NOTE(review): |other| is left still pointing at the spliced zones; callers
// appear to treat it as consumed after this call — confirm no caller reuses
// it.
void
ZoneList::append(ZoneList &other)
{
    check();
    other.check();

    // Appending an empty list is a no-op. Without this guard an empty
    // |other| would either trip the assert below (both tails null) or
    // corrupt this list by overwriting a valid tail with nullptr.
    if (other.isEmpty())
        return;

    // Guards against splicing a list onto itself, which would create a
    // cycle (only catches the case where the tails coincide).
    MOZ_ASSERT(tail != other.tail);

    if (tail)
        tail->listNext_ = other.head;
    else
        head = other.head;
    tail = other.tail;
}
// Remove and return the first zone in the list. The removed zone's link
// field is reset to the NotOnList sentinel so isOnList() becomes false for
// it. The list must be non-empty.
Zone *
ZoneList::removeFront()
{
MOZ_ASSERT(!isEmpty());
check();
Zone *front = head;
head = head->listNext_;
if (!head)
tail = nullptr; // Removed the last element; list is now empty.
front->listNext_ = Zone::NotOnList;
return front;
}
void
ZoneList::transferFrom(ZoneList& other)
{
    // Steal the entire contents of |other|, leaving it empty. This list
    // must itself be empty to begin with.
    MOZ_ASSERT(isEmpty());
    other.check();

    head = other.head;
    other.head = nullptr;
    tail = other.tail;
    other.tail = nullptr;
}

View File

@ -250,6 +250,10 @@ struct Zone : public JS::shadow::Zone,
js::jit::JitZone *createJitZone(JSContext *cx);
bool isQueuedForBackgroundSweep() {
return isOnList();
}
public:
js::Allocator allocator;
@ -314,6 +318,12 @@ struct Zone : public JS::shadow::Zone,
bool gcPreserveCode_;
bool jitUsingBarriers_;
// Allow zones to be linked into a list
friend class js::gc::ZoneList;
static Zone * const NotOnList;
Zone *listNext_;
bool isOnList();
friend bool js::CurrentThreadCanAccessZone(Zone *zone);
friend class js::gc::GCRuntime;
};

View File

@ -560,7 +560,7 @@ FinalizeTypedArenas(FreeOp *fop,
{
// When operating in the foreground, take the lock at the top.
Maybe<AutoLockGC> maybeLock;
if (!fop->runtime()->gc.isBackgroundSweeping())
if (!fop->onBackgroundThread())
maybeLock.emplace(fop->runtime());
/*
@ -580,9 +580,9 @@ FinalizeTypedArenas(FreeOp *fop,
if (nmarked) {
dest.insertAt(aheader, nfree);
} else if (keepArenas) {
} else if (keepArenas == ArenaLists::KEEP_ARENAS) {
aheader->chunk()->recycleArena(aheader, dest, thingKind, thingsPerArena);
} else if (fop->runtime()->gc.isBackgroundSweeping()) {
} else if (fop->onBackgroundThread()) {
// When background sweeping, take the lock around each release so
// that we do not block the foreground for extended periods.
AutoLockGC lock(fop->runtime());
@ -1159,7 +1159,6 @@ GCRuntime::GCRuntime(JSRuntime *rt) :
lastMarkSlice(false),
sweepOnBackgroundThread(false),
foundBlackGrayEdges(false),
sweepingZones(nullptr),
freeLifoAlloc(JSRuntime::TEMP_LIFO_ALLOC_PRIMARY_CHUNK_SIZE),
zoneGroupIndex(0),
zoneGroups(nullptr),
@ -2869,7 +2868,6 @@ inline void
ArenaLists::queueForBackgroundSweep(FreeOp *fop, AllocKind thingKind)
{
MOZ_ASSERT(IsBackgroundFinalized(thingKind));
MOZ_ASSERT(!fop->runtime()->gc.isBackgroundSweeping());
ArenaList *al = &arenaLists[thingKind];
if (al->isEmpty()) {
@ -2885,10 +2883,11 @@ ArenaLists::queueForBackgroundSweep(FreeOp *fop, AllocKind thingKind)
}
/*static*/ void
ArenaLists::backgroundFinalize(FreeOp *fop, ArenaHeader *listHead)
ArenaLists::backgroundFinalize(FreeOp *fop, ArenaHeader *listHead, ArenaHeader **empty)
{
MOZ_ASSERT(listHead);
MOZ_ASSERT(!InParallelSection());
MOZ_ASSERT(empty);
AllocKind thingKind = listHead->getAllocKind();
Zone *zone = listHead->zone;
@ -2897,9 +2896,11 @@ ArenaLists::backgroundFinalize(FreeOp *fop, ArenaHeader *listHead)
SortedArenaList finalizedSorted(thingsPerArena);
SliceBudget budget;
FinalizeArenas(fop, &listHead, finalizedSorted, thingKind, budget, RELEASE_ARENAS);
FinalizeArenas(fop, &listHead, finalizedSorted, thingKind, budget, KEEP_ARENAS);
MOZ_ASSERT(!listHead);
finalizedSorted.extractEmpty(empty);
// When arenas are queued for background finalization, all arenas are moved
// to arenaListsToSweep[], leaving the arenaLists[] empty. However, new
// arenas may be allocated before background finalization finishes; now that
@ -3444,38 +3445,41 @@ GCRuntime::expireChunksAndArenas(bool shouldShrink, AutoLockGC &lock)
}
void
GCRuntime::sweepBackgroundThings()
GCRuntime::sweepBackgroundThings(ZoneList &zones, ThreadType threadType)
{
/*
* We must finalize in the correct order, see comments in
* finalizeObjects.
*/
FreeOp fop(rt);
for (unsigned phase = 0 ; phase < ArrayLength(BackgroundFinalizePhases) ; ++phase) {
for (Zone *zone = sweepingZones; zone; zone = zone->gcNextGraphNode) {
// We must finalize thing kinds in the order specified by BackgroundFinalizePhases.
FreeOp fop(rt, threadType);
while (!zones.isEmpty()) {
Zone *zone = zones.front();
ArenaHeader *emptyArenas = nullptr;
for (unsigned phase = 0 ; phase < ArrayLength(BackgroundFinalizePhases) ; ++phase) {
for (unsigned index = 0 ; index < BackgroundFinalizePhases[phase].length ; ++index) {
AllocKind kind = BackgroundFinalizePhases[phase].kinds[index];
ArenaHeader *arenas = zone->allocator.arenas.arenaListsToSweep[kind];
if (arenas)
ArenaLists::backgroundFinalize(&fop, arenas);
ArenaLists::backgroundFinalize(&fop, arenas, &emptyArenas);
}
}
}
sweepingZones = nullptr;
AutoLockGC lock(rt);
ReleaseArenaList(rt, emptyArenas, lock);
zones.removeFront();
}
}
void
GCRuntime::assertBackgroundSweepingFinished()
{
#ifdef DEBUG
MOZ_ASSERT(!sweepingZones);
MOZ_ASSERT(backgroundSweepZones.isEmpty());
for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
MOZ_ASSERT(!zone->isOnList());
for (unsigned i = 0; i < FINALIZE_LIMIT; ++i) {
MOZ_ASSERT(!zone->allocator.arenas.arenaListsToSweep[i]);
MOZ_ASSERT(zone->allocator.arenas.doneBackgroundFinalize(AllocKind(i)));
}
}
MOZ_ASSERT(freeLifoAlloc.computedSizeOfExcludingThis() == 0);
#endif
}
@ -3622,22 +3626,43 @@ BackgroundAllocTask::run()
}
void
GCHelperState::startBackgroundSweep()
// Add |zones| to the list awaiting background sweeping and kick off the
// background sweep thread if it is idle.
// NOTE(review): the helper-thread lock is taken before the GC lock here;
// presumably this matches the lock ordering used elsewhere — confirm.
GCRuntime::queueZonesForBackgroundSweep(ZoneList &zones)
{
AutoLockHelperThreadState helperLock;
AutoLockGC lock(rt);
backgroundSweepZones.append(zones);
helperState.maybeStartBackgroundSweep(lock);
}
// Transfer |lifo|'s unused blocks to the GC's freeLifoAlloc so they can be
// freed later (on the background thread). Takes the GC lock because the
// background sweep thread also touches freeLifoAlloc.
void
GCRuntime::freeUnusedLifoBlocksAfterSweeping(LifoAlloc *lifo)
{
MOZ_ASSERT(isHeapBusy());
AutoLockGC lock(rt);
freeLifoAlloc.transferUnusedFrom(lifo);
}
// Like freeUnusedLifoBlocksAfterSweeping, but transfers all of |lifo|'s
// blocks (used and unused) for deferred freeing, under the GC lock.
void
GCRuntime::freeAllLifoBlocksAfterSweeping(LifoAlloc *lifo)
{
MOZ_ASSERT(isHeapBusy());
AutoLockGC lock(rt);
freeLifoAlloc.transferFrom(lifo);
}
void
GCHelperState::maybeStartBackgroundSweep(const AutoLockGC &lock)
{
MOZ_ASSERT(CanUseExtraThreads());
AutoLockHelperThreadState helperLock;
AutoLockGC lock(rt);
MOZ_ASSERT(state() == IDLE);
MOZ_ASSERT(!sweepFlag);
sweepFlag = true;
shrinkFlag = false;
startBackgroundThread(SWEEPING);
if (state() == IDLE)
startBackgroundThread(SWEEPING);
}
/* Must be called with the GC lock taken. */
void
GCHelperState::startBackgroundShrink()
GCHelperState::startBackgroundShrink(const AutoLockGC &lock)
{
MOZ_ASSERT(CanUseExtraThreads());
switch (state()) {
@ -3667,13 +3692,16 @@ GCHelperState::waitBackgroundSweepEnd()
void
GCHelperState::doSweep(AutoLockGC &lock)
{
if (sweepFlag) {
while (sweepFlag) {
sweepFlag = false;
ZoneList zones;
zones.transferFrom(rt->gc.backgroundSweepZones);
LifoAlloc freeLifoAlloc(JSRuntime::TEMP_LIFO_ALLOC_PRIMARY_CHUNK_SIZE);
freeLifoAlloc.transferFrom(&rt->gc.freeLifoAlloc);
AutoUnlockGC unlock(lock);
rt->gc.sweepBackgroundThings();
rt->gc.freeLifoAlloc.freeAll();
rt->gc.sweepBackgroundThings(zones, BackgroundThread);
freeLifoAlloc.freeAll();
}
bool shrinking = shrinkFlag;
@ -3762,6 +3790,8 @@ Zone::sweepCompartments(FreeOp *fop, bool keepAtleastOne, bool lastGC)
void
GCRuntime::sweepZones(FreeOp *fop, bool lastGC)
{
AutoLockGC lock(rt); // Avoid race with background sweeping.
JSZoneCallback callback = rt->destroyZoneCallback;
/* Skip the atomsCompartment zone. */
@ -3775,10 +3805,13 @@ GCRuntime::sweepZones(FreeOp *fop, bool lastGC)
Zone *zone = *read++;
if (zone->wasGCStarted()) {
if ((zone->allocator.arenas.arenaListsAreEmpty() && !zone->hasMarkedCompartments()) ||
lastGC)
if ((!zone->isQueuedForBackgroundSweep() &&
zone->allocator.arenas.arenaListsAreEmpty() &&
!zone->hasMarkedCompartments()) || lastGC)
{
zone->allocator.arenas.checkEmptyFreeLists();
AutoUnlockGC unlock(lock);
if (callback)
callback(zone);
zone->sweepCompartments(fop, false, lastGC);
@ -3793,27 +3826,12 @@ GCRuntime::sweepZones(FreeOp *fop, bool lastGC)
zones.resize(write - zones.begin());
}
void
GCRuntime::freeUnusedLifoBlocksAfterSweeping(LifoAlloc *lifo)
{
MOZ_ASSERT(isHeapBusy());
freeLifoAlloc.transferUnusedFrom(lifo);
}
void
GCRuntime::freeAllLifoBlocksAfterSweeping(LifoAlloc *lifo)
{
MOZ_ASSERT(isHeapBusy());
freeLifoAlloc.transferFrom(lifo);
}
void
GCRuntime::purgeRuntime()
{
for (GCCompartmentsIter comp(rt); !comp.done(); comp.next())
comp->purge();
freeUnusedLifoBlocksAfterSweeping(&rt->tempLifoAlloc);
rt->interpreterStack().purge(rt);
@ -4264,6 +4282,8 @@ js::gc::MarkingValidator::nonIncrementalMark()
JSRuntime *runtime = gc->rt;
GCMarker *gcmarker = &gc->marker;
gc->waitBackgroundSweepEnd();
/* Save existing mark bits. */
for (auto chunk = gc->allNonEmptyChunks(); !chunk.done(); chunk.next()) {
ChunkBitmap *bitmap = &chunk->bitmap;
@ -4372,6 +4392,8 @@ js::gc::MarkingValidator::validate()
if (!initialized)
return;
gc->waitBackgroundSweepEnd();
for (auto chunk = gc->allNonEmptyChunks(); !chunk.done(); chunk.next()) {
BitmapMap::Ptr ptr = map.lookup(chunk);
if (!ptr)
@ -5169,12 +5191,20 @@ GCRuntime::beginSweepingZoneGroup()
void
GCRuntime::endSweepingZoneGroup()
{
/* Update the GC state for zones we have swept and unlink the list. */
/* Update the GC state for zones we have swept. */
for (GCZoneGroupIter zone(rt); !zone.done(); zone.next()) {
MOZ_ASSERT(zone->isGCSweeping());
zone->setGCState(Zone::Finished);
}
/* Start background thread to sweep zones if required. */
if (sweepOnBackgroundThread) {
ZoneList zones;
for (GCZoneGroupIter zone(rt); !zone.done(); zone.next())
zones.append(zone);
queueZonesForBackgroundSweep(zones);
}
/* Reset the list of arenas marked as being allocated during sweep phase. */
while (ArenaHeader *arena = arenasAllocatedDuringSweep) {
arenasAllocatedDuringSweep = arena->getNextAllocDuringSweep();
@ -5483,18 +5513,14 @@ GCRuntime::endSweepPhase(bool lastGC)
grayBitsValid = true;
}
/* Set up list of zones for sweeping of background things. */
MOZ_ASSERT(!sweepingZones);
for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
zone->gcNextGraphNode = sweepingZones;
sweepingZones = zone;
}
/* If not sweeping on background thread then we must do it here. */
if (!sweepOnBackgroundThread) {
gcstats::AutoPhase ap(stats, gcstats::PHASE_DESTROY);
sweepBackgroundThings();
ZoneList zones;
for (GCZonesIter zone(rt); !zone.done(); zone.next())
zones.append(zone);
sweepBackgroundThings(zones, MainThread);
/*
* Destroy arenas after we finished the sweeping so finalizers can
@ -5534,9 +5560,6 @@ GCRuntime::endSweepPhase(bool lastGC)
}
}
#endif
if (sweepOnBackgroundThread)
helperState.startBackgroundSweep();
}
#ifdef JSGC_COMPACTING
@ -5710,6 +5733,8 @@ GCRuntime::resetIncrementalGC(const char *reason)
rt->setNeedsIncrementalBarrier(false);
AssertNeedsBarrierFlagsConsistent(rt);
freeLifoAlloc.freeAll();
incrementalState = NO_INCREMENTAL;
MOZ_ASSERT(!marker.shouldCheckCompartments());
@ -6078,9 +6103,8 @@ GCRuntime::gcCycle(bool incremental, SliceBudget &budget, JSGCInvocationKind gck
{
gcstats::AutoPhase ap(stats, gcstats::PHASE_WAIT_BACKGROUND_THREAD);
// As we are about to purge caches and clear the mark bits, wait for
// background finalization to finish. It cannot run between slices
// so we only need to wait on the first slice.
// As we are about to clear the mark bits, wait for background
// finalization to finish. We only need to wait on the first slice.
if (incrementalState == NO_INCREMENTAL)
waitBackgroundSweepEnd();
@ -6389,7 +6413,7 @@ GCRuntime::shrinkBuffers()
MOZ_ASSERT(!rt->isHeapBusy());
if (CanUseExtraThreads())
helperState.startBackgroundShrink();
helperState.startBackgroundShrink(lock);
else
expireChunksAndArenas(true, lock);
}

View File

@ -39,6 +39,12 @@ enum HeapState {
MinorCollecting // doing a GC of the minor heap (nursery)
};
// Identifies which thread a FreeOp is running on, so finalization code can
// distinguish main-thread sweeping from background-thread sweeping (see
// FreeOp::onBackgroundThread).
enum ThreadType
{
MainThread,
BackgroundThread
};
namespace jit {
class JitCode;
}
@ -845,7 +851,7 @@ class ArenaLists
bool foregroundFinalize(FreeOp *fop, AllocKind thingKind, SliceBudget &sliceBudget,
SortedArenaList &sweepList);
static void backgroundFinalize(FreeOp *fop, ArenaHeader *listHead);
static void backgroundFinalize(FreeOp *fop, ArenaHeader *listHead, ArenaHeader **empty);
void wipeDuringParallelExecution(JSRuntime *rt);
@ -1040,11 +1046,8 @@ class GCHelperState
void work();
/* Must be called with the GC lock taken. */
void startBackgroundSweep();
/* Must be called with the GC lock taken. */
void startBackgroundShrink();
void maybeStartBackgroundSweep(const AutoLockGC &lock);
void startBackgroundShrink(const AutoLockGC &lock);
/* Must be called without the GC lock taken. */
void waitBackgroundSweepEnd();
@ -1457,6 +1460,34 @@ class AutoEnterOOMUnsafeRegion {};
bool
IsInsideGGCNursery(const gc::Cell *cell);
// A singly linked list of zones, threaded through Zone::listNext_. Used to
// queue zones for background sweeping. Non-copyable; ownership of the
// linked zones is moved between lists via transferFrom/append.
class ZoneList
{
// NOTE(review): End is declared here but the implementation visible in
// this patch uses Zone::NotOnList as the sentinel — confirm End is
// actually defined/used somewhere, otherwise it looks like a leftover.
static Zone * const End;
Zone *head; // First zone, or nullptr if the list is empty.
Zone *tail; // Last zone, or nullptr if the list is empty.
public:
ZoneList();
explicit ZoneList(Zone *singleZone);
bool isEmpty() const;
Zone *front() const;
void append(Zone *zone);
void append(ZoneList& list);
Zone *removeFront();
void transferFrom(ZoneList &other);
private:
void check() const;
// Copying is disallowed: two lists sharing the same link fields would
// corrupt each other.
ZoneList(const ZoneList &other) MOZ_DELETE;
ZoneList &operator=(const ZoneList &other) MOZ_DELETE;
};
} /* namespace gc */
#ifdef DEBUG

View File

@ -364,14 +364,15 @@ class NewObjectCache
class FreeOp : public JSFreeOp
{
Vector<void *, 0, SystemAllocPolicy> freeLaterList;
ThreadType threadType;
public:
static FreeOp *get(JSFreeOp *fop) {
return static_cast<FreeOp *>(fop);
}
explicit FreeOp(JSRuntime *rt)
: JSFreeOp(rt)
explicit FreeOp(JSRuntime *rt, ThreadType thread = MainThread)
: JSFreeOp(rt), threadType(thread)
{}
~FreeOp() {
@ -379,6 +380,10 @@ class FreeOp : public JSFreeOp
free_(freeLaterList[i]);
}
bool onBackgroundThread() {
return threadType == BackgroundThread;
}
inline void free_(void *p);
inline void freeLater(void *p);