Bug 1202865 - Split out Zone selection from stats collection and malloc bytes reset; r=jonco

This commit is contained in:
Terrence Cole 2015-09-10 14:11:56 -07:00
parent c11064bbf3
commit f58052852c
3 changed files with 50 additions and 25 deletions

View File

@ -720,19 +720,19 @@ class GCRuntime
void setAlwaysPreserveCode() { alwaysPreserveCode = true; }
bool isIncrementalGCAllowed() { return incrementalAllowed; }
bool isIncrementalGCAllowed() const { return incrementalAllowed; }
void disallowIncrementalGC() { incrementalAllowed = false; }
bool isIncrementalGCEnabled() { return mode == JSGC_MODE_INCREMENTAL && incrementalAllowed; }
bool isIncrementalGCInProgress() { return state() != gc::NO_INCREMENTAL; }
bool isIncrementalGCEnabled() const { return mode == JSGC_MODE_INCREMENTAL && incrementalAllowed; }
bool isIncrementalGCInProgress() const { return state() != gc::NO_INCREMENTAL; }
bool isGenerationalGCEnabled() { return generationalDisabled == 0; }
bool isGenerationalGCEnabled() const { return generationalDisabled == 0; }
void disableGenerationalGC();
void enableGenerationalGC();
void disableCompactingGC();
void enableCompactingGC();
bool isCompactingGCEnabled();
bool isCompactingGCEnabled() const;
void setGrayRootsTracer(JSTraceDataOp traceOp, void* data);
bool addBlackRootsTracer(JSTraceDataOp traceOp, void* data);

View File

@ -1866,7 +1866,7 @@ GCRuntime::enableCompactingGC()
}
bool
GCRuntime::isCompactingGCEnabled()
GCRuntime::isCompactingGCEnabled() const
{
MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
return compactingEnabled && compactingDisabledCount == 0;
@ -6029,6 +6029,35 @@ class AutoDisableStoreBuffer
}
};
/*
 * RAII helper: on construction, mark zones as candidates for the upcoming
 * collection according to the GC mode and per-zone heuristics; on
 * destruction, clear the scheduled flag on every zone again.
 */
class AutoScheduleZonesForGC
{
    JSRuntime* runtime_;

  public:
    explicit AutoScheduleZonesForGC(JSRuntime* rt) : runtime_(rt) {
        for (ZonesIter zone(runtime_, WithAtoms); !zone.done(); zone.next()) {
            /* In global mode every zone takes part in the collection. */
            if (runtime_->gc.gcMode() == JSGC_MODE_GLOBAL)
                zone->scheduleGC();

            /* Heuristic: keep barrier-enabled zones scheduled to avoid resets. */
            if (runtime_->gc.isIncrementalGCInProgress() && zone->needsIncrementalBarrier())
                zone->scheduleGC();

            /* Heuristic: schedule zones over their allocation trigger to
             * reduce the total number of collections. */
            bool highFrequency = runtime_->gc.schedulingState.inHighFrequencyGCMode();
            if (zone->usage.gcBytes() >= zone->threshold.allocTrigger(highFrequency))
                zone->scheduleGC();
        }
    }

    ~AutoScheduleZonesForGC() {
        for (ZonesIter zone(runtime_, WithAtoms); !zone.done(); zone.next())
            zone->unscheduleGC();
    }
};
} /* anonymous namespace */
/*
@ -6117,11 +6146,9 @@ GCRuntime::gcCycle(bool nonincrementalByAPI, SliceBudget& budget, JS::gcreason::
clearSelectedForMarking();
#endif
/* Clear gcMallocBytes for all compartments */
for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
/* Clear gcMallocBytes for all zones. */
for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next())
zone->resetGCMallocBytes();
zone->unscheduleGC();
}
resetMallocBytes();
@ -6152,17 +6179,6 @@ GCRuntime::scanZonesBeforeGC()
{
gcstats::ZoneGCStats zoneStats;
for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
if (mode == JSGC_MODE_GLOBAL)
zone->scheduleGC();
/* This is a heuristic to avoid resets. */
if (isIncrementalGCInProgress() && zone->needsIncrementalBarrier())
zone->scheduleGC();
/* This is a heuristic to reduce the total number of collections. */
if (zone->usage.gcBytes() >= zone->threshold.allocTrigger(schedulingState.inHighFrequencyGCMode()))
zone->scheduleGC();
zoneStats.zoneCount++;
if (zone->isGCScheduled()) {
zoneStats.collectedZoneCount++;
@ -6215,9 +6231,9 @@ GCRuntime::collect(bool nonincrementalByAPI, SliceBudget budget, JS::gcreason::R
return;
AutoTraceLog logGC(TraceLoggerForMainThread(rt), TraceLogger_GC);
AutoStopVerifyingBarriers av(rt, IsShutdownGC(reason));
AutoEnqueuePendingParseTasksAfterGC aept(*this);
AutoScheduleZonesForGC asz(rt);
gcstats::AutoGCSlice agc(stats, scanZonesBeforeGC(), invocationKind, budget, reason);
bool repeat = false;
@ -6270,9 +6286,12 @@ GCRuntime::collect(bool nonincrementalByAPI, SliceBudget budget, JS::gcreason::R
*/
repeat = (poked && cleanUpEverything) || wasReset || repeatForDeadZone;
} while (repeat);
}
if (!isIncrementalGCInProgress())
EnqueuePendingParseTasksAfterGC(rt);
/*
 * Re-enqueue off-thread parse tasks that were held back for GC, but only
 * once no incremental collection remains in progress.
 */
js::AutoEnqueuePendingParseTasksAfterGC::~AutoEnqueuePendingParseTasksAfterGC()
{
    if (gc_.isIncrementalGCInProgress())
        return;
    EnqueuePendingParseTasksAfterGC(gc_.rt);
}
SliceBudget

View File

@ -402,6 +402,12 @@ StartOffThreadParseScript(JSContext* cx, const ReadOnlyCompileOptions& options,
void
EnqueuePendingParseTasksAfterGC(JSRuntime* rt);
/*
 * RAII guard whose destructor (defined in the GC implementation file)
 * calls EnqueuePendingParseTasksAfterGC unless the observed GCRuntime
 * still has an incremental collection in progress.
 */
struct AutoEnqueuePendingParseTasksAfterGC {
    const gc::GCRuntime& gc_;
    explicit AutoEnqueuePendingParseTasksAfterGC(const gc::GCRuntime& gc) : gc_(gc) {}
    ~AutoEnqueuePendingParseTasksAfterGC();
};
/* Start a compression job for the specified token. */
bool
StartOffThreadCompression(ExclusiveContext* cx, SourceCompressionTask* task);