Bug 1030577 - Move some GC guards below the GC_BEGIN callback; r=jonco

--HG--
extra : rebase_source : 6c4b6576242047a07cbc2cf0ed018e48e523550d
Terrence Cole 2014-07-15 13:47:09 -07:00
parent 502fa82c14
commit 3dd46cba68
4 changed files with 87 additions and 73 deletions
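Summary of the reordering this patch performs: the zone scan in GCRuntime::collect() moves into a new scanZonesBeforeGC() helper, the stats slice (gcstats::AutoGCSlice, whose constructor fires the GC_CYCLE_BEGIN / GC_SLICE_BEGIN callback) is now opened before the collection loop, and the minor GC plus the store-buffer guard move down into gcCycle(), i.e. below that callback. A standalone toy sketch of the ordering change follows; every name in it is invented for illustration and none of it is the SpiderMonkey API:

    // toy_reorder.cpp -- standalone illustration (all names invented).
    // Compile with: c++ -std=c++11 toy_reorder.cpp
    #include <cstdio>

    struct AutoGCSlice {                  // stands in for gcstats::AutoGCSlice
        AutoGCSlice()  { std::puts("GC_CYCLE_BEGIN callback fires"); }
        ~AutoGCSlice() { std::puts("GC_CYCLE_END callback fires"); }
    };

    static void minorGC()      { std::puts("minor GC (a guard of the major GC)"); }
    static void majorGCCycle() { std::puts("major GC work"); }

    // Before the patch: the guards ran ahead of the begin callback.
    static void collectBefore() {
        minorGC();
        AutoGCSlice slice;
        majorGCCycle();
    }

    // After the patch: the begin callback fires first; the guards run
    // inside gcCycle(), below GC_BEGIN.
    static void collectAfter() {
        AutoGCSlice slice;
        minorGC();
        majorGCCycle();
    }

    int main() {
        std::puts("-- before --"); collectBefore();
        std::puts("-- after --");  collectAfter();
        return 0;
    }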


@@ -367,6 +367,7 @@ class GCRuntime
     void requestInterrupt(JS::gcreason::Reason reason);
     bool gcCycle(bool incremental, int64_t budget, JSGCInvocationKind gckind,
                  JS::gcreason::Reason reason);
+    gcstats::ZoneGCStats scanZonesBeforeGC();
     void budgetIncrementalGC(int64_t *budget);
     void resetIncrementalGC(const char *reason);
     void incrementalCollectSlice(int64_t budget, JS::gcreason::Reason reason,


@@ -363,9 +363,9 @@ Statistics::formatData(StatisticsSerializer &ss, uint64_t timestamp)
     else
         ss.appendString("Reason", ExplainReason(slices[0].reason));
     ss.appendDecimal("Total Time", "ms", t(total));
-    ss.appendNumber("Zones Collected", "%d", "", collectedCount);
-    ss.appendNumber("Total Zones", "%d", "", zoneCount);
-    ss.appendNumber("Total Compartments", "%d", "", compartmentCount);
+    ss.appendNumber("Zones Collected", "%d", "", zoneStats.collectedCount);
+    ss.appendNumber("Total Zones", "%d", "", zoneStats.zoneCount);
+    ss.appendNumber("Total Compartments", "%d", "", zoneStats.compartmentCount);
     ss.appendNumber("Minor GCs", "%d", "", counts[STAT_MINOR_GC]);
     ss.appendNumber("MMU (20ms)", "%d", "%", int(mmu20 * 100));
     ss.appendNumber("MMU (50ms)", "%d", "%", int(mmu50 * 100));
@@ -442,9 +442,6 @@ Statistics::Statistics(JSRuntime *rt)
     fp(nullptr),
     fullFormat(false),
     gcDepth(0),
-    collectedCount(0),
-    zoneCount(0),
-    compartmentCount(0),
     nonincrementalReason(nullptr),
     preBytes(0),
     phaseNestingDepth(0),
@@ -549,7 +546,7 @@ Statistics::endGC()
         int64_t sccTotal, sccLongest;
         sccDurations(&sccTotal, &sccLongest);
 
-        (*cb)(JS_TELEMETRY_GC_IS_COMPARTMENTAL, collectedCount == zoneCount ? 0 : 1);
+        (*cb)(JS_TELEMETRY_GC_IS_COMPARTMENTAL, !zoneStats.isCollectingAllZones());
         (*cb)(JS_TELEMETRY_GC_MS, t(total));
         (*cb)(JS_TELEMETRY_GC_MAX_PAUSE_MS, t(longest));
         (*cb)(JS_TELEMETRY_GC_MARK_MS, t(phaseTimes[PHASE_MARK]));
@@ -570,12 +567,9 @@ Statistics::endGC()
 }
 
 void
-Statistics::beginSlice(int collectedCount, int zoneCount, int compartmentCount,
-                       JS::gcreason::Reason reason)
+Statistics::beginSlice(const ZoneGCStats &zoneStats, JS::gcreason::Reason reason)
 {
-    this->collectedCount = collectedCount;
-    this->zoneCount = zoneCount;
-    this->compartmentCount = compartmentCount;
+    this->zoneStats = zoneStats;
 
     bool first = runtime->gc.state() == gc::NO_INCREMENTAL;
     if (first)
@@ -589,7 +583,7 @@ Statistics::beginSlice(int collectedCount, int zoneCount, int compartmentCount,
 
     // Slice callbacks should only fire for the outermost level
     if (++gcDepth == 1) {
-        bool wasFullGC = collectedCount == zoneCount;
+        bool wasFullGC = zoneStats.isCollectingAllZones();
         if (sliceCallback)
             (*sliceCallback)(runtime, first ? JS::GC_CYCLE_BEGIN : JS::GC_SLICE_BEGIN,
                              JS::GCDescription(!wasFullGC));
@@ -613,7 +607,7 @@ Statistics::endSlice()
 
     // Slice callbacks should only fire for the outermost level
     if (--gcDepth == 0) {
-        bool wasFullGC = collectedCount == zoneCount;
+        bool wasFullGC = zoneStats.isCollectingAllZones();
         if (sliceCallback)
             (*sliceCallback)(runtime, last ? JS::GC_CYCLE_END : JS::GC_SLICE_END,
                              JS::GCDescription(!wasFullGC));
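For context, an embedder observes these begin/end notifications through the slice callback. A hedged sketch of a consumer, assuming the JS::SetGCSliceCallback / JS::GCDescription API of this era (declared in js/GCAPI.h); this is not part of the patch:

    // slice_callback_sketch.cpp -- embedder-side consumer (assumed API).
    #include "js/GCAPI.h"

    static void
    OnGCSliceEvent(JSRuntime *rt, JS::GCProgress progress,
                   const JS::GCDescription &desc)
    {
        switch (progress) {
          case JS::GC_CYCLE_BEGIN: /* first slice of a collection */ break;
          case JS::GC_SLICE_BEGIN: /* a later slice begins        */ break;
          case JS::GC_SLICE_END:   /* a slice ends                */ break;
          case JS::GC_CYCLE_END:   /* the whole collection ends   */ break;
        }
        // The description is constructed as GCDescription(!wasFullGC) above,
        // so a "compartmental" description means a zonal, non-full GC.
    }

    // Installed once at startup (returns the previously set callback):
    //   JS::SetGCSliceCallback(rt, OnGCSliceEvent);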


@@ -75,14 +75,31 @@ enum Stat {
 
 class StatisticsSerializer;
 
-struct Statistics {
+struct ZoneGCStats
+{
+    /* Number of zones collected in this GC. */
+    int collectedCount;
+
+    /* Total number of zones in the Runtime at the start of this GC. */
+    int zoneCount;
+
+    /* Total number of compartments in the Runtime at the start of this GC. */
+    int compartmentCount;
+
+    bool isCollectingAllZones() const { return collectedCount == zoneCount; }
+
+    ZoneGCStats() : collectedCount(0), zoneCount(0), compartmentCount(0) {}
+};
+
+struct Statistics
+{
     explicit Statistics(JSRuntime *rt);
     ~Statistics();
 
     void beginPhase(Phase phase);
     void endPhase(Phase phase);
 
-    void beginSlice(int collectedCount, int zoneCount, int compartmentCount, JS::gcreason::Reason reason);
+    void beginSlice(const ZoneGCStats &zoneStats, JS::gcreason::Reason reason);
     void endSlice();
 
     void reset(const char *reason) { slices.back().resetReason = reason; }
@@ -115,9 +132,8 @@ struct Statistics {
      */
     int gcDepth;
 
-    int collectedCount;
-    int zoneCount;
-    int compartmentCount;
+    ZoneGCStats zoneStats;
+
     const char *nonincrementalReason;
 
     struct SliceData {
@@ -178,13 +194,12 @@ struct Statistics {
 
 struct AutoGCSlice
 {
-    AutoGCSlice(Statistics &stats, int collectedCount, int zoneCount, int compartmentCount,
-                JS::gcreason::Reason reason
+    AutoGCSlice(Statistics &stats, const ZoneGCStats &zoneStats, JS::gcreason::Reason reason
                 MOZ_GUARD_OBJECT_NOTIFIER_PARAM)
       : stats(stats)
     {
         MOZ_GUARD_OBJECT_NOTIFIER_INIT;
-        stats.beginSlice(collectedCount, zoneCount, compartmentCount, reason);
+        stats.beginSlice(zoneStats, reason);
     }
     ~AutoGCSlice() { stats.endSlice(); }
 
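Since ZoneGCStats is a plain aggregate, its semantics can be shown in isolation. A minimal standalone sketch; the struct body is copied from the patch and only the test scaffolding in main() is invented:

    // zonegcstats_demo.cpp -- struct copied from the patch; main() invented.
    #include <cassert>

    struct ZoneGCStats
    {
        int collectedCount;   // zones collected in this GC
        int zoneCount;        // zones in the Runtime when the GC started
        int compartmentCount; // compartments in the Runtime when the GC started

        bool isCollectingAllZones() const { return collectedCount == zoneCount; }

        ZoneGCStats() : collectedCount(0), zoneCount(0), compartmentCount(0) {}
    };

    int main()
    {
        ZoneGCStats stats;            // default-constructed: all counts zero
        stats.zoneCount = 4;
        stats.compartmentCount = 9;

        stats.collectedCount = 4;     // every zone scheduled: a full GC
        assert(stats.isCollectingAllZones());

        stats.collectedCount = 1;     // zonal GC; endGC() reports this state
        assert(!stats.isCollectingAllZones()); // via JS_TELEMETRY_GC_IS_COMPARTMENTAL
        return 0;
    }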


@@ -4964,6 +4964,33 @@ GCRuntime::budgetIncrementalGC(int64_t *budget)
         resetIncrementalGC("zone change");
 }
 
+namespace {
+
+#ifdef JSGC_GENERATIONAL
+class AutoDisableStoreBuffer
+{
+    StoreBuffer &sb;
+    bool prior;
+
+  public:
+    explicit AutoDisableStoreBuffer(GCRuntime *gc) : sb(gc->storeBuffer) {
+        prior = sb.isEnabled();
+        sb.disable();
+    }
+    ~AutoDisableStoreBuffer() {
+        if (prior)
+            sb.enable();
+    }
+};
+#else
+struct AutoDisableStoreBuffer
+{
+    AutoDisableStoreBuffer(GCRuntime *gc) {}
+};
+#endif
+
+} /* anonymous namespace */
+
 /*
  * Run one GC "cycle" (either a slice of incremental GC or an entire
  * non-incremental GC. We disable inlining to ensure that the bottom of the
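AutoDisableStoreBuffer is a standard save/disable/restore RAII guard: it remembers whether the buffer was enabled on entry, so nested guards do not re-enable a buffer that an outer scope still wants off. A self-contained sketch of the same idiom against a toy buffer type; ToyStoreBuffer and the surrounding names are invented here, only the shape of the guard mirrors the class above:

    // raii_guard_demo.cpp -- save/disable/restore idiom (toy types, invented).
    struct ToyStoreBuffer
    {
        bool enabled;
        ToyStoreBuffer() : enabled(true) {}
        bool isEnabled() const { return enabled; }
        void disable() { enabled = false; }
        void enable()  { enabled = true; }
    };

    class AutoDisableToyBuffer
    {
        ToyStoreBuffer &sb;
        bool prior;                  // state observed on entry

      public:
        explicit AutoDisableToyBuffer(ToyStoreBuffer &buffer) : sb(buffer) {
            prior = sb.isEnabled();
            sb.disable();            // nothing is recorded inside this scope
        }
        ~AutoDisableToyBuffer() {
            if (prior)               // restore only what was enabled on entry
                sb.enable();
        }
    };

    int main()
    {
        ToyStoreBuffer sb;
        {
            AutoDisableToyBuffer guard(sb);   // sb.isEnabled() == false here
        }                                     // destructor re-enables the buffer
        return sb.isEnabled() ? 0 : 1;        // exits 0: state was restored
    }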
@@ -4977,6 +5004,14 @@ MOZ_NEVER_INLINE bool
 GCRuntime::gcCycle(bool incremental, int64_t budget, JSGCInvocationKind gckind,
                    JS::gcreason::Reason reason)
 {
+    minorGC(reason);
+
+    /*
+     * Marking can trigger many incidental post barriers, some of them for
+     * objects which are not going to be live after the GC.
+     */
+    AutoDisableStoreBuffer adsb(this);
+
     AutoTraceSession session(rt, MajorCollecting);
 
     isNeeded = false;
@@ -5073,32 +5108,28 @@ ShouldCleanUpEverything(JS::gcreason::Reason reason, JSGCInvocationKind gckind)
            gckind == GC_SHRINK;
 }
 
-namespace {
-
-#ifdef JSGC_GENERATIONAL
-class AutoDisableStoreBuffer
+gcstats::ZoneGCStats
+GCRuntime::scanZonesBeforeGC()
 {
-    StoreBuffer &sb;
-    bool prior;
+    gcstats::ZoneGCStats zoneStats;
+    for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
+        if (mode == JSGC_MODE_GLOBAL)
+            zone->scheduleGC();
 
-  public:
-    explicit AutoDisableStoreBuffer(GCRuntime *gc) : sb(gc->storeBuffer) {
-        prior = sb.isEnabled();
-        sb.disable();
-    }
-    ~AutoDisableStoreBuffer() {
-        if (prior)
-            sb.enable();
-    }
-};
-#else
-struct AutoDisableStoreBuffer
-{
-    AutoDisableStoreBuffer(GCRuntime *gc) {}
-};
-#endif
+        /* This is a heuristic to avoid resets. */
+        if (incrementalState != NO_INCREMENTAL && zone->needsBarrier())
+            zone->scheduleGC();
 
-} /* anonymous namespace */
+        zoneStats.zoneCount++;
+        if (zone->isGCScheduled())
+            zoneStats.collectedCount++;
+    }
+
+    for (CompartmentsIter c(rt, WithAtoms); !c.done(); c.next())
+        zoneStats.compartmentCount++;
+
+    return zoneStats;
+}
 
 void
 GCRuntime::collect(bool incremental, int64_t budget, JSGCInvocationKind gckind,
@@ -5135,39 +5166,12 @@ GCRuntime::collect(bool incremental, int64_t budget, JSGCInvocationKind gckind,
 
     recordNativeStackTop();
 
-    int zoneCount = 0;
-    int compartmentCount = 0;
-    int collectedCount = 0;
-    for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
-        if (mode == JSGC_MODE_GLOBAL)
-            zone->scheduleGC();
-
-        /* This is a heuristic to avoid resets. */
-        if (incrementalState != NO_INCREMENTAL && zone->needsBarrier())
-            zone->scheduleGC();
-
-        zoneCount++;
-        if (zone->isGCScheduled())
-            collectedCount++;
-    }
-
-    for (CompartmentsIter c(rt, WithAtoms); !c.done(); c.next())
-        compartmentCount++;
+    gcstats::AutoGCSlice agc(stats, scanZonesBeforeGC(), reason);
 
     cleanUpEverything = ShouldCleanUpEverything(reason, gckind);
 
     bool repeat = false;
     do {
-        minorGC(reason);
-
-        /*
-         * Marking can trigger many incidental post barriers, some of them for
-         * objects which are not going to be live after the GC.
-         */
-        AutoDisableStoreBuffer adsb(this);
-
-        gcstats::AutoGCSlice agc(stats, collectedCount, zoneCount, compartmentCount, reason);
-
         /*
          * Let the API user decide to defer a GC if it wants to (unless this
          * is the last context). Invoke the callback regardless.