Bug 739899 - Eliminate the distinction between full and compartment GCs (r=igor)

Bill McCloskey 2012-04-02 18:29:11 -07:00
parent c8af5eba7e
commit 1eff4f5687
11 changed files with 160 additions and 143 deletions
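In summary, the patch replaces the boolean |full| parameter threaded through the collector with explicit compartment scheduling. A minimal sketch of the calling convention before and after this change (assuming a JSContext *cx and a JSCompartment *comp supplied by the embedding):

    // Before: entry points took an explicit |full| flag.
    //   GC(cx, true, GC_NORMAL, reason);   // full GC
    //   GC(cx, false, GC_NORMAL, reason);  // compartment GC
    //
    // After: callers schedule compartments first, then invoke one entry point.
    // A cycle is "full" exactly when every compartment is scheduled.
    PrepareForFullGC(cx->runtime);      // schedule every compartment...
    GC(cx, GC_NORMAL, gcreason::API);   // ...and collect them all

    PrepareCompartmentForGC(comp);      // or schedule a single compartment...
    GC(cx, GC_NORMAL, gcreason::API);   // ...and collect just that one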

@@ -328,7 +328,8 @@ Statistics::formatData(StatisticsSerializer &ss, uint64_t timestamp)
if (ss.isJSON())
ss.appendNumber("Timestamp", "%llu", "", (unsigned long long)timestamp);
ss.appendNumber("Total Time", "%.1f", "ms", t(total));
ss.appendString("Type", wasFullGC ? "global" : "compartment");
ss.appendNumber("Compartments Collected", "%d", "", collectedCount);
ss.appendNumber("Total Compartments", "%d", "", compartmentCount);
ss.appendNumber("MMU (20ms)", "%d", "%", int(mmu20 * 100));
ss.appendNumber("MMU (50ms)", "%d", "%", int(mmu50 * 100));
if (slices.length() > 1 || ss.isJSON())
@@ -398,7 +399,8 @@ Statistics::Statistics(JSRuntime *rt)
startupTime(PRMJ_Now()),
fp(NULL),
fullFormat(false),
wasFullGC(false),
collectedCount(0),
compartmentCount(0),
nonincrementalReason(NULL)
{
PodArrayZero(phaseTotals);
@@ -492,7 +494,7 @@ Statistics::endGC()
phaseTotals[i] += phaseTimes[i];
if (JSAccumulateTelemetryDataCallback cb = runtime->telemetryCallback) {
(*cb)(JS_TELEMETRY_GC_IS_COMPARTMENTAL, wasFullGC ? 0 : 1);
(*cb)(JS_TELEMETRY_GC_IS_COMPARTMENTAL, collectedCount == compartmentCount ? 0 : 1);
(*cb)(JS_TELEMETRY_GC_MS, t(gcDuration()));
(*cb)(JS_TELEMETRY_GC_MARK_MS, t(phaseTimes[PHASE_MARK]));
(*cb)(JS_TELEMETRY_GC_SWEEP_MS, t(phaseTimes[PHASE_SWEEP]));
@@ -508,9 +510,10 @@ Statistics::endGC()
}
void
Statistics::beginSlice(bool full, gcreason::Reason reason)
Statistics::beginSlice(int collectedCount, int compartmentCount, gcreason::Reason reason)
{
wasFullGC = full;
this->collectedCount = collectedCount;
this->compartmentCount = compartmentCount;
bool first = runtime->gcIncrementalState == gc::NO_INCREMENTAL;
if (first)
@@ -522,6 +525,7 @@ Statistics::beginSlice(bool full, gcreason::Reason reason)
if (JSAccumulateTelemetryDataCallback cb = runtime->telemetryCallback)
(*cb)(JS_TELEMETRY_GC_REASON, reason);
bool wasFullGC = collectedCount == compartmentCount;
if (GCSliceCallback cb = runtime->gcSliceCallback)
(*cb)(runtime, first ? GC_CYCLE_BEGIN : GC_SLICE_BEGIN, GCDescription(!wasFullGC));
}
@@ -540,6 +544,7 @@ Statistics::endSlice()
if (last)
endGC();
bool wasFullGC = collectedCount == compartmentCount;
if (GCSliceCallback cb = runtime->gcSliceCallback) {
if (last)
(*cb)(runtime, GC_CYCLE_END, GCDescription(!wasFullGC));
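In short, Statistics no longer stores a wasFullGC flag; a condensed sketch of the derivation now used above (names taken from the diff):

    // A cycle counts as a full GC exactly when every compartment in the
    // runtime was collected.
    bool wasFullGC = collectedCount == compartmentCount;
    (*cb)(JS_TELEMETRY_GC_IS_COMPARTMENTAL, wasFullGC ? 0 : 1);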

@@ -94,7 +94,7 @@ struct Statistics {
void beginPhase(Phase phase);
void endPhase(Phase phase);
void beginSlice(bool full, gcreason::Reason reason);
void beginSlice(int collectedCount, int compartmentCount, gcreason::Reason reason);
void endSlice();
void reset(const char *reason) { slices.back().resetReason = reason; }
@@ -116,7 +116,8 @@ struct Statistics {
FILE *fp;
bool fullFormat;
bool wasFullGC;
int collectedCount;
int compartmentCount;
const char *nonincrementalReason;
struct SliceData {
@@ -162,9 +163,13 @@ struct Statistics {
};
struct AutoGCSlice {
AutoGCSlice(Statistics &stats, bool full, gcreason::Reason reason
AutoGCSlice(Statistics &stats, int collectedCount, int compartmentCount, gcreason::Reason reason
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: stats(stats) { JS_GUARD_OBJECT_NOTIFIER_INIT; stats.beginSlice(full, reason); }
: stats(stats)
{
JS_GUARD_OBJECT_NOTIFIER_INIT;
stats.beginSlice(collectedCount, compartmentCount, reason);
}
~AutoGCSlice() { stats.endSlice(); }
Statistics &stats;
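A short usage sketch of the updated RAII helper (mirroring the Collect() call site in jsgc.cpp below; the counts are computed by the caller):

    {
        gcstats::AutoGCSlice agc(rt->gcStats, collectedCount, compartmentCount, reason);
        // ... perform one GC slice; beginSlice() already ran in the constructor ...
    }   // endSlice() runs when |agc| goes out of scope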

@@ -730,18 +730,14 @@ JSRuntime::JSRuntime()
gcJitReleaseTime(0),
gcMode(JSGC_MODE_GLOBAL),
gcIsNeeded(0),
gcFullIsNeeded(0),
gcWeakMapList(NULL),
gcStats(thisFromCtor()),
gcNumber(0),
gcStartNumber(0),
gcTriggerReason(gcreason::NO_REASON),
gcIsFull(false),
gcStrictCompartmentChecking(false),
gcIncrementalState(gc::NO_INCREMENTAL),
gcCompartmentCreated(false),
gcLastMarkSlice(false),
gcIncrementalIsFull(false),
gcInterFrameGC(0),
gcSliceBudget(SliceBudget::Unlimited),
gcIncrementalEnabled(true),
@@ -2874,9 +2870,10 @@ JS_CompartmentGC(JSContext *cx, JSCompartment *comp)
if (comp) {
PrepareCompartmentForGC(comp);
GC(cx, false, GC_NORMAL, gcreason::API);
GC(cx, GC_NORMAL, gcreason::API);
} else {
GC(cx, true, GC_NORMAL, gcreason::API);
PrepareForFullGC(cx->runtime);
GC(cx, GC_NORMAL, gcreason::API);
}
}
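The embedder-visible behavior of JS_CompartmentGC is unchanged; only the plumbing differs. A hedged usage sketch:

    JS_CompartmentGC(cx, comp);   // collect a single compartment
    JS_CompartmentGC(cx, NULL);   // NULL still means: schedule all compartments, then collect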

@@ -224,7 +224,7 @@ js_FinishAtomState(JSRuntime *rt)
}
bool
js_InitCommonAtoms(JSContext *cx)
js::InitCommonAtoms(JSContext *cx)
{
JSAtomState *state = &cx->runtime->atomState;
JSAtom **atoms = state->commonAtomsStart();
@@ -242,19 +242,19 @@ js_InitCommonAtoms(JSContext *cx)
}
void
js_FinishCommonAtoms(JSContext *cx)
js::FinishCommonAtoms(JSRuntime *rt)
{
cx->runtime->emptyString = NULL;
cx->runtime->atomState.junkAtoms();
rt->emptyString = NULL;
rt->atomState.junkAtoms();
}
void
js_TraceAtomState(JSTracer *trc)
js::MarkAtomState(JSTracer *trc, bool markAll)
{
JSRuntime *rt = trc->runtime;
JSAtomState *state = &rt->atomState;
if (rt->gcKeepAtoms) {
if (markAll) {
for (AtomSet::Range r = state->atoms.all(); !r.empty(); r.popFront()) {
JSAtom *tmp = r.front().asPtr();
MarkStringRoot(trc, &tmp, "locked_atom");
@@ -274,7 +274,7 @@ js_TraceAtomState(JSTracer *trc)
}
void
js_SweepAtomState(JSRuntime *rt)
js::SweepAtomState(JSRuntime *rt)
{
JSAtomState *state = &rt->atomState;
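A condensed sketch of the new atom-marking contract (the markAll computation lives at the MarkRuntime call site in jsgc.cpp below):

    // Atoms sit outside the cross-compartment wrapper map, so they must be
    // kept alive conservatively unless every compartment is being collected.
    bool markAll = rt->gcKeepAtoms || !isFullGC;
    js::MarkAtomState(trc, markAll);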

@@ -408,19 +408,19 @@ js_FinishAtomState(JSRuntime *rt);
* Atom tracing and garbage collection hooks.
*/
extern void
js_TraceAtomState(JSTracer *trc);
namespace js {
extern void
js_SweepAtomState(JSRuntime *rt);
MarkAtomState(JSTracer *trc, bool markAll);
extern void
SweepAtomState(JSRuntime *rt);
extern bool
js_InitCommonAtoms(JSContext *cx);
InitCommonAtoms(JSContext *cx);
extern void
js_FinishCommonAtoms(JSContext *cx);
namespace js {
FinishCommonAtoms(JSRuntime *rt);
/* N.B. must correspond to boolean tagging behavior. */
enum InternBehavior

View File

@@ -207,7 +207,7 @@ js_NewContext(JSRuntime *rt, size_t stackChunkSize)
#endif
bool ok = rt->staticStrings.init(cx);
if (ok)
ok = js_InitCommonAtoms(cx);
ok = InitCommonAtoms(cx);
#ifdef JS_THREADSAFE
JS_EndRequest(cx);
@@ -270,17 +270,19 @@ js_DestroyContext(JSContext *cx, JSDestroyContextMode mode)
c->types.print(cx, false);
/* Unpin all common atoms before final GC. */
js_FinishCommonAtoms(cx);
FinishCommonAtoms(cx->runtime);
/* Clear debugging state to remove GC roots. */
for (CompartmentsIter c(rt); !c.done(); c.next())
c->clearTraps(cx);
JS_ClearAllWatchPoints(cx);
GC(cx, true, GC_NORMAL, gcreason::LAST_CONTEXT);
PrepareForFullGC(rt);
GC(cx, GC_NORMAL, gcreason::LAST_CONTEXT);
} else if (mode == JSDCM_FORCE_GC) {
JS_ASSERT(!rt->gcRunning);
GC(cx, true, GC_NORMAL, gcreason::DESTROY_CONTEXT);
PrepareForFullGC(rt);
GC(cx, GC_NORMAL, gcreason::DESTROY_CONTEXT);
} else if (mode == JSDCM_MAYBE_GC) {
JS_ASSERT(!rt->gcRunning);
JS_MaybeGC(cx);
@@ -883,7 +885,7 @@ js_InvokeOperationCallback(JSContext *cx)
JS_ATOMIC_SET(&rt->interrupt, 0);
if (rt->gcIsNeeded)
GCSlice(cx, rt->gcFullIsNeeded, GC_NORMAL, rt->gcTriggerReason);
GCSlice(cx, GC_NORMAL, rt->gcTriggerReason);
#ifdef JS_THREADSAFE
/*

@@ -352,7 +352,6 @@ struct JSRuntime : js::RuntimeFriendFields
* full GC.
*/
volatile uintptr_t gcIsNeeded;
volatile uintptr_t gcFullIsNeeded;
js::WeakMapBase *gcWeakMapList;
js::gcstats::Statistics gcStats;
@@ -366,9 +365,6 @@ struct JSRuntime : js::RuntimeFriendFields
/* The reason that an interrupt-triggered GC should be called. */
js::gcreason::Reason gcTriggerReason;
/* Is the currently running GC a full GC or a compartmental GC? */
bool gcIsFull;
/*
* If this is true, all marked objects must belong to a compartment being
* GCed. This is used to look for compartment bugs.
@@ -381,15 +377,9 @@ struct JSRuntime : js::RuntimeFriendFields
*/
js::gc::State gcIncrementalState;
/* Indicates that a new compartment was created during incremental GC. */
bool gcCompartmentCreated;
/* Indicates that the last incremental slice exhausted the mark stack. */
bool gcLastMarkSlice;
/* Is there a full incremental GC in progress. */
bool gcIncrementalIsFull;
/*
* Indicates that a GC slice has taken place in the middle of an animation
* frame, rather than at the beginning. In this case, the next slice will be

@@ -134,7 +134,8 @@ JS_NewObjectWithUniqueType(JSContext *cx, JSClass *clasp, JSObject *proto, JSObj
JS_FRIEND_API(void)
js::GCForReason(JSContext *cx, gcreason::Reason reason)
{
GC(cx, true, GC_NORMAL, reason);
PrepareForFullGC(cx->runtime);
GC(cx, GC_NORMAL, reason);
}
JS_FRIEND_API(void)
@@ -144,19 +145,21 @@ js::CompartmentGCForReason(JSContext *cx, JSCompartment *comp, gcreason::Reason
JS_ASSERT(comp != cx->runtime->atomsCompartment);
PrepareCompartmentForGC(comp);
GC(cx, false, GC_NORMAL, reason);
GC(cx, GC_NORMAL, reason);
}
JS_FRIEND_API(void)
js::ShrinkingGC(JSContext *cx, gcreason::Reason reason)
{
GC(cx, true, GC_SHRINK, reason);
PrepareForFullGC(cx->runtime);
GC(cx, GC_SHRINK, reason);
}
JS_FRIEND_API(void)
js::IncrementalGC(JSContext *cx, gcreason::Reason reason)
{
GCSlice(cx, true, GC_NORMAL, reason);
PrepareForFullGC(cx->runtime);
GCSlice(cx, GC_NORMAL, reason);
}
JS_FRIEND_API(void)
@@ -753,7 +756,8 @@ NotifyDidPaint(JSContext *cx)
}
if (rt->gcZeal() == gc::ZealFrameGCValue) {
GCSlice(cx, true, GC_NORMAL, gcreason::REFRESH_FRAME);
PrepareForFullGC(rt);
GCSlice(cx, GC_NORMAL, gcreason::REFRESH_FRAME);
return;
}
@@ -762,7 +766,7 @@ NotifyDidPaint(JSContext *cx)
if (c->needsBarrier())
PrepareCompartmentForGC(c);
}
GCSlice(cx, rt->gcIncrementalIsFull, GC_NORMAL, gcreason::REFRESH_FRAME);
GCSlice(cx, GC_NORMAL, gcreason::REFRESH_FRAME);
}
rt->gcInterFrameGC = false;
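A sketch of the reschedule logic in NotifyDidPaint above (as with the similar needsBarrier check in Collect() below, this is a heuristic to avoid resets): while an incremental GC is in progress, any compartment that still has write barriers enabled is re-scheduled so the next slice continues collecting it.

    for (CompartmentsIter c(rt); !c.done(); c.next()) {
        if (c->needsBarrier())
            PrepareCompartmentForGC(c);   // keep collecting it in the next slice
    }
    GCSlice(cx, GC_NORMAL, gcreason::REFRESH_FRAME);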

@@ -1642,13 +1642,13 @@ ArenaLists::finalizeScripts(FreeOp *fop)
}
static void
RunLastDitchGC(JSContext *cx, gcreason::Reason reason, bool full)
RunLastDitchGC(JSContext *cx, gcreason::Reason reason)
{
JSRuntime *rt = cx->runtime;
/* The last ditch GC preserves all atoms. */
AutoKeepAtoms keep(rt);
GC(cx, full, GC_NORMAL, reason);
GC(cx, GC_NORMAL, reason);
}
/* static */ void *
@@ -1664,7 +1664,7 @@ ArenaLists::refillFreeList(JSContext *cx, AllocKind thingKind)
for (;;) {
if (JS_UNLIKELY(runGC)) {
PrepareCompartmentForGC(comp);
RunLastDitchGC(cx, gcreason::LAST_DITCH, rt->gcFullIsNeeded);
RunLastDitchGC(cx, gcreason::LAST_DITCH);
/*
* The JSGC_END callback can legitimately allocate new GC
@@ -2240,7 +2240,7 @@ MarkRuntime(JSTracer *trc, bool useSavedRoots = false)
JSRuntime *rt = trc->runtime;
JS_ASSERT(trc->callback != GCMarker::GrayCallback);
if (IS_GC_MARKING_TRACER(trc) && !rt->gcIsFull) {
if (IS_GC_MARKING_TRACER(trc)) {
for (CompartmentsIter c(rt); !c.done(); c.next()) {
if (!c->isCollecting())
c->markCrossCompartmentWrappers(trc);
@@ -2265,7 +2265,20 @@ MarkRuntime(JSTracer *trc, bool useSavedRoots = false)
MarkScriptRoot(trc, &vec[i].script, "scriptAndCountsVector");
}
js_TraceAtomState(trc);
/*
* Atoms are not in the cross-compartment map. So if there are any
* compartments that are not being collected, we are not allowed to collect
* atoms. Otherwise, the non-collected compartments could contain pointers
* to atoms that we would miss.
*/
bool isFullGC = true;
if (IS_GC_MARKING_TRACER(trc)) {
for (CompartmentsIter c(rt); !c.done(); c.next()) {
if (!c->isCollecting())
isFullGC = false;
}
}
MarkAtomState(trc, rt->gcKeepAtoms || !isFullGC);
rt->staticStrings.trace(trc);
for (ContextIter acx(rt); !acx.done(); acx.next())
@@ -2322,26 +2335,37 @@ MarkRuntime(JSTracer *trc, bool useSavedRoots = false)
}
}
static void
TriggerOperationCallback(JSRuntime *rt, gcreason::Reason reason)
{
if (rt->gcIsNeeded)
return;
rt->gcIsNeeded = true;
rt->gcTriggerReason = reason;
rt->triggerOperationCallback();
}
void
TriggerGC(JSRuntime *rt, gcreason::Reason reason)
{
JS_ASSERT(rt->onOwnerThread());
if (rt->gcRunning || rt->gcIsNeeded)
if (rt->gcRunning)
return;
/* Trigger the GC when it is safe to call an operation callback. */
rt->gcIsNeeded = true;
rt->gcFullIsNeeded = true;
rt->gcTriggerReason = reason;
rt->triggerOperationCallback();
PrepareForFullGC(rt);
TriggerOperationCallback(rt, reason);
}
void
TriggerCompartmentGC(JSCompartment *comp, gcreason::Reason reason)
{
JSRuntime *rt = comp->rt;
JS_ASSERT(!rt->gcRunning);
JS_ASSERT(rt->onOwnerThread());
if (rt->gcRunning)
return;
if (rt->gcZeal() == ZealAllocValue) {
TriggerGC(rt, reason);
@@ -2355,17 +2379,7 @@ TriggerCompartmentGC(JSCompartment *comp, gcreason::Reason reason)
}
PrepareCompartmentForGC(comp);
if (rt->gcIsNeeded)
return;
/*
* Trigger the GC when it is safe to call an operation callback on any
* thread.
*/
rt->gcIsNeeded = true;
rt->gcTriggerReason = reason;
rt->triggerOperationCallback();
TriggerOperationCallback(rt, reason);
}
void
@@ -2375,13 +2389,14 @@ MaybeGC(JSContext *cx)
JS_ASSERT(rt->onOwnerThread());
if (rt->gcZeal() == ZealAllocValue || rt->gcZeal() == ZealPokeValue) {
GC(cx, true, GC_NORMAL, gcreason::MAYBEGC);
PrepareForFullGC(rt);
GC(cx, GC_NORMAL, gcreason::MAYBEGC);
return;
}
JSCompartment *comp = cx->compartment;
if (rt->gcIsNeeded) {
GCSlice(cx, rt->gcFullIsNeeded, GC_NORMAL, gcreason::MAYBEGC);
GCSlice(cx, GC_NORMAL, gcreason::MAYBEGC);
return;
}
@@ -2390,13 +2405,13 @@ MaybeGC(JSContext *cx)
rt->gcIncrementalState == NO_INCREMENTAL)
{
PrepareCompartmentForGC(comp);
GCSlice(cx, false, GC_NORMAL, gcreason::MAYBEGC);
GCSlice(cx, GC_NORMAL, gcreason::MAYBEGC);
return;
}
if (comp->gcMallocAndFreeBytes > comp->gcTriggerMallocAndFreeBytes) {
PrepareCompartmentForGC(comp);
GCSlice(cx, false, GC_NORMAL, gcreason::MAYBEGC);
GCSlice(cx, GC_NORMAL, gcreason::MAYBEGC);
return;
}
@@ -2410,7 +2425,8 @@ MaybeGC(JSContext *cx)
if (rt->gcChunkAllocationSinceLastGC ||
rt->gcNumArenasFreeCommitted > FreeCommittedArenasThreshold)
{
GCSlice(cx, true, GC_SHRINK, gcreason::MAYBEGC);
PrepareForFullGC(rt);
GCSlice(cx, GC_SHRINK, gcreason::MAYBEGC);
} else {
rt->gcNextFullGCTime = now + GC_IDLE_FULL_SPAN;
}
@@ -2808,6 +2824,13 @@ GCHelperThread::doSweep()
#endif /* JS_THREADSAFE */
void
PrepareForFullGC(JSRuntime *rt)
{
for (CompartmentsIter c(rt); !c.done(); c.next())
c->scheduleGC();
}
void
PrepareCompartmentForGC(JSCompartment *comp)
{
@@ -2845,7 +2868,7 @@ SweepCompartments(FreeOp *fop, JSGCInvocationKind gckind)
while (read < end) {
JSCompartment *compartment = *read++;
if (!compartment->hold &&
if (!compartment->hold && compartment->isCollecting() &&
(compartment->arenas.arenaListsAreEmpty() || !rt->hasContexts()))
{
compartment->arenas.checkEmptyFreeLists();
@@ -2978,11 +3001,9 @@ EndMarkPhase(JSContext *cx)
#ifdef DEBUG
/* Make sure that we didn't mark an object in another compartment */
if (!rt->gcIsFull) {
for (CompartmentsIter c(rt); !c.done(); c.next()) {
JS_ASSERT_IF(!c->isCollecting() && c != rt->atomsCompartment,
c->arenas.checkArenaListAllUnmarked());
}
for (CompartmentsIter c(rt); !c.done(); c.next()) {
JS_ASSERT_IF(!c->isCollecting() && c != rt->atomsCompartment,
c->arenas.checkArenaListAllUnmarked());
}
#endif
}
@@ -3129,7 +3150,7 @@ SweepPhase(JSContext *cx, JSGCInvocationKind gckind)
/* Finalize unreachable (key,value) pairs in all weak maps. */
WeakMapBase::sweepAll(&rt->gcMarker);
js_SweepAtomState(rt);
SweepAtomState(rt);
/* Collect watch points associated with unreachable objects. */
WatchpointMap::sweepAll(rt);
@@ -3140,7 +3161,7 @@ SweepPhase(JSContext *cx, JSGCInvocationKind gckind)
{
gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_COMPARTMENTS);
bool releaseTypes = rt->gcIsFull && ReleaseObservedTypes(rt);
bool releaseTypes = ReleaseObservedTypes(rt);
for (GCCompartmentsIter c(rt); !c.done(); c.next())
c->sweep(&fop, releaseTypes);
}
@@ -3194,8 +3215,7 @@ SweepPhase(JSContext *cx, JSGCInvocationKind gckind)
* This removes compartments from rt->compartment, so we do it last to make
* sure we don't miss sweeping any compartments.
*/
if (rt->gcIsFull)
SweepCompartments(&fop, gckind);
SweepCompartments(&fop, gckind);
#ifndef JS_THREADSAFE
/*
@@ -3260,7 +3280,7 @@ class AutoHeapSession {
/* ...while this class is to be used only for garbage collection. */
class AutoGCSession : AutoHeapSession {
public:
explicit AutoGCSession(JSRuntime *rt, bool full);
explicit AutoGCSession(JSRuntime *rt);
~AutoGCSession();
};
@@ -3279,13 +3299,12 @@ AutoHeapSession::~AutoHeapSession()
runtime->gcRunning = false;
}
AutoGCSession::AutoGCSession(JSRuntime *rt, bool full)
AutoGCSession::AutoGCSession(JSRuntime *rt)
: AutoHeapSession(rt)
{
rt->gcIsFull = full;
DebugOnly<bool> any = false;
for (CompartmentsIter c(rt); !c.done(); c.next()) {
if (full || c->isGCScheduled()) {
if (c->isGCScheduled()) {
c->setCollecting(true);
any = true;
}
@@ -3293,7 +3312,6 @@ AutoGCSession::AutoGCSession(JSRuntime *rt, bool full)
JS_ASSERT(any);
runtime->gcIsNeeded = false;
runtime->gcFullIsNeeded = false;
runtime->gcInterFrameGC = true;
runtime->gcNumber++;
@@ -3307,7 +3325,6 @@ AutoGCSession::AutoGCSession(JSRuntime *rt, bool full)
AutoGCSession::~AutoGCSession()
{
runtime->gcIsFull = false;
for (GCCompartmentsIter c(runtime); !c.done(); c.next())
c->setCollecting(false);
@@ -3324,7 +3341,6 @@ ResetIncrementalGC(JSRuntime *rt, const char *reason)
for (CompartmentsIter c(rt); !c.done(); c.next())
c->needsBarrier_ = false;
rt->gcIncrementalIsFull = false;
rt->gcMarker.reset();
rt->gcMarker.stop();
rt->gcIncrementalState = NO_INCREMENTAL;
@@ -3375,7 +3391,6 @@ AutoGCSlice::~AutoGCSlice()
c->arenas.prepareForIncrementalGC(rt);
} else {
JS_ASSERT(rt->gcIncrementalState == NO_INCREMENTAL);
c->needsBarrier_ = false;
}
}
@@ -3408,8 +3423,6 @@ IncrementalGCSlice(JSContext *cx, int64_t budget, JSGCInvocationKind gckind)
gc::State initialState = rt->gcIncrementalState;
if (rt->gcIncrementalState == NO_INCREMENTAL) {
JS_ASSERT(!rt->gcIncrementalIsFull);
rt->gcIncrementalIsFull = rt->gcIsFull;
rt->gcIncrementalState = MARK_ROOTS;
rt->gcLastMarkSlice = false;
}
@@ -3455,8 +3468,6 @@ IncrementalGCSlice(JSContext *cx, int64_t budget, JSGCInvocationKind gckind)
/* JIT code was already discarded during sweeping. */
rt->gcIncrementalIsFull = false;
rt->gcIncrementalState = NO_INCREMENTAL;
}
}
@@ -3487,11 +3498,6 @@ class IncrementalSafety
static IncrementalSafety
IsIncrementalGCSafe(JSRuntime *rt)
{
if (rt->gcCompartmentCreated) {
rt->gcCompartmentCreated = false;
return IncrementalSafety::Unsafe("compartment created");
}
if (rt->gcKeepAtoms)
return IncrementalSafety::Unsafe("gcKeepAtoms set");
@@ -3548,22 +3554,24 @@ BudgetIncrementalGC(JSRuntime *rt, int64_t *budget)
/*
* GC, repeatedly if necessary, until we think we have not created any new
* garbage. We disable inlining to ensure that the bottom of the stack with
* possible GC roots recorded in js_GC excludes any pointers we use during the
* marking implementation.
* possible GC roots recorded in MarkRuntime excludes any pointers we use during
* the marking implementation.
*/
static JS_NEVER_INLINE void
GCCycle(JSContext *cx, bool full, int64_t budget, JSGCInvocationKind gckind)
GCCycle(JSContext *cx, int64_t budget, JSGCInvocationKind gckind)
{
JSRuntime *rt = cx->runtime;
JS_ASSERT_IF(!full, !rt->atomsCompartment->isCollecting());
JS_ASSERT_IF(!full, rt->gcMode != JSGC_MODE_GLOBAL);
#ifdef DEBUG
for (CompartmentsIter c(rt); !c.done(); c.next())
JS_ASSERT_IF(rt->gcMode == JSGC_MODE_GLOBAL, c->isGCScheduled());
#endif
/* Recursive GC is no-op. */
if (rt->gcRunning)
return;
AutoGCSession gcsession(rt, full);
AutoGCSession gcsession(rt);
/* Don't GC if we are reporting an OOM. */
if (rt->inOOMReport)
@@ -3631,8 +3639,7 @@ IsDeterministicGCReason(gcreason::Reason reason)
#endif
static void
Collect(JSContext *cx, bool full, int64_t budget,
JSGCInvocationKind gckind, gcreason::Reason reason)
Collect(JSContext *cx, int64_t budget, JSGCInvocationKind gckind, gcreason::Reason reason)
{
JSRuntime *rt = cx->runtime;
JS_AbortIfWrongThread(rt);
@@ -3667,14 +3674,22 @@ Collect(JSContext *cx, bool full, int64_t budget,
RecordNativeStackTopForGC(rt);
if (rt->gcMode == JSGC_MODE_GLOBAL)
full = true;
int compartmentCount = 0;
int collectedCount = 0;
for (CompartmentsIter c(rt); !c.done(); c.next()) {
if (rt->gcMode == JSGC_MODE_GLOBAL)
c->scheduleGC();
/* This is a heuristic to avoid resets. */
if (rt->gcIncrementalState != NO_INCREMENTAL && rt->gcIncrementalIsFull)
full = true;
/* This is a heuristic to avoid resets. */
if (rt->gcIncrementalState != NO_INCREMENTAL && c->needsBarrier())
c->scheduleGC();
gcstats::AutoGCSlice agc(rt->gcStats, full, reason);
compartmentCount++;
if (c->isGCScheduled())
collectedCount++;
}
gcstats::AutoGCSlice agc(rt->gcStats, collectedCount, compartmentCount, reason);
do {
/*
@@ -3691,7 +3706,7 @@ Collect(JSContext *cx, bool full, int64_t budget,
/* Lock out other GC allocator and collector invocations. */
AutoLockGC lock(rt);
rt->gcPoke = false;
GCCycle(cx, full, budget, gckind);
GCCycle(cx, budget, gckind);
}
if (rt->gcIncrementalState == NO_INCREMENTAL) {
@@ -3710,21 +3725,22 @@ Collect(JSContext *cx, bool full, int64_t budget,
namespace js {
void
GC(JSContext *cx, bool full, JSGCInvocationKind gckind, gcreason::Reason reason)
GC(JSContext *cx, JSGCInvocationKind gckind, gcreason::Reason reason)
{
Collect(cx, full, SliceBudget::Unlimited, gckind, reason);
Collect(cx, SliceBudget::Unlimited, gckind, reason);
}
void
GCSlice(JSContext *cx, bool full, JSGCInvocationKind gckind, gcreason::Reason reason)
GCSlice(JSContext *cx, JSGCInvocationKind gckind, gcreason::Reason reason)
{
Collect(cx, full, cx->runtime->gcSliceBudget, gckind, reason);
Collect(cx, cx->runtime->gcSliceBudget, gckind, reason);
}
void
GCDebugSlice(JSContext *cx, int64_t objCount)
{
Collect(cx, NULL, SliceBudget::WorkBudget(objCount), GC_NORMAL, gcreason::API);
PrepareForFullGC(cx->runtime);
Collect(cx, SliceBudget::WorkBudget(objCount), GC_NORMAL, gcreason::API);
}
void
@@ -3902,15 +3918,6 @@ NewCompartment(JSContext *cx, JSPrincipals *principals)
*/
{
AutoLockGC lock(rt);
/*
* If we're in the middle of an incremental GC, we cancel
* it. Otherwise we might fail the mark the newly created
* compartment fully.
*/
if (rt->gcIncrementalState == MARK)
rt->gcCompartmentCreated = true;
if (rt->compartments.append(compartment))
return compartment;
}
@@ -3933,7 +3940,9 @@ RunDebugGC(JSContext *cx)
*/
if (rt->gcDebugCompartmentGC)
PrepareCompartmentForGC(cx->compartment);
RunLastDitchGC(cx, gcreason::DEBUG_GC, !rt->gcDebugCompartmentGC);
else
PrepareForFullGC(cx->runtime);
RunLastDitchGC(cx, gcreason::DEBUG_GC);
#endif
}
@@ -4321,6 +4330,14 @@ EndVerifyBarriers(JSContext *cx)
if (!trc)
return;
/* We need to disable barriers before tracing, which may invoke barriers. */
for (CompartmentsIter c(rt); !c.done(); c.next()) {
/* Don't verify if a new compartment was created. */
if (!c->needsBarrier_)
return;
c->needsBarrier_ = false;
}
/*
* We need to bump gcNumber so that the methodjit knows that jitcode has
* been discarded.
@@ -4328,10 +4345,6 @@ EndVerifyBarriers(JSContext *cx)
JS_ASSERT(trc->number == rt->gcNumber);
rt->gcNumber++;
/* We need to disable barriers before tracing, which may invoke barriers. */
for (CompartmentsIter c(rt); !c.done(); c.next())
c->needsBarrier_ = false;
for (CompartmentsIter c(rt); !c.done(); c.next())
c->discardJitCode(rt->defaultFreeOp());
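Putting the jsgc.cpp changes together, "is this a full GC?" is now a derived property rather than a stored flag. A condensed sketch of the counting done in Collect() above:

    int compartmentCount = 0, collectedCount = 0;
    for (CompartmentsIter c(rt); !c.done(); c.next()) {
        compartmentCount++;
        if (c->isGCScheduled())
            collectedCount++;
    }
    bool isFullGC = collectedCount == compartmentCount;   // full iff all scheduled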

@@ -1383,6 +1383,9 @@ MaybeGC(JSContext *cx);
extern void
ShrinkGCBuffers(JSRuntime *rt);
extern void
PrepareForFullGC(JSRuntime *rt);
extern void
PrepareCompartmentForGC(JSCompartment *comp);
@@ -1397,12 +1400,11 @@ typedef enum JSGCInvocationKind {
GC_SHRINK = 1
} JSGCInvocationKind;
/* Pass NULL for |comp| to get a full GC. */
extern void
GC(JSContext *cx, bool full, JSGCInvocationKind gckind, js::gcreason::Reason reason);
GC(JSContext *cx, JSGCInvocationKind gckind, js::gcreason::Reason reason);
extern void
GCSlice(JSContext *cx, bool full, JSGCInvocationKind gckind, js::gcreason::Reason reason);
GCSlice(JSContext *cx, JSGCInvocationKind gckind, js::gcreason::Reason reason);
extern void
GCDebugSlice(JSContext *cx, int64_t objCount);

@@ -83,7 +83,6 @@ CheckMarkedThing(JSTracer *trc, T *thing)
DebugOnly<JSRuntime *> rt = trc->runtime;
JS_ASSERT_IF(rt->gcIsFull, IS_GC_MARKING_TRACER(trc));
JS_ASSERT_IF(thing->compartment()->requireGCTracer(), IS_GC_MARKING_TRACER(trc));
JS_ASSERT(thing->isAligned());