Bug 817218 - Move UnmarkGray to the JS engine (r=mccr8)

Bill McCloskey 2012-12-03 11:19:23 -08:00
parent 7b7954aa61
commit d4e498b887
10 changed files with 137 additions and 117 deletions

View File

@@ -106,7 +106,7 @@ js::IterateCells(JSRuntime *rt, JSCompartment *compartment, AllocKind thingKind,
 }
 
 void
-js::IterateGrayObjects(JSCompartment *compartment, GCThingCallback *cellCallback, void *data)
+js::IterateGrayObjects(JSCompartment *compartment, GCThingCallback cellCallback, void *data)
 {
     JS_ASSERT(compartment);
     AutoPrepareForTracing prep(compartment->rt);

View File

@@ -1488,3 +1488,105 @@ js::CallTracer(JSTracer *trc, void *thing, JSGCTraceKind kind)
     MarkKind(trc, &tmp, kind);
     JS_ASSERT(tmp == thing);
 }
+
+static void
+UnmarkGrayGCThing(void *thing)
+{
+    static_cast<js::gc::Cell *>(thing)->unmark(js::gc::GRAY);
+}
+
+struct UnmarkGrayTracer : public JSTracer
+{
+    UnmarkGrayTracer() : tracingShape(false), previousShape(NULL) {}
+    UnmarkGrayTracer(JSTracer *trc, bool tracingShape)
+      : tracingShape(tracingShape), previousShape(NULL)
+    {
+        JS_TracerInit(this, trc->runtime, trc->callback);
+    }
+
+    /* True iff we are tracing the immediate children of a shape. */
+    bool tracingShape;
+
+    /* If tracingShape, shape child or NULL. Otherwise, NULL. */
+    void *previousShape;
+};
+
+/*
+ * The GC and CC are run independently. Consequently, the following sequence of
+ * events can occur:
+ * 1. GC runs and marks an object gray.
+ * 2. Some JS code runs that creates a pointer from a JS root to the gray
+ *    object. If we re-ran a GC at this point, the object would now be black.
+ * 3. Now we run the CC. It may think it can collect the gray object, even
+ *    though it's reachable from the JS heap.
+ *
+ * To prevent this badness, we unmark the gray bit of an object when it is
+ * accessed by callers outside XPConnect. This would cause the object to go
+ * black in step 2 above. This must be done on everything reachable from the
+ * object being returned. The following code takes care of the recursive
+ * re-coloring.
+ */
+static void
+UnmarkGrayChildren(JSTracer *trc, void **thingp, JSGCTraceKind kind)
+{
+    void *thing = *thingp;
+
+    int stackDummy;
+    if (!JS_CHECK_STACK_SIZE(js::GetNativeStackLimit(trc->runtime), &stackDummy)) {
+        /*
+         * If we run out of stack, we take a more drastic measure: require that
+         * we GC again before the next CC.
+         */
+        trc->runtime->gcGrayBitsValid = false;
+        return;
+    }
+
+    if (!GCThingIsMarkedGray(thing))
+        return;
+
+    UnmarkGrayGCThing(thing);
+
+    /*
+     * Trace children of |thing|. If |thing| and its parent are both shapes,
+     * |thing| will get saved to previousShape without being traced. The parent
+     * will later trace |thing|. This is done to avoid increasing the stack
+     * depth during shape tracing. It is safe to do because a shape can only
+     * have one child that is a shape.
+     */
+    UnmarkGrayTracer *tracer = static_cast<UnmarkGrayTracer *>(trc);
+    UnmarkGrayTracer childTracer(tracer, kind == JSTRACE_SHAPE);
+
+    if (kind != JSTRACE_SHAPE) {
+        JS_TraceChildren(&childTracer, thing, kind);
+        JS_ASSERT(!childTracer.previousShape);
+        return;
+    }
+
+    if (tracer->tracingShape) {
+        JS_ASSERT(!tracer->previousShape);
+        tracer->previousShape = thing;
+        return;
+    }
+
+    do {
+        JS_ASSERT(!GCThingIsMarkedGray(thing));
+        JS_TraceChildren(&childTracer, thing, JSTRACE_SHAPE);
+        thing = childTracer.previousShape;
+        childTracer.previousShape = NULL;
+    } while (thing);
+}
+
+JS_FRIEND_API(void)
+js::UnmarkGrayGCThingRecursively(void *thing, JSGCTraceKind kind)
+{
+    JS_ASSERT(kind != JSTRACE_SHAPE);
+
+    if (!GCThingIsMarkedGray(thing))
+        return;
+
+    UnmarkGrayGCThing(thing);
+
+    JSRuntime *rt = static_cast<Cell *>(thing)->compartment()->rt;
+    UnmarkGrayTracer trc;
+    JS_TracerInit(&trc, rt, UnmarkGrayChildren);
+    JS_TraceChildren(&trc, thing, kind);
+}
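For context on how this new entry point is meant to be consumed: below is a minimal, hypothetical embedder-side helper that un-grays an object before handing it back to running JS, mirroring the xpc_UnmarkNonNullGrayObject change further down in this patch. Only the js::* calls come from the patch; the helper name and shape are illustrative.

    // Hypothetical helper (illustrative only): un-gray an object before
    // returning it to active script, using the friend API added above.
    // |obj| is assumed non-null.
    static void
    ExposeObjectToActiveJS(JSObject *obj)
    {
        // JSObject* converts implicitly to the void* these APIs take.
        if (js::GCThingIsMarkedGray(obj))
            js::UnmarkGrayGCThingRecursively(obj, JSTRACE_OBJECT);
    }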

View File

@@ -786,6 +786,7 @@ JSRuntime::JSRuntime(JSUseHelperThreads useHelperThreads)
     gcDynamicHeapGrowth(false),
     gcDynamicMarkSlice(false),
     gcShouldCleanUpEverything(false),
+    gcGrayBitsValid(false),
     gcIsNeeded(0),
     gcStats(thisFromCtor()),
     gcNumber(0),

View File

@@ -649,6 +649,12 @@ struct JSRuntime : js::RuntimeFriendFields
     /* During shutdown, the GC needs to clean up every possible object. */
     bool gcShouldCleanUpEverything;
 
+    /*
+     * The gray bits can become invalid if UnmarkGray overflows the stack. A
+     * full GC will reset this bit, since it fills in all the gray bits.
+     */
+    bool gcGrayBitsValid;
+
     /*
      * These flags must be kept separate so that a thread requesting a
      * compartment GC doesn't cancel another thread's concurrent request for a
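The flag's lifecycle: UnmarkGrayChildren clears it when it runs out of stack, a full GC sets it again (see the EndSweepPhase hunk below), and embedders read it through js::AreGCGrayBitsValid. A sketch of the intended check on the embedder side follows; it mirrors the nsXPConnect::NeedCollect change later in this commit, and the helper name here is made up.

    // Illustrative only: a cycle collection should be preceded by a GC
    // whenever the engine reports that the gray bits may be stale.
    static bool
    NeedGCBeforeCC(JSRuntime *rt)
    {
        return !js::AreGCGrayBitsValid(rt);
    }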

View File

@@ -564,6 +564,12 @@ js::GCThingIsMarkedGray(void *thing)
     return reinterpret_cast<gc::Cell *>(thing)->isMarked(gc::GRAY);
 }
 
+extern JS_FRIEND_API(bool)
+js::AreGCGrayBitsValid(JSRuntime *rt)
+{
+    return rt->gcGrayBitsValid;
+}
+
 JS_FRIEND_API(JSGCTraceKind)
 js::GCThingTraceKind(void *thing)
 {
@@ -572,13 +578,7 @@ js::GCThingTraceKind(void *thing)
 }
 
 JS_FRIEND_API(void)
-js::UnmarkGrayGCThing(void *thing)
-{
-    static_cast<js::gc::Cell *>(thing)->unmark(js::gc::GRAY);
-}
-
-JS_FRIEND_API(void)
-js::VisitGrayWrapperTargets(JSCompartment *comp, GCThingCallback *callback, void *closure)
+js::VisitGrayWrapperTargets(JSCompartment *comp, GCThingCallback callback, void *closure)
 {
     for (JSCompartment::WrapperEnum e(comp); !e.empty(); e.popFront()) {
         gc::Cell *thing = e.front().key.wrapped;

View File

@@ -269,14 +269,21 @@ TraceWeakMaps(WeakMapTracer *trc);
 extern JS_FRIEND_API(bool)
 GCThingIsMarkedGray(void *thing);
 
-JS_FRIEND_API(void)
-UnmarkGrayGCThing(void *thing);
+extern JS_FRIEND_API(bool)
+AreGCGrayBitsValid(JSRuntime *rt);
+
+/*
+ * Unsets the gray bit for anything reachable from |thing|. |kind| should not be
+ * JSTRACE_SHAPE. |thing| should be non-null.
+ */
+extern JS_FRIEND_API(void)
+UnmarkGrayGCThingRecursively(void *thing, JSGCTraceKind kind);
 
 typedef void
-(GCThingCallback)(void *closure, void *gcthing);
+(*GCThingCallback)(void *closure, void *gcthing);
 
 extern JS_FRIEND_API(void)
-VisitGrayWrapperTargets(JSCompartment *comp, GCThingCallback *callback, void *closure);
+VisitGrayWrapperTargets(JSCompartment *comp, GCThingCallback callback, void *closure);
 
 extern JS_FRIEND_API(JSObject *)
 GetWeakmapKeyDelegate(JSObject *key);
@@ -288,7 +295,7 @@ GCThingTraceKind(void *thing);
  * Invoke cellCallback on every gray JS_OBJECT in the given compartment.
  */
 extern JS_FRIEND_API(void)
-IterateGrayObjects(JSCompartment *compartment, GCThingCallback *cellCallback, void *data);
+IterateGrayObjects(JSCompartment *compartment, GCThingCallback cellCallback, void *data);
 
 /*
  * Shadow declarations of JS internal structures, for access by inline access
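Since GCThingCallback is now a plain function-pointer type, callers pass the callback by value rather than by pointer-to-function-type. A small sketch of a callback used with the IterateGrayObjects declaration above; the counting helpers are invented for illustration.

    #include <stddef.h>
    #include "jsfriendapi.h"

    // Illustrative only: count the gray objects in a compartment by passing a
    // plain static function as the GCThingCallback.
    static void
    CountGrayThing(void *closure, void *gcthing)
    {
        ++*static_cast<size_t *>(closure);
    }

    static size_t
    CountGrayObjects(JSCompartment *compartment)
    {
        size_t count = 0;
        js::IterateGrayObjects(compartment, CountGrayThing, &count);
        return count;
    }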

View File

@@ -3518,8 +3518,13 @@ EndSweepPhase(JSRuntime *rt, JSGCInvocationKind gckind, bool lastGC)
                 break;
             }
         }
 
         if (rt->gcFinalizeCallback)
             rt->gcFinalizeCallback(&fop, JSFINALIZE_COLLECTION_END, !isFull);
+
+        /* If we finished a full GC, then the gray bits are correct. */
+        if (isFull)
+            rt->gcGrayBitsValid = true;
     }
 
     /* Set up list of compartments for sweeping of background things. */

View File

@@ -79,7 +79,6 @@ nsXPConnect::nsXPConnect()
     mDefaultSecurityManager(nullptr),
     mDefaultSecurityManagerFlags(0),
     mShuttingDown(false),
-    mNeedGCBeforeCC(true),
     mEventDepth(0),
     mCycleCollectionContext(nullptr)
 {
@@ -317,7 +316,7 @@ nsXPConnect::GetInfoForName(const char * name, nsIInterfaceInfo** info)
 bool
 nsXPConnect::NeedCollect()
 {
-    return !!mNeedGCBeforeCC;
+    return !js::AreGCGrayBitsValid(GetRuntime()->GetJSRuntime());
 }
 
 void
@@ -590,98 +589,6 @@ xpc_GCThingIsGrayCCThing(void *thing)
            xpc_IsGrayGCThing(thing);
 }
 
-struct UnmarkGrayTracer : public JSTracer
-{
-    UnmarkGrayTracer() : mTracingShape(false), mPreviousShape(nullptr) {}
-    UnmarkGrayTracer(JSTracer *trc, bool aTracingShape)
-        : mTracingShape(aTracingShape), mPreviousShape(nullptr)
-    {
-        JS_TracerInit(this, trc->runtime, trc->callback);
-    }
-
-    bool mTracingShape; // true iff we are tracing the immediate children of a shape
-    void *mPreviousShape; // If mTracingShape, shape child or NULL. Otherwise, NULL.
-};
-
-/*
- * The GC and CC are run independently. Consequently, the following sequence of
- * events can occur:
- * 1. GC runs and marks an object gray.
- * 2. Some JS code runs that creates a pointer from a JS root to the gray
- *    object. If we re-ran a GC at this point, the object would now be black.
- * 3. Now we run the CC. It may think it can collect the gray object, even
- *    though it's reachable from the JS heap.
- *
- * To prevent this badness, we unmark the gray bit of an object when it is
- * accessed by callers outside XPConnect. This would cause the object to go
- * black in step 2 above. This must be done on everything reachable from the
- * object being returned. The following code takes care of the recursive
- * re-coloring.
- */
-static void
-UnmarkGrayChildren(JSTracer *trc, void **thingp, JSGCTraceKind kind)
-{
-    void *thing = *thingp;
-
-    int stackDummy;
-    if (!JS_CHECK_STACK_SIZE(js::GetNativeStackLimit(trc->runtime), &stackDummy)) {
-        /*
-         * If we run out of stack, we take a more drastic measure: require that
-         * we GC again before the next CC.
-         */
-        nsXPConnect* xpc = nsXPConnect::GetXPConnect();
-        xpc->EnsureGCBeforeCC();
-        return;
-    }
-
-    if (!xpc_IsGrayGCThing(thing))
-        return;
-
-    js::UnmarkGrayGCThing(thing);
-
-    /*
-     * Trace children of |thing|. If |thing| and its parent are both shapes, |thing| will
-     * get saved to mPreviousShape without being traced. The parent will later
-     * trace |thing|. This is done to avoid increasing the stack depth during shape
-     * tracing. It is safe to do because a shape can only have one child that is a shape.
-     */
-    UnmarkGrayTracer *tracer = static_cast<UnmarkGrayTracer*>(trc);
-    UnmarkGrayTracer childTracer(tracer, kind == JSTRACE_SHAPE);
-
-    if (kind != JSTRACE_SHAPE) {
-        JS_TraceChildren(&childTracer, thing, kind);
-        MOZ_ASSERT(!childTracer.mPreviousShape);
-        return;
-    }
-
-    if (tracer->mTracingShape) {
-        MOZ_ASSERT(!tracer->mPreviousShape);
-        tracer->mPreviousShape = thing;
-        return;
-    }
-
-    do {
-        MOZ_ASSERT(!xpc_IsGrayGCThing(thing));
-        JS_TraceChildren(&childTracer, thing, JSTRACE_SHAPE);
-        thing = childTracer.mPreviousShape;
-        childTracer.mPreviousShape = nullptr;
-    } while (thing);
-}
-
-void
-xpc_UnmarkGrayGCThingRecursive(void *thing, JSGCTraceKind kind)
-{
-    MOZ_ASSERT(thing, "Don't pass me null!");
-    MOZ_ASSERT(kind != JSTRACE_SHAPE, "UnmarkGrayGCThingRecursive not intended for Shapes");
-
-    // Unmark.
-    js::UnmarkGrayGCThing(thing);
-
-    // Trace children.
-    UnmarkGrayTracer trc;
-    JSRuntime *rt = nsXPConnect::GetRuntimeInstance()->GetJSRuntime();
-    JS_TracerInit(&trc, rt, UnmarkGrayChildren);
-    JS_TraceChildren(&trc, thing, kind);
-}
-
 struct TraversalTracer : public JSTracer
 {
     TraversalTracer(nsCycleCollectionTraversalCallback &aCb) : cb(aCb)

View File

@@ -522,9 +522,6 @@ public:
     JSBool IsShuttingDown() const {return mShuttingDown;}
 
-    void EnsureGCBeforeCC() { mNeedGCBeforeCC = true; }
-    void ClearGCBeforeCC() { mNeedGCBeforeCC = false; }
-
     nsresult GetInfoForIID(const nsIID * aIID, nsIInterfaceInfo** info);
     nsresult GetInfoForName(const char * name, nsIInterfaceInfo** info);
@@ -579,7 +576,6 @@ private:
     nsIXPCSecurityManager* mDefaultSecurityManager;
     uint16_t mDefaultSecurityManagerFlags;
     JSBool mShuttingDown;
-    JSBool mNeedGCBeforeCC;
 
     // nsIThreadInternal doesn't remember which observers it called
     // OnProcessNextEvent on when it gets around to calling AfterProcessNextEvent.

View File

@@ -136,16 +136,12 @@ xpc_IsGrayGCThing(void *thing)
 extern JSBool
 xpc_GCThingIsGrayCCThing(void *thing);
 
-// Implemented in nsXPConnect.cpp.
-extern void
-xpc_UnmarkGrayGCThingRecursive(void *thing, JSGCTraceKind kind);
-
 // Unmark gray for known-nonnull cases
 MOZ_ALWAYS_INLINE void
 xpc_UnmarkNonNullGrayObject(JSObject *obj)
 {
     if (xpc_IsGrayGCThing(obj))
-        xpc_UnmarkGrayGCThingRecursive(obj, JSTRACE_OBJECT);
+        js::UnmarkGrayGCThingRecursively(obj, JSTRACE_OBJECT);
     else if (js::IsIncrementalBarrierNeededOnObject(obj))
         js::IncrementalReferenceBarrier(obj);
 }
@@ -165,7 +161,7 @@ xpc_UnmarkGrayScript(JSScript *script)
 {
     if (script) {
         if (xpc_IsGrayGCThing(script))
-            xpc_UnmarkGrayGCThingRecursive(script, JSTRACE_SCRIPT);
+            js::UnmarkGrayGCThingRecursively(script, JSTRACE_SCRIPT);
         else if (js::IsIncrementalBarrierNeededOnScript(script))
             js::IncrementalReferenceBarrier(script);
     }
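With the XPConnect-local recursion gone, these inline helpers are the remaining XPConnect-side entry points, and they call straight into the engine. A hypothetical call site, not part of this patch, showing where such a helper is typically used:

    // Illustrative only: code returning a possibly-gray reflector to active
    // script un-grays it on the way out.
    JSObject *
    GetCachedReflector(JSObject *cachedReflector)
    {
        if (cachedReflector)
            xpc_UnmarkNonNullGrayObject(cachedReflector);
        return cachedReflector;
    }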