bug 488414 - js_GenerateShape just schedules, rather than runs, the GC. r=brendan,gal

This commit is contained in:
Igor Bukanov 2009-04-20 10:17:28 +02:00
parent f10c82d6e3
commit 56547f8087
11 changed files with 159 additions and 125 deletions

View File

@ -5218,12 +5218,7 @@ JS_TriggerOperationCallback(JSContext *cx)
JS_PUBLIC_API(void)
JS_TriggerAllOperationCallbacks(JSRuntime *rt)
{
JSContext *acx, *iter;
JS_LOCK_GC(rt);
iter = NULL;
while ((acx = js_ContextIterator(rt, JS_FALSE, &iter)))
JS_TriggerOperationCallback(acx);
JS_UNLOCK_GC(rt);
js_TriggerAllOperationCallbacks(rt, JS_FALSE);
}
JS_PUBLIC_API(JSBool)

View File

@ -231,57 +231,58 @@ js_CallTree(InterpState* state, Fragment* f)
JSBool FASTCALL
js_AddProperty(JSContext* cx, JSObject* obj, JSScopeProperty* sprop)
{
JSScopeProperty* sprop2 = NULL; // initialize early to make MSVC happy
JS_ASSERT(OBJ_IS_NATIVE(obj));
JS_ASSERT(SPROP_HAS_STUB_SETTER(sprop));
JS_LOCK_OBJ(cx, obj);
JSScope* scope = OBJ_SCOPE(obj);
uint32 slot;
if (scope->object == obj) {
JS_ASSERT(!SCOPE_HAS_PROPERTY(scope, sprop));
} else {
scope = js_GetMutableScope(cx, obj);
if (!scope) {
JS_UNLOCK_OBJ(cx, obj);
return JS_FALSE;
}
if (!scope)
goto exit_trace;
}
uint32 slot = sprop->slot;
slot = sprop->slot;
if (!scope->table && sprop->parent == scope->lastProp && slot == scope->map.freeslot) {
if (slot < STOBJ_NSLOTS(obj) && !OBJ_GET_CLASS(cx, obj)->reserveSlots) {
JS_ASSERT(JSVAL_IS_VOID(STOBJ_GET_SLOT(obj, scope->map.freeslot)));
++scope->map.freeslot;
} else {
if (!js_AllocSlot(cx, obj, &slot)) {
JS_UNLOCK_SCOPE(cx, scope);
return JS_FALSE;
}
if (!js_AllocSlot(cx, obj, &slot))
goto exit_trace;
if (slot != sprop->slot) {
js_FreeSlot(cx, obj, slot);
goto slot_changed;
goto exit_trace;
}
}
SCOPE_EXTEND_SHAPE(cx, scope, sprop);
++scope->entryCount;
scope->lastProp = sprop;
JS_UNLOCK_SCOPE(cx, scope);
return JS_TRUE;
} else {
JSScopeProperty *sprop2 = js_AddScopeProperty(cx, scope, sprop->id,
sprop->getter,
sprop->setter,
SPROP_INVALID_SLOT,
sprop->attrs,
sprop->flags,
sprop->shortid);
if (sprop2 != sprop)
goto exit_trace;
}
sprop2 = js_AddScopeProperty(cx, scope, sprop->id,
sprop->getter, sprop->setter, SPROP_INVALID_SLOT,
sprop->attrs, sprop->flags, sprop->shortid);
if (sprop2 == sprop) {
JS_UNLOCK_SCOPE(cx, scope);
return JS_TRUE;
}
slot = sprop2->slot;
if (js_IsPropertyCacheDisabled(cx))
goto exit_trace;
slot_changed:
JS_UNLOCK_SCOPE(cx, scope);
return JS_TRUE;
exit_trace:
JS_UNLOCK_SCOPE(cx, scope);
return JS_FALSE;
}

View File

@ -1719,14 +1719,17 @@ js_InvokeOperationCallback(JSContext *cx)
cx->operationCallbackFlag = 0;
/*
* We automatically yield the current context every time the operation
* callback is hit since we might be called as a result of an impending
* GC, which would deadlock if we do not yield. Operation callbacks
* are supposed to happen rarely (seconds, not milliseconds) so it is
* acceptable to yield at every callback.
* Unless we are going to run the GC, we automatically yield the current
* context every time the operation callback is hit since we might be
* called as a result of an impending GC, which would deadlock if we do
* not yield. Operation callbacks are supposed to happen rarely (seconds,
* not milliseconds) so it is acceptable to yield at every callback.
*/
if (cx->runtime->gcIsNeeded)
js_GC(cx, GC_NORMAL);
#ifdef JS_THREADSAFE
JS_YieldRequest(cx);
else
JS_YieldRequest(cx);
#endif
JSOperationCallback cb = cx->operationCallback;
@ -1740,6 +1743,23 @@ js_InvokeOperationCallback(JSContext *cx)
return !cb || cb(cx);
}
/*
 * Request an operation callback on every live context in rt so that each
 * active thread reaches a safe point soon (used, e.g., to get a scheduled
 * GC actually run).
 *
 * gcLocked says whether the caller already holds the GC lock; when JS_FALSE
 * (threadsafe builds only) the lock is acquired around the context walk.
 */
void
js_TriggerAllOperationCallbacks(JSRuntime *rt, JSBool gcLocked)
{
JSContext *acx, *iter;
#ifdef JS_THREADSAFE
/* Take the GC lock only if the caller does not already hold it. */
if (!gcLocked)
JS_LOCK_GC(rt);
#endif
iter = NULL;
/*
 * NOTE(review): the JS_FALSE argument presumably selects the "GC lock
 * already held" iteration mode — confirm against js_ContextIterator.
 */
while ((acx = js_ContextIterator(rt, JS_FALSE, &iter)))
JS_TriggerOperationCallback(acx);
#ifdef JS_THREADSAFE
if (!gcLocked)
JS_UNLOCK_GC(rt);
#endif
}
JSStackFrame *
js_GetScriptedCaller(JSContext *cx, JSStackFrame *fp)
{

View File

@ -354,6 +354,7 @@ struct JSRuntime {
uint32 gcNumber;
JSTracer *gcMarkingTracer;
uint32 gcTriggerFactor;
volatile JSBool gcIsNeeded;
/*
* NB: do not pack another flag here by claiming gcPadding unless the new
@ -551,15 +552,16 @@ struct JSRuntime {
* Object shape (property cache structural type) identifier generator.
*
* Type 0 stands for the empty scope, and must not be regenerated due to
* uint32 wrap-around. Since we use atomic pre-increment, the initial
* value for the first typed non-empty scope will be 1.
* uint32 wrap-around. Since js_GenerateShape (in jsinterp.cpp) uses
* atomic pre-increment, the initial value for the first typed non-empty
* scope will be 1.
*
* The GC compresses live types, minimizing rt->shapeGen in the process.
* If this counter overflows into SHAPE_OVERFLOW_BIT (in jsinterp.h), the
* GC will disable property caches for all threads, to avoid aliasing two
* different types. Updated by js_GenerateShape (in jsinterp.c).
* cache is disabled, to avoid aliasing two different types. It stays
* disabled until a triggered GC at some later moment compresses live
* types, minimizing rt->shapeGen in the process.
*/
uint32 shapeGen;
volatile uint32 shapeGen;
/* Literal table maintained by jsatom.c functions. */
JSAtomState atomState;
@ -1417,6 +1419,14 @@ extern JSErrorFormatString js_ErrorFormatString[JSErr_Limit];
extern JSBool
js_InvokeOperationCallback(JSContext *cx);
#ifndef JS_THREADSAFE
# define js_TriggerAllOperationCallbacks(rt, gcLocked) \
js_TriggerAllOperationCallbacks (rt)
#endif
void
js_TriggerAllOperationCallbacks(JSRuntime *rt, JSBool gcLocked);
extern JSStackFrame *
js_GetScriptedCaller(JSContext *cx, JSStackFrame *fp);
@ -1468,6 +1478,28 @@ js_GetTopStackFrame(JSContext *cx)
return cx->fp;
}
/*
 * True when the shape generator has reached SHAPE_OVERFLOW_BIT, meaning new
 * shape ids could alias existing ones; the property cache must not be filled
 * until a later GC compresses live shapes and brings shapeGen back down.
 */
static JS_INLINE JSBool
js_IsPropertyCacheDisabled(JSContext *cx)
{
return cx->runtime->shapeGen >= SHAPE_OVERFLOW_BIT;
}
/*
 * Generate a fresh shape id while the GC is running (asserted below), when
 * no other thread can race on rt->shapeGen.
 */
static JS_INLINE uint32
js_RegenerateShapeForGC(JSContext *cx)
{
JS_ASSERT(cx->runtime->gcRunning);
/*
 * Under the GC, compared with js_GenerateShape, we don't need to use
 * atomic increments but we still must make sure that after an overflow
 * the shape stays such.
 */
uint32 shape = cx->runtime->shapeGen;
/* Increment, then re-OR the overflow bit so an overflowed state is sticky. */
shape = (shape + 1) | (shape & SHAPE_OVERFLOW_BIT);
cx->runtime->shapeGen = shape;
return shape;
}
JS_END_EXTERN_C
#endif /* jscntxt_h___ */

View File

@ -3177,6 +3177,24 @@ js_TraceRuntime(JSTracer *trc, JSBool allAtoms)
#endif
}
/*
 * Schedule a GC: set rt->gcIsNeeded and poke every context's operation
 * callback so some thread runs the GC at its next safe point. This does
 * not run the GC itself.
 */
void
js_TriggerGC(JSContext *cx, JSBool gcLocked)
{
JSRuntime *rt = cx->runtime;
JS_ASSERT(cx->requestDepth > 0);
JS_ASSERT(!rt->gcRunning);
/* A GC is already scheduled; the callbacks have been triggered. */
if (rt->gcIsNeeded)
return;
/*
 * Trigger the GC when it is safe to call an operation callback on any
 * thread.
 */
rt->gcIsNeeded = JS_TRUE;
js_TriggerAllOperationCallbacks(rt, gcLocked);
}
static void
ProcessSetSlotRequest(JSContext *cx, JSSetSlotRequest *ssr)
{
@ -3287,7 +3305,6 @@ js_GC(JSContext *cx, JSGCInvocationKind gckind)
JSBool allClear;
#ifdef JS_THREADSAFE
uint32 requestDebit;
JSContext *acx, *iter;
#endif
#ifdef JS_GCMETER
uint32 nlivearenas, nkilledarenas, nthings;
@ -3450,7 +3467,7 @@ js_GC(JSContext *cx, JSGCInvocationKind gckind)
* collect garbage only if a racing thread attempted GC and is waiting
* for us to finish (gcLevel > 1) or if someone already poked us.
*/
if (rt->gcLevel == 1 && !rt->gcPoke)
if (rt->gcLevel == 1 && !rt->gcPoke && !rt->gcIsNeeded)
goto done_running;
rt->gcLevel = 0;
@ -3471,6 +3488,9 @@ js_GC(JSContext *cx, JSGCInvocationKind gckind)
#endif
VOUCH_HAVE_STACK();
/* Clear gcIsNeeded now, when we are about to start a normal GC cycle. */
rt->gcIsNeeded = JS_FALSE;
/* Reset malloc counter. */
rt->gcMallocBytes = 0;
@ -3766,23 +3786,6 @@ out:
goto restart;
}
if (rt->shapeGen >= SHAPE_OVERFLOW_BIT - 1) {
/*
* FIXME bug 440834: The shape id space has overflowed. Currently we
* cope badly with this. Every call to js_GenerateShape does GC, and
* we never re-enable the property cache.
*/
js_DisablePropertyCache(cx);
#ifdef JS_THREADSAFE
iter = NULL;
while ((acx = js_ContextIterator(rt, JS_FALSE, &iter)) != NULL) {
if (!acx->thread || acx->thread == cx->thread)
continue;
js_DisablePropertyCache(acx);
}
#endif
}
rt->gcLastBytes = rt->gcBytes;
done_running:
rt->gcLevel = 0;

View File

@ -247,6 +247,16 @@ js_TraceRuntime(JSTracer *trc, JSBool allAtoms);
extern JS_REQUIRES_STACK JS_FRIEND_API(void)
js_TraceContext(JSTracer *trc, JSContext *acx);
/*
* Schedule the GC call at a later safe point.
*/
#ifndef JS_THREADSAFE
# define js_TriggerGC(cx, gcLocked) js_TriggerGC (cx)
#endif
extern void
js_TriggerGC(JSContext *cx, JSBool gcLocked);
/*
* Kinds of js_GC invocation.
*/

View File

@ -86,26 +86,23 @@
#if !JS_LONE_INTERPRET ^ defined jsinvoke_cpp___
uint32
js_GenerateShape(JSContext *cx, JSBool gcLocked, JSScopeProperty *sprop)
js_GenerateShape(JSContext *cx, JSBool gcLocked)
{
JSRuntime *rt;
uint32 shape;
JSTempValueRooter tvr;
rt = cx->runtime;
shape = JS_ATOMIC_INCREMENT(&rt->shapeGen);
JS_ASSERT(shape != 0);
if (shape & SHAPE_OVERFLOW_BIT) {
rt->gcPoke = JS_TRUE;
if (sprop)
JS_PUSH_TEMP_ROOT_SPROP(cx, sprop, &tvr);
js_GC(cx, gcLocked ? GC_LOCK_HELD : GC_NORMAL);
if (sprop)
JS_POP_TEMP_ROOT(cx, &tvr);
shape = JS_ATOMIC_INCREMENT(&rt->shapeGen);
JS_ASSERT(shape != 0);
JS_ASSERT_IF(shape & SHAPE_OVERFLOW_BIT,
JS_PROPERTY_CACHE(cx).disabled);
if (shape >= SHAPE_OVERFLOW_BIT) {
/*
* FIXME bug 440834: The shape id space has overflowed. Currently we
* cope badly with this and schedule the GC on every call. But
* first we make sure that increments from other threads would not
* have a chance to wrap around shapeGen to zero.
*/
rt->shapeGen = SHAPE_OVERFLOW_BIT;
js_TriggerGC(cx, gcLocked);
}
return shape;
}
@ -129,8 +126,9 @@ js_FillPropertyCache(JSContext *cx, JSObject *obj, jsuword kshape,
JS_ASSERT(!cx->runtime->gcRunning);
cache = &JS_PROPERTY_CACHE(cx);
pc = cx->fp->regs->pc;
if (cache->disabled || (cx->fp->flags & JSFRAME_EVAL)) {
/* FIXME bug 489098: consider enabling the property cache for eval. */
if (js_IsPropertyCacheDisabled(cx) || (cx->fp->flags & JSFRAME_EVAL)) {
PCMETER(cache->disfills++);
*entryp = NULL;
return;
@ -196,6 +194,7 @@ js_FillPropertyCache(JSContext *cx, JSObject *obj, jsuword kshape,
* Optimize the cached vword based on our parameters and the current pc's
* opcode format flags.
*/
pc = cx->fp->regs->pc;
op = js_GetOpcode(cx, cx->fp->script, pc);
cs = &js_CodeSpec[op];
@ -237,6 +236,14 @@ js_FillPropertyCache(JSContext *cx, JSObject *obj, jsuword kshape,
kshape);
#endif
SCOPE_MAKE_UNIQUE_SHAPE(cx, scope);
if (js_IsPropertyCacheDisabled(cx)) {
/*
* js_GenerateShape could not recover from
* rt->shapeGen's overflow.
*/
*entryp = NULL;
return;
}
SCOPE_SET_BRANDED(scope);
if (OBJ_SCOPE(obj) == scope)
kshape = scope->shape;
@ -531,20 +538,6 @@ js_PurgePropertyCacheForScript(JSContext *cx, JSScript *script)
}
}
void
js_DisablePropertyCache(JSContext *cx)
{
JS_ASSERT(JS_PROPERTY_CACHE(cx).disabled >= 0);
++JS_PROPERTY_CACHE(cx).disabled;
}
void
js_EnablePropertyCache(JSContext *cx)
{
--JS_PROPERTY_CACHE(cx).disabled;
JS_ASSERT(JS_PROPERTY_CACHE(cx).disabled >= 0);
}
/*
* Check if the current arena has enough space to fit nslots after sp and, if
* so, reserve the necessary space.

View File

@ -243,12 +243,12 @@ typedef struct JSInlineFrame {
#define SHAPE_OVERFLOW_BIT JS_BIT(32 - PCVCAP_TAGBITS)
/*
 * When sprop is not null and the shape generation triggers the GC due to a
 * shape overflow, the function roots sprop.
 */
#ifndef JS_THREADSAFE
# define js_GenerateShape(cx, gcLocked) js_GenerateShape (cx)
#endif
extern uint32
js_GenerateShape(JSContext *cx, JSBool gcLocked, JSScopeProperty *sprop);
js_GenerateShape(JSContext *cx, JSBool gcLocked);
struct JSPropCacheEntry {
jsbytecode *kpc; /* pc if vcap tag is <= 1, else atom */
@ -264,7 +264,6 @@ struct JSPropCacheEntry {
typedef struct JSPropertyCache {
JSPropCacheEntry table[PROPERTY_CACHE_SIZE];
JSBool empty;
jsrefcount disabled; /* signed for anti-underflow asserts */
#ifdef JS_PROPERTY_CACHE_METERING
uint32 fills; /* number of cache entry fills */
uint32 nofills; /* couldn't fill (e.g. default get) */
@ -409,12 +408,6 @@ js_PurgePropertyCache(JSContext *cx, JSPropertyCache *cache);
extern void
js_PurgePropertyCacheForScript(JSContext *cx, JSScript *script);
extern void
js_DisablePropertyCache(JSContext *cx);
extern void
js_EnablePropertyCache(JSContext *cx);
/*
* Interpreter stack arena-pool alloc and free functions.
*/

View File

@ -5817,17 +5817,13 @@ js_TraceObject(JSTracer *trc, JSObject *obj)
if (IS_GC_MARKING_TRACER(trc)) {
uint32 shape, oldshape;
shape = ++cx->runtime->shapeGen;
JS_ASSERT(shape != 0);
shape = js_RegenerateShapeForGC(cx);
if (!(sprop->flags & SPROP_MARK)) {
oldshape = sprop->shape;
sprop->shape = shape;
sprop->flags |= SPROP_FLAG_SHAPE_REGEN;
if (scope->shape != oldshape) {
shape = ++cx->runtime->shapeGen;
JS_ASSERT(shape != 0);
}
if (scope->shape != oldshape)
shape = js_RegenerateShapeForGC(cx);
}
scope->shape = shape;

View File

@ -788,8 +788,8 @@ HashChunks(PropTreeKidsChunk *chunk, uintN n)
* only when inserting a new child. Thus there may be races to find or add a
* node that result in duplicates. We expect such races to be rare!
*
* We use rt->gcLock, not rt->rtLock, to allow the GC potentially to nest here
* under js_GenerateShape.
* We use rt->gcLock, not rt->rtLock, to avoid nesting the former inside the
* latter in js_GenerateShape below.
*/
static JSScopeProperty *
GetPropertyTreeChild(JSContext *cx, JSScopeProperty *parent,
@ -801,7 +801,6 @@ GetPropertyTreeChild(JSContext *cx, JSScopeProperty *parent,
JSScopeProperty *sprop;
PropTreeKidsChunk *chunk;
uintN i, n;
uint32 shape;
rt = cx->runtime;
if (!parent) {
@ -888,12 +887,6 @@ GetPropertyTreeChild(JSContext *cx, JSScopeProperty *parent,
}
locked_not_found:
/*
* Call js_GenerateShape before the allocation to prevent collecting the
* new property when the shape generation triggers the GC.
*/
shape = js_GenerateShape(cx, JS_TRUE, NULL);
sprop = NewScopeProperty(rt);
if (!sprop)
goto out_of_memory;
@ -906,7 +899,7 @@ locked_not_found:
sprop->flags = child->flags;
sprop->shortid = child->shortid;
sprop->parent = sprop->kids = NULL;
sprop->shape = shape;
sprop->shape = js_GenerateShape(cx, JS_TRUE);
if (!parent) {
entry->child = sprop;
@ -1732,12 +1725,10 @@ js_SweepScopeProperties(JSContext *cx)
*/
if (sprop->flags & SPROP_MARK) {
sprop->flags &= ~SPROP_MARK;
if (sprop->flags & SPROP_FLAG_SHAPE_REGEN) {
if (sprop->flags & SPROP_FLAG_SHAPE_REGEN)
sprop->flags &= ~SPROP_FLAG_SHAPE_REGEN;
} else {
sprop->shape = ++cx->runtime->shapeGen;
JS_ASSERT(sprop->shape != 0);
}
else
sprop->shape = js_RegenerateShapeForGC(cx);
liveCount++;
continue;
}

View File

@ -217,7 +217,7 @@ struct JSScope {
#define OBJ_SHAPE(obj) (OBJ_SCOPE(obj)->shape)
#define SCOPE_MAKE_UNIQUE_SHAPE(cx,scope) \
((scope)->shape = js_GenerateShape((cx), JS_FALSE, NULL))
((scope)->shape = js_GenerateShape((cx), JS_FALSE))
#define SCOPE_EXTEND_SHAPE(cx,scope,sprop) \
JS_BEGIN_MACRO \
@ -225,7 +225,7 @@ struct JSScope {
(scope)->shape == (scope)->lastProp->shape) { \
(scope)->shape = (sprop)->shape; \
} else { \
(scope)->shape = js_GenerateShape((cx), JS_FALSE, sprop); \
(scope)->shape = js_GenerateShape(cx, JS_FALSE); \
} \
JS_END_MACRO