diff --git a/js/src/jsapi.cpp b/js/src/jsapi.cpp index 365e6316496..9730426f26d 100644 --- a/js/src/jsapi.cpp +++ b/js/src/jsapi.cpp @@ -4867,7 +4867,7 @@ JS_CompileUCFunctionForPrincipals(JSContext *cx, JSObject *obj, #endif out: - cx->weakRoots.newbornObject = FUN_OBJECT(fun); + cx->weakRoots.finalizableNewborns[FINALIZE_FUNCTION] = fun; JS_POP_TEMP_ROOT(cx, &tvr); out2: diff --git a/js/src/jsarray.cpp b/js/src/jsarray.cpp index 872792738a7..5ce76ece8af 100644 --- a/js/src/jsarray.cpp +++ b/js/src/jsarray.cpp @@ -3463,7 +3463,7 @@ js_NewArrayObject(JSContext *cx, jsuint length, jsval *vector, JSBool holey) JS_POP_TEMP_ROOT(cx, &tvr); /* Set/clear newborn root, in case we lost it. */ - cx->weakRoots.newbornObject = obj; + cx->weakRoots.finalizableNewborns[FINALIZE_OBJECT] = obj; return obj; } diff --git a/js/src/jscntxt.cpp b/js/src/jscntxt.cpp index 35a30b3619e..5ee5886fec9 100644 --- a/js/src/jscntxt.cpp +++ b/js/src/jscntxt.cpp @@ -93,6 +93,7 @@ FinishThreadData(JSThreadData *data) { #ifdef DEBUG /* All GC-related things must be already removed at this point. */ + data->gcFreeLists.assertEmpty(); for (size_t i = 0; i != JS_ARRAY_LENGTH(data->scriptsToGC); ++i) JS_ASSERT(!data->scriptsToGC[i]); for (size_t i = 0; i != JS_ARRAY_LENGTH(data->nativeEnumCache); ++i) @@ -109,10 +110,7 @@ FinishThreadData(JSThreadData *data) static void PurgeThreadData(JSContext *cx, JSThreadData *data) { - /* - * Clear the double free list to release all the pre-allocated doubles. - */ - data->doubleFreeList = NULL; + data->gcFreeLists.purge(); js_PurgeGSNCache(&data->gsnCache); @@ -271,15 +269,15 @@ thread_purger(JSDHashTable *table, JSDHashEntryHdr *hdr, uint32 /* index */, js_DestroyScriptsToGC(cx, &thread->data); /* - * The following is potentially suboptimal as it also zeros the cache + * The following is potentially suboptimal as it also zeros the caches * in data, but the code simplicity wins here. 
*/ + thread->data.gcFreeLists.purge(); js_PurgeCachedNativeEnumerators(cx, &thread->data); DestroyThread(thread); return JS_DHASH_REMOVE; } PurgeThreadData(cx, &thread->data); - memset(thread->gcFreeLists, 0, sizeof(thread->gcFreeLists)); return JS_DHASH_NEXT; } diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index 7adc2516cb9..e72dad65aab 100755 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -289,8 +289,7 @@ typedef struct JSFunctionMeter { #endif struct JSThreadData { - /* List of pre-allocated doubles. */ - JSGCDoubleCell *doubleFreeList; + JSGCFreeLists gcFreeLists; /* * The GSN cache is per thread since even multi-cx-per-thread embeddings @@ -366,8 +365,6 @@ struct JSThread { /* Indicates that the thread is waiting in ClaimTitle from jslock.cpp. */ JSTitle *titleToShare; - JSGCThing *gcFreeLists[FINALIZE_LIMIT]; - /* Factored out of JSThread for !JS_THREADSAFE embedding in JSRuntime. */ JSThreadData data; }; diff --git a/js/src/jsgc.cpp b/js/src/jsgc.cpp index 9dd02bb4a79..4dc7bae0775 100644 --- a/js/src/jsgc.cpp +++ b/js/src/jsgc.cpp @@ -168,11 +168,6 @@ JS_STATIC_ASSERT(JSTRACE_XML == 3); */ JS_STATIC_ASSERT(JSTRACE_STRING + 1 == JSTRACE_XML); -/* - * Check that we can reinterpret double as JSGCDoubleCell. - */ -JS_STATIC_ASSERT(sizeof(JSGCDoubleCell) == sizeof(double)); - /* * Check that we can use memset(p, 0, ...) to implement JS_CLEAR_WEAK_ROOTS. */ @@ -332,37 +327,33 @@ struct JSGCArenaInfo { #endif union { - jsuword untracedThings; /* bitset for fast search of marked - but not yet traced things */ - bool hasMarkedDoubles; /* the arena has marked doubles */ - } u; + struct { + JSGCThing *freeList; + jsuword untracedThings; /* bitset for fast search of marked + but not yet traced things */ + } finalizable; + + bool hasMarkedDoubles; /* the arena has marked doubles */ + }; #if JS_GC_ARENA_PAD != 0 uint8 pad[JS_GC_ARENA_PAD]; #endif }; -/* GC flag definitions, must fit in 8 bits (type index goes in the low bits). 
*/ -const uint8 GCF_MARK = JS_BIT(0); -const uint8 GCF_FINAL = JS_BIT(1); -const uint8 GCF_LOCK = JS_BIT(2); /* lock request bit in API */ +/* GC flag definitions, must fit in 8 bits. */ +const uint8 GCF_MARK = JS_BIT(0); +const uint8 GCF_LOCK = JS_BIT(1); /* lock request bit in API */ +const uint8 GCF_CHILDREN = JS_BIT(2); /* GC things with children to be + marked later. */ /* * The private JSGCThing struct, which describes a JSRuntime.gcFreeList element. */ struct JSGCThing { - JSGCThing *next; - uint8 *flagp; + JSGCThing *link; }; -/* - * Verify that the bit fields are indeed shared and JSGCArenaInfo is as small - * as possible. The code does not rely on this check so if on a particular - * platform this does not compile, then, as a workaround, comment the assert - * out and submit a bug report. - */ -JS_STATIC_ASSERT(offsetof(JSGCArenaInfo, u) == 3 * sizeof(jsuword)); - /* * Macros to convert between JSGCArenaInfo, the start address of the arena and * arena's page defined as (start address) >> GC_ARENA_SHIFT. 
@@ -460,8 +451,6 @@ JS_STATIC_ASSERT(1 <= js_gcArenasPerChunk &&
 #define THING_TO_INDEX(thing, thingSize)                                      \
     ((uint32) ((jsuword) (thing) & GC_ARENA_MASK) / (uint32) (thingSize))
 
-#define THING_FLAGS_END(arena) ((uint8 *)(arena))
-
 #define THING_FLAGP(arena, thingIndex)                                        \
     (JS_ASSERT((jsuword) (thingIndex)                                         \
                < (jsuword) THINGS_PER_ARENA((arena)->list->thingSize)),       \
@@ -482,6 +471,31 @@ JS_STATIC_ASSERT(1 <= js_gcArenasPerChunk &&
     (JSGCThing *)(((jsuword) (flagp) & ~GC_ARENA_MASK) +                      \
                   (thingSize) * FLAGP_TO_INDEX(flagp)))
 
+static inline JSGCThing *
+NextThing(JSGCThing *thing, size_t thingSize)
+{
+    return reinterpret_cast<JSGCThing *>(reinterpret_cast<jsuword>(thing) +
+                                         thingSize);
+}
+
+static inline JSGCThing *
+MakeNewArenaFreeList(JSGCArenaInfo *a, unsigned thingSize, size_t nthings)
+{
+    JS_ASSERT(nthings * thingSize < GC_ARENA_SIZE - sizeof(JSGCArenaInfo));
+
+    jsuword thingsStart = ARENA_INFO_TO_START(a);
+    JSGCThing *first = reinterpret_cast<JSGCThing *>(thingsStart);
+    JSGCThing *last = reinterpret_cast<JSGCThing *>(thingsStart +
+                                                    (nthings - 1) * thingSize);
+    for (JSGCThing *thing = first; thing != last;) {
+        JSGCThing *next = NextThing(thing, thingSize);
+        thing->link = next;
+        thing = next;
+    }
+    last->link = NULL;
+    return first;
+}
+
 /*
  * Macros for the specialized arena for doubles.
  *
@@ -636,25 +650,15 @@ IsMarkedDouble(JSGCArenaInfo *a, uint32 index)
 {
     jsbitmap *bitmap;
 
-    JS_ASSERT(a->u.hasMarkedDoubles);
+    JS_ASSERT(a->hasMarkedDoubles);
     bitmap = DOUBLE_ARENA_BITMAP(a);
     return JS_TEST_BIT(bitmap, index);
 }
 
-#ifdef JS_THREADSAFE
-/*
- * The maximum number of things to put on the local free list by taking
- * several things from the global free list or from the tail of the last
- * allocated arena to amortize the cost of rt->gcLock.
- */ -#define MAX_THREAD_LOCAL_THINGS 64 - -#endif - JS_STATIC_ASSERT(sizeof(JSStackHeader) >= 2 * sizeof(jsval)); -JS_STATIC_ASSERT(sizeof(JSGCThing) >= sizeof(JSString)); -JS_STATIC_ASSERT(sizeof(JSGCThing) >= sizeof(jsdouble)); +JS_STATIC_ASSERT(sizeof(JSGCThing) <= sizeof(JSString)); +JS_STATIC_ASSERT(sizeof(JSGCThing) <= sizeof(jsdouble)); /* We want to use all the available GC thing space for object's slots. */ JS_STATIC_ASSERT(sizeof(JSObject) % sizeof(JSGCThing) == 0); @@ -999,7 +1003,6 @@ NewGCArena(JSRuntime *rt) rt->gcBytes += GC_ARENA_SIZE; a->prevUntracedPage = 0; - memset(&a->u, 0, sizeof(a->u)); return a; } @@ -1095,10 +1098,9 @@ GetFinalizableThingSize(unsigned thingKind) } static inline size_t -GetFinalizableArenaTraceKind(JSGCArenaInfo *a) +GetFinalizableTraceKind(size_t thingKind) { JS_STATIC_ASSERT(JS_EXTERNAL_STRING_LIMIT == 8); - JS_ASSERT(a->list); static const uint8 map[FINALIZE_LIMIT] = { JSTRACE_OBJECT, /* FINALIZE_OBJECT */ @@ -1117,8 +1119,15 @@ GetFinalizableArenaTraceKind(JSGCArenaInfo *a) JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING7 */ }; - JS_ASSERT(a->list->thingKind < FINALIZE_LIMIT); - return map[a->list->thingKind]; + JS_ASSERT(thingKind < FINALIZE_LIMIT); + return map[thingKind]; +} + +static inline size_t +GetFinalizableArenaTraceKind(JSGCArenaInfo *a) +{ + JS_ASSERT(a->list); + return GetFinalizableTraceKind(a->list->thingKind); } static void @@ -1126,13 +1135,12 @@ InitGCArenaLists(JSRuntime *rt) { for (unsigned i = 0; i != FINALIZE_LIMIT; ++i) { JSGCArenaList *arenaList = &rt->gcArenaList[i]; - arenaList->thingSize = GetFinalizableThingSize(i); - arenaList->last = NULL; - arenaList->lastCount = THINGS_PER_ARENA(arenaList->thingSize); + arenaList->head = NULL; + arenaList->cursor = NULL; arenaList->thingKind = i; - arenaList->freeList = NULL; + arenaList->thingSize = GetFinalizableThingSize(i); } - rt->gcDoubleArenaList.first = NULL; + rt->gcDoubleArenaList.head = NULL; rt->gcDoubleArenaList.cursor = NULL; } @@ -1141,13 
+1149,12 @@ FinishGCArenaLists(JSRuntime *rt) { for (unsigned i = 0; i < FINALIZE_LIMIT; i++) { JSGCArenaList *arenaList = &rt->gcArenaList[i]; - DestroyGCArenas(rt, arenaList->last); - arenaList->last = NULL; - arenaList->lastCount = THINGS_PER_ARENA(arenaList->thingSize); - arenaList->freeList = NULL; + DestroyGCArenas(rt, arenaList->head); + arenaList->head = NULL; + arenaList->cursor = NULL; } - DestroyGCArenas(rt, rt->gcDoubleArenaList.first); - rt->gcDoubleArenaList.first = NULL; + DestroyGCArenas(rt, rt->gcDoubleArenaList.head); + rt->gcDoubleArenaList.head = NULL; rt->gcDoubleArenaList.cursor = NULL; rt->gcBytes = 0; @@ -1218,14 +1225,14 @@ js_IsAboutToBeFinalized(JSContext *cx, void *thing) * the mark flags contains all garbage as it is initialized only when * marking the first double in the arena. */ - if (!a->u.hasMarkedDoubles) + if (!a->hasMarkedDoubles) return JS_TRUE; index = DOUBLE_THING_TO_INDEX(thing); return !IsMarkedDouble(a, index); } index = THING_TO_INDEX(thing, a->list->thingSize); flags = *THING_FLAGP(a, index); - return !(flags & (GCF_MARK | GCF_LOCK | GCF_FINAL)); + return !(flags & (GCF_MARK | GCF_LOCK)); } /* This is compatible with JSDHashEntryStub. */ @@ -1713,21 +1720,6 @@ CloseNativeIterators(JSContext *cx) ShrinkPtrTable(&rt->gcIteratorTable, &iteratorTableInfo, newCount); } -#if defined(DEBUG_brendan) || defined(DEBUG_timeless) -#define DEBUG_gchist -#endif - -#ifdef DEBUG_gchist -#define NGCHIST 64 - -static struct GCHist { - bool lastDitch; - JSGCThing *freeList; -} gchist[NGCHIST]; - -unsigned gchpos = 0; -#endif - void JSRuntime::setGCTriggerFactor(uint32 factor) { @@ -1747,7 +1739,29 @@ JSRuntime::setGCLastBytes(size_t lastBytes) gcTriggerBytes = size_t(triggerBytes); } -static JS_INLINE bool + +void +JSGCFreeLists::purge() +{ + /* + * Return the free list back to the arena so the GC finalization will not + * run the finalizers over unitialized bytes from free things. 
+     */
+    for (size_t i = 0; i != JS_ARRAY_LENGTH(finalizables); ++i) {
+        JSGCThing *thing = finalizables[i];
+        if (thing) {
+            JSGCArenaInfo *a = THING_TO_ARENA(thing);
+            JS_ASSERT(a->list->thingKind == i);
+            JS_ASSERT(!a->finalizable.freeList);
+            a->finalizable.freeList = thing;
+        }
+    }
+
+    doubles = NULL;
+    memset(finalizables, 0, sizeof(finalizables));
+}
+
+static inline bool
 IsGCThresholdReached(JSRuntime *rt)
 {
 #ifdef JS_GC_ZEAL
@@ -1764,59 +1778,21 @@ IsGCThresholdReached(JSRuntime *rt)
            rt->gcBytes >= rt->gcTriggerBytes;
 }
 
-template <typename T, typename NewbornType>
-static inline T*
-NewFinalizableGCThing(JSContext *cx, unsigned thingKind,
-                      NewbornType** newbornRoot)
+static JSGCThing *
+RefillFinalizableFreeList(JSContext *cx, unsigned thingKind)
 {
-    JSRuntime *rt;
-    bool doGC;
-    JSGCThing *thing;
-    uint8 *flagp;
-    JSGCArenaList *arenaList;
-    JSGCArenaInfo *a;
-    uintN thingsLimit;
-    JSLocalRootStack *lrs;
-#ifdef JS_GCMETER
-    JSGCArenaStats *astats;
-#endif
-#ifdef JS_THREADSAFE
-    JSBool gcLocked;
-    uintN localMallocBytes;
-    JSGCThing **lastptr;
-    JSGCThing *tmpthing;
-    uint8 *tmpflagp;
-    uintN maxFreeThings;      /* max to take from the global free list */
-#endif
-
-    rt = cx->runtime;
-
-    size_t nbytes = sizeof(T);
-    JS_ASSERT(nbytes == GetFinalizableThingSize(thingKind));
-    JS_ASSERT(JS_ROUNDUP(nbytes, sizeof(JSGCThing)) == nbytes);
-
-    /* Updates of metering counters here may not be thread-safe.
*/ - METER(astats = &cx->runtime->gcStats.arenaStats[flindex]); - METER(astats->alloc++); - -#ifdef JS_THREADSAFE - gcLocked = JS_FALSE; - JS_ASSERT(cx->thread); - - JSGCThing *&freeList = cx->thread->gcFreeLists[thingKind]; - thing = freeList; - localMallocBytes = JS_THREAD_DATA(cx)->gcMallocBytes; - if (thing && rt->gcMaxMallocBytes - rt->gcMallocBytes > localMallocBytes) { - flagp = thing->flagp; - freeList = thing->next; - METER(astats->localalloc++); - goto success; + JSRuntime *rt = cx->runtime; + JS_LOCK_GC(rt); + JS_ASSERT(!rt->gcRunning); + if (rt->gcRunning) { + METER(rt->gcStats.finalfail++); + JS_UNLOCK_GC(rt); + return NULL; } - JS_LOCK_GC(rt); - gcLocked = JS_TRUE; - +#ifdef JS_THREADSAFE /* Transfer thread-local counter to global one. */ + size_t localMallocBytes = JS_THREAD_DATA(cx)->gcMallocBytes; if (localMallocBytes != 0) { JS_THREAD_DATA(cx)->gcMallocBytes = 0; if (rt->gcMaxMallocBytes - rt->gcMallocBytes < localMallocBytes) @@ -1825,26 +1801,14 @@ NewFinalizableGCThing(JSContext *cx, unsigned thingKind, rt->gcMallocBytes += localMallocBytes; } #endif - JS_ASSERT(!rt->gcRunning); - if (rt->gcRunning) { - METER(rt->gcStats.finalfail++); - JS_UNLOCK_GC(rt); - return NULL; - } -#if defined JS_GC_ZEAL && defined JS_TRACER - if (rt->gcZeal >= 1 && JS_TRACE_MONITOR(cx).useReservedObjects) - goto testReservedObjects; -#endif - - arenaList = &rt->gcArenaList[thingKind]; - doGC = IsGCThresholdReached(rt); + METER(JSGCArenaStats *astats = &cx->runtime->gcStats.arenaStats[thingKind]); + bool canGC = !JS_ON_TRACE(cx); + bool doGC = canGC && IsGCThresholdReached(rt); + JSGCArenaList *arenaList = &rt->gcArenaList[thingKind]; + JSGCArenaInfo *a; for (;;) { - if (doGC -#ifdef JS_TRACER - && !JS_ON_TRACE(cx) && !JS_TRACE_MONITOR(cx).useReservedObjects -#endif - ) { + if (doGC) { /* * Keep rt->gcLock across the call into js_GC so we don't starve * and lose to racing threads who deplete the heap just after @@ -1855,124 +1819,116 @@ NewFinalizableGCThing(JSContext 
*cx, unsigned thingKind, */ js_GC(cx, GC_LAST_DITCH); METER(astats->retry++); + canGC = false; } - /* Try to get thing from the free list. */ - thing = arenaList->freeList; - if (thing) { - arenaList->freeList = thing->next; - flagp = thing->flagp; - JS_ASSERT(*flagp & GCF_FINAL); - -#ifdef JS_THREADSAFE - /* - * Refill the local free list by taking several things from the - * global free list unless the free list is already populated or - * we are still at rt->gcMaxMallocBytes barrier. The former is - * caused via allocating new things in gcCallback(cx, JSGC_END). - * The latter happens when GC is canceled due to - * gcCallback(cx, JSGC_BEGIN) returning false. - */ - if (freeList || rt->gcMallocBytes >= rt->gcMaxMallocBytes) - break; - - tmpthing = arenaList->freeList; - if (tmpthing) { - maxFreeThings = MAX_THREAD_LOCAL_THINGS; - do { - if (!tmpthing->next) - break; - tmpthing = tmpthing->next; - } while (--maxFreeThings != 0); - - freeList = arenaList->freeList; - arenaList->freeList = tmpthing->next; - tmpthing->next = NULL; + while ((a = arenaList->cursor) != NULL) { + arenaList->cursor = a->prev; + JSGCThing *freeList = a->finalizable.freeList; + if (freeList) { + a->finalizable.freeList = NULL; + JS_UNLOCK_GC(rt); + return freeList; } -#endif + } + + a = NewGCArena(rt); + if (a) break; + if (!canGC) { + METER(astats->fail++); + JS_UNLOCK_GC(rt); + return NULL; } - - /* - * Try to allocate things from the last arena. If it is fully used, - * check if we can allocate a new one and, if we cannot, consider - * doing a "last ditch" GC unless already tried. 
- */ - thingsLimit = THINGS_PER_ARENA(nbytes); - if (arenaList->lastCount != thingsLimit) { - JS_ASSERT(arenaList->lastCount < thingsLimit); - a = arenaList->last; - } else { -#ifdef JS_TRACER - if (JS_TRACE_MONITOR(cx).useReservedObjects) { -#ifdef JS_GC_ZEAL -testReservedObjects: -#endif - JSTraceMonitor *tm = &JS_TRACE_MONITOR(cx); - - thing = (JSGCThing *) tm->reservedObjects; - flagp = GetGCThingFlags(thing); - JS_ASSERT(thing); - tm->reservedObjects = JSVAL_TO_OBJECT(tm->reservedObjects->fslots[0]); - break; - } -#endif - - a = NewGCArena(rt); - if (!a) { - if (doGC || JS_ON_TRACE(cx)) - goto fail; - doGC = true; - continue; - } - a->list = arenaList; - a->prev = arenaList->last; - a->prevUntracedPage = 0; - a->u.untracedThings = 0; - arenaList->last = a; - arenaList->lastCount = 0; - } - - flagp = THING_FLAGP(a, arenaList->lastCount); - thing = FLAGP_TO_THING(flagp, nbytes); - arenaList->lastCount++; - -#ifdef JS_THREADSAFE - /* - * Refill the local free list by taking free things from the last - * arena. Prefer to order free things by ascending address in the - * (unscientific) hope of better cache locality. - */ - if (freeList || rt->gcMallocBytes >= rt->gcMaxMallocBytes) - break; - lastptr = &freeList; - maxFreeThings = thingsLimit - arenaList->lastCount; - if (maxFreeThings > MAX_THREAD_LOCAL_THINGS) - maxFreeThings = MAX_THREAD_LOCAL_THINGS; - uint32 lastCount = arenaList->lastCount; - while (maxFreeThings != 0) { - --maxFreeThings; - - tmpflagp = THING_FLAGP(a, lastCount); - tmpthing = FLAGP_TO_THING(tmpflagp, nbytes); - lastCount++; - tmpthing->flagp = tmpflagp; - *tmpflagp = GCF_FINAL; /* signifying that thing is free */ - - *lastptr = tmpthing; - lastptr = &tmpthing->next; - } - *lastptr = NULL; - arenaList->lastCount = lastCount; -#endif - break; + doGC = true; } - /* We successfully allocated the thing. */ + /* + * Do only minimal initialization of the arena inside the GC lock. 
We + * can do the rest outside the lock because no other threads will see + * the arena until the GC is run. + */ + a->list = arenaList; + a->prev = arenaList->head; + a->prevUntracedPage = 0; + a->finalizable.untracedThings = 0; + a->finalizable.freeList = NULL; + arenaList->head = a; + JS_UNLOCK_GC(rt); + + unsigned nthings = THINGS_PER_ARENA(arenaList->thingSize); + uint8 *flagsStart = THING_FLAGP(a, nthings - 1); + memset(flagsStart, 0, nthings); + + /* Turn all things in the arena into a free list. */ + return MakeNewArenaFreeList(a, arenaList->thingSize, nthings); +} + +void * +NewFinalizableGCThing(JSContext *cx, unsigned thingKind) +{ + JS_ASSERT(thingKind < FINALIZE_LIMIT); #ifdef JS_THREADSAFE - success: + JS_ASSERT(cx->thread); #endif - lrs = cx->localRootStack; + + /* Updates of metering counters here may not be thread-safe. */ + METER(cx->runtime->gcStats.arenaStats[thingKind].alloc++); + + JSGCThing **freeListp = + JS_THREAD_DATA(cx)->gcFreeLists.finalizables + thingKind; + JSGCThing *thing = *freeListp; + JSRuntime *rt = cx->runtime; +#ifdef JS_THREADSAFE + bool tooMuchMalloc = (rt->gcMaxMallocBytes - rt->gcMallocBytes <= + JS_THREAD_DATA(cx)->gcMallocBytes); +#else + bool tooMuchMalloc = (rt->gcMaxMallocBytes <= rt->gcMallocBytes); +#endif +#ifdef JS_TRACER + bool fromTraceReserve = false; +#endif + + for (;;) { + if (thing && !tooMuchMalloc) { + *freeListp = thing->link; + METER(astats->localalloc++); + break; + } + +#if defined JS_GC_ZEAL && defined JS_TRACER + if (rt->gcZeal >= 1 && JS_TRACE_MONITOR(cx).useReservedObjects) + goto testReservedObjects; +#endif + + thing = RefillFinalizableFreeList(cx, thingKind); + if (thing) { + *freeListp = thing->link; + break; + } + +#ifdef JS_TRACER + if (JS_TRACE_MONITOR(cx).useReservedObjects) { +#ifdef JS_GC_ZEAL + testReservedObjects: +#endif + JS_ASSERT(JS_ON_TRACE(cx)); + JS_ASSERT(thingKind == FINALIZE_OBJECT || + thingKind == FINALIZE_FUNCTION); + JSTraceMonitor *tm = &JS_TRACE_MONITOR(cx); + thing = 
(JSGCThing *) tm->reservedObjects; + JS_ASSERT(thing); + tm->reservedObjects = JSVAL_TO_OBJECT(tm->reservedObjects->fslots[0]); + fromTraceReserve = true; + break; + } +#endif + + js_ReportOutOfMemory(cx); + return NULL; + } + + JSLocalRootStack *lrs = cx->localRootStack; if (lrs) { /* * If we're in a local root scope, don't set newborn[type] at all, to @@ -1983,99 +1939,41 @@ testReservedObjects: */ if (js_PushLocalRoot(cx, lrs, (jsval) thing) < 0) { /* - * When we fail for a thing allocated through the tail of the last - * arena, thing's flag byte is not initialized. So to prevent GC - * accessing the uninitialized flags during the finalization, we - * always mark the thing as final. See bug 337407. + * When we fail for a thing allocated from a free list, not from + * the reserved pool, the thing is not initialized. To prevent GC + * running the finalizer on the thing, we add the thing back to + * the free list. See bug 337407. */ - *flagp = GCF_FINAL; - goto fail; +#ifdef JS_TRACER + if (!fromTraceReserve) +#endif + { + JS_ASSERT(thing->link == *freeListp); + *freeListp = thing; + } + return NULL; } } else { /* * No local root scope, so we're stuck with the old, fragile model of * depending on a pigeon-hole newborn per type per context. */ - *newbornRoot = (T *) thing; + cx->weakRoots.finalizableNewborns[thingKind] = thing; } - /* We can't fail now, so update flags. */ - *flagp = uint8(0); - -#ifdef DEBUG_gchist - gchist[gchpos].lastDitch = doGC; - gchist[gchpos].freeList = rt->gcArenaList[thingKind].freeList; - if (++gchpos == NGCHIST) - gchpos = 0; -#endif - - /* This is not thread-safe for thread-local allocations. 
*/ - METER_IF(flags & GCF_LOCK, rt->gcStats.lockborn++); - -#ifdef JS_THREADSAFE - if (gcLocked) - JS_UNLOCK_GC(rt); -#endif - return (T*)thing; - -fail: -#ifdef JS_THREADSAFE - if (gcLocked) - JS_UNLOCK_GC(rt); -#endif - METER(astats->fail++); - js_ReportOutOfMemory(cx); - return NULL; + return thing; } -JSObject * -js_NewGCObject(JSContext *cx) -{ - return NewFinalizableGCThing(cx, FINALIZE_OBJECT, - &cx->weakRoots.newbornObject); -} - -JSString * -js_NewGCString(JSContext *cx) -{ - return NewFinalizableGCThing(cx, FINALIZE_STRING, - &cx->weakRoots.newbornString); -} - -JSString * -js_NewGCExternalString(JSContext *cx, uintN type) -{ - JS_ASSERT(type < JS_EXTERNAL_STRING_LIMIT); - return NewFinalizableGCThing(cx, FINALIZE_EXTERNAL_STRING0 + type, - &cx->weakRoots.newbornExternalString[type]); -} - -JSFunction * -js_NewGCFunction(JSContext *cx) -{ - return NewFinalizableGCThing(cx, FINALIZE_FUNCTION, - &cx->weakRoots.newbornObject); -} - -#if JS_HAS_XML_SUPPORT -JSXML * -js_NewGCXML(JSContext *cx) -{ - return NewFinalizableGCThing(cx,FINALIZE_XML, - &cx->weakRoots.newbornXML); -} -#endif - -static JSGCDoubleCell * +static JSGCThing * TurnUsedArenaIntoDoubleList(JSGCArenaInfo *a) { - JSGCDoubleCell *head; - JSGCDoubleCell **tailp = &head; - jsuword cellptr = ARENA_INFO_TO_START(a); + JSGCThing *head; + JSGCThing **tailp = &head; + jsuword thing = ARENA_INFO_TO_START(a); /* * When m below points the last bitmap's word in the arena, its high bits - * corresponds to non-existing cells and cellptr is outside the space + * corresponds to non-existing cells and thingptr is outside the space * allocated for doubles. ClearDoubleArenaFlags sets such bits to 1. Thus * even for this last word its bit is unset iff the corresponding cell * exists and free. 
@@ -2083,13 +1981,13 @@ TurnUsedArenaIntoDoubleList(JSGCArenaInfo *a)
     for (jsbitmap *m = DOUBLE_ARENA_BITMAP(a);
          m != DOUBLE_ARENA_BITMAP_END(a);
          ++m) {
-        JS_ASSERT(cellptr < reinterpret_cast<jsuword>(DOUBLE_ARENA_BITMAP(a)));
-        JS_ASSERT((cellptr - ARENA_INFO_TO_START(a)) %
+        JS_ASSERT(thing < reinterpret_cast<jsuword>(DOUBLE_ARENA_BITMAP(a)));
+        JS_ASSERT((thing - ARENA_INFO_TO_START(a)) %
                   (JS_BITS_PER_WORD * sizeof(jsdouble)) == 0);
         jsbitmap bits = *m;
         if (bits == jsbitmap(-1)) {
-            cellptr += JS_BITS_PER_WORD * sizeof(jsdouble);
+            thing += JS_BITS_PER_WORD * sizeof(jsdouble);
         } else {
             /*
              * We have some zero bits. Turn corresponding cells into a list
@@ -2103,18 +2001,17 @@ TurnUsedArenaIntoDoubleList(JSGCArenaInfo *a)
                 jsbitmap bitsChunk = bits & unrollMask;
                 bits >>= unroll;
                 if (bitsChunk == unrollMask) {
-                    cellptr += unroll * sizeof(jsdouble);
+                    thing += unroll * sizeof(jsdouble);
                 } else {
 #define DO_BIT(bit)                                                           \
                     if (!(bitsChunk & (jsbitmap(1) << (bit)))) {              \
-                        JS_ASSERT(cellptr - ARENA_INFO_TO_START(a) <=         \
+                        JS_ASSERT(thing - ARENA_INFO_TO_START(a) <=           \
                                   (DOUBLES_PER_ARENA - 1) * sizeof(jsdouble));\
-                        JSGCDoubleCell *cell =                                \
-                            reinterpret_cast<JSGCDoubleCell *>(cellptr);      \
-                        *tailp = cell;                                        \
-                        tailp = &cell->link;                                  \
+                        JSGCThing *t = reinterpret_cast<JSGCThing *>(thing);  \
+                        *tailp = t;                                           \
+                        tailp = &t->link;                                     \
                     }                                                         \
-                    cellptr += sizeof(jsdouble);
+                    thing += sizeof(jsdouble);
                     DO_BIT(0);
                     DO_BIT(1);
                     DO_BIT(2);
@@ -2128,24 +2025,10 @@ TurnUsedArenaIntoDoubleList(JSGCArenaInfo *a)
     return head;
 }
 
-static JSGCDoubleCell *
-TurnEmptyArenaIntoDoubleList(JSGCArenaInfo *a)
-{
-    JSGCDoubleCell *head =
-        reinterpret_cast<JSGCDoubleCell *>(ARENA_INFO_TO_START(a));
-    JSGCDoubleCell *lastcell = head + DOUBLES_PER_ARENA - 1;
-
-    for (JSGCDoubleCell *cell = head; cell != lastcell; ++cell)
-        cell->link = cell + 1;
-
-    lastcell->link = NULL;
-    return head;
-}
-
-static JSGCDoubleCell *
+static JSGCThing *
 RefillDoubleFreeList(JSContext *cx)
 {
-    JS_ASSERT(!JS_THREAD_DATA(cx)->doubleFreeList);
+    JS_ASSERT(!JS_THREAD_DATA(cx)->gcFreeLists.doubles);
JSRuntime *rt = cx->runtime; JS_ASSERT(!rt->gcRunning); @@ -2168,10 +2051,10 @@ RefillDoubleFreeList(JSContext *cx) * threads. */ while (!!(a = rt->gcDoubleArenaList.cursor)) { - JS_ASSERT(!a->u.hasMarkedDoubles); + JS_ASSERT(!a->hasMarkedDoubles); rt->gcDoubleArenaList.cursor = a->prev; JS_UNLOCK_GC(rt); - JSGCDoubleCell *list = TurnUsedArenaIntoDoubleList(a); + JSGCThing *list = TurnUsedArenaIntoDoubleList(a); if (list) return list; JS_LOCK_GC(rt); @@ -2189,11 +2072,11 @@ RefillDoubleFreeList(JSContext *cx) } a->list = NULL; - a->u.hasMarkedDoubles = false; - a->prev = rt->gcDoubleArenaList.first; - rt->gcDoubleArenaList.first = a; + a->hasMarkedDoubles = false; + a->prev = rt->gcDoubleArenaList.head; + rt->gcDoubleArenaList.head = a; JS_UNLOCK_GC(rt); - return TurnEmptyArenaIntoDoubleList(a); + return MakeNewArenaFreeList(a, sizeof(jsdouble), DOUBLES_PER_ARENA); } JSBool @@ -2203,10 +2086,10 @@ js_NewDoubleInRootedValue(JSContext *cx, jsdouble d, jsval *vp) METER(JSGCArenaStats *astats = &cx->runtime->gcStats.doubleArenaStats); METER(astats->alloc++); - JSGCDoubleCell *cell = JS_THREAD_DATA(cx)->doubleFreeList; - if (!cell) { - cell = RefillDoubleFreeList(cx); - if (!cell) { + JSGCThing *thing = JS_THREAD_DATA(cx)->gcFreeLists.doubles; + if (!thing) { + thing = RefillDoubleFreeList(cx); + if (!thing) { METER(astats->fail++); return false; } @@ -2218,13 +2101,14 @@ js_NewDoubleInRootedValue(JSContext *cx, jsdouble d, jsval *vp) * The GC things on the free lists come from one arena and the things on * the free list are strictly in the ascending order. 
      */
-    JS_ASSERT_IF(cell->link,
-                 THING_TO_ARENA(cell) == THING_TO_ARENA(cell->link));
-    JS_ASSERT_IF(cell->link, cell < cell->link);
+    JS_ASSERT_IF(thing->link,
+                 THING_TO_ARENA(thing) == THING_TO_ARENA(thing->link));
+    JS_ASSERT_IF(thing->link, thing < thing->link);
 
-    JS_THREAD_DATA(cx)->doubleFreeList = cell->link;
-    cell->number = d;
-    *vp = DOUBLE_TO_JSVAL(&cell->number);
+    JS_THREAD_DATA(cx)->gcFreeLists.doubles = thing->link;
+    jsdouble *dp = reinterpret_cast<jsdouble *>(thing);
+    *dp = d;
+    *vp = DOUBLE_TO_JSVAL(dp);
     return true;
 }
 
@@ -2252,27 +2136,30 @@ js_NewWeaklyRootedDouble(JSContext *cx, jsdouble d)
 JSBool
 js_ReserveObjects(JSContext *cx, size_t nobjects)
 {
-    /*
-     * Ensure at least nobjects objects are in the list. fslots[1] of each
-     * object on the reservedObjects list is the length of the list from there.
-     */
-    JSObject *&head = JS_TRACE_MONITOR(cx).reservedObjects;
-    size_t i = head ? JSVAL_TO_INT(head->fslots[1]) : 0;
-    while (i < nobjects) {
-        JSObject *obj = js_NewGCObject(cx);
-        if (!obj)
-            return JS_FALSE;
-        memset(obj, 0, sizeof(JSObject));
-        /* The class must be set to something for finalization. */
-        obj->classword = (jsuword) &js_ObjectClass;
-        obj->fslots[0] = OBJECT_TO_JSVAL(head);
-        i++;
-        obj->fslots[1] = INT_TO_JSVAL(i);
-        head = obj;
-    }
+    /*
+     * Ensure at least nobjects objects are in the list. fslots[1] of each
+     * object on the reservedObjects list is the length of the list to this
+     * object.
+     */
+    JSObject *&head = JS_TRACE_MONITOR(cx).reservedObjects;
+    size_t i = head ? JSVAL_TO_INT(head->fslots[1]) : 0;
+    while (i < nobjects) {
+        JSObject *obj = js_NewGCObject(cx);
+        if (!obj)
+            return JS_FALSE;
+        memset(obj, 0, sizeof(JSObject));
-    return JS_TRUE;
+        /* The class must be set to something for finalization.
*/ + obj->classword = (jsuword) &js_ObjectClass; + obj->fslots[0] = OBJECT_TO_JSVAL(head); + i++; + obj->fslots[1] = INT_TO_JSVAL(i); + head = obj; + } + + return JS_TRUE; } + #endif /* @@ -2422,7 +2309,7 @@ JS_TraceChildren(JSTracer *trc, void *thing, uint32 kind) } /* - * Number of things covered by a single bit of JSGCArenaInfo.u.untracedThings. + * Number of things covered by a single bit of JSGCArenaInfo.untracedThings. */ #define THINGS_PER_UNTRACED_BIT(thingSize) \ JS_HOWMANY(THINGS_PER_ARENA(thingSize), JS_BITS_PER_WORD) @@ -2434,12 +2321,8 @@ DelayTracingChildren(JSRuntime *rt, uint8 *flagp) uint32 untracedBitIndex; jsuword bit; - /* - * Things with children to be traced later are marked with - * GCF_MARK | GCF_FINAL flags. - */ - JS_ASSERT((*flagp & (GCF_MARK | GCF_FINAL)) == GCF_MARK); - *flagp |= GCF_FINAL; + JS_ASSERT(!(*flagp & GCF_CHILDREN)); + *flagp |= GCF_CHILDREN; METER(rt->gcStats.untraced++); #ifdef DEBUG @@ -2452,13 +2335,13 @@ DelayTracingChildren(JSRuntime *rt, uint8 *flagp) THINGS_PER_UNTRACED_BIT(a->list->thingSize); JS_ASSERT(untracedBitIndex < JS_BITS_PER_WORD); bit = (jsuword)1 << untracedBitIndex; - if (a->u.untracedThings != 0) { + if (a->finalizable.untracedThings != 0) { JS_ASSERT(rt->gcUntracedArenaStackTop); - if (a->u.untracedThings & bit) { + if (a->finalizable.untracedThings & bit) { /* bit already covers things with children to trace later. */ return; } - a->u.untracedThings |= bit; + a->finalizable.untracedThings |= bit; } else { /* * The thing is the first thing with not yet traced children in the @@ -2472,7 +2355,7 @@ DelayTracingChildren(JSRuntime *rt, uint8 *flagp) * * See comments in TraceDelayedChildren. */ - a->u.untracedThings = bit; + a->finalizable.untracedThings = bit; if (a->prevUntracedPage == 0) { if (!rt->gcUntracedArenaStackTop) { /* Stack was empty, mark the arena as the bottom element. 
*/ @@ -2516,19 +2399,18 @@ TraceDelayedChildren(JSTracer *trc) JS_ASSERT(rt->gcUntracedArenaStackTop->prevUntracedPage != 0); thingSize = a->list->thingSize; traceKind = GetFinalizableArenaTraceKind(a); - indexLimit = (a == a->list->last) - ? a->list->lastCount - : THINGS_PER_ARENA(thingSize); + indexLimit = THINGS_PER_ARENA(thingSize); thingsPerUntracedBit = THINGS_PER_UNTRACED_BIT(thingSize); /* - * We cannot use do-while loop here as a->u.untracedThings can be zero + * We cannot use do-while loop here as a->untracedThings can be zero * before the loop as a leftover from the previous iterations. See * comments after the loop. */ - while (a->u.untracedThings != 0) { - untracedBitIndex = JS_FLOOR_LOG2W(a->u.untracedThings); - a->u.untracedThings &= ~((jsuword)1 << untracedBitIndex); + while (a->finalizable.untracedThings != 0) { + untracedBitIndex = JS_FLOOR_LOG2W(a->finalizable.untracedThings); + a->finalizable.untracedThings &= + ~((jsuword)1 << untracedBitIndex); thingIndex = untracedBitIndex * thingsPerUntracedBit; endIndex = thingIndex + thingsPerUntracedBit; @@ -2546,9 +2428,9 @@ TraceDelayedChildren(JSTracer *trc) * with untraced ones. 
*/ flagp = THING_FLAGP(a, thingIndex); - if ((*flagp & (GCF_MARK|GCF_FINAL)) != (GCF_MARK|GCF_FINAL)) + if (!(*flagp & GCF_CHILDREN)) continue; - *flagp &= ~GCF_FINAL; + *flagp &= ~GCF_CHILDREN; #ifdef DEBUG JS_ASSERT(rt->gcTraceLaterCount != 0); --rt->gcTraceLaterCount; @@ -2619,9 +2501,9 @@ JS_CallTracer(JSTracer *trc, void *thing, uint32 kind) case JSTRACE_DOUBLE: a = THING_TO_ARENA(thing); JS_ASSERT(!a->list); - if (!a->u.hasMarkedDoubles) { + if (!a->hasMarkedDoubles) { ClearDoubleArenaFlags(a); - a->u.hasMarkedDoubles = true; + a->hasMarkedDoubles = true; } index = DOUBLE_THING_TO_INDEX(thing); JS_SET_BIT(DOUBLE_ARENA_BITMAP(a), index); @@ -2633,7 +2515,6 @@ JS_CallTracer(JSTracer *trc, void *thing, uint32 kind) goto out; a = THING_TO_ARENA(thing); flagp = THING_FLAGP(a, THING_TO_INDEX(thing, sizeof(JSString))); - JS_ASSERT((*flagp & GCF_FINAL) == 0); JS_ASSERT(kind == GetFinalizableArenaTraceKind(a)); if (!((JSString *) thing)->isDependent()) { *flagp |= GCF_MARK; @@ -2652,11 +2533,6 @@ JS_CallTracer(JSTracer *trc, void *thing, uint32 kind) if (*flagp & GCF_MARK) goto out; - /* - * We check for non-final flag only if mark is unset as - * DelayTracingChildren uses the flag. See comments in the function. 
- */ - JS_ASSERT(*flagp != GCF_FINAL); *flagp |= GCF_MARK; if (!cx->insideGCMarkCallback) { /* @@ -2744,17 +2620,16 @@ gc_root_traversal(JSDHashTable *table, JSDHashEntryHdr *hdr, uint32 num, for (unsigned i = 0; i != FINALIZE_LIMIT; i++) { JSGCArenaList *arenaList = &rt->gcArenaList[i]; size_t thingSize = arenaList->thingSize; - size_t limit = arenaList->lastCount * thingSize; - for (JSGCArenaInfo *a = arenaList->last; a; a = a->prev) { + size_t limit = THINGS_PER_ARENA(thingSize) * thingSize; + for (JSGCArenaInfo *a = arenaList->head; a; a = a->prev) { if (thing - ARENA_INFO_TO_START(a) < limit) { root_points_to_gcArenaList = true; break; } - limit = THINGS_PER_ARENA(thingSize) * thingSize; } } if (!root_points_to_gcArenaList) { - for (JSGCArenaInfo *a = rt->gcDoubleArenaList.first; a; a = a->prev) { + for (JSGCArenaInfo *a = rt->gcDoubleArenaList.head; a; a = a->prev) { if (thing - ARENA_INFO_TO_START(a) < DOUBLES_PER_ARENA * sizeof(jsdouble)) { root_points_to_gcArenaList = true; @@ -2866,27 +2741,39 @@ js_TraceStackFrame(JSTracer *trc, JSStackFrame *fp) JS_CALL_OBJECT_TRACER(trc, fp->scopeChain, "scope chain"); } -static void -TraceWeakRoots(JSTracer *trc, JSWeakRoots *wr) +void +JSWeakRoots::mark(JSTracer *trc) { - if (wr->newbornObject) - JS_CALL_OBJECT_TRACER(trc, wr->newbornObject, "newborn_object"); - if (wr->newbornString) - JS_CALL_STRING_TRACER(trc, wr->newbornString, "newborn_string"); - if (wr->newbornDouble) - JS_CALL_DOUBLE_TRACER(trc, wr->newbornDouble, "newborn_double"); +#ifdef DEBUG + const char * const newbornNames[] = { + "newborn_object", /* FINALIZE_OBJECT */ + "newborn_function", /* FINALIZE_FUNCTION */ #if JS_HAS_XML_SUPPORT - if (wr->newbornXML) - JS_CALL_TRACER(trc, wr->newbornXML, JSTRACE_XML, "newborn_xml"); + "newborn_xml", /* FINALIZE_XML */ #endif - for (uint32 i = 0; i != JS_EXTERNAL_STRING_LIMIT; i++) { - JSString *str = wr->newbornExternalString[i]; - if (str) - JS_CALL_STRING_TRACER(trc, str, "newborn_external_string"); + 
"newborn_string", /* FINALIZE_STRING */ + "newborn_external_string0", /* FINALIZE_EXTERNAL_STRING0 */ + "newborn_external_string1", /* FINALIZE_EXTERNAL_STRING1 */ + "newborn_external_string2", /* FINALIZE_EXTERNAL_STRING2 */ + "newborn_external_string3", /* FINALIZE_EXTERNAL_STRING3 */ + "newborn_external_string4", /* FINALIZE_EXTERNAL_STRING4 */ + "newborn_external_string5", /* FINALIZE_EXTERNAL_STRING5 */ + "newborn_external_string6", /* FINALIZE_EXTERNAL_STRING6 */ + "newborn_external_string7", /* FINALIZE_EXTERNAL_STRING7 */ + }; +#endif + for (size_t i = 0; i != JS_ARRAY_LENGTH(finalizableNewborns); ++i) { + void *newborn = finalizableNewborns[i]; + if (newborn) { + JS_CALL_TRACER(trc, newborn, GetFinalizableTraceKind(i), + newbornNames[i]); + } } - JS_CALL_VALUE_TRACER(trc, wr->lastAtom, "lastAtom"); + if (newbornDouble) + JS_CALL_DOUBLE_TRACER(trc, newbornDouble, "newborn_double"); + JS_CALL_VALUE_TRACER(trc, lastAtom, "lastAtom"); JS_SET_TRACING_NAME(trc, "lastInternalResult"); - js_CallValueTracerIfGCThing(trc, wr->lastInternalResult); + js_CallValueTracerIfGCThing(trc, lastInternalResult); } JS_REQUIRES_STACK JS_FRIEND_API(void) @@ -2960,7 +2847,7 @@ js_TraceContext(JSTracer *trc, JSContext *acx) /* Mark other roots-by-definition in acx. 
*/ if (acx->globalObject && !JS_HAS_OPTION(acx, JSOPTION_UNROOTED_GLOBAL)) JS_CALL_OBJECT_TRACER(trc, acx->globalObject, "global object"); - TraceWeakRoots(trc, &acx->weakRoots); + acx->weakRoots.mark(trc); if (acx->throwing) { JS_CALL_VALUE_TRACER(trc, acx->exception, "exception"); } else { @@ -2990,7 +2877,7 @@ js_TraceContext(JSTracer *trc, JSContext *acx) tvr->u.sprop->trace(trc); break; case JSTVU_WEAK_ROOTS: - TraceWeakRoots(trc, tvr->u.weakRoots); + tvr->u.weakRoots->mark(trc); break; case JSTVU_COMPILER: tvr->u.compiler->trace(trc); @@ -3034,9 +2921,9 @@ MarkReservedGCThings(JSTraceMonitor *tm) JSGCArenaInfo *a = THING_TO_ARENA(dp); JS_ASSERT(!a->list); - if (!a->u.hasMarkedDoubles) { + if (!a->hasMarkedDoubles) { ClearDoubleArenaFlags(a); - a->u.hasMarkedDoubles = JS_TRUE; + a->hasMarkedDoubles = JS_TRUE; } jsuint index = DOUBLE_THING_TO_INDEX(dp); JS_SET_BIT(DOUBLE_ARENA_BITMAP(a), index); @@ -3046,7 +2933,6 @@ MarkReservedGCThings(JSTraceMonitor *tm) JS_ASSERT(js_GetGCThingTraceKind(obj) == JSTRACE_OBJECT); uint8 *flagp = GetGCThingFlags(obj); - JS_ASSERT(*flagp != GCF_FINAL); *flagp |= GCF_MARK; } } @@ -3116,11 +3002,6 @@ js_TriggerGC(JSContext *cx, JSBool gcLocked) if (rt->gcIsNeeded) return; - /* - * Trigger the GC when it is safe to call an operation callback on any - * thread. 
-     */
-    rt->gcIsNeeded = JS_TRUE;
     js_TriggerAllOperationCallbacks(rt, gcLocked);
 }
@@ -3297,69 +3178,103 @@ FinalizeArenaList(JSContext *cx, unsigned thingKind,
     JSGCArenaList *arenaList = &cx->runtime->gcArenaList[thingKind];
     JS_ASSERT(sizeof(T) == arenaList->thingSize);
-    JSGCArenaInfo **ap = &arenaList->last;
+    JSGCArenaInfo **ap = &arenaList->head;
     JSGCArenaInfo *a = *ap;
     if (!a)
         return;
-    JS_ASSERT(arenaList->lastCount > 0);
-
-    JSGCThing *freeList = NULL;
-    arenaList->freeList = NULL;
-
-    uint32 indexLimit = THINGS_PER_ARENA(sizeof(T));
-    uint8 *flagp = THING_FLAGP(a, arenaList->lastCount - 1);
-
 #ifdef JS_GCMETER
     uint32 nlivearenas = 0, nkilledarenas = 0, nthings = 0;
 #endif
     for (;;) {
+        JS_ASSERT(a->list == arenaList);
         JS_ASSERT(a->prevUntracedPage == 0);
-        JS_ASSERT(a->u.untracedThings == 0);
+        JS_ASSERT(a->finalizable.untracedThings == 0);
+        JSGCThing *freeList = NULL;
+        JSGCThing **tailp = &freeList;
         bool allClear = true;
-        do {
-            if (*flagp & (GCF_MARK | GCF_LOCK)) {
-                *flagp &= ~GCF_MARK;
-                allClear = false;
-                METER(nthings++);
-            } else {
-                JSGCThing *thing = FLAGP_TO_THING(flagp, sizeof(T));
-                if (!(*flagp & GCF_FINAL)) {
-                    /* Call the finalizer with GCF_FINAL ORed into flags. */
-                    *flagp |= GCF_FINAL;
-                    FinalizeGCThing(cx, (T *) thing, thingKind);
-#ifdef DEBUG
-                    memset(thing, JS_FREE_PATTERN, sizeof(T));
-#endif
+        uint8 *flagp = THING_FLAGP(a, 0);
+        JSGCThing *thing =
+            reinterpret_cast<JSGCThing *>(ARENA_INFO_TO_START(a));
+        JSGCThing *thingsEnd =
+            reinterpret_cast<JSGCThing *>(ARENA_INFO_TO_START(a) +
+                                          THINGS_PER_ARENA(sizeof(T)) *
+                                          sizeof(T));
+        JSGCThing* nextFree = a->finalizable.freeList
+                              ? a->finalizable.freeList
+                              : thingsEnd;
+        for (;; thing = NextThing(thing, sizeof(T)), --flagp) {
+            if (thing == nextFree) {
+                if (thing == thingsEnd)
+                    break;
+                JS_ASSERT(!*flagp);
+                nextFree = nextFree->link;
+                if (!nextFree) {
+                    nextFree = thingsEnd;
+                } else {
+                    JS_ASSERT(thing < nextFree);
+                    JS_ASSERT(nextFree < thingsEnd);
+                }
+            } else {
+                JS_ASSERT(!(*flagp & ~(GCF_MARK | GCF_LOCK)));
+                if (*flagp) {
+                    *flagp &= ~GCF_MARK;
+                    allClear = false;
+                    METER(nthings++);
+                    continue;
                 }
-                thing->flagp = flagp;
-                thing->next = freeList;
-                freeList = thing;
-            }
-        } while (++flagp != THING_FLAGS_END(a));
+                /*
+                 * Call the finalizer with cleared flags so
+                 * js_IsAboutToBeFinalized will be false.
+                 */
+                *flagp = 0;
+                FinalizeGCThing(cx, reinterpret_cast<T *>(thing), thingKind);
+#ifdef DEBUG
+                memset(thing, JS_FREE_PATTERN, sizeof(T));
+#endif
+            }
+            *tailp = thing;
+            tailp = &thing->link;
+        }
+
+#ifdef DEBUG
+        /* Check that the free list is consistent. */
+        unsigned nfree = 0;
+        if (freeList) {
+            JS_ASSERT(tailp != &freeList);
+            JSGCThing *thing = freeList;
+            for (;;) {
+                ++nfree;
+                if (&thing->link == tailp)
+                    break;
+                JS_ASSERT(thing < thing->link);
+                thing = thing->link;
+            }
+        }
+#endif
         if (allClear) {
             /*
             * Forget just assembled free list head for the arena and
             * add the arena itself to the destroy list. 
*/ - freeList = arenaList->freeList; - if (a == arenaList->last) - arenaList->lastCount = indexLimit; + JS_ASSERT(nfree == THINGS_PER_ARENA(sizeof(T))); *ap = a->prev; a->prev = *emptyArenas; *emptyArenas = a; METER(nkilledarenas++); } else { - arenaList->freeList = freeList; + JS_ASSERT(nfree < THINGS_PER_ARENA(sizeof(T))); + *tailp = NULL; + a->finalizable.freeList = freeList; ap = &a->prev; METER(nlivearenas++); } if (!(a = *ap)) break; - flagp = THING_FLAGP(a, indexLimit - 1); } + arenaList->cursor = arenaList->head; METER(UpdateArenaStats(&rt->gcStats.arenaStats[thingKind], nlivearenas, nkilledarenas, nthings)); @@ -3611,8 +3526,8 @@ js_GC(JSContext *cx, JSGCInvocationKind gckind) JS_ASSERT(IS_GC_MARKING_TRACER(&trc)); #ifdef DEBUG - for (a = rt->gcDoubleArenaList.first; a; a = a->prev) - JS_ASSERT(!a->u.hasMarkedDoubles); + for (a = rt->gcDoubleArenaList.head; a; a = a->prev) + JS_ASSERT(!a->hasMarkedDoubles); #endif js_TraceRuntime(&trc, keepAtoms); @@ -3688,17 +3603,17 @@ js_GC(JSContext *cx, JSGCInvocationKind gckind) FinalizeArenaList(cx, i, &emptyArenas); } - ap = &rt->gcDoubleArenaList.first; + ap = &rt->gcDoubleArenaList.head; METER((nlivearenas = 0, nkilledarenas = 0, nthings = 0)); while ((a = *ap) != NULL) { - if (!a->u.hasMarkedDoubles) { + if (!a->hasMarkedDoubles) { /* No marked double values in the arena. 
*/ *ap = a->prev; a->prev = emptyArenas; emptyArenas = a; METER(nkilledarenas++); } else { - a->u.hasMarkedDoubles = false; + a->hasMarkedDoubles = false; ap = &a->prev; #ifdef JS_GCMETER for (size_t i = 0; i != DOUBLES_PER_ARENA; ++i) { @@ -3711,7 +3626,7 @@ js_GC(JSContext *cx, JSGCInvocationKind gckind) } METER(UpdateArenaStats(&rt->gcStats.doubleArenaStats, nlivearenas, nkilledarenas, nthings)); - rt->gcDoubleArenaList.cursor = rt->gcDoubleArenaList.first; + rt->gcDoubleArenaList.cursor = rt->gcDoubleArenaList.head; /* * Sweep the runtime's property tree after finalizing objects, in case any diff --git a/js/src/jsgc.h b/js/src/jsgc.h index 5b907a19064..ae1d603e280 100644 --- a/js/src/jsgc.h +++ b/js/src/jsgc.h @@ -126,27 +126,6 @@ typedef struct JSPtrTable { extern JSBool js_RegisterCloseableIterator(JSContext *cx, JSObject *obj); -/* - * Allocates a new GC thing of the given size. After a successful allocation - * the caller must fully initialize the thing before calling any function that - * can potentially trigger GC. This will ensure that GC tracing never sees junk - * values stored in the partially initialized thing. - */ -extern JSObject* -js_NewGCObject(JSContext *cx); - -extern JSString* -js_NewGCString(JSContext *cx); - -extern JSString* -js_NewGCExternalString(JSContext *cx, uintN type); - -extern JSFunction* -js_NewGCFunction(JSContext *cx); - -extern JSXML* -js_NewGCXML(JSContext *cx); - /* * Allocate a new double jsval and store the result in *vp. vp must be a root. * The function does not copy the result into any weak root. @@ -281,28 +260,78 @@ IsFinalizableStringKind(unsigned thingKind) thingKind <= unsigned(FINALIZE_EXTERNAL_STRING_LAST); } -typedef struct JSGCArenaInfo JSGCArenaInfo; -typedef struct JSGCArenaList JSGCArenaList; -typedef struct JSGCChunkInfo JSGCChunkInfo; +/* + * Allocates a new GC thing. 
After a successful allocation the caller must + * fully initialize the thing before calling any function that can potentially + * trigger GC. This will ensure that GC tracing never sees junk values stored + * in the partially initialized thing. + */ +extern void * +NewFinalizableGCThing(JSContext *cx, unsigned thingKind); + +static inline JSObject * +js_NewGCObject(JSContext *cx) +{ + return (JSObject *) NewFinalizableGCThing(cx, FINALIZE_OBJECT); +} + +static inline JSString * +js_NewGCString(JSContext *cx) +{ + return (JSString *) NewFinalizableGCThing(cx, FINALIZE_STRING); +} + +static inline JSString * +js_NewGCExternalString(JSContext *cx, uintN type) +{ + JS_ASSERT(type < JS_EXTERNAL_STRING_LIMIT); + return (JSString *) NewFinalizableGCThing(cx, + FINALIZE_EXTERNAL_STRING0 + type); +} + +static inline JSFunction* +js_NewGCFunction(JSContext *cx) +{ + return (JSFunction *) NewFinalizableGCThing(cx, FINALIZE_FUNCTION); +} + +#if JS_HAS_XML_SUPPORT +static inline JSXML * +js_NewGCXML(JSContext *cx) +{ + return (JSXML *) NewFinalizableGCThing(cx, FINALIZE_XML); +} +#endif + +struct JSGCArenaInfo; +struct JSGCChunkInfo; struct JSGCArenaList { - JSGCArenaInfo *last; /* last allocated GC arena */ - uint32 lastCount; /* number of allocated things in the last - arena */ + JSGCArenaInfo *head; /* list start */ + JSGCArenaInfo *cursor; /* arena with free things */ uint32 thingKind; /* one of JSFinalizeGCThingKind */ uint32 thingSize; /* size of things to allocate on this list */ - JSGCThing *freeList; /* list of free GC things */ -}; - -union JSGCDoubleCell { - double number; - JSGCDoubleCell *link; }; struct JSGCDoubleArenaList { - JSGCArenaInfo *first; /* first allocated GC arena */ - JSGCArenaInfo *cursor; /* next arena with free cells */ + JSGCArenaInfo *head; /* list start */ + JSGCArenaInfo *cursor; /* next arena with free cells */ +}; + +struct JSGCFreeLists { + JSGCThing *doubles; + JSGCThing *finalizables[FINALIZE_LIMIT]; + + void purge(); + +#ifdef DEBUG + 
void assertEmpty() const { + JS_ASSERT(!doubles); + for (size_t i = 0; i != JS_ARRAY_LENGTH(finalizables); ++i) + JS_ASSERT(!finalizables[i]); + } +#endif }; extern void @@ -310,19 +339,16 @@ js_DestroyScriptsToGC(JSContext *cx, JSThreadData *data); struct JSWeakRoots { /* Most recently created things by type, members of the GC's root set. */ - JSObject *newbornObject; + void *finalizableNewborns[FINALIZE_LIMIT]; jsdouble *newbornDouble; - JSString *newbornString; -#if JS_HAS_XML_SUPPORT - JSXML *newbornXML; -#endif - JSString *newbornExternalString[JS_EXTERNAL_STRING_LIMIT]; /* Atom root for the last-looked-up atom on this context. */ jsval lastAtom; /* Root for the result of the most recent js_InternalInvoke call. */ jsval lastInternalResult; + + void mark(JSTracer *trc); }; #define JS_CLEAR_WEAK_ROOTS(wr) (memset((wr), 0, sizeof(JSWeakRoots))) diff --git a/js/src/jsobj.cpp b/js/src/jsobj.cpp index 0e48e9d80ea..e601ac3adea 100644 --- a/js/src/jsobj.cpp +++ b/js/src/jsobj.cpp @@ -2247,9 +2247,6 @@ js_NewObjectWithGivenProto(JSContext *cx, JSClass *clasp, JSObject *proto, obj->map = const_cast(ops->objectMap); } - /* Check that the newborn root still holds the object. */ - JS_ASSERT_IF(!cx->localRootStack, cx->weakRoots.newbornObject == obj); - /* * Do not call debug hooks on trace, because we might be in a non-_FAIL * builtin. See bug 481444. @@ -2260,7 +2257,7 @@ js_NewObjectWithGivenProto(JSContext *cx, JSClass *clasp, JSObject *proto, cx->debugHooks->objectHook(cx, obj, JS_TRUE, cx->debugHooks->objectHookData); JS_UNKEEP_ATOMS(cx->runtime); - cx->weakRoots.newbornObject = obj; + cx->weakRoots.finalizableNewborns[FINALIZE_OBJECT] = obj; } out: @@ -5514,7 +5511,8 @@ js_GetClassPrototype(JSContext *cx, JSObject *scope, jsid id, * instance that delegates to this object, or just query the * prototype for its class. 
*/ - cx->weakRoots.newbornObject = JSVAL_TO_OBJECT(v); + cx->weakRoots.finalizableNewborns[FINALIZE_OBJECT] = + JSVAL_TO_OBJECT(v); } } *protop = JSVAL_IS_OBJECT(v) ? JSVAL_TO_OBJECT(v) : NULL; diff --git a/js/src/jsops.cpp b/js/src/jsops.cpp index e35424bff53..940ebd0aeed 100644 --- a/js/src/jsops.cpp +++ b/js/src/jsops.cpp @@ -3475,7 +3475,7 @@ BEGIN_CASE(JSOP_ENDINIT) JS_ASSERT(regs.sp - StackBase(fp) >= 1); lval = FETCH_OPND(-1); JS_ASSERT(JSVAL_IS_OBJECT(lval)); - cx->weakRoots.newbornObject = JSVAL_TO_OBJECT(lval); + cx->weakRoots.finalizableNewborns[FINALIZE_OBJECT] = JSVAL_TO_OBJECT(lval); END_CASE(JSOP_ENDINIT) BEGIN_CASE(JSOP_INITPROP) diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index 2f945993cc9..4b16870c2a0 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -2864,7 +2864,7 @@ NativeToValueBase(JSContext* cx, jsval& v, JSTraceType type, double* slot) * It's not safe to trigger the GC here, so use an emergency heap if we * are out of double boxes. */ - if (JS_THREAD_DATA(cx)->doubleFreeList) { + if (JS_THREAD_DATA(cx)->gcFreeLists.doubles) { #ifdef DEBUG JSBool ok = #endif