Bug 763800 - Changes GCHelperThread to work on the main thread in non-threadsafe builds and removes the many now-redundant #ifdef JS_THREADSAFE guards (r=billm)

Till Schneidereit 2012-06-13 11:27:45 +02:00
parent 8a5e6af383
commit b318642115
13 changed files with 57 additions and 117 deletions
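
Before the per-file hunks, here is a condensed editorial sketch of the behavior this commit gives GCHelperThread in non-threadsafe builds. It is assembled from the jsgc.cpp hunks below and is not a verbatim excerpt; the JS_THREADSAFE branches (condvars, worker thread) are elided and summarized in comments.

/* [Editorial sketch, not part of the patch] Non-threadsafe branches of the
 * new GCHelperThread entry points, condensed from the jsgc.cpp hunks below. */
bool
GCHelperThread::init()
{
    /* Threadsafe builds create the condvars and the worker thread here. */
    backgroundAllocation = false;
    return true;
}

bool
GCHelperThread::prepareForBackgroundSweep()
{
    JS_ASSERT(state == IDLE);
    return false;               /* tells callers to sweep synchronously */
}

void
GCHelperThread::startBackgroundSweep(bool shouldShrink)
{
    /* Unreachable: prepareForBackgroundSweep() returned false. */
    JS_NOT_REACHED("No background sweep if !JS_THREADSAFE");
}

void
GCHelperThread::waitBackgroundSweepEnd()
{
    JS_ASSERT(state == IDLE);   /* no background work can be pending */
}

void
GCHelperThread::waitBackgroundSweepOrAllocEnd()
{
    JS_ASSERT(state == IDLE);
}

void
GCHelperThread::finish()
{
    JS_ASSERT(state == IDLE);   /* all work was done synchronously */
}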

View File

@ -766,10 +766,8 @@ JSRuntime::JSRuntime()
hadOutOfMemory(false),
debugScopes(NULL),
data(NULL),
#ifdef JS_THREADSAFE
gcLock(NULL),
gcHelperThread(thisFromCtor()),
#endif
defaultFreeOp_(thisFromCtor(), false, false),
debuggerMutations(0),
securityCallbacks(const_cast<JSSecurityCallbacks *>(&NullSecurityCallbacks)),

View File

@ -3737,11 +3737,8 @@ static JS_ALWAYS_INLINE JSObject *
NewArray(JSContext *cx, uint32_t length, JSObject *proto_)
{
gc::AllocKind kind = GuessArrayGCKind(length);
#ifdef JS_THREADSAFE
JS_ASSERT(CanBeFinalizedInBackground(kind, &ArrayClass));
kind = GetBackgroundAllocKind(kind);
#endif
GlobalObject *parent_ = GetCurrentGlobal(cx);

View File

@ -1146,12 +1146,10 @@ JSRuntime::onOutOfMemory(void *p, size_t nbytes, JSContext *cx)
* all the allocations and released the empty GC chunks.
*/
ShrinkGCBuffers(this);
#ifdef JS_THREADSAFE
{
AutoLockGC lock(this);
gcHelperThread.waitBackgroundSweepOrAllocEnd();
}
#endif
if (!p)
p = OffTheBooks::malloc_(nbytes);
else if (p == reinterpret_cast<void *>(1))

View File

@ -685,12 +685,10 @@ struct JSRuntime : js::RuntimeFriendFields
/* Client opaque pointers */
void *data;
#ifdef JS_THREADSAFE
/* These combine to interlock the GC and new requests. */
PRLock *gcLock;
js::GCHelperThread gcHelperThread;
#endif /* JS_THREADSAFE */
private:
js::FreeOp defaultFreeOp_;
@ -1015,12 +1013,10 @@ typedef HashSet<JSObject *,
inline void
FreeOp::free_(void* p) {
#ifdef JS_THREADSAFE
if (shouldFreeLater()) {
runtime()->gcHelperThread.freeLater(p);
return;
}
#endif
runtime()->free_(p);
}
@ -1396,8 +1392,8 @@ class AutoXMLRooter : private AutoGCRooter {
# define JS_LOCK_GC(rt) PR_Lock((rt)->gcLock)
# define JS_UNLOCK_GC(rt) PR_Unlock((rt)->gcLock)
#else
# define JS_LOCK_GC(rt)
# define JS_UNLOCK_GC(rt)
# define JS_LOCK_GC(rt) do { } while (0)
# define JS_UNLOCK_GC(rt) do { } while (0)
#endif
class AutoLockGC
@ -1409,18 +1405,14 @@ class AutoLockGC
{
MOZ_GUARD_OBJECT_NOTIFIER_INIT;
// Avoid MSVC warning C4390 for non-threadsafe builds.
#ifdef JS_THREADSAFE
if (rt)
JS_LOCK_GC(rt);
#endif
}
~AutoLockGC()
{
#ifdef JS_THREADSAFE
if (runtime)
JS_UNLOCK_GC(runtime);
#endif
}
bool locked() const {

View File

@ -143,21 +143,11 @@ struct JSFunction : public JSObject
}
#if JS_BITS_PER_WORD == 32
# ifdef JS_THREADSAFE
static const js::gc::AllocKind FinalizeKind = js::gc::FINALIZE_OBJECT2_BACKGROUND;
static const js::gc::AllocKind ExtendedFinalizeKind = js::gc::FINALIZE_OBJECT4_BACKGROUND;
# else
static const js::gc::AllocKind FinalizeKind = js::gc::FINALIZE_OBJECT2;
static const js::gc::AllocKind ExtendedFinalizeKind = js::gc::FINALIZE_OBJECT4;
# endif
#else
# ifdef JS_THREADSAFE
static const js::gc::AllocKind FinalizeKind = js::gc::FINALIZE_OBJECT4_BACKGROUND;
static const js::gc::AllocKind ExtendedFinalizeKind = js::gc::FINALIZE_OBJECT8_BACKGROUND;
# else
static const js::gc::AllocKind FinalizeKind = js::gc::FINALIZE_OBJECT4;
static const js::gc::AllocKind ExtendedFinalizeKind = js::gc::FINALIZE_OBJECT8;
# endif
#endif
inline void trace(JSTracer *trc);

View File

@ -421,7 +421,6 @@ FreeChunk(Chunk *p) {
UnmapPages(static_cast<void *>(p), ChunkSize);
}
#ifdef JS_THREADSAFE
inline bool
ChunkPool::wantBackgroundAllocation(JSRuntime *rt) const
{
@ -434,7 +433,6 @@ ChunkPool::wantBackgroundAllocation(JSRuntime *rt) const
emptyCount == 0 &&
rt->gcChunkSet.count() >= 4;
}
#endif
/* Must be called with the GC lock taken. */
inline Chunk *
@ -458,10 +456,8 @@ ChunkPool::get(JSRuntime *rt)
JS_ASSERT(chunk->unused());
JS_ASSERT(!rt->gcChunkSet.has(chunk));
#ifdef JS_THREADSAFE
if (wantBackgroundAllocation(rt))
rt->gcHelperThread.startBackgroundAllocationIfIdle();
#endif
return chunk;
}
@ -753,19 +749,15 @@ Chunk::releaseArena(ArenaHeader *aheader)
JS_ASSERT(!aheader->hasDelayedMarking);
JSCompartment *comp = aheader->compartment;
JSRuntime *rt = comp->rt;
#ifdef JS_THREADSAFE
AutoLockGC maybeLock;
if (rt->gcHelperThread.sweeping())
maybeLock.lock(rt);
#endif
Probes::resizeHeap(comp, rt->gcBytes, rt->gcBytes - ArenaSize);
JS_ASSERT(rt->gcBytes >= ArenaSize);
JS_ASSERT(comp->gcBytes >= ArenaSize);
#ifdef JS_THREADSAFE
if (rt->gcHelperThread.sweeping())
comp->reduceGCTriggerBytes(GC_HEAP_GROWTH_FACTOR * ArenaSize);
#endif
rt->gcBytes -= ArenaSize;
comp->gcBytes -= ArenaSize;
@ -841,9 +833,9 @@ js_InitGC(JSRuntime *rt, uint32_t maxbytes)
rt->gcLock = PR_NewLock();
if (!rt->gcLock)
return false;
#endif
if (!rt->gcHelperThread.init())
return false;
#endif
/*
* Separate gcMaxMallocBytes from gcMaxBytes but initialize to maxbytes
@ -1165,9 +1157,7 @@ js_FinishGC(JSRuntime *rt)
* Wait until the background finalization stops and the helper thread
* shuts down before we forcefully release any remaining GC memory.
*/
#ifdef JS_THREADSAFE
rt->gcHelperThread.finish();
#endif
#ifdef JS_GC_ZEAL
/* Free memory associated with GC verification. */
@ -1451,9 +1441,7 @@ void
ArenaLists::finalizeNow(FreeOp *fop, AllocKind thingKind)
{
JS_ASSERT(!fop->onBackgroundThread());
#ifdef JS_THREADSAFE
JS_ASSERT(backgroundFinalizeState[thingKind] == BFS_DONE);
#endif
FinalizeArenas(fop, &arenaLists[thingKind], thingKind);
}
@ -1508,11 +1496,12 @@ ArenaLists::finalizeLater(FreeOp *fop, AllocKind thingKind)
#endif
}
#ifdef JS_THREADSAFE
/*static*/ void
ArenaLists::backgroundFinalize(FreeOp *fop, ArenaHeader *listHead)
{
#ifdef JS_THREADSAFE
JS_ASSERT(fop->onBackgroundThread());
#endif /* JS_THREADSAFE */
JS_ASSERT(listHead);
AllocKind thingKind = listHead->getAllocKind();
JSCompartment *comp = listHead->compartment;
@ -1550,7 +1539,6 @@ ArenaLists::backgroundFinalize(FreeOp *fop, ArenaHeader *listHead)
lists->backgroundFinalizeState[thingKind] = BFS_DONE;
}
}
#endif /* JS_THREADSAFE */
void
ArenaLists::finalizeObjects(FreeOp *fop)
@ -1562,14 +1550,12 @@ ArenaLists::finalizeObjects(FreeOp *fop)
finalizeNow(fop, FINALIZE_OBJECT12);
finalizeNow(fop, FINALIZE_OBJECT16);
#ifdef JS_THREADSAFE
finalizeLater(fop, FINALIZE_OBJECT0_BACKGROUND);
finalizeLater(fop, FINALIZE_OBJECT2_BACKGROUND);
finalizeLater(fop, FINALIZE_OBJECT4_BACKGROUND);
finalizeLater(fop, FINALIZE_OBJECT8_BACKGROUND);
finalizeLater(fop, FINALIZE_OBJECT12_BACKGROUND);
finalizeLater(fop, FINALIZE_OBJECT16_BACKGROUND);
#endif
#if JS_HAS_XML_SUPPORT
finalizeNow(fop, FINALIZE_XML);
@ -1650,9 +1636,7 @@ ArenaLists::refillFreeList(JSContext *cx, AllocKind thingKind)
break;
AutoLockGC lock(rt);
#ifdef JS_THREADSAFE
rt->gcHelperThread.waitBackgroundSweepEnd();
#endif
}
/*
@ -2624,7 +2608,6 @@ ExpireChunksAndArenas(JSRuntime *rt, bool shouldShrink)
}
#ifdef JS_THREADSAFE
static unsigned
GetCPUCount()
{
@ -2641,10 +2624,12 @@ GetCPUCount()
}
return ncpus;
}
#endif /* JS_THREADSAFE */
bool
GCHelperThread::init()
{
#ifdef JS_THREADSAFE
if (!(wakeup = PR_NewCondVar(rt->gcLock)))
return false;
if (!(done = PR_NewCondVar(rt->gcLock)))
@ -2656,12 +2641,16 @@ GCHelperThread::init()
return false;
backgroundAllocation = (GetCPUCount() >= 2);
#else
backgroundAllocation = false;
#endif /* JS_THREADSAFE */
return true;
}
void
GCHelperThread::finish()
{
#ifdef JS_THREADSAFE
PRThread *join = NULL;
{
AutoLockGC lock(rt);
@ -2685,8 +2674,15 @@ GCHelperThread::finish()
PR_DestroyCondVar(wakeup);
if (done)
PR_DestroyCondVar(done);
#else
/*
* In non-threadsafe configurations, we do all work synchronously, so we must be IDLE
*/
JS_ASSERT(state == IDLE);
#endif /* JS_THREADSAFE */
}
#ifdef JS_THREADSAFE
/* static */
void
GCHelperThread::threadMain(void *arg)
@ -2743,19 +2739,25 @@ GCHelperThread::threadLoop()
}
}
}
#endif /* JS_THREADSAFE */
bool
GCHelperThread::prepareForBackgroundSweep()
{
JS_ASSERT(state == IDLE);
#ifdef JS_THREADSAFE
size_t maxArenaLists = MAX_BACKGROUND_FINALIZE_KINDS * rt->compartments.length();
return finalizeVector.reserve(maxArenaLists);
#else
return false;
#endif /* JS_THREADSAFE */
}
/* Must be called with the GC lock taken. */
void
GCHelperThread::startBackgroundSweep(bool shouldShrink)
{
#ifdef JS_THREADSAFE
/* The caller takes the GC lock. */
JS_ASSERT(state == IDLE);
JS_ASSERT(!sweepFlag);
@ -2763,8 +2765,12 @@ GCHelperThread::startBackgroundSweep(bool shouldShrink)
shrinkFlag = shouldShrink;
state = SWEEPING;
PR_NotifyCondVar(wakeup);
#else
JS_NOT_REACHED("No background sweep if !JS_THREADSAFE");
#endif /* JS_THREADSAFE */
}
#ifdef JS_THREADSAFE
/* Must be called with the GC lock taken. */
void
GCHelperThread::startBackgroundShrink()
@ -2790,33 +2796,46 @@ GCHelperThread::startBackgroundShrink()
JS_NOT_REACHED("No shrink on shutdown");
}
}
#endif /* JS_THREADSAFE */
/* Must be called with the GC lock taken. */
void
GCHelperThread::waitBackgroundSweepEnd()
{
#ifdef JS_THREADSAFE
while (state == SWEEPING)
PR_WaitCondVar(done, PR_INTERVAL_NO_TIMEOUT);
#else
JS_ASSERT(state == IDLE);
#endif /* JS_THREADSAFE */
}
/* Must be called with the GC lock taken. */
void
GCHelperThread::waitBackgroundSweepOrAllocEnd()
{
#ifdef JS_THREADSAFE
if (state == ALLOCATING)
state = CANCEL_ALLOCATION;
while (state == SWEEPING || state == CANCEL_ALLOCATION)
PR_WaitCondVar(done, PR_INTERVAL_NO_TIMEOUT);
#else
JS_ASSERT(state == IDLE);
#endif /* JS_THREADSAFE */
}
/* Must be called with the GC lock taken. */
inline void
GCHelperThread::startBackgroundAllocationIfIdle()
{
#ifdef JS_THREADSAFE
if (state == IDLE) {
state = ALLOCATING;
PR_NotifyCondVar(wakeup);
}
#else
JS_ASSERT(state == IDLE);
#endif /* JS_THREADSAFE */
}
JS_FRIEND_API(void)
@ -2838,6 +2857,7 @@ GCHelperThread::replenishAndFreeLater(void *ptr)
Foreground::free_(ptr);
}
#ifdef JS_THREADSAFE
/* Must be called with the GC lock taken. */
void
GCHelperThread::doSweep()
@ -2882,7 +2902,6 @@ GCHelperThread::doSweep()
ExpireChunksAndArenas(rt, true);
}
}
#endif /* JS_THREADSAFE */
} /* namespace js */
@ -3249,11 +3268,7 @@ SweepPhase(JSRuntime *rt, JSGCInvocationKind gckind, bool *startBackgroundSweep)
isFull = false;
}
#ifdef JS_THREADSAFE
*startBackgroundSweep = (rt->hasContexts() && rt->gcHelperThread.prepareForBackgroundSweep());
#else
*startBackgroundSweep = false;
#endif
/* Purge the ArenaLists before sweeping. */
for (GCCompartmentsIter c(rt); !c.done(); c.next())
@ -3688,7 +3703,6 @@ GCCycle(JSRuntime *rt, bool incremental, int64_t budget, JSGCInvocationKind gcki
AutoLockGC lock(rt);
AutoGCSession gcsession(rt);
#ifdef JS_THREADSAFE
/*
* As we about to purge caches and clear the mark bits we must wait for
* any background finalization to finish. We must also wait for the
@ -3699,7 +3713,6 @@ GCCycle(JSRuntime *rt, bool incremental, int64_t budget, JSGCInvocationKind gcki
gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_WAIT_BACKGROUND_THREAD);
rt->gcHelperThread.waitBackgroundSweepOrAllocEnd();
}
#endif
bool startBackgroundSweep = false;
{
@ -3733,10 +3746,8 @@ GCCycle(JSRuntime *rt, bool incremental, int64_t budget, JSGCInvocationKind gcki
SweepPhase(rt, gckind, &startBackgroundSweep);
}
#ifdef JS_THREADSAFE
if (startBackgroundSweep)
rt->gcHelperThread.startBackgroundSweep(gckind == GC_SHRINK);
#endif
}
#ifdef JS_GC_ZEAL
@ -3962,9 +3973,7 @@ IterateCompartmentsArenasCells(JSRuntime *rt, void *data,
AutoLockGC lock(rt);
AutoHeapSession session(rt);
#ifdef JS_THREADSAFE
rt->gcHelperThread.waitBackgroundSweepEnd();
#endif
AutoUnlockGC unlock(rt);
AutoCopyFreeListToArenas copy(rt);
@ -3989,9 +3998,7 @@ IterateChunks(JSRuntime *rt, void *data, IterateChunkCallback chunkCallback)
AutoLockGC lock(rt);
AutoHeapSession session(rt);
#ifdef JS_THREADSAFE
rt->gcHelperThread.waitBackgroundSweepEnd();
#endif
AutoUnlockGC unlock(rt);
for (js::GCChunkSet::Range r = rt->gcChunkSet.all(); !r.empty(); r.popFront())
@ -4007,9 +4014,7 @@ IterateCells(JSRuntime *rt, JSCompartment *compartment, AllocKind thingKind,
AutoLockGC lock(rt);
AutoHeapSession session(rt);
#ifdef JS_THREADSAFE
rt->gcHelperThread.waitBackgroundSweepEnd();
#endif
AutoUnlockGC unlock(rt);
AutoCopyFreeListToArenas copy(rt);
@ -4347,9 +4352,7 @@ StartVerifyBarriers(JSRuntime *rt)
if (!IsIncrementalGCSafe(rt))
return;
#ifdef JS_THREADSAFE
rt->gcHelperThread.waitBackgroundSweepOrAllocEnd();
#endif
AutoUnlockGC unlock(rt);
@ -4476,9 +4479,7 @@ EndVerifyBarriers(JSRuntime *rt)
AutoLockGC lock(rt);
AutoHeapSession session(rt);
#ifdef JS_THREADSAFE
rt->gcHelperThread.waitBackgroundSweepOrAllocEnd();
#endif
AutoUnlockGC unlock(rt);
@ -4701,9 +4702,7 @@ JS_IterateCompartments(JSRuntime *rt, void *data,
AutoLockGC lock(rt);
AutoHeapSession session(rt);
#ifdef JS_THREADSAFE
rt->gcHelperThread.waitBackgroundSweepOrAllocEnd();
#endif
AutoUnlockGC unlock(rt);
for (CompartmentsIter c(rt); !c.done(); c.next())
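
The SweepPhase and GCCycle hunks above establish the invariant that makes the new JS_NOT_REACHED in startBackgroundSweep() safe: in non-threadsafe builds prepareForBackgroundSweep() always returns false, so startBackgroundSweep() is never called. A condensed, editorial view of that caller-side flow (simplified from the hunks above, not verbatim):

/* [Editorial sketch] In SweepPhase, now unconditional (formerly #ifdef JS_THREADSAFE): */
*startBackgroundSweep = (rt->hasContexts() && rt->gcHelperThread.prepareForBackgroundSweep());

/* Back in GCCycle, also unconditional now; with !JS_THREADSAFE the flag is
 * always false, so startBackgroundSweep() and its JS_NOT_REACHED never run. */
if (startBackgroundSweep)
    rt->gcHelperThread.startBackgroundSweep(gckind == GC_SHRINK);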

View File

@ -159,7 +159,6 @@ struct ArenaLists {
ArenaList arenaLists[FINALIZE_LIMIT];
#ifdef JS_THREADSAFE
/*
* The background finalization adds the finalized arenas to the list at
* the *cursor position. backgroundFinalizeState controls the interaction
@ -183,27 +182,22 @@ struct ArenaLists {
};
volatile uintptr_t backgroundFinalizeState[FINALIZE_LIMIT];
#endif
public:
ArenaLists() {
for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
freeLists[i].initAsEmpty();
#ifdef JS_THREADSAFE
for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
backgroundFinalizeState[i] = BFS_DONE;
#endif
}
~ArenaLists() {
for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
#ifdef JS_THREADSAFE
/*
* We can only call this during the shutdown after the last GC when
* the background finalization is disabled.
*/
JS_ASSERT(backgroundFinalizeState[i] == BFS_DONE);
#endif
ArenaHeader **headp = &arenaLists[i].head;
while (ArenaHeader *aheader = *headp) {
*headp = aheader->next;
@ -222,14 +216,12 @@ struct ArenaLists {
bool arenaListsAreEmpty() const {
for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
#ifdef JS_THREADSAFE
/*
* The arena cannot be empty if the background finalization is not yet
* done.
*/
if (backgroundFinalizeState[i] != BFS_DONE)
return false;
#endif
if (arenaLists[i].head)
return false;
}
@ -238,11 +230,9 @@ struct ArenaLists {
void unmarkAll() {
for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
# ifdef JS_THREADSAFE
/* The background finalization must have stopped at this point. */
JS_ASSERT(backgroundFinalizeState[i] == BFS_DONE ||
backgroundFinalizeState[i] == BFS_JUST_FINISHED);
# endif
for (ArenaHeader *aheader = arenaLists[i].head; aheader; aheader = aheader->next) {
uintptr_t *word = aheader->chunk()->bitmap.arenaBits(aheader);
memset(word, 0, ArenaBitmapWords * sizeof(uintptr_t));
@ -250,11 +240,9 @@ struct ArenaLists {
}
}
#ifdef JS_THREADSAFE
bool doneBackgroundFinalize(AllocKind kind) const {
return backgroundFinalizeState[kind] == BFS_DONE;
}
#endif
/*
* Return the free list back to the arena so the GC finalization will not
@ -353,9 +341,7 @@ struct ArenaLists {
void finalizeShapes(FreeOp *fop);
void finalizeScripts(FreeOp *fop);
#ifdef JS_THREADSAFE
static void backgroundFinalize(FreeOp *fop, ArenaHeader *listHead);
#endif
private:
inline void finalizeNow(FreeOp *fop, AllocKind thingKind);
@ -507,8 +493,14 @@ namespace js {
void
InitTracer(JSTracer *trc, JSRuntime *rt, JSTraceCallback callback);
#ifdef JS_THREADSAFE
/*
* Helper that implements sweeping and allocation for kinds that can be swept
* and allocated off the main thread.
*
* In non-threadsafe builds, all actual sweeping and allocation is performed
* on the main thread, but GCHelperThread encapsulates this from clients as
* much as possible.
*/
class GCHelperThread {
enum State {
IDLE,
@ -635,7 +627,6 @@ class GCHelperThread {
bool prepareForBackgroundSweep();
};
#endif /* JS_THREADSAFE */
struct GCChunkHasher {
typedef gc::Chunk *Lookup;

View File

@ -398,10 +398,8 @@ inline T *
NewGCThing(JSContext *cx, js::gc::AllocKind kind, size_t thingSize)
{
JS_ASSERT(thingSize == js::gc::Arena::thingSize(kind));
#ifdef JS_THREADSAFE
JS_ASSERT_IF((cx->compartment == cx->runtime->atomsCompartment),
JS_ASSERT_IF(cx->compartment == cx->runtime->atomsCompartment,
kind == js::gc::FINALIZE_STRING || kind == js::gc::FINALIZE_SHORT_STRING);
#endif
JS_ASSERT(!cx->runtime->gcRunning);
JS_ASSERT(!cx->runtime->noGCOrAllocationCheck);
@ -431,10 +429,8 @@ inline T *
TryNewGCThing(JSContext *cx, js::gc::AllocKind kind, size_t thingSize)
{
JS_ASSERT(thingSize == js::gc::Arena::thingSize(kind));
#ifdef JS_THREADSAFE
JS_ASSERT_IF((cx->compartment == cx->runtime->atomsCompartment),
JS_ASSERT_IF(cx->compartment == cx->runtime->atomsCompartment,
kind == js::gc::FINALIZE_STRING || kind == js::gc::FINALIZE_SHORT_STRING);
#endif
JS_ASSERT(!cx->runtime->gcRunning);
JS_ASSERT(!cx->runtime->noGCOrAllocationCheck);

View File

@ -23,6 +23,10 @@
#else /* JS_THREADSAFE */
typedef struct PRThread PRThread;
typedef struct PRCondVar PRCondVar;
typedef struct PRLock PRLock;
# define JS_ATOMIC_INCREMENT(p) (++*(p))
# define JS_ATOMIC_DECREMENT(p) (--*(p))
# define JS_ATOMIC_ADD(p,v) (*(p) += (v))

View File

@ -1355,7 +1355,6 @@ NewObjectCache::copyCachedToObject(JSObject *dst, JSObject *src)
static inline bool
CanBeFinalizedInBackground(gc::AllocKind kind, Class *clasp)
{
#ifdef JS_THREADSAFE
JS_ASSERT(kind <= gc::FINALIZE_OBJECT_LAST);
/* If the class has no finalizer or a finalizer that is safe to call on
* a different thread, we change the finalize kind. For example,
@ -1364,10 +1363,7 @@ CanBeFinalizedInBackground(gc::AllocKind kind, Class *clasp)
* IsBackgroundAllocKind is called to prevent recursively incrementing
* the finalize kind; kind may already be a background finalize kind.
*/
if (!gc::IsBackgroundAllocKind(kind) && !clasp->finalize)
return true;
#endif
return false;
return (!gc::IsBackgroundAllocKind(kind) && !clasp->finalize);
}
/*
@ -1478,9 +1474,7 @@ CopyInitializerObject(JSContext *cx, HandleObject baseobj)
JS_ASSERT(!baseobj->inDictionaryMode());
gc::AllocKind kind = gc::GetGCObjectFixedSlotsKind(baseobj->numFixedSlots());
#ifdef JS_THREADSAFE
kind = gc::GetBackgroundAllocKind(kind);
#endif
JS_ASSERT(kind == baseobj->getAllocKind());
JSObject *obj = NewBuiltinClassInstance(cx, &ObjectClass, kind);
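
With the guards removed, CanBeFinalizedInBackground() alone decides whether a class may use a background finalize kind, and allocation sites request that kind unconditionally; per the commit, non-threadsafe builds then finalize those arenas on the main thread. A condensed editorial example of the call-site pattern, mirroring the NewArray hunk above and the CallObject::create hunk later in this commit:

/* [Editorial sketch of the now-unconditional call-site pattern] */
gc::AllocKind kind = gc::GetGCObjectKind(shape->numFixedSlots());
JS_ASSERT(CanBeFinalizedInBackground(kind, &CallClass));
kind = gc::GetBackgroundAllocKind(kind);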

View File

@ -254,12 +254,7 @@ ArrayBufferObject::create(JSContext *cx, uint32_t nbytes, uint8_t *contents)
RootedObject obj(cx, NewBuiltinClassInstance(cx, &ArrayBufferObject::protoClass));
if (!obj)
return NULL;
#ifdef JS_THREADSAFE
JS_ASSERT(obj->getAllocKind() == gc::FINALIZE_OBJECT16_BACKGROUND);
#else
JS_ASSERT(obj->getAllocKind() == gc::FINALIZE_OBJECT16);
#endif
JS_ASSERT(obj->getClass() == &ArrayBufferObject::protoClass);
js::Shape *empty = EmptyShape::getInitialShape(cx, &ArrayBufferClass,
@ -1397,11 +1392,7 @@ class TypedArrayTemplate
RootedObject obj(cx, NewBuiltinClassInstance(cx, protoClass()));
if (!obj)
return NULL;
#ifdef JS_THREADSAFE
JS_ASSERT(obj->getAllocKind() == gc::FINALIZE_OBJECT8_BACKGROUND);
#else
JS_ASSERT(obj->getAllocKind() == gc::FINALIZE_OBJECT8);
#endif
types::TypeObject *type;
if (proto) {

View File

@ -96,10 +96,8 @@ CallObject::create(JSContext *cx, JSScript *script, HandleObject enclosing, Hand
return NULL;
gc::AllocKind kind = gc::GetGCObjectKind(shape->numFixedSlots());
#ifdef JS_THREADSAFE
JS_ASSERT(CanBeFinalizedInBackground(kind, &CallClass));
kind = gc::GetBackgroundAllocKind(kind);
#endif
RootedTypeObject type(cx);
type = cx->compartment->getEmptyType(cx);

View File

@ -201,11 +201,7 @@ class WithObject : public NestedScopeObject
public:
static const unsigned RESERVED_SLOTS = 3;
#ifdef JS_THREADSAFE
static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4_BACKGROUND;
#else
static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4;
#endif
static WithObject *
create(JSContext *cx, HandleObject proto, HandleObject enclosing, uint32_t depth);
@ -221,11 +217,7 @@ class BlockObject : public NestedScopeObject
{
public:
static const unsigned RESERVED_SLOTS = CALL_BLOCK_RESERVED_SLOTS;
#ifdef JS_THREADSAFE
static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4_BACKGROUND;
#else
static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4;
#endif
/* Return the number of variables associated with this block. */
inline uint32_t slotCount() const;