Bug 686144 - eliminating gc::MarkingDelay. r=wmccloskey

Igor Bukanov 2011-09-12 11:43:43 +02:00
parent 2dbae9bd42
commit ee75e05a9b
2 changed files with 87 additions and 84 deletions


@@ -227,7 +227,7 @@ Arena::finalize(JSContext *cx, AllocKind thingKind, size_t thingSize)
JS_ASSERT(aheader.allocated());
JS_ASSERT(thingKind == aheader.getAllocKind());
JS_ASSERT(thingSize == aheader.getThingSize());
JS_ASSERT(!aheader.getMarkingDelay()->link);
JS_ASSERT(!aheader.hasDelayedMarking);
uintptr_t thing = thingsStart(thingKind);
uintptr_t lastByte = thingsEnd() - 1;
@@ -387,32 +387,27 @@ FinalizeArenas(JSContext *cx, ArenaLists::ArenaList *al, AllocKind thingKind)
} /* namespace js */
void
Chunk::init(JSRuntime *rt)
Chunk::init()
{
info.runtime = rt;
info.age = 0;
info.numFree = ArenasPerChunk;
JS_POISON(this, JS_FREE_PATTERN, GC_CHUNK_SIZE);
/* Assemble all arenas into a linked list and mark them as not allocated. */
ArenaHeader **prevp = &info.emptyArenaListHead;
Arena *end = &arenas[JS_ARRAY_LENGTH(arenas)];
for (Arena *a = &arenas[0]; a != end; ++a) {
#ifdef DEBUG
memset(a, ArenaSize, JS_FREE_PATTERN);
#endif
*prevp = &a->aheader;
a->aheader.setAsNotAllocated();
prevp = &a->aheader.next;
}
*prevp = NULL;
for (size_t i = 0; i != JS_ARRAY_LENGTH(markingDelay); ++i)
markingDelay[i].init();
/* We clear the bitmap to guard against xpc_IsGrayGCThing being called on
uninitialized data, which would happen before the first GC cycle. */
bitmap.clear();
info.age = 0;
info.numFree = ArenasPerChunk;
/* The rest of info fields are initialized in PickChunk. */
}
@@ -467,7 +462,7 @@ Chunk::allocateArena(JSContext *cx, AllocKind thingKind)
if (!hasAvailableArenas())
removeFromAvailableList();
JSRuntime *rt = info.runtime;
JSRuntime *rt = comp->rt;
Probes::resizeHeap(comp, rt->gcBytes, rt->gcBytes + ArenaSize);
JS_ATOMIC_ADD(&rt->gcBytes, ArenaSize);
JS_ATOMIC_ADD(&comp->gcBytes, ArenaSize);
@@ -481,13 +476,14 @@ void
Chunk::releaseArena(ArenaHeader *aheader)
{
JS_ASSERT(aheader->allocated());
JSRuntime *rt = info.runtime;
JS_ASSERT(!aheader->hasDelayedMarking);
JSCompartment *comp = aheader->compartment;
JSRuntime *rt = comp->rt;
#ifdef JS_THREADSAFE
AutoLockGC maybeLock;
if (rt->gcHelperThread.sweeping)
maybeLock.lock(info.runtime);
maybeLock.lock(rt);
#endif
JSCompartment *comp = aheader->compartment;
Probes::resizeHeap(comp, rt->gcBytes, rt->gcBytes - ArenaSize);
JS_ASSERT(size_t(rt->gcBytes) >= ArenaSize);
@@ -576,7 +572,7 @@ PickChunk(JSContext *cx)
if (!chunk)
return NULL;
chunk->init(rt);
chunk->init();
rt->gcChunkAllocationSinceLastGC = true;
}
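Note (illustrative, not part of the patch): ChunkInfo loses its runtime pointer and GetGCThingRuntime() is deleted in the header changes below, so code that used to reach the JSRuntime through the chunk header now goes through the arena's compartment, exactly as allocateArena and releaseArena do above. A minimal compilable sketch of that access path, using stand-in types rather than the real SpiderMonkey ones:

#include <cstddef>

// Stand-in types, not the real SpiderMonkey definitions.
struct JSRuntimeModel { size_t gcBytes; };
struct JSCompartmentModel { JSRuntimeModel *rt; };
struct ArenaHeaderModel { JSCompartmentModel *compartment; };

// Before: JSRuntime *rt = chunk->info.runtime;       (Chunk::init(rt) had to store it)
// After:  JSRuntime *rt = aheader->compartment->rt;  (Chunk::init() takes no argument)
JSRuntimeModel *RuntimeFromArena(ArenaHeaderModel *aheader)
{
    return aheader->compartment->rt;
}

int main()
{
    JSRuntimeModel rt = { 0 };
    JSCompartmentModel comp = { &rt };
    ArenaHeaderModel aheader = { &comp };
    return RuntimeFromArena(&aheader) == &rt ? 0 : 1;
}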
@@ -1546,7 +1542,7 @@ namespace js {
GCMarker::GCMarker(JSContext *cx)
: color(0),
unmarkedArenaStackTop(MarkingDelay::stackBottom()),
unmarkedArenaStackTop(NULL),
objStack(cx->runtime->gcMarkStackObjs, sizeof(cx->runtime->gcMarkStackObjs)),
ropeStack(cx->runtime->gcMarkStackRopes, sizeof(cx->runtime->gcMarkStackRopes)),
typeStack(cx->runtime->gcMarkStackTypes, sizeof(cx->runtime->gcMarkStackTypes)),
@@ -1573,24 +1569,23 @@ GCMarker::delayMarkingChildren(const void *thing)
{
const Cell *cell = reinterpret_cast<const Cell *>(thing);
ArenaHeader *aheader = cell->arenaHeader();
if (aheader->getMarkingDelay()->link) {
if (aheader->hasDelayedMarking) {
/* Arena already scheduled to be marked later */
return;
}
aheader->getMarkingDelay()->link = unmarkedArenaStackTop;
unmarkedArenaStackTop = aheader;
aheader->setNextDelayedMarking(unmarkedArenaStackTop);
unmarkedArenaStackTop = aheader->getArena();
markLaterArenas++;
}
static void
MarkDelayedChildren(JSTracer *trc, ArenaHeader *aheader)
MarkDelayedChildren(JSTracer *trc, Arena *a)
{
AllocKind thingKind = aheader->getAllocKind();
JSGCTraceKind traceKind = MapAllocToTraceKind(thingKind);
size_t thingSize = aheader->getThingSize();
Arena *a = aheader->getArena();
AllocKind allocKind = a->aheader.getAllocKind();
JSGCTraceKind traceKind = MapAllocToTraceKind(allocKind);
size_t thingSize = Arena::thingSize(allocKind);
uintptr_t end = a->thingsEnd();
for (uintptr_t thing = a->thingsStart(thingKind); thing != end; thing += thingSize) {
for (uintptr_t thing = a->thingsStart(allocKind); thing != end; thing += thingSize) {
Cell *t = reinterpret_cast<Cell *>(thing);
if (t->isMarked())
JS_TraceChildren(trc, t, traceKind);
@@ -1600,19 +1595,19 @@ MarkDelayedChildren(JSTracer *trc, ArenaHeader *aheader)
void
GCMarker::markDelayedChildren()
{
while (unmarkedArenaStackTop != MarkingDelay::stackBottom()) {
while (unmarkedArenaStackTop) {
/*
* If marking gets delayed at the same arena again, we must repeat
* marking of its things. For that we pop arena from the stack and
* clear its nextDelayedMarking before we begin the marking.
* clear its hasDelayedMarking flag before we begin the marking.
*/
ArenaHeader *aheader = unmarkedArenaStackTop;
unmarkedArenaStackTop = aheader->getMarkingDelay()->link;
JS_ASSERT(unmarkedArenaStackTop);
aheader->getMarkingDelay()->link = NULL;
Arena *a = unmarkedArenaStackTop;
JS_ASSERT(a->aheader.hasDelayedMarking);
JS_ASSERT(markLaterArenas);
unmarkedArenaStackTop = a->aheader.getNextDelayedMarking();
a->aheader.hasDelayedMarking = 0;
markLaterArenas--;
MarkDelayedChildren(this, aheader);
MarkDelayedChildren(this, a);
}
JS_ASSERT(!markLaterArenas);
}
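Note (illustrative, not part of the patch): the two GCMarker methods above now thread the delayed-marking stack through the arenas themselves. hasDelayedMarking tags arenas that are already queued (replacing the old MarkingDelay::link != NULL test), and the stack is plain NULL-terminated instead of ending at the MarkingDelay::stackBottom() sentinel. Below is a standalone model of those semantics; it uses an ordinary next pointer where the real code packs the link into the ArenaHeader bitfields (see the sketch at the end of the second file's diff), and the ArenaModel/MarkerModel names are invented for the example.

#include <cassert>
#include <cstdio>

struct ArenaModel {
    const char *name;
    bool hasDelayedMarking;          // arena is already on the delayed-marking stack
    ArenaModel *nextDelayedMarking;  // real code stores this as address >> ArenaShift
};

struct MarkerModel {
    ArenaModel *unmarkedArenaStackTop;  // NULL when nothing is delayed; no sentinel
    int markLaterArenas;

    void delayMarkingChildren(ArenaModel *a) {
        if (a->hasDelayedMarking)       // already scheduled to be marked later
            return;
        a->hasDelayedMarking = true;
        a->nextDelayedMarking = unmarkedArenaStackTop;
        unmarkedArenaStackTop = a;
        markLaterArenas++;
    }

    void markDelayedChildren() {
        while (unmarkedArenaStackTop) {
            ArenaModel *a = unmarkedArenaStackTop;
            assert(a->hasDelayedMarking);
            unmarkedArenaStackTop = a->nextDelayedMarking;
            // Clear the flag before marking so the arena can be pushed again
            // if marking its things overflows the mark stack once more.
            a->hasDelayedMarking = false;
            markLaterArenas--;
            std::printf("marking delayed children of %s\n", a->name);
        }
        assert(markLaterArenas == 0);
    }
};

int main()
{
    ArenaModel a = { "arena A", false, NULL };
    ArenaModel b = { "arena B", false, NULL };
    MarkerModel marker = { NULL, 0 };
    marker.delayMarkingChildren(&a);
    marker.delayMarkingChildren(&b);
    marker.delayMarkingChildren(&a);  // no-op: already on the stack
    marker.markDelayedChildren();     // pops b, then a, leaving the stack empty
    return 0;
}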


@@ -78,7 +78,6 @@ struct Shape;
namespace gc {
struct Arena;
struct MarkingDelay;
/*
* This must be an upper bound, but we do not need the least upper bound, so
@@ -357,6 +356,8 @@ struct FreeSpan {
/* Every arena has a header. */
struct ArenaHeader {
friend struct FreeLists;
JSCompartment *compartment;
ArenaHeader *next;
@@ -375,21 +376,48 @@ struct ArenaHeader {
* during the conservative GC scanning without searching the arena in the
* list.
*/
unsigned allocKind;
friend struct FreeLists;
size_t allocKind : 8;
/*
* When recursive marking uses too much stack the marking is delayed and
* the corresponding arenas are put into a stack using the following field
* as a linkage. To distinguish the bottom of the stack from the arenas
* not present in the stack we use an extra flag to tag arenas on the
* stack.
*
* To minimize the ArenaHeader size we record the next delayed marking
* linkage as arenaAddress() >> ArenaShift and pack it with the allocKind
* field and hasDelayedMarking flag. We use 8 bits for the allocKind, not
* ArenaShift - 1, so the compiler can use byte-level memory instructions
* to access it.
*/
public:
size_t hasDelayedMarking : 1;
size_t nextDelayedMarking : JS_BITS_PER_WORD - 8 - 1;
static void staticAsserts() {
/* We must be able to fit the allocKind into uint8. */
JS_STATIC_ASSERT(FINALIZE_LIMIT <= 255);
/*
* The nextDelayedMarking packing assumes that ArenaShift has enough bits
* to cover allocKind and hasDelayedMarking.
*/
JS_STATIC_ASSERT(ArenaShift >= 8 + 1);
}
inline uintptr_t address() const;
inline Chunk *chunk() const;
void setAsNotAllocated() {
allocKind = FINALIZE_LIMIT;
allocKind = size_t(FINALIZE_LIMIT);
hasDelayedMarking = 0;
nextDelayedMarking = 0;
}
bool allocated() const {
JS_ASSERT(allocKind <= FINALIZE_LIMIT);
return allocKind < FINALIZE_LIMIT;
JS_ASSERT(allocKind <= size_t(FINALIZE_LIMIT));
return allocKind < size_t(FINALIZE_LIMIT);
}
inline void init(JSCompartment *comp, AllocKind kind);
@@ -431,11 +459,12 @@ struct ArenaHeader {
firstFreeSpanOffsets = span->encodeAsOffsets();
}
inline MarkingDelay *getMarkingDelay() const;
#ifdef DEBUG
void checkSynchronizedWithFreeList() const;
#endif
inline Arena *getNextDelayedMarking() const;
inline void setNextDelayedMarking(Arena *arena);
};
struct Arena {
@@ -506,32 +535,8 @@ struct Arena {
bool finalize(JSContext *cx, AllocKind thingKind, size_t thingSize);
};
/*
* When recursive marking uses too much stack the marking is delayed and
* the corresponding arenas are put into a stack linked via the following
* per arena structure.
*/
struct MarkingDelay {
ArenaHeader *link;
void init() {
link = NULL;
}
/*
* To separate arenas without things to mark later from the arena at the
* marked delay stack bottom we use for the latter a special sentinel
* value. We set it to the header for the second arena in the chunk
* starting at the 0 address.
*/
static ArenaHeader *stackBottom() {
return reinterpret_cast<ArenaHeader *>(ArenaSize);
}
};
/* The chunk header (located at the end of the chunk to preserve arena alignment). */
struct ChunkInfo {
JSRuntime *runtime;
Chunk *next;
Chunk **prevp;
ArenaHeader *emptyArenaListHead;
@@ -539,7 +544,7 @@ struct ChunkInfo {
size_t numFree;
};
const size_t BytesPerArena = ArenaSize + ArenaBitmapBytes + sizeof(MarkingDelay);
const size_t BytesPerArena = ArenaSize + ArenaBitmapBytes;
const size_t ArenasPerChunk = (GC_CHUNK_SIZE - sizeof(ChunkInfo)) / BytesPerArena;
/* A chunk bitmap contains enough mark bits for all the cells in a chunk. */
@@ -613,7 +618,6 @@ JS_STATIC_ASSERT(ArenaBitmapBytes * ArenasPerChunk == sizeof(ChunkBitmap));
struct Chunk {
Arena arenas[ArenasPerChunk];
ChunkBitmap bitmap;
MarkingDelay markingDelay[ArenasPerChunk];
ChunkInfo info;
static Chunk *fromAddress(uintptr_t addr) {
@@ -637,7 +641,7 @@ struct Chunk {
return addr;
}
void init(JSRuntime *rt);
void init();
bool unused() const {
return info.numFree == ArenasPerChunk;
@@ -702,9 +706,11 @@ inline void
ArenaHeader::init(JSCompartment *comp, AllocKind kind)
{
JS_ASSERT(!allocated());
JS_ASSERT(!getMarkingDelay()->link);
JS_ASSERT(!hasDelayedMarking);
compartment = comp;
allocKind = kind;
JS_STATIC_ASSERT(FINALIZE_LIMIT <= 255);
allocKind = size_t(kind);
/* See comments in FreeSpan::allocateFromNewArena. */
firstFreeSpanOffsets = FreeSpan::FullArenaOffsets;
@@ -741,6 +747,20 @@ ArenaHeader::getThingSize() const
return Arena::thingSize(getAllocKind());
}
inline Arena *
ArenaHeader::getNextDelayedMarking() const
{
return reinterpret_cast<Arena *>(nextDelayedMarking << ArenaShift);
}
inline void
ArenaHeader::setNextDelayedMarking(Arena *arena)
{
JS_ASSERT(!hasDelayedMarking);
hasDelayedMarking = 1;
nextDelayedMarking = arena->address() >> ArenaShift;
}
JS_ALWAYS_INLINE void
ChunkBitmap::getMarkWordAndMask(const Cell *cell, uint32 color,
uintptr_t **wordp, uintptr_t *maskp)
@@ -752,12 +772,6 @@ ChunkBitmap::getMarkWordAndMask(const Cell *cell, uint32 color,
*wordp = &bitmap[bit / JS_BITS_PER_WORD];
}
inline MarkingDelay *
ArenaHeader::getMarkingDelay() const
{
return &chunk()->markingDelay[Chunk::arenaIndex(address())];
}
static void
AssertValidColor(const void *thing, uint32 color)
{
@@ -843,12 +857,6 @@ MapAllocToTraceKind(AllocKind thingKind)
inline JSGCTraceKind
GetGCThingTraceKind(const void *thing);
static inline JSRuntime *
GetGCThingRuntime(void *thing)
{
return reinterpret_cast<Cell *>(thing)->chunk()->info.runtime;
}
struct ArenaLists {
/*
@@ -1481,7 +1489,7 @@ struct GCMarker : public JSTracer {
public:
/* Pointer to the top of the stack of arenas we are delaying marking on. */
js::gc::ArenaHeader *unmarkedArenaStackTop;
js::gc::Arena *unmarkedArenaStackTop;
/* Count of arenas that are currently in the stack. */
DebugOnly<size_t> markLaterArenas;
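Note (illustrative, not part of the patch): the comment added to ArenaHeader above explains how the delayed-marking link shares one word with allocKind and hasDelayedMarking by storing arenaAddress() >> ArenaShift, and getNextDelayedMarking() recovers the Arena pointer by shifting back. A self-contained sketch of that packing, assuming 4 KiB arenas (ArenaShift == 12); kArenaShift and HeaderBitsModel are stand-in names, not the real jsgc definitions.

#include <cassert>
#include <cstddef>
#include <cstdint>

static const size_t kArenaShift = 12;  // assumed: log2 of a 4 KiB arena

// Same layout idea as the new ArenaHeader fields: 8 bits of allocKind so it
// stays byte-addressable, one flag bit, and the rest of the word for the link.
struct HeaderBitsModel {
    size_t allocKind : 8;
    size_t hasDelayedMarking : 1;
    size_t nextDelayedMarking : sizeof(size_t) * 8 - 8 - 1;
};

int main()
{
    // Mirrors JS_STATIC_ASSERT(ArenaShift >= 8 + 1): dropping ArenaShift zero
    // low bits must free at least the 9 bits used by allocKind and the flag.
    assert(kArenaShift >= 8 + 1);

    HeaderBitsModel h = { 0, 0, 0 };
    uintptr_t arenaAddr = uintptr_t(7) << kArenaShift;  // some arena-aligned address

    // setNextDelayedMarking(): store the address without its low zero bits.
    h.hasDelayedMarking = 1;
    h.nextDelayedMarking = arenaAddr >> kArenaShift;

    // getNextDelayedMarking(): shifting back recovers the original address
    // (the real code then reinterpret_casts it to Arena *).
    uintptr_t recovered = uintptr_t(h.nextDelayedMarking) << kArenaShift;
    assert(recovered == arenaAddr);

    // All three fields still fit in a single word on common ABIs.
    assert(sizeof(HeaderBitsModel) == sizeof(size_t));
    return 0;
}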