From fcbefa0ec662eae1c67021824c33724a15aaf332 Mon Sep 17 00:00:00 2001 From: Ed Morley Date: Thu, 14 Aug 2014 12:52:34 +0100 Subject: [PATCH] Backed out changeset edc768336c80 (bug 650161) --- js/public/Utility.h | 1 - js/src/builtin/TestingFunctions.cpp | 1 - js/src/gc/GCRuntime.h | 27 +- js/src/gc/Heap.h | 17 - js/src/gc/RootMarking.cpp | 26 +- js/src/gc/Tracer.cpp | 2 +- js/src/gc/Zone.cpp | 2 + js/src/jsapi-tests/testWeakMap.cpp | 2 +- js/src/jsgc.cpp | 514 ++-------------------------- js/src/jsgc.h | 15 +- 10 files changed, 47 insertions(+), 560 deletions(-) diff --git a/js/public/Utility.h b/js/public/Utility.h index f6714f9d783..89c495e4111 100644 --- a/js/public/Utility.h +++ b/js/public/Utility.h @@ -43,7 +43,6 @@ namespace js {} #define JS_SWEPT_NURSERY_PATTERN 0x2B #define JS_ALLOCATED_NURSERY_PATTERN 0x2D #define JS_FRESH_TENURED_PATTERN 0x4F -#define JS_MOVED_TENURED_PATTERN 0x49 #define JS_SWEPT_TENURED_PATTERN 0x4B #define JS_ALLOCATED_TENURED_PATTERN 0x4D #define JS_SWEPT_CODE_PATTERN 0x3b diff --git a/js/src/builtin/TestingFunctions.cpp b/js/src/builtin/TestingFunctions.cpp index acb17c74cac..74e979ddb70 100644 --- a/js/src/builtin/TestingFunctions.cpp +++ b/js/src/builtin/TestingFunctions.cpp @@ -2062,7 +2062,6 @@ static const JSFunctionSpecWithHelp TestingFunctions[] = { " 11: Verify post write barriers between instructions\n" " 12: Verify post write barriers between paints\n" " 13: Check internal hashtables on minor GC\n" -" 14: Always compact arenas after GC\n" " Period specifies that collection happens every n allocations.\n"), JS_FN_HELP("schedulegc", ScheduleGC, 1, 0, diff --git a/js/src/gc/GCRuntime.h b/js/src/gc/GCRuntime.h index 6eb990d45e6..e6603312905 100644 --- a/js/src/gc/GCRuntime.h +++ b/js/src/gc/GCRuntime.h @@ -293,17 +293,7 @@ class GCRuntime void runDebugGC(); inline void poke(); - enum TraceOrMarkRuntime { - TraceRuntime, - MarkRuntime - }; - enum TraceRootsOrUsedSaved { - TraceRoots, - UseSavedRoots - }; - void markRuntime(JSTracer *trc, - TraceOrMarkRuntime traceOrMark = TraceRuntime, - TraceRootsOrUsedSaved rootsSource = TraceRoots); + void markRuntime(JSTracer *trc, bool useSavedRoots = false); void notifyDidPaint(); void shrinkBuffers(); @@ -501,9 +491,6 @@ class GCRuntime void markWeakReferencesInCurrentGroup(gcstats::Phase phase); template <class ZoneIterT, class CompartmentIterT> void markGrayReferences(); void markGrayReferencesInCurrentGroup(); - void markAllWeakReferences(gcstats::Phase phase); - void markAllGrayReferences(); - void beginSweepPhase(bool lastGC); void findZoneGroups(); bool findZoneEdgesForWeakMaps(); @@ -520,13 +507,6 @@ class GCRuntime void expireChunksAndArenas(bool shouldShrink); void sweepBackgroundThings(bool onBackgroundThread); void assertBackgroundSweepingFinished(); - bool shouldCompact(); -#ifdef JSGC_COMPACTING - void compactPhase(); - void updatePointersToRelocatedCells(); - void releaseRelocatedArenas(ArenaHeader *relocatedList); -#endif - void finishCollection(); void computeNonIncrementalMarkingForValidation(); void validateIncrementalMarking(); @@ -536,6 +516,8 @@ class GCRuntime #ifdef DEBUG void checkForCompartmentMismatches(); + void markAllWeakReferences(gcstats::Phase phase); + void markAllGrayReferences(); #endif public: @@ -857,8 +839,7 @@ GCRuntime::needZealousGC() { if (zealMode == ZealAllocValue || zealMode == ZealGenerationalGCValue || (zealMode >= ZealIncrementalRootsThenFinish && - zealMode <= ZealIncrementalMultipleSlices) || - zealMode == ZealCompactValue) + zealMode <= ZealIncrementalMultipleSlices)) { nextScheduled = zealFrequency; }
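For context on the markRuntime() signature change in the GCRuntime.h hunk above: the backed-out patch had swapped a bare bool for two enums so call sites state their intent. A minimal standalone sketch of the two styles, with illustrative names that are not from the tree:

    #include <cstdio>

    // Style restored by this backout: a single boolean flag.
    static void markRuntimeBool(bool useSavedRoots) {
        std::printf("bool flag: useSavedRoots=%d\n", useSavedRoots);
    }

    // Style being backed out: enums make the call site self-documenting.
    enum TraceOrMarkRuntime { TraceRuntime, MarkRuntime };
    enum TraceRootsOrUseSavedRoots { TraceRoots, UseSavedRoots };

    static void markRuntimeEnum(TraceOrMarkRuntime traceOrMark = TraceRuntime,
                                TraceRootsOrUseSavedRoots roots = TraceRoots) {
        std::printf("enum flags: mark=%d saved=%d\n",
                    traceOrMark == MarkRuntime, roots == UseSavedRoots);
    }

    int main() {
        markRuntimeBool(true);                       // unclear what 'true' means
        markRuntimeEnum(MarkRuntime, UseSavedRoots); // intent is explicit
        return 0;
    }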
diff --git a/js/src/gc/Heap.h b/js/src/gc/Heap.h index 0758d12b7d9..d3444b1f9bd 100644 --- a/js/src/gc/Heap.h +++ b/js/src/gc/Heap.h @@ -102,7 +102,6 @@ struct Cell MOZ_ALWAYS_INLINE bool isMarked(uint32_t color = BLACK) const; MOZ_ALWAYS_INLINE bool markIfUnmarked(uint32_t color = BLACK) const; MOZ_ALWAYS_INLINE void unmark(uint32_t color) const; - MOZ_ALWAYS_INLINE void copyMarkBitsFrom(const Cell *src); inline JSRuntime *runtimeFromMainThread() const; inline JS::shadow::Runtime *shadowRuntimeFromMainThread() const; @@ -762,12 +761,6 @@ struct ChunkBitmap *word &= ~mask; } - MOZ_ALWAYS_INLINE void copyMarkBit(Cell *dst, const Cell *src, uint32_t color) { - uintptr_t *word, mask; - getMarkWordAndMask(dst, color, &word, &mask); - *word = (*word & ~mask) | (src->isMarked(color) ? mask : 0); - } - void clear() { memset((void *)bitmap, 0, sizeof(bitmap)); } @@ -1119,16 +1112,6 @@ Cell::unmark(uint32_t color) const chunk()->bitmap.unmark(this, color); } -void -Cell::copyMarkBitsFrom(const Cell *src) -{ - JS_ASSERT(isTenured()); - JS_ASSERT(src->isTenured()); - ChunkBitmap &bitmap = chunk()->bitmap; - bitmap.copyMarkBit(this, src, BLACK); - bitmap.copyMarkBit(this, src, GRAY); -} - JS::Zone * Cell::tenuredZone() const { diff --git a/js/src/gc/RootMarking.cpp b/js/src/gc/RootMarking.cpp index 5949fe798ef..62df4ff4cb4 100644 --- a/js/src/gc/RootMarking.cpp +++ b/js/src/gc/RootMarking.cpp @@ -707,17 +707,13 @@ js::gc::MarkForkJoinStack(ForkJoinNurseryCollectionTracer *trc) #endif // JSGC_FJGENERATIONAL void -js::gc::GCRuntime::markRuntime(JSTracer *trc, - TraceOrMarkRuntime traceOrMark, - TraceRootsOrUsedSaved rootsSource) +js::gc::GCRuntime::markRuntime(JSTracer *trc, bool useSavedRoots) { JS_ASSERT(trc->callback != GCMarker::GrayCallback); - JS_ASSERT(traceOrMark == TraceRuntime || traceOrMark == MarkRuntime); - JS_ASSERT(rootsSource == TraceRoots || rootsSource == UseSavedRoots); JS_ASSERT(!rt->mainThread.suppressGC); - if (traceOrMark == MarkRuntime) { + if (IS_GC_MARKING_TRACER(trc)) { for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) { if (!c->zone()->isCollecting()) c->markCrossCompartmentWrappers(trc); @@ -731,7 +727,7 @@ js::gc::GCRuntime::markRuntime(JSTracer *trc, #ifdef JSGC_USE_EXACT_ROOTING MarkExactStackRoots(rt, trc); #else - markConservativeStackRoots(trc, rootsSource == UseSavedRoots); + markConservativeStackRoots(trc, useSavedRoots); #endif rt->markSelfHostingGlobal(trc); } @@ -764,7 +760,7 @@ js::gc::GCRuntime::markRuntime(JSTracer *trc, } if (!rt->isBeingDestroyed() && !trc->runtime()->isHeapMinorCollecting()) { - if (traceOrMark == TraceRuntime || rt->atomsCompartment()->zone()->isCollecting()) { + if (!IS_GC_MARKING_TRACER(trc) || rt->atomsCompartment()->zone()->isCollecting()) { MarkPermanentAtoms(trc); MarkAtoms(trc); MarkWellKnownSymbols(trc); @@ -776,7 +772,7 @@ js::gc::GCRuntime::markRuntime(JSTracer *trc, acx->mark(trc); for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next()) { - if (traceOrMark == MarkRuntime && !zone->isCollecting()) + if (IS_GC_MARKING_TRACER(trc) && !zone->isCollecting()) continue; /* Do not discard scripts with counts while profiling. */ @@ -796,11 +792,11 @@ js::gc::GCRuntime::markRuntime(JSTracer *trc, if (trc->runtime()->isHeapMinorCollecting()) c->globalWriteBarriered = false; - if (traceOrMark == MarkRuntime && !c->zone()->isCollecting()) + if (IS_GC_MARKING_TRACER(trc) && !c->zone()->isCollecting()) continue; /* During a GC, these are treated as weak pointers. 
*/ - if (traceOrMark == TraceRuntime) { + if (!IS_GC_MARKING_TRACER(trc)) { if (c->watchpointMap) c->watchpointMap->markAll(trc); } @@ -816,9 +812,9 @@ js::gc::GCRuntime::markRuntime(JSTracer *trc, if (!isHeapMinorCollecting()) { /* - * All JSCompartment::markRoots() does is mark the globals for - * compartments which have been entered. Globals aren't nursery - * allocated so there's no need to do this for minor GCs. + * All JSCompartment::mark does is mark the globals for compartments + * which have been entered. Globals aren't nursery allocated so there's + * no need to do this for minor GCs. */ for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) c->markRoots(trc); @@ -837,7 +833,7 @@ js::gc::GCRuntime::markRuntime(JSTracer *trc, /* During GC, we don't mark gray roots at this stage. */ if (JSTraceDataOp op = grayRootTracer.op) { - if (traceOrMark == TraceRuntime) + if (!IS_GC_MARKING_TRACER(trc)) (*op)(trc, grayRootTracer.data); } } diff --git a/js/src/gc/Tracer.cpp b/js/src/gc/Tracer.cpp index 4abfec97eb1..f5c9d6761da 100644 --- a/js/src/gc/Tracer.cpp +++ b/js/src/gc/Tracer.cpp @@ -632,7 +632,7 @@ void GCMarker::markBufferedGrayRoots(JS::Zone *zone) { JS_ASSERT(grayBufferState == GRAY_BUFFER_OK); - JS_ASSERT(zone->isGCMarkingGray() || zone->isGCCompacting()); + JS_ASSERT(zone->isGCMarkingGray()); for (GrayRoot *elem = zone->gcGrayRoots.begin(); elem != zone->gcGrayRoots.end(); elem++) { #ifdef DEBUG diff --git a/js/src/gc/Zone.cpp b/js/src/gc/Zone.cpp index 77a270aac5e..fa6104768a8 100644 --- a/js/src/gc/Zone.cpp +++ b/js/src/gc/Zone.cpp @@ -120,6 +120,8 @@ Zone::sweep(FreeOp *fop, bool releaseTypes, bool *oom) if (!fop->runtime()->debuggerList.isEmpty()) sweepBreakpoints(fop); + + active = false; } void diff --git a/js/src/jsapi-tests/testWeakMap.cpp b/js/src/jsapi-tests/testWeakMap.cpp index ef583c299bd..8c3db691d2f 100644 --- a/js/src/jsapi-tests/testWeakMap.cpp +++ b/js/src/jsapi-tests/testWeakMap.cpp @@ -111,7 +111,7 @@ BEGIN_TEST(testWeakMap_keyDelegates) CHECK(map->zone()->lastZoneGroupIndex() == delegate->zone()->lastZoneGroupIndex()); #endif - /* Check that when the delegate becomes unreachable the entry is removed. */ + /* Check that when the delegate becomes unreachable the entry is removed. */ delegate = nullptr; JS_GC(rt); CHECK(checkSize(map, 0)); diff --git a/js/src/jsgc.cpp b/js/src/jsgc.cpp index cca8bae4b40..24c7a6b7b6b 100644 --- a/js/src/jsgc.cpp +++ b/js/src/jsgc.cpp @@ -169,16 +169,6 @@ * the mark state, this just stops marking, but if we have started sweeping * already, we continue until we have swept the current zone group. Following a * reset, a new non-incremental collection is started. - * - * Compacting GC - * ------------- - * - * Compacting GC happens at the end of a major GC as part of the last slice. - * There are three parts: - * - * - Arenas are selected for compaction. - * - The contents of those arenas are moved to new arenas. - * - All references to moved things are updated. */ #include "jsgcinlines.h" @@ -926,10 +916,7 @@ Chunk::allocateArena(Zone *zone, AllocKind thingKind) JS_ASSERT(hasAvailableArenas()); JSRuntime *rt = zone->runtimeFromAnyThread(); - if (!rt->isHeapMinorCollecting() && - !rt->isHeapCompacting() && - rt->gc.usage.gcBytes() >= rt->gc.tunables.gcMaxBytes()) - { + if (!rt->isHeapMinorCollecting() && rt->gc.usage.gcBytes() >= rt->gc.tunables.gcMaxBytes()) { #ifdef JSGC_FJGENERATIONAL // This is an approximation to the best test, which would check that // this thread is currently promoting into the tenured area.
I doubt @@ -950,7 +937,7 @@ Chunk::allocateArena(Zone *zone, AllocKind thingKind) zone->usage.addGCArena(); - if (!rt->isHeapCompacting() && zone->usage.gcBytes() >= zone->threshold.gcTriggerBytes()) { + if (zone->usage.gcBytes() >= zone->threshold.gcTriggerBytes()) { AutoUnlockGC unlock(rt); rt->gc.triggerZoneGC(zone, JS::gcreason::ALLOC_TRIGGER); } @@ -1998,18 +1985,6 @@ ArenaLists::wipeDuringParallelExecution(JSRuntime *rt) } } -/* Compacting GC */ - -bool -GCRuntime::shouldCompact() -{ -#ifdef JSGC_COMPACTING - return invocationKind == GC_SHRINK; -#else - return false; -#endif -} - #ifdef JSGC_COMPACTING static void @@ -2027,381 +2002,8 @@ ForwardCell(Cell *dest, Cell *src) ptr[1] = ForwardedCellMagicValue; // Moved! } -static bool -ArenaContainsGlobal(ArenaHeader *arena) -{ - if (arena->getAllocKind() > FINALIZE_OBJECT_LAST) - return false; - - for (ArenaCellIterUnderGC i(arena); !i.done(); i.next()) { - JSObject *obj = static_cast<JSObject *>(i.getCell()); - if (obj->is<GlobalObject>()) - return true; - } - - return false; -} - -static bool -CanRelocateArena(ArenaHeader *arena) -{ - /* - * We can't currently move global objects because their address is baked - * into compiled code. We therefore skip moving the contents of any arena - * containing a global. - */ - return arena->getAllocKind() <= FINALIZE_OBJECT_LAST && !ArenaContainsGlobal(arena); -} - -static bool -ShouldRelocateArena(ArenaHeader *arena) -{ -#ifdef JS_GC_ZEAL - if (arena->zone->runtimeFromMainThread()->gc.zeal() == ZealCompactValue) - return true; #endif - /* - * Eventually, this will be based on brilliant heuristics that look at fill - * percentage and fragmentation and... stuff. - */ - return arena->hasFreeThings(); -} - -/* - * Choose some arenas to relocate all cells out of and remove them from the - * arena list. Return the head of the list of arenas to relocate. - */ -ArenaHeader * -ArenaList::pickArenasToRelocate() -{ - check(); - ArenaHeader *head = nullptr; - ArenaHeader **tailp = &head; - - // TODO: Only scan through the arenas with space available. - ArenaHeader **arenap = &head_; - while (*arenap) { - ArenaHeader *arena = *arenap; - JS_ASSERT(arena); - if (CanRelocateArena(arena) && ShouldRelocateArena(arena)) { - // Remove from arena list - if (cursorp_ == &arena->next) - cursorp_ = arenap; - *arenap = arena->next; - arena->next = nullptr; - - // Append to relocation list - *tailp = arena; - tailp = &arena->next; - } else { - arenap = &arena->next; - } - } - - check(); - return head; -} - -static bool -RelocateCell(Zone *zone, Cell *src, AllocKind thingKind, size_t thingSize) -{ - // Allocate a new cell. - void *dst = zone->allocator.arenas.allocateFromFreeList(thingKind, thingSize); - if (!dst) - dst = js::gc::ArenaLists::refillFreeListInGC(zone, thingKind); - if (!dst) - return false; - - // Copy source cell contents to destination. - memcpy(dst, src, thingSize); - - // Mark source cell as forwarded and leave a pointer to the destination. - ForwardCell(static_cast<Cell *>(dst), src); - - // Fixup the pointer to inline object elements if necessary. - if (thingKind <= FINALIZE_OBJECT_LAST) { - JSObject *srcObj = static_cast<JSObject *>(src); - JSObject *dstObj = static_cast<JSObject *>(dst); - if (srcObj->hasFixedElements()) - dstObj->setFixedElements(); - JS_ASSERT( - uintptr_t((HeapSlot*)dstObj->getElementsHeader()) - uintptr_t(srcObj) >= thingSize); - }
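For reference, the forwarding scheme used by ForwardCell and RelocateCell above can be sketched standalone: the vacated cell is overwritten with the destination address plus a magic word, so later passes can distinguish moved cells from live ones. This is a toy model with invented names, not SpiderMonkey code:

    #include <cassert>
    #include <cstdint>

    static const uintptr_t ForwardedMagic = 0xf1f1f1f1;

    // A cell big enough to hold a forwarding pointer and a magic word.
    struct ToyCell { uintptr_t words[2]; };

    // After the contents move, the old location records where they went.
    static void Forward(ToyCell *src, ToyCell *dst) {
        src->words[0] = reinterpret_cast<uintptr_t>(dst); // destination
        src->words[1] = ForwardedMagic;                   // "moved" marker
    }

    static bool IsForwardedCell(const ToyCell *cell) {
        return cell->words[1] == ForwardedMagic;
    }

    static ToyCell *ForwardedTo(ToyCell *cell) {
        assert(IsForwardedCell(cell));
        return reinterpret_cast<ToyCell *>(cell->words[0]);
    }

    int main() {
        ToyCell from = {{0, 0}}, to = {{0, 0}};
        Forward(&from, &to);
        assert(IsForwardedCell(&from) && ForwardedTo(&from) == &to);
        return 0;
    }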
- - // Copy the mark bits. - static_cast<Cell *>(dst)->copyMarkBitsFrom(src); - - return true; -} - -static bool -RelocateArena(ArenaHeader *aheader) -{ - JS_ASSERT(aheader->allocated()); - JS_ASSERT(!aheader->hasDelayedMarking); - JS_ASSERT(!aheader->markOverflow); - JS_ASSERT(!aheader->allocatedDuringIncremental); - - Zone *zone = aheader->zone; - - AllocKind thingKind = aheader->getAllocKind(); - size_t thingSize = aheader->getThingSize(); - - for (ArenaCellIterUnderFinalize i(aheader); !i.done(); i.next()) { - if (!RelocateCell(zone, i.getCell(), thingKind, thingSize)) { - MOZ_CRASH(); // TODO: Handle failure here. - return false; - } - } - - return true; -} - -/* - * Relocate all arenas identified by pickArenasToRelocate: for each arena, - * relocate each cell within it, then tack it onto a list of relocated arenas. - * Currently, we allow the relocation to fail, in which case the arena will be - * moved back onto the list of arenas with space available. (I did this - * originally to test my list manipulation before implementing the actual - * moving, with half a thought to allowing pinning (moving only a portion of - * the cells in an arena), but now it's probably just dead weight. FIXME) - */ -ArenaHeader * -ArenaList::relocateArenas(ArenaHeader *toRelocate, ArenaHeader *relocated) -{ - check(); - - while (ArenaHeader *arena = toRelocate) { - toRelocate = arena->next; - - if (RelocateArena(arena)) { - // Prepend to list of relocated arenas - arena->next = relocated; - relocated = arena; - } else { - // For some reason, the arena did not end up empty. Prepend it to - // the portion of the list that the cursor is pointing to (the - // arenas with space available) so that it will be used for future - // allocations. - JS_ASSERT(arena->hasFreeThings()); - insertAtCursor(arena); - } - } - - check(); - - return relocated; -} - -ArenaHeader * -ArenaLists::relocateArenas(ArenaHeader *relocatedList) -{ - // Flush all the freeLists back into the arena headers - purge(); - checkEmptyFreeLists(); - - for (size_t i = 0; i < FINALIZE_LIMIT; i++) { - ArenaList &al = arenaLists[i]; - ArenaHeader *toRelocate = al.pickArenasToRelocate(); - if (toRelocate) - relocatedList = al.relocateArenas(toRelocate, relocatedList); - } - - /* - * When we allocate new locations for cells, we use - * allocateFromFreeList(). Reset the free list again so that - * AutoCopyFreeListToArenasForGC doesn't complain that the free lists - * are different now.
- */ - purge(); - checkEmptyFreeLists(); - - return relocatedList; -} - -struct MovingTracer : JSTracer { - MovingTracer(JSRuntime *rt) : JSTracer(rt, Visit, TraceWeakMapValues) {} - - static void Visit(JSTracer *jstrc, void **thingp, JSGCTraceKind kind); - static void Sweep(JSTracer *jstrc); -}; - -void -MovingTracer::Visit(JSTracer *jstrc, void **thingp, JSGCTraceKind kind) -{ - Cell *thing = static_cast<Cell *>(*thingp); - if (!thing->tenuredZone()->isGCCompacting()) { - JS_ASSERT(!IsForwarded(thing)); - return; - } - - if (IsForwarded(thing)) { - Cell *dst = Forwarded(thing); - *thingp = dst; - } -} - -void -MovingTracer::Sweep(JSTracer *jstrc) -{ - JSRuntime *rt = jstrc->runtime(); - FreeOp *fop = rt->defaultFreeOp(); - - WatchpointMap::sweepAll(rt); - - Debugger::sweepAll(fop); - - for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next()) { - if (zone->isCollecting()) { - gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_SWEEP_COMPARTMENTS); - - bool oom = false; - zone->sweep(fop, false, &oom); - JS_ASSERT(!oom); - - for (CompartmentsInZoneIter c(zone); !c.done(); c.next()) { - c->sweep(fop, false); - ArrayBufferObject::sweep(c); - } - } else { - /* Update cross compartment wrappers into moved zones. */ - for (CompartmentsInZoneIter c(zone); !c.done(); c.next()) - c->sweepCrossCompartmentWrappers(); - } - } - - /* Type inference may put more blocks here to free. */ - rt->freeLifoAlloc.freeAll(); -} - -/* - * Update the internal pointers in a single cell. - */ -static void -UpdateCellPointers(MovingTracer *trc, Cell *cell, JSGCTraceKind traceKind) { - TraceChildren(trc, cell, traceKind); - - if (traceKind == JSTRACE_SHAPE) { - Shape *shape = static_cast<Shape *>(cell); - shape->fixupAfterMovingGC(); - } else if (traceKind == JSTRACE_BASE_SHAPE) { - BaseShape *base = static_cast<BaseShape *>(cell); - base->fixupAfterMovingGC(); - } -} - -/* - * Update pointers to relocated cells by doing a full heap traversal and sweep. - * - * The latter is necessary to update weak references which are not marked as - * part of the traversal. - */ -void -GCRuntime::updatePointersToRelocatedCells() -{ - JS_ASSERT(rt->currentThreadHasExclusiveAccess()); - MovingTracer trc(rt); - - { - // TODO: Maybe give compaction its own set of phases. - gcstats::AutoPhase ap(stats, gcstats::PHASE_MARK); - - // TODO: We may need to fix up other weak pointers here. - - // Fixup compartment global pointers as these get accessed during marking. - for (GCCompartmentsIter comp(rt); !comp.done(); comp.next()) - comp->fixupAfterMovingGC(); - - // Fixup cross compartment wrappers as we assert the existence of wrappers in the map. - for (CompartmentsIter comp(rt, SkipAtoms); !comp.done(); comp.next()) - comp->fixupCrossCompartmentWrappers(&trc); - - // Fixup generators as these are not normally traced. - for (ContextIter i(rt); !i.done(); i.next()) { - for (JSGenerator *gen = i.get()->innermostGenerator(); gen; gen = gen->prevGenerator) - gen->obj = MaybeForwarded(gen->obj.get()); - } - - // Iterate through all allocated cells to update internal pointers. - for (GCZonesIter zone(rt); !zone.done(); zone.next()) { - ArenaLists &al = zone->allocator.arenas; - for (unsigned i = 0; i < FINALIZE_LIMIT; ++i) { - AllocKind thingKind = static_cast<AllocKind>(i); - JSGCTraceKind traceKind = MapAllocToTraceKind(thingKind); - for (ArenaHeader *arena = al.getFirstArena(thingKind); arena; arena = arena->next) { - for (ArenaCellIterUnderGC i(arena); !i.done(); i.next()) { - UpdateCellPointers(&trc, i.getCell(), traceKind); - } - } - } - }
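The MovingTracer removed above drives the third compaction step: every reference slot in the heap is visited and, where the referent was relocated, rewritten to the new address. A self-contained sketch of that idea with invented types (the real code dispatches through JSTracer callbacks):

    #include <cassert>
    #include <vector>

    struct Obj {
        Obj *forwarded = nullptr;     // set if this object was relocated
        std::vector<Obj *> children;  // outgoing edges to fix up
    };

    // Visit each edge; redirect it if the referent left a forwarding address.
    static void UpdatePointers(Obj *obj) {
        for (Obj *&edge : obj->children) {
            if (edge && edge->forwarded)
                edge = edge->forwarded;
        }
    }

    int main() {
        Obj oldLoc, newLoc, root;
        oldLoc.forwarded = &newLoc;   // pretend oldLoc's contents moved
        root.children.push_back(&oldLoc);
        UpdatePointers(&root);
        assert(root.children[0] == &newLoc);
        return 0;
    }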
- - // Mark roots to update them. - markRuntime(&trc, MarkRuntime); - Debugger::markAll(&trc); - Debugger::markCrossCompartmentDebuggerObjectReferents(&trc); - - for (GCCompartmentsIter c(rt); !c.done(); c.next()) { - if (c->watchpointMap) - c->watchpointMap->markAll(&trc); - } - } - - { - gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_SWEEP); - - markAllGrayReferences(); - - MovingTracer::Sweep(&trc); - } -} - -void -GCRuntime::releaseRelocatedArenas(ArenaHeader *relocatedList) -{ - // Release the relocated arenas, now containing only forwarding pointers - -#ifdef DEBUG - for (ArenaHeader *arena = relocatedList; arena; arena = arena->next) { - for (ArenaCellIterUnderFinalize i(arena); !i.done(); i.next()) { - Cell *src = i.getCell(); - JS_ASSERT(IsForwarded(src)); - Cell *dest = Forwarded(src); - JS_ASSERT(src->isMarked(BLACK) == dest->isMarked(BLACK)); - JS_ASSERT(src->isMarked(GRAY) == dest->isMarked(GRAY)); - } - } -#endif - - unsigned count = 0; - while (relocatedList) { - ArenaHeader *aheader = relocatedList; - relocatedList = relocatedList->next; - - // Mark arena as empty - AllocKind thingKind = aheader->getAllocKind(); - size_t thingSize = aheader->getThingSize(); - Arena *arena = aheader->getArena(); - FreeSpan fullSpan; - fullSpan.initFinal(arena->thingsStart(thingKind), arena->thingsEnd() - thingSize, thingSize); - aheader->setFirstFreeSpan(&fullSpan); - -#if defined(JS_CRASH_DIAGNOSTICS) || defined(JS_GC_ZEAL) - JS_POISON(reinterpret_cast<void *>(arena->thingsStart(thingKind)), - JS_MOVED_TENURED_PATTERN, Arena::thingsSpan(thingSize)); -#endif - - aheader->chunk()->releaseArena(aheader); - ++count; - } - - AutoLockGC lock(rt); - expireChunksAndArenas(true); -} - -#endif // JSGC_COMPACTING - void ArenaLists::finalizeNow(FreeOp *fop, AllocKind thingKind) { @@ -2688,22 +2290,6 @@ ArenaLists::refillFreeList(ThreadSafeContext *cx, AllocKind thingKind); template void * ArenaLists::refillFreeList(ThreadSafeContext *cx, AllocKind thingKind); -/* static */ void * -ArenaLists::refillFreeListInGC(Zone *zone, AllocKind thingKind) -{ - /* - * Called by compacting GC to refill a free list while we are in a GC.
- */ - - Allocator &allocator = zone->allocator; - JS_ASSERT(allocator.arenas.freeLists[thingKind].isEmpty()); - JSRuntime *rt = zone->runtimeFromMainThread(); - JS_ASSERT(rt->isHeapMajorCollecting()); - JS_ASSERT(!rt->gc.isBackgroundSweeping()); - - return allocator.arenas.allocateFromArena(zone, thingKind); -} - /* static */ int64_t SliceBudget::TimeBudget(int64_t millis) { @@ -3670,7 +3256,7 @@ GCRuntime::beginMarkPhase(JS::gcreason::Reason reason) if (isFull) UnmarkScriptData(rt); - markRuntime(gcmarker, MarkRuntime); + markRuntime(gcmarker); if (isIncremental) bufferGrayRoots(); @@ -3801,6 +3387,8 @@ GCRuntime::markGrayReferencesInCurrentGroup() markGrayReferences<GCZoneGroupIter, GCCompartmentGroupIter>(); } +#ifdef DEBUG + void GCRuntime::markAllWeakReferences(gcstats::Phase phase) { @@ -3813,8 +3401,6 @@ GCRuntime::markAllGrayReferences() markGrayReferences<GCZonesIter, GCCompartmentsIter>(); } -#ifdef DEBUG - class js::gc::MarkingValidator { public: @@ -3919,7 +3505,7 @@ js::gc::MarkingValidator::nonIncrementalMark() { gcstats::AutoPhase ap1(gc->stats, gcstats::PHASE_MARK); gcstats::AutoPhase ap2(gc->stats, gcstats::PHASE_MARK_ROOTS); - gc->markRuntime(gcmarker, GCRuntime::MarkRuntime, GCRuntime::UseSavedRoots); + gc->markRuntime(gcmarker, true); } { @@ -4681,8 +4267,7 @@ GCRuntime::beginSweepPhase(bool lastGC) gcstats::AutoPhase ap(stats, gcstats::PHASE_SWEEP); - sweepOnBackgroundThread = - !lastGC && !TraceEnabled() && CanUseExtraThreads() && !shouldCompact(); + sweepOnBackgroundThread = !lastGC && !TraceEnabled() && CanUseExtraThreads(); releaseObservedTypes = shouldReleaseObservedTypes(); @@ -4810,6 +4395,9 @@ GCRuntime::endSweepPhase(bool lastGC) JS_ASSERT_IF(lastGC, !sweepOnBackgroundThread); + JS_ASSERT(marker.isDrained()); + marker.stop(); + /* * Recalculate whether GC was full or not as this may have changed due to * newly created zones. Can only change from full to not full. @@ -4910,17 +4498,30 @@ GCRuntime::endSweepPhase(bool lastGC) sweepZones(&fop, lastGC); } - finishMarkingValidation(); + uint64_t currentTime = PRMJ_Now(); + schedulingState.updateHighFrequencyMode(lastGCTime, currentTime, tunables); + + for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) { + zone->threshold.updateAfterGC(zone->usage.gcBytes(), invocationKind, tunables, + schedulingState); + if (zone->isCollecting()) { + JS_ASSERT(zone->isGCFinished()); + zone->setGCState(Zone::NoGC); + } #ifdef DEBUG - for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) { + JS_ASSERT(!zone->isCollecting()); + JS_ASSERT(!zone->wasGCStarted()); + for (unsigned i = 0 ; i < FINALIZE_LIMIT ; ++i) { JS_ASSERT_IF(!IsBackgroundFinalized(AllocKind(i)) || !sweepOnBackgroundThread, !zone->allocator.arenas.arenaListsToSweep[i]); } +#endif } +#ifdef DEBUG for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) { JS_ASSERT(!c->gcIncomingGrayPointers); JS_ASSERT(c->gcLiveArrayBuffers.empty()); @@ -4931,61 +4532,8 @@ GCRuntime::endSweepPhase(bool lastGC) } } #endif -} -#ifdef JSGC_COMPACTING -void -GCRuntime::compactPhase() -{ - JS_ASSERT(rt->gc.nursery.isEmpty()); - JS_ASSERT(!sweepOnBackgroundThread); - - ArenaHeader *relocatedList = nullptr; - for (GCZonesIter zone(rt); !zone.done(); zone.next()) { - JS_ASSERT(zone->isGCFinished()); - JS_ASSERT(!zone->isPreservingCode()); -
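The compactPhase body being removed here runs the whole pipeline in order: select sparse arenas, relocate their cells, update pointers, release the vacated arenas. A toy driver showing that ordering, with stand-in types and a stand-in heuristic in place of ShouldRelocateArena's hasFreeThings() test:

    #include <cassert>
    #include <list>

    struct ToyArena { int liveCells; };

    static bool ShouldRelocate(const ToyArena &a) {
        return a.liveCells < 10; // stand-in for a fill/fragmentation heuristic
    }

    int main() {
        std::list<ToyArena> heap = {{10}, {3}, {7}, {10}};
        std::list<ToyArena> relocatedList;

        // Steps 1 and 2: pick candidate arenas and move them aside; in the
        // real code their cells are copied out and forwarded first.
        for (auto it = heap.begin(); it != heap.end();) {
            if (ShouldRelocate(*it))
                relocatedList.splice(relocatedList.begin(), heap, it++);
            else
                ++it;
        }

        // Step 3 (pointer update) would run here; see the earlier sketch.

        // Step 4: the vacated arenas are poisoned and handed back.
        assert(relocatedList.size() == 2);
        relocatedList.clear();
        return 0;
    }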
- // We cannot move atoms as we depend on their addresses being constant. - if (!rt->isAtomsZone(zone)) { - zone->setGCState(Zone::Compact); - relocatedList = zone->allocator.arenas.relocateArenas(relocatedList); - } - } - - updatePointersToRelocatedCells(); - releaseRelocatedArenas(relocatedList); - -#ifdef DEBUG - CheckHashTablesAfterMovingGC(rt); - for (GCZonesIter zone(rt); !zone.done(); zone.next()) { - if (!rt->isAtomsZone(zone) && !zone->isPreservingCode()) - zone->allocator.arenas.checkEmptyFreeLists(); - } -#endif -} -#endif // JSGC_COMPACTING - -void -GCRuntime::finishCollection() -{ - JS_ASSERT(marker.isDrained()); - marker.stop(); - - uint64_t currentTime = PRMJ_Now(); - schedulingState.updateHighFrequencyMode(lastGCTime, currentTime, tunables); - - for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) { - zone->threshold.updateAfterGC(zone->usage.gcBytes(), invocationKind, tunables, - schedulingState); - if (zone->isCollecting()) { - JS_ASSERT(zone->isGCFinished() || zone->isGCCompacting()); - zone->setGCState(Zone::NoGC); - zone->active = false; - } - - JS_ASSERT(!zone->isCollecting()); - JS_ASSERT(!zone->wasGCStarted()); - } + finishMarkingValidation(); lastGCTime = currentTime; } @@ -5322,14 +4870,6 @@ GCRuntime::incrementalCollectSlice(int64_t budget, if (sweepOnBackgroundThread) helperState.startBackgroundSweep(invocationKind == GC_SHRINK); -#ifdef JSGC_COMPACTING - if (shouldCompact()) { - incrementalState = COMPACT; - compactPhase(); - } -#endif - - finishCollection(); incrementalState = NO_INCREMENTAL; break; } @@ -6002,8 +5542,6 @@ GCRuntime::runDebugGC() { incrementalLimit = zealFrequency / 2; } - } else if (type == ZealCompactValue) { - collect(false, SliceBudget::Unlimited, GC_SHRINK, JS::gcreason::DEBUG_GC); } else { collect(false, SliceBudget::Unlimited, GC_NORMAL, JS::gcreason::DEBUG_GC); } diff --git a/js/src/jsgc.h b/js/src/jsgc.h index 4098135495d..05c825c8391 100644 --- a/js/src/jsgc.h +++ b/js/src/jsgc.h @@ -523,11 +523,6 @@ class ArenaList { check(); return *this; } - -#ifdef JSGC_COMPACTING - ArenaHeader *pickArenasToRelocate(); - ArenaHeader *relocateArenas(ArenaHeader *toRelocate, ArenaHeader *relocated); -#endif }; /* @@ -804,6 +799,7 @@ class ArenaLists clearFreeListInArena(AllocKind(i)); } + void clearFreeListInArena(AllocKind kind) { FreeList *freeList = &freeLists[kind]; if (!freeList->isEmpty()) { @@ -849,8 +845,6 @@ class ArenaLists template <AllowGC allowGC> static void *refillFreeList(ThreadSafeContext *cx, AllocKind thingKind); - static void *refillFreeListInGC(Zone *zone, AllocKind thingKind); - /* * Moves all arenas from |fromArenaLists| into |this|. In * parallel blocks, we temporarily create one ArenaLists per * allocation kind. @@ -874,10 +868,6 @@ class ArenaLists JS_ASSERT(freeLists[kind].isEmpty()); } -#ifdef JSGC_COMPACTING - ArenaHeader *relocateArenas(ArenaHeader *relocatedList); -#endif - void queueObjectsForSweep(FreeOp *fop); void queueStringsAndSymbolsForSweep(FreeOp *fop); void queueShapesForSweep(FreeOp *fop); @@ -1330,8 +1320,7 @@ const int ZealIncrementalMultipleSlices = 10; const int ZealVerifierPostValue = 11; const int ZealFrameVerifierPostValue = 12; const int ZealCheckHashTablesOnMinorGC = 13; -const int ZealCompactValue = 14; -const int ZealLimit = 14; +const int ZealLimit = 13; enum VerifierType { PreBarrierVerifier,
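A closing note on the Heap.h hunk earlier: copyMarkBitsFrom existed so a relocated cell kept its black/gray mark state. Mark state lives in a side bitmap, one bit per cell, so the copy is plain word/mask arithmetic. A standalone sketch with an invented layout, not the real ChunkBitmap:

    #include <cassert>
    #include <cstdint>

    struct ToyBitmap {
        uintptr_t words[4] = {};

        void getWordAndMask(size_t bit, uintptr_t **word, uintptr_t *mask) {
            const size_t bitsPerWord = sizeof(uintptr_t) * 8;
            *word = &words[bit / bitsPerWord];
            *mask = uintptr_t(1) << (bit % bitsPerWord);
        }

        bool isMarked(size_t bit) {
            uintptr_t *word, mask;
            getWordAndMask(bit, &word, &mask);
            return *word & mask;
        }

        // Clear the destination bit, then set it iff the source is marked,
        // mirroring the removed ChunkBitmap::copyMarkBit.
        void copyMarkBit(size_t dstBit, size_t srcBit) {
            uintptr_t *word, mask;
            getWordAndMask(dstBit, &word, &mask);
            *word = (*word & ~mask) | (isMarked(srcBit) ? mask : uintptr_t(0));
        }
    };

    int main() {
        ToyBitmap bm;
        uintptr_t *word, mask;
        bm.getWordAndMask(5, &word, &mask);
        *word |= mask;            // mark bit 5
        bm.copyMarkBit(70, 5);    // copy across a word boundary
        assert(bm.isMarked(70));
        bm.copyMarkBit(70, 6);    // unmarked source clears the destination
        assert(!bm.isMarked(70));
        return 0;
    }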