diff --git a/js/src/builtin/MapObject.cpp b/js/src/builtin/MapObject.cpp index bf4b85e82c9..865a7dbe50e 100644 --- a/js/src/builtin/MapObject.cpp +++ b/js/src/builtin/MapObject.cpp @@ -1136,7 +1136,7 @@ WriteBarrierPost(JSRuntime *rt, ValueMap *map, const HashableValue &key) { #ifdef JSGC_GENERATIONAL typedef OrderedHashMap<Value, Value, UnbarrieredHashPolicy, RuntimeAllocPolicy> UnbarrieredMap; - rt->gcStoreBuffer.putGeneric(OrderedHashTableRef<UnbarrieredMap>( + rt->gc.storeBuffer.putGeneric(OrderedHashTableRef<UnbarrieredMap>( reinterpret_cast<UnbarrieredMap *>(map), key.get())); #endif } @@ -1146,7 +1146,7 @@ WriteBarrierPost(JSRuntime *rt, ValueSet *set, const HashableValue &key) { #ifdef JSGC_GENERATIONAL typedef OrderedHashSet<Value, UnbarrieredHashPolicy, RuntimeAllocPolicy> UnbarrieredSet; - rt->gcStoreBuffer.putGeneric(OrderedHashTableRef<UnbarrieredSet>( + rt->gc.storeBuffer.putGeneric(OrderedHashTableRef<UnbarrieredSet>( reinterpret_cast<UnbarrieredSet *>(set), key.get())); #endif }
diff --git a/js/src/builtin/TestingFunctions.cpp b/js/src/builtin/TestingFunctions.cpp index 9f02e5ab296..fdd06603112 100644 --- a/js/src/builtin/TestingFunctions.cpp +++ b/js/src/builtin/TestingFunctions.cpp @@ -238,7 +238,7 @@ GC(JSContext *cx, unsigned argc, jsval *vp) } #ifndef JS_MORE_DETERMINISTIC - size_t preBytes = cx->runtime()->gcBytes; + size_t preBytes = cx->runtime()->gc.bytes; #endif if (compartment) @@ -250,7 +250,7 @@ GC(JSContext *cx, unsigned argc, jsval *vp) char buf[256] = { '\0' }; #ifndef JS_MORE_DETERMINISTIC JS_snprintf(buf, sizeof(buf), "before %lu, after %lu\n", - (unsigned long)preBytes, (unsigned long)cx->runtime()->gcBytes); + (unsigned long)preBytes, (unsigned long)cx->runtime()->gc.bytes); #endif JSString *str = JS_NewStringCopyZ(cx, buf); if (!str) @@ -265,7 +265,7 @@ MinorGC(JSContext *cx, unsigned argc, jsval *vp) CallArgs args = CallArgsFromVp(argc, vp); #ifdef JSGC_GENERATIONAL if (args.get(0) == BooleanValue(true)) - cx->runtime()->gcStoreBuffer.setAboutToOverflow(); + cx->runtime()->gc.storeBuffer.setAboutToOverflow(); MinorGC(cx, gcreason::API); #endif @@ -445,7 +445,7 @@ GCPreserveCode(JSContext *cx, unsigned argc, jsval *vp) return false; } - cx->runtime()->alwaysPreserveCode = true; + cx->runtime()->gc.alwaysPreserveCode = true; args.rval().setUndefined(); return true; @@ -513,7 +513,7 @@ SelectForGC(JSContext *cx, unsigned argc, Value *vp) JSRuntime *rt = cx->runtime(); for (unsigned i = 0; i < args.length(); i++) { if (args[i].isObject()) { - if (!rt->gcSelectedForMarking.append(&args[i].toObject())) + if (!rt->gc.selectedForMarking.append(&args[i].toObject())) return false; } } @@ -564,7 +564,7 @@ GCState(JSContext *cx, unsigned argc, jsval *vp) } const char *state; - gc::State globalState = cx->runtime()->gcIncrementalState; + gc::State globalState = cx->runtime()->gc.incrementalState; if (globalState == gc::NO_INCREMENTAL) state = "none"; else if (globalState == gc::MARK)
diff --git a/js/src/gc/GCInternals.h b/js/src/gc/GCInternals.h index 2ee1793f503..69af2a4bd2f 100644 --- a/js/src/gc/GCInternals.h +++ b/js/src/gc/GCInternals.h @@ -124,11 +124,11 @@ class AutoStopVerifyingBarriers MOZ_GUARD_OBJECT_NOTIFIER_PARAM) : runtime(rt) { - restartPreVerifier = !isShutdown && rt->gcVerifyPreData; - restartPostVerifier = !isShutdown && rt->gcVerifyPostData && JS::IsGenerationalGCEnabled(rt); - if (rt->gcVerifyPreData) + restartPreVerifier = !isShutdown && rt->gc.verifyPreData; + restartPostVerifier = !isShutdown && rt->gc.verifyPostData && JS::IsGenerationalGCEnabled(rt); + if (rt->gc.verifyPreData) EndVerifyPreBarriers(rt); - if (rt->gcVerifyPostData) + if (rt->gc.verifyPostData) EndVerifyPostBarriers(rt); MOZ_GUARD_OBJECT_NOTIFIER_INIT; }
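The hunks above, and everything that follows, apply one mechanical transformation: GC state that used to live as individual gc*-prefixed members of JSRuntime moves into the new GCRuntime class (introduced below) and is reached as rt->gc.<field>. A minimal sketch of the shape of that refactoring, using stand-in types rather than the real SpiderMonkey classes:

```cpp
// Stand-in types only; not the real SpiderMonkey classes.
#include <cstddef>
#include <cstdint>

struct StoreBuffer {
    void setAboutToOverflow() {}
};

struct GCRuntime {
    uint64_t number = 0;     // was JSRuntime::gcNumber
    size_t bytes = 0;        // was JSRuntime::gcBytes
    StoreBuffer storeBuffer; // was JSRuntime::gcStoreBuffer
};

struct JSRuntime {
    GCRuntime gc;            // all GC state now hangs off this one member
};

int main()
{
    JSRuntime rt;
    rt.gc.number++;                         // old: rt->gcNumber++
    rt.gc.storeBuffer.setAboutToOverflow(); // old: rt->gcStoreBuffer.setAboutToOverflow()
    return 0;
}
```

Dropping the gc prefix at the same time (gcBytes becomes gc.bytes) keeps the call sites short without making them harder to grep for.

diff --git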
a/js/src/gc/GCRuntime.h b/js/src/gc/GCRuntime.h new file mode 100644 index 00000000000..ca1e8d778b0 --- /dev/null +++ b/js/src/gc/GCRuntime.h @@ -0,0 +1,371 @@ +/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- + * vim: set ts=8 sts=4 et sw=4 tw=99: + * This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef gc_GCRuntime_h +#define gc_GCRuntime_h + +#include "jsgc.h" + +#include "gc/Heap.h" +#ifdef JSGC_GENERATIONAL +# include "gc/Nursery.h" +#endif +#include "gc/Statistics.h" +#ifdef JSGC_GENERATIONAL +# include "gc/StoreBuffer.h" +#endif + +namespace js { +namespace gc { + +typedef Vector<JS::Zone *, 4, js::SystemAllocPolicy> ZoneVector; + +class MarkingValidator; + +struct ConservativeGCData +{ + /* + * The GC scans conservatively between ThreadData::nativeStackBase and + * nativeStackTop unless the latter is nullptr. + */ + uintptr_t *nativeStackTop; + + union { + jmp_buf jmpbuf; + uintptr_t words[JS_HOWMANY(sizeof(jmp_buf), sizeof(uintptr_t))]; + } registerSnapshot; + + ConservativeGCData() { + mozilla::PodZero(this); + } + + ~ConservativeGCData() { +#ifdef JS_THREADSAFE + /* + * The conservative GC scanner should be disabled when the thread leaves + * the last request. + */ + JS_ASSERT(!hasStackToScan()); +#endif + } + + MOZ_NEVER_INLINE void recordStackTop(); + +#ifdef JS_THREADSAFE + void updateForRequestEnd() { + nativeStackTop = nullptr; + } +#endif + + bool hasStackToScan() const { + return !!nativeStackTop; + } +}; + +class GCRuntime +{ + public: + GCRuntime(JSRuntime *rt); + + public: // Internal state, public for now + + /* Embedders can use this zone however they wish. */ + JS::Zone *systemZone; + + /* List of compartments and zones (protected by the GC lock). */ + js::gc::ZoneVector zones; + + js::gc::SystemPageAllocator pageAllocator; + + /* + * Set of all GC chunks with at least one allocated thing. The + * conservative GC uses it to quickly check if a possible GC thing points + * into an allocated chunk. + */ + js::GCChunkSet chunkSet; + + /* + * Doubly-linked lists of chunks from user and system compartments. The GC + * allocates its arenas from the corresponding list and when all arenas + * in the list head are taken, then the chunk is removed from the list. + * During the GC when all arenas in a chunk become free, that chunk is + * removed from the list and scheduled for release. + */ + js::gc::Chunk *systemAvailableChunkListHead; + js::gc::Chunk *userAvailableChunkListHead; + js::gc::ChunkPool chunkPool; + + js::RootedValueMap rootsHash; + + /* This is updated by both the main and GC helper threads. */ + mozilla::Atomic<size_t, mozilla::ReleaseAcquire> bytes; + + size_t maxBytes; + size_t maxMallocBytes; + + /* + * Number of the committed arenas in all GC chunks including empty chunks. + */ + mozilla::Atomic<uint32_t, mozilla::ReleaseAcquire> numArenasFreeCommitted; + js::GCMarker marker; + void *verifyPreData; + void *verifyPostData; + bool chunkAllocationSinceLastGC; + int64_t nextFullGCTime; + int64_t lastGCTime; + int64_t jitReleaseTime; + + JSGCMode mode; + + size_t allocationThreshold; + bool highFrequencyGC; + uint64_t highFrequencyTimeThreshold; + uint64_t highFrequencyLowLimitBytes; + uint64_t highFrequencyHighLimitBytes; + double highFrequencyHeapGrowthMax; + double highFrequencyHeapGrowthMin; + double lowFrequencyHeapGrowth; + bool dynamicHeapGrowth; + bool dynamicMarkSlice; + uint64_t decommitThreshold; +
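ConservativeGCData above exists so the collector can scan the native stack and the spilled register state for values that look like GC pointers. A rough, illustration-only sketch of the underlying trick, under the assumption (suggested by the jmp_buf member above) that setjmp is what spills the registers; the types here are invented:

```cpp
// Illustrative sketch only; the real logic lives in ConservativeGCData.
#include <csetjmp>
#include <cstdint>

struct ConservativeScanState {
    uintptr_t *nativeStackTop = nullptr;
    jmp_buf registerSnapshot;
};

// In real code this must not be inlined into the frame being scanned.
void recordStackTop(ConservativeScanState &state)
{
    // The address of a local approximates the current top of the native stack.
    uintptr_t top;
    state.nativeStackTop = &top;
    // setjmp dumps callee-saved registers into the buffer, so pointers held
    // only in registers become visible to a plain memory scan of the buffer.
    setjmp(state.registerSnapshot);
}
```

The scanner can then treat every word between nativeStackTop and the thread's recorded stack base, plus every word of registerSnapshot, as a potential pointer into a chunk in chunkSet.

+ /* During shutdown, the GC needs to clean up every possible object.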
*/ + bool shouldCleanUpEverything; + + /* + * The gray bits can become invalid if UnmarkGray overflows the stack. A + * full GC will reset this bit, since it fills in all the gray bits. + */ + bool grayBitsValid; + + /* + * These flags must be kept separate so that a thread requesting a + * compartment GC doesn't cancel another thread's concurrent request for a + * full GC. + */ + volatile uintptr_t isNeeded; + + js::gcstats::Statistics stats; + + /* Incremented on every GC slice. */ + uint64_t number; + + /* The number at the time of the most recent GC's first slice. */ + uint64_t startNumber; + + /* Whether the currently running GC can finish in multiple slices. */ + bool isIncremental; + + /* Whether all compartments are being collected in first GC slice. */ + bool isFull; + + /* The reason that an interrupt-triggered GC should be called. */ + JS::gcreason::Reason triggerReason; + + /* + * If this is true, all marked objects must belong to a compartment being + * GCed. This is used to look for compartment bugs. + */ + bool strictCompartmentChecking; + +#ifdef DEBUG + /* + * If this is 0, all cross-compartment proxies must be registered in the + * wrapper map. This checking must be disabled temporarily while creating + * new wrappers. When non-zero, this records the recursion depth of wrapper + * creation. + */ + uintptr_t disableStrictProxyCheckingCount; +#else + uintptr_t unused1; +#endif + + /* + * The current incremental GC phase. This is also used internally in + * non-incremental GC. + */ + js::gc::State incrementalState; + + /* Indicates that the last incremental slice exhausted the mark stack. */ + bool lastMarkSlice; + + /* Whether any sweeping will take place in the separate GC helper thread. */ + bool sweepOnBackgroundThread; + + /* Whether any black->gray edges were found during marking. */ + bool foundBlackGrayEdges; + + /* List head of zones to be swept in the background. */ + JS::Zone *sweepingZones; + + /* Index of current zone group (for stats). */ + unsigned zoneGroupIndex; + + /* + * Incremental sweep state. + */ + JS::Zone *zoneGroups; + JS::Zone *currentZoneGroup; + int sweepPhase; + JS::Zone *sweepZone; + int sweepKindIndex; + bool abortSweepAfterCurrentGroup; + + /* + * List head of arenas allocated during the sweep phase. + */ + js::gc::ArenaHeader *arenasAllocatedDuringSweep; + +#ifdef DEBUG + js::gc::MarkingValidator *markingValidator; +#endif + + /* + * Indicates that a GC slice has taken place in the middle of an animation + * frame, rather than at the beginning. In this case, the next slice will be + * delayed so that we don't get back-to-back slices. + */ + volatile uintptr_t interFrameGC; + + /* Default budget for incremental GC slice. See SliceBudget in jsgc.h. */ + int64_t sliceBudget; + + /* + * We disable incremental GC if we encounter a js::Class with a trace hook + * that does not implement write barriers. + */ + bool incrementalEnabled; + + /* + * GGC can be enabled from the command line while testing. + */ + unsigned generationalDisabled; + + /* + * This is true if we are in the middle of a brain transplant (e.g., + * JS_TransplantObject) or some other operation that can manipulate + * dead zones. + */ + bool manipulatingDeadZones; + + /* + * This field is incremented each time we mark an object inside a + * zone with no incoming cross-compartment pointers. Typically if + * this happens it signals that an incremental GC is marking too much + * stuff. 
At various times we check this counter and, if it has changed, we + * run an immediate, non-incremental GC to clean up the dead + * zones. This should happen very rarely. + */ + unsigned objectsMarkedInDeadZones; + + bool poke; + + volatile js::HeapState heapState; + +#ifdef JSGC_GENERATIONAL + js::Nursery nursery; + js::gc::StoreBuffer storeBuffer; +#endif + + /* + * These options control the zealousness of the GC. The fundamental values + * are nextScheduled and debugCompartmentGC. At every allocation, + * nextScheduled is decremented. When it reaches zero, we do either a + * full or a compartmental GC, based on debugCompartmentGC. + * + * At this point, if zealMode is one of the types that trigger periodic + * collection, then nextScheduled is reset to the value of + * zealFrequency. Otherwise, no additional GCs take place. + * + * You can control these values in several ways: + * - Pass the -Z flag to the shell (see the usage info for details) + * - Call zeal() or schedulegc() from inside shell-executed JS code + * (see the help for details) + * + * If zealMode == 1 then we perform GCs in select places (during MaybeGC and + * whenever a GC poke happens). This option is mainly useful to embedders. + * + * We use zealMode == 4 to enable write barrier verification. See the comment + * in jsgc.cpp for more information about this. + * + * zealMode values from 8 to 10 periodically run different types of + * incremental GC. + */ +#ifdef JS_GC_ZEAL + int zealMode; + int zealFrequency; + int nextScheduled; + bool deterministicOnly; + int incrementalLimit; + + js::Vector<JSObject *, 0, js::SystemAllocPolicy> selectedForMarking; +#endif + + bool validate; + bool fullCompartmentChecks; + + JSGCCallback callback; + JS::GCSliceCallback sliceCallback; + JSFinalizeCallback finalizeCallback; + + void *callbackData; + + /* + * Malloc counter to measure memory pressure for GC scheduling. It runs + * from maxMallocBytes down to zero. + */ + mozilla::Atomic<ptrdiff_t, mozilla::ReleaseAcquire> mallocBytes; + + /* + * Whether a GC has been triggered as a result of mallocBytes falling + * below zero. + */ + mozilla::Atomic<bool, mozilla::ReleaseAcquire> mallocGCTriggered; + + /* + * The trace operations to trace embedding-specific GC roots. One is for + * tracing through black roots and the other is for tracing through gray + * roots. The black/gray distinction is only relevant to the cycle + * collector. + */ + typedef js::Vector<ExtraTracer, 4, js::SystemAllocPolicy> ExtraTracerVector; + ExtraTracerVector blackRootTracers; + ExtraTracer grayRootTracer; + + /* + * The GC can only safely decommit memory when the page size of the + * running process matches the compiled arena size. + */ + size_t systemPageSize; + + /* The OS allocation granularity may not match the page size. */ + size_t systemAllocGranularity; + + /* Strong references on scripts held for PCCount profiling API. */ + js::ScriptAndCountsVector *scriptAndCountsVector; + + /* Always preserve JIT code during GCs, for testing. */ + bool alwaysPreserveCode; + +#ifdef DEBUG + size_t noGCOrAllocationCheck; +#endif +
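The zeal comment above compresses a lot of scheduling behavior into prose. A rough model of the countdown it describes, with the caveat that which modes re-arm periodically is a stand-in choice for this sketch, not the engine's actual table:

```cpp
// Rough model of the zeal scheduling described in the comment above.
struct ZealState {
    int zealMode = 0;       // 0 = zeal disabled
    int zealFrequency = 0;  // allocations between forced collections
    int nextScheduled = 0;  // countdown to the next forced collection
};

// Called on every allocation; returns true when a GC should be forced.
bool shouldForceGCOnAllocation(ZealState &z)
{
    if (z.zealMode == 0 || z.nextScheduled <= 0)
        return false;
    if (--z.nextScheduled > 0)
        return false;
    // Periodic modes re-arm the countdown from zealFrequency; one-shot
    // modes (e.g. a GC scheduled via schedulegc()) stay at zero.
    // Treating 8..10 as periodic follows the comment; the full mode table
    // is an assumption here.
    bool periodic = (z.zealMode >= 8 && z.zealMode <= 10);
    if (periodic)
        z.nextScheduled = z.zealFrequency;
    return true;
}
```

+ /* Synchronize GC heap access between main thread and GCHelperThread.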
*/ + PRLock *lock; + mozilla::DebugOnly<PRThread *> lockOwner; + + friend class js::GCHelperThread; + + js::GCHelperThread helperThread; + + ConservativeGCData conservativeGC; +}; + +} /* namespace gc */ +} /* namespace js */ + +#endif
diff --git a/js/src/gc/Iteration.cpp b/js/src/gc/Iteration.cpp index 3b1582d606b..81dedb58dfb 100644 --- a/js/src/gc/Iteration.cpp +++ b/js/src/gc/Iteration.cpp @@ -84,7 +84,7 @@ js::IterateChunks(JSRuntime *rt, void *data, IterateChunkCallback chunkCallback) { AutoPrepareForTracing prep(rt, SkipAtoms); - for (js::GCChunkSet::Range r = rt->gcChunkSet.all(); !r.empty(); r.popFront()) + for (js::GCChunkSet::Range r = rt->gc.chunkSet.all(); !r.empty(); r.popFront()) chunkCallback(rt, data, r.front()); }
diff --git a/js/src/gc/Marking.cpp b/js/src/gc/Marking.cpp index 106768a33d4..7a7ff3e0bfe 100644 --- a/js/src/gc/Marking.cpp +++ b/js/src/gc/Marking.cpp @@ -169,7 +169,7 @@ CheckMarkedThing(JSTracer *trc, T *thing) DebugOnly<JSRuntime *> rt = trc->runtime(); - JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc) && rt->gcManipulatingDeadZones, + JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc) && rt->gc.manipulatingDeadZones, !thing->zone()->scheduledForDestruction); JS_ASSERT(CurrentThreadCanAccessRuntime(rt)); @@ -181,7 +181,7 @@ CheckMarkedThing(JSTracer *trc, T *thing) JS_ASSERT(MapTypeToTraceKind<T>::kind == GetGCThingTraceKind(thing)); - JS_ASSERT_IF(rt->gcStrictCompartmentChecking, + JS_ASSERT_IF(rt->gc.strictCompartmentChecking, thing->zone()->isCollecting() || rt->isAtomsZone(thing->zone())); JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc) && AsGCMarker(trc)->getMarkColor() == GRAY, @@ -247,8 +247,8 @@ MarkInternal(JSTracer *trc, T **thingp) #define JS_ROOT_MARKING_ASSERT(trc) \ JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc), \ - trc->runtime()->gcIncrementalState == NO_INCREMENTAL || \ - trc->runtime()->gcIncrementalState == MARK_ROOTS); + trc->runtime()->gc.incrementalState == NO_INCREMENTAL || \ + trc->runtime()->gc.incrementalState == MARK_ROOTS); namespace js { namespace gc { @@ -339,7 +339,7 @@ IsMarked(T **thingp) JS_ASSERT(thingp); JS_ASSERT(*thingp); #ifdef JSGC_GENERATIONAL - Nursery &nursery = (*thingp)->runtimeFromMainThread()->gcNursery; + Nursery &nursery = (*thingp)->runtimeFromMainThread()->gc.nursery; if (nursery.isInside(*thingp)) return nursery.getForwardedPointer(thingp); #endif @@ -364,7 +364,7 @@ IsAboutToBeFinalized(T **thingp) return false; #ifdef JSGC_GENERATIONAL - Nursery &nursery = rt->gcNursery; + Nursery &nursery = rt->gc.nursery; JS_ASSERT_IF(!rt->isHeapMinorCollecting(), !nursery.isInside(thing)); if (rt->isHeapMinorCollecting()) { if (nursery.isInside(thing)) @@ -394,8 +394,8 @@ UpdateIfRelocated(JSRuntime *rt, T **thingp) { JS_ASSERT(thingp); #ifdef JSGC_GENERATIONAL - if (*thingp && rt->isHeapMinorCollecting() && rt->gcNursery.isInside(*thingp)) - rt->gcNursery.getForwardedPointer(thingp); + if (*thingp && rt->isHeapMinorCollecting() && rt->gc.nursery.isInside(*thingp)) - rt->gc.nursery.getForwardedPointer(thingp); #endif return *thingp; } @@ -784,7 +784,7 @@ ShouldMarkCrossCompartment(JSTracer *trc, JSObject *src, Cell *cell) */ if (cell->isMarked(GRAY)) { JS_ASSERT(!zone->isCollecting()); - trc->runtime()->gcFoundBlackGrayEdges = true; + trc->runtime()->gc.foundBlackGrayEdges = true; } return zone->isGCMarking(); } else { @@ -1540,7 +1540,7 @@ GCMarker::processMarkStackTop(SliceBudget &budget) // if the global has no custom trace hook of its own, or has been moved to a different // compartment, and so can't have one. JS_ASSERT_IF(runtime()->gcMode() == JSGC_MODE_INCREMENTAL && - runtime()->gcIncrementalEnabled && + runtime()->gc.incrementalEnabled && !(clasp->trace == JS_GlobalObjectTraceHook && (!obj->compartment()->options().getTrace() || !obj->isOwnGlobal())), @@ -1586,10 +1586,10 @@ GCMarker::drainMarkStack(SliceBudget &budget) struct AutoCheckCompartment { JSRuntime *runtime; AutoCheckCompartment(JSRuntime *rt) : runtime(rt) { - JS_ASSERT(!rt->gcStrictCompartmentChecking); - runtime->gcStrictCompartmentChecking = true; + JS_ASSERT(!rt->gc.strictCompartmentChecking); + runtime->gc.strictCompartmentChecking = true; } - ~AutoCheckCompartment() { runtime->gcStrictCompartmentChecking = false; } + ~AutoCheckCompartment() { runtime->gc.strictCompartmentChecking = false; } } acc(rt); #endif @@ -1739,7 +1739,7 @@ UnmarkGrayChildren(JSTracer *trc, void **thingp, JSGCTraceKind kind) * If we run out of stack, we take a more drastic measure: require that * we GC again before the next CC. */ - trc->runtime()->gcGrayBitsValid = false; + trc->runtime()->gc.grayBitsValid = false; return; }
diff --git a/js/src/gc/Nursery.cpp b/js/src/gc/Nursery.cpp index 59e0bc03997..fe3528d6a5f 100644 --- a/js/src/gc/Nursery.cpp +++ b/js/src/gc/Nursery.cpp @@ -53,7 +53,7 @@ js::Nursery::init() if (!hugeSlots.init()) return false; - void *heap = runtime()->pageAllocator.mapAlignedPages(NurserySize, Alignment); + void *heap = runtime()->gc.pageAllocator.mapAlignedPages(NurserySize, Alignment); if (!heap) return false; @@ -79,7 +79,7 @@ js::Nursery::init() js::Nursery::~Nursery() { if (start()) - runtime()->pageAllocator.unmapPages((void *)start(), NurserySize); + runtime()->gc.pageAllocator.unmapPages((void *)start(), NurserySize); } void @@ -108,7 +108,7 @@ js::Nursery::enable() setCurrentChunk(0); currentStart_ = position(); #ifdef JS_GC_ZEAL - if (runtime()->gcZeal_ == ZealGenerationalGCValue) + if (runtime()->gc.zealMode == ZealGenerationalGCValue) enterZealMode(); #endif } @@ -130,7 +130,7 @@ js::Nursery::isEmpty() const JS_ASSERT(runtime_); if (!isEnabled()) return true; - JS_ASSERT_IF(runtime_->gcZeal_ != ZealGenerationalGCValue, currentStart_ == start()); + JS_ASSERT_IF(runtime_->gc.zealMode != ZealGenerationalGCValue, currentStart_ == start()); return position() == currentStart_; } @@ -324,7 +324,7 @@ class MinorCollectionTracer : public JSTracer savedRuntimeNeedBarrier(rt->needsBarrier()), disableStrictProxyChecking(rt) { - rt->gcNumber++; + rt->gc.number++;
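IsMarked, IsAboutToBeFinalized and UpdateIfRelocated above all funnel nursery pointers through the same isInside/getForwardedPointer pair. A simplified model of that contract, with invented containers in place of the real Nursery layout:

```cpp
// Simplified model; the real Nursery stores forwarding info in-place.
#include <unordered_map>

struct Cell {};

struct NurseryModel {
    const char *start = nullptr;
    const char *end = nullptr;
    std::unordered_map<Cell *, Cell *> forwarded; // old address -> new address

    bool isInside(const void *p) const {
        const char *q = static_cast<const char *>(p);
        return q >= start && q < end;
    }

    // Returns true (and rewrites *thingp) iff the cell survived the minor GC.
    bool getForwardedPointer(Cell **thingp) const {
        auto it = forwarded.find(*thingp);
        if (it == forwarded.end())
            return false; // dead: the cell was never evacuated
        *thingp = it->second;
        return true;
    }
};
```

/* * We disable the runtime needsBarrier() check so that pre-barriers do @@ -341,7 +341,7 @@ class MinorCollectionTracer : public JSTracer * sweep their dead views. Incremental collection also uses these lists, * so we may need to save and restore their contents here.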
*/ - if (rt->gcIncrementalState != NO_INCREMENTAL) { + if (rt->gc.incrementalState != NO_INCREMENTAL) { for (GCCompartmentsIter c(rt); !c.done(); c.next()) { if (!ArrayBufferObject::saveArrayBufferList(c, liveArrayBuffers)) CrashAtUnhandlableOOM("OOM while saving live array buffers"); @@ -352,7 +352,7 @@ class MinorCollectionTracer : public JSTracer ~MinorCollectionTracer() { runtime()->setNeedsBarrier(savedRuntimeNeedBarrier); - if (runtime()->gcIncrementalState != NO_INCREMENTAL) + if (runtime()->gc.incrementalState != NO_INCREMENTAL) ArrayBufferObject::restoreArrayBufferLists(liveArrayBuffers); } }; @@ -740,7 +740,7 @@ js::Nursery::collect(JSRuntime *rt, JS::gcreason::Reason reason, TypeObjectList if (isEmpty()) return; - rt->gcStats.count(gcstats::STAT_MINOR_GC); + rt->gc.stats.count(gcstats::STAT_MINOR_GC); TIME_START(total); @@ -750,7 +750,7 @@ js::Nursery::collect(JSRuntime *rt, JS::gcreason::Reason reason, TypeObjectList MinorCollectionTracer trc(rt, this); // Mark the store buffer. This must happen first. - StoreBuffer &sb = rt->gcStoreBuffer; + StoreBuffer &sb = rt->gc.storeBuffer; TIME_START(markValues); sb.markValues(&trc); TIME_END(markValues); @@ -852,13 +852,13 @@ js::Nursery::collect(JSRuntime *rt, JS::gcreason::Reason reason, TypeObjectList TIME_END(sweep); TIME_START(clearStoreBuffer); - rt->gcStoreBuffer.clear(); + rt->gc.storeBuffer.clear(); TIME_END(clearStoreBuffer); // We ignore gcMaxBytes when allocating for minor collection. However, if we // overflowed, we disable the nursery. The next time we allocate, we'll fail // because gcBytes >= gcMaxBytes. - if (rt->gcBytes >= rt->gcMaxBytes) + if (rt->gc.bytes >= rt->gc.maxBytes) disable(); TIME_END(total); @@ -922,7 +922,7 @@ js::Nursery::sweep(JSRuntime *rt) for (int i = 0; i < NumNurseryChunks; ++i) initChunk(i); - if (rt->gcZeal_ == ZealGenerationalGCValue) { + if (rt->gc.zealMode == ZealGenerationalGCValue) { MOZ_ASSERT(numActiveChunks_ == NumNurseryChunks); /* Only reset the alloc point when we are close to the end. 
*/ @@ -947,7 +947,8 @@ void js::Nursery::growAllocableSpace() { #ifdef JS_GC_ZEAL - MOZ_ASSERT_IF(runtime()->gcZeal_ == ZealGenerationalGCValue, numActiveChunks_ == NumNurseryChunks); + MOZ_ASSERT_IF(runtime()->gc.zealMode == ZealGenerationalGCValue, + numActiveChunks_ == NumNurseryChunks); #endif numActiveChunks_ = Min(numActiveChunks_ * 2, NumNurseryChunks); } @@ -956,7 +957,7 @@ void js::Nursery::shrinkAllocableSpace() { #ifdef JS_GC_ZEAL - if (runtime()->gcZeal_ == ZealGenerationalGCValue) + if (runtime()->gc.zealMode == ZealGenerationalGCValue) return; #endif numActiveChunks_ = Max(numActiveChunks_ - 1, 1); diff --git a/js/src/gc/RootMarking.cpp b/js/src/gc/RootMarking.cpp index 155ebafa0bb..f78ce522939 100644 --- a/js/src/gc/RootMarking.cpp +++ b/js/src/gc/RootMarking.cpp @@ -143,7 +143,7 @@ IsAddressableGCThing(JSRuntime *rt, uintptr_t w, Chunk *chunk = Chunk::fromAddress(addr); - if (!rt->gcChunkSet.has(chunk)) + if (!rt->gc.chunkSet.has(chunk)) return CGCT_NOTCHUNK; /* @@ -223,7 +223,7 @@ MarkIfGCThingWord(JSTracer *trc, uintptr_t w) JS_ASSERT(tmp == thing); #ifdef DEBUG - if (trc->runtime()->gcIncrementalState == MARK_ROOTS) + if (trc->runtime()->gc.incrementalState == MARK_ROOTS) trc->runtime()->mainThread.gcSavedRoots.append( PerThreadData::SavedGCRoot(thing, traceKind)); #endif @@ -691,7 +691,7 @@ js::gc::MarkRuntime(JSTracer *trc, bool useSavedRoots) rt->markSelfHostingGlobal(trc); } - for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront()) { + for (RootRange r = rt->gc.rootsHash.all(); !r.empty(); r.popFront()) { const RootEntry &entry = r.front(); const char *name = entry.value().name ? entry.value().name : "root"; JSGCRootType type = entry.value().type; @@ -712,8 +712,8 @@ js::gc::MarkRuntime(JSTracer *trc, bool useSavedRoots) MarkPersistentRootedChains(trc); - if (rt->scriptAndCountsVector) { - ScriptAndCountsVector &vec = *rt->scriptAndCountsVector; + if (rt->gc.scriptAndCountsVector) { + ScriptAndCountsVector &vec = *rt->gc.scriptAndCountsVector; for (size_t i = 0; i < vec.length(); i++) MarkScriptRoot(trc, &vec[i].script, "scriptAndCountsVector"); } @@ -788,15 +788,15 @@ js::gc::MarkRuntime(JSTracer *trc, bool useSavedRoots) * the nursery should be in the store buffer, and we want to avoid the * time taken to trace all these roots. */ - for (size_t i = 0; i < rt->gcBlackRootTracers.length(); i++) { - const JSRuntime::ExtraTracer &e = rt->gcBlackRootTracers[i]; + for (size_t i = 0; i < rt->gc.blackRootTracers.length(); i++) { + const ExtraTracer &e = rt->gc.blackRootTracers[i]; (*e.op)(trc, e.data); } /* During GC, we don't mark gray roots at this stage. 
*/ - if (JSTraceDataOp op = rt->gcGrayRootTracer.op) { + if (JSTraceDataOp op = rt->gc.grayRootTracer.op) { if (!IS_GC_MARKING_TRACER(trc)) - (*op)(trc, rt->gcGrayRootTracer.data); + (*op)(trc, rt->gc.grayRootTracer.data); } } } @@ -806,7 +806,7 @@ js::gc::BufferGrayRoots(GCMarker *gcmarker) { JSRuntime *rt = gcmarker->runtime(); gcmarker->startBufferingGrayRoots(); - if (JSTraceDataOp op = rt->gcGrayRootTracer.op) - (*op)(gcmarker, rt->gcGrayRootTracer.data); + if (JSTraceDataOp op = rt->gc.grayRootTracer.op) + (*op)(gcmarker, rt->gc.grayRootTracer.data); gcmarker->endBufferingGrayRoots(); } diff --git a/js/src/gc/Statistics.cpp b/js/src/gc/Statistics.cpp index eb068119a42..0886f3c4135 100644 --- a/js/src/gc/Statistics.cpp +++ b/js/src/gc/Statistics.cpp @@ -521,7 +521,7 @@ Statistics::beginGC() sccTimes.clearAndFree(); nonincrementalReason = nullptr; - preBytes = runtime->gcBytes; + preBytes = runtime->gc.bytes; } void @@ -547,7 +547,7 @@ Statistics::endGC() (*cb)(JS_TELEMETRY_GC_MARK_ROOTS_MS, t(phaseTimes[PHASE_MARK_ROOTS])); (*cb)(JS_TELEMETRY_GC_MARK_GRAY_MS, t(phaseTimes[PHASE_SWEEP_MARK_GRAY])); (*cb)(JS_TELEMETRY_GC_NON_INCREMENTAL, !!nonincrementalReason); - (*cb)(JS_TELEMETRY_GC_INCREMENTAL_DISABLED, !runtime->gcIncrementalEnabled); + (*cb)(JS_TELEMETRY_GC_INCREMENTAL_DISABLED, !runtime->gc.incrementalEnabled); (*cb)(JS_TELEMETRY_GC_SCC_SWEEP_TOTAL_MS, t(sccTotal)); (*cb)(JS_TELEMETRY_GC_SCC_SWEEP_MAX_PAUSE_MS, t(sccLongest)); @@ -567,7 +567,7 @@ Statistics::beginSlice(int collectedCount, int zoneCount, int compartmentCount, this->zoneCount = zoneCount; this->compartmentCount = compartmentCount; - bool first = runtime->gcIncrementalState == gc::NO_INCREMENTAL; + bool first = runtime->gc.incrementalState == gc::NO_INCREMENTAL; if (first) beginGC(); @@ -580,7 +580,7 @@ Statistics::beginSlice(int collectedCount, int zoneCount, int compartmentCount, // Slice callbacks should only fire for the outermost level if (++gcDepth == 1) { bool wasFullGC = collectedCount == zoneCount; - if (JS::GCSliceCallback cb = runtime->gcSliceCallback) + if (JS::GCSliceCallback cb = runtime->gc.sliceCallback) (*cb)(runtime, first ? JS::GC_CYCLE_BEGIN : JS::GC_SLICE_BEGIN, JS::GCDescription(!wasFullGC)); } @@ -597,14 +597,14 @@ Statistics::endSlice() (*cb)(JS_TELEMETRY_GC_RESET, !!slices.back().resetReason); } - bool last = runtime->gcIncrementalState == gc::NO_INCREMENTAL; + bool last = runtime->gc.incrementalState == gc::NO_INCREMENTAL; if (last) endGC(); // Slice callbacks should only fire for the outermost level if (--gcDepth == 0) { bool wasFullGC = collectedCount == zoneCount; - if (JS::GCSliceCallback cb = runtime->gcSliceCallback) + if (JS::GCSliceCallback cb = runtime->gc.sliceCallback) (*cb)(runtime, last ? 
JS::GC_CYCLE_END : JS::GC_SLICE_END, JS::GCDescription(!wasFullGC)); }
diff --git a/js/src/gc/StoreBuffer.cpp b/js/src/gc/StoreBuffer.cpp index b3a8f996de4..328a5de07fb 100644 --- a/js/src/gc/StoreBuffer.cpp +++ b/js/src/gc/StoreBuffer.cpp @@ -26,7 +26,7 @@ StoreBuffer::SlotsEdge::mark(JSTracer *trc) { JSObject *obj = object(); - if (trc->runtime()->gcNursery.isInside(obj)) + if (trc->runtime()->gc.nursery.isInside(obj)) return; if (!obj->isNative()) { @@ -337,7 +337,7 @@ JS::HeapCellPostBarrier(js::gc::Cell **cellp) { JS_ASSERT(*cellp); JSRuntime *runtime = (*cellp)->runtimeFromMainThread(); - runtime->gcStoreBuffer.putRelocatableCell(cellp); + runtime->gc.storeBuffer.putRelocatableCell(cellp); } JS_PUBLIC_API(void) JS::HeapCellRelocate(js::gc::Cell **cellp) { /* Called with old contents of *pp before overwriting. */ JS_ASSERT(*cellp); JSRuntime *runtime = (*cellp)->runtimeFromMainThread(); - runtime->gcStoreBuffer.removeRelocatableCell(cellp); + runtime->gc.storeBuffer.removeRelocatableCell(cellp); } JS_PUBLIC_API(void) @@ -356,7 +356,7 @@ JS::HeapValuePostBarrier(JS::Value *valuep) if (valuep->isString() && StringIsPermanentAtom(valuep->toString())) return; JSRuntime *runtime = static_cast<js::gc::Cell *>(valuep->toGCThing())->runtimeFromMainThread(); - runtime->gcStoreBuffer.putRelocatableValue(valuep); + runtime->gc.storeBuffer.putRelocatableValue(valuep); } JS_PUBLIC_API(void) @@ -367,7 +367,7 @@ JS::HeapValueRelocate(JS::Value *valuep) if (valuep->isString() && StringIsPermanentAtom(valuep->toString())) return; JSRuntime *runtime = static_cast<js::gc::Cell *>(valuep->toGCThing())->runtimeFromMainThread(); - runtime->gcStoreBuffer.removeRelocatableValue(valuep); + runtime->gc.storeBuffer.removeRelocatableValue(valuep); } template class StoreBuffer::MonoTypeBuffer;
diff --git a/js/src/gc/Tracer.cpp b/js/src/gc/Tracer.cpp index edabb860431..1b8cf61cbae 100644 --- a/js/src/gc/Tracer.cpp +++ b/js/src/gc/Tracer.cpp @@ -531,8 +531,8 @@ bool GCMarker::markDelayedChildren(SliceBudget &budget) { gcstats::MaybeAutoPhase ap; - if (runtime()->gcIncrementalState == MARK) - ap.construct(runtime()->gcStats, gcstats::PHASE_MARK_DELAYED); + if (runtime()->gc.incrementalState == MARK) + ap.construct(runtime()->gc.stats, gcstats::PHASE_MARK_DELAYED); JS_ASSERT(unmarkedArenaStackTop); do { @@ -669,6 +669,6 @@ js::SetMarkStackLimit(JSRuntime *rt, size_t limit) { JS_ASSERT(!rt->isHeapBusy()); AutoStopVerifyingBarriers pauseVerification(rt, false); - rt->gcMarker.setMaxCapacity(limit); + rt->gc.marker.setMaxCapacity(limit); }
diff --git a/js/src/gc/Verifier.cpp b/js/src/gc/Verifier.cpp index e7292b4de56..8add21fd669 100644 --- a/js/src/gc/Verifier.cpp +++ b/js/src/gc/Verifier.cpp @@ -103,7 +103,7 @@ struct VerifyPreTracer : JSTracer NodeMap nodemap; VerifyPreTracer(JSRuntime *rt, JSTraceCallback callback) - : JSTracer(rt, callback), noggc(rt), number(rt->gcNumber), count(0), root(nullptr) + : JSTracer(rt, callback), noggc(rt), number(rt->gc.number), count(0), root(nullptr) {} ~VerifyPreTracer() { @@ -171,7 +171,7 @@ NextNode(VerifyNode *node) void gc::StartVerifyPreBarriers(JSRuntime *rt) { - if (rt->gcVerifyPreData || rt->gcIncrementalState != NO_INCREMENTAL) + if (rt->gc.verifyPreData || rt->gc.incrementalState != NO_INCREMENTAL) return;
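The put/remove pairs in the HeapCell and HeapValue barriers above follow a simple protocol: register a heap location when it may start pointing into the nursery, and deregister it just before its old contents are overwritten or freed. A minimal sketch of that protocol with stand-in types:

```cpp
// Stand-in container; the real StoreBuffer uses specialized edge buffers.
#include <set>

struct Cell {};

struct RelocatableStoreBuffer {
    std::set<Cell **> entries; // locations to scan during the next minor GC
    void putRelocatableCell(Cell **cellp) { entries.insert(cellp); }
    void removeRelocatableCell(Cell **cellp) { entries.erase(cellp); }
};

void writeCellField(RelocatableStoreBuffer &sb, Cell **slot, Cell *newVal)
{
    if (*slot)
        sb.removeRelocatableCell(slot); // the old edge is about to die
    *slot = newVal;
    if (newVal)
        sb.putRelocatableCell(slot);    // the new edge may cross generations
}
```

/* @@ -180,7 +180,7 @@ gc::StartVerifyPreBarriers(JSRuntime *rt) * starting the pre barrier verifier if the post barrier verifier is already * running.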
*/ - if (rt->gcVerifyPostData) + if (rt->gc.verifyPostData) return; MinorGC(rt, JS::gcreason::EVICT_NURSERY); @@ -190,10 +190,10 @@ gc::StartVerifyPreBarriers(JSRuntime *rt) if (!IsIncrementalGCSafe(rt)) return; - for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront()) + for (GCChunkSet::Range r(rt->gc.chunkSet.all()); !r.empty(); r.popFront()) r.front()->bitmap.clear(); - rt->gcNumber++; + rt->gc.number++; VerifyPreTracer *trc = js_new<VerifyPreTracer>(rt, JSTraceCallback(nullptr)); if (!trc) @@ -219,7 +219,7 @@ gc::StartVerifyPreBarriers(JSRuntime *rt) trc->curnode = MakeNode(trc, nullptr, JSGCTraceKind(0)); /* We want MarkRuntime to save the roots to gcSavedRoots. */ - rt->gcIncrementalState = MARK_ROOTS; + rt->gc.incrementalState = MARK_ROOTS; /* Make all the roots be edges emanating from the root node. */ MarkRuntime(trc); @@ -245,9 +245,9 @@ gc::StartVerifyPreBarriers(JSRuntime *rt) node = NextNode(node); } - rt->gcVerifyPreData = trc; - rt->gcIncrementalState = MARK; - rt->gcMarker.start(); + rt->gc.verifyPreData = trc; + rt->gc.incrementalState = MARK; + rt->gc.marker.start(); rt->setNeedsBarrier(true); for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) { @@ -259,9 +259,9 @@ gc::StartVerifyPreBarriers(JSRuntime *rt) return; oom: - rt->gcIncrementalState = NO_INCREMENTAL; + rt->gc.incrementalState = NO_INCREMENTAL; js_delete(trc); - rt->gcVerifyPreData = nullptr; + rt->gc.verifyPreData = nullptr; } static bool @@ -323,7 +323,7 @@ gc::EndVerifyPreBarriers(JSRuntime *rt) AutoPrepareForTracing prep(rt, SkipAtoms); - VerifyPreTracer *trc = (VerifyPreTracer *)rt->gcVerifyPreData; + VerifyPreTracer *trc = (VerifyPreTracer *)rt->gc.verifyPreData; if (!trc) return; @@ -344,11 +344,11 @@ gc::EndVerifyPreBarriers(JSRuntime *rt) * We need to bump gcNumber so that the methodjit knows that jitcode has * been discarded. */ - JS_ASSERT(trc->number == rt->gcNumber); - rt->gcNumber++; + JS_ASSERT(trc->number == rt->gc.number); + rt->gc.number++; - rt->gcVerifyPreData = nullptr; - rt->gcIncrementalState = NO_INCREMENTAL; + rt->gc.verifyPreData = nullptr; + rt->gc.incrementalState = NO_INCREMENTAL; if (!compartmentCreated && IsIncrementalGCSafe(rt)) { trc->setTraceCallback(CheckEdge); @@ -368,8 +368,8 @@ gc::EndVerifyPreBarriers(JSRuntime *rt) } } - rt->gcMarker.reset(); - rt->gcMarker.stop(); + rt->gc.marker.reset(); + rt->gc.marker.stop(); js_delete(trc); } @@ -389,7 +389,7 @@ struct VerifyPostTracer : JSTracer EdgeSet *edges; VerifyPostTracer(JSRuntime *rt, JSTraceCallback callback) - : JSTracer(rt, callback), number(rt->gcNumber), count(0) + : JSTracer(rt, callback), number(rt->gc.number), count(0) {} }; @@ -402,21 +402,21 @@ void gc::StartVerifyPostBarriers(JSRuntime *rt) { #ifdef JSGC_GENERATIONAL - if (rt->gcVerifyPostData || - rt->gcIncrementalState != NO_INCREMENTAL) + if (rt->gc.verifyPostData || + rt->gc.incrementalState != NO_INCREMENTAL) { return; } MinorGC(rt, JS::gcreason::EVICT_NURSERY); - rt->gcNumber++; + rt->gc.number++; VerifyPostTracer *trc = js_new<VerifyPostTracer>(rt, JSTraceCallback(nullptr)); if (!trc) return; - rt->gcVerifyPostData = trc; + rt->gc.verifyPostData = trc; #endif } @@ -432,7 +432,7 @@ PostVerifierCollectStoreBufferEdges(JSTracer *jstrc, void **thingp, JSGCTraceKin /* The store buffer may store extra, non-cross-generational edges.
*/ JSObject *dst = *reinterpret_cast<JSObject **>(thingp); - if (trc->runtime()->gcNursery.isInside(thingp) || !trc->runtime()->gcNursery.isInside(dst)) + if (trc->runtime()->gc.nursery.isInside(thingp) || !trc->runtime()->gc.nursery.isInside(dst)) return; /* @@ -468,9 +468,9 @@ PostVerifierVisitEdge(JSTracer *jstrc, void **thingp, JSGCTraceKind kind) return; /* Filter out non cross-generational edges. */ - JS_ASSERT(!trc->runtime()->gcNursery.isInside(thingp)); + JS_ASSERT(!trc->runtime()->gc.nursery.isInside(thingp)); JSObject *dst = *reinterpret_cast<JSObject **>(thingp); - if (!trc->runtime()->gcNursery.isInside(dst)) + if (!trc->runtime()->gc.nursery.isInside(dst)) return; /* @@ -492,14 +492,14 @@ js::gc::EndVerifyPostBarriers(JSRuntime *rt) VerifyPostTracer::EdgeSet edges; AutoPrepareForTracing prep(rt, SkipAtoms); - VerifyPostTracer *trc = (VerifyPostTracer *)rt->gcVerifyPostData; + VerifyPostTracer *trc = (VerifyPostTracer *)rt->gc.verifyPostData; /* Visit every entry in the store buffer and put the edges in a hash set. */ trc->setTraceCallback(PostVerifierCollectStoreBufferEdges); if (!edges.init()) goto oom; trc->edges = &edges; - rt->gcStoreBuffer.markAll(trc); + rt->gc.storeBuffer.markAll(trc); /* Walk the heap to find any edges not in the |edges| set. */ trc->setTraceCallback(PostVerifierVisitEdge); @@ -514,7 +514,7 @@ js::gc::EndVerifyPostBarriers(JSRuntime *rt) oom: js_delete(trc); - rt->gcVerifyPostData = nullptr; + rt->gc.verifyPostData = nullptr; #endif } @@ -523,7 +523,7 @@ oom: static void VerifyPreBarriers(JSRuntime *rt) { - if (rt->gcVerifyPreData) + if (rt->gc.verifyPreData) EndVerifyPreBarriers(rt); else StartVerifyPreBarriers(rt); @@ -532,7 +532,7 @@ VerifyPreBarriers(JSRuntime *rt) static void VerifyPostBarriers(JSRuntime *rt) { - if (rt->gcVerifyPostData) + if (rt->gc.verifyPostData) EndVerifyPostBarriers(rt); else StartVerifyPostBarriers(rt); @@ -556,8 +556,8 @@ MaybeVerifyPreBarriers(JSRuntime *rt, bool always) if (rt->mainThread.suppressGC) return; - if (VerifyPreTracer *trc = (VerifyPreTracer *)rt->gcVerifyPreData) { - if (++trc->count < rt->gcZealFrequency && !always) + if (VerifyPreTracer *trc = (VerifyPreTracer *)rt->gc.verifyPreData) { + if (++trc->count < rt->gc.zealFrequency && !always) return; EndVerifyPreBarriers(rt); @@ -573,11 +573,11 @@ MaybeVerifyPostBarriers(JSRuntime *rt, bool always) if (rt->gcZeal() != ZealVerifierPostValue) return; - if (rt->mainThread.suppressGC || !rt->gcStoreBuffer.isEnabled()) + if (rt->mainThread.suppressGC || !rt->gc.storeBuffer.isEnabled()) return; - if (VerifyPostTracer *trc = (VerifyPostTracer *)rt->gcVerifyPostData) { - if (++trc->count < rt->gcZealFrequency && !always) + if (VerifyPostTracer *trc = (VerifyPostTracer *)rt->gc.verifyPostData) { + if (++trc->count < rt->gc.zealFrequency && !always) return; EndVerifyPostBarriers(rt); @@ -596,14 +596,14 @@ js::gc::MaybeVerifyBarriers(JSContext *cx, bool always) void js::gc::FinishVerifier(JSRuntime *rt) { - if (VerifyPreTracer *trc = (VerifyPreTracer *)rt->gcVerifyPreData) { + if (VerifyPreTracer *trc = (VerifyPreTracer *)rt->gc.verifyPreData) { js_delete(trc); - rt->gcVerifyPreData = nullptr; + rt->gc.verifyPreData = nullptr; } #ifdef JSGC_GENERATIONAL - if (VerifyPostTracer *trc = (VerifyPostTracer *)rt->gcVerifyPostData) { + if (VerifyPostTracer *trc = (VerifyPostTracer *)rt->gc.verifyPostData) { js_delete(trc); - rt->gcVerifyPostData = nullptr; + rt->gc.verifyPostData = nullptr; } #endif }
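EndVerifyPostBarriers above is a two-pass check: first collect every edge recorded in the store buffer into a set, then walk the heap and flag any tenured-to-nursery edge the set is missing, i.e. a write that skipped its post barrier. Condensed into plain C++ with assumed data structures:

```cpp
// Condensed model; the real verifier walks actual heap cells via tracers.
#include <cassert>
#include <set>
#include <vector>

struct Obj {
    std::vector<Obj **> fields; // outgoing edges from this object
    bool inNursery = false;
};

void verifyPostBarriers(const std::set<Obj **> &storeBufferEdges,
                        const std::vector<Obj *> &tenuredHeap)
{
    for (Obj *src : tenuredHeap) {
        for (Obj **slot : src->fields) {
            Obj *dst = *slot;
            if (!dst || !dst->inNursery)
                continue; // not a cross-generational edge
            // Every tenured->nursery edge must have been buffered.
            assert(storeBufferEdges.count(slot));
        }
    }
}
```

diff --git a/js/src/gc/Zone.cpp b/js/src/gc/Zone.cpp index 463f48ee8e2..a60f29d4761 ---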
a/js/src/gc/Zone.cpp +++ b/js/src/gc/Zone.cpp @@ -22,7 +22,7 @@ using namespace js; using namespace js::gc; JS::Zone::Zone(JSRuntime *rt) - : JS::shadow::Zone(rt, &rt->gcMarker), + : JS::shadow::Zone(rt, &rt->gc.marker), allocator(this), ionUsingBarriers_(false), active(false), @@ -49,13 +49,14 @@ JS::Zone::Zone(JSRuntime *rt) JS_ASSERT(reinterpret_cast<JS::shadow::Zone *>(this) == static_cast<JS::shadow::Zone *>(this)); - setGCMaxMallocBytes(rt->gcMaxMallocBytes * 0.9); + setGCMaxMallocBytes(rt->gc.maxMallocBytes * 0.9); } Zone::~Zone() { - if (this == runtimeFromMainThread()->systemZone) - runtimeFromMainThread()->systemZone = nullptr; + JSRuntime *rt = runtimeFromMainThread(); + if (this == rt->gc.systemZone) + rt->gc.systemZone = nullptr; #ifdef JS_ION js_delete(jitZone_); @@ -115,7 +116,7 @@ Zone::sweep(FreeOp *fop, bool releaseTypes, bool *oom) releaseTypes = false; { - gcstats::AutoPhase ap(fop->runtime()->gcStats, gcstats::PHASE_DISCARD_ANALYSIS); + gcstats::AutoPhase ap(fop->runtime()->gc.stats, gcstats::PHASE_DISCARD_ANALYSIS); types.sweep(fop, releaseTypes, oom); } @@ -133,8 +134,8 @@ Zone::sweepBreakpoints(FreeOp *fop) * to iterate over the scripts belonging to a single compartment in a zone. */ - gcstats::AutoPhase ap1(fop->runtime()->gcStats, gcstats::PHASE_SWEEP_TABLES); - gcstats::AutoPhase ap2(fop->runtime()->gcStats, gcstats::PHASE_SWEEP_TABLES_BREAKPOINT); + gcstats::AutoPhase ap1(fop->runtime()->gc.stats, gcstats::PHASE_SWEEP_TABLES); + gcstats::AutoPhase ap2(fop->runtime()->gc.stats, gcstats::PHASE_SWEEP_TABLES_BREAKPOINT); JS_ASSERT(isGCSweeping()); for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) { @@ -229,7 +230,7 @@ Zone::gcNumber() { // Zones in use by exclusive threads are not collected, and threads using // them cannot access the main runtime's gcNumber without racing. - return usedByExclusiveThread ? 0 : runtimeFromMainThread()->gcNumber; + return usedByExclusiveThread ? 0 : runtimeFromMainThread()->gc.number; } #ifdef JS_ION
diff --git a/js/src/gc/Zone.h b/js/src/gc/Zone.h index cbbde6b2e83..c5becb540f5 100644 --- a/js/src/gc/Zone.h +++ b/js/src/gc/Zone.h @@ -357,8 +357,8 @@ class ZonesIter { public: ZonesIter(JSRuntime *rt, ZoneSelector selector) { - it = rt->zones.begin(); - end = rt->zones.end(); + it = rt->gc.zones.begin(); + end = rt->gc.zones.end(); if (selector == SkipAtoms) { JS_ASSERT(rt->isAtomsZone(*it));
diff --git a/js/src/jit/BaselineIC.cpp b/js/src/jit/BaselineIC.cpp index ded9da3d628..cf32d70c6e5 100644 --- a/js/src/jit/BaselineIC.cpp +++ b/js/src/jit/BaselineIC.cpp @@ -734,7 +734,7 @@ inline bool ICStubCompiler::emitPostWriteBarrierSlot(MacroAssembler &masm, Register obj, ValueOperand val, Register scratch, GeneralRegisterSet saveRegs) { - Nursery &nursery = cx->runtime()->gcNursery; + Nursery &nursery = cx->runtime()->gc.nursery; Label skipBarrier; masm.branchTestObject(Assembler::NotEqual, val, &skipBarrier);
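For reference, the guard sequence emitPostWriteBarrierSlot compiles above, spelled out as plain C++ with stand-in types: a post barrier is only needed when a tenured object ends up holding a pointer to a nursery object.

```cpp
// Stand-in types; the real stub emits this logic as machine code.
struct ValueModel { bool isObject; void *toObject; };

struct NurseryModel {
    const char *start = nullptr, *end = nullptr;
    bool isInside(const void *p) const {
        const char *q = static_cast<const char *>(p);
        return q >= start && q < end;
    }
};

struct WholeCellBuffer { void putWholeCell(void *cell) { (void)cell; } };

void postWriteBarrierSlot(NurseryModel &nursery, WholeCellBuffer &sb,
                          void *obj, const ValueModel &val)
{
    if (!val.isObject)
        return;           // only object values create GC edges
    if (nursery.isInside(obj))
        return;           // nursery -> anywhere needs no barrier
    if (!nursery.isInside(val.toObject))
        return;           // tenured -> tenured needs no barrier
    sb.putWholeCell(obj); // tenured -> nursery: remember the source cell
}
```

@@ -3433,7 +3433,7 @@ IsCacheableGetPropCall(JSContext *cx, JSObject *obj, JSObject *holder, Shape *sh #ifdef JSGC_GENERATIONAL // Information from get prop call ICs may be used directly from Ion code, // and should not be nursery allocated.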
- if (cx->runtime()->gcNursery.isInside(holder) || cx->runtime()->gcNursery.isInside(func)) + if (cx->runtime()->gc.nursery.isInside(holder) || cx->runtime()->gc.nursery.isInside(func)) return false; #endif @@ -3552,7 +3552,7 @@ IsCacheableSetPropCall(JSContext *cx, JSObject *obj, JSObject *holder, Shape *sh #ifdef JSGC_GENERATIONAL // Information from set prop call ICs may be used directly from Ion code, // and should not be nursery allocated. - if (cx->runtime()->gcNursery.isInside(holder) || cx->runtime()->gcNursery.isInside(func)) + if (cx->runtime()->gc.nursery.isInside(holder) || cx->runtime()->gc.nursery.isInside(func)) return false; #endif diff --git a/js/src/jit/BaselineJIT.cpp b/js/src/jit/BaselineJIT.cpp index 63faf00e93d..429213a7140 100644 --- a/js/src/jit/BaselineJIT.cpp +++ b/js/src/jit/BaselineJIT.cpp @@ -456,7 +456,7 @@ BaselineScript::Destroy(FreeOp *fop, BaselineScript *script) * in invalid store buffer entries. Assert that if we do destroy scripts * outside of a GC that we at least emptied the nursery first. */ - JS_ASSERT(fop->runtime()->gcNursery.isEmpty()); + JS_ASSERT(fop->runtime()->gc.nursery.isEmpty()); #endif fop->delete_(script); } diff --git a/js/src/jit/CompileWrappers.cpp b/js/src/jit/CompileWrappers.cpp index 2fb1c3971fc..908ccd5058e 100644 --- a/js/src/jit/CompileWrappers.cpp +++ b/js/src/jit/CompileWrappers.cpp @@ -68,7 +68,7 @@ CompileRuntime::addressOfLastCachedNativeIterator() const void * CompileRuntime::addressOfGCZeal() { - return &runtime()->gcZeal_; + return &runtime()->gc.zealMode; } #endif @@ -170,7 +170,7 @@ CompileRuntime::maybeGetMathCache() const Nursery & CompileRuntime::gcNursery() { - return runtime()->gcNursery; + return runtime()->gc.nursery; } #endif diff --git a/js/src/jit/Ion.cpp b/js/src/jit/Ion.cpp index 3e9675f855f..47611af9757 100644 --- a/js/src/jit/Ion.cpp +++ b/js/src/jit/Ion.cpp @@ -1730,7 +1730,7 @@ OffThreadCompilationAvailable(JSContext *cx) // when running off thread. 
return cx->runtime()->canUseParallelIonCompilation() && WorkerThreadState().cpuCount > 1 - && cx->runtime()->gcIncrementalState == gc::NO_INCREMENTAL + && cx->runtime()->gc.incrementalState == gc::NO_INCREMENTAL && !cx->runtime()->profilingScripts; #else return false; #endif } @@ -2845,13 +2845,13 @@ jit::FinishInvalidation(FreeOp *fop, JSScript *script); void jit::MarkValueFromIon(JSRuntime *rt, Value *vp) { - gc::MarkValueUnbarriered(&rt->gcMarker, vp, "write barrier"); + gc::MarkValueUnbarriered(&rt->gc.marker, vp, "write barrier"); } void jit::MarkShapeFromIon(JSRuntime *rt, Shape **shapep) { - gc::MarkShapeUnbarriered(&rt->gcMarker, shapep, "write barrier"); + gc::MarkShapeUnbarriered(&rt->gc.marker, shapep, "write barrier"); } void
diff --git a/js/src/jit/IonFrames.cpp b/js/src/jit/IonFrames.cpp index 70448553179..aa1a1fb51ed 100644 --- a/js/src/jit/IonFrames.cpp +++ b/js/src/jit/IonFrames.cpp @@ -965,7 +965,7 @@ UpdateIonJSFrameForMinorGC(JSTracer *trc, const JitFrameIterator &frame) for (GeneralRegisterBackwardIterator iter(safepoint.allGprSpills()); iter.more(); iter++) { --spill; if (slotsRegs.has(*iter)) - trc->runtime()->gcNursery.forwardBufferPointer(reinterpret_cast<HeapSlot **>(spill)); + trc->runtime()->gc.nursery.forwardBufferPointer(reinterpret_cast<HeapSlot **>(spill)); } // Skip to the right place in the safepoint @@ -979,7 +979,7 @@ UpdateIonJSFrameForMinorGC(JSTracer *trc, const JitFrameIterator &frame) while (safepoint.getSlotsOrElementsSlot(&slot)) { HeapSlot **slots = reinterpret_cast<HeapSlot **>(layout->slotRef(slot)); - trc->runtime()->gcNursery.forwardBufferPointer(slots); + trc->runtime()->gc.nursery.forwardBufferPointer(slots); } } #endif @@ -1302,7 +1302,7 @@ GetPcScript(JSContext *cx, JSScript **scriptRes, jsbytecode **pcRes) if (MOZ_UNLIKELY(rt->ionPcScriptCache == nullptr)) { rt->ionPcScriptCache = (PcScriptCache *)js_malloc(sizeof(struct PcScriptCache)); if (rt->ionPcScriptCache) - rt->ionPcScriptCache->clear(rt->gcNumber); + rt->ionPcScriptCache->clear(rt->gc.number); } // Attempt to lookup address in cache.
diff --git a/js/src/jit/IonLinker.h b/js/src/jit/IonLinker.h index 84907feb173..d7371a9ad7b 100644 --- a/js/src/jit/IonLinker.h +++ b/js/src/jit/IonLinker.h @@ -67,7 +67,7 @@ class Linker masm.link(code); #ifdef JSGC_GENERATIONAL if (masm.embedsNurseryPointers()) - cx->runtime()->gcStoreBuffer.putWholeCell(code); + cx->runtime()->gc.storeBuffer.putWholeCell(code); #endif return code; }
diff --git a/js/src/jit/JitCompartment.h b/js/src/jit/JitCompartment.h index 4c7bbb86d5d..7d314768154 100644 --- a/js/src/jit/JitCompartment.h +++ b/js/src/jit/JitCompartment.h @@ -460,7 +460,7 @@ ShouldPreserveParallelJITCode(JSRuntime *rt, JSScript *script, bool increase = f { IonScript *parallelIon = script->parallelIonScript(); uint32_t age = increase ? parallelIon->increaseParallelAge() : parallelIon->parallelAge(); - return age < jit::IonScript::MAX_PARALLEL_AGE && !rt->gcShouldCleanUpEverything; + return age < jit::IonScript::MAX_PARALLEL_AGE && !rt->gc.shouldCleanUpEverything; } // On windows systems, really large frames need to be incrementally touched.
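The PcScriptCache hunk below depends on the gc.number rename; the invalidation idea is that the cache stamps itself with the GC counter and lazily clears itself on the first lookup that observes a newer count. A minimal model of that pattern (the containers here are stand-ins):

```cpp
// Minimal model of GC-counter-based lazy cache invalidation.
#include <cstdint>
#include <unordered_map>

struct PcCacheModel {
    uint64_t gcNumber = 0;                    // GC count when last cleared
    std::unordered_map<uintptr_t, int> table; // return address -> cached entry

    void clear(uint64_t currentGCNumber) {
        table.clear();
        gcNumber = currentGCNumber;
    }

    bool get(uint64_t currentGCNumber, uintptr_t retAddr, int *out) {
        if (gcNumber != currentGCNumber) { // a GC may have moved/freed scripts
            clear(currentGCNumber);
            return false;
        }
        auto it = table.find(retAddr);
        if (it == table.end())
            return false;
        *out = it->second;
        return true;
    }
};
```

diff --git a/js/src/jit/PcScriptCache.h b/js/src/jit/PcScriptCache.h index 889b6644a9b..60af08dde52 100644 --- a/js/src/jit/PcScriptCache.h +++ b/js/src/jit/PcScriptCache.h @@ -45,8 +45,8 @@ struct PcScriptCache JSScript **scriptRes, jsbytecode **pcRes) { // If a GC occurred, lazily clear the cache now.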
- if (gcNumber != rt->gcNumber) { - clear(rt->gcNumber); + if (gcNumber != rt->gc.number) { + clear(rt->gc.number); return false; } diff --git a/js/src/jit/VMFunctions.cpp b/js/src/jit/VMFunctions.cpp index 0ae0566f55e..6ef3d938dca 100644 --- a/js/src/jit/VMFunctions.cpp +++ b/js/src/jit/VMFunctions.cpp @@ -554,7 +554,7 @@ NewCallObject(JSContext *cx, HandleShape shape, HandleTypeObject type, HeapSlot // the initializing writes. The interpreter, however, may have allocated // the call object tenured, so barrier as needed before re-entering. if (!IsInsideNursery(cx->runtime(), obj)) - cx->runtime()->gcStoreBuffer.putWholeCell(obj); + cx->runtime()->gc.storeBuffer.putWholeCell(obj); #endif return obj; @@ -573,7 +573,7 @@ NewSingletonCallObject(JSContext *cx, HandleShape shape, HeapSlot *slots) // the call object tenured, so barrier as needed before re-entering. MOZ_ASSERT(!IsInsideNursery(cx->runtime(), obj), "singletons are created in the tenured heap"); - cx->runtime()->gcStoreBuffer.putWholeCell(obj); + cx->runtime()->gc.storeBuffer.putWholeCell(obj); #endif return obj; @@ -714,7 +714,7 @@ void PostWriteBarrier(JSRuntime *rt, JSObject *obj) { JS_ASSERT(!IsInsideNursery(rt, obj)); - rt->gcStoreBuffer.putWholeCell(obj); + rt->gc.storeBuffer.putWholeCell(obj); } void diff --git a/js/src/jsapi-tests/testGCFinalizeCallback.cpp b/js/src/jsapi-tests/testGCFinalizeCallback.cpp index c911001af91..730581b3563 100644 --- a/js/src/jsapi-tests/testGCFinalizeCallback.cpp +++ b/js/src/jsapi-tests/testGCFinalizeCallback.cpp @@ -17,7 +17,7 @@ BEGIN_TEST(testGCFinalizeCallback) /* Full GC, non-incremental. */ FinalizeCalls = 0; JS_GC(rt); - CHECK(rt->gcIsFull); + CHECK(rt->gc.isFull); CHECK(checkSingleGroup()); CHECK(checkFinalizeStatus()); CHECK(checkFinalizeIsCompartmentGC(false)); @@ -26,8 +26,8 @@ BEGIN_TEST(testGCFinalizeCallback) FinalizeCalls = 0; JS::PrepareForFullGC(rt); JS::IncrementalGC(rt, JS::gcreason::API, 1000000); - CHECK(rt->gcIncrementalState == js::gc::NO_INCREMENTAL); - CHECK(rt->gcIsFull); + CHECK(rt->gc.incrementalState == js::gc::NO_INCREMENTAL); + CHECK(rt->gc.isFull); CHECK(checkMultipleGroups()); CHECK(checkFinalizeStatus()); CHECK(checkFinalizeIsCompartmentGC(false)); @@ -43,7 +43,7 @@ BEGIN_TEST(testGCFinalizeCallback) FinalizeCalls = 0; JS::PrepareZoneForGC(global1->zone()); JS::GCForReason(rt, JS::gcreason::API); - CHECK(!rt->gcIsFull); + CHECK(!rt->gc.isFull); CHECK(checkSingleGroup()); CHECK(checkFinalizeStatus()); CHECK(checkFinalizeIsCompartmentGC(true)); @@ -54,7 +54,7 @@ BEGIN_TEST(testGCFinalizeCallback) JS::PrepareZoneForGC(global2->zone()); JS::PrepareZoneForGC(global3->zone()); JS::GCForReason(rt, JS::gcreason::API); - CHECK(!rt->gcIsFull); + CHECK(!rt->gc.isFull); CHECK(checkSingleGroup()); CHECK(checkFinalizeStatus()); CHECK(checkFinalizeIsCompartmentGC(true)); @@ -63,8 +63,8 @@ BEGIN_TEST(testGCFinalizeCallback) FinalizeCalls = 0; JS::PrepareZoneForGC(global1->zone()); JS::IncrementalGC(rt, JS::gcreason::API, 1000000); - CHECK(rt->gcIncrementalState == js::gc::NO_INCREMENTAL); - CHECK(!rt->gcIsFull); + CHECK(rt->gc.incrementalState == js::gc::NO_INCREMENTAL); + CHECK(!rt->gc.isFull); CHECK(checkSingleGroup()); CHECK(checkFinalizeStatus()); CHECK(checkFinalizeIsCompartmentGC(true)); @@ -75,8 +75,8 @@ BEGIN_TEST(testGCFinalizeCallback) JS::PrepareZoneForGC(global2->zone()); JS::PrepareZoneForGC(global3->zone()); JS::IncrementalGC(rt, JS::gcreason::API, 1000000); - CHECK(rt->gcIncrementalState == js::gc::NO_INCREMENTAL); - CHECK(!rt->gcIsFull); + 
CHECK(rt->gc.incrementalState == js::gc::NO_INCREMENTAL); + CHECK(!rt->gc.isFull); CHECK(checkMultipleGroups()); CHECK(checkFinalizeStatus()); CHECK(checkFinalizeIsCompartmentGC(true)); @@ -89,13 +89,13 @@ BEGIN_TEST(testGCFinalizeCallback) JS_SetGCZeal(cx, 9, 1000000); JS::PrepareForFullGC(rt); js::GCDebugSlice(rt, true, 1); - CHECK(rt->gcIncrementalState == js::gc::MARK); - CHECK(rt->gcIsFull); + CHECK(rt->gc.incrementalState == js::gc::MARK); + CHECK(rt->gc.isFull); JS::RootedObject global4(cx, createGlobal()); js::GCDebugSlice(rt, true, 1); - CHECK(rt->gcIncrementalState == js::gc::NO_INCREMENTAL); - CHECK(!rt->gcIsFull); + CHECK(rt->gc.incrementalState == js::gc::NO_INCREMENTAL); + CHECK(!rt->gc.isFull); CHECK(checkMultipleGroups()); CHECK(checkFinalizeStatus()); diff --git a/js/src/jsapi.cpp b/js/src/jsapi.cpp index 2211e147d0b..f68906fec37 100644 --- a/js/src/jsapi.cpp +++ b/js/src/jsapi.cpp @@ -158,7 +158,7 @@ namespace js { void AssertHeapIsIdle(JSRuntime *rt) { - JS_ASSERT(rt->heapState == js::Idle); + JS_ASSERT(rt->gc.heapState == js::Idle); } void @@ -718,7 +718,7 @@ StopRequest(JSContext *cx) if (rt->requestDepth != 1) { rt->requestDepth--; } else { - rt->conservativeGC.updateForRequestEnd(); + rt->gc.conservativeGC.updateForRequestEnd(); rt->requestDepth = 0; rt->triggerActivityCallback(false); } @@ -1613,17 +1613,17 @@ JS_PUBLIC_API(bool) JS_AddExtraGCRootsTracer(JSRuntime *rt, JSTraceDataOp traceOp, void *data) { AssertHeapIsIdle(rt); - return !!rt->gcBlackRootTracers.append(JSRuntime::ExtraTracer(traceOp, data)); + return !!rt->gc.blackRootTracers.append(ExtraTracer(traceOp, data)); } JS_PUBLIC_API(void) JS_RemoveExtraGCRootsTracer(JSRuntime *rt, JSTraceDataOp traceOp, void *data) { AssertHeapIsIdle(rt); - for (size_t i = 0; i < rt->gcBlackRootTracers.length(); i++) { - JSRuntime::ExtraTracer *e = &rt->gcBlackRootTracers[i]; + for (size_t i = 0; i < rt->gc.blackRootTracers.length(); i++) { + ExtraTracer *e = &rt->gc.blackRootTracers[i]; if (e->op == traceOp && e->data == data) { - rt->gcBlackRootTracers.erase(e); + rt->gc.blackRootTracers.erase(e); break; } } @@ -1897,15 +1897,15 @@ JS_PUBLIC_API(void) JS_SetGCCallback(JSRuntime *rt, JSGCCallback cb, void *data) { AssertHeapIsIdle(rt); - rt->gcCallback = cb; - rt->gcCallbackData = data; + rt->gc.callback = cb; + rt->gc.callbackData = data; } JS_PUBLIC_API(void) JS_SetFinalizeCallback(JSRuntime *rt, JSFinalizeCallback cb) { AssertHeapIsIdle(rt); - rt->gcFinalizeCallback = cb; + rt->gc.finalizeCallback = cb; } JS_PUBLIC_API(bool) @@ -1925,51 +1925,51 @@ JS_SetGCParameter(JSRuntime *rt, JSGCParamKey key, uint32_t value) { switch (key) { case JSGC_MAX_BYTES: { - JS_ASSERT(value >= rt->gcBytes); - rt->gcMaxBytes = value; + JS_ASSERT(value >= rt->gc.bytes); + rt->gc.maxBytes = value; break; } case JSGC_MAX_MALLOC_BYTES: rt->setGCMaxMallocBytes(value); break; case JSGC_SLICE_TIME_BUDGET: - rt->gcSliceBudget = SliceBudget::TimeBudget(value); + rt->gc.sliceBudget = SliceBudget::TimeBudget(value); break; case JSGC_MARK_STACK_LIMIT: js::SetMarkStackLimit(rt, value); break; case JSGC_HIGH_FREQUENCY_TIME_LIMIT: - rt->gcHighFrequencyTimeThreshold = value; + rt->gc.highFrequencyTimeThreshold = value; break; case JSGC_HIGH_FREQUENCY_LOW_LIMIT: - rt->gcHighFrequencyLowLimitBytes = value * 1024 * 1024; + rt->gc.highFrequencyLowLimitBytes = value * 1024 * 1024; break; case JSGC_HIGH_FREQUENCY_HIGH_LIMIT: - rt->gcHighFrequencyHighLimitBytes = value * 1024 * 1024; + rt->gc.highFrequencyHighLimitBytes = value * 1024 * 1024; break; case 
JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MAX: - rt->gcHighFrequencyHeapGrowthMax = value / 100.0; - MOZ_ASSERT(rt->gcHighFrequencyHeapGrowthMax / 0.85 > 1.0); + rt->gc.highFrequencyHeapGrowthMax = value / 100.0; + MOZ_ASSERT(rt->gc.highFrequencyHeapGrowthMax / 0.85 > 1.0); break; case JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MIN: - rt->gcHighFrequencyHeapGrowthMin = value / 100.0; - MOZ_ASSERT(rt->gcHighFrequencyHeapGrowthMin / 0.85 > 1.0); + rt->gc.highFrequencyHeapGrowthMin = value / 100.0; + MOZ_ASSERT(rt->gc.highFrequencyHeapGrowthMin / 0.85 > 1.0); break; case JSGC_LOW_FREQUENCY_HEAP_GROWTH: - rt->gcLowFrequencyHeapGrowth = value / 100.0; - MOZ_ASSERT(rt->gcLowFrequencyHeapGrowth / 0.9 > 1.0); + rt->gc.lowFrequencyHeapGrowth = value / 100.0; + MOZ_ASSERT(rt->gc.lowFrequencyHeapGrowth / 0.9 > 1.0); break; case JSGC_DYNAMIC_HEAP_GROWTH: - rt->gcDynamicHeapGrowth = value; + rt->gc.dynamicHeapGrowth = value; break; case JSGC_DYNAMIC_MARK_SLICE: - rt->gcDynamicMarkSlice = value; + rt->gc.dynamicMarkSlice = value; break; case JSGC_ALLOCATION_THRESHOLD: - rt->gcAllocationThreshold = value * 1024 * 1024; + rt->gc.allocationThreshold = value * 1024 * 1024; break; case JSGC_DECOMMIT_THRESHOLD: - rt->gcDecommitThreshold = value * 1024 * 1024; + rt->gc.decommitThreshold = value * 1024 * 1024; break; default: JS_ASSERT(key == JSGC_MODE); @@ -1986,42 +1986,42 @@ JS_GetGCParameter(JSRuntime *rt, JSGCParamKey key) { switch (key) { case JSGC_MAX_BYTES: - return uint32_t(rt->gcMaxBytes); + return uint32_t(rt->gc.maxBytes); case JSGC_MAX_MALLOC_BYTES: - return rt->gcMaxMallocBytes; + return rt->gc.maxMallocBytes; case JSGC_BYTES: - return uint32_t(rt->gcBytes); + return uint32_t(rt->gc.bytes); case JSGC_MODE: return uint32_t(rt->gcMode()); case JSGC_UNUSED_CHUNKS: - return uint32_t(rt->gcChunkPool.getEmptyCount()); + return uint32_t(rt->gc.chunkPool.getEmptyCount()); case JSGC_TOTAL_CHUNKS: - return uint32_t(rt->gcChunkSet.count() + rt->gcChunkPool.getEmptyCount()); + return uint32_t(rt->gc.chunkSet.count() + rt->gc.chunkPool.getEmptyCount()); case JSGC_SLICE_TIME_BUDGET: - return uint32_t(rt->gcSliceBudget > 0 ? rt->gcSliceBudget / PRMJ_USEC_PER_MSEC : 0); + return uint32_t(rt->gc.sliceBudget > 0 ? 
rt->gc.sliceBudget / PRMJ_USEC_PER_MSEC : 0); case JSGC_MARK_STACK_LIMIT: - return rt->gcMarker.maxCapacity(); + return rt->gc.marker.maxCapacity(); case JSGC_HIGH_FREQUENCY_TIME_LIMIT: - return rt->gcHighFrequencyTimeThreshold; + return rt->gc.highFrequencyTimeThreshold; case JSGC_HIGH_FREQUENCY_LOW_LIMIT: - return rt->gcHighFrequencyLowLimitBytes / 1024 / 1024; + return rt->gc.highFrequencyLowLimitBytes / 1024 / 1024; case JSGC_HIGH_FREQUENCY_HIGH_LIMIT: - return rt->gcHighFrequencyHighLimitBytes / 1024 / 1024; + return rt->gc.highFrequencyHighLimitBytes / 1024 / 1024; case JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MAX: - return uint32_t(rt->gcHighFrequencyHeapGrowthMax * 100); + return uint32_t(rt->gc.highFrequencyHeapGrowthMax * 100); case JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MIN: - return uint32_t(rt->gcHighFrequencyHeapGrowthMin * 100); + return uint32_t(rt->gc.highFrequencyHeapGrowthMin * 100); case JSGC_LOW_FREQUENCY_HEAP_GROWTH: - return uint32_t(rt->gcLowFrequencyHeapGrowth * 100); + return uint32_t(rt->gc.lowFrequencyHeapGrowth * 100); case JSGC_DYNAMIC_HEAP_GROWTH: - return rt->gcDynamicHeapGrowth; + return rt->gc.dynamicHeapGrowth; case JSGC_DYNAMIC_MARK_SLICE: - return rt->gcDynamicMarkSlice; + return rt->gc.dynamicMarkSlice; case JSGC_ALLOCATION_THRESHOLD: - return rt->gcAllocationThreshold / 1024 / 1024; + return rt->gc.allocationThreshold / 1024 / 1024; default: JS_ASSERT(key == JSGC_NUMBER); - return uint32_t(rt->gcNumber); + return uint32_t(rt->gc.number); } } @@ -2501,7 +2501,7 @@ JS_NewGlobalObject(JSContext *cx, const JSClass *clasp, JSPrincipals *principals Zone *zone; if (options.zoneSpecifier() == JS::SystemZone) - zone = rt->systemZone; + zone = rt->gc.systemZone; else if (options.zoneSpecifier() == JS::FreshZone) zone = nullptr; else @@ -2512,9 +2512,9 @@ JS_NewGlobalObject(JSContext *cx, const JSClass *clasp, JSPrincipals *principals return nullptr; // Lazily create the system zone. 
- if (!rt->systemZone && options.zoneSpecifier() == JS::SystemZone) { - rt->systemZone = compartment->zone(); - rt->systemZone->isSystem = true; + if (!rt->gc.systemZone && options.zoneSpecifier() == JS::SystemZone) { + rt->gc.systemZone = compartment->zone(); + rt->gc.systemZone->isSystem = true; } Rooted global(cx); @@ -6203,7 +6203,7 @@ JS_SetGCZeal(JSContext *cx, uint8_t zeal, uint32_t frequency) JS_PUBLIC_API(void) JS_ScheduleGC(JSContext *cx, uint32_t count) { - cx->runtime()->gcNextScheduled = count; + cx->runtime()->gc.nextScheduled = count; } #endif diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index c0e9fc7373b..20fb6b401e1 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -223,7 +223,7 @@ struct ThreadSafeContext : ContextFriendFields, inline js::Nursery &nursery() { JS_ASSERT(hasNursery()); - return runtime_->gcNursery; + return runtime_->gc.nursery; } #endif @@ -289,7 +289,7 @@ struct ThreadSafeContext : ContextFriendFields, void *runtimeAddressForJit() { return runtime_; } void *stackLimitAddress(StackKind kind) { return &runtime_->mainThread.nativeStackLimit[kind]; } void *stackLimitAddressForJitCode(StackKind kind); - size_t gcSystemPageSize() { return runtime_->pageAllocator.systemPageSize(); } + size_t gcSystemPageSize() { return runtime_->gc.pageAllocator.systemPageSize(); } bool signalHandlersInstalled() const { return runtime_->signalHandlersInstalled(); } bool jitSupportsFloatingPoint() const { return runtime_->jitSupportsFloatingPoint; } diff --git a/js/src/jscompartment.cpp b/js/src/jscompartment.cpp index 5f1c33d0b69..c6c1b748709 100644 --- a/js/src/jscompartment.cpp +++ b/js/src/jscompartment.cpp @@ -258,7 +258,7 @@ JSCompartment::putWrapper(JSContext *cx, const CrossCompartmentKey &wrapped, con if (success && (nursery.isInside(wrapped.wrapped) || nursery.isInside(wrapped.debugger))) { WrapperMapRef ref(&crossCompartmentWrappers, wrapped); - cx->runtime()->gcStoreBuffer.putGeneric(ref); + cx->runtime()->gc.storeBuffer.putGeneric(ref); } #endif @@ -557,7 +557,7 @@ JSCompartment::sweep(FreeOp *fop, bool releaseTypes) JSRuntime *rt = runtimeFromMainThread(); { - gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_TABLES); + gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_SWEEP_TABLES); /* Remove dead references held weakly by the compartment. */ @@ -616,8 +616,8 @@ JSCompartment::sweepCrossCompartmentWrappers() { JSRuntime *rt = runtimeFromMainThread(); - gcstats::AutoPhase ap1(rt->gcStats, gcstats::PHASE_SWEEP_TABLES); - gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_SWEEP_TABLES_WRAPPER); + gcstats::AutoPhase ap1(rt->gc.stats, gcstats::PHASE_SWEEP_TABLES); + gcstats::AutoPhase ap2(rt->gc.stats, gcstats::PHASE_SWEEP_TABLES_WRAPPER); /* Remove dead wrappers from the table. 
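
The paired ap1/ap2 declarations just above use the RAII phase timers that appear throughout this patch: constructing a gcstats::AutoPhase starts the clock for a phase, destruction stops it, and nesting two attributes the same interval to both the outer and the inner phase. A simplified stand-in (Statistics below is a sketch, not the real gcstats::Statistics interface):

    struct Statistics {
        void beginPhase(int phase) { /* record start time for this phase */ }
        void endPhase(int phase)   { /* accumulate elapsed time */ }
    };

    class AutoPhase {
        Statistics &stats;
        int phase;
      public:
        AutoPhase(Statistics &s, int p) : stats(s), phase(p) { stats.beginPhase(p); }
        ~AutoPhase() { stats.endPhase(phase); }
    };
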
*/ for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) { diff --git a/js/src/jsfriendapi.cpp b/js/src/jsfriendapi.cpp index aea38933c69..7daca6bd7a5 100644 --- a/js/src/jsfriendapi.cpp +++ b/js/src/jsfriendapi.cpp @@ -59,8 +59,8 @@ js::ForgetSourceHook(JSRuntime *rt) JS_FRIEND_API(void) JS_SetGrayGCRootsTracer(JSRuntime *rt, JSTraceDataOp traceOp, void *data) { - rt->gcGrayRootTracer.op = traceOp; - rt->gcGrayRootTracer.data = data; + rt->gc.grayRootTracer.op = traceOp; + rt->gc.grayRootTracer.data = data; } JS_FRIEND_API(JSString *) @@ -632,7 +632,7 @@ js::TraceWeakMaps(WeakMapTracer *trc) extern JS_FRIEND_API(bool) js::AreGCGrayBitsValid(JSRuntime *rt) { - return rt->gcGrayBitsValid; + return rt->gc.grayBitsValid; } JS_FRIEND_API(bool) @@ -857,27 +857,27 @@ js::IsContextRunningJS(JSContext *cx) JS_FRIEND_API(JS::GCSliceCallback) JS::SetGCSliceCallback(JSRuntime *rt, GCSliceCallback callback) { - JS::GCSliceCallback old = rt->gcSliceCallback; - rt->gcSliceCallback = callback; + JS::GCSliceCallback old = rt->gc.sliceCallback; + rt->gc.sliceCallback = callback; return old; } JS_FRIEND_API(bool) JS::WasIncrementalGC(JSRuntime *rt) { - return rt->gcIsIncremental; + return rt->gc.isIncremental; } jschar * GCDescription::formatMessage(JSRuntime *rt) const { - return rt->gcStats.formatMessage(); + return rt->gc.stats.formatMessage(); } jschar * GCDescription::formatJSON(JSRuntime *rt, uint64_t timestamp) const { - return rt->gcStats.formatJSON(timestamp); + return rt->gc.stats.formatJSON(timestamp); } JS_FRIEND_API(void) @@ -899,36 +899,36 @@ JS::NotifyDidPaint(JSRuntime *rt) return; } - if (JS::IsIncrementalGCInProgress(rt) && !rt->gcInterFrameGC) { + if (JS::IsIncrementalGCInProgress(rt) && !rt->gc.interFrameGC) { JS::PrepareForIncrementalGC(rt); GCSlice(rt, GC_NORMAL, gcreason::REFRESH_FRAME); } - rt->gcInterFrameGC = false; + rt->gc.interFrameGC = false; } JS_FRIEND_API(bool) JS::IsIncrementalGCEnabled(JSRuntime *rt) { - return rt->gcIncrementalEnabled && rt->gcMode() == JSGC_MODE_INCREMENTAL; + return rt->gc.incrementalEnabled && rt->gcMode() == JSGC_MODE_INCREMENTAL; } JS_FRIEND_API(bool) JS::IsIncrementalGCInProgress(JSRuntime *rt) { - return rt->gcIncrementalState != gc::NO_INCREMENTAL && !rt->gcVerifyPreData; + return rt->gc.incrementalState != gc::NO_INCREMENTAL && !rt->gc.verifyPreData; } JS_FRIEND_API(void) JS::DisableIncrementalGC(JSRuntime *rt) { - rt->gcIncrementalEnabled = false; + rt->gc.incrementalEnabled = false; } JS::AutoDisableGenerationalGC::AutoDisableGenerationalGC(JSRuntime *rt) : runtime(rt) #if defined(JSGC_GENERATIONAL) && defined(JS_GC_ZEAL) - , restartVerifier(rt->gcVerifyPostData) + , restartVerifier(rt->gc.verifyPostData) #endif { #ifdef JSGC_GENERATIONAL @@ -938,21 +938,21 @@ JS::AutoDisableGenerationalGC::AutoDisableGenerationalGC(JSRuntime *rt) gc::EndVerifyPostBarriers(rt); #endif MinorGC(rt, JS::gcreason::API); - rt->gcNursery.disable(); - rt->gcStoreBuffer.disable(); + rt->gc.nursery.disable(); + rt->gc.storeBuffer.disable(); } #endif - ++rt->gcGenerationalDisabled; + ++rt->gc.generationalDisabled; } JS::AutoDisableGenerationalGC::~AutoDisableGenerationalGC() { - JS_ASSERT(runtime->gcGenerationalDisabled > 0); - --runtime->gcGenerationalDisabled; + JS_ASSERT(runtime->gc.generationalDisabled > 0); + --runtime->gc.generationalDisabled; #ifdef JSGC_GENERATIONAL - if (runtime->gcGenerationalDisabled == 0) { - runtime->gcNursery.enable(); - runtime->gcStoreBuffer.enable(); + if (runtime->gc.generationalDisabled == 0) { + 
runtime->gc.nursery.enable(); + runtime->gc.storeBuffer.enable(); #ifdef JS_GC_ZEAL if (restartVerifier) gc::StartVerifyPostBarriers(runtime); @@ -964,13 +964,13 @@ JS::AutoDisableGenerationalGC::~AutoDisableGenerationalGC() extern JS_FRIEND_API(bool) JS::IsGenerationalGCEnabled(JSRuntime *rt) { - return rt->gcGenerationalDisabled == 0; + return rt->gc.generationalDisabled == 0; } JS_FRIEND_API(bool) JS::IsIncrementalBarrierNeeded(JSRuntime *rt) { - return rt->gcIncrementalState == gc::MARK && !rt->isHeapBusy(); + return rt->gc.incrementalState == gc::MARK && !rt->isHeapBusy(); } JS_FRIEND_API(bool) @@ -1034,7 +1034,7 @@ JS::IncrementalValueBarrier(const Value &v) JS_FRIEND_API(void) JS::PokeGC(JSRuntime *rt) { - rt->gcPoke = true; + rt->gc.poke = true; } JS_FRIEND_API(JSCompartment *) @@ -1203,7 +1203,7 @@ js_DefineOwnProperty(JSContext *cx, JSObject *objArg, jsid idArg, { RootedObject obj(cx, objArg); RootedId id(cx, idArg); - JS_ASSERT(cx->runtime()->heapState == js::Idle); + JS_ASSERT(cx->runtime()->gc.heapState == js::Idle); CHECK_REQUEST(cx); assertSameCompartment(cx, obj, id, descriptor.value()); if (descriptor.hasGetterObject()) @@ -1240,7 +1240,7 @@ JS_StoreObjectPostBarrierCallback(JSContext* cx, { JSRuntime *rt = cx->runtime(); if (IsInsideNursery(rt, key)) - rt->gcStoreBuffer.putCallback(callback, key, data); + rt->gc.storeBuffer.putCallback(callback, key, data); } extern JS_FRIEND_API(void) @@ -1250,6 +1250,6 @@ JS_StoreStringPostBarrierCallback(JSContext* cx, { JSRuntime *rt = cx->runtime(); if (IsInsideNursery(rt, key)) - rt->gcStoreBuffer.putCallback(callback, key, data); + rt->gc.storeBuffer.putCallback(callback, key, data); } #endif /* JSGC_GENERATIONAL */ diff --git a/js/src/jsgc.cpp b/js/src/jsgc.cpp index ddca7ddc0d7..da2d3c19eb2 100644 --- a/js/src/jsgc.cpp +++ b/js/src/jsgc.cpp @@ -393,7 +393,7 @@ ArenaHeader::checkSynchronizedWithFreeList() const * list in the zone can mutate at any moment. We cannot do any * checks in this case. */ - if (IsBackgroundFinalized(getAllocKind()) && zone->runtimeFromAnyThread()->gcHelperThread.onBackgroundThread()) + if (IsBackgroundFinalized(getAllocKind()) && zone->runtimeFromAnyThread()->gc.helperThread.onBackgroundThread()) return; FreeSpan firstSpan = FreeSpan::decodeOffsets(arenaAddress(), firstFreeSpanOffsets); @@ -628,13 +628,13 @@ FinalizeArenas(FreeOp *fop, static inline Chunk * AllocChunk(JSRuntime *rt) { - return static_cast(rt->pageAllocator.mapAlignedPages(ChunkSize, ChunkSize)); + return static_cast(rt->gc.pageAllocator.mapAlignedPages(ChunkSize, ChunkSize)); } static inline void FreeChunk(JSRuntime *rt, Chunk *p) { - rt->pageAllocator.unmapPages(static_cast(p), ChunkSize); + rt->gc.pageAllocator.unmapPages(static_cast(p), ChunkSize); } inline bool @@ -645,16 +645,16 @@ ChunkPool::wantBackgroundAllocation(JSRuntime *rt) const * allocation if we have empty chunks or when the runtime needs just few * of them. */ - return rt->gcHelperThread.canBackgroundAllocate() && + return rt->gc.helperThread.canBackgroundAllocate() && emptyCount == 0 && - rt->gcChunkSet.count() >= 4; + rt->gc.chunkSet.count() >= 4; } /* Must be called with the GC lock taken. 
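
The allocation heuristic just above survives the move into GCRuntime unchanged. Condensed, with the inputs passed explicitly instead of read off the runtime:

    #include <cstddef>

    bool wantBackgroundAllocation(bool helperCanAllocate, size_t emptyCount,
                                  size_t liveChunkCount) {
        // Prefetch on the helper thread only when the pool is empty and
        // demand looks steady (the runtime already holds >= 4 live chunks).
        return helperCanAllocate && emptyCount == 0 && liveChunkCount >= 4;
    }
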
*/ inline Chunk * ChunkPool::get(JSRuntime *rt) { - JS_ASSERT(this == &rt->gcChunkPool); + JS_ASSERT(this == &rt->gc.chunkPool); Chunk *chunk = emptyChunkListHead; if (chunk) { @@ -669,10 +669,10 @@ ChunkPool::get(JSRuntime *rt) JS_ASSERT(chunk->info.numArenasFreeCommitted == 0); } JS_ASSERT(chunk->unused()); - JS_ASSERT(!rt->gcChunkSet.has(chunk)); + JS_ASSERT(!rt->gc.chunkSet.has(chunk)); if (wantBackgroundAllocation(rt)) - rt->gcHelperThread.startBackgroundAllocationIfIdle(); + rt->gc.helperThread.startBackgroundAllocationIfIdle(); return chunk; } @@ -691,7 +691,7 @@ ChunkPool::put(Chunk *chunk) Chunk * ChunkPool::expire(JSRuntime *rt, bool releaseAll) { - JS_ASSERT(this == &rt->gcChunkPool); + JS_ASSERT(this == &rt->gc.chunkPool); /* * Return old empty chunks to the system while preserving the order of @@ -705,7 +705,7 @@ ChunkPool::expire(JSRuntime *rt, bool releaseAll) JS_ASSERT(emptyCount); Chunk *chunk = *chunkp; JS_ASSERT(chunk->unused()); - JS_ASSERT(!rt->gcChunkSet.has(chunk)); + JS_ASSERT(!rt->gc.chunkSet.has(chunk)); JS_ASSERT(chunk->info.age <= MAX_EMPTY_CHUNK_AGE); if (releaseAll || chunk->info.age == MAX_EMPTY_CHUNK_AGE || freeChunkCount++ > MAX_EMPTY_CHUNK_COUNT) @@ -748,7 +748,7 @@ Chunk::allocate(JSRuntime *rt) if (!chunk) return nullptr; chunk->init(rt); - rt->gcStats.count(gcstats::STAT_NEW_CHUNK); + rt->gc.stats.count(gcstats::STAT_NEW_CHUNK); return chunk; } @@ -764,9 +764,9 @@ Chunk::release(JSRuntime *rt, Chunk *chunk) inline void Chunk::prepareToBeFreed(JSRuntime *rt) { - JS_ASSERT(rt->gcNumArenasFreeCommitted >= info.numArenasFreeCommitted); - rt->gcNumArenasFreeCommitted -= info.numArenasFreeCommitted; - rt->gcStats.count(gcstats::STAT_DESTROY_CHUNK); + JS_ASSERT(rt->gc.numArenasFreeCommitted >= info.numArenasFreeCommitted); + rt->gc.numArenasFreeCommitted -= info.numArenasFreeCommitted; + rt->gc.stats.count(gcstats::STAT_DESTROY_CHUNK); #ifdef DEBUG /* @@ -780,7 +780,7 @@ Chunk::prepareToBeFreed(JSRuntime *rt) void Chunk::decommitAllArenas(JSRuntime *rt) { decommittedArenas.clear(true); - rt->pageAllocator.markPagesUnused(&arenas[0], ArenasPerChunk * ArenaSize); + rt->gc.pageAllocator.markPagesUnused(&arenas[0], ArenasPerChunk * ArenaSize); info.freeArenasHead = nullptr; info.lastDecommittedArenaOffset = 0; @@ -818,8 +818,8 @@ GetAvailableChunkList(Zone *zone) { JSRuntime *rt = zone->runtimeFromAnyThread(); return zone->isSystem - ? &rt->gcSystemAvailableChunkListHead - : &rt->gcUserAvailableChunkListHead; + ? 
&rt->gc.systemAvailableChunkListHead + : &rt->gc.userAvailableChunkListHead; } inline void @@ -888,7 +888,7 @@ Chunk::fetchNextDecommittedArena() decommittedArenas.unset(offset); Arena *arena = &arenas[offset]; - info.trailer.runtime->pageAllocator.markPagesInUse(arena, ArenaSize); + info.trailer.runtime->gc.pageAllocator.markPagesInUse(arena, ArenaSize); arena->aheader.setAsNotAllocated(); return &arena->aheader; @@ -899,13 +899,13 @@ Chunk::fetchNextFreeArena(JSRuntime *rt) { JS_ASSERT(info.numArenasFreeCommitted > 0); JS_ASSERT(info.numArenasFreeCommitted <= info.numArenasFree); - JS_ASSERT(info.numArenasFreeCommitted <= rt->gcNumArenasFreeCommitted); + JS_ASSERT(info.numArenasFreeCommitted <= rt->gc.numArenasFreeCommitted); ArenaHeader *aheader = info.freeArenasHead; info.freeArenasHead = aheader->next; --info.numArenasFreeCommitted; --info.numArenasFree; - --rt->gcNumArenasFreeCommitted; + --rt->gc.numArenasFreeCommitted; return aheader; } @@ -916,7 +916,7 @@ Chunk::allocateArena(Zone *zone, AllocKind thingKind) JS_ASSERT(hasAvailableArenas()); JSRuntime *rt = zone->runtimeFromAnyThread(); - if (!rt->isHeapMinorCollecting() && rt->gcBytes >= rt->gcMaxBytes) + if (!rt->isHeapMinorCollecting() && rt->gc.bytes >= rt->gc.maxBytes) return nullptr; ArenaHeader *aheader = MOZ_LIKELY(info.numArenasFreeCommitted > 0) @@ -926,7 +926,7 @@ Chunk::allocateArena(Zone *zone, AllocKind thingKind) if (MOZ_UNLIKELY(!hasAvailableArenas())) removeFromAvailableList(); - rt->gcBytes += ArenaSize; + rt->gc.bytes += ArenaSize; zone->gcBytes += ArenaSize; if (zone->gcBytes >= zone->gcTriggerBytes) { @@ -945,7 +945,7 @@ Chunk::addArenaToFreeList(JSRuntime *rt, ArenaHeader *aheader) info.freeArenasHead = aheader; ++info.numArenasFreeCommitted; ++info.numArenasFree; - ++rt->gcNumArenasFreeCommitted; + ++rt->gc.numArenasFreeCommitted; } void @@ -963,14 +963,14 @@ Chunk::releaseArena(ArenaHeader *aheader) Zone *zone = aheader->zone; JSRuntime *rt = zone->runtimeFromAnyThread(); AutoLockGC maybeLock; - if (rt->gcHelperThread.sweeping()) + if (rt->gc.helperThread.sweeping()) maybeLock.lock(rt); - JS_ASSERT(rt->gcBytes >= ArenaSize); + JS_ASSERT(rt->gc.bytes >= ArenaSize); JS_ASSERT(zone->gcBytes >= ArenaSize); - if (rt->gcHelperThread.sweeping()) + if (rt->gc.helperThread.sweeping()) zone->reduceGCTriggerBytes(zone->gcHeapGrowthFactor * ArenaSize); - rt->gcBytes -= ArenaSize; + rt->gc.bytes -= ArenaSize; zone->gcBytes -= ArenaSize; aheader->setAsNotAllocated(); @@ -983,11 +983,11 @@ Chunk::releaseArena(ArenaHeader *aheader) } else if (!unused()) { JS_ASSERT(info.prevp); } else { - rt->gcChunkSet.remove(this); + rt->gc.chunkSet.remove(this); removeFromAvailableList(); JS_ASSERT(info.numArenasFree == ArenasPerChunk); decommitAllArenas(rt); - rt->gcChunkPool.put(this); + rt->gc.chunkPool.put(this); } } @@ -1001,19 +1001,19 @@ PickChunk(Zone *zone) if (chunk) return chunk; - chunk = rt->gcChunkPool.get(rt); + chunk = rt->gc.chunkPool.get(rt); if (!chunk) return nullptr; - rt->gcChunkAllocationSinceLastGC = true; + rt->gc.chunkAllocationSinceLastGC = true; /* * FIXME bug 583732 - chunk is newly allocated and cannot be present in * the table so using ordinary lookupForAdd is suboptimal here. 
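
The FIXME here predates the patch: lookupForAdd still probes the table even though a freshly mapped chunk cannot be present, so an insertion that assumes absence would save the search. The gist in standard-library terms (addFreshChunk is illustrative; the real table is js::GCChunkSet):

    #include <cassert>
    #include <unordered_set>

    void addFreshChunk(std::unordered_set<void *> &chunkSet, void *chunk) {
        bool added = chunkSet.insert(chunk).second;  // one hash, one probe
        assert(added && "newly mapped chunk was already in the chunk set");
    }
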
*/ - GCChunkSet::AddPtr p = rt->gcChunkSet.lookupForAdd(chunk); + GCChunkSet::AddPtr p = rt->gc.chunkSet.lookupForAdd(chunk); JS_ASSERT(!p); - if (!rt->gcChunkSet.add(p, chunk)) { + if (!rt->gc.chunkSet.add(p, chunk)) { Chunk::release(rt, chunk); return nullptr; } @@ -1025,30 +1025,121 @@ PickChunk(Zone *zone) return chunk; } +js::gc::GCRuntime::GCRuntime(JSRuntime *rt) : + systemZone(nullptr), + systemAvailableChunkListHead(nullptr), + userAvailableChunkListHead(nullptr), + bytes(0), + maxBytes(0), + maxMallocBytes(0), + numArenasFreeCommitted(0), + marker(rt), + verifyPreData(nullptr), + verifyPostData(nullptr), + chunkAllocationSinceLastGC(false), + nextFullGCTime(0), + lastGCTime(0), + jitReleaseTime(0), + allocationThreshold(30 * 1024 * 1024), + highFrequencyGC(false), + highFrequencyTimeThreshold(1000), + highFrequencyLowLimitBytes(100 * 1024 * 1024), + highFrequencyHighLimitBytes(500 * 1024 * 1024), + highFrequencyHeapGrowthMax(3.0), + highFrequencyHeapGrowthMin(1.5), + lowFrequencyHeapGrowth(1.5), + dynamicHeapGrowth(false), + dynamicMarkSlice(false), + decommitThreshold(32 * 1024 * 1024), + shouldCleanUpEverything(false), + grayBitsValid(false), + isNeeded(0), + stats(rt), + number(0), + startNumber(0), + isFull(false), + triggerReason(JS::gcreason::NO_REASON), + strictCompartmentChecking(false), +#ifdef DEBUG + disableStrictProxyCheckingCount(0), +#endif + incrementalState(gc::NO_INCREMENTAL), + lastMarkSlice(false), + sweepOnBackgroundThread(false), + foundBlackGrayEdges(false), + sweepingZones(nullptr), + zoneGroupIndex(0), + zoneGroups(nullptr), + currentZoneGroup(nullptr), + sweepPhase(0), + sweepZone(nullptr), + sweepKindIndex(0), + abortSweepAfterCurrentGroup(false), + arenasAllocatedDuringSweep(nullptr), +#ifdef DEBUG + markingValidator(nullptr), +#endif + interFrameGC(0), + sliceBudget(SliceBudget::Unlimited), + incrementalEnabled(true), + generationalDisabled(0), + manipulatingDeadZones(false), + objectsMarkedInDeadZones(0), + poke(false), + heapState(Idle), +#ifdef JSGC_GENERATIONAL + nursery(rt), + storeBuffer(rt, nursery), +#endif +#ifdef JS_GC_ZEAL + zealMode(0), + zealFrequency(0), + nextScheduled(0), + deterministicOnly(false), + incrementalLimit(0), +#endif + validate(true), + fullCompartmentChecks(false), + callback(nullptr), + sliceCallback(nullptr), + finalizeCallback(nullptr), + mallocBytes(0), + mallocGCTriggered(false), + scriptAndCountsVector(nullptr), + alwaysPreserveCode(false), +#ifdef DEBUG + noGCOrAllocationCheck(0), +#endif + lock(nullptr), + lockOwner(nullptr), + helperThread(rt) +{ +} + #ifdef JS_GC_ZEAL extern void js::SetGCZeal(JSRuntime *rt, uint8_t zeal, uint32_t frequency) { - if (rt->gcVerifyPreData) + if (rt->gc.verifyPreData) VerifyBarriers(rt, PreBarrierVerifier); - if (rt->gcVerifyPostData) + if (rt->gc.verifyPostData) VerifyBarriers(rt, PostBarrierVerifier); #ifdef JSGC_GENERATIONAL - if (rt->gcZeal_ == ZealGenerationalGCValue) { + if (rt->gc.zealMode == ZealGenerationalGCValue) { MinorGC(rt, JS::gcreason::DEBUG_GC); - rt->gcNursery.leaveZealMode(); + rt->gc.nursery.leaveZealMode(); } if (zeal == ZealGenerationalGCValue) - rt->gcNursery.enterZealMode(); + rt->gc.nursery.enterZealMode(); #endif bool schedule = zeal >= js::gc::ZealAllocValue; - rt->gcZeal_ = zeal; - rt->gcZealFrequency = frequency; - rt->gcNextScheduled = schedule ? frequency : 0; + rt->gc.zealMode = zeal; + rt->gc.zealFrequency = frequency; + rt->gc.nextScheduled = schedule ? 
frequency : 0; } static bool @@ -1100,31 +1191,31 @@ static const int64_t JIT_SCRIPT_RELEASE_TYPES_INTERVAL = 60 * 1000 * 1000; bool js_InitGC(JSRuntime *rt, uint32_t maxbytes) { - if (!rt->gcChunkSet.init(INITIAL_CHUNK_CAPACITY)) + if (!rt->gc.chunkSet.init(INITIAL_CHUNK_CAPACITY)) return false; - if (!rt->gcRootsHash.init(256)) + if (!rt->gc.rootsHash.init(256)) return false; - if (!rt->gcHelperThread.init()) + if (!rt->gc.helperThread.init()) return false; /* * Separate gcMaxMallocBytes from gcMaxBytes but initialize to maxbytes * for default backward API compatibility. */ - rt->gcMaxBytes = maxbytes; + rt->gc.maxBytes = maxbytes; rt->setGCMaxMallocBytes(maxbytes); #ifndef JS_MORE_DETERMINISTIC - rt->gcJitReleaseTime = PRMJ_Now() + JIT_SCRIPT_RELEASE_TYPES_INTERVAL; + rt->gc.jitReleaseTime = PRMJ_Now() + JIT_SCRIPT_RELEASE_TYPES_INTERVAL; #endif #ifdef JSGC_GENERATIONAL - if (!rt->gcNursery.init()) + if (!rt->gc.nursery.init()) return false; - if (!rt->gcStoreBuffer.enable()) + if (!rt->gc.storeBuffer.enable()) return false; #endif @@ -1139,7 +1230,7 @@ js_InitGC(JSRuntime *rt, uint32_t maxbytes) static void RecordNativeStackTopForGC(JSRuntime *rt) { - ConservativeGCData *cgcd = &rt->conservativeGC; + ConservativeGCData *cgcd = &rt->gc.conservativeGC; #ifdef JS_THREADSAFE /* Record the stack top here only if we are called from a request. */ @@ -1156,7 +1247,7 @@ js_FinishGC(JSRuntime *rt) * Wait until the background finalization stops and the helper thread * shuts down before we forcefully release any remaining GC memory. */ - rt->gcHelperThread.finish(); + rt->gc.helperThread.finish(); #ifdef JS_GC_ZEAL /* Free memory associated with GC verification. */ @@ -1172,20 +1263,20 @@ js_FinishGC(JSRuntime *rt) } } - rt->zones.clear(); + rt->gc.zones.clear(); - rt->gcSystemAvailableChunkListHead = nullptr; - rt->gcUserAvailableChunkListHead = nullptr; - if (rt->gcChunkSet.initialized()) { - for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront()) + rt->gc.systemAvailableChunkListHead = nullptr; + rt->gc.userAvailableChunkListHead = nullptr; + if (rt->gc.chunkSet.initialized()) { + for (GCChunkSet::Range r(rt->gc.chunkSet.all()); !r.empty(); r.popFront()) Chunk::release(rt, r.front()); - rt->gcChunkSet.clear(); + rt->gc.chunkSet.clear(); } - rt->gcChunkPool.expireAndFree(rt, true); + rt->gc.chunkPool.expireAndFree(rt, true); - if (rt->gcRootsHash.initialized()) - rt->gcRootsHash.clear(); + if (rt->gc.rootsHash.initialized()) + rt->gc.rootsHash.clear(); rt->functionPersistentRooteds.clear(); rt->idPersistentRooteds.clear(); @@ -1209,10 +1300,10 @@ AddRoot(JSRuntime *rt, T *rp, const char *name, JSGCRootType rootType) * or ModifyBusyCount in workers). We need a read barrier to cover these * cases. */ - if (rt->gcIncrementalState != NO_INCREMENTAL) + if (rt->gc.incrementalState != NO_INCREMENTAL) BarrierOwner::result::writeBarrierPre(*rp); - return rt->gcRootsHash.put((void *)rp, RootInfo(name, rootType)); + return rt->gc.rootsHash.put((void *)rp, RootInfo(name, rootType)); } template @@ -1276,8 +1367,8 @@ js::RemoveRawValueRoot(JSContext *cx, Value *vp) void js::RemoveRoot(JSRuntime *rt, void *rp) { - rt->gcRootsHash.remove(rp); - rt->gcPoke = true; + rt->gc.rootsHash.remove(rp); + rt->gc.poke = true; } typedef RootedValueMap::Range RootRange; @@ -1287,7 +1378,7 @@ typedef RootedValueMap::Enum RootEnum; static size_t ComputeTriggerBytes(Zone *zone, size_t lastBytes, size_t maxBytes, JSGCInvocationKind gckind) { - size_t base = gckind == GC_SHRINK ? 
lastBytes : Max(lastBytes, zone->runtimeFromMainThread()->gcAllocationThreshold); + size_t base = gckind == GC_SHRINK ? lastBytes : Max(lastBytes, zone->runtimeFromMainThread()->gc.allocationThreshold); double trigger = double(base) * zone->gcHeapGrowthFactor; return size_t(Min(double(maxBytes), trigger)); } @@ -1305,33 +1396,33 @@ Zone::setGCLastBytes(size_t lastBytes, JSGCInvocationKind gckind) */ JSRuntime *rt = runtimeFromMainThread(); - if (!rt->gcDynamicHeapGrowth) { + if (!rt->gc.dynamicHeapGrowth) { gcHeapGrowthFactor = 3.0; } else if (lastBytes < 1 * 1024 * 1024) { - gcHeapGrowthFactor = rt->gcLowFrequencyHeapGrowth; + gcHeapGrowthFactor = rt->gc.lowFrequencyHeapGrowth; } else { - JS_ASSERT(rt->gcHighFrequencyHighLimitBytes > rt->gcHighFrequencyLowLimitBytes); + JS_ASSERT(rt->gc.highFrequencyHighLimitBytes > rt->gc.highFrequencyLowLimitBytes); uint64_t now = PRMJ_Now(); - if (rt->gcLastGCTime && rt->gcLastGCTime + rt->gcHighFrequencyTimeThreshold * PRMJ_USEC_PER_MSEC > now) { - if (lastBytes <= rt->gcHighFrequencyLowLimitBytes) { - gcHeapGrowthFactor = rt->gcHighFrequencyHeapGrowthMax; - } else if (lastBytes >= rt->gcHighFrequencyHighLimitBytes) { - gcHeapGrowthFactor = rt->gcHighFrequencyHeapGrowthMin; + if (rt->gc.lastGCTime && rt->gc.lastGCTime + rt->gc.highFrequencyTimeThreshold * PRMJ_USEC_PER_MSEC > now) { + if (lastBytes <= rt->gc.highFrequencyLowLimitBytes) { + gcHeapGrowthFactor = rt->gc.highFrequencyHeapGrowthMax; + } else if (lastBytes >= rt->gc.highFrequencyHighLimitBytes) { + gcHeapGrowthFactor = rt->gc.highFrequencyHeapGrowthMin; } else { - double k = (rt->gcHighFrequencyHeapGrowthMin - rt->gcHighFrequencyHeapGrowthMax) - / (double)(rt->gcHighFrequencyHighLimitBytes - rt->gcHighFrequencyLowLimitBytes); - gcHeapGrowthFactor = (k * (lastBytes - rt->gcHighFrequencyLowLimitBytes) - + rt->gcHighFrequencyHeapGrowthMax); - JS_ASSERT(gcHeapGrowthFactor <= rt->gcHighFrequencyHeapGrowthMax - && gcHeapGrowthFactor >= rt->gcHighFrequencyHeapGrowthMin); + double k = (rt->gc.highFrequencyHeapGrowthMin - rt->gc.highFrequencyHeapGrowthMax) + / (double)(rt->gc.highFrequencyHighLimitBytes - rt->gc.highFrequencyLowLimitBytes); + gcHeapGrowthFactor = (k * (lastBytes - rt->gc.highFrequencyLowLimitBytes) + + rt->gc.highFrequencyHeapGrowthMax); + JS_ASSERT(gcHeapGrowthFactor <= rt->gc.highFrequencyHeapGrowthMax + && gcHeapGrowthFactor >= rt->gc.highFrequencyHeapGrowthMin); } - rt->gcHighFrequencyGC = true; + rt->gc.highFrequencyGC = true; } else { - gcHeapGrowthFactor = rt->gcLowFrequencyHeapGrowth; - rt->gcHighFrequencyGC = false; + gcHeapGrowthFactor = rt->gc.lowFrequencyHeapGrowth; + rt->gc.highFrequencyGC = false; } } - gcTriggerBytes = ComputeTriggerBytes(this, lastBytes, rt->gcMaxBytes, gckind); + gcTriggerBytes = ComputeTriggerBytes(this, lastBytes, rt->gc.maxBytes, gckind); } void @@ -1339,7 +1430,7 @@ Zone::reduceGCTriggerBytes(size_t amount) { JS_ASSERT(amount > 0); JS_ASSERT(gcTriggerBytes >= amount); - if (gcTriggerBytes - amount < runtimeFromAnyThread()->gcAllocationThreshold * gcHeapGrowthFactor) + if (gcTriggerBytes - amount < runtimeFromAnyThread()->gc.allocationThreshold * gcHeapGrowthFactor) return; gcTriggerBytes -= amount; } @@ -1376,7 +1467,7 @@ ArenaLists::prepareForIncrementalGC(JSRuntime *rt) if (!headSpan->isEmpty()) { ArenaHeader *aheader = headSpan->arenaHeader(); aheader->allocatedDuringIncremental = true; - rt->gcMarker.delayMarkingArena(aheader); + rt->gc.marker.delayMarkingArena(aheader); } } } @@ -1384,8 +1475,8 @@ 
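
The interpolation in Zone::setGCLastBytes above is easier to read with the renames stripped away: between the low and high limits, the heap growth factor falls linearly from the maximum to the minimum. A self-contained sketch using the defaults the new GCRuntime constructor installs (100 MiB, 500 MiB, 3.0, 1.5):

    double heapGrowthFactor(double lastBytes) {
        const double lowLimit  = 100.0 * 1024 * 1024;  // highFrequencyLowLimitBytes
        const double highLimit = 500.0 * 1024 * 1024;  // highFrequencyHighLimitBytes
        const double growthMax = 3.0;                  // highFrequencyHeapGrowthMax
        const double growthMin = 1.5;                  // highFrequencyHeapGrowthMin
        if (lastBytes <= lowLimit)
            return growthMax;
        if (lastBytes >= highLimit)
            return growthMin;
        double k = (growthMin - growthMax) / (highLimit - lowLimit);
        return k * (lastBytes - lowLimit) + growthMax;
    }

The result always stays within [growthMin, growthMax], which is exactly what the assertion in the hunk checks.
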
ArenaLists::prepareForIncrementalGC(JSRuntime *rt) static inline void PushArenaAllocatedDuringSweep(JSRuntime *runtime, ArenaHeader *arena) { - arena->setNextAllocDuringSweep(runtime->gcArenasAllocatedDuringSweep); - runtime->gcArenasAllocatedDuringSweep = arena; + arena->setNextAllocDuringSweep(runtime->gc.arenasAllocatedDuringSweep); + runtime->gc.arenasAllocatedDuringSweep = arena; } inline void * @@ -1456,7 +1547,7 @@ ArenaLists::allocateFromArenaInline(Zone *zone, AllocKind thingKind) if (MOZ_UNLIKELY(zone->wasGCStarted())) { if (zone->needsBarrier()) { aheader->allocatedDuringIncremental = true; - zone->runtimeFromMainThread()->gcMarker.delayMarkingArena(aheader); + zone->runtimeFromMainThread()->gc.marker.delayMarkingArena(aheader); } else if (zone->isGCSweeping()) { PushArenaAllocatedDuringSweep(zone->runtimeFromMainThread(), aheader); } @@ -1489,7 +1580,7 @@ ArenaLists::allocateFromArenaInline(Zone *zone, AllocKind thingKind) if (MOZ_UNLIKELY(zone->wasGCStarted())) { if (zone->needsBarrier()) { aheader->allocatedDuringIncremental = true; - zone->runtimeFromMainThread()->gcMarker.delayMarkingArena(aheader); + zone->runtimeFromMainThread()->gc.marker.delayMarkingArena(aheader); } else if (zone->isGCSweeping()) { PushArenaAllocatedDuringSweep(zone->runtimeFromMainThread(), aheader); } @@ -1587,7 +1678,7 @@ ArenaLists::queueForBackgroundSweep(FreeOp *fop, AllocKind thingKind) JS_ASSERT(IsBackgroundFinalized(thingKind)); #ifdef JS_THREADSAFE - JS_ASSERT(!fop->runtime()->gcHelperThread.sweeping()); + JS_ASSERT(!fop->runtime()->gc.helperThread.sweeping()); #endif ArenaList *al = &arenaLists[thingKind]; @@ -1659,7 +1750,7 @@ ArenaLists::backgroundFinalize(FreeOp *fop, ArenaHeader *listHead, bool onBackgr void ArenaLists::queueObjectsForSweep(FreeOp *fop) { - gcstats::AutoPhase ap(fop->runtime()->gcStats, gcstats::PHASE_SWEEP_OBJECT); + gcstats::AutoPhase ap(fop->runtime()->gc.stats, gcstats::PHASE_SWEEP_OBJECT); finalizeNow(fop, FINALIZE_OBJECT0); finalizeNow(fop, FINALIZE_OBJECT2); @@ -1679,7 +1770,7 @@ ArenaLists::queueObjectsForSweep(FreeOp *fop) void ArenaLists::queueStringsForSweep(FreeOp *fop) { - gcstats::AutoPhase ap(fop->runtime()->gcStats, gcstats::PHASE_SWEEP_STRING); + gcstats::AutoPhase ap(fop->runtime()->gc.stats, gcstats::PHASE_SWEEP_STRING); queueForBackgroundSweep(fop, FINALIZE_FAT_INLINE_STRING); queueForBackgroundSweep(fop, FINALIZE_STRING); @@ -1690,7 +1781,7 @@ ArenaLists::queueStringsForSweep(FreeOp *fop) void ArenaLists::queueScriptsForSweep(FreeOp *fop) { - gcstats::AutoPhase ap(fop->runtime()->gcStats, gcstats::PHASE_SWEEP_SCRIPT); + gcstats::AutoPhase ap(fop->runtime()->gc.stats, gcstats::PHASE_SWEEP_SCRIPT); queueForForegroundSweep(fop, FINALIZE_SCRIPT); queueForForegroundSweep(fop, FINALIZE_LAZY_SCRIPT); } @@ -1698,14 +1789,14 @@ ArenaLists::queueScriptsForSweep(FreeOp *fop) void ArenaLists::queueJitCodeForSweep(FreeOp *fop) { - gcstats::AutoPhase ap(fop->runtime()->gcStats, gcstats::PHASE_SWEEP_JITCODE); + gcstats::AutoPhase ap(fop->runtime()->gc.stats, gcstats::PHASE_SWEEP_JITCODE); queueForForegroundSweep(fop, FINALIZE_JITCODE); } void ArenaLists::queueShapesForSweep(FreeOp *fop) { - gcstats::AutoPhase ap(fop->runtime()->gcStats, gcstats::PHASE_SWEEP_SHAPE); + gcstats::AutoPhase ap(fop->runtime()->gc.stats, gcstats::PHASE_SWEEP_SHAPE); queueForBackgroundSweep(fop, FINALIZE_SHAPE); queueForBackgroundSweep(fop, FINALIZE_BASE_SHAPE); @@ -1751,7 +1842,7 @@ ArenaLists::refillFreeList(ThreadSafeContext *cx, AllocKind thingKind) Zone *zone = cx->allocator()->zone_; 
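
The lines that follow decide whether an allocation that found its free lists empty should force a GC slice. Condensed (State stands in for gc::State, and the two allow-GC flags are collapsed into one parameter):

    #include <cstddef>

    enum State { NO_INCREMENTAL, MARK_ROOTS, MARK, SWEEP };

    bool shouldRunGC(bool allowGC, State incrementalState,
                     size_t zoneBytes, size_t zoneTriggerBytes) {
        // Only allocations made while an incremental GC is already underway
        // and the zone is past its trigger force a slice from here.
        return allowGC &&
               incrementalState != NO_INCREMENTAL &&
               zoneBytes > zoneTriggerBytes;
    }
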
bool runGC = cx->allowGC() && allowGC && - cx->asJSContext()->runtime()->gcIncrementalState != NO_INCREMENTAL && + cx->asJSContext()->runtime()->gc.incrementalState != NO_INCREMENTAL && zone->gcBytes > zone->gcTriggerBytes; #ifdef JS_THREADSAFE @@ -1781,7 +1872,7 @@ ArenaLists::refillFreeList(ThreadSafeContext *cx, AllocKind thingKind) if (secondAttempt) break; - cx->asJSContext()->runtime()->gcHelperThread.waitBackgroundSweepEnd(); + cx->asJSContext()->runtime()->gc.helperThread.waitBackgroundSweepEnd(); } } else { #ifdef JS_THREADSAFE @@ -1888,11 +1979,11 @@ js::MarkCompartmentActive(InterpreterFrame *fp) static void RequestInterrupt(JSRuntime *rt, JS::gcreason::Reason reason) { - if (rt->gcIsNeeded) + if (rt->gc.isNeeded) return; - rt->gcIsNeeded = true; - rt->gcTriggerReason = reason; + rt->gc.isNeeded = true; + rt->gc.triggerReason = reason; rt->requestInterrupt(JSRuntime::RequestInterruptMainThread); } @@ -1974,17 +2065,17 @@ js::MaybeGC(JSContext *cx) return; } - if (rt->gcIsNeeded) { + if (rt->gc.isNeeded) { GCSlice(rt, GC_NORMAL, JS::gcreason::MAYBEGC); return; } - double factor = rt->gcHighFrequencyGC ? 0.85 : 0.9; + double factor = rt->gc.highFrequencyGC ? 0.85 : 0.9; Zone *zone = cx->zone(); if (zone->gcBytes > 1024 * 1024 && zone->gcBytes >= factor * zone->gcTriggerBytes && - rt->gcIncrementalState == NO_INCREMENTAL && - !rt->gcHelperThread.sweeping()) + rt->gc.incrementalState == NO_INCREMENTAL && + !rt->gc.helperThread.sweeping()) { PrepareZoneForGC(zone); GCSlice(rt, GC_NORMAL, JS::gcreason::MAYBEGC); @@ -1998,14 +2089,14 @@ js::MaybeGC(JSContext *cx) * tolerate this. */ int64_t now = PRMJ_Now(); - if (rt->gcNextFullGCTime && rt->gcNextFullGCTime <= now) { - if (rt->gcChunkAllocationSinceLastGC || - rt->gcNumArenasFreeCommitted > rt->gcDecommitThreshold) + if (rt->gc.nextFullGCTime && rt->gc.nextFullGCTime <= now) { + if (rt->gc.chunkAllocationSinceLastGC || + rt->gc.numArenasFreeCommitted > rt->gc.decommitThreshold) { JS::PrepareForFullGC(rt); GCSlice(rt, GC_SHRINK, JS::gcreason::MAYBEGC); } else { - rt->gcNextFullGCTime = now + GC_IDLE_FULL_SPAN; + rt->gc.nextFullGCTime = now + GC_IDLE_FULL_SPAN; } } #endif @@ -2065,7 +2156,7 @@ DecommitArenasFromAvailableList(JSRuntime *rt, Chunk **availableListHeadp) Maybe maybeUnlock; if (!rt->isHeapBusy()) maybeUnlock.construct(rt); - ok = rt->pageAllocator.markPagesUnused(aheader->getArena(), ArenaSize); + ok = rt->gc.pageAllocator.markPagesUnused(aheader->getArena(), ArenaSize); } if (ok) { @@ -2095,7 +2186,7 @@ DecommitArenasFromAvailableList(JSRuntime *rt, Chunk **availableListHeadp) JS_ASSERT(chunk->info.prevp); } - if (rt->gcChunkAllocationSinceLastGC || !ok) { + if (rt->gc.chunkAllocationSinceLastGC || !ok) { /* * The allocator thread has started to get new chunks. We should stop * to avoid decommitting arenas in just allocated chunks. @@ -2123,15 +2214,15 @@ DecommitArenasFromAvailableList(JSRuntime *rt, Chunk **availableListHeadp) static void DecommitArenas(JSRuntime *rt) { - DecommitArenasFromAvailableList(rt, &rt->gcSystemAvailableChunkListHead); - DecommitArenasFromAvailableList(rt, &rt->gcUserAvailableChunkListHead); + DecommitArenasFromAvailableList(rt, &rt->gc.systemAvailableChunkListHead); + DecommitArenasFromAvailableList(rt, &rt->gc.userAvailableChunkListHead); } /* Must be called with the GC lock taken. 
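
MaybeGC, a little further up, keeps its early-start heuristic through the rename: a zone past 85% of its trigger under high-frequency pressure, or 90% otherwise, starts a GC before the hard trigger fires. Reduced to the arithmetic:

    #include <cstddef>

    bool maybeStartGC(bool highFrequencyGC, size_t zoneBytes,
                      size_t zoneTriggerBytes) {
        double factor = highFrequencyGC ? 0.85 : 0.9;
        return zoneBytes > 1024 * 1024 &&            // ignore tiny zones
               zoneBytes >= factor * zoneTriggerBytes;
    }

As the hunk shows, MaybeGC also requires rt->gc.incrementalState == NO_INCREMENTAL and no background sweep in flight before acting.
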
*/ static void ExpireChunksAndArenas(JSRuntime *rt, bool shouldShrink) { - if (Chunk *toFree = rt->gcChunkPool.expire(rt, shouldShrink)) { + if (Chunk *toFree = rt->gc.chunkPool.expire(rt, shouldShrink)) { AutoUnlockGC unlock(rt); FreeChunkList(rt, toFree); } @@ -2149,7 +2240,7 @@ SweepBackgroundThings(JSRuntime* rt, bool onBackgroundThread) */ FreeOp fop(rt, false); for (int phase = 0 ; phase < BackgroundPhaseCount ; ++phase) { - for (Zone *zone = rt->gcSweepingZones; zone; zone = zone->gcNextGraphNode) { + for (Zone *zone = rt->gc.sweepingZones; zone; zone = zone->gcNextGraphNode) { for (int index = 0 ; index < BackgroundPhaseLength[phase] ; ++index) { AllocKind kind = BackgroundPhases[phase][index]; ArenaHeader *arenas = zone->allocator.arenas.arenaListsToSweep[kind]; @@ -2159,14 +2250,14 @@ SweepBackgroundThings(JSRuntime* rt, bool onBackgroundThread) } } - rt->gcSweepingZones = nullptr; + rt->gc.sweepingZones = nullptr; } #ifdef JS_THREADSAFE static void AssertBackgroundSweepingFinished(JSRuntime *rt) { - JS_ASSERT(!rt->gcSweepingZones); + JS_ASSERT(!rt->gc.sweepingZones); for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) { for (unsigned i = 0; i < FINALIZE_LIMIT; ++i) { JS_ASSERT(!zone->allocator.arenas.arenaListsToSweep[i]); @@ -2202,9 +2293,9 @@ GCHelperThread::init() } #ifdef JS_THREADSAFE - if (!(wakeup = PR_NewCondVar(rt->gcLock))) + if (!(wakeup = PR_NewCondVar(rt->gc.lock))) return false; - if (!(done = PR_NewCondVar(rt->gcLock))) + if (!(done = PR_NewCondVar(rt->gc.lock))) return false; thread = PR_CreateThread(PR_USER_THREAD, threadMain, this, PR_PRIORITY_NORMAL, @@ -2220,7 +2311,7 @@ GCHelperThread::init() void GCHelperThread::finish() { - if (!rt->useHelperThreads() || !rt->gcLock) { + if (!rt->useHelperThreads() || !rt->gc.lock) { JS_ASSERT(state == IDLE); return; } @@ -2279,10 +2370,10 @@ GCHelperThread::threadMain(void *arg) void GCHelperThread::wait(PRCondVar *which) { - rt->gcLockOwner = nullptr; + rt->gc.lockOwner = nullptr; PR_WaitCondVar(which, PR_INTERVAL_NO_TIMEOUT); #ifdef DEBUG - rt->gcLockOwner = PR_GetCurrentThread(); + rt->gc.lockOwner = PR_GetCurrentThread(); #endif } @@ -2326,8 +2417,8 @@ GCHelperThread::threadLoop() if (!chunk) break; JS_ASSERT(chunk->info.numArenasFreeCommitted == 0); - rt->gcChunkPool.put(chunk); - } while (state == ALLOCATING && rt->gcChunkPool.wantBackgroundAllocation(rt)); + rt->gc.chunkPool.put(chunk); + } while (state == ALLOCATING && rt->gc.chunkPool.wantBackgroundAllocation(rt)); if (state == ALLOCATING) state = IDLE; break; @@ -2399,7 +2490,7 @@ GCHelperThread::waitBackgroundSweepEnd() AutoLockGC lock(rt); while (state == SWEEPING) wait(done); - if (rt->gcIncrementalState == NO_INCREMENTAL) + if (rt->gc.incrementalState == NO_INCREMENTAL) AssertBackgroundSweepingFinished(rt); #endif /* JS_THREADSAFE */ } @@ -2418,7 +2509,7 @@ GCHelperThread::waitBackgroundSweepOrAllocEnd() state = CANCEL_ALLOCATION; while (state == SWEEPING || state == CANCEL_ALLOCATION) wait(done); - if (rt->gcIncrementalState == NO_INCREMENTAL) + if (rt->gc.incrementalState == NO_INCREMENTAL) AssertBackgroundSweepingFinished(rt); #endif /* JS_THREADSAFE */ } @@ -2515,10 +2606,10 @@ ReleaseObservedTypes(JSRuntime *rt) #ifndef JS_MORE_DETERMINISTIC int64_t now = PRMJ_Now(); - if (now >= rt->gcJitReleaseTime) + if (now >= rt->gc.jitReleaseTime) releaseTypes = true; if (releaseTypes) - rt->gcJitReleaseTime = now + JIT_SCRIPT_RELEASE_TYPES_INTERVAL; + rt->gc.jitReleaseTime = now + JIT_SCRIPT_RELEASE_TYPES_INTERVAL; #endif return releaseTypes; @@ 
-2574,11 +2665,11 @@ SweepZones(FreeOp *fop, bool lastGC) JSZoneCallback callback = rt->destroyZoneCallback; /* Skip the atomsCompartment zone. */ - Zone **read = rt->zones.begin() + 1; - Zone **end = rt->zones.end(); + Zone **read = rt->gc.zones.begin() + 1; + Zone **end = rt->gc.zones.end(); Zone **write = read; - JS_ASSERT(rt->zones.length() >= 1); - JS_ASSERT(rt->isAtomsZone(rt->zones[0])); + JS_ASSERT(rt->gc.zones.length() >= 1); + JS_ASSERT(rt->isAtomsZone(rt->gc.zones[0])); while (read < end) { Zone *zone = *read++; @@ -2599,7 +2690,7 @@ SweepZones(FreeOp *fop, bool lastGC) } *write++ = zone; } - rt->zones.resize(write - rt->zones.begin()); + rt->gc.zones.resize(write - rt->gc.zones.begin()); } static void @@ -2626,10 +2717,10 @@ static bool ShouldPreserveJITCode(JSCompartment *comp, int64_t currentTime) { JSRuntime *rt = comp->runtimeFromMainThread(); - if (rt->gcShouldCleanUpEverything) + if (rt->gc.shouldCleanUpEverything) return false; - if (rt->alwaysPreserveCode) + if (rt->gc.alwaysPreserveCode) return true; if (comp->lastAnimationTime + PRMJ_USEC_PER_SEC >= currentTime) return true; @@ -2719,7 +2810,7 @@ CheckCompartmentCallback(JSTracer *trcArg, void **thingp, JSGCTraceKind kind) static void CheckForCompartmentMismatches(JSRuntime *rt) { - if (rt->gcDisableStrictProxyCheckingCount) + if (rt->gc.disableStrictProxyCheckingCount) return; CompartmentCheckTracer trc(rt, CheckCompartmentCallback); @@ -2743,11 +2834,11 @@ BeginMarkPhase(JSRuntime *rt) int64_t currentTime = PRMJ_Now(); #ifdef DEBUG - if (rt->gcFullCompartmentChecks) + if (rt->gc.fullCompartmentChecks) CheckForCompartmentMismatches(rt); #endif - rt->gcIsFull = true; + rt->gc.isFull = true; bool any = false; for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) { @@ -2764,7 +2855,7 @@ BeginMarkPhase(JSRuntime *rt) zone->setGCState(Zone::Mark); } } else { - rt->gcIsFull = false; + rt->gc.isFull = false; } zone->scheduledForDestruction = false; @@ -2779,7 +2870,7 @@ BeginMarkPhase(JSRuntime *rt) c->zone()->setPreservingCode(true); } - if (!rt->gcShouldCleanUpEverything) { + if (!rt->gc.shouldCleanUpEverything) { #ifdef JS_ION if (JSCompartment *comp = jit::TopmostJitActivationCompartment(rt)) comp->zone()->setPreservingCode(true); @@ -2796,7 +2887,7 @@ BeginMarkPhase(JSRuntime *rt) * on. If the value of keepAtoms() changes between GC slices, then we'll * cancel the incremental GC. See IsIncrementalGCSafe. */ - if (rt->gcIsFull && !rt->keepAtoms()) { + if (rt->gc.isFull && !rt->keepAtoms()) { Zone *atomsZone = rt->atomsCompartment()->zone(); if (atomsZone->isGCScheduled()) { JS_ASSERT(!atomsZone->isCollecting()); @@ -2816,26 +2907,26 @@ BeginMarkPhase(JSRuntime *rt) * arenas. This purge call ensures that we only mark arenas that have had * allocations after the incremental GC started. */ - if (rt->gcIsIncremental) { + if (rt->gc.isIncremental) { for (GCZonesIter zone(rt); !zone.done(); zone.next()) zone->allocator.arenas.purge(); } - rt->gcMarker.start(); - JS_ASSERT(!rt->gcMarker.callback); - JS_ASSERT(IS_GC_MARKING_TRACER(&rt->gcMarker)); + rt->gc.marker.start(); + JS_ASSERT(!rt->gc.marker.callback); + JS_ASSERT(IS_GC_MARKING_TRACER(&rt->gc.marker)); /* For non-incremental GC the following sweep discards the jit code. 
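
ShouldPreserveJITCode, above, now reads its policy bits from rt->gc, and the order of its tests matters. A condensed sketch, with PRMJ_USEC_PER_SEC written out:

    #include <cstdint>

    bool shouldPreserveJitCode(bool shouldCleanUpEverything,
                               bool alwaysPreserveCode,
                               int64_t lastAnimationTime, int64_t currentTime) {
        if (shouldCleanUpEverything)
            return false;      // shutdown GC: nothing may survive
        if (alwaysPreserveCode)
            return true;       // testing hook forces preservation
        // Compartments that animated within the last second keep their code.
        return lastAnimationTime + 1000000 >= currentTime;
    }
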
*/ - if (rt->gcIsIncremental) { + if (rt->gc.isIncremental) { for (GCZonesIter zone(rt); !zone.done(); zone.next()) { - gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_MARK_DISCARD_CODE); + gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_MARK_DISCARD_CODE); zone->discardJitCode(rt->defaultFreeOp()); } } - GCMarker *gcmarker = &rt->gcMarker; + GCMarker *gcmarker = &rt->gc.marker; - rt->gcStartNumber = rt->gcNumber; + rt->gc.startNumber = rt->gc.number; /* * We must purge the runtime at the beginning of an incremental GC. The @@ -2847,15 +2938,15 @@ BeginMarkPhase(JSRuntime *rt) * a GC hazard would exist. */ { - gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_PURGE); + gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_PURGE); PurgeRuntime(rt); } /* * Mark phase. */ - gcstats::AutoPhase ap1(rt->gcStats, gcstats::PHASE_MARK); - gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_MARK_ROOTS); + gcstats::AutoPhase ap1(rt->gc.stats, gcstats::PHASE_MARK); + gcstats::AutoPhase ap2(rt->gc.stats, gcstats::PHASE_MARK_ROOTS); for (GCZonesIter zone(rt); !zone.done(); zone.next()) { /* Unmark everything in the zones being collected. */ @@ -2867,11 +2958,11 @@ BeginMarkPhase(JSRuntime *rt) WeakMapBase::resetCompartmentWeakMapList(c); } - if (rt->gcIsFull) + if (rt->gc.isFull) UnmarkScriptData(rt); MarkRuntime(gcmarker); - if (rt->gcIsIncremental) + if (rt->gc.isIncremental) BufferGrayRoots(gcmarker); /* @@ -2921,7 +3012,7 @@ BeginMarkPhase(JSRuntime *rt) if (!zone->maybeAlive && !rt->isAtomsZone(zone)) zone->scheduledForDestruction = true; } - rt->gcFoundBlackGrayEdges = false; + rt->gc.foundBlackGrayEdges = false; return true; } @@ -2930,11 +3021,11 @@ template static void MarkWeakReferences(JSRuntime *rt, gcstats::Phase phase) { - GCMarker *gcmarker = &rt->gcMarker; + GCMarker *gcmarker = &rt->gc.marker; JS_ASSERT(gcmarker->isDrained()); - gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_MARK); - gcstats::AutoPhase ap1(rt->gcStats, phase); + gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_SWEEP_MARK); + gcstats::AutoPhase ap1(rt->gc.stats, phase); for (;;) { bool markedAny = false; @@ -2963,19 +3054,19 @@ template static void MarkGrayReferences(JSRuntime *rt) { - GCMarker *gcmarker = &rt->gcMarker; + GCMarker *gcmarker = &rt->gc.marker; { - gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_MARK); - gcstats::AutoPhase ap1(rt->gcStats, gcstats::PHASE_SWEEP_MARK_GRAY); + gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_SWEEP_MARK); + gcstats::AutoPhase ap1(rt->gc.stats, gcstats::PHASE_SWEEP_MARK_GRAY); gcmarker->setMarkColorGray(); if (gcmarker->hasBufferedGrayRoots()) { for (ZoneIterT zone(rt); !zone.done(); zone.next()) gcmarker->markBufferedGrayRoots(zone); } else { - JS_ASSERT(!rt->gcIsIncremental); - if (JSTraceDataOp op = rt->gcGrayRootTracer.op) - (*op)(gcmarker, rt->gcGrayRootTracer.data); + JS_ASSERT(!rt->gc.isIncremental); + if (JSTraceDataOp op = rt->gc.grayRootTracer.op) + (*op)(gcmarker, rt->gc.grayRootTracer.data); } SliceBudget budget; gcmarker->drainMarkStack(budget); @@ -3051,10 +3142,10 @@ js::gc::MarkingValidator::nonIncrementalMark() if (!map.init()) return; - GCMarker *gcmarker = &runtime->gcMarker; + GCMarker *gcmarker = &runtime->gc.marker; /* Save existing mark bits. 
*/ - for (GCChunkSet::Range r(runtime->gcChunkSet.all()); !r.empty(); r.popFront()) { + for (GCChunkSet::Range r(runtime->gc.chunkSet.all()); !r.empty(); r.popFront()) { ChunkBitmap *bitmap = &r.front()->bitmap; ChunkBitmap *entry = js_new(); if (!entry) @@ -3092,31 +3183,31 @@ js::gc::MarkingValidator::nonIncrementalMark() } /* Re-do all the marking, but non-incrementally. */ - js::gc::State state = runtime->gcIncrementalState; - runtime->gcIncrementalState = MARK_ROOTS; + js::gc::State state = runtime->gc.incrementalState; + runtime->gc.incrementalState = MARK_ROOTS; JS_ASSERT(gcmarker->isDrained()); gcmarker->reset(); - for (GCChunkSet::Range r(runtime->gcChunkSet.all()); !r.empty(); r.popFront()) + for (GCChunkSet::Range r(runtime->gc.chunkSet.all()); !r.empty(); r.popFront()) r.front()->bitmap.clear(); { - gcstats::AutoPhase ap1(runtime->gcStats, gcstats::PHASE_MARK); - gcstats::AutoPhase ap2(runtime->gcStats, gcstats::PHASE_MARK_ROOTS); + gcstats::AutoPhase ap1(runtime->gc.stats, gcstats::PHASE_MARK); + gcstats::AutoPhase ap2(runtime->gc.stats, gcstats::PHASE_MARK_ROOTS); MarkRuntime(gcmarker, true); } { - gcstats::AutoPhase ap1(runtime->gcStats, gcstats::PHASE_MARK); + gcstats::AutoPhase ap1(runtime->gc.stats, gcstats::PHASE_MARK); SliceBudget budget; - runtime->gcIncrementalState = MARK; - runtime->gcMarker.drainMarkStack(budget); + runtime->gc.incrementalState = MARK; + runtime->gc.marker.drainMarkStack(budget); } - runtime->gcIncrementalState = SWEEP; + runtime->gc.incrementalState = SWEEP; { - gcstats::AutoPhase ap(runtime->gcStats, gcstats::PHASE_SWEEP); + gcstats::AutoPhase ap(runtime->gc.stats, gcstats::PHASE_SWEEP); MarkAllWeakReferences(runtime, gcstats::PHASE_SWEEP_MARK_WEAK); /* Update zone state for gray marking. */ @@ -3135,7 +3226,7 @@ js::gc::MarkingValidator::nonIncrementalMark() } /* Take a copy of the non-incremental mark state and restore the original. 
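
After the swap below, the saved bitmaps hold the non-incremental results while the chunks get their incremental bits back, so validate() can compare the two. The invariant, sketched with the bitmap reduced to a byte array: incremental marking may over-approximate liveness, but it must never miss anything the non-incremental mark found.

    struct ChunkBitmap { unsigned char bits[1024]; };

    bool marksAreValid(const ChunkBitmap &incremental,
                       const ChunkBitmap &nonIncremental) {
        for (unsigned i = 0; i < sizeof(incremental.bits); i++) {
            if (nonIncremental.bits[i] & ~incremental.bits[i])
                return false;  // a live cell was missed: marking bug
        }
        return true;
    }
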
*/ - for (GCChunkSet::Range r(runtime->gcChunkSet.all()); !r.empty(); r.popFront()) { + for (GCChunkSet::Range r(runtime->gc.chunkSet.all()); !r.empty(); r.popFront()) { Chunk *chunk = r.front(); ChunkBitmap *bitmap = &chunk->bitmap; ChunkBitmap *entry = map.lookup(chunk)->value(); @@ -3149,7 +3240,7 @@ js::gc::MarkingValidator::nonIncrementalMark() WeakMapBase::restoreCompartmentWeakMapLists(weakmaps); ArrayBufferObject::restoreArrayBufferLists(arrayBuffers); - runtime->gcIncrementalState = state; + runtime->gc.incrementalState = state; } void @@ -3163,7 +3254,7 @@ js::gc::MarkingValidator::validate() if (!initialized) return; - for (GCChunkSet::Range r(runtime->gcChunkSet.all()); !r.empty(); r.popFront()) { + for (GCChunkSet::Range r(runtime->gc.chunkSet.all()); !r.empty(); r.popFront()) { Chunk *chunk = r.front(); BitmapMap::Ptr ptr = map.lookup(chunk); if (!ptr) @@ -3214,11 +3305,11 @@ static void ComputeNonIncrementalMarkingForValidation(JSRuntime *rt) { #ifdef DEBUG - JS_ASSERT(!rt->gcMarkingValidator); - if (rt->gcIsIncremental && rt->gcValidate) - rt->gcMarkingValidator = js_new(rt); - if (rt->gcMarkingValidator) - rt->gcMarkingValidator->nonIncrementalMark(); + JS_ASSERT(!rt->gc.markingValidator); + if (rt->gc.isIncremental && rt->gc.validate) + rt->gc.markingValidator = js_new(rt); + if (rt->gc.markingValidator) + rt->gc.markingValidator->nonIncrementalMark(); #endif } @@ -3226,8 +3317,8 @@ static void ValidateIncrementalMarking(JSRuntime *rt) { #ifdef DEBUG - if (rt->gcMarkingValidator) - rt->gcMarkingValidator->validate(); + if (rt->gc.markingValidator) + rt->gc.markingValidator->validate(); #endif } @@ -3235,8 +3326,8 @@ static void FinishMarkingValidation(JSRuntime *rt) { #ifdef DEBUG - js_delete(rt->gcMarkingValidator); - rt->gcMarkingValidator = nullptr; + js_delete(rt->gc.markingValidator); + rt->gc.markingValidator = nullptr; #endif } @@ -3338,17 +3429,17 @@ static void FindZoneGroups(JSRuntime *rt) { ComponentFinder finder(rt->mainThread.nativeStackLimit[StackForSystemCode]); - if (!rt->gcIsIncremental) + if (!rt->gc.isIncremental) finder.useOneComponent(); for (GCZonesIter zone(rt); !zone.done(); zone.next()) { JS_ASSERT(zone->isGCMarking()); finder.addNode(zone); } - rt->gcZoneGroups = finder.getResultsList(); - rt->gcCurrentZoneGroup = rt->gcZoneGroups; - rt->gcZoneGroupIndex = 0; - JS_ASSERT_IF(!rt->gcIsIncremental, !rt->gcCurrentZoneGroup->nextGroup()); + rt->gc.zoneGroups = finder.getResultsList(); + rt->gc.currentZoneGroup = rt->gc.zoneGroups; + rt->gc.zoneGroupIndex = 0; + JS_ASSERT_IF(!rt->gc.isIncremental, !rt->gc.currentZoneGroup->nextGroup()); } static void @@ -3357,18 +3448,18 @@ ResetGrayList(JSCompartment* comp); static void GetNextZoneGroup(JSRuntime *rt) { - rt->gcCurrentZoneGroup = rt->gcCurrentZoneGroup->nextGroup(); - ++rt->gcZoneGroupIndex; - if (!rt->gcCurrentZoneGroup) { - rt->gcAbortSweepAfterCurrentGroup = false; + rt->gc.currentZoneGroup = rt->gc.currentZoneGroup->nextGroup(); + ++rt->gc.zoneGroupIndex; + if (!rt->gc.currentZoneGroup) { + rt->gc.abortSweepAfterCurrentGroup = false; return; } - if (!rt->gcIsIncremental) - ComponentFinder::mergeGroups(rt->gcCurrentZoneGroup); + if (!rt->gc.isIncremental) + ComponentFinder::mergeGroups(rt->gc.currentZoneGroup); - if (rt->gcAbortSweepAfterCurrentGroup) { - JS_ASSERT(!rt->gcIsIncremental); + if (rt->gc.abortSweepAfterCurrentGroup) { + JS_ASSERT(!rt->gc.isIncremental); for (GCZoneGroupIter zone(rt); !zone.done(); zone.next()) { JS_ASSERT(!zone->gcNextGraphComponent); JS_ASSERT(zone->isGCMarking()); @@ 
-3384,8 +3475,8 @@ GetNextZoneGroup(JSRuntime *rt) ResetGrayList(comp); } - rt->gcAbortSweepAfterCurrentGroup = false; - rt->gcCurrentZoneGroup = nullptr; + rt->gc.abortSweepAfterCurrentGroup = false; + rt->gc.currentZoneGroup = nullptr; } } @@ -3496,12 +3587,12 @@ MarkIncomingCrossCompartmentPointers(JSRuntime *rt, const uint32_t color) { JS_ASSERT(color == BLACK || color == GRAY); - gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_MARK); + gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_SWEEP_MARK); static const gcstats::Phase statsPhases[] = { gcstats::PHASE_SWEEP_MARK_INCOMING_BLACK, gcstats::PHASE_SWEEP_MARK_INCOMING_GRAY }; - gcstats::AutoPhase ap1(rt->gcStats, statsPhases[color]); + gcstats::AutoPhase ap1(rt->gc.stats, statsPhases[color]); bool unlinkList = color == GRAY; @@ -3519,11 +3610,11 @@ MarkIncomingCrossCompartmentPointers(JSRuntime *rt, const uint32_t color) if (color == GRAY) { if (IsObjectMarked(&src) && src->isMarked(GRAY)) - MarkGCThingUnbarriered(&rt->gcMarker, (void**)&dst, + MarkGCThingUnbarriered(&rt->gc.marker, (void**)&dst, "cross-compartment gray pointer"); } else { if (IsObjectMarked(&src) && !src->isMarked(GRAY)) - MarkGCThingUnbarriered(&rt->gcMarker, (void**)&dst, + MarkGCThingUnbarriered(&rt->gc.marker, (void**)&dst, "cross-compartment black pointer"); } } @@ -3533,7 +3624,7 @@ MarkIncomingCrossCompartmentPointers(JSRuntime *rt, const uint32_t color) } SliceBudget budget; - rt->gcMarker.drainMarkStack(budget); + rt->gc.marker.drainMarkStack(budget); } static bool @@ -3642,9 +3733,9 @@ EndMarkingZoneGroup(JSRuntime *rt) } /* Mark incoming gray pointers from previously swept compartments. */ - rt->gcMarker.setMarkColorGray(); + rt->gc.marker.setMarkColorGray(); MarkIncomingCrossCompartmentPointers(rt, GRAY); - rt->gcMarker.setMarkColorBlack(); + rt->gc.marker.setMarkColorBlack(); /* Mark gray roots and mark transitively inside the current compartment group. 
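
The color switching just above supports the embedding's cycle collector: edges coming into the current group from already-swept compartments are traced in gray mode first, then the marker returns to black for ordinary liveness. The discipline, with an illustrative stand-in for the marker:

    struct Marker { bool gray = false; };

    void markIncomingEdges(Marker &m) {
        m.gray = true;     // setMarkColorGray(): record gray reachability
        // ... trace incoming cross-compartment pointers ...
        m.gray = false;    // setMarkColorBlack(): restore the default
    }
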
*/ MarkGrayReferencesInCurrentGroup(rt); @@ -3655,7 +3746,7 @@ EndMarkingZoneGroup(JSRuntime *rt) zone->setGCState(Zone::Mark); } - JS_ASSERT(rt->gcMarker.isDrained()); + JS_ASSERT(rt->gc.marker.isDrained()); } static void @@ -3684,16 +3775,16 @@ BeginSweepingZoneGroup(JSRuntime *rt) ValidateIncrementalMarking(rt); - FreeOp fop(rt, rt->gcSweepOnBackgroundThread); + FreeOp fop(rt, rt->gc.sweepOnBackgroundThread); { - gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_FINALIZE_START); - if (rt->gcFinalizeCallback) - rt->gcFinalizeCallback(&fop, JSFINALIZE_GROUP_START, !rt->gcIsFull /* unused */); + gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_FINALIZE_START); + if (rt->gc.finalizeCallback) + rt->gc.finalizeCallback(&fop, JSFINALIZE_GROUP_START, !rt->gc.isFull /* unused */); } if (sweepingAtoms) { - gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_ATOMS); + gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_SWEEP_ATOMS); rt->sweepAtoms(); } @@ -3708,21 +3799,21 @@ BeginSweepingZoneGroup(JSRuntime *rt) Debugger::sweepAll(&fop); { - gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_COMPARTMENTS); + gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_SWEEP_COMPARTMENTS); for (GCZoneGroupIter zone(rt); !zone.done(); zone.next()) { - gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_DISCARD_CODE); + gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_SWEEP_DISCARD_CODE); zone->discardJitCode(&fop); } bool releaseTypes = ReleaseObservedTypes(rt); for (GCCompartmentGroupIter c(rt); !c.done(); c.next()) { - gcstats::AutoSCC scc(rt->gcStats, rt->gcZoneGroupIndex); + gcstats::AutoSCC scc(rt->gc.stats, rt->gc.zoneGroupIndex); c->sweep(&fop, releaseTypes && !c->zone()->isPreservingCode()); } for (GCZoneGroupIter zone(rt); !zone.done(); zone.next()) { - gcstats::AutoSCC scc(rt->gcStats, rt->gcZoneGroupIndex); + gcstats::AutoSCC scc(rt->gc.stats, rt->gc.zoneGroupIndex); // If there is an OOM while sweeping types, the type information // will be deoptimized so that it is still correct (i.e. @@ -3751,38 +3842,38 @@ BeginSweepingZoneGroup(JSRuntime *rt) * Objects are finalized immediately but this may change in the future. 
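
The per-zone loops that follow queue arenas for finalization; the queue* functions earlier in this hunk show which kinds may leave the main thread. In summary (an illustrative enum; the real code drives this from the FinalizePhases tables):

    enum SweepMode {
        FinalizeNow,       // objects: finalized immediately, on this thread
        ForegroundQueue,   // scripts, lazy scripts, jit code
        BackgroundQueue    // strings, shapes, base shapes
    };
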
*/ for (GCZoneGroupIter zone(rt); !zone.done(); zone.next()) { - gcstats::AutoSCC scc(rt->gcStats, rt->gcZoneGroupIndex); + gcstats::AutoSCC scc(rt->gc.stats, rt->gc.zoneGroupIndex); zone->allocator.arenas.queueObjectsForSweep(&fop); } for (GCZoneGroupIter zone(rt); !zone.done(); zone.next()) { - gcstats::AutoSCC scc(rt->gcStats, rt->gcZoneGroupIndex); + gcstats::AutoSCC scc(rt->gc.stats, rt->gc.zoneGroupIndex); zone->allocator.arenas.queueStringsForSweep(&fop); } for (GCZoneGroupIter zone(rt); !zone.done(); zone.next()) { - gcstats::AutoSCC scc(rt->gcStats, rt->gcZoneGroupIndex); + gcstats::AutoSCC scc(rt->gc.stats, rt->gc.zoneGroupIndex); zone->allocator.arenas.queueScriptsForSweep(&fop); } #ifdef JS_ION for (GCZoneGroupIter zone(rt); !zone.done(); zone.next()) { - gcstats::AutoSCC scc(rt->gcStats, rt->gcZoneGroupIndex); + gcstats::AutoSCC scc(rt->gc.stats, rt->gc.zoneGroupIndex); zone->allocator.arenas.queueJitCodeForSweep(&fop); } #endif for (GCZoneGroupIter zone(rt); !zone.done(); zone.next()) { - gcstats::AutoSCC scc(rt->gcStats, rt->gcZoneGroupIndex); + gcstats::AutoSCC scc(rt->gc.stats, rt->gc.zoneGroupIndex); zone->allocator.arenas.queueShapesForSweep(&fop); zone->allocator.arenas.gcShapeArenasToSweep = zone->allocator.arenas.arenaListsToSweep[FINALIZE_SHAPE]; } - rt->gcSweepPhase = 0; - rt->gcSweepZone = rt->gcCurrentZoneGroup; - rt->gcSweepKindIndex = 0; + rt->gc.sweepPhase = 0; + rt->gc.sweepZone = rt->gc.currentZoneGroup; + rt->gc.sweepKindIndex = 0; { - gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_FINALIZE_END); - if (rt->gcFinalizeCallback) - rt->gcFinalizeCallback(&fop, JSFINALIZE_GROUP_END, !rt->gcIsFull /* unused */); + gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_FINALIZE_END); + if (rt->gc.finalizeCallback) + rt->gc.finalizeCallback(&fop, JSFINALIZE_GROUP_END, !rt->gc.isFull /* unused */); } } @@ -3796,8 +3887,8 @@ EndSweepingZoneGroup(JSRuntime *rt) } /* Reset the list of arenas marked as being allocated during sweep phase. */ - while (ArenaHeader *arena = rt->gcArenasAllocatedDuringSweep) { - rt->gcArenasAllocatedDuringSweep = arena->getNextAllocDuringSweep(); + while (ArenaHeader *arena = rt->gc.arenasAllocatedDuringSweep) { + rt->gc.arenasAllocatedDuringSweep = arena->getNextAllocDuringSweep(); arena->unsetAllocDuringSweep(); } } @@ -3808,19 +3899,19 @@ BeginSweepPhase(JSRuntime *rt, bool lastGC) /* * Sweep phase. * - * Finalize as we sweep, outside of rt->gcLock but with rt->isHeapBusy() + * Finalize as we sweep, outside of rt->gc.lock but with rt->isHeapBusy() * true so that any attempt to allocate a GC-thing from a finalizer will * fail, rather than nest badly and leave the unmarked newborn to be swept. */ - JS_ASSERT(!rt->gcAbortSweepAfterCurrentGroup); + JS_ASSERT(!rt->gc.abortSweepAfterCurrentGroup); ComputeNonIncrementalMarkingForValidation(rt); - gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP); + gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_SWEEP); #ifdef JS_THREADSAFE - rt->gcSweepOnBackgroundThread = !lastGC && rt->useHelperThreads(); + rt->gc.sweepOnBackgroundThread = !lastGC && rt->useHelperThreads(); #endif #ifdef DEBUG @@ -3853,15 +3944,15 @@ static bool DrainMarkStack(JSRuntime *rt, SliceBudget &sliceBudget, gcstats::Phase phase) { /* Run a marking slice and return whether the stack is now empty. 
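
DrainMarkStack is the yield point that keeps this phase incremental: marking proceeds until either the stack empties or the slice budget is exhausted. A self-contained sketch of the contract, with SliceBudget reduced to a plain work counter:

    struct SliceBudget {
        long remaining;                     // work units left in this slice
        bool isOverBudget() { return --remaining < 0; }
    };

    bool drainMarkStack(int &pendingCells, SliceBudget &budget) {
        while (pendingCells > 0) {
            if (budget.isOverBudget())
                return false;               // yield back to the mutator
            --pendingCells;                 // process one mark-stack entry
        }
        return true;                        // drained: marking is finished
    }

A false return is what lets SweepPhase below bail out early and resume in the next slice.
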
*/ - gcstats::AutoPhase ap(rt->gcStats, phase); - return rt->gcMarker.drainMarkStack(sliceBudget); + gcstats::AutoPhase ap(rt->gc.stats, phase); + return rt->gc.marker.drainMarkStack(sliceBudget); } static bool SweepPhase(JSRuntime *rt, SliceBudget &sliceBudget) { - gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP); - FreeOp fop(rt, rt->gcSweepOnBackgroundThread); + gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_SWEEP); + FreeOp fop(rt, rt->gc.sweepOnBackgroundThread); bool finished = DrainMarkStack(rt, sliceBudget, gcstats::PHASE_SWEEP_MARK); if (!finished) @@ -3869,31 +3960,31 @@ SweepPhase(JSRuntime *rt, SliceBudget &sliceBudget) for (;;) { /* Finalize foreground finalized things. */ - for (; rt->gcSweepPhase < FinalizePhaseCount ; ++rt->gcSweepPhase) { - gcstats::AutoPhase ap(rt->gcStats, FinalizePhaseStatsPhase[rt->gcSweepPhase]); + for (; rt->gc.sweepPhase < FinalizePhaseCount ; ++rt->gc.sweepPhase) { + gcstats::AutoPhase ap(rt->gc.stats, FinalizePhaseStatsPhase[rt->gc.sweepPhase]); - for (; rt->gcSweepZone; rt->gcSweepZone = rt->gcSweepZone->nextNodeInGroup()) { - Zone *zone = rt->gcSweepZone; + for (; rt->gc.sweepZone; rt->gc.sweepZone = rt->gc.sweepZone->nextNodeInGroup()) { + Zone *zone = rt->gc.sweepZone; - while (rt->gcSweepKindIndex < FinalizePhaseLength[rt->gcSweepPhase]) { - AllocKind kind = FinalizePhases[rt->gcSweepPhase][rt->gcSweepKindIndex]; + while (rt->gc.sweepKindIndex < FinalizePhaseLength[rt->gc.sweepPhase]) { + AllocKind kind = FinalizePhases[rt->gc.sweepPhase][rt->gc.sweepKindIndex]; if (!zone->allocator.arenas.foregroundFinalize(&fop, kind, sliceBudget)) return false; /* Yield to the mutator. */ - ++rt->gcSweepKindIndex; + ++rt->gc.sweepKindIndex; } - rt->gcSweepKindIndex = 0; + rt->gc.sweepKindIndex = 0; } - rt->gcSweepZone = rt->gcCurrentZoneGroup; + rt->gc.sweepZone = rt->gc.currentZoneGroup; } /* Remove dead shapes from the shape tree, but don't finalize them yet. */ { - gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_SHAPE); + gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_SWEEP_SHAPE); - for (; rt->gcSweepZone; rt->gcSweepZone = rt->gcSweepZone->nextNodeInGroup()) { - Zone *zone = rt->gcSweepZone; + for (; rt->gc.sweepZone; rt->gc.sweepZone = rt->gc.sweepZone->nextNodeInGroup()) { + Zone *zone = rt->gc.sweepZone; while (ArenaHeader *arena = zone->allocator.arenas.gcShapeArenasToSweep) { for (CellIterUnderGC i(arena); !i.done(); i.next()) { Shape *shape = i.get(); @@ -3911,7 +4002,7 @@ SweepPhase(JSRuntime *rt, SliceBudget &sliceBudget) EndSweepingZoneGroup(rt); GetNextZoneGroup(rt); - if (!rt->gcCurrentZoneGroup) + if (!rt->gc.currentZoneGroup) return true; /* We're finished. */ EndMarkingZoneGroup(rt); BeginSweepingZoneGroup(rt); @@ -3921,22 +4012,22 @@ SweepPhase(JSRuntime *rt, SliceBudget &sliceBudget) static void EndSweepPhase(JSRuntime *rt, JSGCInvocationKind gckind, bool lastGC) { - gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP); - FreeOp fop(rt, rt->gcSweepOnBackgroundThread); + gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_SWEEP); + FreeOp fop(rt, rt->gc.sweepOnBackgroundThread); - JS_ASSERT_IF(lastGC, !rt->gcSweepOnBackgroundThread); + JS_ASSERT_IF(lastGC, !rt->gc.sweepOnBackgroundThread); - JS_ASSERT(rt->gcMarker.isDrained()); - rt->gcMarker.stop(); + JS_ASSERT(rt->gc.marker.isDrained()); + rt->gc.marker.stop(); /* * Recalculate whether GC was full or not as this may have changed due to * newly created zones. Can only change from full to not full. 
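
The recomputation that follows can only demote a GC from full to partial, never the reverse, because a zone created mid-collection is not collecting. Reduced:

    #include <vector>

    bool stillFull(bool wasFull, const std::vector<bool> &zoneIsCollecting) {
        if (!wasFull)
            return false;
        for (bool collecting : zoneIsCollecting) {
            if (!collecting)
                return false;  // an uncollected zone: no longer a full GC
        }
        return true;
    }

The answer matters further down: gc.grayBitsValid is only set when the collection stayed full.
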
*/ - if (rt->gcIsFull) { + if (rt->gc.isFull) { for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) { if (!zone->isCollecting()) { - rt->gcIsFull = false; + rt->gc.isFull = false; break; } } @@ -3948,7 +4039,7 @@ EndSweepPhase(JSRuntime *rt, JSGCInvocationKind gckind, bool lastGC) * will no longer be collected. This is safe, although it may * prevent the cycle collector from collecting some dead objects. */ - if (rt->gcFoundBlackGrayEdges) { + if (rt->gc.foundBlackGrayEdges) { for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) { if (!zone->isCollecting()) zone->allocator.arenas.unmarkAll(); @@ -3956,7 +4047,7 @@ EndSweepPhase(JSRuntime *rt, JSGCInvocationKind gckind, bool lastGC) } { - gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_DESTROY); + gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_DESTROY); /* * Sweep script filenames after sweeping functions in the generic loop @@ -3964,7 +4055,7 @@ EndSweepPhase(JSRuntime *rt, JSGCInvocationKind gckind, bool lastGC) * script and calls rt->destroyScriptHook, the hook can still access the * script's filename. See bug 323267. */ - if (rt->gcIsFull) + if (rt->gc.isFull) SweepScriptData(rt); /* Clear out any small pools that we're hanging on to. */ @@ -3978,7 +4069,7 @@ EndSweepPhase(JSRuntime *rt, JSGCInvocationKind gckind, bool lastGC) if (!lastGC) SweepZones(&fop, lastGC); - if (!rt->gcSweepOnBackgroundThread) { + if (!rt->gc.sweepOnBackgroundThread) { /* * Destroy arenas after we finished the sweeping so finalizers can * safely use IsAboutToBeFinalized(). This is done on the @@ -3991,26 +4082,26 @@ EndSweepPhase(JSRuntime *rt, JSGCInvocationKind gckind, bool lastGC) } { - gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_FINALIZE_END); + gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_FINALIZE_END); - if (rt->gcFinalizeCallback) - rt->gcFinalizeCallback(&fop, JSFINALIZE_COLLECTION_END, !rt->gcIsFull); + if (rt->gc.finalizeCallback) + rt->gc.finalizeCallback(&fop, JSFINALIZE_COLLECTION_END, !rt->gc.isFull); /* If we finished a full GC, then the gray bits are correct. */ - if (rt->gcIsFull) - rt->gcGrayBitsValid = true; + if (rt->gc.isFull) + rt->gc.grayBitsValid = true; } /* Set up list of zones for sweeping of background things. */ - JS_ASSERT(!rt->gcSweepingZones); + JS_ASSERT(!rt->gc.sweepingZones); for (GCZonesIter zone(rt); !zone.done(); zone.next()) { - zone->gcNextGraphNode = rt->gcSweepingZones; - rt->gcSweepingZones = zone; + zone->gcNextGraphNode = rt->gc.sweepingZones; + rt->gc.sweepingZones = zone; } /* If not sweeping on background thread then we must do it here. 
*/ - if (!rt->gcSweepOnBackgroundThread) { - gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_DESTROY); + if (!rt->gc.sweepOnBackgroundThread) { + gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_DESTROY); SweepBackgroundThings(rt, false); @@ -4034,7 +4125,7 @@ EndSweepPhase(JSRuntime *rt, JSGCInvocationKind gckind, bool lastGC) for (unsigned i = 0 ; i < FINALIZE_LIMIT ; ++i) { JS_ASSERT_IF(!IsBackgroundFinalized(AllocKind(i)) || - !rt->gcSweepOnBackgroundThread, + !rt->gc.sweepOnBackgroundThread, !zone->allocator.arenas.arenaListsToSweep[i]); } #endif @@ -4054,7 +4145,7 @@ EndSweepPhase(JSRuntime *rt, JSGCInvocationKind gckind, bool lastGC) FinishMarkingValidation(rt); - rt->gcLastGCTime = PRMJ_Now(); + rt->gc.lastGCTime = PRMJ_Now(); } namespace { @@ -4079,13 +4170,13 @@ class AutoGCSession AutoTraceSession::AutoTraceSession(JSRuntime *rt, js::HeapState heapState) : lock(rt), runtime(rt), - prevState(rt->heapState) + prevState(rt->gc.heapState) { - JS_ASSERT(!rt->noGCOrAllocationCheck); + JS_ASSERT(!rt->gc.noGCOrAllocationCheck); JS_ASSERT(!rt->isHeapBusy()); JS_ASSERT(heapState != Idle); #ifdef JSGC_GENERATIONAL - JS_ASSERT_IF(heapState == MajorCollecting, rt->gcNursery.isEmpty()); + JS_ASSERT_IF(heapState == MajorCollecting, rt->gc.nursery.isEmpty()); #endif // Threads with an exclusive context can hit refillFreeList while holding @@ -4099,12 +4190,12 @@ AutoTraceSession::AutoTraceSession(JSRuntime *rt, js::HeapState heapState) // presence of exclusive threads, to avoid racing with refillFreeList. #ifdef JS_THREADSAFE AutoLockWorkerThreadState lock; - rt->heapState = heapState; + rt->gc.heapState = heapState; #else MOZ_CRASH(); #endif } else { - rt->heapState = heapState; + rt->gc.heapState = heapState; } } @@ -4115,7 +4206,7 @@ AutoTraceSession::~AutoTraceSession() if (runtime->exclusiveThreadsPresent()) { #ifdef JS_THREADSAFE AutoLockWorkerThreadState lock; - runtime->heapState = prevState; + runtime->gc.heapState = prevState; // Notify any worker threads waiting for the trace session to end. WorkerThreadState().notifyAll(GlobalWorkerThreadState::PRODUCER); @@ -4123,7 +4214,7 @@ AutoTraceSession::~AutoTraceSession() MOZ_CRASH(); #endif } else { - runtime->heapState = prevState; + runtime->gc.heapState = prevState; } } @@ -4132,10 +4223,10 @@ AutoGCSession::AutoGCSession(JSRuntime *rt) session(rt, MajorCollecting), canceled(false) { - runtime->gcIsNeeded = false; - runtime->gcInterFrameGC = true; + runtime->gc.isNeeded = false; + runtime->gc.interFrameGC = true; - runtime->gcNumber++; + runtime->gc.number++; // It's ok if threads other than the main thread have suppressGC set, as // they are operating on zones which will not be collected from here. @@ -4148,14 +4239,14 @@ AutoGCSession::~AutoGCSession() return; #ifndef JS_MORE_DETERMINISTIC - runtime->gcNextFullGCTime = PRMJ_Now() + GC_IDLE_FULL_SPAN; + runtime->gc.nextFullGCTime = PRMJ_Now() + GC_IDLE_FULL_SPAN; #endif - runtime->gcChunkAllocationSinceLastGC = false; + runtime->gc.chunkAllocationSinceLastGC = false; #ifdef JS_GC_ZEAL /* Keeping these around after a GC is dangerous. 
*/ - runtime->gcSelectedForMarking.clearAndFree(); + runtime->gc.selectedForMarking.clearAndFree(); #endif /* Clear gcMallocBytes for all compartments */ @@ -4206,7 +4297,7 @@ IncrementalCollectSlice(JSRuntime *rt, static void ResetIncrementalGC(JSRuntime *rt, const char *reason) { - switch (rt->gcIncrementalState) { + switch (rt->gc.incrementalState) { case NO_INCREMENTAL: return; @@ -4214,8 +4305,8 @@ ResetIncrementalGC(JSRuntime *rt, const char *reason) /* Cancel any ongoing marking. */ AutoCopyFreeListToArenasForGC copy(rt); - rt->gcMarker.reset(); - rt->gcMarker.stop(); + rt->gc.marker.reset(); + rt->gc.marker.stop(); for (GCCompartmentsIter c(rt); !c.done(); c.next()) { ArrayBufferObject::resetArrayBufferList(c); @@ -4230,26 +4321,26 @@ ResetIncrementalGC(JSRuntime *rt, const char *reason) rt->setNeedsBarrier(false); AssertNeedsBarrierFlagsConsistent(rt); - rt->gcIncrementalState = NO_INCREMENTAL; + rt->gc.incrementalState = NO_INCREMENTAL; - JS_ASSERT(!rt->gcStrictCompartmentChecking); + JS_ASSERT(!rt->gc.strictCompartmentChecking); break; } case SWEEP: - rt->gcMarker.reset(); + rt->gc.marker.reset(); for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) zone->scheduledForDestruction = false; /* Finish sweeping the current zone group, then abort. */ - rt->gcAbortSweepAfterCurrentGroup = true; + rt->gc.abortSweepAfterCurrentGroup = true; IncrementalCollectSlice(rt, SliceBudget::Unlimited, JS::gcreason::RESET, GC_NORMAL); { - gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_WAIT_BACKGROUND_THREAD); - rt->gcHelperThread.waitBackgroundSweepOrAllocEnd(); + gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_WAIT_BACKGROUND_THREAD); + rt->gc.helperThread.waitBackgroundSweepOrAllocEnd(); } break; @@ -4257,7 +4348,7 @@ ResetIncrementalGC(JSRuntime *rt, const char *reason) MOZ_ASSUME_UNREACHABLE("Invalid incremental GC state"); } - rt->gcStats.reset(reason); + rt->gc.stats.reset(reason); #ifdef DEBUG for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) @@ -4336,10 +4427,10 @@ PushZealSelectedObjects(JSRuntime *rt) { #ifdef JS_GC_ZEAL /* Push selected objects onto the mark stack and clear the list. 
*/ - for (JSObject **obj = rt->gcSelectedForMarking.begin(); - obj != rt->gcSelectedForMarking.end(); obj++) + for (JSObject **obj = rt->gc.selectedForMarking.begin(); + obj != rt->gc.selectedForMarking.end(); obj++) { - MarkObjectUnbarriered(&rt->gcMarker, obj, "selected obj"); + MarkObjectUnbarriered(&rt->gc.marker, obj, "selected obj"); } #endif } @@ -4357,7 +4448,7 @@ IncrementalCollectSlice(JSRuntime *rt, bool lastGC = (reason == JS::gcreason::DESTROY_RUNTIME); - gc::State initialState = rt->gcIncrementalState; + gc::State initialState = rt->gc.incrementalState; int zeal = 0; #ifdef JS_GC_ZEAL @@ -4371,8 +4462,8 @@ IncrementalCollectSlice(JSRuntime *rt, } #endif - JS_ASSERT_IF(rt->gcIncrementalState != NO_INCREMENTAL, rt->gcIsIncremental); - rt->gcIsIncremental = budget != SliceBudget::Unlimited; + JS_ASSERT_IF(rt->gc.incrementalState != NO_INCREMENTAL, rt->gc.isIncremental); + rt->gc.isIncremental = budget != SliceBudget::Unlimited; if (zeal == ZealIncrementalRootsThenFinish || zeal == ZealIncrementalMarkAllThenFinish) { /* @@ -4384,46 +4475,46 @@ IncrementalCollectSlice(JSRuntime *rt, SliceBudget sliceBudget(budget); - if (rt->gcIncrementalState == NO_INCREMENTAL) { - rt->gcIncrementalState = MARK_ROOTS; - rt->gcLastMarkSlice = false; + if (rt->gc.incrementalState == NO_INCREMENTAL) { + rt->gc.incrementalState = MARK_ROOTS; + rt->gc.lastMarkSlice = false; } - if (rt->gcIncrementalState == MARK) - AutoGCRooter::traceAllWrappers(&rt->gcMarker); + if (rt->gc.incrementalState == MARK) + AutoGCRooter::traceAllWrappers(&rt->gc.marker); - switch (rt->gcIncrementalState) { + switch (rt->gc.incrementalState) { case MARK_ROOTS: if (!BeginMarkPhase(rt)) { - rt->gcIncrementalState = NO_INCREMENTAL; + rt->gc.incrementalState = NO_INCREMENTAL; return; } if (!lastGC) PushZealSelectedObjects(rt); - rt->gcIncrementalState = MARK; + rt->gc.incrementalState = MARK; - if (rt->gcIsIncremental && zeal == ZealIncrementalRootsThenFinish) + if (rt->gc.isIncremental && zeal == ZealIncrementalRootsThenFinish) break; /* fall through */ case MARK: { /* If we needed delayed marking for gray roots, then collect until done. */ - if (!rt->gcMarker.hasBufferedGrayRoots()) { + if (!rt->gc.marker.hasBufferedGrayRoots()) { sliceBudget.reset(); - rt->gcIsIncremental = false; + rt->gc.isIncremental = false; } bool finished = DrainMarkStack(rt, sliceBudget, gcstats::PHASE_MARK); if (!finished) break; - JS_ASSERT(rt->gcMarker.isDrained()); + JS_ASSERT(rt->gc.marker.isDrained()); - if (!rt->gcLastMarkSlice && rt->gcIsIncremental && + if (!rt->gc.lastMarkSlice && rt->gc.isIncremental && ((initialState == MARK && zeal != ZealIncrementalRootsThenFinish) || zeal == ZealIncrementalMarkAllThenFinish)) { @@ -4432,11 +4523,11 @@ IncrementalCollectSlice(JSRuntime *rt, * slice. We will need to mark anything new on the stack * when we resume, so we stay in MARK state. */ - rt->gcLastMarkSlice = true; + rt->gc.lastMarkSlice = true; break; } - rt->gcIncrementalState = SWEEP; + rt->gc.incrementalState = SWEEP; /* * This runs to completion, but we don't continue if the budget is * @@ -4450,7 +4541,7 @@ IncrementalCollectSlice(JSRuntime *rt, * Always yield here when running in incremental multi-slice zeal * mode, so RunDebugGC can reset the slice budget.
*/ - if (rt->gcIsIncremental && zeal == ZealIncrementalMultipleSlices) + if (rt->gc.isIncremental && zeal == ZealIncrementalMultipleSlices) break; /* fall through */ @@ -4463,10 +4554,10 @@ IncrementalCollectSlice(JSRuntime *rt, EndSweepPhase(rt, gckind, lastGC); - if (rt->gcSweepOnBackgroundThread) - rt->gcHelperThread.startBackgroundSweep(gckind == GC_SHRINK); + if (rt->gc.sweepOnBackgroundThread) + rt->gc.helperThread.startBackgroundSweep(gckind == GC_SHRINK); - rt->gcIncrementalState = NO_INCREMENTAL; + rt->gc.incrementalState = NO_INCREMENTAL; break; } @@ -4483,7 +4574,7 @@ gc::IsIncrementalGCSafe(JSRuntime *rt) if (rt->keepAtoms()) return IncrementalSafety::Unsafe("keepAtoms set"); - if (!rt->gcIncrementalEnabled) + if (!rt->gc.incrementalEnabled) return IncrementalSafety::Unsafe("incremental permanently disabled"); return IncrementalSafety::Safe(); @@ -4496,30 +4587,30 @@ BudgetIncrementalGC(JSRuntime *rt, int64_t *budget) if (!safe) { ResetIncrementalGC(rt, safe.reason()); *budget = SliceBudget::Unlimited; - rt->gcStats.nonincremental(safe.reason()); + rt->gc.stats.nonincremental(safe.reason()); return; } if (rt->gcMode() != JSGC_MODE_INCREMENTAL) { ResetIncrementalGC(rt, "GC mode change"); *budget = SliceBudget::Unlimited; - rt->gcStats.nonincremental("GC mode"); + rt->gc.stats.nonincremental("GC mode"); return; } if (rt->isTooMuchMalloc()) { *budget = SliceBudget::Unlimited; - rt->gcStats.nonincremental("malloc bytes trigger"); + rt->gc.stats.nonincremental("malloc bytes trigger"); } bool reset = false; for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) { if (zone->gcBytes >= zone->gcTriggerBytes) { *budget = SliceBudget::Unlimited; - rt->gcStats.nonincremental("allocation trigger"); + rt->gc.stats.nonincremental("allocation trigger"); } - if (rt->gcIncrementalState != NO_INCREMENTAL && + if (rt->gc.incrementalState != NO_INCREMENTAL && zone->isGCScheduled() != zone->wasGCStarted()) { reset = true; @@ -4527,7 +4618,7 @@ BudgetIncrementalGC(JSRuntime *rt, int64_t *budget) if (zone->isTooMuchMalloc()) { *budget = SliceBudget::Unlimited; - rt->gcStats.nonincremental("malloc bytes trigger"); + rt->gc.stats.nonincremental("malloc bytes trigger"); } } @@ -4557,23 +4648,23 @@ GCCycle(JSRuntime *rt, bool incremental, int64_t budget, * when manipulating the chunks during the GC. */ { - gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_WAIT_BACKGROUND_THREAD); - rt->gcHelperThread.waitBackgroundSweepOrAllocEnd(); + gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_WAIT_BACKGROUND_THREAD); + rt->gc.helperThread.waitBackgroundSweepOrAllocEnd(); } - State prevState = rt->gcIncrementalState; + State prevState = rt->gc.incrementalState; if (!incremental) { /* If non-incremental GC was requested, reset incremental GC. */ ResetIncrementalGC(rt, "requested"); - rt->gcStats.nonincremental("requested"); + rt->gc.stats.nonincremental("requested"); budget = SliceBudget::Unlimited; } else { BudgetIncrementalGC(rt, &budget); } /* The GC was reset, so we need a do-over. 
*/ - if (prevState != NO_INCREMENTAL && rt->gcIncrementalState == NO_INCREMENTAL) { + if (prevState != NO_INCREMENTAL && rt->gc.incrementalState == NO_INCREMENTAL) { gcsession.cancel(); return true; } @@ -4620,12 +4711,12 @@ class AutoDisableStoreBuffer public: AutoDisableStoreBuffer(JSRuntime *rt) : runtime(rt) { - prior = rt->gcStoreBuffer.isEnabled(); - rt->gcStoreBuffer.disable(); + prior = rt->gc.storeBuffer.isEnabled(); + rt->gc.storeBuffer.disable(); } ~AutoDisableStoreBuffer() { if (prior) - runtime->gcStoreBuffer.enable(); + runtime->gc.storeBuffer.enable(); } }; #else @@ -4656,7 +4747,7 @@ Collect(JSRuntime *rt, bool incremental, int64_t budget, AutoTraceLog logGC(logger, TraceLogger::GC); #ifdef JS_GC_ZEAL - if (rt->gcDeterministicOnly && !IsDeterministicGCReason(reason)) + if (rt->gc.deterministicOnly && !IsDeterministicGCReason(reason)) return; #endif @@ -4675,7 +4766,7 @@ Collect(JSRuntime *rt, bool incremental, int64_t budget, zone->scheduleGC(); /* This is a heuristic to avoid resets. */ - if (rt->gcIncrementalState != NO_INCREMENTAL && zone->needsBarrier()) + if (rt->gc.incrementalState != NO_INCREMENTAL && zone->needsBarrier()) zone->scheduleGC(); zoneCount++; @@ -4686,7 +4777,7 @@ Collect(JSRuntime *rt, bool incremental, int64_t budget, for (CompartmentsIter c(rt, WithAtoms); !c.done(); c.next()) compartmentCount++; - rt->gcShouldCleanUpEverything = ShouldCleanUpEverything(rt, reason, gckind); + rt->gc.shouldCleanUpEverything = ShouldCleanUpEverything(rt, reason, gckind); bool repeat = false; do { @@ -4698,29 +4789,29 @@ Collect(JSRuntime *rt, bool incremental, int64_t budget, */ AutoDisableStoreBuffer adsb(rt); - gcstats::AutoGCSlice agc(rt->gcStats, collectedCount, zoneCount, compartmentCount, reason); + gcstats::AutoGCSlice agc(rt->gc.stats, collectedCount, zoneCount, compartmentCount, reason); /* * Let the API user decide to defer a GC if it wants to (unless this * is the last context). Invoke the callback regardless. */ - if (rt->gcIncrementalState == NO_INCREMENTAL) { - gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_GC_BEGIN); - if (JSGCCallback callback = rt->gcCallback) - callback(rt, JSGC_BEGIN, rt->gcCallbackData); + if (rt->gc.incrementalState == NO_INCREMENTAL) { + gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_GC_BEGIN); + if (JSGCCallback callback = rt->gc.callback) + callback(rt, JSGC_BEGIN, rt->gc.callbackData); } - rt->gcPoke = false; + rt->gc.poke = false; bool wasReset = GCCycle(rt, incremental, budget, gckind, reason); - if (rt->gcIncrementalState == NO_INCREMENTAL) { - gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_GC_END); - if (JSGCCallback callback = rt->gcCallback) - callback(rt, JSGC_END, rt->gcCallbackData); + if (rt->gc.incrementalState == NO_INCREMENTAL) { + gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_GC_END); + if (JSGCCallback callback = rt->gc.callback) + callback(rt, JSGC_END, rt->gc.callbackData); } /* Need to re-schedule all zones for GC. */ - if (rt->gcPoke && rt->gcShouldCleanUpEverything) + if (rt->gc.poke && rt->gc.shouldCleanUpEverything) JS::PrepareForFullGC(rt); /* @@ -4729,10 +4820,10 @@ Collect(JSRuntime *rt, bool incremental, int64_t budget, * case) until we can be sure that no additional garbage is created * (which typically happens if roots are dropped during finalizers). 
*/ - repeat = (rt->gcPoke && rt->gcShouldCleanUpEverything) || wasReset; + repeat = (rt->gc.poke && rt->gc.shouldCleanUpEverything) || wasReset; } while (repeat); - if (rt->gcIncrementalState == NO_INCREMENTAL) { + if (rt->gc.incrementalState == NO_INCREMENTAL) { #ifdef JS_THREADSAFE EnqueuePendingParseTasksAfterGC(rt); #endif @@ -4751,10 +4842,10 @@ js::GCSlice(JSRuntime *rt, JSGCInvocationKind gckind, JS::gcreason::Reason reaso int64_t sliceBudget; if (millis) sliceBudget = SliceBudget::TimeBudget(millis); - else if (rt->gcHighFrequencyGC && rt->gcDynamicMarkSlice) - sliceBudget = rt->gcSliceBudget * IGC_MARK_SLICE_MULTIPLIER; + else if (rt->gc.highFrequencyGC && rt->gc.dynamicMarkSlice) + sliceBudget = rt->gc.sliceBudget * IGC_MARK_SLICE_MULTIPLIER; else - sliceBudget = rt->gcSliceBudget; + sliceBudget = rt->gc.sliceBudget; Collect(rt, true, sliceBudget, gckind, reason); } @@ -4805,7 +4896,7 @@ JS::ShrinkGCBuffers(JSRuntime *rt) if (!rt->useHelperThreads()) ExpireChunksAndArenas(rt, true); else - rt->gcHelperThread.startBackgroundShrink(); + rt->gc.helperThread.startBackgroundShrink(); } void @@ -4814,8 +4905,8 @@ js::MinorGC(JSRuntime *rt, JS::gcreason::Reason reason) #ifdef JSGC_GENERATIONAL TraceLogger *logger = TraceLoggerForMainThread(rt); AutoTraceLog logMinorGC(logger, TraceLogger::MinorGC); - rt->gcNursery.collect(rt, reason, nullptr); - JS_ASSERT_IF(!rt->mainThread.suppressGC, rt->gcNursery.isEmpty()); + rt->gc.nursery.collect(rt, reason, nullptr); + JS_ASSERT_IF(!rt->mainThread.suppressGC, rt->gc.nursery.isEmpty()); #endif } @@ -4830,12 +4921,12 @@ js::MinorGC(JSContext *cx, JS::gcreason::Reason reason) Nursery::TypeObjectList pretenureTypes; JSRuntime *rt = cx->runtime(); - rt->gcNursery.collect(cx->runtime(), reason, &pretenureTypes); + rt->gc.nursery.collect(cx->runtime(), reason, &pretenureTypes); for (size_t i = 0; i < pretenureTypes.length(); i++) { if (pretenureTypes[i]->canPreTenure()) pretenureTypes[i]->setShouldPreTenure(cx); } - JS_ASSERT_IF(!rt->mainThread.suppressGC, rt->gcNursery.isEmpty()); + JS_ASSERT_IF(!rt->mainThread.suppressGC, rt->gc.nursery.isEmpty()); #endif } @@ -4849,18 +4940,18 @@ js::gc::GCIfNeeded(JSContext *cx) * In case of store buffer overflow perform minor GC first so that the * correct reason is seen in the logs. */ - if (rt->gcStoreBuffer.isAboutToOverflow()) + if (rt->gc.storeBuffer.isAboutToOverflow()) MinorGC(cx, JS::gcreason::FULL_STORE_BUFFER); #endif - if (rt->gcIsNeeded) - GCSlice(rt, GC_NORMAL, rt->gcTriggerReason); + if (rt->gc.isNeeded) + GCSlice(rt, GC_NORMAL, rt->gc.triggerReason); } void js::gc::FinishBackgroundFinalize(JSRuntime *rt) { - rt->gcHelperThread.waitBackgroundSweepEnd(); + rt->gc.helperThread.waitBackgroundSweepEnd(); } AutoFinishGC::AutoFinishGC(JSRuntime *rt) @@ -4916,7 +5007,7 @@ js::NewCompartment(JSContext *cx, Zone *zone, JSPrincipals *principals, return nullptr; } - if (zoneHolder && !rt->zones.append(zone)) { + if (zoneHolder && !rt->gc.zones.append(zone)) { js_ReportOutOfMemory(cx); return nullptr; } @@ -4996,7 +5087,7 @@ gc::RunDebugGC(JSContext *cx) type == ZealIncrementalMarkAllThenFinish || type == ZealIncrementalMultipleSlices) { - js::gc::State initialState = rt->gcIncrementalState; + js::gc::State initialState = rt->gc.incrementalState; int64_t budget; if (type == ZealIncrementalMultipleSlices) { /* @@ -5005,10 +5096,10 @@ gc::RunDebugGC(JSContext *cx) * completion. 
*/ if (initialState == NO_INCREMENTAL) - rt->gcIncrementalLimit = rt->gcZealFrequency / 2; + rt->gc.incrementalLimit = rt->gc.zealFrequency / 2; else - rt->gcIncrementalLimit *= 2; - budget = SliceBudget::WorkBudget(rt->gcIncrementalLimit); + rt->gc.incrementalLimit *= 2; + budget = SliceBudget::WorkBudget(rt->gc.incrementalLimit); } else { // This triggers incremental GC but is actually ignored by IncrementalMarkSlice. budget = SliceBudget::WorkBudget(1); @@ -5021,9 +5112,9 @@ gc::RunDebugGC(JSContext *cx) * phase. */ if (type == ZealIncrementalMultipleSlices && - initialState == MARK && rt->gcIncrementalState == SWEEP) + initialState == MARK && rt->gc.incrementalState == SWEEP) { - rt->gcIncrementalLimit = rt->gcZealFrequency / 2; + rt->gc.incrementalLimit = rt->gc.zealFrequency / 2; } } else { Collect(rt, false, SliceBudget::Unlimited, GC_NORMAL, JS::gcreason::DEBUG_GC); @@ -5037,7 +5128,7 @@ gc::SetDeterministicGC(JSContext *cx, bool enabled) { #ifdef JS_GC_ZEAL JSRuntime *rt = cx->runtime(); - rt->gcDeterministicOnly = enabled; + rt->gc.deterministicOnly = enabled; #endif } @@ -5045,14 +5136,14 @@ void gc::SetValidateGC(JSContext *cx, bool enabled) { JSRuntime *rt = cx->runtime(); - rt->gcValidate = enabled; + rt->gc.validate = enabled; } void gc::SetFullCompartmentChecks(JSContext *cx, bool enabled) { JSRuntime *rt = cx->runtime(); - rt->gcFullCompartmentChecks = enabled; + rt->gc.fullCompartmentChecks = enabled; } #ifdef DEBUG @@ -5140,15 +5231,15 @@ static void ReleaseScriptCounts(FreeOp *fop) { JSRuntime *rt = fop->runtime(); - JS_ASSERT(rt->scriptAndCountsVector); + JS_ASSERT(rt->gc.scriptAndCountsVector); - ScriptAndCountsVector &vec = *rt->scriptAndCountsVector; + ScriptAndCountsVector &vec = *rt->gc.scriptAndCountsVector; for (size_t i = 0; i < vec.length(); i++) vec[i].scriptCounts.destroy(fop); - fop->delete_(rt->scriptAndCountsVector); - rt->scriptAndCountsVector = nullptr; + fop->delete_(rt->gc.scriptAndCountsVector); + rt->gc.scriptAndCountsVector = nullptr; } JS_FRIEND_API(void) @@ -5159,7 +5250,7 @@ js::StartPCCountProfiling(JSContext *cx) if (rt->profilingScripts) return; - if (rt->scriptAndCountsVector) + if (rt->gc.scriptAndCountsVector) ReleaseScriptCounts(rt->defaultFreeOp()); ReleaseAllJITCode(rt->defaultFreeOp()); @@ -5174,7 +5265,7 @@ js::StopPCCountProfiling(JSContext *cx) if (!rt->profilingScripts) return; - JS_ASSERT(!rt->scriptAndCountsVector); + JS_ASSERT(!rt->gc.scriptAndCountsVector); ReleaseAllJITCode(rt->defaultFreeOp()); @@ -5196,7 +5287,7 @@ js::StopPCCountProfiling(JSContext *cx) } rt->profilingScripts = false; - rt->scriptAndCountsVector = vec; + rt->gc.scriptAndCountsVector = vec; } JS_FRIEND_API(void) @@ -5204,7 +5295,7 @@ js::PurgePCCounts(JSContext *cx) { JSRuntime *rt = cx->runtime(); - if (!rt->scriptAndCountsVector) + if (!rt->gc.scriptAndCountsVector) return; JS_ASSERT(!rt->profilingScripts); @@ -5299,27 +5390,27 @@ ArenaLists::containsArena(JSRuntime *rt, ArenaHeader *needle) AutoMaybeTouchDeadZones::AutoMaybeTouchDeadZones(JSContext *cx) : runtime(cx->runtime()), - markCount(runtime->gcObjectsMarkedInDeadZones), + markCount(runtime->gc.objectsMarkedInDeadZones), inIncremental(JS::IsIncrementalGCInProgress(runtime)), - manipulatingDeadZones(runtime->gcManipulatingDeadZones) + manipulatingDeadZones(runtime->gc.manipulatingDeadZones) { - runtime->gcManipulatingDeadZones = true; + runtime->gc.manipulatingDeadZones = true; } AutoMaybeTouchDeadZones::AutoMaybeTouchDeadZones(JSObject *obj) : runtime(obj->compartment()->runtimeFromMainThread()), - 
markCount(runtime->gcObjectsMarkedInDeadZones), + markCount(runtime->gc.objectsMarkedInDeadZones), inIncremental(JS::IsIncrementalGCInProgress(runtime)), - manipulatingDeadZones(runtime->gcManipulatingDeadZones) + manipulatingDeadZones(runtime->gc.manipulatingDeadZones) { - runtime->gcManipulatingDeadZones = true; + runtime->gc.manipulatingDeadZones = true; } AutoMaybeTouchDeadZones::~AutoMaybeTouchDeadZones() { - runtime->gcManipulatingDeadZones = manipulatingDeadZones; + runtime->gc.manipulatingDeadZones = manipulatingDeadZones; - if (inIncremental && runtime->gcObjectsMarkedInDeadZones != markCount) { + if (inIncremental && runtime->gc.objectsMarkedInDeadZones != markCount) { JS::PrepareForFullGC(runtime); js::GC(runtime, GC_NORMAL, JS::gcreason::TRANSPLANT); } @@ -5352,7 +5443,7 @@ js::UninlinedIsInsideNursery(JSRuntime *rt, const void *thing) #ifdef DEBUG AutoDisableProxyCheck::AutoDisableProxyCheck(JSRuntime *rt MOZ_GUARD_OBJECT_NOTIFIER_PARAM_IN_IMPL) - : count(rt->gcDisableStrictProxyCheckingCount) + : count(rt->gc.disableStrictProxyCheckingCount) { MOZ_GUARD_OBJECT_NOTIFIER_INIT; count++; @@ -5371,7 +5462,7 @@ JS::GetGCNumber() JSRuntime *rt = js::TlsPerThreadData.get()->runtimeFromMainThread(); if (!rt) return 0; - return rt->gcNumber; + return rt->gc.number; } JS::AutoAssertNoGC::AutoAssertNoGC() @@ -5387,18 +5478,18 @@ JS::AutoAssertNoGC::AutoAssertNoGC() */ runtime = data->runtimeIfOnOwnerThread(); if (runtime) - gcNumber = runtime->gcNumber; + gcNumber = runtime->gc.number; } } JS::AutoAssertNoGC::AutoAssertNoGC(JSRuntime *rt) - : runtime(rt), gcNumber(rt->gcNumber) + : runtime(rt), gcNumber(rt->gc.number) { } JS::AutoAssertNoGC::~AutoAssertNoGC() { if (runtime) - MOZ_ASSERT(gcNumber == runtime->gcNumber, "GC ran inside an AutoAssertNoGC scope."); + MOZ_ASSERT(gcNumber == runtime->gc.number, "GC ran inside an AutoAssertNoGC scope."); } #endif diff --git a/js/src/jsgc.h b/js/src/jsgc.h index 34f4b424883..bae8b424ccc 100644 --- a/js/src/jsgc.h +++ b/js/src/jsgc.h @@ -50,6 +50,20 @@ enum HeapState { MinorCollecting // doing a GC of the minor heap (nursery) }; +struct ExtraTracer { + JSTraceDataOp op; + void *data; + + ExtraTracer() + : op(nullptr), data(nullptr) + {} + ExtraTracer(JSTraceDataOp op, void *data) + : op(op), data(data) + {} +}; + +typedef Vector ScriptAndCountsVector; + namespace jit { class JitCode; } diff --git a/js/src/jsgcinlines.h b/js/src/jsgcinlines.h index d24f429a0fb..dcaead4a0f5 100644 --- a/js/src/jsgcinlines.h +++ b/js/src/jsgcinlines.h @@ -27,8 +27,8 @@ struct AutoMarkInDeadZone scheduled(zone->scheduledForDestruction) { JSRuntime *rt = zone->runtimeFromMainThread(); - if (rt->gcManipulatingDeadZones && zone->scheduledForDestruction) { - rt->gcObjectsMarkedInDeadZones++; + if (rt->gc.manipulatingDeadZones && zone->scheduledForDestruction) { + rt->gc.objectsMarkedInDeadZones++; zone->scheduledForDestruction = false; } } @@ -106,12 +106,12 @@ GetGCThingTraceKind(const void *thing) static inline void GCPoke(JSRuntime *rt) { - rt->gcPoke = true; + rt->gc.poke = true; #ifdef JS_GC_ZEAL /* Schedule a GC to happen "soon" after a GC poke. 
*/ if (rt->gcZeal() == js::gc::ZealPokeValue) - rt->gcNextScheduled = 1; + rt->gc.nextScheduled = 1; #endif } @@ -245,7 +245,7 @@ class CellIterUnderGC : public CellIterImpl public: CellIterUnderGC(JS::Zone *zone, AllocKind kind) { #ifdef JSGC_GENERATIONAL - JS_ASSERT(zone->runtimeFromAnyThread()->gcNursery.isEmpty()); + JS_ASSERT(zone->runtimeFromAnyThread()->gc.nursery.isEmpty()); #endif JS_ASSERT(zone->runtimeFromAnyThread()->isHeapBusy()); init(zone, kind); @@ -284,7 +284,7 @@ class CellIter : public CellIterImpl #ifdef JSGC_GENERATIONAL /* Evict the nursery before iterating so we can see all things. */ JSRuntime *rt = zone->runtimeFromMainThread(); - if (!rt->gcNursery.isEmpty()) + if (!rt->gc.nursery.isEmpty()) MinorGC(rt, JS::gcreason::EVICT_NURSERY); #endif @@ -297,7 +297,7 @@ class CellIter : public CellIterImpl #ifdef DEBUG /* Assert that no GCs can occur while a CellIter is live. */ - counter = &zone->runtimeFromAnyThread()->noGCOrAllocationCheck; + counter = &zone->runtimeFromAnyThread()->gc.noGCOrAllocationCheck; ++*counter; #endif @@ -353,7 +353,7 @@ class GCZoneGroupIter { public: GCZoneGroupIter(JSRuntime *rt) { JS_ASSERT(rt->isHeapBusy()); - current = rt->gcCurrentZoneGroup; + current = rt->gc.currentZoneGroup; } bool done() const { return !current; } @@ -387,7 +387,7 @@ TryNewNurseryObject(ThreadSafeContext *cxArg, size_t thingSize, size_t nDynamicS JS_ASSERT(!IsAtomsCompartment(cx->compartment())); JSRuntime *rt = cx->runtime(); - Nursery &nursery = rt->gcNursery; + Nursery &nursery = rt->gc.nursery; JSObject *obj = nursery.allocateObject(cx, thingSize, nDynamicSlots); if (obj) return obj; @@ -427,7 +427,7 @@ CheckAllocatorState(ThreadSafeContext *cx, AllocKind kind) kind == FINALIZE_FAT_INLINE_STRING || kind == FINALIZE_JITCODE); JS_ASSERT(!rt->isHeapBusy()); - JS_ASSERT(!rt->noGCOrAllocationCheck); + JS_ASSERT(!rt->gc.noGCOrAllocationCheck); #endif // For testing out of memory conditions diff --git a/js/src/jsinfer.cpp b/js/src/jsinfer.cpp index 367738bff32..817a98a4e77 100644 --- a/js/src/jsinfer.cpp +++ b/js/src/jsinfer.cpp @@ -3917,7 +3917,7 @@ ExclusiveContext::getNewType(const Class *clasp, TaggedProto proto, JSFunction * #ifdef JSGC_GENERATIONAL if (proto.isObject() && hasNursery() && nursery().isInside(proto.toObject())) { - asJSContext()->runtime()->gcStoreBuffer.putGeneric( + asJSContext()->runtime()->gc.storeBuffer.putGeneric( NewTypeObjectsSetRef(&newTypeObjects, clasp, proto.toObject(), fun)); } #endif @@ -4255,7 +4255,7 @@ TypeCompartment::sweep(FreeOp *fop) void JSCompartment::sweepNewTypeObjectTable(TypeObjectWithNewScriptSet &table) { - gcstats::AutoPhase ap(runtimeFromMainThread()->gcStats, + gcstats::AutoPhase ap(runtimeFromMainThread()->gc.stats, gcstats::PHASE_SWEEP_TABLES_TYPE_OBJECT); JS_ASSERT(zone()->isGCSweeping()); @@ -4400,7 +4400,7 @@ TypeZone::sweep(FreeOp *fop, bool releaseTypes, bool *oom) #endif { - gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_DISCARD_TI); + gcstats::AutoPhase ap2(rt->gc.stats, gcstats::PHASE_DISCARD_TI); for (CellIterUnderGC i(zone(), FINALIZE_SCRIPT); !i.done(); i.next()) { JSScript *script = i.get(); @@ -4442,7 +4442,7 @@ TypeZone::sweep(FreeOp *fop, bool releaseTypes, bool *oom) } { - gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_SWEEP_TYPES); + gcstats::AutoPhase ap2(rt->gc.stats, gcstats::PHASE_SWEEP_TYPES); for (gc::CellIterUnderGC iter(zone(), gc::FINALIZE_TYPE_OBJECT); !iter.done(); iter.next()) @@ -4470,7 +4470,7 @@ TypeZone::sweep(FreeOp *fop, bool releaseTypes, bool *oom) } { - gcstats::AutoPhase 
ap2(rt->gcStats, gcstats::PHASE_FREE_TI_ARENA); + gcstats::AutoPhase ap2(rt->gc.stats, gcstats::PHASE_FREE_TI_ARENA); rt->freeLifoAlloc.transferFrom(&oldAlloc); } } diff --git a/js/src/jsiter.cpp b/js/src/jsiter.cpp index 4d046f54e90..e3ce922a955 100644 --- a/js/src/jsiter.cpp +++ b/js/src/jsiter.cpp @@ -1503,7 +1503,7 @@ static void GeneratorWriteBarrierPost(JSContext *cx, JSGenerator *gen) { #ifdef JSGC_GENERATIONAL - cx->runtime()->gcStoreBuffer.putWholeCell(gen->obj); + cx->runtime()->gc.storeBuffer.putWholeCell(gen->obj); #endif } diff --git a/js/src/jsobj.cpp b/js/src/jsobj.cpp index 1e4911c4742..35c2ffba164 100644 --- a/js/src/jsobj.cpp +++ b/js/src/jsobj.cpp @@ -1290,7 +1290,7 @@ NewObject(ExclusiveContext *cx, types::TypeObject *type_, JSObject *parent, gc:: if (!cx->shouldBeJSContext()) return nullptr; JSRuntime *rt = cx->asJSContext()->runtime(); - rt->gcIncrementalEnabled = false; + rt->gc.incrementalEnabled = false; #ifdef DEBUG if (rt->gcMode() == JSGC_MODE_INCREMENTAL) { @@ -2710,7 +2710,7 @@ AllocateSlots(ThreadSafeContext *cx, JSObject *obj, uint32_t nslots) { #ifdef JSGC_GENERATIONAL if (cx->isJSContext()) - return cx->asJSContext()->runtime()->gcNursery.allocateSlots(cx->asJSContext(), obj, nslots); + return cx->asJSContext()->runtime()->gc.nursery.allocateSlots(cx->asJSContext(), obj, nslots); #endif return cx->pod_malloc(nslots); } @@ -2721,9 +2721,9 @@ ReallocateSlots(ThreadSafeContext *cx, JSObject *obj, HeapSlot *oldSlots, { #ifdef JSGC_GENERATIONAL if (cx->isJSContext()) { - return cx->asJSContext()->runtime()->gcNursery.reallocateSlots(cx->asJSContext(), - obj, oldSlots, - oldCount, newCount); + return cx->asJSContext()->runtime()->gc.nursery.reallocateSlots(cx->asJSContext(), + obj, oldSlots, + oldCount, newCount); } #endif return (HeapSlot *)cx->realloc_(oldSlots, oldCount * sizeof(HeapSlot), @@ -2798,7 +2798,7 @@ FreeSlots(ThreadSafeContext *cx, HeapSlot *slots) // Note: threads without a JSContext do not have access to nursery allocated things. 
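The jsobj.cpp slot and element allocators in this stretch — AllocateSlots and ReallocateSlots above, FreeSlots and AllocateElements just below — all re-point the same idiom at the relocated nursery: only a main-thread JSContext may use nursery storage, and every other caller falls back to the malloc heap. A minimal sketch of that idiom under the post-patch field names (the helper name and trimmed signature are illustrative, not code from this patch):

static js::HeapSlot *
AllocateSlotsSketch(js::ThreadSafeContext *cx, JSObject *obj, uint32_t nslots)
{
#ifdef JSGC_GENERATIONAL
    // Only a full JSContext (the main thread) may touch the nursery;
    // helper threads take the plain-malloc path below.
    if (cx->isJSContext())
        return cx->asJSContext()->runtime()->gc.nursery.allocateSlots(cx->asJSContext(), obj, nslots);
#endif
    return cx->pod_malloc<js::HeapSlot>(nslots);
}

In each such hunk the control flow is untouched; only the field path changes, from gcNursery to gc.nursery.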
#ifdef JSGC_GENERATIONAL if (cx->isJSContext()) - return cx->asJSContext()->runtime()->gcNursery.freeSlots(cx->asJSContext(), slots); + return cx->asJSContext()->runtime()->gc.nursery.freeSlots(cx->asJSContext(), slots); #endif js_free(slots); } @@ -3020,7 +3020,7 @@ AllocateElements(ThreadSafeContext *cx, JSObject *obj, uint32_t nelems) { #ifdef JSGC_GENERATIONAL if (cx->isJSContext()) - return cx->asJSContext()->runtime()->gcNursery.allocateElements(cx->asJSContext(), obj, nelems); + return cx->asJSContext()->runtime()->gc.nursery.allocateElements(cx->asJSContext(), obj, nelems); #endif return static_cast(cx->malloc_(nelems * sizeof(HeapValue))); @@ -3032,9 +3032,9 @@ ReallocateElements(ThreadSafeContext *cx, JSObject *obj, ObjectElements *oldHead { #ifdef JSGC_GENERATIONAL if (cx->isJSContext()) { - return cx->asJSContext()->runtime()->gcNursery.reallocateElements(cx->asJSContext(), obj, - oldHeader, oldCount, - newCount); + return cx->asJSContext()->runtime()->gc.nursery.reallocateElements(cx->asJSContext(), obj, + oldHeader, oldCount, + newCount); } #endif diff --git a/js/src/jsobjinlines.h b/js/src/jsobjinlines.h index 152a1153814..5709b10dad0 100644 --- a/js/src/jsobjinlines.h +++ b/js/src/jsobjinlines.h @@ -517,7 +517,7 @@ JSObject::create(js::ExclusiveContext *cx, js::gc::AllocKind kind, js::gc::Initi if (extantSlots) { #ifdef JSGC_GENERATIONAL if (cx->isJSContext()) - cx->asJSContext()->runtime()->gcNursery.notifyInitialSlots(obj, extantSlots); + cx->asJSContext()->runtime()->gc.nursery.notifyInitialSlots(obj, extantSlots); #endif obj->slots = extantSlots; } diff --git a/js/src/jsopcode.cpp b/js/src/jsopcode.cpp index fc150726dec..b5e4e167340 100644 --- a/js/src/jsopcode.cpp +++ b/js/src/jsopcode.cpp @@ -814,7 +814,7 @@ ToDisassemblySource(JSContext *cx, HandleValue v, JSAutoByteString *bytes) return true; } - if (cx->runtime()->isHeapBusy() || cx->runtime()->noGCOrAllocationCheck) { + if (cx->runtime()->isHeapBusy() || cx->runtime()->gc.noGCOrAllocationCheck) { char *source = JS_sprintf_append(nullptr, ""); if (!source) return false; @@ -1936,10 +1936,10 @@ js::GetPCCountScriptCount(JSContext *cx) { JSRuntime *rt = cx->runtime(); - if (!rt->scriptAndCountsVector) + if (!rt->gc.scriptAndCountsVector) return 0; - return rt->scriptAndCountsVector->length(); + return rt->gc.scriptAndCountsVector->length(); } enum MaybeComma {NO_COMMA, COMMA}; @@ -1974,12 +1974,12 @@ js::GetPCCountScriptSummary(JSContext *cx, size_t index) { JSRuntime *rt = cx->runtime(); - if (!rt->scriptAndCountsVector || index >= rt->scriptAndCountsVector->length()) { + if (!rt->gc.scriptAndCountsVector || index >= rt->gc.scriptAndCountsVector->length()) { JS_ReportErrorNumber(cx, js_GetErrorMessage, nullptr, JSMSG_BUFFER_TOO_SMALL); return nullptr; } - const ScriptAndCounts &sac = (*rt->scriptAndCountsVector)[index]; + const ScriptAndCounts &sac = (*rt->gc.scriptAndCountsVector)[index]; RootedScript script(cx, sac.script); /* @@ -2234,12 +2234,12 @@ js::GetPCCountScriptContents(JSContext *cx, size_t index) { JSRuntime *rt = cx->runtime(); - if (!rt->scriptAndCountsVector || index >= rt->scriptAndCountsVector->length()) { + if (!rt->gc.scriptAndCountsVector || index >= rt->gc.scriptAndCountsVector->length()) { JS_ReportErrorNumber(cx, js_GetErrorMessage, nullptr, JSMSG_BUFFER_TOO_SMALL); return nullptr; } - const ScriptAndCounts &sac = (*rt->scriptAndCountsVector)[index]; + const ScriptAndCounts &sac = (*rt->gc.scriptAndCountsVector)[index]; JSScript *script = sac.script; StringBuffer buf(cx); diff --git 
a/js/src/jsproxy.cpp b/js/src/jsproxy.cpp index 94320afefc6..423a3f9a89e 100644 --- a/js/src/jsproxy.cpp +++ b/js/src/jsproxy.cpp @@ -2987,7 +2987,7 @@ ProxyObject::trace(JSTracer *trc, JSObject *obj) ProxyObject *proxy = &obj->as(); #ifdef DEBUG - if (!trc->runtime()->gcDisableStrictProxyCheckingCount && proxy->is()) { + if (!trc->runtime()->gc.disableStrictProxyCheckingCount && proxy->is()) { JSObject *referent = &proxy->private_().toObject(); if (referent->compartment() != proxy->compartment()) { /* diff --git a/js/src/jsscript.cpp b/js/src/jsscript.cpp index a70b591f35f..f04c4ee4a0d 100644 --- a/js/src/jsscript.cpp +++ b/js/src/jsscript.cpp @@ -2063,7 +2063,7 @@ SaveSharedScriptData(ExclusiveContext *cx, Handle script, SharedScri */ if (cx->isJSContext()) { JSRuntime *rt = cx->asJSContext()->runtime(); - if (JS::IsIncrementalGCInProgress(rt) && rt->gcIsFull) + if (JS::IsIncrementalGCInProgress(rt) && rt->gc.isFull) ssd->marked = true; } #endif @@ -2081,14 +2081,14 @@ MarkScriptData(JSRuntime *rt, const jsbytecode *bytecode) * a GC. Since SweepScriptBytecodes is only called during a full gc, * to preserve this invariant, only mark during a full gc. */ - if (rt->gcIsFull) + if (rt->gc.isFull) SharedScriptData::fromBytecode(bytecode)->marked = true; } void js::UnmarkScriptData(JSRuntime *rt) { - JS_ASSERT(rt->gcIsFull); + JS_ASSERT(rt->gc.isFull); ScriptDataTable &table = rt->scriptDataTable(); for (ScriptDataTable::Enum e(table); !e.empty(); e.popFront()) { SharedScriptData *entry = e.front(); @@ -2099,7 +2099,7 @@ js::UnmarkScriptData(JSRuntime *rt) void js::SweepScriptData(JSRuntime *rt) { - JS_ASSERT(rt->gcIsFull); + JS_ASSERT(rt->gc.isFull); ScriptDataTable &table = rt->scriptDataTable(); if (rt->keepAtoms()) @@ -3304,7 +3304,7 @@ JSScript::markChildren(JSTracer *trc) // JSScript::Create(), but not yet finished initializing it with // fullyInitFromEmitter() or fullyInitTrivial(). 
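For context on the assertion that immediately follows: gcStrictCompartmentChecking — relocated to rt->gc.strictCompartmentChecking by this patch — is a debug-only mode, described in the Runtime.h comment removed further down, under which every marked object must belong to a compartment currently being collected. That is exactly the invariant JSScript::markChildren asserts about its zone here.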
- JS_ASSERT_IF(trc->runtime()->gcStrictCompartmentChecking, zone()->isCollecting()); + JS_ASSERT_IF(trc->runtime()->gc.strictCompartmentChecking, zone()->isCollecting()); for (uint32_t i = 0; i < natoms(); ++i) { if (atoms[i]) diff --git a/js/src/jsweakmap.cpp b/js/src/jsweakmap.cpp index a50b845fc0a..4521e7699b4 100644 --- a/js/src/jsweakmap.cpp +++ b/js/src/jsweakmap.cpp @@ -288,7 +288,7 @@ WeakMapPostWriteBarrier(JSRuntime *rt, ObjectValueMap *weakMap, JSObject *key) typedef gc::HashKeyRef Ref; if (key && IsInsideNursery(rt, key)) - rt->gcStoreBuffer.putGeneric(Ref((unbarrieredMap), key)); + rt->gc.storeBuffer.putGeneric(Ref((unbarrieredMap), key)); #endif } diff --git a/js/src/vm/ArrayBufferObject.h b/js/src/vm/ArrayBufferObject.h index 646c1ccc7f4..b81caa0cbc9 100644 --- a/js/src/vm/ArrayBufferObject.h +++ b/js/src/vm/ArrayBufferObject.h @@ -276,7 +276,7 @@ PostBarrierTypedArrayObject(JSObject *obj) JS_ASSERT(obj); JSRuntime *rt = obj->runtimeFromMainThread(); if (!rt->isHeapBusy() && !IsInsideNursery(rt, obj)) - rt->gcStoreBuffer.putWholeCell(obj); + rt->gc.storeBuffer.putWholeCell(obj); #endif } diff --git a/js/src/vm/ForkJoin.cpp b/js/src/vm/ForkJoin.cpp index 459c9a3edef..c079db6143b 100644 --- a/js/src/vm/ForkJoin.cpp +++ b/js/src/vm/ForkJoin.cpp @@ -479,7 +479,7 @@ ForkJoinActivation::ForkJoinActivation(JSContext *cx) MinorGC(cx->runtime(), JS::gcreason::API); - cx->runtime()->gcHelperThread.waitBackgroundSweepEnd(); + cx->runtime()->gc.helperThread.waitBackgroundSweepEnd(); JS_ASSERT(!cx->runtime()->needsBarrier()); JS_ASSERT(!cx->zone()->needsBarrier()); @@ -1557,7 +1557,7 @@ ForkJoinShared::setAbortFlagDueToInterrupt(ForkJoinContext &cx) // The GC Needed flag should not be set during parallel // execution. Instead, one of the requestGC() or // requestZoneGC() methods should be invoked. 
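The weak map and typed array barriers just above share one generational idiom: when a write may create a tenured-to-nursery edge, record enough information in the store buffer for the next minor GC to find and fix that edge. A condensed sketch of the whole-cell flavor under the relocated field name (the wrapper function is illustrative, not code from this patch):

static void
PostBarrierWholeCellSketch(JSObject *obj)
{
#ifdef JSGC_GENERATIONAL
    JSRuntime *rt = obj->runtimeFromMainThread();
    // Nursery-to-nursery edges need no barrier, and writes made while the
    // heap is busy are handled by the collector itself.
    if (!rt->isHeapBusy() && !IsInsideNursery(rt, obj))
        rt->gc.storeBuffer.putWholeCell(obj);
#endif
}

putGeneric, used by the weak map barrier, is the same idea with a caller-supplied ref object (here a HashKeyRef that can re-key the table after the key moves) instead of a whole-cell re-scan.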
- JS_ASSERT(!cx_->runtime()->gcIsNeeded); + JS_ASSERT(!cx_->runtime()->gc.isNeeded); if (!abort_) { cx.bailoutRecord->setCause(ParallelBailoutInterrupt); diff --git a/js/src/vm/MemoryMetrics.cpp b/js/src/vm/MemoryMetrics.cpp index 5862324c46c..ea6793dea24 100644 --- a/js/src/vm/MemoryMetrics.cpp +++ b/js/src/vm/MemoryMetrics.cpp @@ -537,7 +537,7 @@ JS::CollectRuntimeStats(JSRuntime *rt, RuntimeStats *rtStats, ObjectPrivateVisit if (!rtStats->compartmentStatsVector.reserve(rt->numCompartments)) return false; - if (!rtStats->zoneStatsVector.reserve(rt->zones.length())) + if (!rtStats->zoneStatsVector.reserve(rt->gc.zones.length())) return false; rtStats->gcHeapChunkTotal = diff --git a/js/src/vm/RegExpObject.cpp b/js/src/vm/RegExpObject.cpp index cf47cb0d9e0..5e4af96641c 100644 --- a/js/src/vm/RegExpObject.cpp +++ b/js/src/vm/RegExpObject.cpp @@ -731,7 +731,7 @@ RegExpCompartment::sweep(JSRuntime *rt) for (PendingSet::Enum e(inUse_); !e.empty(); e.popFront()) { RegExpShared *shared = e.front(); - if (shared->activeUseCount == 0 && shared->gcNumberWhenUsed < rt->gcStartNumber) { + if (shared->activeUseCount == 0 && shared->gcNumberWhenUsed < rt->gc.startNumber) { js_delete(shared); e.removeFront(); } diff --git a/js/src/vm/Runtime.cpp b/js/src/vm/Runtime.cpp index 8072a182f5b..efb705ed83c 100644 --- a/js/src/vm/Runtime.cpp +++ b/js/src/vm/Runtime.cpp @@ -114,7 +114,7 @@ static const JSWrapObjectCallbacks DefaultWrapObjectCallbacks = { JSRuntime::JSRuntime(JSRuntime *parentRuntime, JSUseHelperThreads useHelperThreads) : JS::shadow::Runtime( #ifdef JSGC_GENERATIONAL - &gcStoreBuffer + &gc.storeBuffer #endif ), mainThread(this), @@ -135,7 +135,6 @@ JSRuntime::JSRuntime(JSRuntime *parentRuntime, JSUseHelperThreads useHelperThrea #else interruptLockTaken(false), #endif - systemZone(nullptr), numCompartments(0), localeCallbacks(nullptr), defaultLocale(nullptr), @@ -166,89 +165,11 @@ JSRuntime::JSRuntime(JSRuntime *parentRuntime, JSUseHelperThreads useHelperThrea #ifdef DEBUG activeContext(nullptr), #endif + gc(thisFromCtor()), gcInitialized(false), - gcSystemAvailableChunkListHead(nullptr), - gcUserAvailableChunkListHead(nullptr), - gcBytes(0), - gcMaxBytes(0), - gcMaxMallocBytes(0), - gcNumArenasFreeCommitted(0), - gcMarker(this), - gcVerifyPreData(nullptr), - gcVerifyPostData(nullptr), - gcChunkAllocationSinceLastGC(false), - gcNextFullGCTime(0), - gcLastGCTime(0), - gcJitReleaseTime(0), - gcAllocationThreshold(30 * 1024 * 1024), - gcHighFrequencyGC(false), - gcHighFrequencyTimeThreshold(1000), - gcHighFrequencyLowLimitBytes(100 * 1024 * 1024), - gcHighFrequencyHighLimitBytes(500 * 1024 * 1024), - gcHighFrequencyHeapGrowthMax(3.0), - gcHighFrequencyHeapGrowthMin(1.5), - gcLowFrequencyHeapGrowth(1.5), - gcDynamicHeapGrowth(false), - gcDynamicMarkSlice(false), - gcDecommitThreshold(32 * 1024 * 1024), - gcShouldCleanUpEverything(false), - gcGrayBitsValid(false), - gcIsNeeded(0), - gcStats(thisFromCtor()), - gcNumber(0), - gcStartNumber(0), - gcIsFull(false), - gcTriggerReason(JS::gcreason::NO_REASON), - gcStrictCompartmentChecking(false), -#ifdef DEBUG - gcDisableStrictProxyCheckingCount(0), -#endif - gcIncrementalState(gc::NO_INCREMENTAL), - gcLastMarkSlice(false), - gcSweepOnBackgroundThread(false), - gcFoundBlackGrayEdges(false), - gcSweepingZones(nullptr), - gcZoneGroupIndex(0), - gcZoneGroups(nullptr), - gcCurrentZoneGroup(nullptr), - gcSweepPhase(0), - gcSweepZone(nullptr), - gcSweepKindIndex(0), - gcAbortSweepAfterCurrentGroup(false), - gcArenasAllocatedDuringSweep(nullptr), -#ifdef DEBUG - 
gcMarkingValidator(nullptr), -#endif - gcInterFrameGC(0), - gcSliceBudget(SliceBudget::Unlimited), - gcIncrementalEnabled(true), - gcGenerationalDisabled(0), - gcManipulatingDeadZones(false), - gcObjectsMarkedInDeadZones(0), - gcPoke(false), - heapState(Idle), -#ifdef JSGC_GENERATIONAL - gcNursery(thisFromCtor()), - gcStoreBuffer(thisFromCtor(), gcNursery), -#endif -#ifdef JS_GC_ZEAL - gcZeal_(0), - gcZealFrequency(0), - gcNextScheduled(0), - gcDeterministicOnly(false), - gcIncrementalLimit(0), -#endif - gcValidate(true), - gcFullCompartmentChecks(false), - gcCallback(nullptr), - gcSliceCallback(nullptr), - gcFinalizeCallback(nullptr), - gcMallocBytes(0), - gcMallocGCTriggered(false), #ifdef JS_ARM_SIMULATOR simulatorRuntime_(nullptr), #endif - scriptAndCountsVector(nullptr), NaNValue(DoubleNaNValue()), negativeInfinityValue(DoubleValue(NegativeInfinity())), positiveInfinityValue(DoubleValue(PositiveInfinity())), @@ -256,13 +177,9 @@ JSRuntime::JSRuntime(JSRuntime *parentRuntime, JSUseHelperThreads useHelperThrea debugMode(false), spsProfiler(thisFromCtor()), profilingScripts(false), - alwaysPreserveCode(false), hadOutOfMemory(false), haveCreatedContext(false), data(nullptr), - gcLock(nullptr), - gcLockOwner(nullptr), - gcHelperThread(thisFromCtor()), signalHandlersInstalled_(false), defaultFreeOp_(thisFromCtor(), false), debuggerMutations(0), @@ -289,9 +206,6 @@ JSRuntime::JSRuntime(JSRuntime *parentRuntime, JSUseHelperThreads useHelperThrea permanentAtoms(nullptr), wrapObjectCallbacks(&DefaultWrapObjectCallbacks), preserveWrapperCallback(nullptr), -#ifdef DEBUG - noGCOrAllocationCheck(0), -#endif jitSupportsFloatingPoint(false), ionPcScriptCache(nullptr), threadPool(this), @@ -353,8 +267,8 @@ JSRuntime::init(uint32_t maxbytes) if (!interruptLock) return false; - gcLock = PR_NewLock(); - if (!gcLock) + gc.lock = PR_NewLock(); + if (!gc.lock) return false; exclusiveAccessLock = PR_NewLock(); @@ -373,7 +287,7 @@ JSRuntime::init(uint32_t maxbytes) if (!js_InitGC(this, maxbytes)) return false; - if (!gcMarker.init(gcMode())) + if (!gc.marker.init(gcMode())) return false; const char *size = getenv("JSGC_MARK_STACK_LIMIT"); @@ -389,7 +303,7 @@ JSRuntime::init(uint32_t maxbytes) if (!atomsCompartment || !atomsCompartment->init(nullptr)) return false; - zones.append(atomsZone.get()); + gc.zones.append(atomsZone.get()); atomsZone->compartments.append(atomsCompartment.get()); atomsCompartment->isSystem = true; @@ -524,8 +438,8 @@ JSRuntime::~JSRuntime() atomsCompartment_ = nullptr; #ifdef JS_THREADSAFE - if (gcLock) - PR_DestroyLock(gcLock); + if (gc.lock) + PR_DestroyLock(gc.lock); #endif js_free(defaultLocale); @@ -539,8 +453,8 @@ JSRuntime::~JSRuntime() js_delete(ionPcScriptCache); #ifdef JSGC_GENERATIONAL - gcStoreBuffer.disable(); - gcNursery.disable(); + gc.storeBuffer.disable(); + gc.nursery.disable(); #endif #ifdef JS_ARM_SIMULATOR @@ -628,12 +542,12 @@ JSRuntime::addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf, JS::Runtim } #endif - rtSizes->gc.marker += gcMarker.sizeOfExcludingThis(mallocSizeOf); + rtSizes->gc.marker += gc.marker.sizeOfExcludingThis(mallocSizeOf); #ifdef JSGC_GENERATIONAL - rtSizes->gc.nurseryCommitted += gcNursery.sizeOfHeapCommitted(); - rtSizes->gc.nurseryDecommitted += gcNursery.sizeOfHeapDecommitted(); - rtSizes->gc.nurseryHugeSlots += gcNursery.sizeOfHugeSlots(mallocSizeOf); - gcStoreBuffer.addSizeOfExcludingThis(mallocSizeOf, &rtSizes->gc); + rtSizes->gc.nurseryCommitted += gc.nursery.sizeOfHeapCommitted(); + rtSizes->gc.nurseryDecommitted += 
gc.nursery.sizeOfHeapDecommitted(); + rtSizes->gc.nurseryHugeSlots += gc.nursery.sizeOfHugeSlots(mallocSizeOf); + gc.storeBuffer.addSizeOfExcludingThis(mallocSizeOf, &rtSizes->gc); #endif } @@ -785,7 +699,7 @@ JSRuntime::setGCMaxMallocBytes(size_t value) * For compatibility treat any value that exceeds PTRDIFF_T_MAX to * mean that value. */ - gcMaxMallocBytes = (ptrdiff_t(value) >= 0) ? value : size_t(-1) >> 1; + gc.maxMallocBytes = (ptrdiff_t(value) >= 0) ? value : size_t(-1) >> 1; resetGCMallocBytes(); for (ZonesIter zone(this, WithAtoms); !zone.done(); zone.next()) zone->setGCMaxMallocBytes(value); @@ -801,8 +715,8 @@ void JSRuntime::updateMallocCounter(JS::Zone *zone, size_t nbytes) { /* We tolerate any thread races when updating gcMallocBytes. */ - gcMallocBytes -= ptrdiff_t(nbytes); - if (MOZ_UNLIKELY(gcMallocBytes <= 0)) + gc.mallocBytes -= ptrdiff_t(nbytes); + if (MOZ_UNLIKELY(gc.mallocBytes <= 0)) onTooMuchMalloc(); else if (zone) zone->updateMallocCounter(nbytes); @@ -814,8 +728,8 @@ JSRuntime::onTooMuchMalloc() if (!CurrentThreadCanAccessRuntime(this)) return; - if (!gcMallocGCTriggered) - gcMallocGCTriggered = TriggerGC(this, JS::gcreason::TOO_MUCH_MALLOC); + if (!gc.mallocGCTriggered) + gc.mallocGCTriggered = TriggerGC(this, JS::gcreason::TOO_MUCH_MALLOC); } JS_FRIEND_API(void *) @@ -835,7 +749,7 @@ JSRuntime::onOutOfMemory(void *p, size_t nbytes, JSContext *cx) * all the allocations and released the empty GC chunks. */ JS::ShrinkGCBuffers(this); - gcHelperThread.waitBackgroundSweepOrAllocEnd(); + gc.helperThread.waitBackgroundSweepOrAllocEnd(); if (!p) p = js_malloc(nbytes); else if (p == reinterpret_cast(1)) @@ -930,7 +844,7 @@ JSRuntime::assertCanLock(RuntimeLock which) case InterruptLock: JS_ASSERT(!currentThreadOwnsInterruptLock()); case GCLock: - JS_ASSERT(gcLockOwner != PR_GetCurrentThread()); + JS_ASSERT(gc.lockOwner != PR_GetCurrentThread()); break; default: MOZ_CRASH(); diff --git a/js/src/vm/Runtime.h b/js/src/vm/Runtime.h index 94cfebda3fb..9619b68f94e 100644 --- a/js/src/vm/Runtime.h +++ b/js/src/vm/Runtime.h @@ -19,7 +19,6 @@ #include "jsatom.h" #include "jsclist.h" -#include "jsgc.h" #ifdef DEBUG # include "jsproxy.h" #endif @@ -27,13 +26,7 @@ #include "ds/FixedSizeHash.h" #include "frontend/ParseMaps.h" -#ifdef JSGC_GENERATIONAL -# include "gc/Nursery.h" -#endif -#include "gc/Statistics.h" -#ifdef JSGC_GENERATIONAL -# include "gc/StoreBuffer.h" -#endif +#include "gc/GCRuntime.h" #include "gc/Tracer.h" #ifdef XP_MACOSX # include "jit/AsmJSSignalHandlers.h" @@ -135,48 +128,6 @@ struct ScopeCoordinateNameCache { void purge(); }; -typedef Vector ScriptAndCountsVector; - -struct ConservativeGCData -{ - /* - * The GC scans conservatively between ThreadData::nativeStackBase and - * nativeStackTop unless the latter is nullptr. - */ - uintptr_t *nativeStackTop; - - union { - jmp_buf jmpbuf; - uintptr_t words[JS_HOWMANY(sizeof(jmp_buf), sizeof(uintptr_t))]; - } registerSnapshot; - - ConservativeGCData() { - mozilla::PodZero(this); - } - - ~ConservativeGCData() { -#ifdef JS_THREADSAFE - /* - * The conservative GC scanner should be disabled when the thread leaves - * the last request. 
- */ - JS_ASSERT(!hasStackToScan()); -#endif - } - - MOZ_NEVER_INLINE void recordStackTop(); - -#ifdef JS_THREADSAFE - void updateForRequestEnd() { - nativeStackTop = nullptr; - } -#endif - - bool hasStackToScan() const { - return !!nativeStackTop; - } -}; - struct EvalCacheEntry { JSScript *script; @@ -655,12 +606,6 @@ class PerThreadData : public PerThreadDataFriendFields #endif }; -namespace gc { -class MarkingValidator; -} // namespace gc - -typedef Vector ZoneVector; - class AutoLockForExclusiveAccess; void RecomputeStackLimit(JSRuntime *rt, StackKind kind); @@ -806,12 +751,6 @@ struct JSRuntime : public JS::shadow::Runtime, #endif } - /* Embedders can use this zone however they wish. */ - JS::Zone *systemZone; - - /* List of compartments and zones (protected by the GC lock). */ - js::ZoneVector zones; - /* How many compartments there are across all zones. */ size_t numCompartments; @@ -975,259 +914,37 @@ struct JSRuntime : public JS::shadow::Runtime, #endif /* Garbage collector state, used by jsgc.c. */ + js::gc::GCRuntime gc; /* Garbage collector state has been successfully initialized. */ bool gcInitialized; - /* - * Set of all GC chunks with at least one allocated thing. The - * conservative GC uses it to quickly check if a possible GC thing points - * into an allocated chunk. - */ - js::GCChunkSet gcChunkSet; - - /* - * Doubly-linked lists of chunks from user and system compartments. The GC - * allocates its arenas from the corresponding list and when all arenas - * in the list head are taken, then the chunk is removed from the list. - * During the GC when all arenas in a chunk become free, that chunk is - * removed from the list and scheduled for release. - */ - js::gc::Chunk *gcSystemAvailableChunkListHead; - js::gc::Chunk *gcUserAvailableChunkListHead; - js::gc::ChunkPool gcChunkPool; - - js::RootedValueMap gcRootsHash; - - /* This is updated by both the main and GC helper threads. */ - mozilla::Atomic gcBytes; - - size_t gcMaxBytes; - size_t gcMaxMallocBytes; - - /* - * Number of the committed arenas in all GC chunks including empty chunks. - */ - mozilla::Atomic gcNumArenasFreeCommitted; - js::GCMarker gcMarker; - void *gcVerifyPreData; - void *gcVerifyPostData; - bool gcChunkAllocationSinceLastGC; - int64_t gcNextFullGCTime; - int64_t gcLastGCTime; - int64_t gcJitReleaseTime; - private: - JSGCMode gcMode_; - - public: - JSGCMode gcMode() const { return gcMode_; } + JSGCMode gcMode() const { return gc.mode; } void setGCMode(JSGCMode mode) { - gcMode_ = mode; - gcMarker.setGCMode(mode); + gc.mode = mode; + gc.marker.setGCMode(mode); } - size_t gcAllocationThreshold; - bool gcHighFrequencyGC; - uint64_t gcHighFrequencyTimeThreshold; - uint64_t gcHighFrequencyLowLimitBytes; - uint64_t gcHighFrequencyHighLimitBytes; - double gcHighFrequencyHeapGrowthMax; - double gcHighFrequencyHeapGrowthMin; - double gcLowFrequencyHeapGrowth; - bool gcDynamicHeapGrowth; - bool gcDynamicMarkSlice; - uint64_t gcDecommitThreshold; - - /* During shutdown, the GC needs to clean up every possible object. */ - bool gcShouldCleanUpEverything; - - /* - * The gray bits can become invalid if UnmarkGray overflows the stack. A - * full GC will reset this bit, since it fills in all the gray bits. - */ - bool gcGrayBitsValid; - - /* - * These flags must be kept separate so that a thread requesting a - * compartment GC doesn't cancel another thread's concurrent request for a - * full GC. - */ - volatile uintptr_t gcIsNeeded; - - js::gcstats::Statistics gcStats; - - /* Incremented on every GC slice. 
*/ - uint64_t gcNumber; - - /* The gcNumber at the time of the most recent GC's first slice. */ - uint64_t gcStartNumber; - - /* Whether the currently running GC can finish in multiple slices. */ - bool gcIsIncremental; - - /* Whether all compartments are being collected in first GC slice. */ - bool gcIsFull; - - /* The reason that an interrupt-triggered GC should be called. */ - JS::gcreason::Reason gcTriggerReason; - - /* - * If this is true, all marked objects must belong to a compartment being - * GCed. This is used to look for compartment bugs. - */ - bool gcStrictCompartmentChecking; - -#ifdef DEBUG - /* - * If this is 0, all cross-compartment proxies must be registered in the - * wrapper map. This checking must be disabled temporarily while creating - * new wrappers. When non-zero, this records the recursion depth of wrapper - * creation. - */ - uintptr_t gcDisableStrictProxyCheckingCount; -#else - uintptr_t unused1; -#endif - - /* - * The current incremental GC phase. This is also used internally in - * non-incremental GC. - */ - js::gc::State gcIncrementalState; - - /* Indicates that the last incremental slice exhausted the mark stack. */ - bool gcLastMarkSlice; - - /* Whether any sweeping will take place in the separate GC helper thread. */ - bool gcSweepOnBackgroundThread; - - /* Whether any black->gray edges were found during marking. */ - bool gcFoundBlackGrayEdges; - - /* List head of zones to be swept in the background. */ - JS::Zone *gcSweepingZones; - - /* Index of current zone group (for stats). */ - unsigned gcZoneGroupIndex; - - /* - * Incremental sweep state. - */ - JS::Zone *gcZoneGroups; - JS::Zone *gcCurrentZoneGroup; - int gcSweepPhase; - JS::Zone *gcSweepZone; - int gcSweepKindIndex; - bool gcAbortSweepAfterCurrentGroup; - - /* - * List head of arenas allocated during the sweep phase. - */ - js::gc::ArenaHeader *gcArenasAllocatedDuringSweep; - -#ifdef DEBUG - js::gc::MarkingValidator *gcMarkingValidator; -#endif - - /* - * Indicates that a GC slice has taken place in the middle of an animation - * frame, rather than at the beginning. In this case, the next slice will be - * delayed so that we don't get back-to-back slices. - */ - volatile uintptr_t gcInterFrameGC; - - /* Default budget for incremental GC slice. See SliceBudget in jsgc.h. */ - int64_t gcSliceBudget; - - /* - * We disable incremental GC if we encounter a js::Class with a trace hook - * that does not implement write barriers. - */ - bool gcIncrementalEnabled; - - /* - * GGC can be enabled from the command line while testing. - */ - unsigned gcGenerationalDisabled; - - /* - * This is true if we are in the middle of a brain transplant (e.g., - * JS_TransplantObject) or some other operation that can manipulate - * dead zones. - */ - bool gcManipulatingDeadZones; - - /* - * This field is incremented each time we mark an object inside a - * zone with no incoming cross-compartment pointers. Typically if - * this happens it signals that an incremental GC is marking too much - * stuff. At various times we check this counter and, if it has changed, we - * run an immediate, non-incremental GC to clean up the dead - * zones. This should happen very rarely. 
@@ -1239,27 +956,24 @@ struct JSRuntime : public JS::shadow::Runtime,
     bool needZealousGC() { return false; }
 #endif
 
-    bool                gcValidate;
-    bool                gcFullCompartmentChecks;
+    void lockGC() {
+#ifdef JS_THREADSAFE
+        assertCanLock(js::GCLock);
+        PR_Lock(gc.lock);
+        JS_ASSERT(!gc.lockOwner);
+#ifdef DEBUG
+        gc.lockOwner = PR_GetCurrentThread();
+#endif
+#endif
+    }
 
-    JSGCCallback        gcCallback;
-    JS::GCSliceCallback gcSliceCallback;
-    JSFinalizeCallback  gcFinalizeCallback;
-
-    void                *gcCallbackData;
-
-  private:
-    /*
-     * Malloc counter to measure memory pressure for GC scheduling. It runs
-     * from gcMaxMallocBytes down to zero.
-     */
-    mozilla::Atomic<ptrdiff_t, mozilla::ReleaseAcquire> gcMallocBytes;
-
-    /*
-     * Whether a GC has been triggered as a result of gcMallocBytes falling
-     * below zero.
-     */
-    mozilla::Atomic<bool, mozilla::ReleaseAcquire> gcMallocGCTriggered;
+    void unlockGC() {
+#ifdef JS_THREADSAFE
+        JS_ASSERT(gc.lockOwner == PR_GetCurrentThread());
+        gc.lockOwner = nullptr;
+        PR_Unlock(gc.lock);
+#endif
+    }
 
 #ifdef JS_ARM_SIMULATOR
     js::jit::SimulatorRuntime *simulatorRuntime_;
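lockGC()/unlockGC() move here unchanged except that the lock and its debug-only owner now live on the GC state (gc.lock, gc.lockOwner). Callers normally hold the lock for a scope; a minimal RAII wrapper in the spirit of the tree's AutoLockGC (a sketch against the methods above, not the actual helper):

    // Scoped GC lock: acquires in the constructor, releases in the destructor,
    // so early returns cannot leak the lock.
    class AutoGCLock {
        JSRuntime *rt;
        AutoGCLock(const AutoGCLock &);       // not copyable
        void operator=(const AutoGCLock &);   // not assignable
      public:
        explicit AutoGCLock(JSRuntime *rt) : rt(rt) { rt->lockGC(); }
        ~AutoGCLock() { rt->unlockGC(); }
    };

    // Usage: the lock is held for exactly the enclosing scope.
    //     {
    //         AutoGCLock lock(rt);
    //         /* touch chunk lists, zones, etc. */
    //     }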
@@ -1270,38 +984,11 @@ struct JSRuntime : public JS::shadow::Runtime,
         needsBarrier_ = needs;
     }
 
-    struct ExtraTracer {
-        JSTraceDataOp op;
-        void *data;
-
-        ExtraTracer()
-          : op(nullptr), data(nullptr)
-        {}
-        ExtraTracer(JSTraceDataOp op, void *data)
-          : op(op), data(data)
-        {}
-    };
-
 #ifdef JS_ARM_SIMULATOR
     js::jit::SimulatorRuntime *simulatorRuntime() const;
     void setSimulatorRuntime(js::jit::SimulatorRuntime *srt);
 #endif
 
-    /*
-     * The trace operations to trace embedding-specific GC roots. One is for
-     * tracing through black roots and the other is for tracing through gray
-     * roots. The black/gray distinction is only relevant to the cycle
-     * collector.
-     */
-    typedef js::Vector<ExtraTracer, 4, js::SystemAllocPolicy> ExtraTracerVector;
-    ExtraTracerVector   gcBlackRootTracers;
-    ExtraTracer         gcGrayRootTracer;
-
-    js::gc::SystemPageAllocator pageAllocator;
-
-    /* Strong references on scripts held for PCCount profiling API. */
-    js::ScriptAndCountsVector *scriptAndCountsVector;
-
     /* Well-known numbers held for use by this runtime's contexts. */
     const js::Value     NaNValue;
     const js::Value     negativeInfinityValue;
 
@@ -1330,9 +1017,6 @@ struct JSRuntime : public JS::shadow::Runtime,
     /* If true, new scripts must be created with PC counter information. */
     bool                profilingScripts;
 
-    /* Always preserve JIT code during GCs, for testing. */
-    bool                alwaysPreserveCode;
-
     /* Had an out-of-memory error which did not populate an exception. */
     bool                hadOutOfMemory;
 
@@ -1352,33 +1036,6 @@ struct JSRuntime : public JS::shadow::Runtime,
     void                *data;
 
   private:
-    /* Synchronize GC heap access between main thread and GCHelperThread. */
-    PRLock              *gcLock;
-    mozilla::DebugOnly<PRThread *> gcLockOwner;
-
-    friend class js::GCHelperThread;
-  public:
-
-    void lockGC() {
-#ifdef JS_THREADSAFE
-        assertCanLock(js::GCLock);
-        PR_Lock(gcLock);
-        JS_ASSERT(!gcLockOwner);
-#ifdef DEBUG
-        gcLockOwner = PR_GetCurrentThread();
-#endif
-#endif
-    }
-
-    void unlockGC() {
-#ifdef JS_THREADSAFE
-        JS_ASSERT(gcLockOwner == PR_GetCurrentThread());
-        gcLockOwner = nullptr;
-        PR_Unlock(gcLock);
-#endif
-    }
-
-    js::GCHelperThread  gcHelperThread;
 
 #if defined(XP_MACOSX) && defined(JS_ION)
     js::AsmJSMachExceptionHandler asmJSMachExceptionHandler;
 
@@ -1451,8 +1108,6 @@ struct JSRuntime : public JS::shadow::Runtime,
 
     js::DateTimeInfo    dateTimeInfo;
 
-    js::ConservativeGCData conservativeGC;
-
     // Pool of maps used during parse/emit. This may be modified by threads
     // with an ExclusiveContext and requires a lock. Active compilations
     // prevent the pool from being purged during GCs.
 
@@ -1572,10 +1227,6 @@ struct JSRuntime : public JS::shadow::Runtime,
         return scriptDataTable_;
     }
 
-#ifdef DEBUG
-    size_t              noGCOrAllocationCheck;
-#endif
-
     bool                jitSupportsFloatingPoint;
 
     // Used to reset stack limit after a signaled interrupt (i.e. jitStackLimit_ = -1)
 
@@ -1644,8 +1295,8 @@ struct JSRuntime : public JS::shadow::Runtime,
     void setGCMaxMallocBytes(size_t value);
 
     void resetGCMallocBytes() {
-        gcMallocBytes = ptrdiff_t(gcMaxMallocBytes);
-        gcMallocGCTriggered = false;
+        gc.mallocBytes = ptrdiff_t(gc.maxMallocBytes);
+        gc.mallocGCTriggered = false;
     }
 
     /*
@@ -1662,7 +1313,7 @@ struct JSRuntime : public JS::shadow::Runtime,
     void reportAllocationOverflow() { js_ReportAllocationOverflow(nullptr); }
 
     bool isTooMuchMalloc() const {
-        return gcMallocBytes <= 0;
+        return gc.mallocBytes <= 0;
     }
 
     /*
@@ -1849,7 +1500,7 @@ inline void
 FreeOp::free_(void *p)
 {
     if (shouldFreeLater()) {
-        runtime()->gcHelperThread.freeLater(p);
+        runtime()->gc.helperThread.freeLater(p);
         return;
     }
     js_free(p);
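The FreeOp change is the last in Runtime.h: the helper thread is now reached as gc.helperThread, but the behavior is the same — free immediately on the main thread, or queue the pointer for the background sweeping thread. A typical caller is a class finalizer (a sketch; the hook shape matches the JSAPI of this era, and MyClassFinalize is a hypothetical name):

    // Finalizer for an object carrying malloc'd private data. During
    // background sweeping, fop->free_ defers to gc.helperThread.freeLater;
    // otherwise it calls js_free directly.
    static void
    MyClassFinalize(FreeOp *fop, JSObject *obj)
    {
        if (void *data = JS_GetPrivate(obj))
            fop->free_(data);
    }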
diff --git a/js/src/vm/ScopeObject.cpp b/js/src/vm/ScopeObject.cpp
index c43067f8bfb..1e8305525e5 100644
--- a/js/src/vm/ScopeObject.cpp
+++ b/js/src/vm/ScopeObject.cpp
@@ -1581,7 +1581,7 @@ DebugScopes::proxiedScopesPostWriteBarrier(JSRuntime *rt, ObjectWeakMap *map,
     typedef gc::HashKeyRef<UnbarrieredMap, JSObject *> Ref;
     if (key && IsInsideNursery(rt, key))
-        rt->gcStoreBuffer.putGeneric(Ref(unbarrieredMap, key.get()));
+        rt->gc.storeBuffer.putGeneric(Ref(unbarrieredMap, key.get()));
 #endif
 }
 
@@ -1612,7 +1612,7 @@ DebugScopes::missingScopesPostWriteBarrier(JSRuntime *rt, MissingScopeMap *map,
 {
 #ifdef JSGC_GENERATIONAL
     if (key.enclosingScope() && IsInsideNursery(rt, key.enclosingScope()))
-        rt->gcStoreBuffer.putGeneric(MissingScopesRef(map, key));
+        rt->gc.storeBuffer.putGeneric(MissingScopesRef(map, key));
 #endif
 }
 
@@ -1628,7 +1628,7 @@ DebugScopes::liveScopesPostWriteBarrier(JSRuntime *rt, LiveScopeMap *map, ScopeO
                     RuntimeAllocPolicy> UnbarrieredLiveScopeMap;
     typedef gc::HashKeyRef<UnbarrieredLiveScopeMap, ScopeObject *> Ref;
     if (key && IsInsideNursery(rt, key))
-        rt->gcStoreBuffer.putGeneric(Ref(reinterpret_cast<UnbarrieredLiveScopeMap *>(map), key));
+        rt->gc.storeBuffer.putGeneric(Ref(reinterpret_cast<UnbarrieredLiveScopeMap *>(map), key));
 #endif
 }
 
diff --git a/js/src/vm/Shape.cpp b/js/src/vm/Shape.cpp
index ceda6fa1b12..9a03cfb894f 100644
--- a/js/src/vm/Shape.cpp
+++ b/js/src/vm/Shape.cpp
@@ -1526,7 +1526,7 @@ BaseShape::assertConsistency()
 void
 JSCompartment::sweepBaseShapeTable()
 {
-    gcstats::AutoPhase ap(runtimeFromMainThread()->gcStats,
+    gcstats::AutoPhase ap(runtimeFromMainThread()->gc.stats,
                           gcstats::PHASE_SWEEP_TABLES_BASE_SHAPE);
 
     if (baseShapes.initialized()) {
@@ -1736,7 +1736,7 @@ EmptyShape::getInitialShape(ExclusiveContext *cx, const Class *clasp, TaggedProt
         {
             InitialShapeSetRef ref(
                 &table, clasp, protoRoot, parentRoot, metadataRoot, nfixed, objectFlags);
-            cx->asJSContext()->runtime()->gcStoreBuffer.putGeneric(ref);
+            cx->asJSContext()->runtime()->gc.storeBuffer.putGeneric(ref);
         }
     }
 #endif
@@ -1814,7 +1814,7 @@ EmptyShape::insertInitialShape(ExclusiveContext *cx, HandleShape shape, HandleOb
 void
 JSCompartment::sweepInitialShapeTable()
 {
-    gcstats::AutoPhase ap(runtimeFromMainThread()->gcStats,
+    gcstats::AutoPhase ap(runtimeFromMainThread()->gc.stats,
                           gcstats::PHASE_SWEEP_TABLES_INITIAL_SHAPE);
 
     if (initialShapes.initialized()) {
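The ScopeObject.cpp and Shape.cpp hunks are all instances of one generational-GC pattern: after storing a key that may point into the nursery, the call site registers a BufferableRef with rt->gc.storeBuffer so that a minor GC can find and update the table entry when the key's cell moves. The common shape, as a sketch (Map and Key stand in for the concrete table and key types at each call site):

    template <typename Map, typename Key>
    static void
    PostWriteBarrier(JSRuntime *rt, Map *map, const Key &key)
    {
    #ifdef JSGC_GENERATIONAL
        // Nursery things move during minor GC; tenured things never do,
        // so only nursery keys need to be recorded in the store buffer.
        if (key && IsInsideNursery(rt, key))
            rt->gc.storeBuffer.putGeneric(gc::HashKeyRef<Map, Key>(map, key));
    #endif
    }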