Backed out changeset 3e6abdf3b4b4 (bug 988486)

Carsten "Tomcat" Book 2014-04-30 13:59:19 +02:00
parent b61dac7dbc
commit 0b62e8fe16
47 changed files with 1168 additions and 1211 deletions
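
Every hunk below applies the same mechanical rename: the backed-out patch for bug 988486 had gathered the runtime's flat gc-prefixed members into a GCRuntime object reachable as rt->gc, and this backout restores the flat names. A minimal standalone sketch of the two layouts, with the member set and StoreBuffer reduced to stand-ins:

    #include <cstddef>
    #include <cstdint>

    // Stand-in for js::gc::StoreBuffer; the real one lives in gc/StoreBuffer.h.
    struct StoreBuffer { void setAboutToOverflow() {} };

    // Layout introduced by bug 988486 (the patch being backed out): GC state
    // is encapsulated in one object, embedded in the runtime as a member gc.
    struct GCRuntime {
        size_t      bytes;
        uint64_t    number;
        StoreBuffer storeBuffer;
    };
    struct RuntimeWithGCRuntime { GCRuntime gc; };

    // Layout this backout restores: the same state as flat, prefixed members.
    struct RuntimeWithFlatFields {
        size_t      gcBytes;
        uint64_t    gcNumber;
        StoreBuffer gcStoreBuffer;
    };

    int main() {
        RuntimeWithGCRuntime a;                 // removed form: rt->gc.storeBuffer
        a.gc.storeBuffer.setAboutToOverflow();
        RuntimeWithFlatFields b;                // restored form: rt->gcStoreBuffer
        b.gcStoreBuffer.setAboutToOverflow();
        return 0;
    }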

View File

@ -1136,7 +1136,7 @@ WriteBarrierPost(JSRuntime *rt, ValueMap *map, const HashableValue &key)
{
#ifdef JSGC_GENERATIONAL
typedef OrderedHashMap<Value, Value, UnbarrieredHashPolicy, RuntimeAllocPolicy> UnbarrieredMap;
rt->gc.storeBuffer.putGeneric(OrderedHashTableRef<UnbarrieredMap>(
rt->gcStoreBuffer.putGeneric(OrderedHashTableRef<UnbarrieredMap>(
reinterpret_cast<UnbarrieredMap *>(map), key.get()));
#endif
}
@ -1146,7 +1146,7 @@ WriteBarrierPost(JSRuntime *rt, ValueSet *set, const HashableValue &key)
{
#ifdef JSGC_GENERATIONAL
typedef OrderedHashSet<Value, UnbarrieredHashPolicy, RuntimeAllocPolicy> UnbarrieredSet;
rt->gc.storeBuffer.putGeneric(OrderedHashTableRef<UnbarrieredSet>(
rt->gcStoreBuffer.putGeneric(OrderedHashTableRef<UnbarrieredSet>(
reinterpret_cast<UnbarrieredSet *>(set), key.get()));
#endif
}
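
For context on the two hunks above: under generational GC, storing a nursery-allocated key into a tenured OrderedHashMap/Set must be recorded in the store buffer so the next minor collection can find the edge and update it if the key moves. A reduced sketch of that flow, with hypothetical stand-ins for the store buffer, the table, and the nursery test (the real putGeneric takes an OrderedHashTableRef, and the real buffer does its own filtering):

    #include <cstdio>
    #include <vector>

    struct Cell { bool inNursery; };               // stand-in for a GC thing

    struct TableRef { void *table; Cell *key; };   // "this table holds this key"

    struct StoreBuffer {                           // stand-in for js::gc::StoreBuffer
        std::vector<TableRef> generic;
        void putGeneric(const TableRef &ref) { generic.push_back(ref); }
    };

    // Shape of WriteBarrierPost above: after inserting key into map, remember
    // the edge so a minor GC can trace the buffered ref and fix up the entry.
    static void WriteBarrierPost(StoreBuffer &sb, void *map, Cell *key) {
        if (key->inNursery)                        // illustrative filter only
            sb.putGeneric(TableRef{map, key});
    }

    int main() {
        StoreBuffer sb;
        Cell young{true}, tenured{false};
        int map;                                   // placeholder table object
        WriteBarrierPost(sb, &map, &young);        // buffered
        WriteBarrierPost(sb, &map, &tenured);      // no post barrier needed
        std::printf("buffered edges: %zu\n", sb.generic.size());   // prints 1
    }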

View File

@ -238,7 +238,7 @@ GC(JSContext *cx, unsigned argc, jsval *vp)
}
#ifndef JS_MORE_DETERMINISTIC
size_t preBytes = cx->runtime()->gc.bytes;
size_t preBytes = cx->runtime()->gcBytes;
#endif
if (compartment)
@ -250,7 +250,7 @@ GC(JSContext *cx, unsigned argc, jsval *vp)
char buf[256] = { '\0' };
#ifndef JS_MORE_DETERMINISTIC
JS_snprintf(buf, sizeof(buf), "before %lu, after %lu\n",
(unsigned long)preBytes, (unsigned long)cx->runtime()->gc.bytes);
(unsigned long)preBytes, (unsigned long)cx->runtime()->gcBytes);
#endif
JSString *str = JS_NewStringCopyZ(cx, buf);
if (!str)
@ -265,7 +265,7 @@ MinorGC(JSContext *cx, unsigned argc, jsval *vp)
CallArgs args = CallArgsFromVp(argc, vp);
#ifdef JSGC_GENERATIONAL
if (args.get(0) == BooleanValue(true))
cx->runtime()->gc.storeBuffer.setAboutToOverflow();
cx->runtime()->gcStoreBuffer.setAboutToOverflow();
MinorGC(cx, gcreason::API);
#endif
@ -445,7 +445,7 @@ GCPreserveCode(JSContext *cx, unsigned argc, jsval *vp)
return false;
}
cx->runtime()->gc.alwaysPreserveCode = true;
cx->runtime()->alwaysPreserveCode = true;
args.rval().setUndefined();
return true;
@ -513,7 +513,7 @@ SelectForGC(JSContext *cx, unsigned argc, Value *vp)
JSRuntime *rt = cx->runtime();
for (unsigned i = 0; i < args.length(); i++) {
if (args[i].isObject()) {
if (!rt->gc.selectedForMarking.append(&args[i].toObject()))
if (!rt->gcSelectedForMarking.append(&args[i].toObject()))
return false;
}
}
@ -564,7 +564,7 @@ GCState(JSContext *cx, unsigned argc, jsval *vp)
}
const char *state;
gc::State globalState = cx->runtime()->gc.incrementalState;
gc::State globalState = cx->runtime()->gcIncrementalState;
if (globalState == gc::NO_INCREMENTAL)
state = "none";
else if (globalState == gc::MARK)
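
A note on the testing functions above: each follows the JSNative calling convention visible in the signatures, bool f(JSContext *cx, unsigned argc, jsval *vp), unpacking arguments through CallArgsFromVp and returning its result via args.rval(). A toy reconstruction of that skeleton with reduced stand-in types (the real ones are JS::Value and JS::CallArgs):

    #include <cstdio>

    struct Value {};                               // stand-in for JS::Value
    struct JSContext {};                           // stand-in context

    struct CallArgs {                              // reduced JS::CallArgs
        Value *vp; unsigned argc; Value result;
        unsigned length() const { return argc; }
        Value &rval() { return result; }
    };
    static CallArgs CallArgsFromVp(unsigned argc, Value *vp) {
        return CallArgs{vp, argc, Value()};
    }

    // Same skeleton as GCPreserveCode above: validate arguments, do the work
    // (here elided), set the return value, and return true on success.
    static bool GCPreserveCode(JSContext *cx, unsigned argc, Value *vp) {
        CallArgs args = CallArgsFromVp(argc, vp);
        if (args.length() != 0)
            return false;                          // the real code reports an error
        // rt->alwaysPreserveCode = true;          // the flag set in the hunk above
        args.rval() = Value();                     // "undefined" stand-in
        return true;
    }

    int main() {
        JSContext cx;
        Value vp[1];
        std::printf("%d\n", GCPreserveCode(&cx, 0, vp));   // prints 1
    }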

View File

@ -124,11 +124,11 @@ class AutoStopVerifyingBarriers
MOZ_GUARD_OBJECT_NOTIFIER_PARAM)
: runtime(rt)
{
restartPreVerifier = !isShutdown && rt->gc.verifyPreData;
restartPostVerifier = !isShutdown && rt->gc.verifyPostData && JS::IsGenerationalGCEnabled(rt);
if (rt->gc.verifyPreData)
restartPreVerifier = !isShutdown && rt->gcVerifyPreData;
restartPostVerifier = !isShutdown && rt->gcVerifyPostData && JS::IsGenerationalGCEnabled(rt);
if (rt->gcVerifyPreData)
EndVerifyPreBarriers(rt);
if (rt->gc.verifyPostData)
if (rt->gcVerifyPostData)
EndVerifyPostBarriers(rt);
MOZ_GUARD_OBJECT_NOTIFIER_INIT;
}

View File

@ -1,371 +0,0 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sts=4 et sw=4 tw=99:
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef gc_GCRuntime_h
#define gc_GCRuntime_h
#include "jsgc.h"
#include "gc/Heap.h"
#ifdef JSGC_GENERATIONAL
# include "gc/Nursery.h"
#endif
#include "gc/Statistics.h"
#ifdef JSGC_GENERATIONAL
# include "gc/StoreBuffer.h"
#endif
namespace js {
namespace gc {
typedef Vector<JS::Zone *, 4, SystemAllocPolicy> ZoneVector;
class MarkingValidator;
struct ConservativeGCData
{
/*
* The GC scans conservatively between ThreadData::nativeStackBase and
* nativeStackTop unless the latter is nullptr.
*/
uintptr_t *nativeStackTop;
union {
jmp_buf jmpbuf;
uintptr_t words[JS_HOWMANY(sizeof(jmp_buf), sizeof(uintptr_t))];
} registerSnapshot;
ConservativeGCData() {
mozilla::PodZero(this);
}
~ConservativeGCData() {
#ifdef JS_THREADSAFE
/*
* The conservative GC scanner should be disabled when the thread leaves
* the last request.
*/
JS_ASSERT(!hasStackToScan());
#endif
}
MOZ_NEVER_INLINE void recordStackTop();
#ifdef JS_THREADSAFE
void updateForRequestEnd() {
nativeStackTop = nullptr;
}
#endif
bool hasStackToScan() const {
return !!nativeStackTop;
}
};
class GCRuntime
{
public:
GCRuntime(JSRuntime *rt);
public: // Internal state, public for now
/* Embedders can use this zone however they wish. */
JS::Zone *systemZone;
/* List of compartments and zones (protected by the GC lock). */
js::gc::ZoneVector zones;
js::gc::SystemPageAllocator pageAllocator;
/*
* Set of all GC chunks with at least one allocated thing. The
* conservative GC uses it to quickly check if a possible GC thing points
* into an allocated chunk.
*/
js::GCChunkSet chunkSet;
/*
* Doubly-linked lists of chunks from user and system compartments. The GC
* allocates its arenas from the corresponding list and when all arenas
* in the list head are taken, then the chunk is removed from the list.
* During the GC when all arenas in a chunk become free, that chunk is
* removed from the list and scheduled for release.
*/
js::gc::Chunk *systemAvailableChunkListHead;
js::gc::Chunk *userAvailableChunkListHead;
js::gc::ChunkPool chunkPool;
js::RootedValueMap rootsHash;
/* This is updated by both the main and GC helper threads. */
mozilla::Atomic<size_t, mozilla::ReleaseAcquire> bytes;
size_t maxBytes;
size_t maxMallocBytes;
/*
* Number of the committed arenas in all GC chunks including empty chunks.
*/
mozilla::Atomic<uint32_t, mozilla::ReleaseAcquire> numArenasFreeCommitted;
js::GCMarker marker;
void *verifyPreData;
void *verifyPostData;
bool chunkAllocationSinceLastGC;
int64_t nextFullGCTime;
int64_t lastGCTime;
int64_t jitReleaseTime;
JSGCMode mode;
size_t allocationThreshold;
bool highFrequencyGC;
uint64_t highFrequencyTimeThreshold;
uint64_t highFrequencyLowLimitBytes;
uint64_t highFrequencyHighLimitBytes;
double highFrequencyHeapGrowthMax;
double highFrequencyHeapGrowthMin;
double lowFrequencyHeapGrowth;
bool dynamicHeapGrowth;
bool dynamicMarkSlice;
uint64_t decommitThreshold;
/* During shutdown, the GC needs to clean up every possible object. */
bool shouldCleanUpEverything;
/*
* The gray bits can become invalid if UnmarkGray overflows the stack. A
* full GC will reset this bit, since it fills in all the gray bits.
*/
bool grayBitsValid;
/*
* These flags must be kept separate so that a thread requesting a
* compartment GC doesn't cancel another thread's concurrent request for a
* full GC.
*/
volatile uintptr_t isNeeded;
js::gcstats::Statistics stats;
/* Incremented on every GC slice. */
uint64_t number;
/* The number at the time of the most recent GC's first slice. */
uint64_t startNumber;
/* Whether the currently running GC can finish in multiple slices. */
bool isIncremental;
/* Whether all compartments are being collected in first GC slice. */
bool isFull;
/* The reason that an interrupt-triggered GC should be called. */
JS::gcreason::Reason triggerReason;
/*
* If this is true, all marked objects must belong to a compartment being
* GCed. This is used to look for compartment bugs.
*/
bool strictCompartmentChecking;
#ifdef DEBUG
/*
* If this is 0, all cross-compartment proxies must be registered in the
* wrapper map. This checking must be disabled temporarily while creating
* new wrappers. When non-zero, this records the recursion depth of wrapper
* creation.
*/
uintptr_t disableStrictProxyCheckingCount;
#else
uintptr_t unused1;
#endif
/*
* The current incremental GC phase. This is also used internally in
* non-incremental GC.
*/
js::gc::State incrementalState;
/* Indicates that the last incremental slice exhausted the mark stack. */
bool lastMarkSlice;
/* Whether any sweeping will take place in the separate GC helper thread. */
bool sweepOnBackgroundThread;
/* Whether any black->gray edges were found during marking. */
bool foundBlackGrayEdges;
/* List head of zones to be swept in the background. */
JS::Zone *sweepingZones;
/* Index of current zone group (for stats). */
unsigned zoneGroupIndex;
/*
* Incremental sweep state.
*/
JS::Zone *zoneGroups;
JS::Zone *currentZoneGroup;
int sweepPhase;
JS::Zone *sweepZone;
int sweepKindIndex;
bool abortSweepAfterCurrentGroup;
/*
* List head of arenas allocated during the sweep phase.
*/
js::gc::ArenaHeader *arenasAllocatedDuringSweep;
#ifdef DEBUG
js::gc::MarkingValidator *markingValidator;
#endif
/*
* Indicates that a GC slice has taken place in the middle of an animation
* frame, rather than at the beginning. In this case, the next slice will be
* delayed so that we don't get back-to-back slices.
*/
volatile uintptr_t interFrameGC;
/* Default budget for incremental GC slice. See SliceBudget in jsgc.h. */
int64_t sliceBudget;
/*
* We disable incremental GC if we encounter a js::Class with a trace hook
* that does not implement write barriers.
*/
bool incrementalEnabled;
/*
* GGC can be enabled from the command line while testing.
*/
unsigned generationalDisabled;
/*
* This is true if we are in the middle of a brain transplant (e.g.,
* JS_TransplantObject) or some other operation that can manipulate
* dead zones.
*/
bool manipulatingDeadZones;
/*
* This field is incremented each time we mark an object inside a
* zone with no incoming cross-compartment pointers. Typically if
* this happens it signals that an incremental GC is marking too much
* stuff. At various times we check this counter and, if it has changed, we
* run an immediate, non-incremental GC to clean up the dead
* zones. This should happen very rarely.
*/
unsigned objectsMarkedInDeadZones;
bool poke;
volatile js::HeapState heapState;
#ifdef JSGC_GENERATIONAL
js::Nursery nursery;
js::gc::StoreBuffer storeBuffer;
#endif
/*
* These options control the zealousness of the GC. The fundamental values
* are nextScheduled and debugCompartmentGC. At every allocation,
* nextScheduled is decremented. When it reaches zero, we do either a
* full or a compartmental GC, based on debugCompartmentGC.
*
* At this point, if zeal_ is one of the types that trigger periodic
* collection, then nextScheduled is reset to the value of
* zealFrequency. Otherwise, no additional GCs take place.
*
* You can control these values in several ways:
* - Pass the -Z flag to the shell (see the usage info for details)
* - Call zeal() or schedulegc() from inside shell-executed JS code
* (see the help for details)
*
* If zeal_ == 1 then we perform GCs in select places (during MaybeGC and
* whenever a GC poke happens). This option is mainly useful to embedders.
*
* We use zeal_ == 4 to enable write barrier verification. See the comment
* in jsgc.cpp for more information about this.
*
* zeal_ values from 8 to 10 periodically run different types of
* incremental GC.
*/
#ifdef JS_GC_ZEAL
int zealMode;
int zealFrequency;
int nextScheduled;
bool deterministicOnly;
int incrementalLimit;
js::Vector<JSObject *, 0, js::SystemAllocPolicy> selectedForMarking;
#endif
bool validate;
bool fullCompartmentChecks;
JSGCCallback callback;
JS::GCSliceCallback sliceCallback;
JSFinalizeCallback finalizeCallback;
void *callbackData;
/*
* Malloc counter to measure memory pressure for GC scheduling. It runs
* from maxMallocBytes down to zero.
*/
mozilla::Atomic<ptrdiff_t, mozilla::ReleaseAcquire> mallocBytes;
/*
* Whether a GC has been triggered as a result of mallocBytes falling
* below zero.
*/
mozilla::Atomic<bool, mozilla::ReleaseAcquire> mallocGCTriggered;
/*
* The trace operations to trace embedding-specific GC roots. One is for
* tracing through black roots and the other is for tracing through gray
* roots. The black/gray distinction is only relevant to the cycle
* collector.
*/
typedef js::Vector<ExtraTracer, 4, js::SystemAllocPolicy> ExtraTracerVector;
ExtraTracerVector blackRootTracers;
ExtraTracer grayRootTracer;
/*
* The GC can only safely decommit memory when the page size of the
* running process matches the compiled arena size.
*/
size_t systemPageSize;
/* The OS allocation granularity may not match the page size. */
size_t systemAllocGranularity;
/* Strong references on scripts held for PCCount profiling API. */
js::ScriptAndCountsVector *scriptAndCountsVector;
/* Always preserve JIT code during GCs, for testing. */
bool alwaysPreserveCode;
#ifdef DEBUG
size_t noGCOrAllocationCheck;
#endif
/* Synchronize GC heap access between main thread and GCHelperThread. */
PRLock *lock;
mozilla::DebugOnly<PRThread *> lockOwner;
friend class js::GCHelperThread;
js::GCHelperThread helperThread;
ConservativeGCData conservativeGC;
};
} /* namespace gc */
} /* namespace js */
#endif
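
The deleted header above is the core of the backed-out patch: it collected dozens of formerly flat JSRuntime members into a single class embedded as rt->gc (still public "for now", per its own comment). A hypothetical sketch of where that design goes once the state is private behind accessors, so renames like this diff stop rippling across the tree; the method names here are illustrative, not from the patch:

    #include <cstddef>

    class GCRuntime {
      public:
        size_t bytesAllocated() const { return bytes; }
        void countAllocation(size_t n) { bytes += n; }
        bool isIncremental() const { return incrementalState != 0; }
      private:
        size_t bytes = 0;
        int incrementalState = 0;   // stand-in for js::gc::State
    };

    struct JSRuntime { GCRuntime gc; };

    int main() {
        JSRuntime rt;
        rt.gc.countAllocation(4096);
        return rt.gc.bytesAllocated() == 4096 ? 0 : 1;
    }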

View File

@ -84,7 +84,7 @@ js::IterateChunks(JSRuntime *rt, void *data, IterateChunkCallback chunkCallback)
{
AutoPrepareForTracing prep(rt, SkipAtoms);
for (js::GCChunkSet::Range r = rt->gc.chunkSet.all(); !r.empty(); r.popFront())
for (js::GCChunkSet::Range r = rt->gcChunkSet.all(); !r.empty(); r.popFront())
chunkCallback(rt, data, r.front());
}

View File

@ -169,7 +169,7 @@ CheckMarkedThing(JSTracer *trc, T *thing)
DebugOnly<JSRuntime *> rt = trc->runtime();
JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc) && rt->gc.manipulatingDeadZones,
JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc) && rt->gcManipulatingDeadZones,
!thing->zone()->scheduledForDestruction);
JS_ASSERT(CurrentThreadCanAccessRuntime(rt));
@ -181,7 +181,7 @@ CheckMarkedThing(JSTracer *trc, T *thing)
JS_ASSERT(MapTypeToTraceKind<T>::kind == GetGCThingTraceKind(thing));
JS_ASSERT_IF(rt->gc.strictCompartmentChecking,
JS_ASSERT_IF(rt->gcStrictCompartmentChecking,
thing->zone()->isCollecting() || rt->isAtomsZone(thing->zone()));
JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc) && AsGCMarker(trc)->getMarkColor() == GRAY,
@ -247,8 +247,8 @@ MarkInternal(JSTracer *trc, T **thingp)
#define JS_ROOT_MARKING_ASSERT(trc) \
JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc), \
trc->runtime()->gc.incrementalState == NO_INCREMENTAL || \
trc->runtime()->gc.incrementalState == MARK_ROOTS);
trc->runtime()->gcIncrementalState == NO_INCREMENTAL || \
trc->runtime()->gcIncrementalState == MARK_ROOTS);
namespace js {
namespace gc {
@ -339,7 +339,7 @@ IsMarked(T **thingp)
JS_ASSERT(thingp);
JS_ASSERT(*thingp);
#ifdef JSGC_GENERATIONAL
Nursery &nursery = (*thingp)->runtimeFromMainThread()->gc.nursery;
Nursery &nursery = (*thingp)->runtimeFromMainThread()->gcNursery;
if (nursery.isInside(*thingp))
return nursery.getForwardedPointer(thingp);
#endif
@ -364,7 +364,7 @@ IsAboutToBeFinalized(T **thingp)
return false;
#ifdef JSGC_GENERATIONAL
Nursery &nursery = rt->gc.nursery;
Nursery &nursery = rt->gcNursery;
JS_ASSERT_IF(!rt->isHeapMinorCollecting(), !nursery.isInside(thing));
if (rt->isHeapMinorCollecting()) {
if (nursery.isInside(thing))
@ -394,8 +394,8 @@ UpdateIfRelocated(JSRuntime *rt, T **thingp)
{
JS_ASSERT(thingp);
#ifdef JSGC_GENERATIONAL
if (*thingp && rt->isHeapMinorCollecting() && rt->gc.nursery.isInside(*thingp))
rt->gc.nursery.getForwardedPointer(thingp);
if (*thingp && rt->isHeapMinorCollecting() && rt->gcNursery.isInside(*thingp))
rt->gcNursery.getForwardedPointer(thingp);
#endif
return *thingp;
}
@ -784,7 +784,7 @@ ShouldMarkCrossCompartment(JSTracer *trc, JSObject *src, Cell *cell)
*/
if (cell->isMarked(GRAY)) {
JS_ASSERT(!zone->isCollecting());
trc->runtime()->gc.foundBlackGrayEdges = true;
trc->runtime()->gcFoundBlackGrayEdges = true;
}
return zone->isGCMarking();
} else {
@ -1540,7 +1540,7 @@ GCMarker::processMarkStackTop(SliceBudget &budget)
// if the global has no custom trace hook of its own, or has been moved to a different
// compartment, and so can't have one.
JS_ASSERT_IF(runtime()->gcMode() == JSGC_MODE_INCREMENTAL &&
runtime()->gc.incrementalEnabled &&
runtime()->gcIncrementalEnabled &&
!(clasp->trace == JS_GlobalObjectTraceHook &&
(!obj->compartment()->options().getTrace() ||
!obj->isOwnGlobal())),
@ -1586,10 +1586,10 @@ GCMarker::drainMarkStack(SliceBudget &budget)
struct AutoCheckCompartment {
JSRuntime *runtime;
AutoCheckCompartment(JSRuntime *rt) : runtime(rt) {
JS_ASSERT(!rt->gc.strictCompartmentChecking);
runtime->gc.strictCompartmentChecking = true;
JS_ASSERT(!rt->gcStrictCompartmentChecking);
runtime->gcStrictCompartmentChecking = true;
}
~AutoCheckCompartment() { runtime->gc.strictCompartmentChecking = false; }
~AutoCheckCompartment() { runtime->gcStrictCompartmentChecking = false; }
} acc(rt);
#endif
@ -1739,7 +1739,7 @@ UnmarkGrayChildren(JSTracer *trc, void **thingp, JSGCTraceKind kind)
* If we run out of stack, we take a more drastic measure: require that
* we GC again before the next CC.
*/
trc->runtime()->gc.grayBitsValid = false;
trc->runtime()->gcGrayBitsValid = false;
return;
}

View File

@ -53,7 +53,7 @@ js::Nursery::init()
if (!hugeSlots.init())
return false;
void *heap = runtime()->gc.pageAllocator.mapAlignedPages(NurserySize, Alignment);
void *heap = runtime()->pageAllocator.mapAlignedPages(NurserySize, Alignment);
if (!heap)
return false;
@ -79,7 +79,7 @@ js::Nursery::init()
js::Nursery::~Nursery()
{
if (start())
runtime()->gc.pageAllocator.unmapPages((void *)start(), NurserySize);
runtime()->pageAllocator.unmapPages((void *)start(), NurserySize);
}
void
@ -108,7 +108,7 @@ js::Nursery::enable()
setCurrentChunk(0);
currentStart_ = position();
#ifdef JS_GC_ZEAL
if (runtime()->gc.zealMode == ZealGenerationalGCValue)
if (runtime()->gcZeal_ == ZealGenerationalGCValue)
enterZealMode();
#endif
}
@ -130,7 +130,7 @@ js::Nursery::isEmpty() const
JS_ASSERT(runtime_);
if (!isEnabled())
return true;
JS_ASSERT_IF(runtime_->gc.zealMode != ZealGenerationalGCValue, currentStart_ == start());
JS_ASSERT_IF(runtime_->gcZeal_ != ZealGenerationalGCValue, currentStart_ == start());
return position() == currentStart_;
}
@ -324,7 +324,7 @@ class MinorCollectionTracer : public JSTracer
savedRuntimeNeedBarrier(rt->needsBarrier()),
disableStrictProxyChecking(rt)
{
rt->gc.number++;
rt->gcNumber++;
/*
* We disable the runtime needsBarrier() check so that pre-barriers do
@ -341,7 +341,7 @@ class MinorCollectionTracer : public JSTracer
* sweep their dead views. Incremental collection also use these lists,
* so we may need to save and restore their contents here.
*/
if (rt->gc.incrementalState != NO_INCREMENTAL) {
if (rt->gcIncrementalState != NO_INCREMENTAL) {
for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
if (!ArrayBufferObject::saveArrayBufferList(c, liveArrayBuffers))
CrashAtUnhandlableOOM("OOM while saving live array buffers");
@ -352,7 +352,7 @@ class MinorCollectionTracer : public JSTracer
~MinorCollectionTracer() {
runtime()->setNeedsBarrier(savedRuntimeNeedBarrier);
if (runtime()->gc.incrementalState != NO_INCREMENTAL)
if (runtime()->gcIncrementalState != NO_INCREMENTAL)
ArrayBufferObject::restoreArrayBufferLists(liveArrayBuffers);
}
};
@ -740,7 +740,7 @@ js::Nursery::collect(JSRuntime *rt, JS::gcreason::Reason reason, TypeObjectList
if (isEmpty())
return;
rt->gc.stats.count(gcstats::STAT_MINOR_GC);
rt->gcStats.count(gcstats::STAT_MINOR_GC);
TIME_START(total);
@ -750,7 +750,7 @@ js::Nursery::collect(JSRuntime *rt, JS::gcreason::Reason reason, TypeObjectList
MinorCollectionTracer trc(rt, this);
// Mark the store buffer. This must happen first.
StoreBuffer &sb = rt->gc.storeBuffer;
StoreBuffer &sb = rt->gcStoreBuffer;
TIME_START(markValues);
sb.markValues(&trc);
TIME_END(markValues);
@ -852,13 +852,13 @@ js::Nursery::collect(JSRuntime *rt, JS::gcreason::Reason reason, TypeObjectList
TIME_END(sweep);
TIME_START(clearStoreBuffer);
rt->gc.storeBuffer.clear();
rt->gcStoreBuffer.clear();
TIME_END(clearStoreBuffer);
// We ignore gcMaxBytes when allocating for minor collection. However, if we
// overflowed, we disable the nursery. The next time we allocate, we'll fail
// because gcBytes >= gcMaxBytes.
if (rt->gc.bytes >= rt->gc.maxBytes)
if (rt->gcBytes >= rt->gcMaxBytes)
disable();
TIME_END(total);
@ -922,7 +922,7 @@ js::Nursery::sweep(JSRuntime *rt)
for (int i = 0; i < NumNurseryChunks; ++i)
initChunk(i);
if (rt->gc.zealMode == ZealGenerationalGCValue) {
if (rt->gcZeal_ == ZealGenerationalGCValue) {
MOZ_ASSERT(numActiveChunks_ == NumNurseryChunks);
/* Only reset the alloc point when we are close to the end. */
@ -947,8 +947,7 @@ void
js::Nursery::growAllocableSpace()
{
#ifdef JS_GC_ZEAL
MOZ_ASSERT_IF(runtime()->gc.zealMode == ZealGenerationalGCValue,
numActiveChunks_ == NumNurseryChunks);
MOZ_ASSERT_IF(runtime()->gcZeal_ == ZealGenerationalGCValue, numActiveChunks_ == NumNurseryChunks);
#endif
numActiveChunks_ = Min(numActiveChunks_ * 2, NumNurseryChunks);
}
@ -957,7 +956,7 @@ void
js::Nursery::shrinkAllocableSpace()
{
#ifdef JS_GC_ZEAL
if (runtime()->gc.zealMode == ZealGenerationalGCValue)
if (runtime()->gcZeal_ == ZealGenerationalGCValue)
return;
#endif
numActiveChunks_ = Max(numActiveChunks_ - 1, 1);
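
The last two hunks above implement the nursery's adaptive sizing policy: growing doubles the active chunk count up to NumNurseryChunks (zeal mode pins it at the maximum), and shrinking steps down one chunk at a time with a floor of one. A worked toy version, assuming 16 chunks purely for illustration:

    #include <algorithm>
    #include <cstdio>

    static const int NumNurseryChunks = 16;   // assumed value, for illustration

    // Mirrors growAllocableSpace()/shrinkAllocableSpace() above.
    static int grow(int active)   { return std::min(active * 2, NumNurseryChunks); }
    static int shrink(int active) { return std::max(active - 1, 1); }

    int main() {
        int n = 1;
        for (int i = 0; i < 6; ++i) {
            n = grow(n);
            std::printf("grow -> %d\n", n);   // 2, 4, 8, 16, 16, 16: capped
        }
        for (int i = 0; i < 20; ++i)
            n = shrink(n);                    // one chunk per step, floor of 1
        std::printf("after shrinking: %d\n", n);   // prints 1
    }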

View File

@ -143,7 +143,7 @@ IsAddressableGCThing(JSRuntime *rt, uintptr_t w,
Chunk *chunk = Chunk::fromAddress(addr);
if (!rt->gc.chunkSet.has(chunk))
if (!rt->gcChunkSet.has(chunk))
return CGCT_NOTCHUNK;
/*
@ -223,7 +223,7 @@ MarkIfGCThingWord(JSTracer *trc, uintptr_t w)
JS_ASSERT(tmp == thing);
#ifdef DEBUG
if (trc->runtime()->gc.incrementalState == MARK_ROOTS)
if (trc->runtime()->gcIncrementalState == MARK_ROOTS)
trc->runtime()->mainThread.gcSavedRoots.append(
PerThreadData::SavedGCRoot(thing, traceKind));
#endif
@ -691,7 +691,7 @@ js::gc::MarkRuntime(JSTracer *trc, bool useSavedRoots)
rt->markSelfHostingGlobal(trc);
}
for (RootRange r = rt->gc.rootsHash.all(); !r.empty(); r.popFront()) {
for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront()) {
const RootEntry &entry = r.front();
const char *name = entry.value().name ? entry.value().name : "root";
JSGCRootType type = entry.value().type;
@ -712,8 +712,8 @@ js::gc::MarkRuntime(JSTracer *trc, bool useSavedRoots)
MarkPersistentRootedChains(trc);
if (rt->gc.scriptAndCountsVector) {
ScriptAndCountsVector &vec = *rt->gc.scriptAndCountsVector;
if (rt->scriptAndCountsVector) {
ScriptAndCountsVector &vec = *rt->scriptAndCountsVector;
for (size_t i = 0; i < vec.length(); i++)
MarkScriptRoot(trc, &vec[i].script, "scriptAndCountsVector");
}
@ -788,15 +788,15 @@ js::gc::MarkRuntime(JSTracer *trc, bool useSavedRoots)
* the nursery should be in the store buffer, and we want to avoid the
* time taken to trace all these roots.
*/
for (size_t i = 0; i < rt->gc.blackRootTracers.length(); i++) {
const ExtraTracer &e = rt->gc.blackRootTracers[i];
for (size_t i = 0; i < rt->gcBlackRootTracers.length(); i++) {
const JSRuntime::ExtraTracer &e = rt->gcBlackRootTracers[i];
(*e.op)(trc, e.data);
}
/* During GC, we don't mark gray roots at this stage. */
if (JSTraceDataOp op = rt->gc.grayRootTracer.op) {
if (JSTraceDataOp op = rt->gcGrayRootTracer.op) {
if (!IS_GC_MARKING_TRACER(trc))
(*op)(trc, rt->gc.grayRootTracer.data);
(*op)(trc, rt->gcGrayRootTracer.data);
}
}
}
@ -806,7 +806,7 @@ js::gc::BufferGrayRoots(GCMarker *gcmarker)
{
JSRuntime *rt = gcmarker->runtime();
gcmarker->startBufferingGrayRoots();
if (JSTraceDataOp op = rt->gc.grayRootTracer.op)
(*op)(gcmarker, rt->gc.grayRootTracer.data);
if (JSTraceDataOp op = rt->gcGrayRootTracer.op)
(*op)(gcmarker, rt->gcGrayRootTracer.data);
gcmarker->endBufferingGrayRoots();
}

View File

@ -521,7 +521,7 @@ Statistics::beginGC()
sccTimes.clearAndFree();
nonincrementalReason = nullptr;
preBytes = runtime->gc.bytes;
preBytes = runtime->gcBytes;
}
void
@ -547,7 +547,7 @@ Statistics::endGC()
(*cb)(JS_TELEMETRY_GC_MARK_ROOTS_MS, t(phaseTimes[PHASE_MARK_ROOTS]));
(*cb)(JS_TELEMETRY_GC_MARK_GRAY_MS, t(phaseTimes[PHASE_SWEEP_MARK_GRAY]));
(*cb)(JS_TELEMETRY_GC_NON_INCREMENTAL, !!nonincrementalReason);
(*cb)(JS_TELEMETRY_GC_INCREMENTAL_DISABLED, !runtime->gc.incrementalEnabled);
(*cb)(JS_TELEMETRY_GC_INCREMENTAL_DISABLED, !runtime->gcIncrementalEnabled);
(*cb)(JS_TELEMETRY_GC_SCC_SWEEP_TOTAL_MS, t(sccTotal));
(*cb)(JS_TELEMETRY_GC_SCC_SWEEP_MAX_PAUSE_MS, t(sccLongest));
@ -567,7 +567,7 @@ Statistics::beginSlice(int collectedCount, int zoneCount, int compartmentCount,
this->zoneCount = zoneCount;
this->compartmentCount = compartmentCount;
bool first = runtime->gc.incrementalState == gc::NO_INCREMENTAL;
bool first = runtime->gcIncrementalState == gc::NO_INCREMENTAL;
if (first)
beginGC();
@ -580,7 +580,7 @@ Statistics::beginSlice(int collectedCount, int zoneCount, int compartmentCount,
// Slice callbacks should only fire for the outermost level
if (++gcDepth == 1) {
bool wasFullGC = collectedCount == zoneCount;
if (JS::GCSliceCallback cb = runtime->gc.sliceCallback)
if (JS::GCSliceCallback cb = runtime->gcSliceCallback)
(*cb)(runtime, first ? JS::GC_CYCLE_BEGIN : JS::GC_SLICE_BEGIN,
JS::GCDescription(!wasFullGC));
}
@ -597,14 +597,14 @@ Statistics::endSlice()
(*cb)(JS_TELEMETRY_GC_RESET, !!slices.back().resetReason);
}
bool last = runtime->gc.incrementalState == gc::NO_INCREMENTAL;
bool last = runtime->gcIncrementalState == gc::NO_INCREMENTAL;
if (last)
endGC();
// Slice callbacks should only fire for the outermost level
if (--gcDepth == 0) {
bool wasFullGC = collectedCount == zoneCount;
if (JS::GCSliceCallback cb = runtime->gc.sliceCallback)
if (JS::GCSliceCallback cb = runtime->gcSliceCallback)
(*cb)(runtime, last ? JS::GC_CYCLE_END : JS::GC_SLICE_END,
JS::GCDescription(!wasFullGC));
}
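
One idiom in the beginSlice/endSlice hunks above deserves a note: the embedder's slice callback fires only at the outermost nesting level (tracked by gcDepth), and the first and last slices of a cycle are tagged GC_CYCLE_BEGIN/GC_CYCLE_END instead of GC_SLICE_BEGIN/GC_SLICE_END. A toy reduction of that gating (the real first/last flags come from gcIncrementalState):

    #include <cstdio>

    enum Progress { CYCLE_BEGIN, SLICE_BEGIN, SLICE_END, CYCLE_END };
    typedef void (*SliceCallback)(Progress);

    static SliceCallback sliceCallback;   // stand-in for rt->gcSliceCallback
    static int gcDepth;

    static void beginSlice(bool firstSliceOfCycle) {
        if (++gcDepth == 1 && sliceCallback)   // outermost level only
            sliceCallback(firstSliceOfCycle ? CYCLE_BEGIN : SLICE_BEGIN);
    }
    static void endSlice(bool lastSliceOfCycle) {
        if (--gcDepth == 0 && sliceCallback)
            sliceCallback(lastSliceOfCycle ? CYCLE_END : SLICE_END);
    }

    static void report(Progress p) { std::printf("progress %d\n", p); }

    int main() {
        sliceCallback = report;
        beginSlice(true);    // fires CYCLE_BEGIN
        beginSlice(false);   // nested: suppressed
        endSlice(false);     // nested: suppressed
        endSlice(true);      // fires CYCLE_END
    }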

View File

@ -26,7 +26,7 @@ StoreBuffer::SlotsEdge::mark(JSTracer *trc)
{
JSObject *obj = object();
if (trc->runtime()->gc.nursery.isInside(obj))
if (trc->runtime()->gcNursery.isInside(obj))
return;
if (!obj->isNative()) {
@ -337,7 +337,7 @@ JS::HeapCellPostBarrier(js::gc::Cell **cellp)
{
JS_ASSERT(*cellp);
JSRuntime *runtime = (*cellp)->runtimeFromMainThread();
runtime->gc.storeBuffer.putRelocatableCell(cellp);
runtime->gcStoreBuffer.putRelocatableCell(cellp);
}
JS_PUBLIC_API(void)
@ -346,7 +346,7 @@ JS::HeapCellRelocate(js::gc::Cell **cellp)
/* Called with old contents of *pp before overwriting. */
JS_ASSERT(*cellp);
JSRuntime *runtime = (*cellp)->runtimeFromMainThread();
runtime->gc.storeBuffer.removeRelocatableCell(cellp);
runtime->gcStoreBuffer.removeRelocatableCell(cellp);
}
JS_PUBLIC_API(void)
@ -356,7 +356,7 @@ JS::HeapValuePostBarrier(JS::Value *valuep)
if (valuep->isString() && StringIsPermanentAtom(valuep->toString()))
return;
JSRuntime *runtime = static_cast<js::gc::Cell *>(valuep->toGCThing())->runtimeFromMainThread();
runtime->gc.storeBuffer.putRelocatableValue(valuep);
runtime->gcStoreBuffer.putRelocatableValue(valuep);
}
JS_PUBLIC_API(void)
@ -367,7 +367,7 @@ JS::HeapValueRelocate(JS::Value *valuep)
if (valuep->isString() && StringIsPermanentAtom(valuep->toString()))
return;
JSRuntime *runtime = static_cast<js::gc::Cell *>(valuep->toGCThing())->runtimeFromMainThread();
runtime->gc.storeBuffer.removeRelocatableValue(valuep);
runtime->gcStoreBuffer.removeRelocatableValue(valuep);
}
template class StoreBuffer::MonoTypeBuffer<StoreBuffer::ValueEdge>;

View File

@ -531,8 +531,8 @@ bool
GCMarker::markDelayedChildren(SliceBudget &budget)
{
gcstats::MaybeAutoPhase ap;
if (runtime()->gc.incrementalState == MARK)
ap.construct(runtime()->gc.stats, gcstats::PHASE_MARK_DELAYED);
if (runtime()->gcIncrementalState == MARK)
ap.construct(runtime()->gcStats, gcstats::PHASE_MARK_DELAYED);
JS_ASSERT(unmarkedArenaStackTop);
do {
@ -669,6 +669,6 @@ js::SetMarkStackLimit(JSRuntime *rt, size_t limit)
{
JS_ASSERT(!rt->isHeapBusy());
AutoStopVerifyingBarriers pauseVerification(rt, false);
rt->gc.marker.setMaxCapacity(limit);
rt->gcMarker.setMaxCapacity(limit);
}

View File

@ -103,7 +103,7 @@ struct VerifyPreTracer : JSTracer
NodeMap nodemap;
VerifyPreTracer(JSRuntime *rt, JSTraceCallback callback)
: JSTracer(rt, callback), noggc(rt), number(rt->gc.number), count(0), root(nullptr)
: JSTracer(rt, callback), noggc(rt), number(rt->gcNumber), count(0), root(nullptr)
{}
~VerifyPreTracer() {
@ -171,7 +171,7 @@ NextNode(VerifyNode *node)
void
gc::StartVerifyPreBarriers(JSRuntime *rt)
{
if (rt->gc.verifyPreData || rt->gc.incrementalState != NO_INCREMENTAL)
if (rt->gcVerifyPreData || rt->gcIncrementalState != NO_INCREMENTAL)
return;
/*
@ -180,7 +180,7 @@ gc::StartVerifyPreBarriers(JSRuntime *rt)
* starting the pre barrier verifier if the post barrier verifier is already
* running.
*/
if (rt->gc.verifyPostData)
if (rt->gcVerifyPostData)
return;
MinorGC(rt, JS::gcreason::EVICT_NURSERY);
@ -190,10 +190,10 @@ gc::StartVerifyPreBarriers(JSRuntime *rt)
if (!IsIncrementalGCSafe(rt))
return;
for (GCChunkSet::Range r(rt->gc.chunkSet.all()); !r.empty(); r.popFront())
for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
r.front()->bitmap.clear();
rt->gc.number++;
rt->gcNumber++;
VerifyPreTracer *trc = js_new<VerifyPreTracer>(rt, JSTraceCallback(nullptr));
if (!trc)
@ -219,7 +219,7 @@ gc::StartVerifyPreBarriers(JSRuntime *rt)
trc->curnode = MakeNode(trc, nullptr, JSGCTraceKind(0));
/* We want MarkRuntime to save the roots to gcSavedRoots. */
rt->gc.incrementalState = MARK_ROOTS;
rt->gcIncrementalState = MARK_ROOTS;
/* Make all the roots be edges emanating from the root node. */
MarkRuntime(trc);
@ -245,9 +245,9 @@ gc::StartVerifyPreBarriers(JSRuntime *rt)
node = NextNode(node);
}
rt->gc.verifyPreData = trc;
rt->gc.incrementalState = MARK;
rt->gc.marker.start();
rt->gcVerifyPreData = trc;
rt->gcIncrementalState = MARK;
rt->gcMarker.start();
rt->setNeedsBarrier(true);
for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
@ -259,9 +259,9 @@ gc::StartVerifyPreBarriers(JSRuntime *rt)
return;
oom:
rt->gc.incrementalState = NO_INCREMENTAL;
rt->gcIncrementalState = NO_INCREMENTAL;
js_delete(trc);
rt->gc.verifyPreData = nullptr;
rt->gcVerifyPreData = nullptr;
}
static bool
@ -323,7 +323,7 @@ gc::EndVerifyPreBarriers(JSRuntime *rt)
AutoPrepareForTracing prep(rt, SkipAtoms);
VerifyPreTracer *trc = (VerifyPreTracer *)rt->gc.verifyPreData;
VerifyPreTracer *trc = (VerifyPreTracer *)rt->gcVerifyPreData;
if (!trc)
return;
@ -344,11 +344,11 @@ gc::EndVerifyPreBarriers(JSRuntime *rt)
* We need to bump gcNumber so that the methodjit knows that jitcode has
* been discarded.
*/
JS_ASSERT(trc->number == rt->gc.number);
rt->gc.number++;
JS_ASSERT(trc->number == rt->gcNumber);
rt->gcNumber++;
rt->gc.verifyPreData = nullptr;
rt->gc.incrementalState = NO_INCREMENTAL;
rt->gcVerifyPreData = nullptr;
rt->gcIncrementalState = NO_INCREMENTAL;
if (!compartmentCreated && IsIncrementalGCSafe(rt)) {
trc->setTraceCallback(CheckEdge);
@ -368,8 +368,8 @@ gc::EndVerifyPreBarriers(JSRuntime *rt)
}
}
rt->gc.marker.reset();
rt->gc.marker.stop();
rt->gcMarker.reset();
rt->gcMarker.stop();
js_delete(trc);
}
@ -389,7 +389,7 @@ struct VerifyPostTracer : JSTracer
EdgeSet *edges;
VerifyPostTracer(JSRuntime *rt, JSTraceCallback callback)
: JSTracer(rt, callback), number(rt->gc.number), count(0)
: JSTracer(rt, callback), number(rt->gcNumber), count(0)
{}
};
@ -402,21 +402,21 @@ void
gc::StartVerifyPostBarriers(JSRuntime *rt)
{
#ifdef JSGC_GENERATIONAL
if (rt->gc.verifyPostData ||
rt->gc.incrementalState != NO_INCREMENTAL)
if (rt->gcVerifyPostData ||
rt->gcIncrementalState != NO_INCREMENTAL)
{
return;
}
MinorGC(rt, JS::gcreason::EVICT_NURSERY);
rt->gc.number++;
rt->gcNumber++;
VerifyPostTracer *trc = js_new<VerifyPostTracer>(rt, JSTraceCallback(nullptr));
if (!trc)
return;
rt->gc.verifyPostData = trc;
rt->gcVerifyPostData = trc;
#endif
}
@ -432,7 +432,7 @@ PostVerifierCollectStoreBufferEdges(JSTracer *jstrc, void **thingp, JSGCTraceKin
/* The store buffer may store extra, non-cross-generational edges. */
JSObject *dst = *reinterpret_cast<JSObject **>(thingp);
if (trc->runtime()->gc.nursery.isInside(thingp) || !trc->runtime()->gc.nursery.isInside(dst))
if (trc->runtime()->gcNursery.isInside(thingp) || !trc->runtime()->gcNursery.isInside(dst))
return;
/*
@ -468,9 +468,9 @@ PostVerifierVisitEdge(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
return;
/* Filter out non-cross-generational edges. */
JS_ASSERT(!trc->runtime()->gc.nursery.isInside(thingp));
JS_ASSERT(!trc->runtime()->gcNursery.isInside(thingp));
JSObject *dst = *reinterpret_cast<JSObject **>(thingp);
if (!trc->runtime()->gc.nursery.isInside(dst))
if (!trc->runtime()->gcNursery.isInside(dst))
return;
/*
@ -492,14 +492,14 @@ js::gc::EndVerifyPostBarriers(JSRuntime *rt)
VerifyPostTracer::EdgeSet edges;
AutoPrepareForTracing prep(rt, SkipAtoms);
VerifyPostTracer *trc = (VerifyPostTracer *)rt->gc.verifyPostData;
VerifyPostTracer *trc = (VerifyPostTracer *)rt->gcVerifyPostData;
/* Visit every entry in the store buffer and put the edges in a hash set. */
trc->setTraceCallback(PostVerifierCollectStoreBufferEdges);
if (!edges.init())
goto oom;
trc->edges = &edges;
rt->gc.storeBuffer.markAll(trc);
rt->gcStoreBuffer.markAll(trc);
/* Walk the heap to find any edges not in the |edges| set. */
trc->setTraceCallback(PostVerifierVisitEdge);
@ -514,7 +514,7 @@ js::gc::EndVerifyPostBarriers(JSRuntime *rt)
oom:
js_delete(trc);
rt->gc.verifyPostData = nullptr;
rt->gcVerifyPostData = nullptr;
#endif
}
@ -523,7 +523,7 @@ oom:
static void
VerifyPreBarriers(JSRuntime *rt)
{
if (rt->gc.verifyPreData)
if (rt->gcVerifyPreData)
EndVerifyPreBarriers(rt);
else
StartVerifyPreBarriers(rt);
@ -532,7 +532,7 @@ VerifyPreBarriers(JSRuntime *rt)
static void
VerifyPostBarriers(JSRuntime *rt)
{
if (rt->gc.verifyPostData)
if (rt->gcVerifyPostData)
EndVerifyPostBarriers(rt);
else
StartVerifyPostBarriers(rt);
@ -556,8 +556,8 @@ MaybeVerifyPreBarriers(JSRuntime *rt, bool always)
if (rt->mainThread.suppressGC)
return;
if (VerifyPreTracer *trc = (VerifyPreTracer *)rt->gc.verifyPreData) {
if (++trc->count < rt->gc.zealFrequency && !always)
if (VerifyPreTracer *trc = (VerifyPreTracer *)rt->gcVerifyPreData) {
if (++trc->count < rt->gcZealFrequency && !always)
return;
EndVerifyPreBarriers(rt);
@ -573,11 +573,11 @@ MaybeVerifyPostBarriers(JSRuntime *rt, bool always)
if (rt->gcZeal() != ZealVerifierPostValue)
return;
if (rt->mainThread.suppressGC || !rt->gc.storeBuffer.isEnabled())
if (rt->mainThread.suppressGC || !rt->gcStoreBuffer.isEnabled())
return;
if (VerifyPostTracer *trc = (VerifyPostTracer *)rt->gc.verifyPostData) {
if (++trc->count < rt->gc.zealFrequency && !always)
if (VerifyPostTracer *trc = (VerifyPostTracer *)rt->gcVerifyPostData) {
if (++trc->count < rt->gcZealFrequency && !always)
return;
EndVerifyPostBarriers(rt);
@ -596,14 +596,14 @@ js::gc::MaybeVerifyBarriers(JSContext *cx, bool always)
void
js::gc::FinishVerifier(JSRuntime *rt)
{
if (VerifyPreTracer *trc = (VerifyPreTracer *)rt->gc.verifyPreData) {
if (VerifyPreTracer *trc = (VerifyPreTracer *)rt->gcVerifyPreData) {
js_delete(trc);
rt->gc.verifyPreData = nullptr;
rt->gcVerifyPreData = nullptr;
}
#ifdef JSGC_GENERATIONAL
if (VerifyPostTracer *trc = (VerifyPostTracer *)rt->gc.verifyPostData) {
if (VerifyPostTracer *trc = (VerifyPostTracer *)rt->gcVerifyPostData) {
js_delete(trc);
rt->gc.verifyPostData = nullptr;
rt->gcVerifyPostData = nullptr;
}
#endif
}
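
The file above is built around a toggle: StartVerifyPreBarriers snapshots the heap and parks a VerifyPreTracer in gcVerifyPreData, EndVerifyPreBarriers checks the recorded edges and tears the tracer down, and MaybeVerifyPreBarriers alternates the two once the tracer's count passes gcZealFrequency. A toy reduction of that toggle, with the snapshotting and edge checking elided:

    struct Verifier { int count; };

    static Verifier *verifyPreData;        // null when no epoch is open
    static const int zealFrequency = 100;  // stand-in for rt->gcZealFrequency

    static void StartVerifyPreBarriers() { verifyPreData = new Verifier{0}; }
    static void EndVerifyPreBarriers()   { delete verifyPreData; verifyPreData = nullptr; }

    // Each call either extends the open epoch or closes it and starts the
    // next one, so verification runs continuously while the zeal is active.
    static void MaybeVerifyPreBarriers(bool always) {
        if (Verifier *trc = verifyPreData) {
            if (++trc->count < zealFrequency && !always)
                return;
            EndVerifyPreBarriers();
        }
        StartVerifyPreBarriers();
    }

    int main() {
        MaybeVerifyPreBarriers(false);     // opens the first epoch
        for (int i = 0; i < 200; ++i)
            MaybeVerifyPreBarriers(false); // rolls over after 100 calls
        EndVerifyPreBarriers();            // clean shutdown, as FinishVerifier does
        return 0;
    }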

View File

@ -22,7 +22,7 @@ using namespace js;
using namespace js::gc;
JS::Zone::Zone(JSRuntime *rt)
: JS::shadow::Zone(rt, &rt->gc.marker),
: JS::shadow::Zone(rt, &rt->gcMarker),
allocator(this),
ionUsingBarriers_(false),
active(false),
@ -49,14 +49,13 @@ JS::Zone::Zone(JSRuntime *rt)
JS_ASSERT(reinterpret_cast<JS::shadow::Zone *>(this) ==
static_cast<JS::shadow::Zone *>(this));
setGCMaxMallocBytes(rt->gc.maxMallocBytes * 0.9);
setGCMaxMallocBytes(rt->gcMaxMallocBytes * 0.9);
}
Zone::~Zone()
{
JSRuntime *rt = runtimeFromMainThread();
if (this == rt->gc.systemZone)
rt->gc.systemZone = nullptr;
if (this == runtimeFromMainThread()->systemZone)
runtimeFromMainThread()->systemZone = nullptr;
#ifdef JS_ION
js_delete(jitZone_);
@ -116,7 +115,7 @@ Zone::sweep(FreeOp *fop, bool releaseTypes, bool *oom)
releaseTypes = false;
{
gcstats::AutoPhase ap(fop->runtime()->gc.stats, gcstats::PHASE_DISCARD_ANALYSIS);
gcstats::AutoPhase ap(fop->runtime()->gcStats, gcstats::PHASE_DISCARD_ANALYSIS);
types.sweep(fop, releaseTypes, oom);
}
@ -134,8 +133,8 @@ Zone::sweepBreakpoints(FreeOp *fop)
* to iterate over the scripts belonging to a single compartment in a zone.
*/
gcstats::AutoPhase ap1(fop->runtime()->gc.stats, gcstats::PHASE_SWEEP_TABLES);
gcstats::AutoPhase ap2(fop->runtime()->gc.stats, gcstats::PHASE_SWEEP_TABLES_BREAKPOINT);
gcstats::AutoPhase ap1(fop->runtime()->gcStats, gcstats::PHASE_SWEEP_TABLES);
gcstats::AutoPhase ap2(fop->runtime()->gcStats, gcstats::PHASE_SWEEP_TABLES_BREAKPOINT);
JS_ASSERT(isGCSweeping());
for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
@ -230,7 +229,7 @@ Zone::gcNumber()
{
// Zones in use by exclusive threads are not collected, and threads using
// them cannot access the main runtime's gcNumber without racing.
return usedByExclusiveThread ? 0 : runtimeFromMainThread()->gc.number;
return usedByExclusiveThread ? 0 : runtimeFromMainThread()->gcNumber;
}
#ifdef JS_ION

View File

@ -357,8 +357,8 @@ class ZonesIter {
public:
ZonesIter(JSRuntime *rt, ZoneSelector selector) {
it = rt->gc.zones.begin();
end = rt->gc.zones.end();
it = rt->zones.begin();
end = rt->zones.end();
if (selector == SkipAtoms) {
JS_ASSERT(rt->isAtomsZone(*it));

View File

@ -734,7 +734,7 @@ inline bool
ICStubCompiler::emitPostWriteBarrierSlot(MacroAssembler &masm, Register obj, ValueOperand val,
Register scratch, GeneralRegisterSet saveRegs)
{
Nursery &nursery = cx->runtime()->gc.nursery;
Nursery &nursery = cx->runtime()->gcNursery;
Label skipBarrier;
masm.branchTestObject(Assembler::NotEqual, val, &skipBarrier);
@ -3433,7 +3433,7 @@ IsCacheableGetPropCall(JSContext *cx, JSObject *obj, JSObject *holder, Shape *sh
#ifdef JSGC_GENERATIONAL
// Information from get prop call ICs may be used directly from Ion code,
// and should not be nursery allocated.
if (cx->runtime()->gc.nursery.isInside(holder) || cx->runtime()->gc.nursery.isInside(func))
if (cx->runtime()->gcNursery.isInside(holder) || cx->runtime()->gcNursery.isInside(func))
return false;
#endif
@ -3552,7 +3552,7 @@ IsCacheableSetPropCall(JSContext *cx, JSObject *obj, JSObject *holder, Shape *sh
#ifdef JSGC_GENERATIONAL
// Information from set prop call ICs may be used directly from Ion code,
// and should not be nursery allocated.
if (cx->runtime()->gc.nursery.isInside(holder) || cx->runtime()->gc.nursery.isInside(func))
if (cx->runtime()->gcNursery.isInside(holder) || cx->runtime()->gcNursery.isInside(func))
return false;
#endif

View File

@ -456,7 +456,7 @@ BaselineScript::Destroy(FreeOp *fop, BaselineScript *script)
* in invalid store buffer entries. Assert that if we do destroy scripts
* outside of a GC that we at least emptied the nursery first.
*/
JS_ASSERT(fop->runtime()->gc.nursery.isEmpty());
JS_ASSERT(fop->runtime()->gcNursery.isEmpty());
#endif
fop->delete_(script);
}

View File

@ -68,7 +68,7 @@ CompileRuntime::addressOfLastCachedNativeIterator()
const void *
CompileRuntime::addressOfGCZeal()
{
return &runtime()->gc.zealMode;
return &runtime()->gcZeal_;
}
#endif
@ -170,7 +170,7 @@ CompileRuntime::maybeGetMathCache()
const Nursery &
CompileRuntime::gcNursery()
{
return runtime()->gc.nursery;
return runtime()->gcNursery;
}
#endif

View File

@ -1730,7 +1730,7 @@ OffThreadCompilationAvailable(JSContext *cx)
// when running off thread.
return cx->runtime()->canUseParallelIonCompilation()
&& WorkerThreadState().cpuCount > 1
&& cx->runtime()->gc.incrementalState == gc::NO_INCREMENTAL
&& cx->runtime()->gcIncrementalState == gc::NO_INCREMENTAL
&& !cx->runtime()->profilingScripts;
#else
return false;
@ -2845,13 +2845,13 @@ jit::FinishInvalidation<ParallelExecution>(FreeOp *fop, JSScript *script);
void
jit::MarkValueFromIon(JSRuntime *rt, Value *vp)
{
gc::MarkValueUnbarriered(&rt->gc.marker, vp, "write barrier");
gc::MarkValueUnbarriered(&rt->gcMarker, vp, "write barrier");
}
void
jit::MarkShapeFromIon(JSRuntime *rt, Shape **shapep)
{
gc::MarkShapeUnbarriered(&rt->gc.marker, shapep, "write barrier");
gc::MarkShapeUnbarriered(&rt->gcMarker, shapep, "write barrier");
}
void

View File

@ -965,7 +965,7 @@ UpdateIonJSFrameForMinorGC(JSTracer *trc, const JitFrameIterator &frame)
for (GeneralRegisterBackwardIterator iter(safepoint.allGprSpills()); iter.more(); iter++) {
--spill;
if (slotsRegs.has(*iter))
trc->runtime()->gc.nursery.forwardBufferPointer(reinterpret_cast<HeapSlot **>(spill));
trc->runtime()->gcNursery.forwardBufferPointer(reinterpret_cast<HeapSlot **>(spill));
}
// Skip to the right place in the safepoint
@ -979,7 +979,7 @@ UpdateIonJSFrameForMinorGC(JSTracer *trc, const JitFrameIterator &frame)
while (safepoint.getSlotsOrElementsSlot(&slot)) {
HeapSlot **slots = reinterpret_cast<HeapSlot **>(layout->slotRef(slot));
trc->runtime()->gc.nursery.forwardBufferPointer(slots);
trc->runtime()->gcNursery.forwardBufferPointer(slots);
}
}
#endif
@ -1302,7 +1302,7 @@ GetPcScript(JSContext *cx, JSScript **scriptRes, jsbytecode **pcRes)
if (MOZ_UNLIKELY(rt->ionPcScriptCache == nullptr)) {
rt->ionPcScriptCache = (PcScriptCache *)js_malloc(sizeof(struct PcScriptCache));
if (rt->ionPcScriptCache)
rt->ionPcScriptCache->clear(rt->gc.number);
rt->ionPcScriptCache->clear(rt->gcNumber);
}
// Attempt to lookup address in cache.

View File

@ -67,7 +67,7 @@ class Linker
masm.link(code);
#ifdef JSGC_GENERATIONAL
if (masm.embedsNurseryPointers())
cx->runtime()->gc.storeBuffer.putWholeCell(code);
cx->runtime()->gcStoreBuffer.putWholeCell(code);
#endif
return code;
}

View File

@ -460,7 +460,7 @@ ShouldPreserveParallelJITCode(JSRuntime *rt, JSScript *script, bool increase = f
{
IonScript *parallelIon = script->parallelIonScript();
uint32_t age = increase ? parallelIon->increaseParallelAge() : parallelIon->parallelAge();
return age < jit::IonScript::MAX_PARALLEL_AGE && !rt->gc.shouldCleanUpEverything;
return age < jit::IonScript::MAX_PARALLEL_AGE && !rt->gcShouldCleanUpEverything;
}
// On windows systems, really large frames need to be incrementally touched.

View File

@ -45,8 +45,8 @@ struct PcScriptCache
JSScript **scriptRes, jsbytecode **pcRes)
{
// If a GC occurred, lazily clear the cache now.
if (gcNumber != rt->gc.number) {
clear(rt->gc.number);
if (gcNumber != rt->gcNumber) {
clear(rt->gcNumber);
return false;
}

View File

@ -554,7 +554,7 @@ NewCallObject(JSContext *cx, HandleShape shape, HandleTypeObject type, HeapSlot
// the initializing writes. The interpreter, however, may have allocated
// the call object tenured, so barrier as needed before re-entering.
if (!IsInsideNursery(cx->runtime(), obj))
cx->runtime()->gc.storeBuffer.putWholeCell(obj);
cx->runtime()->gcStoreBuffer.putWholeCell(obj);
#endif
return obj;
@ -573,7 +573,7 @@ NewSingletonCallObject(JSContext *cx, HandleShape shape, HeapSlot *slots)
// the call object tenured, so barrier as needed before re-entering.
MOZ_ASSERT(!IsInsideNursery(cx->runtime(), obj),
"singletons are created in the tenured heap");
cx->runtime()->gc.storeBuffer.putWholeCell(obj);
cx->runtime()->gcStoreBuffer.putWholeCell(obj);
#endif
return obj;
@ -714,7 +714,7 @@ void
PostWriteBarrier(JSRuntime *rt, JSObject *obj)
{
JS_ASSERT(!IsInsideNursery(rt, obj));
rt->gc.storeBuffer.putWholeCell(obj);
rt->gcStoreBuffer.putWholeCell(obj);
}
void

View File

@ -17,7 +17,7 @@ BEGIN_TEST(testGCFinalizeCallback)
/* Full GC, non-incremental. */
FinalizeCalls = 0;
JS_GC(rt);
CHECK(rt->gc.isFull);
CHECK(rt->gcIsFull);
CHECK(checkSingleGroup());
CHECK(checkFinalizeStatus());
CHECK(checkFinalizeIsCompartmentGC(false));
@ -26,8 +26,8 @@ BEGIN_TEST(testGCFinalizeCallback)
FinalizeCalls = 0;
JS::PrepareForFullGC(rt);
JS::IncrementalGC(rt, JS::gcreason::API, 1000000);
CHECK(rt->gc.incrementalState == js::gc::NO_INCREMENTAL);
CHECK(rt->gc.isFull);
CHECK(rt->gcIncrementalState == js::gc::NO_INCREMENTAL);
CHECK(rt->gcIsFull);
CHECK(checkMultipleGroups());
CHECK(checkFinalizeStatus());
CHECK(checkFinalizeIsCompartmentGC(false));
@ -43,7 +43,7 @@ BEGIN_TEST(testGCFinalizeCallback)
FinalizeCalls = 0;
JS::PrepareZoneForGC(global1->zone());
JS::GCForReason(rt, JS::gcreason::API);
CHECK(!rt->gc.isFull);
CHECK(!rt->gcIsFull);
CHECK(checkSingleGroup());
CHECK(checkFinalizeStatus());
CHECK(checkFinalizeIsCompartmentGC(true));
@ -54,7 +54,7 @@ BEGIN_TEST(testGCFinalizeCallback)
JS::PrepareZoneForGC(global2->zone());
JS::PrepareZoneForGC(global3->zone());
JS::GCForReason(rt, JS::gcreason::API);
CHECK(!rt->gc.isFull);
CHECK(!rt->gcIsFull);
CHECK(checkSingleGroup());
CHECK(checkFinalizeStatus());
CHECK(checkFinalizeIsCompartmentGC(true));
@ -63,8 +63,8 @@ BEGIN_TEST(testGCFinalizeCallback)
FinalizeCalls = 0;
JS::PrepareZoneForGC(global1->zone());
JS::IncrementalGC(rt, JS::gcreason::API, 1000000);
CHECK(rt->gc.incrementalState == js::gc::NO_INCREMENTAL);
CHECK(!rt->gc.isFull);
CHECK(rt->gcIncrementalState == js::gc::NO_INCREMENTAL);
CHECK(!rt->gcIsFull);
CHECK(checkSingleGroup());
CHECK(checkFinalizeStatus());
CHECK(checkFinalizeIsCompartmentGC(true));
@ -75,8 +75,8 @@ BEGIN_TEST(testGCFinalizeCallback)
JS::PrepareZoneForGC(global2->zone());
JS::PrepareZoneForGC(global3->zone());
JS::IncrementalGC(rt, JS::gcreason::API, 1000000);
CHECK(rt->gc.incrementalState == js::gc::NO_INCREMENTAL);
CHECK(!rt->gc.isFull);
CHECK(rt->gcIncrementalState == js::gc::NO_INCREMENTAL);
CHECK(!rt->gcIsFull);
CHECK(checkMultipleGroups());
CHECK(checkFinalizeStatus());
CHECK(checkFinalizeIsCompartmentGC(true));
@ -89,13 +89,13 @@ BEGIN_TEST(testGCFinalizeCallback)
JS_SetGCZeal(cx, 9, 1000000);
JS::PrepareForFullGC(rt);
js::GCDebugSlice(rt, true, 1);
CHECK(rt->gc.incrementalState == js::gc::MARK);
CHECK(rt->gc.isFull);
CHECK(rt->gcIncrementalState == js::gc::MARK);
CHECK(rt->gcIsFull);
JS::RootedObject global4(cx, createGlobal());
js::GCDebugSlice(rt, true, 1);
CHECK(rt->gc.incrementalState == js::gc::NO_INCREMENTAL);
CHECK(!rt->gc.isFull);
CHECK(rt->gcIncrementalState == js::gc::NO_INCREMENTAL);
CHECK(!rt->gcIsFull);
CHECK(checkMultipleGroups());
CHECK(checkFinalizeStatus());

View File

@ -158,7 +158,7 @@ namespace js {
void
AssertHeapIsIdle(JSRuntime *rt)
{
JS_ASSERT(rt->gc.heapState == js::Idle);
JS_ASSERT(rt->heapState == js::Idle);
}
void
@ -718,7 +718,7 @@ StopRequest(JSContext *cx)
if (rt->requestDepth != 1) {
rt->requestDepth--;
} else {
rt->gc.conservativeGC.updateForRequestEnd();
rt->conservativeGC.updateForRequestEnd();
rt->requestDepth = 0;
rt->triggerActivityCallback(false);
}
@ -1613,17 +1613,17 @@ JS_PUBLIC_API(bool)
JS_AddExtraGCRootsTracer(JSRuntime *rt, JSTraceDataOp traceOp, void *data)
{
AssertHeapIsIdle(rt);
return !!rt->gc.blackRootTracers.append(ExtraTracer(traceOp, data));
return !!rt->gcBlackRootTracers.append(JSRuntime::ExtraTracer(traceOp, data));
}
JS_PUBLIC_API(void)
JS_RemoveExtraGCRootsTracer(JSRuntime *rt, JSTraceDataOp traceOp, void *data)
{
AssertHeapIsIdle(rt);
for (size_t i = 0; i < rt->gc.blackRootTracers.length(); i++) {
ExtraTracer *e = &rt->gc.blackRootTracers[i];
for (size_t i = 0; i < rt->gcBlackRootTracers.length(); i++) {
JSRuntime::ExtraTracer *e = &rt->gcBlackRootTracers[i];
if (e->op == traceOp && e->data == data) {
rt->gc.blackRootTracers.erase(e);
rt->gcBlackRootTracers.erase(e);
break;
}
}
@ -1897,15 +1897,15 @@ JS_PUBLIC_API(void)
JS_SetGCCallback(JSRuntime *rt, JSGCCallback cb, void *data)
{
AssertHeapIsIdle(rt);
rt->gc.callback = cb;
rt->gc.callbackData = data;
rt->gcCallback = cb;
rt->gcCallbackData = data;
}
JS_PUBLIC_API(void)
JS_SetFinalizeCallback(JSRuntime *rt, JSFinalizeCallback cb)
{
AssertHeapIsIdle(rt);
rt->gc.finalizeCallback = cb;
rt->gcFinalizeCallback = cb;
}
JS_PUBLIC_API(bool)
@ -1925,51 +1925,51 @@ JS_SetGCParameter(JSRuntime *rt, JSGCParamKey key, uint32_t value)
{
switch (key) {
case JSGC_MAX_BYTES: {
JS_ASSERT(value >= rt->gc.bytes);
rt->gc.maxBytes = value;
JS_ASSERT(value >= rt->gcBytes);
rt->gcMaxBytes = value;
break;
}
case JSGC_MAX_MALLOC_BYTES:
rt->setGCMaxMallocBytes(value);
break;
case JSGC_SLICE_TIME_BUDGET:
rt->gc.sliceBudget = SliceBudget::TimeBudget(value);
rt->gcSliceBudget = SliceBudget::TimeBudget(value);
break;
case JSGC_MARK_STACK_LIMIT:
js::SetMarkStackLimit(rt, value);
break;
case JSGC_HIGH_FREQUENCY_TIME_LIMIT:
rt->gc.highFrequencyTimeThreshold = value;
rt->gcHighFrequencyTimeThreshold = value;
break;
case JSGC_HIGH_FREQUENCY_LOW_LIMIT:
rt->gc.highFrequencyLowLimitBytes = value * 1024 * 1024;
rt->gcHighFrequencyLowLimitBytes = value * 1024 * 1024;
break;
case JSGC_HIGH_FREQUENCY_HIGH_LIMIT:
rt->gc.highFrequencyHighLimitBytes = value * 1024 * 1024;
rt->gcHighFrequencyHighLimitBytes = value * 1024 * 1024;
break;
case JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MAX:
rt->gc.highFrequencyHeapGrowthMax = value / 100.0;
MOZ_ASSERT(rt->gc.highFrequencyHeapGrowthMax / 0.85 > 1.0);
rt->gcHighFrequencyHeapGrowthMax = value / 100.0;
MOZ_ASSERT(rt->gcHighFrequencyHeapGrowthMax / 0.85 > 1.0);
break;
case JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MIN:
rt->gc.highFrequencyHeapGrowthMin = value / 100.0;
MOZ_ASSERT(rt->gc.highFrequencyHeapGrowthMin / 0.85 > 1.0);
rt->gcHighFrequencyHeapGrowthMin = value / 100.0;
MOZ_ASSERT(rt->gcHighFrequencyHeapGrowthMin / 0.85 > 1.0);
break;
case JSGC_LOW_FREQUENCY_HEAP_GROWTH:
rt->gc.lowFrequencyHeapGrowth = value / 100.0;
MOZ_ASSERT(rt->gc.lowFrequencyHeapGrowth / 0.9 > 1.0);
rt->gcLowFrequencyHeapGrowth = value / 100.0;
MOZ_ASSERT(rt->gcLowFrequencyHeapGrowth / 0.9 > 1.0);
break;
case JSGC_DYNAMIC_HEAP_GROWTH:
rt->gc.dynamicHeapGrowth = value;
rt->gcDynamicHeapGrowth = value;
break;
case JSGC_DYNAMIC_MARK_SLICE:
rt->gc.dynamicMarkSlice = value;
rt->gcDynamicMarkSlice = value;
break;
case JSGC_ALLOCATION_THRESHOLD:
rt->gc.allocationThreshold = value * 1024 * 1024;
rt->gcAllocationThreshold = value * 1024 * 1024;
break;
case JSGC_DECOMMIT_THRESHOLD:
rt->gc.decommitThreshold = value * 1024 * 1024;
rt->gcDecommitThreshold = value * 1024 * 1024;
break;
default:
JS_ASSERT(key == JSGC_MODE);
@ -1986,42 +1986,42 @@ JS_GetGCParameter(JSRuntime *rt, JSGCParamKey key)
{
switch (key) {
case JSGC_MAX_BYTES:
return uint32_t(rt->gc.maxBytes);
return uint32_t(rt->gcMaxBytes);
case JSGC_MAX_MALLOC_BYTES:
return rt->gc.maxMallocBytes;
return rt->gcMaxMallocBytes;
case JSGC_BYTES:
return uint32_t(rt->gc.bytes);
return uint32_t(rt->gcBytes);
case JSGC_MODE:
return uint32_t(rt->gcMode());
case JSGC_UNUSED_CHUNKS:
return uint32_t(rt->gc.chunkPool.getEmptyCount());
return uint32_t(rt->gcChunkPool.getEmptyCount());
case JSGC_TOTAL_CHUNKS:
return uint32_t(rt->gc.chunkSet.count() + rt->gc.chunkPool.getEmptyCount());
return uint32_t(rt->gcChunkSet.count() + rt->gcChunkPool.getEmptyCount());
case JSGC_SLICE_TIME_BUDGET:
return uint32_t(rt->gc.sliceBudget > 0 ? rt->gc.sliceBudget / PRMJ_USEC_PER_MSEC : 0);
return uint32_t(rt->gcSliceBudget > 0 ? rt->gcSliceBudget / PRMJ_USEC_PER_MSEC : 0);
case JSGC_MARK_STACK_LIMIT:
return rt->gc.marker.maxCapacity();
return rt->gcMarker.maxCapacity();
case JSGC_HIGH_FREQUENCY_TIME_LIMIT:
return rt->gc.highFrequencyTimeThreshold;
return rt->gcHighFrequencyTimeThreshold;
case JSGC_HIGH_FREQUENCY_LOW_LIMIT:
return rt->gc.highFrequencyLowLimitBytes / 1024 / 1024;
return rt->gcHighFrequencyLowLimitBytes / 1024 / 1024;
case JSGC_HIGH_FREQUENCY_HIGH_LIMIT:
return rt->gc.highFrequencyHighLimitBytes / 1024 / 1024;
return rt->gcHighFrequencyHighLimitBytes / 1024 / 1024;
case JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MAX:
return uint32_t(rt->gc.highFrequencyHeapGrowthMax * 100);
return uint32_t(rt->gcHighFrequencyHeapGrowthMax * 100);
case JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MIN:
return uint32_t(rt->gc.highFrequencyHeapGrowthMin * 100);
return uint32_t(rt->gcHighFrequencyHeapGrowthMin * 100);
case JSGC_LOW_FREQUENCY_HEAP_GROWTH:
return uint32_t(rt->gc.lowFrequencyHeapGrowth * 100);
return uint32_t(rt->gcLowFrequencyHeapGrowth * 100);
case JSGC_DYNAMIC_HEAP_GROWTH:
return rt->gc.dynamicHeapGrowth;
return rt->gcDynamicHeapGrowth;
case JSGC_DYNAMIC_MARK_SLICE:
return rt->gc.dynamicMarkSlice;
return rt->gcDynamicMarkSlice;
case JSGC_ALLOCATION_THRESHOLD:
return rt->gc.allocationThreshold / 1024 / 1024;
return rt->gcAllocationThreshold / 1024 / 1024;
default:
JS_ASSERT(key == JSGC_NUMBER);
return uint32_t(rt->gc.number);
return uint32_t(rt->gcNumber);
}
}
@ -2501,7 +2501,7 @@ JS_NewGlobalObject(JSContext *cx, const JSClass *clasp, JSPrincipals *principals
Zone *zone;
if (options.zoneSpecifier() == JS::SystemZone)
zone = rt->gc.systemZone;
zone = rt->systemZone;
else if (options.zoneSpecifier() == JS::FreshZone)
zone = nullptr;
else
@ -2512,9 +2512,9 @@ JS_NewGlobalObject(JSContext *cx, const JSClass *clasp, JSPrincipals *principals
return nullptr;
// Lazily create the system zone.
if (!rt->gc.systemZone && options.zoneSpecifier() == JS::SystemZone) {
rt->gc.systemZone = compartment->zone();
rt->gc.systemZone->isSystem = true;
if (!rt->systemZone && options.zoneSpecifier() == JS::SystemZone) {
rt->systemZone = compartment->zone();
rt->systemZone->isSystem = true;
}
Rooted<GlobalObject *> global(cx);
@ -6203,7 +6203,7 @@ JS_SetGCZeal(JSContext *cx, uint8_t zeal, uint32_t frequency)
JS_PUBLIC_API(void)
JS_ScheduleGC(JSContext *cx, uint32_t count)
{
cx->runtime()->gc.nextScheduled = count;
cx->runtime()->gcNextScheduled = count;
}
#endif
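
JS_SetGCParameter and JS_GetGCParameter in the hunks above are public JSAPI, and the valid keys are exactly the ones the two switch statements enumerate. A usage sketch for an embedder of this era; the values are illustrative:

    #include "jsapi.h"   // JS_SetGCParameter / JS_GetGCParameter, as above

    void ConfigureGC(JSRuntime *rt) {
        // Cap the GC heap at 64 MB; the setter asserts value >= current gcBytes.
        JS_SetGCParameter(rt, JSGC_MAX_BYTES, 64 * 1024 * 1024);
        // Budget incremental slices at 10 ms (stored via SliceBudget::TimeBudget).
        JS_SetGCParameter(rt, JSGC_SLICE_TIME_BUDGET, 10);
        // Read back the current heap usage (rt->gcBytes after this backout).
        uint32_t used = JS_GetGCParameter(rt, JSGC_BYTES);
        (void) used;
    }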

View File

@ -223,7 +223,7 @@ struct ThreadSafeContext : ContextFriendFields,
inline js::Nursery &nursery() {
JS_ASSERT(hasNursery());
return runtime_->gc.nursery;
return runtime_->gcNursery;
}
#endif
@ -289,7 +289,7 @@ struct ThreadSafeContext : ContextFriendFields,
void *runtimeAddressForJit() { return runtime_; }
void *stackLimitAddress(StackKind kind) { return &runtime_->mainThread.nativeStackLimit[kind]; }
void *stackLimitAddressForJitCode(StackKind kind);
size_t gcSystemPageSize() { return runtime_->gc.pageAllocator.systemPageSize(); }
size_t gcSystemPageSize() { return runtime_->pageAllocator.systemPageSize(); }
bool signalHandlersInstalled() const { return runtime_->signalHandlersInstalled(); }
bool jitSupportsFloatingPoint() const { return runtime_->jitSupportsFloatingPoint; }

View File

@ -258,7 +258,7 @@ JSCompartment::putWrapper(JSContext *cx, const CrossCompartmentKey &wrapped, con
if (success && (nursery.isInside(wrapped.wrapped) || nursery.isInside(wrapped.debugger))) {
WrapperMapRef ref(&crossCompartmentWrappers, wrapped);
cx->runtime()->gc.storeBuffer.putGeneric(ref);
cx->runtime()->gcStoreBuffer.putGeneric(ref);
}
#endif
@ -557,7 +557,7 @@ JSCompartment::sweep(FreeOp *fop, bool releaseTypes)
JSRuntime *rt = runtimeFromMainThread();
{
gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_SWEEP_TABLES);
gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_TABLES);
/* Remove dead references held weakly by the compartment. */
@ -616,8 +616,8 @@ JSCompartment::sweepCrossCompartmentWrappers()
{
JSRuntime *rt = runtimeFromMainThread();
gcstats::AutoPhase ap1(rt->gc.stats, gcstats::PHASE_SWEEP_TABLES);
gcstats::AutoPhase ap2(rt->gc.stats, gcstats::PHASE_SWEEP_TABLES_WRAPPER);
gcstats::AutoPhase ap1(rt->gcStats, gcstats::PHASE_SWEEP_TABLES);
gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_SWEEP_TABLES_WRAPPER);
/* Remove dead wrappers from the table. */
for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {

View File

@ -59,8 +59,8 @@ js::ForgetSourceHook(JSRuntime *rt)
JS_FRIEND_API(void)
JS_SetGrayGCRootsTracer(JSRuntime *rt, JSTraceDataOp traceOp, void *data)
{
rt->gc.grayRootTracer.op = traceOp;
rt->gc.grayRootTracer.data = data;
rt->gcGrayRootTracer.op = traceOp;
rt->gcGrayRootTracer.data = data;
}
JS_FRIEND_API(JSString *)
@ -632,7 +632,7 @@ js::TraceWeakMaps(WeakMapTracer *trc)
extern JS_FRIEND_API(bool)
js::AreGCGrayBitsValid(JSRuntime *rt)
{
return rt->gc.grayBitsValid;
return rt->gcGrayBitsValid;
}
JS_FRIEND_API(bool)
@ -857,27 +857,27 @@ js::IsContextRunningJS(JSContext *cx)
JS_FRIEND_API(JS::GCSliceCallback)
JS::SetGCSliceCallback(JSRuntime *rt, GCSliceCallback callback)
{
JS::GCSliceCallback old = rt->gc.sliceCallback;
rt->gc.sliceCallback = callback;
JS::GCSliceCallback old = rt->gcSliceCallback;
rt->gcSliceCallback = callback;
return old;
}
JS_FRIEND_API(bool)
JS::WasIncrementalGC(JSRuntime *rt)
{
return rt->gc.isIncremental;
return rt->gcIsIncremental;
}
jschar *
GCDescription::formatMessage(JSRuntime *rt) const
{
return rt->gc.stats.formatMessage();
return rt->gcStats.formatMessage();
}
jschar *
GCDescription::formatJSON(JSRuntime *rt, uint64_t timestamp) const
{
return rt->gc.stats.formatJSON(timestamp);
return rt->gcStats.formatJSON(timestamp);
}
JS_FRIEND_API(void)
@ -899,36 +899,36 @@ JS::NotifyDidPaint(JSRuntime *rt)
return;
}
if (JS::IsIncrementalGCInProgress(rt) && !rt->gc.interFrameGC) {
if (JS::IsIncrementalGCInProgress(rt) && !rt->gcInterFrameGC) {
JS::PrepareForIncrementalGC(rt);
GCSlice(rt, GC_NORMAL, gcreason::REFRESH_FRAME);
}
rt->gc.interFrameGC = false;
rt->gcInterFrameGC = false;
}
JS_FRIEND_API(bool)
JS::IsIncrementalGCEnabled(JSRuntime *rt)
{
return rt->gc.incrementalEnabled && rt->gcMode() == JSGC_MODE_INCREMENTAL;
return rt->gcIncrementalEnabled && rt->gcMode() == JSGC_MODE_INCREMENTAL;
}
JS_FRIEND_API(bool)
JS::IsIncrementalGCInProgress(JSRuntime *rt)
{
return rt->gc.incrementalState != gc::NO_INCREMENTAL && !rt->gc.verifyPreData;
return rt->gcIncrementalState != gc::NO_INCREMENTAL && !rt->gcVerifyPreData;
}
JS_FRIEND_API(void)
JS::DisableIncrementalGC(JSRuntime *rt)
{
rt->gc.incrementalEnabled = false;
rt->gcIncrementalEnabled = false;
}
JS::AutoDisableGenerationalGC::AutoDisableGenerationalGC(JSRuntime *rt)
: runtime(rt)
#if defined(JSGC_GENERATIONAL) && defined(JS_GC_ZEAL)
, restartVerifier(rt->gc.verifyPostData)
, restartVerifier(rt->gcVerifyPostData)
#endif
{
#ifdef JSGC_GENERATIONAL
@ -938,21 +938,21 @@ JS::AutoDisableGenerationalGC::AutoDisableGenerationalGC(JSRuntime *rt)
gc::EndVerifyPostBarriers(rt);
#endif
MinorGC(rt, JS::gcreason::API);
rt->gc.nursery.disable();
rt->gc.storeBuffer.disable();
rt->gcNursery.disable();
rt->gcStoreBuffer.disable();
}
#endif
++rt->gc.generationalDisabled;
++rt->gcGenerationalDisabled;
}
JS::AutoDisableGenerationalGC::~AutoDisableGenerationalGC()
{
JS_ASSERT(runtime->gc.generationalDisabled > 0);
--runtime->gc.generationalDisabled;
JS_ASSERT(runtime->gcGenerationalDisabled > 0);
--runtime->gcGenerationalDisabled;
#ifdef JSGC_GENERATIONAL
if (runtime->gc.generationalDisabled == 0) {
runtime->gc.nursery.enable();
runtime->gc.storeBuffer.enable();
if (runtime->gcGenerationalDisabled == 0) {
runtime->gcNursery.enable();
runtime->gcStoreBuffer.enable();
#ifdef JS_GC_ZEAL
if (restartVerifier)
gc::StartVerifyPostBarriers(runtime);
@ -964,13 +964,13 @@ JS::AutoDisableGenerationalGC::~AutoDisableGenerationalGC()
extern JS_FRIEND_API(bool)
JS::IsGenerationalGCEnabled(JSRuntime *rt)
{
return rt->gc.generationalDisabled == 0;
return rt->gcGenerationalDisabled == 0;
}
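
Taken together, the constructor and destructor above make AutoDisableGenerationalGC a nestable RAII toggle: the first instance evicts and disables the nursery (pausing the post-barrier verifier), and the last one out re-enables everything. Sketched embedder usage:

void tenuredOnlySection(JSRuntime *rt)
{
    JS::AutoDisableGenerationalGC noGGC(rt);
    // gcNursery and gcStoreBuffer are disabled here, so every allocation
    // tenures directly; normal operation resumes when noGGC goes out of
    // scope (and the post-barrier verifier restarts if it was running).
}
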
JS_FRIEND_API(bool)
JS::IsIncrementalBarrierNeeded(JSRuntime *rt)
{
return rt->gc.incrementalState == gc::MARK && !rt->isHeapBusy();
return rt->gcIncrementalState == gc::MARK && !rt->isHeapBusy();
}
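
IsIncrementalBarrierNeeded is the embedder-side gate for manual pre-barriers: while an incremental GC is in its mark phase, the old value of an edge must be marked before it is overwritten. A sketch of the guard, pairing it with IncrementalValueBarrier from the next hunk (slot and newValue are hypothetical):

void setSlotWithBarrier(JSRuntime *rt, JS::Value *slot, const JS::Value &newValue)
{
    if (JS::IsIncrementalBarrierNeeded(rt))
        JS::IncrementalValueBarrier(*slot);   // mark the old target first
    *slot = newValue;
}
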
JS_FRIEND_API(bool)
@ -1034,7 +1034,7 @@ JS::IncrementalValueBarrier(const Value &v)
JS_FRIEND_API(void)
JS::PokeGC(JSRuntime *rt)
{
rt->gc.poke = true;
rt->gcPoke = true;
}
JS_FRIEND_API(JSCompartment *)
@ -1203,7 +1203,7 @@ js_DefineOwnProperty(JSContext *cx, JSObject *objArg, jsid idArg,
{
RootedObject obj(cx, objArg);
RootedId id(cx, idArg);
JS_ASSERT(cx->runtime()->gc.heapState == js::Idle);
JS_ASSERT(cx->runtime()->heapState == js::Idle);
CHECK_REQUEST(cx);
assertSameCompartment(cx, obj, id, descriptor.value());
if (descriptor.hasGetterObject())
@ -1240,7 +1240,7 @@ JS_StoreObjectPostBarrierCallback(JSContext* cx,
{
JSRuntime *rt = cx->runtime();
if (IsInsideNursery(rt, key))
rt->gc.storeBuffer.putCallback(callback, key, data);
rt->gcStoreBuffer.putCallback(callback, key, data);
}
extern JS_FRIEND_API(void)
@ -1250,6 +1250,6 @@ JS_StoreStringPostBarrierCallback(JSContext* cx,
{
JSRuntime *rt = cx->runtime();
if (IsInsideNursery(rt, key))
rt->gc.storeBuffer.putCallback(callback, key, data);
rt->gcStoreBuffer.putCallback(callback, key, data);
}
#endif /* JSGC_GENERATIONAL */

File diff suppressed because it is too large


@ -50,20 +50,6 @@ enum HeapState {
MinorCollecting // doing a GC of the minor heap (nursery)
};
struct ExtraTracer {
JSTraceDataOp op;
void *data;
ExtraTracer()
: op(nullptr), data(nullptr)
{}
ExtraTracer(JSTraceDataOp op, void *data)
: op(op), data(data)
{}
};
typedef Vector<ScriptAndCounts, 0, SystemAllocPolicy> ScriptAndCountsVector;
namespace jit {
class JitCode;
}


@ -27,8 +27,8 @@ struct AutoMarkInDeadZone
scheduled(zone->scheduledForDestruction)
{
JSRuntime *rt = zone->runtimeFromMainThread();
if (rt->gc.manipulatingDeadZones && zone->scheduledForDestruction) {
rt->gc.objectsMarkedInDeadZones++;
if (rt->gcManipulatingDeadZones && zone->scheduledForDestruction) {
rt->gcObjectsMarkedInDeadZones++;
zone->scheduledForDestruction = false;
}
}
@ -106,12 +106,12 @@ GetGCThingTraceKind(const void *thing)
static inline void
GCPoke(JSRuntime *rt)
{
rt->gc.poke = true;
rt->gcPoke = true;
#ifdef JS_GC_ZEAL
/* Schedule a GC to happen "soon" after a GC poke. */
if (rt->gcZeal() == js::gc::ZealPokeValue)
rt->gc.nextScheduled = 1;
rt->gcNextScheduled = 1;
#endif
}
@ -245,7 +245,7 @@ class CellIterUnderGC : public CellIterImpl
public:
CellIterUnderGC(JS::Zone *zone, AllocKind kind) {
#ifdef JSGC_GENERATIONAL
JS_ASSERT(zone->runtimeFromAnyThread()->gc.nursery.isEmpty());
JS_ASSERT(zone->runtimeFromAnyThread()->gcNursery.isEmpty());
#endif
JS_ASSERT(zone->runtimeFromAnyThread()->isHeapBusy());
init(zone, kind);
@ -284,7 +284,7 @@ class CellIter : public CellIterImpl
#ifdef JSGC_GENERATIONAL
/* Evict the nursery before iterating so we can see all things. */
JSRuntime *rt = zone->runtimeFromMainThread();
if (!rt->gc.nursery.isEmpty())
if (!rt->gcNursery.isEmpty())
MinorGC(rt, JS::gcreason::EVICT_NURSERY);
#endif
@ -297,7 +297,7 @@ class CellIter : public CellIterImpl
#ifdef DEBUG
/* Assert that no GCs can occur while a CellIter is live. */
counter = &zone->runtimeFromAnyThread()->gc.noGCOrAllocationCheck;
counter = &zone->runtimeFromAnyThread()->noGCOrAllocationCheck;
++*counter;
#endif
@ -353,7 +353,7 @@ class GCZoneGroupIter {
public:
GCZoneGroupIter(JSRuntime *rt) {
JS_ASSERT(rt->isHeapBusy());
current = rt->gc.currentZoneGroup;
current = rt->gcCurrentZoneGroup;
}
bool done() const { return !current; }
@ -387,7 +387,7 @@ TryNewNurseryObject(ThreadSafeContext *cxArg, size_t thingSize, size_t nDynamicS
JS_ASSERT(!IsAtomsCompartment(cx->compartment()));
JSRuntime *rt = cx->runtime();
Nursery &nursery = rt->gc.nursery;
Nursery &nursery = rt->gcNursery;
JSObject *obj = nursery.allocateObject(cx, thingSize, nDynamicSlots);
if (obj)
return obj;
@ -427,7 +427,7 @@ CheckAllocatorState(ThreadSafeContext *cx, AllocKind kind)
kind == FINALIZE_FAT_INLINE_STRING ||
kind == FINALIZE_JITCODE);
JS_ASSERT(!rt->isHeapBusy());
JS_ASSERT(!rt->gc.noGCOrAllocationCheck);
JS_ASSERT(!rt->noGCOrAllocationCheck);
#endif
// For testing out of memory conditions


@ -3917,7 +3917,7 @@ ExclusiveContext::getNewType(const Class *clasp, TaggedProto proto, JSFunction *
#ifdef JSGC_GENERATIONAL
if (proto.isObject() && hasNursery() && nursery().isInside(proto.toObject())) {
asJSContext()->runtime()->gc.storeBuffer.putGeneric(
asJSContext()->runtime()->gcStoreBuffer.putGeneric(
NewTypeObjectsSetRef(&newTypeObjects, clasp, proto.toObject(), fun));
}
#endif
@ -4255,7 +4255,7 @@ TypeCompartment::sweep(FreeOp *fop)
void
JSCompartment::sweepNewTypeObjectTable(TypeObjectWithNewScriptSet &table)
{
gcstats::AutoPhase ap(runtimeFromMainThread()->gc.stats,
gcstats::AutoPhase ap(runtimeFromMainThread()->gcStats,
gcstats::PHASE_SWEEP_TABLES_TYPE_OBJECT);
JS_ASSERT(zone()->isGCSweeping());
@ -4400,7 +4400,7 @@ TypeZone::sweep(FreeOp *fop, bool releaseTypes, bool *oom)
#endif
{
gcstats::AutoPhase ap2(rt->gc.stats, gcstats::PHASE_DISCARD_TI);
gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_DISCARD_TI);
for (CellIterUnderGC i(zone(), FINALIZE_SCRIPT); !i.done(); i.next()) {
JSScript *script = i.get<JSScript>();
@ -4442,7 +4442,7 @@ TypeZone::sweep(FreeOp *fop, bool releaseTypes, bool *oom)
}
{
gcstats::AutoPhase ap2(rt->gc.stats, gcstats::PHASE_SWEEP_TYPES);
gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_SWEEP_TYPES);
for (gc::CellIterUnderGC iter(zone(), gc::FINALIZE_TYPE_OBJECT);
!iter.done(); iter.next())
@ -4470,7 +4470,7 @@ TypeZone::sweep(FreeOp *fop, bool releaseTypes, bool *oom)
}
{
gcstats::AutoPhase ap2(rt->gc.stats, gcstats::PHASE_FREE_TI_ARENA);
gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_FREE_TI_ARENA);
rt->freeLifoAlloc.transferFrom(&oldAlloc);
}
}


@ -1503,7 +1503,7 @@ static void
GeneratorWriteBarrierPost(JSContext *cx, JSGenerator *gen)
{
#ifdef JSGC_GENERATIONAL
cx->runtime()->gc.storeBuffer.putWholeCell(gen->obj);
cx->runtime()->gcStoreBuffer.putWholeCell(gen->obj);
#endif
}
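
putWholeCell is the coarsest post-barrier: rather than recording which slot of gen->obj changed, it records the cell itself and lets the next minor GC re-trace all of its edges. The general shape, sketched (any tenured cell that may now point into the nursery qualifies):

static void
WholeCellPostBarrierSketch(JSRuntime *rt, js::gc::Cell *cell)
{
#ifdef JSGC_GENERATIONAL
    // Minor GC will call the cell's trace hook again, updating any edges
    // that point at moved nursery things.
    rt->gcStoreBuffer.putWholeCell(cell);
#endif
}
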


@ -1290,7 +1290,7 @@ NewObject(ExclusiveContext *cx, types::TypeObject *type_, JSObject *parent, gc::
if (!cx->shouldBeJSContext())
return nullptr;
JSRuntime *rt = cx->asJSContext()->runtime();
rt->gc.incrementalEnabled = false;
rt->gcIncrementalEnabled = false;
#ifdef DEBUG
if (rt->gcMode() == JSGC_MODE_INCREMENTAL) {
@ -2710,7 +2710,7 @@ AllocateSlots(ThreadSafeContext *cx, JSObject *obj, uint32_t nslots)
{
#ifdef JSGC_GENERATIONAL
if (cx->isJSContext())
return cx->asJSContext()->runtime()->gc.nursery.allocateSlots(cx->asJSContext(), obj, nslots);
return cx->asJSContext()->runtime()->gcNursery.allocateSlots(cx->asJSContext(), obj, nslots);
#endif
return cx->pod_malloc<HeapSlot>(nslots);
}
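
AllocateSlots above is one instance of the allocation split this patch restores everywhere: a thread with a JSContext may put dynamic slots in the nursery, while exclusive/worker threads fall back to malloc. Condensed, with the rationale as comments:

static HeapSlot *
AllocateSlotsSketch(ThreadSafeContext *cx, JSObject *obj, uint32_t nslots)
{
#ifdef JSGC_GENERATIONAL
    // Nursery-backed slots are freed wholesale at minor GC and are copied
    // to the malloc heap only if the owning object survives.
    if (cx->isJSContext())
        return cx->asJSContext()->runtime()->gcNursery.allocateSlots(
            cx->asJSContext(), obj, nslots);
#endif
    // Off-main-thread contexts cannot touch the nursery.
    return cx->pod_malloc<HeapSlot>(nslots);
}
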
@ -2721,9 +2721,9 @@ ReallocateSlots(ThreadSafeContext *cx, JSObject *obj, HeapSlot *oldSlots,
{
#ifdef JSGC_GENERATIONAL
if (cx->isJSContext()) {
return cx->asJSContext()->runtime()->gc.nursery.reallocateSlots(cx->asJSContext(),
obj, oldSlots,
oldCount, newCount);
return cx->asJSContext()->runtime()->gcNursery.reallocateSlots(cx->asJSContext(),
obj, oldSlots,
oldCount, newCount);
}
#endif
return (HeapSlot *)cx->realloc_(oldSlots, oldCount * sizeof(HeapSlot),
@ -2798,7 +2798,7 @@ FreeSlots(ThreadSafeContext *cx, HeapSlot *slots)
// Note: threads without a JSContext do not have access to nursery allocated things.
#ifdef JSGC_GENERATIONAL
if (cx->isJSContext())
return cx->asJSContext()->runtime()->gc.nursery.freeSlots(cx->asJSContext(), slots);
return cx->asJSContext()->runtime()->gcNursery.freeSlots(cx->asJSContext(), slots);
#endif
js_free(slots);
}
@ -3020,7 +3020,7 @@ AllocateElements(ThreadSafeContext *cx, JSObject *obj, uint32_t nelems)
{
#ifdef JSGC_GENERATIONAL
if (cx->isJSContext())
return cx->asJSContext()->runtime()->gc.nursery.allocateElements(cx->asJSContext(), obj, nelems);
return cx->asJSContext()->runtime()->gcNursery.allocateElements(cx->asJSContext(), obj, nelems);
#endif
return static_cast<js::ObjectElements *>(cx->malloc_(nelems * sizeof(HeapValue)));
@ -3032,9 +3032,9 @@ ReallocateElements(ThreadSafeContext *cx, JSObject *obj, ObjectElements *oldHead
{
#ifdef JSGC_GENERATIONAL
if (cx->isJSContext()) {
return cx->asJSContext()->runtime()->gc.nursery.reallocateElements(cx->asJSContext(), obj,
oldHeader, oldCount,
newCount);
return cx->asJSContext()->runtime()->gcNursery.reallocateElements(cx->asJSContext(), obj,
oldHeader, oldCount,
newCount);
}
#endif


@ -517,7 +517,7 @@ JSObject::create(js::ExclusiveContext *cx, js::gc::AllocKind kind, js::gc::Initi
if (extantSlots) {
#ifdef JSGC_GENERATIONAL
if (cx->isJSContext())
cx->asJSContext()->runtime()->gc.nursery.notifyInitialSlots(obj, extantSlots);
cx->asJSContext()->runtime()->gcNursery.notifyInitialSlots(obj, extantSlots);
#endif
obj->slots = extantSlots;
}


@ -814,7 +814,7 @@ ToDisassemblySource(JSContext *cx, HandleValue v, JSAutoByteString *bytes)
return true;
}
if (cx->runtime()->isHeapBusy() || cx->runtime()->gc.noGCOrAllocationCheck) {
if (cx->runtime()->isHeapBusy() || cx->runtime()->noGCOrAllocationCheck) {
char *source = JS_sprintf_append(nullptr, "<value>");
if (!source)
return false;
@ -1936,10 +1936,10 @@ js::GetPCCountScriptCount(JSContext *cx)
{
JSRuntime *rt = cx->runtime();
if (!rt->gc.scriptAndCountsVector)
if (!rt->scriptAndCountsVector)
return 0;
return rt->gc.scriptAndCountsVector->length();
return rt->scriptAndCountsVector->length();
}
enum MaybeComma {NO_COMMA, COMMA};
@ -1974,12 +1974,12 @@ js::GetPCCountScriptSummary(JSContext *cx, size_t index)
{
JSRuntime *rt = cx->runtime();
if (!rt->gc.scriptAndCountsVector || index >= rt->gc.scriptAndCountsVector->length()) {
if (!rt->scriptAndCountsVector || index >= rt->scriptAndCountsVector->length()) {
JS_ReportErrorNumber(cx, js_GetErrorMessage, nullptr, JSMSG_BUFFER_TOO_SMALL);
return nullptr;
}
const ScriptAndCounts &sac = (*rt->gc.scriptAndCountsVector)[index];
const ScriptAndCounts &sac = (*rt->scriptAndCountsVector)[index];
RootedScript script(cx, sac.script);
/*
@ -2234,12 +2234,12 @@ js::GetPCCountScriptContents(JSContext *cx, size_t index)
{
JSRuntime *rt = cx->runtime();
if (!rt->gc.scriptAndCountsVector || index >= rt->gc.scriptAndCountsVector->length()) {
if (!rt->scriptAndCountsVector || index >= rt->scriptAndCountsVector->length()) {
JS_ReportErrorNumber(cx, js_GetErrorMessage, nullptr, JSMSG_BUFFER_TOO_SMALL);
return nullptr;
}
const ScriptAndCounts &sac = (*rt->gc.scriptAndCountsVector)[index];
const ScriptAndCounts &sac = (*rt->scriptAndCountsVector)[index];
JSScript *script = sac.script;
StringBuffer buf(cx);


@ -2987,7 +2987,7 @@ ProxyObject::trace(JSTracer *trc, JSObject *obj)
ProxyObject *proxy = &obj->as<ProxyObject>();
#ifdef DEBUG
if (!trc->runtime()->gc.disableStrictProxyCheckingCount && proxy->is<WrapperObject>()) {
if (!trc->runtime()->gcDisableStrictProxyCheckingCount && proxy->is<WrapperObject>()) {
JSObject *referent = &proxy->private_().toObject();
if (referent->compartment() != proxy->compartment()) {
/*


@ -2063,7 +2063,7 @@ SaveSharedScriptData(ExclusiveContext *cx, Handle<JSScript *> script, SharedScri
*/
if (cx->isJSContext()) {
JSRuntime *rt = cx->asJSContext()->runtime();
if (JS::IsIncrementalGCInProgress(rt) && rt->gc.isFull)
if (JS::IsIncrementalGCInProgress(rt) && rt->gcIsFull)
ssd->marked = true;
}
#endif
@ -2081,14 +2081,14 @@ MarkScriptData(JSRuntime *rt, const jsbytecode *bytecode)
* a GC. Since SweepScriptBytecodes is only called during a full gc,
* to preserve this invariant, only mark during a full gc.
*/
if (rt->gc.isFull)
if (rt->gcIsFull)
SharedScriptData::fromBytecode(bytecode)->marked = true;
}
void
js::UnmarkScriptData(JSRuntime *rt)
{
JS_ASSERT(rt->gc.isFull);
JS_ASSERT(rt->gcIsFull);
ScriptDataTable &table = rt->scriptDataTable();
for (ScriptDataTable::Enum e(table); !e.empty(); e.popFront()) {
SharedScriptData *entry = e.front();
@ -2099,7 +2099,7 @@ js::UnmarkScriptData(JSRuntime *rt)
void
js::SweepScriptData(JSRuntime *rt)
{
JS_ASSERT(rt->gc.isFull);
JS_ASSERT(rt->gcIsFull);
ScriptDataTable &table = rt->scriptDataTable();
if (rt->keepAtoms())
@ -3304,7 +3304,7 @@ JSScript::markChildren(JSTracer *trc)
// JSScript::Create(), but not yet finished initializing it with
// fullyInitFromEmitter() or fullyInitTrivial().
JS_ASSERT_IF(trc->runtime()->gc.strictCompartmentChecking, zone()->isCollecting());
JS_ASSERT_IF(trc->runtime()->gcStrictCompartmentChecking, zone()->isCollecting());
for (uint32_t i = 0; i < natoms(); ++i) {
if (atoms[i])


@ -288,7 +288,7 @@ WeakMapPostWriteBarrier(JSRuntime *rt, ObjectValueMap *weakMap, JSObject *key)
typedef gc::HashKeyRef<UnbarrieredMap, JSObject *> Ref;
if (key && IsInsideNursery(rt, key))
rt->gc.storeBuffer.putGeneric(Ref((unbarrieredMap), key));
rt->gcStoreBuffer.putGeneric(Ref((unbarrieredMap), key));
#endif
}
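
gc::HashKeyRef, used here and again for DebugScopes later in this patch, generalizes rekeying: it remembers (table, key) and, when replayed at minor GC, re-finds the entry and moves it under the key's new address. A hedged sketch; lookup/rekeyIfMoved match the era's HashMap API, while TraceKeySketch is a hypothetical stand-in for the type-specific Mark call:

template <typename Key>
void TraceKeySketch(JSTracer *trc, Key *keyp);   // hypothetical helper

template <typename Map, typename Key>
struct HashKeyRefSketch : public js::gc::BufferableRef
{
    Map *map;
    Key key;

    HashKeyRefSketch(Map *m, const Key &k) : map(m), key(k) {}

    void mark(JSTracer *trc) {
        Key prior = key;
        if (!map->lookup(prior))
            return;                      // entry died before the minor GC
        TraceKeySketch(trc, &key);       // updates `key` if the thing moved
        map->rekeyIfMoved(prior, key);   // re-insert under the new address
    }
};
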


@ -276,7 +276,7 @@ PostBarrierTypedArrayObject(JSObject *obj)
JS_ASSERT(obj);
JSRuntime *rt = obj->runtimeFromMainThread();
if (!rt->isHeapBusy() && !IsInsideNursery(rt, obj))
rt->gc.storeBuffer.putWholeCell(obj);
rt->gcStoreBuffer.putWholeCell(obj);
#endif
}


@ -479,7 +479,7 @@ ForkJoinActivation::ForkJoinActivation(JSContext *cx)
MinorGC(cx->runtime(), JS::gcreason::API);
cx->runtime()->gc.helperThread.waitBackgroundSweepEnd();
cx->runtime()->gcHelperThread.waitBackgroundSweepEnd();
JS_ASSERT(!cx->runtime()->needsBarrier());
JS_ASSERT(!cx->zone()->needsBarrier());
@ -1557,7 +1557,7 @@ ForkJoinShared::setAbortFlagDueToInterrupt(ForkJoinContext &cx)
// The GC Needed flag should not be set during parallel
// execution. Instead, one of the requestGC() or
// requestZoneGC() methods should be invoked.
JS_ASSERT(!cx_->runtime()->gc.isNeeded);
JS_ASSERT(!cx_->runtime()->gcIsNeeded);
if (!abort_) {
cx.bailoutRecord->setCause(ParallelBailoutInterrupt);


@ -537,7 +537,7 @@ JS::CollectRuntimeStats(JSRuntime *rt, RuntimeStats *rtStats, ObjectPrivateVisit
if (!rtStats->compartmentStatsVector.reserve(rt->numCompartments))
return false;
if (!rtStats->zoneStatsVector.reserve(rt->gc.zones.length()))
if (!rtStats->zoneStatsVector.reserve(rt->zones.length()))
return false;
rtStats->gcHeapChunkTotal =


@ -731,7 +731,7 @@ RegExpCompartment::sweep(JSRuntime *rt)
for (PendingSet::Enum e(inUse_); !e.empty(); e.popFront()) {
RegExpShared *shared = e.front();
if (shared->activeUseCount == 0 && shared->gcNumberWhenUsed < rt->gc.startNumber) {
if (shared->activeUseCount == 0 && shared->gcNumberWhenUsed < rt->gcStartNumber) {
js_delete(shared);
e.removeFront();
}


@ -114,7 +114,7 @@ static const JSWrapObjectCallbacks DefaultWrapObjectCallbacks = {
JSRuntime::JSRuntime(JSRuntime *parentRuntime, JSUseHelperThreads useHelperThreads)
: JS::shadow::Runtime(
#ifdef JSGC_GENERATIONAL
&gc.storeBuffer
&gcStoreBuffer
#endif
),
mainThread(this),
@ -135,6 +135,7 @@ JSRuntime::JSRuntime(JSRuntime *parentRuntime, JSUseHelperThreads useHelperThrea
#else
interruptLockTaken(false),
#endif
systemZone(nullptr),
numCompartments(0),
localeCallbacks(nullptr),
defaultLocale(nullptr),
@ -165,11 +166,89 @@ JSRuntime::JSRuntime(JSRuntime *parentRuntime, JSUseHelperThreads useHelperThrea
#ifdef DEBUG
activeContext(nullptr),
#endif
gc(thisFromCtor()),
gcInitialized(false),
gcSystemAvailableChunkListHead(nullptr),
gcUserAvailableChunkListHead(nullptr),
gcBytes(0),
gcMaxBytes(0),
gcMaxMallocBytes(0),
gcNumArenasFreeCommitted(0),
gcMarker(this),
gcVerifyPreData(nullptr),
gcVerifyPostData(nullptr),
gcChunkAllocationSinceLastGC(false),
gcNextFullGCTime(0),
gcLastGCTime(0),
gcJitReleaseTime(0),
gcAllocationThreshold(30 * 1024 * 1024),
gcHighFrequencyGC(false),
gcHighFrequencyTimeThreshold(1000),
gcHighFrequencyLowLimitBytes(100 * 1024 * 1024),
gcHighFrequencyHighLimitBytes(500 * 1024 * 1024),
gcHighFrequencyHeapGrowthMax(3.0),
gcHighFrequencyHeapGrowthMin(1.5),
gcLowFrequencyHeapGrowth(1.5),
gcDynamicHeapGrowth(false),
gcDynamicMarkSlice(false),
gcDecommitThreshold(32 * 1024 * 1024),
gcShouldCleanUpEverything(false),
gcGrayBitsValid(false),
gcIsNeeded(0),
gcStats(thisFromCtor()),
gcNumber(0),
gcStartNumber(0),
gcIsFull(false),
gcTriggerReason(JS::gcreason::NO_REASON),
gcStrictCompartmentChecking(false),
#ifdef DEBUG
gcDisableStrictProxyCheckingCount(0),
#endif
gcIncrementalState(gc::NO_INCREMENTAL),
gcLastMarkSlice(false),
gcSweepOnBackgroundThread(false),
gcFoundBlackGrayEdges(false),
gcSweepingZones(nullptr),
gcZoneGroupIndex(0),
gcZoneGroups(nullptr),
gcCurrentZoneGroup(nullptr),
gcSweepPhase(0),
gcSweepZone(nullptr),
gcSweepKindIndex(0),
gcAbortSweepAfterCurrentGroup(false),
gcArenasAllocatedDuringSweep(nullptr),
#ifdef DEBUG
gcMarkingValidator(nullptr),
#endif
gcInterFrameGC(0),
gcSliceBudget(SliceBudget::Unlimited),
gcIncrementalEnabled(true),
gcGenerationalDisabled(0),
gcManipulatingDeadZones(false),
gcObjectsMarkedInDeadZones(0),
gcPoke(false),
heapState(Idle),
#ifdef JSGC_GENERATIONAL
gcNursery(thisFromCtor()),
gcStoreBuffer(thisFromCtor(), gcNursery),
#endif
#ifdef JS_GC_ZEAL
gcZeal_(0),
gcZealFrequency(0),
gcNextScheduled(0),
gcDeterministicOnly(false),
gcIncrementalLimit(0),
#endif
gcValidate(true),
gcFullCompartmentChecks(false),
gcCallback(nullptr),
gcSliceCallback(nullptr),
gcFinalizeCallback(nullptr),
gcMallocBytes(0),
gcMallocGCTriggered(false),
#ifdef JS_ARM_SIMULATOR
simulatorRuntime_(nullptr),
#endif
scriptAndCountsVector(nullptr),
NaNValue(DoubleNaNValue()),
negativeInfinityValue(DoubleValue(NegativeInfinity<double>())),
positiveInfinityValue(DoubleValue(PositiveInfinity<double>())),
@ -177,9 +256,13 @@ JSRuntime::JSRuntime(JSRuntime *parentRuntime, JSUseHelperThreads useHelperThrea
debugMode(false),
spsProfiler(thisFromCtor()),
profilingScripts(false),
alwaysPreserveCode(false),
hadOutOfMemory(false),
haveCreatedContext(false),
data(nullptr),
gcLock(nullptr),
gcLockOwner(nullptr),
gcHelperThread(thisFromCtor()),
signalHandlersInstalled_(false),
defaultFreeOp_(thisFromCtor(), false),
debuggerMutations(0),
@ -206,6 +289,9 @@ JSRuntime::JSRuntime(JSRuntime *parentRuntime, JSUseHelperThreads useHelperThrea
permanentAtoms(nullptr),
wrapObjectCallbacks(&DefaultWrapObjectCallbacks),
preserveWrapperCallback(nullptr),
#ifdef DEBUG
noGCOrAllocationCheck(0),
#endif
jitSupportsFloatingPoint(false),
ionPcScriptCache(nullptr),
threadPool(this),
@ -267,8 +353,8 @@ JSRuntime::init(uint32_t maxbytes)
if (!interruptLock)
return false;
gc.lock = PR_NewLock();
if (!gc.lock)
gcLock = PR_NewLock();
if (!gcLock)
return false;
exclusiveAccessLock = PR_NewLock();
@ -287,7 +373,7 @@ JSRuntime::init(uint32_t maxbytes)
if (!js_InitGC(this, maxbytes))
return false;
if (!gc.marker.init(gcMode()))
if (!gcMarker.init(gcMode()))
return false;
const char *size = getenv("JSGC_MARK_STACK_LIMIT");
@ -303,7 +389,7 @@ JSRuntime::init(uint32_t maxbytes)
if (!atomsCompartment || !atomsCompartment->init(nullptr))
return false;
gc.zones.append(atomsZone.get());
zones.append(atomsZone.get());
atomsZone->compartments.append(atomsCompartment.get());
atomsCompartment->isSystem = true;
@ -438,8 +524,8 @@ JSRuntime::~JSRuntime()
atomsCompartment_ = nullptr;
#ifdef JS_THREADSAFE
if (gc.lock)
PR_DestroyLock(gc.lock);
if (gcLock)
PR_DestroyLock(gcLock);
#endif
js_free(defaultLocale);
@ -453,8 +539,8 @@ JSRuntime::~JSRuntime()
js_delete(ionPcScriptCache);
#ifdef JSGC_GENERATIONAL
gc.storeBuffer.disable();
gc.nursery.disable();
gcStoreBuffer.disable();
gcNursery.disable();
#endif
#ifdef JS_ARM_SIMULATOR
@ -542,12 +628,12 @@ JSRuntime::addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf, JS::Runtim
}
#endif
rtSizes->gc.marker += gc.marker.sizeOfExcludingThis(mallocSizeOf);
rtSizes->gc.marker += gcMarker.sizeOfExcludingThis(mallocSizeOf);
#ifdef JSGC_GENERATIONAL
rtSizes->gc.nurseryCommitted += gc.nursery.sizeOfHeapCommitted();
rtSizes->gc.nurseryDecommitted += gc.nursery.sizeOfHeapDecommitted();
rtSizes->gc.nurseryHugeSlots += gc.nursery.sizeOfHugeSlots(mallocSizeOf);
gc.storeBuffer.addSizeOfExcludingThis(mallocSizeOf, &rtSizes->gc);
rtSizes->gc.nurseryCommitted += gcNursery.sizeOfHeapCommitted();
rtSizes->gc.nurseryDecommitted += gcNursery.sizeOfHeapDecommitted();
rtSizes->gc.nurseryHugeSlots += gcNursery.sizeOfHugeSlots(mallocSizeOf);
gcStoreBuffer.addSizeOfExcludingThis(mallocSizeOf, &rtSizes->gc);
#endif
}
@ -699,7 +785,7 @@ JSRuntime::setGCMaxMallocBytes(size_t value)
* For compatibility, treat any value that exceeds PTRDIFF_T_MAX as
* PTRDIFF_T_MAX.
*/
gc.maxMallocBytes = (ptrdiff_t(value) >= 0) ? value : size_t(-1) >> 1;
gcMaxMallocBytes = (ptrdiff_t(value) >= 0) ? value : size_t(-1) >> 1;
resetGCMallocBytes();
for (ZonesIter zone(this, WithAtoms); !zone.done(); zone.next())
zone->setGCMaxMallocBytes(value);
@ -715,8 +801,8 @@ void
JSRuntime::updateMallocCounter(JS::Zone *zone, size_t nbytes)
{
/* We tolerate any thread races when updating gcMallocBytes. */
gc.mallocBytes -= ptrdiff_t(nbytes);
if (MOZ_UNLIKELY(gc.mallocBytes <= 0))
gcMallocBytes -= ptrdiff_t(nbytes);
if (MOZ_UNLIKELY(gcMallocBytes <= 0))
onTooMuchMalloc();
else if (zone)
zone->updateMallocCounter(nbytes);
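
updateMallocCounter is the memory-pressure feedback loop: every reported allocation counts down from gcMaxMallocBytes, and crossing zero calls onTooMuchMalloc(), which (next hunk) triggers at most one TOO_MUCH_MALLOC GC per reset. A typical call site, sketched:

void *mallocWithPressure(JSRuntime *rt, JS::Zone *zone, size_t nbytes)
{
    void *p = js_malloc(nbytes);
    if (p)
        rt->updateMallocCounter(zone, nbytes);  // may schedule a GC
    return p;
}
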
@ -728,8 +814,8 @@ JSRuntime::onTooMuchMalloc()
if (!CurrentThreadCanAccessRuntime(this))
return;
if (!gc.mallocGCTriggered)
gc.mallocGCTriggered = TriggerGC(this, JS::gcreason::TOO_MUCH_MALLOC);
if (!gcMallocGCTriggered)
gcMallocGCTriggered = TriggerGC(this, JS::gcreason::TOO_MUCH_MALLOC);
}
JS_FRIEND_API(void *)
@ -749,7 +835,7 @@ JSRuntime::onOutOfMemory(void *p, size_t nbytes, JSContext *cx)
* all the allocations and released the empty GC chunks.
*/
JS::ShrinkGCBuffers(this);
gc.helperThread.waitBackgroundSweepOrAllocEnd();
gcHelperThread.waitBackgroundSweepOrAllocEnd();
if (!p)
p = js_malloc(nbytes);
else if (p == reinterpret_cast<void *>(1))
@ -844,7 +930,7 @@ JSRuntime::assertCanLock(RuntimeLock which)
case InterruptLock:
JS_ASSERT(!currentThreadOwnsInterruptLock());
case GCLock:
JS_ASSERT(gc.lockOwner != PR_GetCurrentThread());
JS_ASSERT(gcLockOwner != PR_GetCurrentThread());
break;
default:
MOZ_CRASH();


@ -19,6 +19,7 @@
#include "jsatom.h"
#include "jsclist.h"
#include "jsgc.h"
#ifdef DEBUG
# include "jsproxy.h"
#endif
@ -26,7 +27,13 @@
#include "ds/FixedSizeHash.h"
#include "frontend/ParseMaps.h"
#include "gc/GCRuntime.h"
#ifdef JSGC_GENERATIONAL
# include "gc/Nursery.h"
#endif
#include "gc/Statistics.h"
#ifdef JSGC_GENERATIONAL
# include "gc/StoreBuffer.h"
#endif
#include "gc/Tracer.h"
#ifdef XP_MACOSX
# include "jit/AsmJSSignalHandlers.h"
@ -128,6 +135,48 @@ struct ScopeCoordinateNameCache {
void purge();
};
typedef Vector<ScriptAndCounts, 0, SystemAllocPolicy> ScriptAndCountsVector;
struct ConservativeGCData
{
/*
* The GC scans conservatively between ThreadData::nativeStackBase and
* nativeStackTop unless the latter is nullptr.
*/
uintptr_t *nativeStackTop;
union {
jmp_buf jmpbuf;
uintptr_t words[JS_HOWMANY(sizeof(jmp_buf), sizeof(uintptr_t))];
} registerSnapshot;
ConservativeGCData() {
mozilla::PodZero(this);
}
~ConservativeGCData() {
#ifdef JS_THREADSAFE
/*
* The conservative GC scanner should be disabled when the thread leaves
* the last request.
*/
JS_ASSERT(!hasStackToScan());
#endif
}
MOZ_NEVER_INLINE void recordStackTop();
#ifdef JS_THREADSAFE
void updateForRequestEnd() {
nativeStackTop = nullptr;
}
#endif
bool hasStackToScan() const {
return !!nativeStackTop;
}
};
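
ConservativeGCData brackets the memory the conservative scanner must examine: every word between the recorded nativeStackTop and the thread's stack base, plus the saved register snapshot. A hypothetical sketch of that scan, assuming a downward-growing stack; MarkIfGCThingWord is a stand-in name, not the real marker entry point:

static void MarkIfGCThingWord(uintptr_t word);   // hypothetical helper

static void
ScanConservativelySketch(ConservativeGCData &cgc, uintptr_t *nativeStackBase)
{
    if (!cgc.hasStackToScan())
        return;
    // Treat every stack word as a potential pointer into a GC chunk.
    for (uintptr_t *w = cgc.nativeStackTop; w < nativeStackBase; w++)
        MarkIfGCThingWord(*w);
    // Registers spilled by recordStackTop() are scanned the same way.
    for (size_t i = 0; i < mozilla::ArrayLength(cgc.registerSnapshot.words); i++)
        MarkIfGCThingWord(cgc.registerSnapshot.words[i]);
}
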
struct EvalCacheEntry
{
JSScript *script;
@ -606,6 +655,12 @@ class PerThreadData : public PerThreadDataFriendFields
#endif
};
namespace gc {
class MarkingValidator;
} // namespace gc
typedef Vector<JS::Zone *, 4, SystemAllocPolicy> ZoneVector;
class AutoLockForExclusiveAccess;
void RecomputeStackLimit(JSRuntime *rt, StackKind kind);
@ -751,6 +806,12 @@ struct JSRuntime : public JS::shadow::Runtime,
#endif
}
/* Embedders can use this zone however they wish. */
JS::Zone *systemZone;
/* List of compartments and zones (protected by the GC lock). */
js::ZoneVector zones;
/* How many compartments there are across all zones. */
size_t numCompartments;
@ -914,37 +975,259 @@ struct JSRuntime : public JS::shadow::Runtime,
#endif
/* Garbage collector state, used by jsgc.c. */
js::gc::GCRuntime gc;
/* Garbage collector state has been successfully initialized. */
bool gcInitialized;
JSGCMode gcMode() const { return gc.mode; }
/*
* Set of all GC chunks with at least one allocated thing. The
* conservative GC uses it to quickly check if a possible GC thing points
* into an allocated chunk.
*/
js::GCChunkSet gcChunkSet;
/*
* Doubly-linked lists of chunks from user and system compartments. The GC
* allocates its arenas from the corresponding list; when all arenas in
* the list head are taken, the chunk is removed from the list. During
* GC, when all arenas in a chunk become free, that chunk is
* removed from the list and scheduled for release.
*/
js::gc::Chunk *gcSystemAvailableChunkListHead;
js::gc::Chunk *gcUserAvailableChunkListHead;
js::gc::ChunkPool gcChunkPool;
js::RootedValueMap gcRootsHash;
/* This is updated by both the main and GC helper threads. */
mozilla::Atomic<size_t, mozilla::ReleaseAcquire> gcBytes;
size_t gcMaxBytes;
size_t gcMaxMallocBytes;
/*
* Number of committed arenas in all GC chunks, including empty chunks.
*/
mozilla::Atomic<uint32_t, mozilla::ReleaseAcquire> gcNumArenasFreeCommitted;
js::GCMarker gcMarker;
void *gcVerifyPreData;
void *gcVerifyPostData;
bool gcChunkAllocationSinceLastGC;
int64_t gcNextFullGCTime;
int64_t gcLastGCTime;
int64_t gcJitReleaseTime;
private:
JSGCMode gcMode_;
public:
JSGCMode gcMode() const { return gcMode_; }
void setGCMode(JSGCMode mode) {
gc.mode = mode;
gc.marker.setGCMode(mode);
gcMode_ = mode;
gcMarker.setGCMode(mode);
}
bool isHeapBusy() { return gc.heapState != js::Idle; }
bool isHeapMajorCollecting() { return gc.heapState == js::MajorCollecting; }
bool isHeapMinorCollecting() { return gc.heapState == js::MinorCollecting; }
size_t gcAllocationThreshold;
bool gcHighFrequencyGC;
uint64_t gcHighFrequencyTimeThreshold;
uint64_t gcHighFrequencyLowLimitBytes;
uint64_t gcHighFrequencyHighLimitBytes;
double gcHighFrequencyHeapGrowthMax;
double gcHighFrequencyHeapGrowthMin;
double gcLowFrequencyHeapGrowth;
bool gcDynamicHeapGrowth;
bool gcDynamicMarkSlice;
uint64_t gcDecommitThreshold;
/* During shutdown, the GC needs to clean up every possible object. */
bool gcShouldCleanUpEverything;
/*
* The gray bits can become invalid if UnmarkGray overflows the stack. A
* full GC will reset this bit, since it fills in all the gray bits.
*/
bool gcGrayBitsValid;
/*
* These flags must be kept separate so that a thread requesting a
* compartment GC doesn't cancel another thread's concurrent request for a
* full GC.
*/
volatile uintptr_t gcIsNeeded;
js::gcstats::Statistics gcStats;
/* Incremented on every GC slice. */
uint64_t gcNumber;
/* The gcNumber at the time of the most recent GC's first slice. */
uint64_t gcStartNumber;
/* Whether the currently running GC can finish in multiple slices. */
bool gcIsIncremental;
/* Whether all compartments are being collected in first GC slice. */
bool gcIsFull;
/* The reason that an interrupt-triggered GC should be called. */
JS::gcreason::Reason gcTriggerReason;
/*
* If this is true, all marked objects must belong to a compartment being
* GCed. This is used to look for compartment bugs.
*/
bool gcStrictCompartmentChecking;
#ifdef DEBUG
/*
* If this is 0, all cross-compartment proxies must be registered in the
* wrapper map. This checking must be disabled temporarily while creating
* new wrappers. When non-zero, this records the recursion depth of wrapper
* creation.
*/
uintptr_t gcDisableStrictProxyCheckingCount;
#else
uintptr_t unused1;
#endif
/*
* The current incremental GC phase. This is also used internally in
* non-incremental GC.
*/
js::gc::State gcIncrementalState;
/* Indicates that the last incremental slice exhausted the mark stack. */
bool gcLastMarkSlice;
/* Whether any sweeping will take place in the separate GC helper thread. */
bool gcSweepOnBackgroundThread;
/* Whether any black->gray edges were found during marking. */
bool gcFoundBlackGrayEdges;
/* List head of zones to be swept in the background. */
JS::Zone *gcSweepingZones;
/* Index of current zone group (for stats). */
unsigned gcZoneGroupIndex;
/*
* Incremental sweep state.
*/
JS::Zone *gcZoneGroups;
JS::Zone *gcCurrentZoneGroup;
int gcSweepPhase;
JS::Zone *gcSweepZone;
int gcSweepKindIndex;
bool gcAbortSweepAfterCurrentGroup;
/*
* List head of arenas allocated during the sweep phase.
*/
js::gc::ArenaHeader *gcArenasAllocatedDuringSweep;
#ifdef DEBUG
js::gc::MarkingValidator *gcMarkingValidator;
#endif
/*
* Indicates that a GC slice has taken place in the middle of an animation
* frame, rather than at the beginning. In this case, the next slice will be
* delayed so that we don't get back-to-back slices.
*/
volatile uintptr_t gcInterFrameGC;
/* Default budget for incremental GC slice. See SliceBudget in jsgc.h. */
int64_t gcSliceBudget;
/*
* We disable incremental GC if we encounter a js::Class with a trace hook
* that does not implement write barriers.
*/
bool gcIncrementalEnabled;
/*
* GGC can be enabled from the command line while testing.
*/
unsigned gcGenerationalDisabled;
/*
* This is true if we are in the middle of a brain transplant (e.g.,
* JS_TransplantObject) or some other operation that can manipulate
* dead zones.
*/
bool gcManipulatingDeadZones;
/*
* This field is incremented each time we mark an object inside a
* zone with no incoming cross-compartment pointers. Typically if
* this happens it signals that an incremental GC is marking too much
* stuff. At various times we check this counter and, if it has changed, we
* run an immediate, non-incremental GC to clean up the dead
* zones. This should happen very rarely.
*/
unsigned gcObjectsMarkedInDeadZones;
bool gcPoke;
volatile js::HeapState heapState;
bool isHeapBusy() { return heapState != js::Idle; }
bool isHeapMajorCollecting() { return heapState == js::MajorCollecting; }
bool isHeapMinorCollecting() { return heapState == js::MinorCollecting; }
bool isHeapCollecting() { return isHeapMajorCollecting() || isHeapMinorCollecting(); }
#ifdef JSGC_GENERATIONAL
js::Nursery gcNursery;
js::gc::StoreBuffer gcStoreBuffer;
#endif
/*
* These options control the zealousness of the GC. The fundamental values
* are gcNextScheduled and gcDebugCompartmentGC. At every allocation,
* gcNextScheduled is decremented. When it reaches zero, we do either a
* full or a compartmental GC, based on gcDebugCompartmentGC.
*
* At this point, if gcZeal_ is one of the types that trigger periodic
* collection, then gcNextScheduled is reset to the value of
* gcZealFrequency. Otherwise, no additional GCs take place.
*
* You can control these values in several ways:
* - Pass the -Z flag to the shell (see the usage info for details)
* - Call gczeal() or schedulegc() from inside shell-executed JS code
* (see the help for details)
*
* If gcZeal_ == 1 then we perform GCs in select places (during MaybeGC and
* whenever a GC poke happens). This option is mainly useful to embedders.
*
* We use gcZeal_ == 4 to enable write barrier verification. See the comment
* in jsgc.cpp for more information about this.
*
* gcZeal_ values from 8 to 10 periodically run different types of
* incremental GC.
*/
#ifdef JS_GC_ZEAL
int gcZeal() { return gc.zealMode; }
int gcZeal_;
int gcZealFrequency;
int gcNextScheduled;
bool gcDeterministicOnly;
int gcIncrementalLimit;
js::Vector<JSObject *, 0, js::SystemAllocPolicy> gcSelectedForMarking;
int gcZeal() { return gcZeal_; }
bool upcomingZealousGC() {
return gc.nextScheduled == 1;
return gcNextScheduled == 1;
}
bool needZealousGC() {
if (gc.nextScheduled > 0 && --gc.nextScheduled == 0) {
if (gcNextScheduled > 0 && --gcNextScheduled == 0) {
if (gcZeal() == js::gc::ZealAllocValue ||
gcZeal() == js::gc::ZealGenerationalGCValue ||
(gcZeal() >= js::gc::ZealIncrementalRootsThenFinish &&
gcZeal() <= js::gc::ZealIncrementalMultipleSlices))
{
gc.nextScheduled = gc.zealFrequency;
gcNextScheduled = gcZealFrequency;
}
return true;
}
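
needZealousGC() implements a simple countdown protocol: every allocation decrements gcNextScheduled, hitting zero forces a GC, and the periodic zeal modes re-arm the counter from gcZealFrequency. A standalone restatement of that logic:

struct ZealCounterSketch
{
    int nextScheduled;   // allocations remaining until a forced GC
    int frequency;       // reload value for the periodic zeal modes
    bool periodic;       // does this mode re-arm itself?

    bool shouldForceGC() {
        if (nextScheduled > 0 && --nextScheduled == 0) {
            if (periodic)
                nextScheduled = frequency;   // schedule the next one
            return true;
        }
        return false;
    }
};
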
@ -956,24 +1239,27 @@ struct JSRuntime : public JS::shadow::Runtime,
bool needZealousGC() { return false; }
#endif
void lockGC() {
#ifdef JS_THREADSAFE
assertCanLock(js::GCLock);
PR_Lock(gc.lock);
JS_ASSERT(!gc.lockOwner);
#ifdef DEBUG
gc.lockOwner = PR_GetCurrentThread();
#endif
#endif
}
bool gcValidate;
bool gcFullCompartmentChecks;
void unlockGC() {
#ifdef JS_THREADSAFE
JS_ASSERT(gc.lockOwner == PR_GetCurrentThread());
gc.lockOwner = nullptr;
PR_Unlock(gc.lock);
#endif
}
JSGCCallback gcCallback;
JS::GCSliceCallback gcSliceCallback;
JSFinalizeCallback gcFinalizeCallback;
void *gcCallbackData;
private:
/*
* Malloc counter to measure memory pressure for GC scheduling. It runs
* from gcMaxMallocBytes down to zero.
*/
mozilla::Atomic<ptrdiff_t, mozilla::ReleaseAcquire> gcMallocBytes;
/*
* Whether a GC has been triggered as a result of gcMallocBytes falling
* below zero.
*/
mozilla::Atomic<bool, mozilla::ReleaseAcquire> gcMallocGCTriggered;
#ifdef JS_ARM_SIMULATOR
js::jit::SimulatorRuntime *simulatorRuntime_;
@ -984,11 +1270,38 @@ struct JSRuntime : public JS::shadow::Runtime,
needsBarrier_ = needs;
}
struct ExtraTracer {
JSTraceDataOp op;
void *data;
ExtraTracer()
: op(nullptr), data(nullptr)
{}
ExtraTracer(JSTraceDataOp op, void *data)
: op(op), data(data)
{}
};
#ifdef JS_ARM_SIMULATOR
js::jit::SimulatorRuntime *simulatorRuntime() const;
void setSimulatorRuntime(js::jit::SimulatorRuntime *srt);
#endif
/*
* The trace operations to trace embedding-specific GC roots. One is for
* tracing through black roots and the other is for tracing through gray
* roots. The black/gray distinction is only relevant to the cycle
* collector.
*/
typedef js::Vector<ExtraTracer, 4, js::SystemAllocPolicy> ExtraTracerVector;
ExtraTracerVector gcBlackRootTracers;
ExtraTracer gcGrayRootTracer;
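
The gray-root tracer above is installed through JS_SetGrayGCRootsTracer, shown earlier in this patch. Sketched embedder usage; the callback shape follows JSTraceDataOp, and the names here are illustrative:

// Hypothetical embedder hook: report cycle-collector-owned roots as gray.
static void
TraceGrayRootsSketch(JSTracer *trc, void *data)
{
    // ... trace each gray root held by `data` ...
}

void installGrayTracer(JSRuntime *rt, void *embedderState)
{
    JS_SetGrayGCRootsTracer(rt, TraceGrayRootsSketch, embedderState);
}
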
js::gc::SystemPageAllocator pageAllocator;
/* Strong references on scripts held for PCCount profiling API. */
js::ScriptAndCountsVector *scriptAndCountsVector;
/* Well-known numbers held for use by this runtime's contexts. */
const js::Value NaNValue;
const js::Value negativeInfinityValue;
@ -1017,6 +1330,9 @@ struct JSRuntime : public JS::shadow::Runtime,
/* If true, new scripts must be created with PC counter information. */
bool profilingScripts;
/* Always preserve JIT code during GCs, for testing. */
bool alwaysPreserveCode;
/* Had an out-of-memory error which did not populate an exception. */
bool hadOutOfMemory;
@ -1036,6 +1352,33 @@ struct JSRuntime : public JS::shadow::Runtime,
void *data;
private:
/* Synchronize GC heap access between main thread and GCHelperThread. */
PRLock *gcLock;
mozilla::DebugOnly<PRThread *> gcLockOwner;
friend class js::GCHelperThread;
public:
void lockGC() {
#ifdef JS_THREADSAFE
assertCanLock(js::GCLock);
PR_Lock(gcLock);
JS_ASSERT(!gcLockOwner);
#ifdef DEBUG
gcLockOwner = PR_GetCurrentThread();
#endif
#endif
}
void unlockGC() {
#ifdef JS_THREADSAFE
JS_ASSERT(gcLockOwner == PR_GetCurrentThread());
gcLockOwner = nullptr;
PR_Unlock(gcLock);
#endif
}
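
lockGC()/unlockGC() guard the state shared with the GC helper thread (the available-chunk lists, gcBytes accounting, and so on); gcLockOwner is DebugOnly bookkeeping for the assertions above. Contemporary code usually wraps the pair in an RAII guard, sketched here (AutoLockGC is the usual spelling, assumed):

class AutoLockGCSketch
{
    JSRuntime *rt;
  public:
    explicit AutoLockGCSketch(JSRuntime *rt) : rt(rt) { rt->lockGC(); }
    ~AutoLockGCSketch() { rt->unlockGC(); }
};

void touchSharedGCState(JSRuntime *rt)
{
    AutoLockGCSketch lock(rt);
    // Safe to walk gcSystemAvailableChunkListHead etc. while held.
}
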
js::GCHelperThread gcHelperThread;
#if defined(XP_MACOSX) && defined(JS_ION)
js::AsmJSMachExceptionHandler asmJSMachExceptionHandler;
@ -1108,6 +1451,8 @@ struct JSRuntime : public JS::shadow::Runtime,
js::DateTimeInfo dateTimeInfo;
js::ConservativeGCData conservativeGC;
// Pool of maps used during parse/emit. This may be modified by threads
// with an ExclusiveContext and requires a lock. Active compilations
// prevent the pool from being purged during GCs.
@ -1227,6 +1572,10 @@ struct JSRuntime : public JS::shadow::Runtime,
return scriptDataTable_;
}
#ifdef DEBUG
size_t noGCOrAllocationCheck;
#endif
bool jitSupportsFloatingPoint;
// Used to reset stack limit after a signaled interrupt (i.e. jitStackLimit_ = -1)
@ -1295,8 +1644,8 @@ struct JSRuntime : public JS::shadow::Runtime,
void setGCMaxMallocBytes(size_t value);
void resetGCMallocBytes() {
gc.mallocBytes = ptrdiff_t(gc.maxMallocBytes);
gc.mallocGCTriggered = false;
gcMallocBytes = ptrdiff_t(gcMaxMallocBytes);
gcMallocGCTriggered = false;
}
/*
@ -1313,7 +1662,7 @@ struct JSRuntime : public JS::shadow::Runtime,
void reportAllocationOverflow() { js_ReportAllocationOverflow(nullptr); }
bool isTooMuchMalloc() const {
return gc.mallocBytes <= 0;
return gcMallocBytes <= 0;
}
/*
@ -1500,7 +1849,7 @@ inline void
FreeOp::free_(void *p)
{
if (shouldFreeLater()) {
runtime()->gc.helperThread.freeLater(p);
runtime()->gcHelperThread.freeLater(p);
return;
}
js_free(p);


@ -1581,7 +1581,7 @@ DebugScopes::proxiedScopesPostWriteBarrier(JSRuntime *rt, ObjectWeakMap *map,
typedef gc::HashKeyRef<UnbarrieredMap, JSObject *> Ref;
if (key && IsInsideNursery(rt, key))
rt->gc.storeBuffer.putGeneric(Ref(unbarrieredMap, key.get()));
rt->gcStoreBuffer.putGeneric(Ref(unbarrieredMap, key.get()));
#endif
}
@ -1612,7 +1612,7 @@ DebugScopes::missingScopesPostWriteBarrier(JSRuntime *rt, MissingScopeMap *map,
{
#ifdef JSGC_GENERATIONAL
if (key.enclosingScope() && IsInsideNursery(rt, key.enclosingScope()))
rt->gc.storeBuffer.putGeneric(MissingScopesRef(map, key));
rt->gcStoreBuffer.putGeneric(MissingScopesRef(map, key));
#endif
}
@ -1628,7 +1628,7 @@ DebugScopes::liveScopesPostWriteBarrier(JSRuntime *rt, LiveScopeMap *map, ScopeO
RuntimeAllocPolicy> UnbarrieredLiveScopeMap;
typedef gc::HashKeyRef<UnbarrieredLiveScopeMap, ScopeObject *> Ref;
if (key && IsInsideNursery(rt, key))
rt->gc.storeBuffer.putGeneric(Ref(reinterpret_cast<UnbarrieredLiveScopeMap *>(map), key));
rt->gcStoreBuffer.putGeneric(Ref(reinterpret_cast<UnbarrieredLiveScopeMap *>(map), key));
#endif
}


@ -1526,7 +1526,7 @@ BaseShape::assertConsistency()
void
JSCompartment::sweepBaseShapeTable()
{
gcstats::AutoPhase ap(runtimeFromMainThread()->gc.stats,
gcstats::AutoPhase ap(runtimeFromMainThread()->gcStats,
gcstats::PHASE_SWEEP_TABLES_BASE_SHAPE);
if (baseShapes.initialized()) {
@ -1736,7 +1736,7 @@ EmptyShape::getInitialShape(ExclusiveContext *cx, const Class *clasp, TaggedProt
{
InitialShapeSetRef ref(
&table, clasp, protoRoot, parentRoot, metadataRoot, nfixed, objectFlags);
cx->asJSContext()->runtime()->gc.storeBuffer.putGeneric(ref);
cx->asJSContext()->runtime()->gcStoreBuffer.putGeneric(ref);
}
}
#endif
@ -1814,7 +1814,7 @@ EmptyShape::insertInitialShape(ExclusiveContext *cx, HandleShape shape, HandleOb
void
JSCompartment::sweepInitialShapeTable()
{
gcstats::AutoPhase ap(runtimeFromMainThread()->gc.stats,
gcstats::AutoPhase ap(runtimeFromMainThread()->gcStats,
gcstats::PHASE_SWEEP_TABLES_INITIAL_SHAPE);
if (initialShapes.initialized()) {