Bug 807168 - Move GCMarker into gc/Tracer.h; r=jonco

--HG--
extra : rebase_source : 031495e6bb5510009eb871866e9f5e3beda24211
Terrence Cole 2014-04-16 16:26:33 -07:00
parent 34b4eee93b
commit 0dfe71b218
9 changed files with 649 additions and 622 deletions

View File

@@ -13,6 +13,20 @@
#include "js/RootingAPI.h"
#include "js/Value.h"
typedef enum JSGCMode {
/* Perform only global GCs. */
JSGC_MODE_GLOBAL = 0,
/* Perform per-compartment GCs until too much garbage has accumulated. */
JSGC_MODE_COMPARTMENT = 1,
/*
* Collect in short time slices rather than all at once. Implies
* JSGC_MODE_COMPARTMENT.
*/
JSGC_MODE_INCREMENTAL = 2
} JSGCMode;
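For context, an embedder normally selects one of these modes through the JSGC_MODE parameter key; a minimal, hedged sketch (rt is assumed to be a live JSRuntime *):
JS_SetGCParameter(rt, JSGC_MODE, JSGC_MODE_INCREMENTAL); /* opt into slice-based GC */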
namespace JS {
#define GCREASONS(D) \

View File

@@ -6,6 +6,8 @@
#include "gc/Tracer.h"
#include "mozilla/DebugOnly.h"
#include "jsapi.h"
#include "jsfun.h"
#include "jsgc.h"
@@ -13,10 +15,14 @@
#include "jsscript.h"
#include "NamespaceImports.h"
#include "gc/GCInternals.h"
#include "gc/Marking.h"
#include "jsgcinlines.h"
using namespace js;
using namespace js::gc;
using mozilla::DebugOnly;
JS_PUBLIC_API(void)
JS_CallValueTracer(JSTracer *trc, Value *valuep, const char *name)
@@ -326,3 +332,248 @@ JSTracer::tracingLocation(void **thingp)
return realLocation_ ? (void **)realLocation_ : thingp;
}
#endif
/*
* DoNotTraceWeakMaps: the GC is recomputing the liveness of WeakMap entries,
* so we delay visiting entries.
*/
GCMarker::GCMarker(JSRuntime *rt)
: JSTracer(rt, nullptr, DoNotTraceWeakMaps),
stack(size_t(-1)),
color(BLACK),
started(false),
unmarkedArenaStackTop(nullptr),
markLaterArenas(0),
grayBufferState(GRAY_BUFFER_UNUSED)
{
}
bool
GCMarker::init(JSGCMode gcMode)
{
return stack.init(gcMode);
}
void
GCMarker::start()
{
JS_ASSERT(!started);
started = true;
color = BLACK;
JS_ASSERT(!unmarkedArenaStackTop);
JS_ASSERT(markLaterArenas == 0);
}
void
GCMarker::stop()
{
JS_ASSERT(isDrained());
JS_ASSERT(started);
started = false;
JS_ASSERT(!unmarkedArenaStackTop);
JS_ASSERT(markLaterArenas == 0);
/* Free non-ballast stack memory. */
stack.reset();
resetBufferedGrayRoots();
grayBufferState = GRAY_BUFFER_UNUSED;
}
void
GCMarker::reset()
{
color = BLACK;
stack.reset();
JS_ASSERT(isMarkStackEmpty());
while (unmarkedArenaStackTop) {
ArenaHeader *aheader = unmarkedArenaStackTop;
JS_ASSERT(aheader->hasDelayedMarking);
JS_ASSERT(markLaterArenas);
unmarkedArenaStackTop = aheader->getNextDelayedMarking();
aheader->unsetDelayedMarking();
aheader->markOverflow = 0;
aheader->allocatedDuringIncremental = 0;
markLaterArenas--;
}
JS_ASSERT(isDrained());
JS_ASSERT(!markLaterArenas);
}
void
GCMarker::markDelayedChildren(ArenaHeader *aheader)
{
if (aheader->markOverflow) {
bool always = aheader->allocatedDuringIncremental;
aheader->markOverflow = 0;
for (CellIterUnderGC i(aheader); !i.done(); i.next()) {
Cell *t = i.getCell();
if (always || t->isMarked()) {
t->markIfUnmarked();
JS_TraceChildren(this, t, MapAllocToTraceKind(aheader->getAllocKind()));
}
}
} else {
JS_ASSERT(aheader->allocatedDuringIncremental);
PushArena(this, aheader);
}
aheader->allocatedDuringIncremental = 0;
/*
* Note that during an incremental GC we may still be allocating into
* aheader. However, prepareForIncrementalGC sets the
* allocatedDuringIncremental flag if we continue marking.
*/
}
bool
GCMarker::markDelayedChildren(SliceBudget &budget)
{
gcstats::MaybeAutoPhase ap;
if (runtime()->gcIncrementalState == MARK)
ap.construct(runtime()->gcStats, gcstats::PHASE_MARK_DELAYED);
JS_ASSERT(unmarkedArenaStackTop);
do {
/*
* If marking gets delayed at the same arena again, we must repeat
* marking of its things. For that we pop the arena from the stack and
* clear its hasDelayedMarking flag before we begin the marking.
*/
ArenaHeader *aheader = unmarkedArenaStackTop;
JS_ASSERT(aheader->hasDelayedMarking);
JS_ASSERT(markLaterArenas);
unmarkedArenaStackTop = aheader->getNextDelayedMarking();
aheader->unsetDelayedMarking();
markLaterArenas--;
markDelayedChildren(aheader);
budget.step(150);
if (budget.isOverBudget())
return false;
} while (unmarkedArenaStackTop);
JS_ASSERT(!markLaterArenas);
return true;
}
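A hedged sketch of the caller-side pattern this budgeted overload serves; SketchMarkSlice is a hypothetical driver, and only members declared in this patch (hasDelayedChildren, markDelayedChildren, drainMarkStack) are assumed:
/* Hypothetical incremental driver: do bounded marking work, then yield. */
static bool
SketchMarkSlice(GCMarker &marker, SliceBudget &budget)
{
    if (marker.hasDelayedChildren() && !marker.markDelayedChildren(budget))
        return false; /* over budget; resume in the next slice */
    return marker.drainMarkStack(budget);
}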
#ifdef DEBUG
void
GCMarker::checkZone(void *p)
{
JS_ASSERT(started);
DebugOnly<Cell *> cell = static_cast<Cell *>(p);
JS_ASSERT_IF(cell->isTenured(), cell->tenuredZone()->isCollecting());
}
#endif
bool
GCMarker::hasBufferedGrayRoots() const
{
return grayBufferState == GRAY_BUFFER_OK;
}
void
GCMarker::startBufferingGrayRoots()
{
JS_ASSERT(grayBufferState == GRAY_BUFFER_UNUSED);
grayBufferState = GRAY_BUFFER_OK;
for (GCZonesIter zone(runtime()); !zone.done(); zone.next())
JS_ASSERT(zone->gcGrayRoots.empty());
JS_ASSERT(!callback);
callback = GrayCallback;
JS_ASSERT(IS_GC_MARKING_TRACER(this));
}
void
GCMarker::endBufferingGrayRoots()
{
JS_ASSERT(callback == GrayCallback);
callback = nullptr;
JS_ASSERT(IS_GC_MARKING_TRACER(this));
JS_ASSERT(grayBufferState == GRAY_BUFFER_OK ||
grayBufferState == GRAY_BUFFER_FAILED);
}
void
GCMarker::resetBufferedGrayRoots()
{
for (GCZonesIter zone(runtime()); !zone.done(); zone.next())
zone->gcGrayRoots.clearAndFree();
}
void
GCMarker::markBufferedGrayRoots(JS::Zone *zone)
{
JS_ASSERT(grayBufferState == GRAY_BUFFER_OK);
JS_ASSERT(zone->isGCMarkingGray());
for (GrayRoot *elem = zone->gcGrayRoots.begin(); elem != zone->gcGrayRoots.end(); elem++) {
#ifdef DEBUG
setTracingDetails(elem->debugPrinter, elem->debugPrintArg, elem->debugPrintIndex);
#endif
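/* Mark through a local copy; the assert below checks the thing did not move. */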
void *tmp = elem->thing;
setTracingLocation((void *)&elem->thing);
MarkKind(this, &tmp, elem->kind);
JS_ASSERT(tmp == elem->thing);
}
}
void
GCMarker::appendGrayRoot(void *thing, JSGCTraceKind kind)
{
JS_ASSERT(started);
if (grayBufferState == GRAY_BUFFER_FAILED)
return;
GrayRoot root(thing, kind);
#ifdef DEBUG
root.debugPrinter = debugPrinter();
root.debugPrintArg = debugPrintArg();
root.debugPrintIndex = debugPrintIndex();
#endif
Zone *zone = static_cast<Cell *>(thing)->tenuredZone();
if (zone->isCollecting()) {
zone->maybeAlive = true;
if (!zone->gcGrayRoots.append(root)) {
resetBufferedGrayRoots();
grayBufferState = GRAY_BUFFER_FAILED;
}
}
}
void
GCMarker::GrayCallback(JSTracer *trc, void **thingp, JSGCTraceKind kind)
{
JS_ASSERT(thingp);
JS_ASSERT(*thingp);
GCMarker *gcmarker = static_cast<GCMarker *>(trc);
gcmarker->appendGrayRoot(*thingp, kind);
}
size_t
GCMarker::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const
{
size_t size = stack.sizeOfExcludingThis(mallocSizeOf);
for (ZonesIter zone(runtime(), WithAtoms); !zone.done(); zone.next())
size += zone->gcGrayRoots.sizeOfExcludingThis(mallocSizeOf);
return size;
}
void
js::SetMarkStackLimit(JSRuntime *rt, size_t limit)
{
JS_ASSERT(!rt->isHeapBusy());
AutoStopVerifyingBarriers pauseVerification(rt, false);
rt->gcMarker.setMaxCapacity(limit);
}
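Embedders typically reach this through the public parameter API rather than by calling it directly; a hedged sketch, assuming the JSGC_MARK_STACK_LIMIT key routes to SetMarkStackLimit:
JS_SetGCParameter(rt, JSGC_MARK_STACK_LIMIT, 32768); /* cap mark stack entries */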

View File

@@ -7,6 +7,367 @@
#ifndef js_Tracer_h
#define js_Tracer_h
#include "mozilla/DebugOnly.h"
#include "jsutil.h"
#include "js/GCAPI.h"
#include "js/SliceBudget.h"
#include "js/TracingAPI.h"
namespace js {
class ObjectImpl;
namespace gc {
class ArenaHeader;
}
namespace jit {
class JitCode;
}
namespace types {
class TypeObject;
}
static const size_t NON_INCREMENTAL_MARK_STACK_BASE_CAPACITY = 4096;
static const size_t INCREMENTAL_MARK_STACK_BASE_CAPACITY = 32768;
/*
* When the native stack is low, the GC does not call JS_TraceChildren to mark
* the reachable "children" of the thing. Rather, the thing is put aside and
* JS_TraceChildren is called later with more space on the C stack.
*
* To implement such delayed marking of the children with minimal overhead for
* the normal case of sufficient native stack, the code adds a field per arena.
* The field markingDelay->link links all arenas with delayed things into a
* stack list with the pointer to stack top in GCMarker::unmarkedArenaStackTop.
* GCMarker::delayMarkingChildren adds arenas to the stack as necessary while
* markDelayedChildren pops the arenas from the stack until it empties.
*/
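As a hedged illustration of the intrusive arena stack described above, using only the ArenaHeader accessors that appear later in this patch:
/* Push (see GCMarker::delayMarkingArena): link the arena onto the list head. */
aheader->setNextDelayedMarking(unmarkedArenaStackTop);
unmarkedArenaStackTop = aheader;
/* Pop (see GCMarker::markDelayedChildren): unlink and clear the flag. */
gc::ArenaHeader *top = unmarkedArenaStackTop;
unmarkedArenaStackTop = top->getNextDelayedMarking();
top->unsetDelayedMarking();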
template<class T>
struct MarkStack {
T *stack_;
T *tos_;
T *end_;
// The capacity we start with and reset() to.
size_t baseCapacity_;
size_t maxCapacity_;
MarkStack(size_t maxCapacity)
: stack_(nullptr),
tos_(nullptr),
end_(nullptr),
baseCapacity_(0),
maxCapacity_(maxCapacity)
{}
~MarkStack() {
js_free(stack_);
}
size_t capacity() { return end_ - stack_; }
ptrdiff_t position() const { return tos_ - stack_; }
void setStack(T *stack, size_t tosIndex, size_t capacity) {
stack_ = stack;
tos_ = stack + tosIndex;
end_ = stack + capacity;
}
void setBaseCapacity(JSGCMode mode) {
switch (mode) {
case JSGC_MODE_GLOBAL:
case JSGC_MODE_COMPARTMENT:
baseCapacity_ = NON_INCREMENTAL_MARK_STACK_BASE_CAPACITY;
break;
case JSGC_MODE_INCREMENTAL:
baseCapacity_ = INCREMENTAL_MARK_STACK_BASE_CAPACITY;
break;
default:
MOZ_ASSUME_UNREACHABLE("bad gc mode");
}
if (baseCapacity_ > maxCapacity_)
baseCapacity_ = maxCapacity_;
}
bool init(JSGCMode gcMode) {
setBaseCapacity(gcMode);
JS_ASSERT(!stack_);
T *newStack = js_pod_malloc<T>(baseCapacity_);
if (!newStack)
return false;
setStack(newStack, 0, baseCapacity_);
return true;
}
void setMaxCapacity(size_t maxCapacity) {
JS_ASSERT(isEmpty());
maxCapacity_ = maxCapacity;
if (baseCapacity_ > maxCapacity_)
baseCapacity_ = maxCapacity_;
reset();
}
bool push(T item) {
if (tos_ == end_) {
if (!enlarge(1))
return false;
}
JS_ASSERT(tos_ < end_);
*tos_++ = item;
return true;
}
bool push(T item1, T item2, T item3) {
T *nextTos = tos_ + 3;
if (nextTos > end_) {
if (!enlarge(3))
return false;
nextTos = tos_ + 3;
}
JS_ASSERT(nextTos <= end_);
tos_[0] = item1;
tos_[1] = item2;
tos_[2] = item3;
tos_ = nextTos;
return true;
}
bool isEmpty() const {
return tos_ == stack_;
}
T pop() {
JS_ASSERT(!isEmpty());
return *--tos_;
}
void reset() {
if (capacity() == baseCapacity_) {
// No size change; keep the current stack.
setStack(stack_, 0, baseCapacity_);
return;
}
T *newStack = (T *)js_realloc(stack_, sizeof(T) * baseCapacity_);
if (!newStack) {
// If the realloc fails, just keep using the existing stack; it's
// not ideal but better than failing.
newStack = stack_;
baseCapacity_ = capacity();
}
setStack(newStack, 0, baseCapacity_);
}
/* Grow the stack, ensuring there is space for at least count elements. */
bool enlarge(unsigned count) {
size_t newCapacity = Min(maxCapacity_, capacity() * 2);
if (newCapacity < capacity() + count)
return false;
size_t tosIndex = position();
T *newStack = (T *)js_realloc(stack_, sizeof(T) * newCapacity);
if (!newStack)
return false;
setStack(newStack, tosIndex, newCapacity);
return true;
}
void setGCMode(JSGCMode gcMode) {
// The mark stack won't be resized until the next call to reset(), but
// that will happen at the end of the next GC.
setBaseCapacity(gcMode);
}
size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
return mallocSizeOf(stack_);
}
};
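A minimal usage sketch of the template above; SketchMarkStackUse is hypothetical and the pushed value is arbitrary (low bits kept clear to match the tagging scheme GCMarker uses below):
static bool
SketchMarkStackUse()
{
    MarkStack<uintptr_t> stack(size_t(-1));  /* effectively unbounded maximum */
    if (!stack.init(JSGC_MODE_INCREMENTAL))  /* reserves the incremental base capacity */
        return false;
    if (!stack.push(uintptr_t(1) << 3))      /* on failure the caller must fall back */
        return false;
    while (!stack.isEmpty())
        (void)stack.pop();
    return true;
}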
struct GCMarker : public JSTracer {
private:
/*
* We use a common mark stack to mark GC things of different types and use
* the explicit tags to distinguish them when it cannot be deduced from
* the context of the push or pop operation.
*/
enum StackTag {
ValueArrayTag,
ObjectTag,
TypeTag,
XmlTag,
SavedValueArrayTag,
JitCodeTag,
LastTag = JitCodeTag
};
static const uintptr_t StackTagMask = 7;
static void staticAsserts() {
JS_STATIC_ASSERT(StackTagMask >= uintptr_t(LastTag));
JS_STATIC_ASSERT(StackTagMask <= gc::CellMask);
}
public:
explicit GCMarker(JSRuntime *rt);
bool init(JSGCMode gcMode);
void setMaxCapacity(size_t maxCap) { stack.setMaxCapacity(maxCap); }
size_t maxCapacity() const { return stack.maxCapacity_; }
void start();
void stop();
void reset();
void pushObject(ObjectImpl *obj) {
pushTaggedPtr(ObjectTag, obj);
}
void pushType(types::TypeObject *type) {
pushTaggedPtr(TypeTag, type);
}
void pushJitCode(jit::JitCode *code) {
pushTaggedPtr(JitCodeTag, code);
}
uint32_t getMarkColor() const {
return color;
}
/*
* Care must be taken when changing the mark color from gray to black. The cycle
* collector depends on the invariant that there are no black to gray edges
* in the GC heap. This invariant lets the CC not trace through black
* objects. If this invariant is violated, the cycle collector may free
* objects that are still reachable.
*/
void setMarkColorGray() {
JS_ASSERT(isDrained());
JS_ASSERT(color == gc::BLACK);
color = gc::GRAY;
}
void setMarkColorBlack() {
JS_ASSERT(isDrained());
JS_ASSERT(color == gc::GRAY);
color = gc::BLACK;
}
inline void delayMarkingArena(gc::ArenaHeader *aheader);
void delayMarkingChildren(const void *thing);
void markDelayedChildren(gc::ArenaHeader *aheader);
bool markDelayedChildren(SliceBudget &budget);
bool hasDelayedChildren() const {
return !!unmarkedArenaStackTop;
}
bool isDrained() {
return isMarkStackEmpty() && !unmarkedArenaStackTop;
}
bool drainMarkStack(SliceBudget &budget);
/*
* Gray marking must be done after all black marking is complete. However,
* we do not have write barriers on XPConnect roots. Therefore, XPConnect
* roots must be accumulated in the first slice of incremental GC. We
* accumulate these roots in each compartment's gcGrayRoots vector and
* then mark them later, after black marking is complete for each
* compartment. This accumulation can fail, but in that case we switch to
* non-incremental GC.
*/
bool hasBufferedGrayRoots() const;
void startBufferingGrayRoots();
void endBufferingGrayRoots();
void resetBufferedGrayRoots();
void markBufferedGrayRoots(JS::Zone *zone);
static void GrayCallback(JSTracer *trc, void **thing, JSGCTraceKind kind);
void setGCMode(JSGCMode mode) { stack.setGCMode(mode); }
size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
MarkStack<uintptr_t> stack;
private:
#ifdef DEBUG
void checkZone(void *p);
#else
void checkZone(void *p) {}
#endif
void pushTaggedPtr(StackTag tag, void *ptr) {
checkZone(ptr);
uintptr_t addr = reinterpret_cast<uintptr_t>(ptr);
JS_ASSERT(!(addr & StackTagMask));
if (!stack.push(addr | uintptr_t(tag)))
delayMarkingChildren(ptr);
}
void pushValueArray(JSObject *obj, void *start, void *end) {
checkZone(obj);
JS_ASSERT(start <= end);
uintptr_t tagged = reinterpret_cast<uintptr_t>(obj) | GCMarker::ValueArrayTag;
uintptr_t startAddr = reinterpret_cast<uintptr_t>(start);
uintptr_t endAddr = reinterpret_cast<uintptr_t>(end);
/*
* Push in reverse order so that obj will be on top. If we cannot push
* the array, we trigger delay marking for the whole object.
*/
if (!stack.push(endAddr, startAddr, tagged))
delayMarkingChildren(obj);
}
bool isMarkStackEmpty() {
return stack.isEmpty();
}
bool restoreValueArray(JSObject *obj, void **vpp, void **endp);
void saveValueRanges();
inline void processMarkStackTop(SliceBudget &budget);
void processMarkStackOther(uintptr_t tag, uintptr_t addr);
void appendGrayRoot(void *thing, JSGCTraceKind kind);
/* The color is only applied to objects and functions. */
uint32_t color;
mozilla::DebugOnly<bool> started;
/* Pointer to the top of the stack of arenas we are delaying marking on. */
js::gc::ArenaHeader *unmarkedArenaStackTop;
/* Count of arenas that are currently in the stack. */
mozilla::DebugOnly<size_t> markLaterArenas;
enum GrayBufferState
{
GRAY_BUFFER_UNUSED,
GRAY_BUFFER_OK,
GRAY_BUFFER_FAILED
};
GrayBufferState grayBufferState;
};
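A hedged sketch of the pointer-tagging scheme pushTaggedPtr relies on: cells are at least 8-byte aligned, so the low three bits of a pointer are free to carry a tag (StackTagMask == 7). The real tags are private to GCMarker, so kTagMask and the helpers below are hypothetical stand-ins:
static const uintptr_t kTagMask = 7;
static uintptr_t
SketchEncodeTaggedPtr(void *ptr, unsigned tag)
{
    uintptr_t addr = reinterpret_cast<uintptr_t>(ptr);
    JS_ASSERT(!(addr & kTagMask)); /* cell alignment keeps the low bits clear */
    return addr | uintptr_t(tag);
}
static void *
SketchDecodeTaggedPtr(uintptr_t word, unsigned *tagOut)
{
    *tagOut = unsigned(word & kTagMask);
    return reinterpret_cast<void *>(word & ~kTagMask);
}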
void
SetMarkStackLimit(JSRuntime *rt, size_t limit);
} /* namespace js */
/*
* Macro to test if a traversal is the marking phase of the GC.
*/
#define IS_GC_MARKING_TRACER(trc) \
((trc)->callback == nullptr || (trc)->callback == GCMarker::GrayCallback)
#endif /* js_Tracer_h */
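The buffered-gray-roots protocol documented on GCMarker follows a fixed order; a hedged sketch of that sequencing (SketchGrayRootSequence is hypothetical, and per-zone iteration is elided):
static void
SketchGrayRootSequence(js::GCMarker &marker, JS::Zone *zone)
{
    marker.startBufferingGrayRoots();  /* first slice: the callback buffers roots */
    /* ... black marking runs; XPConnect gray roots accumulate per zone ... */
    marker.endBufferingGrayRoots();
    if (marker.hasBufferedGrayRoots())
        marker.markBufferedGrayRoots(zone); /* requires zone->isGCMarkingGray() */
    /* otherwise buffering failed and the GC switches to non-incremental. */
}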

View File

@@ -2120,20 +2120,6 @@ typedef enum JSGCParamKey {
JSGC_DECOMMIT_THRESHOLD = 20
} JSGCParamKey;
typedef enum JSGCMode {
/* Perform only global GCs. */
JSGC_MODE_GLOBAL = 0,
/* Perform per-compartment GCs until too much garbage has accumulated. */
JSGC_MODE_COMPARTMENT = 1,
/*
* Collect in short time slices rather than all at once. Implies
* JSGC_MODE_COMPARTMENT.
*/
JSGC_MODE_INCREMENTAL = 2
} JSGCMode;
extern JS_PUBLIC_API(void)
JS_SetGCParameter(JSRuntime *rt, JSGCParamKey key, uint32_t value);

View File

@@ -23,6 +23,7 @@
#include "jsutil.h"
#include "ds/Sort.h"
#include "gc/Heap.h"
#include "vm/ArgumentsObject.h"
#include "vm/ForkJoin.h"
#include "vm/Interpreter.h"

View File

@@ -1338,6 +1338,26 @@ Allocator::Allocator(Zone *zone)
: zone_(zone)
{}
inline void
GCMarker::delayMarkingArena(ArenaHeader *aheader)
{
if (aheader->hasDelayedMarking) {
/* Arena already scheduled to be marked later */
return;
}
aheader->setNextDelayedMarking(unmarkedArenaStackTop);
unmarkedArenaStackTop = aheader;
markLaterArenas++;
}
void
GCMarker::delayMarkingChildren(const void *thing)
{
const Cell *cell = reinterpret_cast<const Cell *>(thing);
cell->arenaHeader()->markOverflow = 1;
delayMarkingArena(cell->arenaHeader());
}
inline void
ArenaLists::prepareForIncrementalGC(JSRuntime *rt)
{
@@ -1849,284 +1869,6 @@ SliceBudget::checkOverBudget()
return over;
}
/*
* DoNotTraceWeakMaps: the GC is recomputing the liveness of WeakMap entries,
* so we delay visiting entries.
*/
GCMarker::GCMarker(JSRuntime *rt)
: JSTracer(rt, nullptr, DoNotTraceWeakMaps),
stack(size_t(-1)),
color(BLACK),
started(false),
unmarkedArenaStackTop(nullptr),
markLaterArenas(0),
grayBufferState(GRAY_BUFFER_UNUSED)
{
}
bool
GCMarker::init(JSGCMode gcMode)
{
return stack.init(gcMode);
}
void
GCMarker::start()
{
JS_ASSERT(!started);
started = true;
color = BLACK;
JS_ASSERT(!unmarkedArenaStackTop);
JS_ASSERT(markLaterArenas == 0);
}
void
GCMarker::stop()
{
JS_ASSERT(isDrained());
JS_ASSERT(started);
started = false;
JS_ASSERT(!unmarkedArenaStackTop);
JS_ASSERT(markLaterArenas == 0);
/* Free non-ballast stack memory. */
stack.reset();
resetBufferedGrayRoots();
grayBufferState = GRAY_BUFFER_UNUSED;
}
void
GCMarker::reset()
{
color = BLACK;
stack.reset();
JS_ASSERT(isMarkStackEmpty());
while (unmarkedArenaStackTop) {
ArenaHeader *aheader = unmarkedArenaStackTop;
JS_ASSERT(aheader->hasDelayedMarking);
JS_ASSERT(markLaterArenas);
unmarkedArenaStackTop = aheader->getNextDelayedMarking();
aheader->unsetDelayedMarking();
aheader->markOverflow = 0;
aheader->allocatedDuringIncremental = 0;
markLaterArenas--;
}
JS_ASSERT(isDrained());
JS_ASSERT(!markLaterArenas);
}
/*
* When the native stack is low, the GC does not call JS_TraceChildren to mark
* the reachable "children" of the thing. Rather, the thing is put aside and
* JS_TraceChildren is called later with more space on the C stack.
*
* To implement such delayed marking of the children with minimal overhead for
* the normal case of sufficient native stack, the code adds a field per
* arena. The field markingDelay->link links all arenas with delayed things
* into a stack list with the pointer to stack top in
* GCMarker::unmarkedArenaStackTop. delayMarkingChildren adds
* arenas to the stack as necessary while markDelayedChildren pops the arenas
* from the stack until it empties.
*/
inline void
GCMarker::delayMarkingArena(ArenaHeader *aheader)
{
if (aheader->hasDelayedMarking) {
/* Arena already scheduled to be marked later */
return;
}
aheader->setNextDelayedMarking(unmarkedArenaStackTop);
unmarkedArenaStackTop = aheader;
markLaterArenas++;
}
void
GCMarker::delayMarkingChildren(const void *thing)
{
const Cell *cell = reinterpret_cast<const Cell *>(thing);
cell->arenaHeader()->markOverflow = 1;
delayMarkingArena(cell->arenaHeader());
}
void
GCMarker::markDelayedChildren(ArenaHeader *aheader)
{
if (aheader->markOverflow) {
bool always = aheader->allocatedDuringIncremental;
aheader->markOverflow = 0;
for (CellIterUnderGC i(aheader); !i.done(); i.next()) {
Cell *t = i.getCell();
if (always || t->isMarked()) {
t->markIfUnmarked();
JS_TraceChildren(this, t, MapAllocToTraceKind(aheader->getAllocKind()));
}
}
} else {
JS_ASSERT(aheader->allocatedDuringIncremental);
PushArena(this, aheader);
}
aheader->allocatedDuringIncremental = 0;
/*
* Note that during an incremental GC we may still be allocating into
* aheader. However, prepareForIncrementalGC sets the
* allocatedDuringIncremental flag if we continue marking.
*/
}
bool
GCMarker::markDelayedChildren(SliceBudget &budget)
{
gcstats::MaybeAutoPhase ap;
if (runtime()->gcIncrementalState == MARK)
ap.construct(runtime()->gcStats, gcstats::PHASE_MARK_DELAYED);
JS_ASSERT(unmarkedArenaStackTop);
do {
/*
* If marking gets delayed at the same arena again, we must repeat
* marking of its things. For that we pop the arena from the stack and
* clear its hasDelayedMarking flag before we begin the marking.
*/
ArenaHeader *aheader = unmarkedArenaStackTop;
JS_ASSERT(aheader->hasDelayedMarking);
JS_ASSERT(markLaterArenas);
unmarkedArenaStackTop = aheader->getNextDelayedMarking();
aheader->unsetDelayedMarking();
markLaterArenas--;
markDelayedChildren(aheader);
budget.step(150);
if (budget.isOverBudget())
return false;
} while (unmarkedArenaStackTop);
JS_ASSERT(!markLaterArenas);
return true;
}
#ifdef DEBUG
void
GCMarker::checkZone(void *p)
{
JS_ASSERT(started);
DebugOnly<Cell *> cell = static_cast<Cell *>(p);
JS_ASSERT_IF(cell->isTenured(), cell->tenuredZone()->isCollecting());
}
#endif
bool
GCMarker::hasBufferedGrayRoots() const
{
return grayBufferState == GRAY_BUFFER_OK;
}
void
GCMarker::startBufferingGrayRoots()
{
JS_ASSERT(grayBufferState == GRAY_BUFFER_UNUSED);
grayBufferState = GRAY_BUFFER_OK;
for (GCZonesIter zone(runtime()); !zone.done(); zone.next())
JS_ASSERT(zone->gcGrayRoots.empty());
JS_ASSERT(!callback);
callback = GrayCallback;
JS_ASSERT(IS_GC_MARKING_TRACER(this));
}
void
GCMarker::endBufferingGrayRoots()
{
JS_ASSERT(callback == GrayCallback);
callback = nullptr;
JS_ASSERT(IS_GC_MARKING_TRACER(this));
JS_ASSERT(grayBufferState == GRAY_BUFFER_OK ||
grayBufferState == GRAY_BUFFER_FAILED);
}
void
GCMarker::resetBufferedGrayRoots()
{
for (GCZonesIter zone(runtime()); !zone.done(); zone.next())
zone->gcGrayRoots.clearAndFree();
}
void
GCMarker::markBufferedGrayRoots(JS::Zone *zone)
{
JS_ASSERT(grayBufferState == GRAY_BUFFER_OK);
JS_ASSERT(zone->isGCMarkingGray());
for (GrayRoot *elem = zone->gcGrayRoots.begin(); elem != zone->gcGrayRoots.end(); elem++) {
#ifdef DEBUG
setTracingDetails(elem->debugPrinter, elem->debugPrintArg, elem->debugPrintIndex);
#endif
void *tmp = elem->thing;
setTracingLocation((void *)&elem->thing);
MarkKind(this, &tmp, elem->kind);
JS_ASSERT(tmp == elem->thing);
}
}
void
GCMarker::appendGrayRoot(void *thing, JSGCTraceKind kind)
{
JS_ASSERT(started);
if (grayBufferState == GRAY_BUFFER_FAILED)
return;
GrayRoot root(thing, kind);
#ifdef DEBUG
root.debugPrinter = debugPrinter();
root.debugPrintArg = debugPrintArg();
root.debugPrintIndex = debugPrintIndex();
#endif
Zone *zone = static_cast<Cell *>(thing)->tenuredZone();
if (zone->isCollecting()) {
zone->maybeAlive = true;
if (!zone->gcGrayRoots.append(root)) {
resetBufferedGrayRoots();
grayBufferState = GRAY_BUFFER_FAILED;
}
}
}
void
GCMarker::GrayCallback(JSTracer *trc, void **thingp, JSGCTraceKind kind)
{
JS_ASSERT(thingp);
JS_ASSERT(*thingp);
GCMarker *gcmarker = static_cast<GCMarker *>(trc);
gcmarker->appendGrayRoot(*thingp, kind);
}
size_t
GCMarker::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const
{
size_t size = stack.sizeOfExcludingThis(mallocSizeOf);
for (ZonesIter zone(runtime(), WithAtoms); !zone.done(); zone.next())
size += zone->gcGrayRoots.sizeOfExcludingThis(mallocSizeOf);
return size;
}
void
js::SetMarkStackLimit(JSRuntime *rt, size_t limit)
{
JS_ASSERT(!rt->isHeapBusy());
AutoStopVerifyingBarriers pauseVerification(rt, false);
rt->gcMarker.setMaxCapacity(limit);
}
void
js::MarkCompartmentActive(InterpreterFrame *fp)
{

View File

@ -15,7 +15,6 @@
#include "jslock.h"
#include "jsobj.h"
#include "gc/Tracer.h"
#include "js/GCAPI.h"
#include "js/SliceBudget.h"
#include "js/Vector.h"
@@ -930,157 +929,6 @@ struct GCChunkHasher {
typedef HashSet<js::gc::Chunk *, GCChunkHasher, SystemAllocPolicy> GCChunkSet;
static const size_t NON_INCREMENTAL_MARK_STACK_BASE_CAPACITY = 4096;
static const size_t INCREMENTAL_MARK_STACK_BASE_CAPACITY = 32768;
template<class T>
struct MarkStack {
T *stack_;
T *tos_;
T *end_;
// The capacity we start with and reset() to.
size_t baseCapacity_;
size_t maxCapacity_;
MarkStack(size_t maxCapacity)
: stack_(nullptr),
tos_(nullptr),
end_(nullptr),
baseCapacity_(0),
maxCapacity_(maxCapacity)
{}
~MarkStack() {
js_free(stack_);
}
size_t capacity() { return end_ - stack_; }
ptrdiff_t position() const { return tos_ - stack_; }
void setStack(T *stack, size_t tosIndex, size_t capacity) {
stack_ = stack;
tos_ = stack + tosIndex;
end_ = stack + capacity;
}
void setBaseCapacity(JSGCMode mode) {
switch (mode) {
case JSGC_MODE_GLOBAL:
case JSGC_MODE_COMPARTMENT:
baseCapacity_ = NON_INCREMENTAL_MARK_STACK_BASE_CAPACITY;
break;
case JSGC_MODE_INCREMENTAL:
baseCapacity_ = INCREMENTAL_MARK_STACK_BASE_CAPACITY;
break;
default:
MOZ_ASSUME_UNREACHABLE("bad gc mode");
}
if (baseCapacity_ > maxCapacity_)
baseCapacity_ = maxCapacity_;
}
bool init(JSGCMode gcMode) {
setBaseCapacity(gcMode);
JS_ASSERT(!stack_);
T *newStack = js_pod_malloc<T>(baseCapacity_);
if (!newStack)
return false;
setStack(newStack, 0, baseCapacity_);
return true;
}
void setMaxCapacity(size_t maxCapacity) {
JS_ASSERT(isEmpty());
maxCapacity_ = maxCapacity;
if (baseCapacity_ > maxCapacity_)
baseCapacity_ = maxCapacity_;
reset();
}
bool push(T item) {
if (tos_ == end_) {
if (!enlarge(1))
return false;
}
JS_ASSERT(tos_ < end_);
*tos_++ = item;
return true;
}
bool push(T item1, T item2, T item3) {
T *nextTos = tos_ + 3;
if (nextTos > end_) {
if (!enlarge(3))
return false;
nextTos = tos_ + 3;
}
JS_ASSERT(nextTos <= end_);
tos_[0] = item1;
tos_[1] = item2;
tos_[2] = item3;
tos_ = nextTos;
return true;
}
bool isEmpty() const {
return tos_ == stack_;
}
T pop() {
JS_ASSERT(!isEmpty());
return *--tos_;
}
void reset() {
if (capacity() == baseCapacity_) {
// No size change; keep the current stack.
setStack(stack_, 0, baseCapacity_);
return;
}
T *newStack = (T *)js_realloc(stack_, sizeof(T) * baseCapacity_);
if (!newStack) {
// If the realloc fails, just keep using the existing stack; it's
// not ideal but better than failing.
newStack = stack_;
baseCapacity_ = capacity();
}
setStack(newStack, 0, baseCapacity_);
}
/* Grow the stack, ensuring there is space for at least count elements. */
bool enlarge(unsigned count) {
size_t newCapacity = Min(maxCapacity_, capacity() * 2);
if (newCapacity < capacity() + count)
return false;
size_t tosIndex = position();
T *newStack = (T *)js_realloc(stack_, sizeof(T) * newCapacity);
if (!newStack)
return false;
setStack(newStack, tosIndex, newCapacity);
return true;
}
void setGCMode(JSGCMode gcMode) {
// The mark stack won't be resized until the next call to reset(), but
// that will happen at the end of the next GC.
setBaseCapacity(gcMode);
}
size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
return mallocSizeOf(stack_);
}
};
struct GrayRoot {
void *thing;
JSGCTraceKind kind;
@@ -1094,178 +942,6 @@ struct GrayRoot {
: thing(thing), kind(kind) {}
};
struct GCMarker : public JSTracer {
private:
/*
* We use a common mark stack to mark GC things of different types and use
* the explicit tags to distinguish them when it cannot be deduced from
* the context of the push or pop operation.
*/
enum StackTag {
ValueArrayTag,
ObjectTag,
TypeTag,
XmlTag,
SavedValueArrayTag,
JitCodeTag,
LastTag = JitCodeTag
};
static const uintptr_t StackTagMask = 7;
static void staticAsserts() {
JS_STATIC_ASSERT(StackTagMask >= uintptr_t(LastTag));
JS_STATIC_ASSERT(StackTagMask <= gc::CellMask);
}
public:
explicit GCMarker(JSRuntime *rt);
bool init(JSGCMode gcMode);
void setMaxCapacity(size_t maxCap) { stack.setMaxCapacity(maxCap); }
size_t maxCapacity() const { return stack.maxCapacity_; }
void start();
void stop();
void reset();
void pushObject(ObjectImpl *obj) {
pushTaggedPtr(ObjectTag, obj);
}
void pushType(types::TypeObject *type) {
pushTaggedPtr(TypeTag, type);
}
void pushJitCode(jit::JitCode *code) {
pushTaggedPtr(JitCodeTag, code);
}
uint32_t getMarkColor() const {
return color;
}
/*
* Care must be taken when changing the mark color from gray to black. The cycle
* collector depends on the invariant that there are no black to gray edges
* in the GC heap. This invariant lets the CC not trace through black
* objects. If this invariant is violated, the cycle collector may free
* objects that are still reachable.
*/
void setMarkColorGray() {
JS_ASSERT(isDrained());
JS_ASSERT(color == gc::BLACK);
color = gc::GRAY;
}
void setMarkColorBlack() {
JS_ASSERT(isDrained());
JS_ASSERT(color == gc::GRAY);
color = gc::BLACK;
}
inline void delayMarkingArena(gc::ArenaHeader *aheader);
void delayMarkingChildren(const void *thing);
void markDelayedChildren(gc::ArenaHeader *aheader);
bool markDelayedChildren(SliceBudget &budget);
bool hasDelayedChildren() const {
return !!unmarkedArenaStackTop;
}
bool isDrained() {
return isMarkStackEmpty() && !unmarkedArenaStackTop;
}
bool drainMarkStack(SliceBudget &budget);
/*
* Gray marking must be done after all black marking is complete. However,
* we do not have write barriers on XPConnect roots. Therefore, XPConnect
* roots must be accumulated in the first slice of incremental GC. We
* accumulate these roots in each compartment's gcGrayRoots vector and
* then mark them later, after black marking is complete for each
* compartment. This accumulation can fail, but in that case we switch to
* non-incremental GC.
*/
bool hasBufferedGrayRoots() const;
void startBufferingGrayRoots();
void endBufferingGrayRoots();
void resetBufferedGrayRoots();
void markBufferedGrayRoots(JS::Zone *zone);
static void GrayCallback(JSTracer *trc, void **thing, JSGCTraceKind kind);
void setGCMode(JSGCMode mode) { stack.setGCMode(mode); }
size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
MarkStack<uintptr_t> stack;
private:
#ifdef DEBUG
void checkZone(void *p);
#else
void checkZone(void *p) {}
#endif
void pushTaggedPtr(StackTag tag, void *ptr) {
checkZone(ptr);
uintptr_t addr = reinterpret_cast<uintptr_t>(ptr);
JS_ASSERT(!(addr & StackTagMask));
if (!stack.push(addr | uintptr_t(tag)))
delayMarkingChildren(ptr);
}
void pushValueArray(JSObject *obj, void *start, void *end) {
checkZone(obj);
JS_ASSERT(start <= end);
uintptr_t tagged = reinterpret_cast<uintptr_t>(obj) | GCMarker::ValueArrayTag;
uintptr_t startAddr = reinterpret_cast<uintptr_t>(start);
uintptr_t endAddr = reinterpret_cast<uintptr_t>(end);
/*
* Push in reverse order so that obj will be on top. If we cannot push
* the array, we trigger delay marking for the whole object.
*/
if (!stack.push(endAddr, startAddr, tagged))
delayMarkingChildren(obj);
}
bool isMarkStackEmpty() {
return stack.isEmpty();
}
bool restoreValueArray(JSObject *obj, void **vpp, void **endp);
void saveValueRanges();
inline void processMarkStackTop(SliceBudget &budget);
void processMarkStackOther(uintptr_t tag, uintptr_t addr);
void appendGrayRoot(void *thing, JSGCTraceKind kind);
/* The color is only applied to objects and functions. */
uint32_t color;
mozilla::DebugOnly<bool> started;
/* Pointer to the top of the stack of arenas we are delaying marking on. */
js::gc::ArenaHeader *unmarkedArenaStackTop;
/* Count of arenas that are currently in the stack. */
mozilla::DebugOnly<size_t> markLaterArenas;
enum GrayBufferState
{
GRAY_BUFFER_UNUSED,
GRAY_BUFFER_OK,
GRAY_BUFFER_FAILED
};
GrayBufferState grayBufferState;
};
void
SetMarkStackLimit(JSRuntime *rt, size_t limit);
void
MarkStackRangeConservatively(JSTracer *trc, Value *begin, Value *end);
@@ -1320,12 +996,6 @@ IterateScripts(JSRuntime *rt, JSCompartment *compartment,
extern void
js_FinalizeStringRT(JSRuntime *rt, JSString *str);
/*
* Macro to test if a traversal is the marking phase of the GC.
*/
#define IS_GC_MARKING_TRACER(trc) \
((trc)->callback == nullptr || (trc)->callback == GCMarker::GrayCallback)
namespace js {
JSCompartment *

View File

@@ -18,6 +18,7 @@
#include "mozilla/MemoryReporting.h"
#include "gc/Barrier.h"
#include "gc/Marking.h"
#include "js/GCAPI.h"
#include "vm/ObjectImpl.h"

View File

@@ -34,6 +34,7 @@
#ifdef JSGC_GENERATIONAL
# include "gc/StoreBuffer.h"
#endif
#include "gc/Tracer.h"
#ifdef XP_MACOSX
# include "jit/AsmJSSignalHandlers.h"
#endif