Backed out changeset 8d1e96d1eb31 (bug 650161)

Ed Morley 2014-08-14 12:52:27 +01:00
parent 61a82a07a9
commit 8c11a7efc9
22 changed files with 48 additions and 174 deletions

View File

@@ -169,7 +169,6 @@ class BaseShape;
class DebugScopeObject;
class GlobalObject;
class LazyScript;
class NestedScopeObject;
class Nursery;
class ObjectImpl;
class PropertyName;
@@ -218,7 +217,6 @@ template <> struct MapTypeToTraceKind<JSObject> { static const JSGCTrace
template <> struct MapTypeToTraceKind<JSScript> { static const JSGCTraceKind kind = JSTRACE_SCRIPT; };
template <> struct MapTypeToTraceKind<JSString> { static const JSGCTraceKind kind = JSTRACE_STRING; };
template <> struct MapTypeToTraceKind<LazyScript> { static const JSGCTraceKind kind = JSTRACE_LAZY_SCRIPT; };
template <> struct MapTypeToTraceKind<NestedScopeObject>{ static const JSGCTraceKind kind = JSTRACE_OBJECT; };
template <> struct MapTypeToTraceKind<ObjectImpl> { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
template <> struct MapTypeToTraceKind<PropertyName> { static const JSGCTraceKind kind = JSTRACE_STRING; };
template <> struct MapTypeToTraceKind<SavedFrame> { static const JSGCTraceKind kind = JSTRACE_OBJECT; };

View File

@@ -396,11 +396,6 @@ class GCRuntime
void disableGenerationalGC();
void enableGenerationalGC();
#ifdef JSGC_COMPACTING
void disableCompactingGC();
void enableCompactingGC();
#endif
void setGrayRootsTracer(JSTraceDataOp traceOp, void *data);
bool addBlackRootsTracer(JSTraceDataOp traceOp, void *data);
void removeBlackRootsTracer(JSTraceDataOp traceOp, void *data);
@@ -718,15 +713,6 @@ class GCRuntime
*/
unsigned generationalDisabled;
#ifdef JSGC_COMPACTING
/*
* Some code cannot tolerate compacting GC so it can be disabled with this
* counter. This can happen from code executing in a ThreadSafeContext so
* we make it atomic.
*/
mozilla::Atomic<uint32_t, mozilla::ReleaseAcquire> compactingDisabled;
#endif
/*
* This is true if we are in the middle of a brain transplant (e.g.,
* JS_TransplantObject) or some other operation that can manipulate

View File

@@ -629,7 +629,6 @@ DeclMarkerImpl(Object, DebugScopeObject)
DeclMarkerImpl(Object, GlobalObject)
DeclMarkerImpl(Object, JSObject)
DeclMarkerImpl(Object, JSFunction)
DeclMarkerImpl(Object, NestedScopeObject)
DeclMarkerImpl(Object, ObjectImpl)
DeclMarkerImpl(Object, SavedFrame)
DeclMarkerImpl(Object, ScopeObject)

View File

@@ -23,7 +23,6 @@ class DebugScopeObject;
class GCMarker;
class GlobalObject;
class LazyScript;
class NestedScopeObject;
class SavedFrame;
class ScopeObject;
class Shape;
@@ -113,7 +112,6 @@ DeclMarker(Object, DebugScopeObject)
DeclMarker(Object, GlobalObject)
DeclMarker(Object, JSObject)
DeclMarker(Object, JSFunction)
DeclMarker(Object, NestedScopeObject)
DeclMarker(Object, SavedFrame)
DeclMarker(Object, ScopeObject)
DeclMarker(Script, JSScript)

View File

@@ -637,26 +637,28 @@ GCMarker::markBufferedGrayRoots(JS::Zone *zone)
#ifdef DEBUG
setTracingDetails(elem->debugPrinter, elem->debugPrintArg, elem->debugPrintIndex);
#endif
MarkKind(this, elem->thingp, elem->kind);
void *tmp = elem->thing;
setTracingLocation((void *)&elem->thing);
MarkKind(this, &tmp, elem->kind);
JS_ASSERT(tmp == elem->thing);
}
}
void
GCMarker::appendGrayRoot(void **thingp, JSGCTraceKind kind)
GCMarker::appendGrayRoot(void *thing, JSGCTraceKind kind)
{
JS_ASSERT(started);
if (grayBufferState == GRAY_BUFFER_FAILED)
return;
GrayRoot root(thingp, kind);
GrayRoot root(thing, kind);
#ifdef DEBUG
root.debugPrinter = debugPrinter();
root.debugPrintArg = debugPrintArg();
root.debugPrintIndex = debugPrintIndex();
#endif
void *thing = *thingp;
Zone *zone = static_cast<Cell *>(thing)->tenuredZone();
if (zone->isCollecting()) {
// See the comment on SetMaybeAliveFlag to see why we only do this for
@@ -686,7 +688,7 @@ GCMarker::GrayCallback(JSTracer *trc, void **thingp, JSGCTraceKind kind)
JS_ASSERT(thingp);
JS_ASSERT(*thingp);
GCMarker *gcmarker = static_cast<GCMarker *>(trc);
gcmarker->appendGrayRoot(thingp, kind);
gcmarker->appendGrayRoot(*thingp, kind);
}
size_t

View File

@@ -275,7 +275,7 @@ class GCMarker : public JSTracer
inline void processMarkStackTop(SliceBudget &budget);
void processMarkStackOther(uintptr_t tag, uintptr_t addr);
void appendGrayRoot(void **thingp, JSGCTraceKind kind);
void appendGrayRoot(void *thing, JSGCTraceKind kind);
/* The color is only applied to objects and functions. */
uint32_t color;

View File

@@ -627,17 +627,6 @@ JSCompartment::sweep(FreeOp *fop, bool releaseTypes)
ni->unlink();
ni = next;
}
/* For each debuggee being GC'd, detach it from all its debuggers. */
for (GlobalObjectSet::Enum e(debuggees); !e.empty(); e.popFront()) {
GlobalObject *global = e.front();
if (IsObjectAboutToBeFinalized(&global)) {
// See infallibility note above.
Debugger::detachAllDebuggersFromGlobal(fop, global, &e);
} else if (global != e.front()) {
e.rekeyFront(global);
}
}
}
/*
@@ -892,7 +881,7 @@ JSCompartment::updateJITForDebugMode(JSContext *maybecx, AutoDebugModeInvalidati
}
bool
JSCompartment::addDebuggee(JSContext *cx, JS::Handle<js::GlobalObject *> global)
JSCompartment::addDebuggee(JSContext *cx, js::GlobalObject *global)
{
AutoDebugModeInvalidation invalidate(this);
return addDebuggee(cx, global, invalidate);
@@ -900,9 +889,11 @@ JSCompartment::addDebuggee(JSContext *cx, JS::Handle<js::GlobalObject *> global)
bool
JSCompartment::addDebuggee(JSContext *cx,
JS::Handle<GlobalObject *> global,
GlobalObject *globalArg,
AutoDebugModeInvalidation &invalidate)
{
Rooted<GlobalObject*> global(cx, globalArg);
bool wasEnabled = debugMode();
if (!debuggees.put(global)) {
js_ReportOutOfMemory(cx);

View File

@@ -423,8 +423,8 @@ struct JSCompartment
public:
js::GlobalObjectSet &getDebuggees() { return debuggees; }
bool addDebuggee(JSContext *cx, JS::Handle<js::GlobalObject *> global);
bool addDebuggee(JSContext *cx, JS::Handle<js::GlobalObject *> global,
bool addDebuggee(JSContext *cx, js::GlobalObject *global);
bool addDebuggee(JSContext *cx, js::GlobalObject *global,
js::AutoDebugModeInvalidation &invalidate);
bool removeDebuggee(JSContext *cx, js::GlobalObject *global,
js::GlobalObjectSet::Enum *debuggeesEnum = nullptr);

View File

@@ -1158,9 +1158,6 @@ GCRuntime::GCRuntime(JSRuntime *rt) :
sliceBudget(SliceBudget::Unlimited),
incrementalAllowed(true),
generationalDisabled(0),
#ifdef JSGC_COMPACTING
compactingDisabled(0),
#endif
manipulatingDeadZones(false),
objectsMarkedInDeadZones(0),
poked(false),
@@ -2007,7 +2004,7 @@ bool
GCRuntime::shouldCompact()
{
#ifdef JSGC_COMPACTING
return invocationKind == GC_SHRINK && !compactingDisabled;
return invocationKind == GC_SHRINK;
#else
return false;
#endif
@@ -2015,30 +2012,6 @@ GCRuntime::shouldCompact()
#ifdef JSGC_COMPACTING
void
GCRuntime::disableCompactingGC()
{
++rt->gc.compactingDisabled;
}
void
GCRuntime::enableCompactingGC()
{
JS_ASSERT(compactingDisabled > 0);
--compactingDisabled;
}
AutoDisableCompactingGC::AutoDisableCompactingGC(JSRuntime *rt)
: gc(rt->gc)
{
gc.disableCompactingGC();
}
AutoDisableCompactingGC::~AutoDisableCompactingGC()
{
gc.enableCompactingGC();
}
static void
ForwardCell(Cell *dest, Cell *src)
{
@@ -2075,11 +2048,9 @@ CanRelocateArena(ArenaHeader *arena)
/*
* We can't currently move global objects because their address is baked
* into compiled code. We therefore skip moving the contents of any arena
* containing a global if ion or baseline are enabled.
* containing a global.
*/
JSRuntime *rt = arena->zone->runtimeFromMainThread();
return arena->getAllocKind() <= FINALIZE_OBJECT_LAST &&
((!rt->options().baseline() && !rt->options().ion()) || !ArenaContainsGlobal(arena));
return arena->getAllocKind() <= FINALIZE_OBJECT_LAST && !ArenaContainsGlobal(arena);
}
static bool
@@ -2132,14 +2103,6 @@ ArenaList::pickArenasToRelocate()
return head;
}
#ifdef DEBUG
inline bool
PtrIsInRange(void *ptr, void *start, size_t length)
{
return uintptr_t(ptr) - uintptr_t(start) < length;
}
#endif
static bool
RelocateCell(Zone *zone, Cell *src, AllocKind thingKind, size_t thingSize)
{
@@ -2153,38 +2116,22 @@ RelocateCell(Zone *zone, Cell *src, AllocKind thingKind, size_t thingSize)
// Copy source cell contents to destination.
memcpy(dst, src, thingSize);
// Mark source cell as forwarded and leave a pointer to the destination.
ForwardCell(static_cast<Cell *>(dst), src);
// Fixup the pointer to inline object elements if necessary.
if (thingKind <= FINALIZE_OBJECT_LAST) {
JSObject *srcObj = static_cast<JSObject *>(src);
JSObject *dstObj = static_cast<JSObject *>(dst);
if (srcObj->hasFixedElements())
dstObj->setFixedElements();
if (srcObj->is<ArrayBufferObject>()) {
// We must fix up any inline data pointers while we know the source
// object and before we mark any of the views.
ArrayBufferObject::fixupDataPointerAfterMovingGC(
srcObj->as<ArrayBufferObject>(), dstObj->as<ArrayBufferObject>());
} else if (srcObj->is<TypedArrayObject>()) {
TypedArrayObject &typedArray = srcObj->as<TypedArrayObject>();
if (!typedArray.hasBuffer()) {
JS_ASSERT(srcObj->getPrivate() ==
srcObj->fixedData(TypedArrayObject::FIXED_DATA_START));
dstObj->setPrivate(dstObj->fixedData(TypedArrayObject::FIXED_DATA_START));
}
}
JS_ASSERT_IF(dstObj->isNative(),
!PtrIsInRange((HeapSlot*)dstObj->getDenseElements(), src, thingSize));
JS_ASSERT(
uintptr_t((HeapSlot*)dstObj->getElementsHeader()) - uintptr_t(srcObj) >= thingSize);
}
// Copy the mark bits.
static_cast<Cell *>(dst)->copyMarkBitsFrom(src);
// Mark source cell as forwarded and leave a pointer to the destination.
ForwardCell(static_cast<Cell *>(dst), src);
return true;
}
@@ -2304,12 +2251,10 @@ void
MovingTracer::Visit(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
{
Cell *thing = static_cast<Cell *>(*thingp);
Zone *zone = thing->tenuredZoneFromAnyThread();
if (!zone->isGCCompacting()) {
if (!thing->tenuredZone()->isGCCompacting()) {
JS_ASSERT(!IsForwarded(thing));
return;
}
JS_ASSERT(CurrentThreadCanAccessZone(zone));
if (IsForwarded(thing)) {
Cell *dst = Forwarded(thing);
@@ -2335,6 +2280,7 @@ MovingTracer::Sweep(JSTracer *jstrc)
for (CompartmentsInZoneIter c(zone); !c.done(); c.next()) {
c->sweep(fop, false);
ArrayBufferObject::sweep(c);
}
} else {
/* Update cross compartment wrappers into moved zones. */
@@ -2345,9 +2291,6 @@ MovingTracer::Sweep(JSTracer *jstrc)
/* Type inference may put more blocks here to free. */
rt->freeLifoAlloc.freeAll();
/* Clear the new object cache as this can contain cell pointers. */
rt->newObjectCache.purge();
}
/*
@@ -2416,15 +2359,12 @@ GCRuntime::updatePointersToRelocatedCells()
Debugger::markCrossCompartmentDebuggerObjectReferents(&trc);
for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
WeakMapBase::markAll(c, &trc);
if (c->watchpointMap)
c->watchpointMap->markAll(&trc);
}
// Mark all gray roots, making sure we call the trace callback to get the
// current set.
marker.resetBufferedGrayRoots();
markAllGrayReferences(gcstats::PHASE_COMPACT_UPDATE_GRAY);
markAllWeakReferences(gcstats::PHASE_COMPACT_UPDATE_GRAY);
MovingTracer::Sweep(&trc);
}

View File

@@ -1138,7 +1138,7 @@ struct GCChunkHasher {
typedef HashSet<js::gc::Chunk *, GCChunkHasher, SystemAllocPolicy> GCChunkSet;
struct GrayRoot {
void **thingp;
void *thing;
JSGCTraceKind kind;
#ifdef DEBUG
JSTraceNamePrinter debugPrinter;
@@ -1146,8 +1146,8 @@ struct GrayRoot {
size_t debugPrintIndex;
#endif
GrayRoot(void **thingp, JSGCTraceKind kind)
: thingp(thingp), kind(kind) {}
GrayRoot(void *thing, JSGCTraceKind kind)
: thing(thing), kind(kind) {}
};
void
@@ -1427,20 +1427,6 @@ struct AutoDisableProxyCheck
};
#endif
struct AutoDisableCompactingGC
{
#ifdef JSGC_COMPACTING
explicit AutoDisableCompactingGC(JSRuntime *rt);
~AutoDisableCompactingGC();
private:
gc::GCRuntime &gc;
#else
explicit AutoDisableCompactingGC(JSRuntime *rt) {}
~AutoDisableCompactingGC() {}
#endif
};
void
PurgeJITCaches(JS::Zone *zone);
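For context, the AutoDisableCompactingGC guard declared (and removed) above is an RAII wrapper around the disableCompactingGC()/enableCompactingGC() counter seen in the jsgc.cpp hunks earlier; it is a no-op when JSGC_COMPACTING is not defined. A minimal usage sketch, assuming a JSRuntime pointer rt is in scope (illustration only, not part of this changeset):

    {
        js::AutoDisableCompactingGC nocgc(rt);  // constructor bumps compactingDisabled
        // ... work with raw pointers that a compacting GC must not move ...
    }   // destructor decrements the counter, re-enabling compacting GC

This matches how the guard is used by HeapReverser and ObjectImpl::toDictionaryMode in the hunks further down.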

View File

@@ -13,7 +13,7 @@ namespace js {
/*
* Used to add entries to a js::HashMap or HashSet where the key depends on a GC
* thing that may be moved by generational or compacting GC between the call to
* thing that may be moved by generational collection between the call to
* lookupForAdd() and relookupOrAdd().
*/
template <class T>

View File

@@ -68,13 +68,6 @@ WeakMapBase::unmarkCompartment(JSCompartment *c)
m->marked = false;
}
void
WeakMapBase::markAll(JSCompartment *c, JSTracer *tracer)
{
for (WeakMapBase *m = c->gcWeakMapList; m; m = m->next)
m->markIteratively(tracer);
}
bool
WeakMapBase::markCompartmentIteratively(JSCompartment *c, JSTracer *tracer)
{

View File

@@ -49,9 +49,6 @@ class WeakMapBase {
// Unmark all weak maps in a compartment.
static void unmarkCompartment(JSCompartment *c);
// Mark all the weakmaps in a compartment.
static void markAll(JSCompartment *c, JSTracer *tracer);
// Check all weak maps in a compartment that have been marked as live in this garbage
// collection, and mark the values of all entries that have become strong references
// to them. Return true if we marked any new values, indicating that we need to make

View File

@@ -157,7 +157,6 @@ class HeapReverser : public JSTracer, public JS::CustomAutoRooter
: JSTracer(cx->runtime(), traverseEdgeWithThis),
JS::CustomAutoRooter(cx),
noggc(JS_GetRuntime(cx)),
nocgc(JS_GetRuntime(cx)),
runtime(JS_GetRuntime(cx)),
parent(nullptr)
{
@@ -170,7 +169,6 @@ class HeapReverser : public JSTracer, public JS::CustomAutoRooter
private:
JS::AutoDisableGenerationalGC noggc;
js::AutoDisableCompactingGC nocgc;
/* A runtime pointer for use by the destructor. */
JSRuntime *runtime;

View File

@@ -932,15 +932,6 @@ ArrayBufferObject::sweep(JSCompartment *compartment)
gcLiveArrayBuffers.clear();
}
/* static */ void
ArrayBufferObject::fixupDataPointerAfterMovingGC(const ArrayBufferObject &src, ArrayBufferObject &dst)
{
// Fix up possible inline data pointer.
const size_t reservedSlots = JSCLASS_RESERVED_SLOTS(&ArrayBufferObject::class_);
if (src.dataPointer() == src.fixedData(reservedSlots))
dst.setSlot(DATA_SLOT, PrivateValue(dst.fixedData(reservedSlots)));
}
void
ArrayBufferObject::resetArrayBufferList(JSCompartment *comp)
{
@@ -1001,7 +992,7 @@ ArrayBufferViewObject::trace(JSTracer *trc, JSObject *obj)
// Update obj's data pointer if the array buffer moved. Note that during
// initialization, bufSlot may still contain |undefined|.
if (bufSlot.isObject()) {
ArrayBufferObject &buf = AsArrayBuffer(MaybeForwarded(&bufSlot.toObject()));
ArrayBufferObject &buf = AsArrayBuffer(&bufSlot.toObject());
int32_t offset = obj->getReservedSlot(BYTEOFFSET_SLOT).toInt32();
MOZ_ASSERT(buf.dataPointer() != nullptr);
obj->initPrivate(buf.dataPointer() + offset);

View File

@@ -95,8 +95,6 @@ class ArrayBufferObject : public JSObject
static void sweep(JSCompartment *rt);
static void fixupDataPointerAfterMovingGC(const ArrayBufferObject &src, ArrayBufferObject &dst);
static void resetArrayBufferList(JSCompartment *rt);
static bool saveArrayBufferList(JSCompartment *c, ArrayBufferVector &vector);
static void restoreArrayBufferLists(ArrayBufferVector &vector);

View File

@@ -1750,6 +1750,20 @@ Debugger::sweepAll(FreeOp *fop)
}
}
}
for (gc::GCCompartmentGroupIter comp(rt); !comp.done(); comp.next()) {
/* For each debuggee being GC'd, detach it from all its debuggers. */
GlobalObjectSet &debuggees = comp->getDebuggees();
for (GlobalObjectSet::Enum e(debuggees); !e.empty(); e.popFront()) {
GlobalObject *global = e.front();
if (IsObjectAboutToBeFinalized(&global)) {
// See infallibility note above.
detachAllDebuggersFromGlobal(fop, global, &e);
} else if (global != e.front()) {
e.rekeyFront(global);
}
}
}
}
void

View File

@@ -10,7 +10,6 @@
#include "jsapi.h"
#include "jscompartment.h"
#include "jsfriendapi.h"
#include "jshashutil.h"
#include "jsnum.h"
#include "gc/Marking.h"
@@ -563,7 +562,7 @@ SavedStacks::insertFrames(JSContext *cx, FrameIter &iter, MutableHandleSavedFram
SavedFrame *
SavedStacks::getOrCreateSavedFrame(JSContext *cx, SavedFrame::HandleLookup lookup)
{
DependentAddPtr<SavedFrame::Set> p(cx, frames, lookup);
SavedFrame::Set::AddPtr p = frames.lookupForAdd(lookup);
if (p)
return *p;
@@ -571,7 +570,7 @@ SavedStacks::getOrCreateSavedFrame(JSContext *cx, SavedFrame::HandleLookup looku
if (!frame)
return nullptr;
if (!p.add(cx, frames, lookup, frame))
if (!frames.relookupOrAdd(p, lookup, frame))
return nullptr;
return frame;
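For context, the hunks above revert SavedStacks::getOrCreateSavedFrame from the DependentAddPtr helper back to the plain lookupForAdd()/relookupOrAdd() idiom described in the jshashutil.h comment earlier. A minimal sketch of that restored idiom, with createFrameFromLookup standing in for the elided allocation step (assumed name, illustration only):

    SavedFrame::Set::AddPtr p = frames.lookupForAdd(lookup);
    if (p)
        return *p;                                  // entry already present
    SavedFrame *frame = createFrameFromLookup(cx, lookup);  // allocation may GC
    if (!frame)
        return nullptr;
    // relookupOrAdd() re-finds the slot, since a generational GC during the
    // allocation may have moved things between lookupForAdd() and the insert.
    if (!frames.relookupOrAdd(p, lookup, frame))
        return nullptr;
    return frame;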

View File

@@ -1070,15 +1070,6 @@ ScopeIterKey::match(ScopeIterKey si1, ScopeIterKey si2)
si1.type_ == si2.type_));
}
void
ScopeIterVal::sweep()
{
/* We need to update possibly moved pointers on sweep. */
MOZ_ALWAYS_FALSE(IsObjectAboutToBeFinalized(cur_.unsafeGet()));
if (staticScope_)
MOZ_ALWAYS_FALSE(IsObjectAboutToBeFinalized(staticScope_.unsafeGet()));
}
// Live ScopeIter values may be added to DebugScopes::liveScopes, as
// ScopeIterVal instances. They need to have write barriers when they are added
// to the hash table, but no barriers when rehashing inside GC. It's a nasty
@@ -1801,7 +1792,7 @@ DebugScopes::sweep(JSRuntime *rt)
key.updateCur(js::gc::Forwarded(key.cur()));
needsUpdate = true;
}
if (key.staticScope() && IsForwarded(key.staticScope())) {
if (IsForwarded(key.staticScope())) {
key.updateStaticScope(Forwarded(key.staticScope()));
needsUpdate = true;
}
@@ -1813,8 +1804,6 @@ DebugScopes::sweep(JSRuntime *rt)
for (LiveScopeMap::Enum e(liveScopes); !e.empty(); e.popFront()) {
ScopeObject *scope = e.front().key();
e.front().value().sweep();
/*
* Scopes can be finalized when a debugger-synthesized ScopeObject is
* no longer reachable via its DebugScopeObject.

View File

@@ -733,8 +733,6 @@ class ScopeIterVal
ScopeIter::Type type_;
bool hasScopeObject_;
void sweep();
static void staticAsserts();
public:

View File

@@ -445,7 +445,7 @@ js::ObjectImpl::toDictionaryMode(ThreadSafeContext *cx)
#ifdef JSGC_COMPACTING
// TODO: This crashes if we run a compacting GC here.
js::AutoDisableCompactingGC nogc(zone()->runtimeFromAnyThread());
js::gc::AutoSuppressGC nogc(zone()->runtimeFromAnyThread());
#endif
/* We allocate the shapes from cx->compartment(), so make sure it's right. */

View File

@@ -80,9 +80,6 @@ class TypedArrayObject : public ArrayBufferViewObject
ensureHasBuffer(JSContext *cx, Handle<TypedArrayObject *> tarray);
ArrayBufferObject *sharedBuffer() const;
bool hasBuffer() const {
return bufferValue(const_cast<TypedArrayObject*>(this)).isObject();
}
ArrayBufferObject *buffer() const {
JSObject *obj = bufferValue(const_cast<TypedArrayObject*>(this)).toObjectOrNull();
if (!obj)