Bug 650161 - Update pointers to relocated objects r=terrence
parent 62aae1e869
commit 2c36c06785
@@ -959,6 +959,7 @@ class HashTable : private AllocPolicy
     // a new key at the new Lookup position. |front()| is invalid after
     // this operation until the next call to |popFront()|.
     void rekeyFront(const Lookup &l, const Key &k) {
+        JS_ASSERT(&k != &HashPolicy::getKey(this->cur->get()));
         Ptr p(*this->cur, table_);
         table_.rekeyWithoutRehash(p, l, k);
         rekeyed = true;
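The assertion guards rekeyFront(), the primitive much of this patch leans on: a table entry's key can be updated in place mid-enumeration instead of being erased and reinserted. Below is a standalone sketch of the problem it solves, using std::unordered_map rather than SpiderMonkey's HashTable; all names are illustrative and this is not part of the patch.

```cpp
#include <cassert>
#include <unordered_map>

// Sketch: with std::unordered_map, a key that "moves" (here: changes value)
// cannot be updated in place during iteration; entries must be erased and
// collected into a second map. HashTable::rekeyFront() avoids this extra
// pass by rehashing the current entry under its new key while enumeration
// continues.
int main() {
    std::unordered_map<int, const char *> table{{1, "one"}, {2, "two"}};
    std::unordered_map<int, const char *> rekeyed;
    for (auto it = table.begin(); it != table.end();) {
        int newKey = it->first + 100;  // pretend the key object was relocated
        rekeyed.emplace(newKey, it->second);
        it = table.erase(it);
    }
    table = std::move(rekeyed);
    assert(table.count(101) == 1 && table.count(102) == 1);
}
```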
@@ -1126,6 +1126,14 @@ StructTypeDescr::fieldCount() const
     return getReservedSlot(JS_DESCR_SLOT_STRUCT_FIELD_NAMES).toObject().getDenseInitializedLength();
 }
 
+size_t
+StructTypeDescr::maybeForwardedFieldCount() const
+{
+    JSObject *fieldNames =
+        MaybeForwarded(&getReservedSlot(JS_DESCR_SLOT_STRUCT_FIELD_NAMES).toObject());
+    return fieldNames->getDenseInitializedLength();
+}
+
 bool
 StructTypeDescr::fieldIndex(jsid id, size_t *out) const
 {
@@ -1157,6 +1165,15 @@ StructTypeDescr::fieldOffset(size_t index) const
     return SafeCast<size_t>(fieldOffsets.getDenseElement(index).toInt32());
 }
 
+size_t
+StructTypeDescr::maybeForwardedFieldOffset(size_t index) const
+{
+    JSObject &fieldOffsets =
+        *MaybeForwarded(&getReservedSlot(JS_DESCR_SLOT_STRUCT_FIELD_OFFSETS).toObject());
+    JS_ASSERT(index < fieldOffsets.getDenseInitializedLength());
+    return SafeCast<size_t>(fieldOffsets.getDenseElement(index).toInt32());
+}
+
 SizedTypeDescr&
 StructTypeDescr::fieldDescr(size_t index) const
 {
@@ -1166,6 +1183,15 @@ StructTypeDescr::fieldDescr(size_t index) const
     return fieldDescrs.getDenseElement(index).toObject().as<SizedTypeDescr>();
 }
 
+SizedTypeDescr&
+StructTypeDescr::maybeForwardedFieldDescr(size_t index) const
+{
+    JSObject &fieldDescrs =
+        *MaybeForwarded(&getReservedSlot(JS_DESCR_SLOT_STRUCT_FIELD_TYPES).toObject());
+    JS_ASSERT(index < fieldDescrs.getDenseInitializedLength());
+    return fieldDescrs.getDenseElement(index).toObject().as<SizedTypeDescr>();
+}
+
 /******************************************************************************
  * Creating the TypedObject "module"
  *
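These maybeForwarded* accessors all funnel through gc::MaybeForwarded(), which follows the forwarding pointer a compacting GC leaves in a cell's old location. Here is a minimal self-contained sketch of that mechanism; the Cell type and its encoding are invented for illustration and differ from the real SpiderMonkey layout.

```cpp
#include <cassert>
#include <cstdint>

// Toy cell: when relocated, the old location is overwritten with a tag bit
// and the new address, so stale pointers can be lazily updated.
struct Cell {
    uintptr_t forward;  // 0 = not moved; otherwise new address | 1
    int payload;

    bool isForwarded() const { return forward & 1; }
    Cell *forwarded() const { return reinterpret_cast<Cell *>(forward & ~uintptr_t(1)); }
};

Cell *MaybeForwarded(Cell *c) { return c->isForwarded() ? c->forwarded() : c; }

int main() {
    Cell newLoc{0, 42};
    Cell oldLoc{reinterpret_cast<uintptr_t>(&newLoc) | 1, 0};
    Cell *stale = &oldLoc;  // a pointer the GC has not fixed up yet
    assert(MaybeForwarded(stale)->payload == 42);
}
```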
@@ -1630,7 +1656,11 @@ TypedObject::obj_trace(JSTracer *trace, JSObject *object)
 
     JS_ASSERT(object->is<TypedObject>());
     TypedObject &typedObj = object->as<TypedObject>();
-    TypeDescr &descr = typedObj.typeDescr();
+
+    // When this is called for compacting GC, the related objects we touch here
+    // may not have had their slots updated yet.
+    TypeDescr &descr = typedObj.maybeForwardedTypeDescr();
 
     if (descr.opaque()) {
         uint8_t *mem = typedObj.typedMem();
         if (!mem)
@@ -3092,7 +3122,7 @@ visitReferences(SizedTypeDescr &descr,
       case type::SizedArray:
       {
         SizedArrayTypeDescr &arrayDescr = descr.as<SizedArrayTypeDescr>();
-        SizedTypeDescr &elementDescr = arrayDescr.elementType();
+        SizedTypeDescr &elementDescr = arrayDescr.maybeForwardedElementType();
         for (int32_t i = 0; i < arrayDescr.length(); i++) {
             visitReferences(elementDescr, mem, visitor);
             mem += elementDescr.size();
@@ -3108,9 +3138,9 @@ visitReferences(SizedTypeDescr &descr,
       case type::Struct:
       {
         StructTypeDescr &structDescr = descr.as<StructTypeDescr>();
-        for (size_t i = 0; i < structDescr.fieldCount(); i++) {
-            SizedTypeDescr &descr = structDescr.fieldDescr(i);
-            size_t offset = structDescr.fieldOffset(i);
+        for (size_t i = 0; i < structDescr.maybeForwardedFieldCount(); i++) {
+            SizedTypeDescr &descr = structDescr.maybeForwardedFieldDescr(i);
+            size_t offset = structDescr.maybeForwardedFieldOffset(i);
             visitReferences(descr, mem + offset, visitor);
         }
         return;
@@ -169,6 +169,10 @@ class TypedProto : public JSObject
         return getReservedSlot(JS_TYPROTO_SLOT_DESCR).toObject().as<TypeDescr>();
     }
 
+    TypeDescr &maybeForwardedTypeDescr() const {
+        return MaybeForwarded(&getReservedSlot(JS_TYPROTO_SLOT_DESCR).toObject())->as<TypeDescr>();
+    }
+
     inline type::Kind kind() const;
 };
 
@@ -453,6 +457,11 @@ class SizedArrayTypeDescr : public ComplexTypeDescr
         return getReservedSlot(JS_DESCR_SLOT_ARRAY_ELEM_TYPE).toObject().as<SizedTypeDescr>();
     }
 
+    SizedTypeDescr &maybeForwardedElementType() const {
+        JSObject *elemType = &getReservedSlot(JS_DESCR_SLOT_ARRAY_ELEM_TYPE).toObject();
+        return MaybeForwarded(elemType)->as<SizedTypeDescr>();
+    }
+
     int32_t length() const {
         return getReservedSlot(JS_DESCR_SLOT_SIZED_ARRAY_LENGTH).toInt32();
     }
@@ -492,6 +501,7 @@ class StructTypeDescr : public ComplexTypeDescr
 
     // Returns the number of fields defined in this struct.
     size_t fieldCount() const;
+    size_t maybeForwardedFieldCount() const;
 
     // Set `*out` to the index of the field named `id` and returns true,
     // or return false if no such field exists.
@@ -502,9 +512,11 @@ class StructTypeDescr : public ComplexTypeDescr
 
     // Return the type descr of the field at index `index`.
     SizedTypeDescr &fieldDescr(size_t index) const;
+    SizedTypeDescr &maybeForwardedFieldDescr(size_t index) const;
 
     // Return the offset of the field at index `index`.
     size_t fieldOffset(size_t index) const;
+    size_t maybeForwardedFieldOffset(size_t index) const;
 };
 
 typedef Handle<StructTypeDescr*> HandleStructTypeDescr;
@@ -678,10 +690,18 @@ class TypedObject : public ArrayBufferViewObject
         return getProto()->as<TypedProto>();
     }
 
+    TypedProto &maybeForwardedTypedProto() const {
+        return MaybeForwarded(getProto())->as<TypedProto>();
+    }
+
     TypeDescr &typeDescr() const {
         return typedProto().typeDescr();
     }
 
+    TypeDescr &maybeForwardedTypeDescr() const {
+        return maybeForwardedTypedProto().maybeForwardedTypeDescr();
+    }
+
     uint8_t *typedMem() const {
         return (uint8_t*) getPrivate();
     }
@@ -262,6 +262,11 @@ class GCRuntime
     bool isHeapMajorCollecting() { return heapState == js::MajorCollecting; }
     bool isHeapMinorCollecting() { return heapState == js::MinorCollecting; }
    bool isHeapCollecting() { return isHeapMajorCollecting() || isHeapMinorCollecting(); }
+#ifdef JSGC_COMPACTING
+    bool isHeapCompacting() { return isHeapMajorCollecting() && state() == COMPACT; }
+#else
+    bool isHeapCompacting() { return false; }
+#endif
 
     // Performance note: if isFJMinorCollecting turns out to be slow because
     // reading the counter is slow then we may be able to augment the counter
@@ -164,6 +164,10 @@ CheckMarkedThing(JSTracer *trc, T **thingp)
     T *thing = *thingp;
     JS_ASSERT(*thingp);
 
+#ifdef JSGC_COMPACTING
+    thing = MaybeForwarded(thing);
+#endif
+
 # ifdef JSGC_FJGENERATIONAL
     /*
      * The code below (runtimeFromMainThread(), etc) makes assumptions
@@ -442,6 +446,10 @@ IsMarked(T **thingp)
     Zone *zone = (*thingp)->tenuredZone();
     if (!zone->isCollecting() || zone->isGCFinished())
         return true;
+#ifdef JSGC_COMPACTING
+    if (zone->isGCCompacting() && IsForwarded(*thingp))
+        *thingp = Forwarded(*thingp);
+#endif
     return (*thingp)->isMarked();
 }
@@ -480,19 +488,27 @@ IsAboutToBeFinalized(T **thingp)
     }
 #endif // JSGC_GENERATIONAL
 
-    if (!thing->tenuredZone()->isGCSweeping())
+    Zone *zone = thing->tenuredZone();
+    if (zone->isGCSweeping()) {
+        /*
+         * We should return false for things that have been allocated during
+         * incremental sweeping, but this possibility doesn't occur at the moment
+         * because this function is only called at the very start of the sweeping a
+         * compartment group and during minor gc. Rather than do the extra check,
+         * we just assert that it's not necessary.
+         */
+        JS_ASSERT_IF(!rt->isHeapMinorCollecting(), !thing->arenaHeader()->allocatedDuringIncremental);
+
+        return !thing->isMarked();
+    }
+#ifdef JSGC_COMPACTING
+    else if (zone->isGCCompacting() && IsForwarded(thing)) {
+        *thingp = Forwarded(thing);
         return false;
+    }
+#endif
 
-    /*
-     * We should return false for things that have been allocated during
-     * incremental sweeping, but this possibility doesn't occur at the moment
-     * because this function is only called at the very start of the sweeping a
-     * compartment group and during minor gc. Rather than do the extra check,
-     * we just assert that it's not necessary.
-     */
-    JS_ASSERT_IF(!rt->isHeapMinorCollecting(), !thing->arenaHeader()->allocatedDuringIncremental);
-
-    return !thing->isMarked();
+    return false;
 }
 
 template <typename T>
@@ -500,21 +516,32 @@ T *
 UpdateIfRelocated(JSRuntime *rt, T **thingp)
 {
     JS_ASSERT(thingp);
+    if (!*thingp)
+        return nullptr;
+
 #ifdef JSGC_GENERATIONAL
+
 #ifdef JSGC_FJGENERATIONAL
-    if (*thingp && rt->isFJMinorCollecting()) {
+    if (rt->isFJMinorCollecting()) {
         ForkJoinContext *ctx = ForkJoinContext::current();
         ForkJoinNursery &nursery = ctx->nursery();
         if (nursery.isInsideFromspace(*thingp))
             nursery.getForwardedPointer(thingp);
+        return *thingp;
     }
-    else
 #endif
-    {
-        if (*thingp && rt->isHeapMinorCollecting() && IsInsideNursery(*thingp))
-            rt->gc.nursery.getForwardedPointer(thingp);
-    }
+
+    if (rt->isHeapMinorCollecting() && IsInsideNursery(*thingp)) {
+        rt->gc.nursery.getForwardedPointer(thingp);
+        return *thingp;
+    }
 #endif // JSGC_GENERATIONAL
+
+#ifdef JSGC_COMPACTING
+    Zone *zone = (*thingp)->tenuredZone();
+    if (zone->isGCCompacting() && IsForwarded(*thingp))
+        *thingp = Forwarded(*thingp);
+#endif
     return *thingp;
 }
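Taken together, the two rewritten helpers implement one rule: during sweeping, liveness is the mark bit; during compaction, a forwarded thing is alive and the caller's pointer must be updated in passing. A condensed restatement with toy types (not the real SpiderMonkey signatures, and not part of the patch):

```cpp
// Condensed restatement of the new control flow, using invented types.
enum class ZoneState { NoGC, Sweep, Compact };

struct Thing {
    bool marked;
    Thing *forward;  // non-null once relocated by the compacting GC
};

bool IsAboutToBeFinalized(Thing **thingp, ZoneState state) {
    Thing *thing = *thingp;
    if (state == ZoneState::Sweep)
        return !thing->marked;        // unmarked during sweep: dying
    if (state == ZoneState::Compact && thing->forward) {
        *thingp = thing->forward;     // moved: update caller, keep alive
        return false;
    }
    return false;                     // not sweeping or compacting: alive
}
```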
@@ -135,10 +135,10 @@ Zone::sweepBreakpoints(FreeOp *fop)
     gcstats::AutoPhase ap1(fop->runtime()->gc.stats, gcstats::PHASE_SWEEP_TABLES);
     gcstats::AutoPhase ap2(fop->runtime()->gc.stats, gcstats::PHASE_SWEEP_TABLES_BREAKPOINT);
 
-    JS_ASSERT(isGCSweeping());
+    JS_ASSERT(isGCSweepingOrCompacting());
     for (ZoneCellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
         JSScript *script = i.get<JSScript>();
-        JS_ASSERT(script->zone()->isGCSweeping());
+        JS_ASSERT_IF(isGCSweeping(), script->zone()->isGCSweeping());
         if (!script->hasAnyBreakpointsOrStepMode())
             continue;
@@ -153,7 +153,8 @@ Zone::sweepBreakpoints(FreeOp *fop)
         for (Breakpoint *bp = site->firstBreakpoint(); bp; bp = nextbp) {
             nextbp = bp->nextInSite();
             HeapPtrObject &dbgobj = bp->debugger->toJSObjectRef();
-            JS_ASSERT_IF(dbgobj->zone()->isCollecting(), dbgobj->zone()->isGCSweeping());
+            JS_ASSERT_IF(isGCSweeping() && dbgobj->zone()->isCollecting(),
+                         dbgobj->zone()->isGCSweeping());
             bool dying = scriptGone || IsObjectAboutToBeFinalized(&dbgobj);
             JS_ASSERT_IF(!dying, !IsAboutToBeFinalized(&bp->getHandlerRef()));
             if (dying)
@@ -175,7 +175,8 @@ struct Zone : public JS::shadow::Zone,
         Mark,
         MarkGray,
         Sweep,
-        Finished
+        Finished,
+        Compact
     };
     void setGCState(GCState state) {
         JS_ASSERT(runtimeFromMainThread()->isHeapBusy());
@@ -193,7 +194,8 @@ struct Zone : public JS::shadow::Zone,
     // If this returns true, all object tracing must be done with a GC marking
     // tracer.
     bool requireGCTracer() const {
-        return runtimeFromMainThread()->isHeapMajorCollecting() && gcState_ != NoGC;
+        JSRuntime *rt = runtimeFromMainThread();
+        return rt->isHeapMajorCollecting() && !rt->isHeapCompacting() && gcState_ != NoGC;
     }
 
     bool isGCMarking() {
@@ -208,6 +210,8 @@ struct Zone : public JS::shadow::Zone,
     bool isGCMarkingGray() { return gcState_ == MarkGray; }
     bool isGCSweeping() { return gcState_ == Sweep; }
     bool isGCFinished() { return gcState_ == Finished; }
+    bool isGCCompacting() { return gcState_ == Compact; }
+    bool isGCSweepingOrCompacting() { return gcState_ == Sweep || gcState_ == Compact; }
 
     // Get a number that is incremented whenever this zone is collected, and
     // possibly at other times too.
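The zone's collection status is a linear state machine, and the new predicates are plain comparisons against it. A compile-able sketch of the same shape (illustrative only; the real Zone carries far more state, and the names merely mirror the diff):

```cpp
// Sketch of the zone GC-state predicates after this patch (toy class).
enum class GCState { NoGC, Mark, MarkGray, Sweep, Finished, Compact };

struct ToyZone {
    GCState gcState = GCState::NoGC;

    bool isGCSweeping() const { return gcState == GCState::Sweep; }
    bool isGCFinished() const { return gcState == GCState::Finished; }
    bool isGCCompacting() const { return gcState == GCState::Compact; }
    bool isGCSweepingOrCompacting() const {
        return gcState == GCState::Sweep || gcState == GCState::Compact;
    }
};
```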
@@ -87,10 +87,11 @@ void
 js::TraceCycleDetectionSet(JSTracer *trc, js::ObjectSet &set)
 {
     for (js::ObjectSet::Enum e(set); !e.empty(); e.popFront()) {
-        JSObject *prior = e.front();
-        MarkObjectRoot(trc, const_cast<JSObject **>(&e.front()), "cycle detector table entry");
-        if (prior != e.front())
-            e.rekeyFront(e.front());
+        JSObject *key = e.front();
+        trc->setTracingLocation((void *)&e.front());
+        MarkObjectRoot(trc, &key, "cycle detector table entry");
+        if (key != e.front())
+            e.rekeyFront(key);
     }
 }
@@ -100,9 +101,13 @@ JSCompartment::sweepCallsiteClones()
     if (callsiteClones.initialized()) {
         for (CallsiteCloneTable::Enum e(callsiteClones); !e.empty(); e.popFront()) {
             CallsiteCloneKey key = e.front().key();
-            JSFunction *fun = e.front().value();
-            if (!IsScriptMarked(&key.script) || !IsObjectMarked(&fun))
+            if (IsObjectAboutToBeFinalized(&key.original) || IsScriptAboutToBeFinalized(&key.script) ||
+                IsObjectAboutToBeFinalized(e.front().value().unsafeGet()))
+            {
                 e.removeFront();
+            } else if (key != e.front().key()) {
+                e.rekeyFront(key);
+            }
         }
     }
 }
@@ -49,6 +49,14 @@ struct CallsiteCloneKey {
 
     CallsiteCloneKey(JSFunction *f, JSScript *s, uint32_t o) : original(f), script(s), offset(o) {}
 
+    bool operator==(const CallsiteCloneKey& other) {
+        return original == other.original && script == other.script && offset == other.offset;
+    }
+
+    bool operator!=(const CallsiteCloneKey& other) {
+        return !(*this == other);
+    }
+
     typedef CallsiteCloneKey Lookup;
 
     static inline uint32_t hash(CallsiteCloneKey key) {
@@ -58,6 +66,12 @@ struct CallsiteCloneKey {
     static inline bool match(const CallsiteCloneKey &a, const CallsiteCloneKey &b) {
         return a.script == b.script && a.offset == b.offset && a.original == b.original;
     }
+
+    static void rekey(CallsiteCloneKey &k, const CallsiteCloneKey &newKey) {
+        k.original = newKey.original;
+        k.script = newKey.script;
+        k.offset = newKey.offset;
+    }
 };
 
 typedef HashMap<CallsiteCloneKey,
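With equality, hash(), match(), and now rekey(), CallsiteCloneKey carries everything a table with movable keys needs from its hash policy. A skeleton of such a policy (toy types, not part of the patch; only the shape mirrors CallsiteCloneKey):

```cpp
#include <cstdint>

// Skeleton of a "movable keys" hash policy in the style the diff extends.
struct ToyKey {
    void *original;
    void *script;
    uint32_t offset;

    bool operator==(const ToyKey &other) const {
        return original == other.original && script == other.script &&
               offset == other.offset;
    }
    bool operator!=(const ToyKey &other) const { return !(*this == other); }

    static uint32_t hash(const ToyKey &k) {
        return uint32_t(uintptr_t(k.original) ^ uintptr_t(k.script) ^ k.offset);
    }
    // Called by the table when a key object moved: update fields in place so
    // the entry can be rehashed without being removed and reinserted.
    static void rekey(ToyKey &k, const ToyKey &newKey) { k = newKey; }
};
```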
@@ -585,8 +585,12 @@ JSCompartment::sweep(FreeOp *fop, bool releaseTypes)
 
         sweepBaseShapeTable();
         sweepInitialShapeTable();
-        sweepNewTypeObjectTable(newTypeObjects);
-        sweepNewTypeObjectTable(lazyTypeObjects);
+        {
+            gcstats::AutoPhase ap(runtimeFromMainThread()->gc.stats,
+                                  gcstats::PHASE_SWEEP_TABLES_TYPE_OBJECT);
+            sweepNewTypeObjectTable(newTypeObjects);
+            sweepNewTypeObjectTable(lazyTypeObjects);
+        }
         sweepCallsiteClones();
         savedStacks_.sweep(rt);
@@ -656,6 +660,59 @@ JSCompartment::sweepCrossCompartmentWrappers()
     }
 }
 
+#ifdef JSGC_COMPACTING
+
+/*
+ * Fixup wrappers with moved keys or values.
+ */
+void
+JSCompartment::fixupCrossCompartmentWrappers(JSTracer *trc)
+{
+    for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
+        Value val = e.front().value();
+        if (IsForwarded(val)) {
+            val = Forwarded(val);
+            e.front().value().set(val);
+        }
+
+        // CrossCompartmentKey's hash does not depend on the debugger object,
+        // so update it but do not rekey if it changes
+        CrossCompartmentKey key = e.front().key();
+        if (key.debugger)
+            key.debugger = MaybeForwarded(key.debugger);
+        if (key.wrapped && IsForwarded(key.wrapped)) {
+            key.wrapped = Forwarded(key.wrapped);
+            e.rekeyFront(key, key);
+        }
+
+        if (!zone()->isCollecting() && val.isObject()) {
+            // Call the trace hook to update any pointers to relocated things.
+            JSObject *obj = &val.toObject();
+            const Class *clasp = obj->getClass();
+            if (clasp->trace)
+                clasp->trace(trc, obj);
+        }
+    }
+}
+
+void JSCompartment::fixupAfterMovingGC()
+{
+    fixupGlobal();
+    fixupNewTypeObjectTable(newTypeObjects);
+    fixupNewTypeObjectTable(lazyTypeObjects);
+    fixupInitialShapeTable();
+}
+
+void
+JSCompartment::fixupGlobal()
+{
+    GlobalObject *global = *global_.unsafeGet();
+    if (global)
+        global_.set(MaybeForwarded(global));
+}
+
+#endif // JSGC_COMPACTING
+
 void
 JSCompartment::purge()
 {
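fixupCrossCompartmentWrappers() fixes both sides of each entry: values are overwritten in place, while keys are rekeyed only when a hashed field moved. The same two-sided fixup in miniature (toy types, not part of the patch; the real WrapperMap rekeys in place via Enum::rekeyFront() instead of rebuilding the table):

```cpp
#include <cassert>
#include <unordered_map>

struct Obj { Obj *forward = nullptr; };  // moved when forward != nullptr
static Obj *MaybeForwardedObj(Obj *o) { return o->forward ? o->forward : o; }

// Fix up a table whose keys and values may both point at relocated objects.
void fixupTable(std::unordered_map<Obj *, Obj *> &table) {
    std::unordered_map<Obj *, Obj *> fixed;
    for (auto &entry : table)
        fixed[MaybeForwardedObj(entry.first)] = MaybeForwardedObj(entry.second);
    table = std::move(fixed);
}

int main() {
    Obj a, b, bNew;
    b.forward = &bNew;
    std::unordered_map<Obj *, Obj *> t{{&a, &b}};
    fixupTable(t);
    assert(t[&a] == &bNew);
}
```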
@@ -347,6 +347,14 @@ struct JSCompartment
     void purge();
     void clearTables();
 
+#ifdef JSGC_COMPACTING
+    void fixupInitialShapeTable();
+    void fixupNewTypeObjectTable(js::types::TypeObjectWithNewScriptSet &table);
+    void fixupCrossCompartmentWrappers(JSTracer *trc);
+    void fixupAfterMovingGC();
+    void fixupGlobal();
+#endif
+
     bool hasObjectMetadataCallback() const { return objectMetadataCallback; }
     void setObjectMetadataCallback(js::ObjectMetadataCallback callback);
     void forgetObjectMetadataCallback() {
@@ -47,7 +47,9 @@ enum State {
     MARK_ROOTS,
     MARK,
     SWEEP,
-    INVALID
+    INVALID,
+#ifdef JSGC_COMPACTING
+    COMPACT
+#endif
 };
 
 static inline JSGCTraceKind
@@ -1275,6 +1277,7 @@ MaybeForwarded(T t)
 #else
 
+template <typename T> inline bool IsForwarded(T t) { return false; }
 template <typename T> inline T Forwarded(T t) { return t; }
 template <typename T> inline T MaybeForwarded(T t) { return t; }
 
 #endif // JSGC_COMPACTING
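The #else stubs keep call sites unconditional: on non-compacting builds IsForwarded() folds to false and MaybeForwarded() to the identity, so callers such as DebugScopes::sweep() further down need no #ifdef of their own. The pattern in isolation (TOY_COMPACTING is a stand-in macro, not part of the patch):

```cpp
// The stub pattern: define real helpers under the feature macro and inert
// ones otherwise, so callers never need #ifdef themselves.
#ifdef TOY_COMPACTING
template <typename T> inline bool IsForwarded(T t) { return t->isForwarded(); }
template <typename T> inline T Forwarded(T t) { return t->forwarded(); }
template <typename T> inline T MaybeForwarded(T t) { return IsForwarded(t) ? Forwarded(t) : t; }
#else
template <typename T> inline bool IsForwarded(T) { return false; }
template <typename T> inline T Forwarded(T t) { return t; }
template <typename T> inline T MaybeForwarded(T t) { return t; }
#endif
```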
@@ -3440,12 +3440,13 @@ types::TypeMonitorCallSlow(JSContext *cx, JSObject *callee, const CallArgs &args
 }
 
 static inline bool
-IsAboutToBeFinalized(TypeObjectKey *key)
+IsAboutToBeFinalized(TypeObjectKey **keyp)
 {
     /* Mask out the low bit indicating whether this is a type or JS object. */
-    gc::Cell *tmp = reinterpret_cast<gc::Cell *>(uintptr_t(key) & ~1);
+    uintptr_t flagBit = uintptr_t(*keyp) & 1;
+    gc::Cell *tmp = reinterpret_cast<gc::Cell *>(uintptr_t(*keyp) & ~1);
     bool isAboutToBeFinalized = IsCellAboutToBeFinalized(&tmp);
-    JS_ASSERT(tmp == reinterpret_cast<gc::Cell *>(uintptr_t(key) & ~1));
+    *keyp = reinterpret_cast<TypeObjectKey *>(uintptr_t(tmp) | flagBit);
     return isAboutToBeFinalized;
 }
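The rewritten predicate must cope with TypeObjectKey pointers that carry a tag in the low bit: save the bit, mask it off so the GC sees a plain cell pointer, then OR it back onto whatever (possibly relocated) address results. The masking dance in isolation (toy example, not part of the patch):

```cpp
#include <cassert>
#include <cstdint>

int main() {
    int heapThing = 7;
    // Tag a pointer's low bit (alignment guarantees the bit is free).
    uintptr_t tagged = reinterpret_cast<uintptr_t>(&heapThing) | 1;

    uintptr_t flagBit = tagged & 1;                               // save the tag
    int *cell = reinterpret_cast<int *>(tagged & ~uintptr_t(1));  // real pointer
    assert(*cell == 7);

    // ...the GC may update `cell` here if the thing moved...

    uintptr_t retagged = reinterpret_cast<uintptr_t>(cell) | flagBit;
    assert(retagged == tagged);
}
```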
@@ -3943,7 +3944,7 @@ ConstraintTypeSet::sweep(Zone *zone, bool *oom)
         objectCount = 0;
         for (unsigned i = 0; i < oldCapacity; i++) {
             TypeObjectKey *object = oldArray[i];
-            if (object && !IsAboutToBeFinalized(object)) {
+            if (object && !IsAboutToBeFinalized(&object)) {
                 TypeObjectKey **pentry =
                     HashSetInsert<TypeObjectKey *,TypeObjectKey,TypeObjectKey>
                         (zone->types.typeLifoAlloc, objectSet, objectCount, object);
@@ -3961,9 +3962,11 @@ ConstraintTypeSet::sweep(Zone *zone, bool *oom)
         setBaseObjectCount(objectCount);
     } else if (objectCount == 1) {
         TypeObjectKey *object = (TypeObjectKey *) objectSet;
-        if (IsAboutToBeFinalized(object)) {
+        if (IsAboutToBeFinalized(&object)) {
             objectSet = nullptr;
             setBaseObjectCount(0);
+        } else {
+            objectSet = reinterpret_cast<TypeObjectKey **>(object);
         }
     }
@@ -4177,26 +4180,59 @@ TypeCompartment::sweep(FreeOp *fop)
 void
 JSCompartment::sweepNewTypeObjectTable(TypeObjectWithNewScriptSet &table)
 {
-    gcstats::AutoPhase ap(runtimeFromMainThread()->gc.stats,
-                          gcstats::PHASE_SWEEP_TABLES_TYPE_OBJECT);
-
-    JS_ASSERT(zone()->isGCSweeping());
+    JS_ASSERT(zone()->isCollecting());
     if (table.initialized()) {
         for (TypeObjectWithNewScriptSet::Enum e(table); !e.empty(); e.popFront()) {
             TypeObjectWithNewScriptEntry entry = e.front();
-            if (IsTypeObjectAboutToBeFinalized(entry.object.unsafeGet())) {
+            if (IsTypeObjectAboutToBeFinalized(entry.object.unsafeGet()) ||
+                (entry.newFunction && IsObjectAboutToBeFinalized(&entry.newFunction)))
+            {
                 e.removeFront();
-            } else if (entry.newFunction && IsObjectAboutToBeFinalized(&entry.newFunction)) {
-                e.removeFront();
-            } else if (entry.object.unbarrieredGet() != e.front().object.unbarrieredGet()) {
+            } else {
+                /* Any rekeying necessary is handled by fixupNewTypeObjectTable() below. */
+                JS_ASSERT(entry.object == e.front().object);
+                JS_ASSERT(entry.newFunction == e.front().newFunction);
             }
         }
     }
 }
 
+#ifdef JSGC_COMPACTING
+void
+JSCompartment::fixupNewTypeObjectTable(TypeObjectWithNewScriptSet &table)
+{
+    /*
+     * Each entry's hash depends on the object's prototype and we can't tell
+     * whether that has been moved or not in sweepNewTypeObjectTable().
+     */
+    JS_ASSERT(zone()->isCollecting());
+    if (table.initialized()) {
+        for (TypeObjectWithNewScriptSet::Enum e(table); !e.empty(); e.popFront()) {
+            TypeObjectWithNewScriptEntry entry = e.front();
+            bool needRekey = false;
+            if (IsForwarded(entry.object.get())) {
+                entry.object.set(Forwarded(entry.object.get()));
+                needRekey = true;
+            }
+            TaggedProto proto = entry.object->proto();
+            if (proto.isObject() && IsForwarded(proto.toObject())) {
+                proto = TaggedProto(Forwarded(proto.toObject()));
+                needRekey = true;
+            }
+            if (entry.newFunction && IsForwarded(entry.newFunction)) {
+                entry.newFunction = Forwarded(entry.newFunction);
+                needRekey = true;
+            }
+            if (needRekey) {
+                TypeObjectWithNewScriptSet::Lookup lookup(entry.object->clasp(),
+                                                          entry.object->proto(),
+                                                          proto,
+                                                          entry.newFunction);
+                e.rekeyFront(lookup, entry);
+            }
+        }
+    }
+}
+#endif
+
 #ifdef JSGC_HASH_TABLE_CHECKS
@@ -4245,7 +4281,7 @@ TypeCompartment::~TypeCompartment()
 TypeScript::Sweep(FreeOp *fop, JSScript *script, bool *oom)
 {
     JSCompartment *compartment = script->compartment();
-    JS_ASSERT(compartment->zone()->isGCSweeping());
+    JS_ASSERT(compartment->zone()->isGCSweepingOrCompacting());
 
     unsigned num = NumTypeSets(script);
     StackTypeSet *typeArray = script->types->typeArray();
@@ -4324,7 +4360,7 @@ TypeZone::~TypeZone()
 void
 TypeZone::sweep(FreeOp *fop, bool releaseTypes, bool *oom)
 {
-    JS_ASSERT(zone()->isGCSweeping());
+    JS_ASSERT(zone()->isGCSweepingOrCompacting());
 
     JSRuntime *rt = fop->runtime();
@@ -1571,6 +1571,8 @@ FinalizeGenerator(FreeOp *fop, JSObject *obj)
 static void
 MarkGeneratorFrame(JSTracer *trc, JSGenerator *gen)
 {
+    gen->obj = MaybeForwarded(gen->obj.get());
     MarkObject(trc, &gen->obj, "Generator Object");
     MarkValueRange(trc,
                    HeapValueify(gen->fp->generatorArgsSnapshotBegin()),
                    HeapValueify(gen->fp->generatorArgsSnapshotEnd()),
@@ -17,6 +17,7 @@
 #include "vm/Shape-inl.h"
 
 using namespace js;
+using namespace js::gc;
 
 inline HashNumber
 ShapeHasher::hash(const Lookup &l)
@@ -268,6 +269,76 @@ Shape::finalize(FreeOp *fop)
         fop->delete_(kids.toHash());
 }
 
+#ifdef JSGC_COMPACTING
+
+void
+Shape::fixupDictionaryShapeAfterMovingGC()
+{
+    if (!listp)
+        return;
+
+    JS_ASSERT(!IsInsideNursery(reinterpret_cast<Cell *>(listp)));
+    AllocKind kind = reinterpret_cast<Cell *>(listp)->tenuredGetAllocKind();
+    JS_ASSERT(kind == FINALIZE_SHAPE || kind <= FINALIZE_OBJECT_LAST);
+    if (kind == FINALIZE_SHAPE) {
+        // listp points to the parent field of the next shape.
+        Shape *next = reinterpret_cast<Shape *>(uintptr_t(listp) -
+                                                offsetof(Shape, parent));
+        listp = &gc::MaybeForwarded(next)->parent;
+    } else {
+        // listp points to the shape_ field of an object.
+        JSObject *last = reinterpret_cast<JSObject *>(uintptr_t(listp) -
+                                                      offsetof(JSObject, shape_));
+        listp = &gc::MaybeForwarded(last)->shape_;
+    }
+}
+
+void
+Shape::fixupShapeTreeAfterMovingGC()
+{
+    if (kids.isNull())
+        return;
+
+    if (kids.isShape()) {
+        if (gc::IsForwarded(kids.toShape()))
+            kids.setShape(gc::Forwarded(kids.toShape()));
+        return;
+    }
+
+    JS_ASSERT(kids.isHash());
+    KidsHash *kh = kids.toHash();
+    for (KidsHash::Enum e(*kh); !e.empty(); e.popFront()) {
+        Shape *key = e.front();
+        if (!IsForwarded(key))
+            continue;
+
+        key = Forwarded(key);
+        BaseShape *base = key->base();
+        if (IsForwarded(base))
+            base = Forwarded(base);
+        UnownedBaseShape *unowned = base->unowned();
+        if (IsForwarded(unowned))
+            unowned = Forwarded(unowned);
+        StackShape lookup(unowned,
+                          const_cast<Shape *>(key)->propidRef(),
+                          key->slotInfo & Shape::SLOT_MASK,
+                          key->attrs,
+                          key->flags);
+        e.rekeyFront(lookup, key);
+    }
+}
+
+void
+Shape::fixupAfterMovingGC()
+{
+    if (inDictionary())
+        fixupDictionaryShapeAfterMovingGC();
+    else
+        fixupShapeTreeAfterMovingGC();
+}
+
+#endif // JSGC_COMPACTING
+
 #ifdef DEBUG
 
 void
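fixupDictionaryShapeAfterMovingGC() recovers a whole object from a pointer to one of its interior fields by subtracting the field's offsetof, forwards the object, then re-derives the field address. The container-of idiom it relies on, in isolation (standard-layout toy type, not part of the patch):

```cpp
#include <cassert>
#include <cstddef>
#include <cstdint>

struct Node {
    int value;
    Node *parent;  // interior field that other code holds pointers into
};

// Recover the enclosing Node from a pointer to its `parent` field, the same
// offsetof arithmetic the Shape fixup uses (requires a standard-layout type).
Node *nodeFromParentField(Node **listp) {
    return reinterpret_cast<Node *>(reinterpret_cast<uintptr_t>(listp) -
                                    offsetof(Node, parent));
}

int main() {
    Node n{42, nullptr};
    assert(nodeFromParentField(&n.parent) == &n);
    assert(nodeFromParentField(&n.parent)->value == 42);
}
```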
@@ -17,7 +17,7 @@ namespace js {
 class Shape;
 struct StackShape;
 
-struct ShapeHasher {
+struct ShapeHasher : public DefaultHasher<Shape *> {
     typedef Shape *Key;
     typedef StackShape Lookup;
@@ -2834,7 +2834,7 @@ ProxyObject::trace(JSTracer *trc, JSObject *obj)
 
 #ifdef DEBUG
     if (trc->runtime()->gc.isStrictProxyCheckingEnabled() && proxy->is<WrapperObject>()) {
-        JSObject *referent = &proxy->private_().toObject();
+        JSObject *referent = MaybeForwarded(&proxy->private_().toObject());
         if (referent->compartment() != proxy->compartment()) {
             /*
              * Assert that this proxy is tracked in the wrapper map. We maintain
@@ -2842,6 +2842,7 @@ ProxyObject::trace(JSTracer *trc, JSObject *obj)
              */
             Value key = ObjectValue(*referent);
             WrapperMap::Ptr p = proxy->compartment()->lookupWrapper(key);
             JS_ASSERT(p);
+            JS_ASSERT(*p->value().unsafeGet() == ObjectValue(*proxy));
         }
     }
@@ -3339,7 +3339,7 @@ JSScript::markChildren(JSTracer *trc)
     }
 
     if (sourceObject()) {
-        JS_ASSERT(sourceObject()->compartment() == compartment());
+        JS_ASSERT(MaybeForwarded(sourceObject())->compartment() == compartment());
         MarkObject(trc, &sourceObject_, "sourceObject");
     }
@@ -88,6 +88,11 @@ js::UncheckedUnwrap(JSObject *wrapped, bool stopAtOuter, unsigned *flagsp)
         }
         flags |= Wrapper::wrapperHandler(wrapped)->flags();
         wrapped = wrapped->as<ProxyObject>().private_().toObjectOrNull();
+
+        // This can be called from DirectProxyHandler::weakmapKeyDelegate() on a
+        // wrapper whose referent has been moved while it is still unmarked.
+        if (wrapped)
+            wrapped = MaybeForwarded(wrapped);
     }
     if (flagsp)
         *flagsp = flags;
@@ -826,9 +826,10 @@ ArrayBufferObject::finalize(FreeOp *fop, JSObject *obj)
 /* static */ void
 ArrayBufferObject::obj_trace(JSTracer *trc, JSObject *obj)
 {
-    if (!IS_GC_MARKING_TRACER(trc) && !trc->runtime()->isHeapMinorCollecting()
+    JSRuntime *rt = trc->runtime();
+    if (!IS_GC_MARKING_TRACER(trc) && !rt->isHeapMinorCollecting() && !rt->isHeapCompacting()
 #ifdef JSGC_FJGENERATIONAL
-        && !trc->runtime()->isFJMinorCollecting()
+        && !rt->isFJMinorCollecting()
 #endif
         )
     {
@@ -852,15 +853,16 @@ ArrayBufferObject::obj_trace(JSTracer *trc, JSObject *obj)
     if (!viewsHead)
         return;
 
-    buffer.setViewList(UpdateObjectIfRelocated(trc->runtime(), &viewsHead));
+    ArrayBufferViewObject *tmp = viewsHead;
+    buffer.setViewList(UpdateObjectIfRelocated(rt, &tmp));
 
-    if (viewsHead->nextView() == nullptr) {
+    if (tmp->nextView() == nullptr) {
         // Single view: mark it, but only if we're actually doing a GC pass
         // right now. Otherwise, the tracing pass for barrier verification will
         // fail if we add another view and the pointer becomes weak.
         MarkObjectUnbarriered(trc, &viewsHead, "arraybuffer.singleview");
         buffer.setViewListNoBarrier(viewsHead);
-    } else {
+    } else if (!rt->isHeapCompacting()) {
         // Multiple views: do not mark, but append buffer to list.
         ArrayBufferVector &gcLiveArrayBuffers = buffer.compartment()->gcLiveArrayBuffers;
@@ -878,6 +880,19 @@ ArrayBufferObject::obj_trace(JSTracer *trc, JSObject *obj)
         } else {
             CrashAtUnhandlableOOM("OOM while updating live array buffers");
         }
+    } else {
+        // If we're fixing up pointers after compacting then trace everything.
+        ArrayBufferViewObject *prev = nullptr;
+        ArrayBufferViewObject *view = viewsHead;
+        while (view) {
+            JS_ASSERT(buffer.compartment() == MaybeForwarded(view)->compartment());
+            MarkObjectUnbarriered(trc, &view, "arraybuffer.singleview");
+            if (prev)
+                prev->setNextView(view);
+            else
+                buffer.setViewListNoBarrier(view);
+            view = view->nextView();
+        }
     }
 }
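The new compacting branch re-walks the view list, tracing each node (which may update the pointer) and re-stitching the previous link to the possibly-new address. A simplified standalone version of that relink loop (toy types, not part of the patch; the real code interleaves this with marking):

```cpp
#include <cassert>

struct View {
    View *forward = nullptr;  // set when relocated
    View *next = nullptr;
};
static View *MaybeForwardedView(View *v) { return v->forward ? v->forward : v; }

// Walk a singly-linked list whose nodes may have moved, updating each link
// to the relocated address as we go.
View *relink(View *head) {
    head = MaybeForwardedView(head);
    for (View *v = head; v; v = v->next)
        v->next = v->next ? MaybeForwardedView(v->next) : nullptr;
    return head;
}

int main() {
    Obj: ;
    View a, b, bNew;
    b.forward = &bNew;
    a.next = &b;
    View *head = relink(&a);
    assert(head == &a && a.next == &bNew);
}
```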
@@ -1707,7 +1707,7 @@ Debugger::trace(JSTracer *trc)
      */
     for (FrameMap::Range r = frames.all(); !r.empty(); r.popFront()) {
         RelocatablePtrObject &frameobj = r.front().value();
-        JS_ASSERT(frameobj->getPrivate());
+        JS_ASSERT(MaybeForwarded(frameobj.get())->getPrivate());
         MarkObject(trc, &frameobj, "live Debugger.Frame");
     }
 
@@ -1759,9 +1759,9 @@ Debugger::sweepAll(FreeOp *fop)
             if (IsObjectAboutToBeFinalized(&global)) {
                 // See infallibility note above.
                 detachAllDebuggersFromGlobal(fop, global, &e);
-            }
-            else if (global != e.front())
+            } else if (global != e.front()) {
                 e.rekeyFront(global);
+            }
         }
     }
 }
@@ -127,6 +127,9 @@ class DebuggerWeakMap : private WeakMap<Key, Value, DefaultHasher<Key> >
             if (gc::IsAboutToBeFinalized(&k)) {
                 e.removeFront();
                 decZoneCount(k->zone());
+            } else {
+                // markKeys() should have done any necessary relocation.
+                JS_ASSERT(k == e.front().key());
             }
         }
         Base::assertEntriesNotAboutToBeFinalized();
@@ -937,13 +937,14 @@ struct JSRuntime : public JS::shadow::Runtime,
     /* Garbage collector state, used by jsgc.c. */
     js::gc::GCRuntime gc;
 
-    /* Garbase collector state has been sucessfully initialized. */
+    /* Garbage collector state has been sucessfully initialized. */
     bool gcInitialized;
 
     bool isHeapBusy() { return gc.isHeapBusy(); }
     bool isHeapMajorCollecting() { return gc.isHeapMajorCollecting(); }
     bool isHeapMinorCollecting() { return gc.isHeapMinorCollecting(); }
     bool isHeapCollecting() { return gc.isHeapCollecting(); }
+    bool isHeapCompacting() { return gc.isHeapCompacting(); }
 
     bool isFJMinorCollecting() { return gc.isFJMinorCollecting(); }
@@ -24,6 +24,7 @@
 #include "vm/Stack-inl.h"
 
 using namespace js;
+using namespace js::gc;
 using namespace js::types;
 
 using mozilla::PodZero;
@@ -1784,6 +1785,19 @@ DebugScopes::sweep(JSRuntime *rt)
              */
            liveScopes.remove(&(*debugScope)->scope());
             e.removeFront();
+        } else {
+            ScopeIterKey key = e.front().key();
+            bool needsUpdate = false;
+            if (IsForwarded(key.cur())) {
+                key.updateCur(js::gc::Forwarded(key.cur()));
+                needsUpdate = true;
+            }
+            if (IsForwarded(key.staticScope())) {
+                key.updateStaticScope(Forwarded(key.staticScope()));
+                needsUpdate = true;
+            }
+            if (needsUpdate)
+                e.rekeyFront(key);
         }
     }
 
@@ -1794,10 +1808,10 @@ DebugScopes::sweep(JSRuntime *rt)
          * Scopes can be finalized when a debugger-synthesized ScopeObject is
          * no longer reachable via its DebugScopeObject.
          */
-        if (IsObjectAboutToBeFinalized(&scope)) {
+        if (IsObjectAboutToBeFinalized(&scope))
             e.removeFront();
-            continue;
-        }
-        else if (scope != e.front().key())
+        else if (scope != e.front().key())
             e.rekeyFront(scope);
     }
 }
@@ -704,6 +704,9 @@ class ScopeIterKey
     JSObject *enclosingScope() const { return cur_; }
     JSObject *&enclosingScope() { return cur_; }
 
+    void updateCur(JSObject *obj) { cur_ = obj; }
+    void updateStaticScope(NestedScopeObject *obj) { staticScope_ = obj; }
+
     /* For use as hash policy */
     typedef ScopeIterKey Lookup;
     static HashNumber hash(ScopeIterKey si);
@@ -220,6 +220,15 @@ GetShapeAttributes(JSObject *obj, Shape *shape)
     return shape->attributes();
 }
 
+#ifdef JSGC_COMPACTING
+inline void
+BaseShape::fixupAfterMovingGC()
+{
+    if (hasTable())
+        table().fixupAfterMovingGC();
+}
+#endif
+
 } /* namespace js */
 
 #endif /* vm_Shape_inl_h */
@@ -248,6 +248,20 @@ ShapeTable::search(jsid id, bool adding)
     return nullptr;
 }
 
+#ifdef JSGC_COMPACTING
+void
+ShapeTable::fixupAfterMovingGC()
+{
+    int log2 = HASH_BITS - hashShift;
+    uint32_t size = JS_BIT(log2);
+    for (HashNumber i = 0; i < size; i++) {
+        Shape *shape = SHAPE_FETCH(&entries[i]);
+        if (shape && IsForwarded(shape))
+            SHAPE_STORE_PRESERVING_COLLISION(&entries[i], Forwarded(shape));
+    }
+}
+#endif
+
 bool
 ShapeTable::change(int log2Delta, ThreadSafeContext *cx)
 {
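ShapeTable stores its capacity as a hash shift rather than an entry count, so the fixup derives the number of slots as JS_BIT(HASH_BITS - hashShift) before scanning every slot. The size arithmetic in isolation (toy values, not part of the patch):

```cpp
#include <cassert>
#include <cstdint>

int main() {
    const uint32_t HASH_BITS = 32;
    uint32_t hashShift = 27;                // stored instead of the size
    uint32_t log2 = HASH_BITS - hashShift;  // table holds 2^log2 entries
    uint32_t size = uint32_t(1) << log2;    // what JS_BIT(log2) expands to
    assert(size == 32);
}
```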
@@ -429,6 +443,11 @@ js::ObjectImpl::toDictionaryMode(ThreadSafeContext *cx)
 {
     JS_ASSERT(!inDictionaryMode());
 
+#ifdef JSGC_COMPACTING
+    // TODO: This crashes if we run a compacting GC here.
+    js::gc::AutoSuppressGC nogc(zone()->runtimeFromAnyThread());
+#endif
+
     /* We allocate the shapes from cx->compartment(), so make sure it's right. */
     JS_ASSERT(cx->isInsideCurrentCompartment(this));
@@ -1529,8 +1548,13 @@ JSCompartment::sweepBaseShapeTable()
     if (baseShapes.initialized()) {
         for (BaseShapeSet::Enum e(baseShapes); !e.empty(); e.popFront()) {
             UnownedBaseShape *base = e.front().unbarrieredGet();
-            if (IsBaseShapeAboutToBeFinalized(&base))
+            if (IsBaseShapeAboutToBeFinalized(&base)) {
                 e.removeFront();
+            } else if (base != e.front()) {
+                StackBaseShape sbase(base);
+                ReadBarriered<UnownedBaseShape *> b(base);
+                e.rekeyFront(&sbase, b);
+            }
         }
     }
 }
@@ -1823,7 +1847,9 @@ JSCompartment::sweepInitialShapeTable()
             const InitialShapeEntry &entry = e.front();
             Shape *shape = entry.shape.unbarrieredGet();
             JSObject *proto = entry.proto.raw();
-            if (IsShapeAboutToBeFinalized(&shape) || (entry.proto.isObject() && IsObjectAboutToBeFinalized(&proto))) {
+            if (IsShapeAboutToBeFinalized(&shape) ||
+                (entry.proto.isObject() && IsObjectAboutToBeFinalized(&proto)))
+            {
                 e.removeFront();
             } else {
 #ifdef DEBUG
@@ -1841,6 +1867,47 @@ JSCompartment::sweepInitialShapeTable()
     }
 }
 
+#ifdef JSGC_COMPACTING
+void
+JSCompartment::fixupInitialShapeTable()
+{
+    if (!initialShapes.initialized())
+        return;
+
+    for (InitialShapeSet::Enum e(initialShapes); !e.empty(); e.popFront()) {
+        InitialShapeEntry entry = e.front();
+        bool needRekey = false;
+        if (IsForwarded(entry.shape.get())) {
+            entry.shape.set(Forwarded(entry.shape.get()));
+            needRekey = true;
+        }
+        if (entry.proto.isObject() && IsForwarded(entry.proto.toObject())) {
+            entry.proto = TaggedProto(Forwarded(entry.proto.toObject()));
+            needRekey = true;
+        }
+        JSObject *parent = entry.shape->getObjectParent();
+        if (parent) {
+            parent = MaybeForwarded(parent);
+            needRekey = true;
+        }
+        JSObject *metadata = entry.shape->getObjectMetadata();
+        if (metadata) {
+            metadata = MaybeForwarded(metadata);
+            needRekey = true;
+        }
+        if (needRekey) {
+            InitialShapeEntry::Lookup relookup(entry.shape->getObjectClass(),
+                                               entry.proto,
+                                               parent,
+                                               metadata,
+                                               entry.shape->numFixedSlots(),
+                                               entry.shape->getObjectFlags());
+            e.rekeyFront(relookup, entry);
+        }
+    }
+}
+#endif // JSGC_COMPACTING
+
 void
 AutoRooterGetterSetter::Inner::trace(JSTracer *trc)
 {
@@ -190,6 +190,11 @@ struct ShapeTable {
     bool init(ThreadSafeContext *cx, Shape *lastProp);
     bool change(int log2Delta, ThreadSafeContext *cx);
     Shape **search(jsid id, bool adding);
+
+#ifdef JSGC_COMPACTING
+    /* Update entries whose shapes have been moved */
+    void fixupAfterMovingGC();
+#endif
 };
 
 /*
@@ -505,6 +510,10 @@ class BaseShape : public gc::BarrieredCell<BaseShape>
         gc::MarkObject(trc, &metadata, "metadata");
     }
 
+#ifdef JSGC_COMPACTING
+    void fixupAfterMovingGC();
+#endif
+
   private:
     static void staticAsserts() {
         JS_STATIC_ASSERT(offsetof(BaseShape, clasp_) == offsetof(js::shadow::BaseShape, clasp_));
@@ -550,7 +559,7 @@ BaseShape::baseUnowned()
 }
 
 /* Entries for the per-compartment baseShapes set of unowned base shapes. */
-struct StackBaseShape
+struct StackBaseShape : public DefaultHasher<ReadBarrieredUnownedBaseShape>
 {
     typedef const StackBaseShape *Lookup;
 
@@ -1028,10 +1037,19 @@ class Shape : public gc::BarrieredCell<Shape>
     inline Shape *search(ExclusiveContext *cx, jsid id);
     inline Shape *searchLinear(jsid id);
 
+#ifdef JSGC_COMPACTING
+    void fixupAfterMovingGC();
+#endif
+
     /* For JIT usage */
     static inline size_t offsetOfBase() { return offsetof(Shape, base_); }
 
   private:
+#ifdef JSGC_COMPACTING
+    void fixupDictionaryShapeAfterMovingGC();
+    void fixupShapeTreeAfterMovingGC();
+#endif
+
     static void staticAsserts() {
         JS_STATIC_ASSERT(offsetof(Shape, base_) == offsetof(js::shadow::Shape, base));
         JS_STATIC_ASSERT(offsetof(Shape, slotInfo) == offsetof(js::shadow::Shape, slotInfo));