Bug 1137780 - Fix marking JitcodeGlobalTable. (r=terrence)

This commit is contained in:
Shu-yu Guo 2015-03-04 23:17:23 -08:00
parent 8ad4f1923e
commit 4381ed508c
12 changed files with 403 additions and 122 deletions

View File

@ -889,6 +889,7 @@ class GCRuntime
void markGrayReferencesInCurrentGroup(gcstats::Phase phase);
void markAllWeakReferences(gcstats::Phase phase);
void markAllGrayReferences(gcstats::Phase phase);
void markJitcodeGlobalTable();
void beginSweepPhase(bool lastGC);
void findZoneGroups();

View File

@ -542,6 +542,12 @@ Mark##base##RootRange(JSTracer *trc, size_t len, type **vec, const char *name)
} \
\
bool \
Is##base##MarkedFromAnyThread(type **thingp) \
{ \
return IsMarkedFromAnyThread<type>(thingp); \
} \
\
bool \
Is##base##Marked(type **thingp) \
{ \
return IsMarked<type>(thingp); \
@ -802,22 +808,6 @@ gc::MarkValueRoot(JSTracer *trc, Value *v, const char *name)
MarkValueInternal(trc, v);
}
// NOTE(review): pre-patch version of TypeSet::MarkTypeRoot, removed by this
// commit (hunk "@ -802,22 +808,6"). Its body is re-added later in the file as
// MarkTypeUnbarriered, with MarkTypeRoot reduced to a thin wrapper.
void
TypeSet::MarkTypeRoot(JSTracer *trc, TypeSet::Type *v, const char *name)
{
// Asserts this tracer is performing root marking.
JS_ROOT_MARKING_ASSERT(trc);
trc->setTracingName(name);
if (v->isSingletonUnchecked()) {
// Mark the singleton object; the pointer is written back into *v
// afterward, so an updated (e.g. relocated) pointer is preserved.
JSObject *obj = v->singleton();
MarkInternal(trc, &obj);
*v = TypeSet::ObjectType(obj);
} else if (v->isGroupUnchecked()) {
// Same pattern for the object-group case.
ObjectGroup *group = v->group();
MarkInternal(trc, &group);
*v = TypeSet::ObjectType(group);
}
}
void
gc::MarkValueRange(JSTracer *trc, size_t len, BarrieredBase<Value> *vec, const char *name)
{
@ -903,6 +893,30 @@ gc::IsValueAboutToBeFinalizedFromAnyThread(Value *v)
return rv;
}
/*** Type Marking ***/
// Root-marking entry point: asserts the tracer is marking roots, then defers
// to the unbarriered variant.
void
TypeSet::MarkTypeRoot(JSTracer *trc, TypeSet::Type *v, const char *name)
{
JS_ROOT_MARKING_ASSERT(trc);
MarkTypeUnbarriered(trc, v, name);
}
// Mark the GC thing (singleton object or object group) a TypeSet::Type refers
// to, with no barrier. The pointer is written back into *v after marking —
// presumably so a pointer updated by a moving GC is preserved (TODO confirm).
// Types that are neither singleton nor group carry no GC pointer and are left
// untouched.
void
TypeSet::MarkTypeUnbarriered(JSTracer *trc, TypeSet::Type *v, const char *name)
{
trc->setTracingName(name);
if (v->isSingletonUnchecked()) {
// Singleton case: mark the JSObject and store it back.
JSObject *obj = v->singleton();
MarkInternal(trc, &obj);
*v = TypeSet::ObjectType(obj);
} else if (v->isGroupUnchecked()) {
// Group case: mark the ObjectGroup and store it back.
ObjectGroup *group = v->group();
MarkInternal(trc, &group);
*v = TypeSet::ObjectType(group);
}
}
/*** Slot Marking ***/
bool

View File

@ -93,6 +93,7 @@ void Mark##base##Range(JSTracer *trc, size_t len, HeapPtr<type*> *thing, const c
void Mark##base##RootRange(JSTracer *trc, size_t len, type **thing, const char *name); \
bool Is##base##Marked(type **thingp); \
bool Is##base##Marked(BarrieredBase<type*> *thingp); \
bool Is##base##MarkedFromAnyThread(type **thingp); \
bool Is##base##MarkedFromAnyThread(BarrieredBase<type*> *thingp); \
bool Is##base##AboutToBeFinalized(type **thingp); \
bool Is##base##AboutToBeFinalizedFromAnyThread(type **thingp); \

View File

@ -337,6 +337,7 @@ static const PhaseInfo phases[] = {
{ PHASE_SWEEP_MARK_INCOMING_GRAY, "Mark Incoming Gray Pointers", PHASE_SWEEP_MARK },
{ PHASE_SWEEP_MARK_GRAY, "Mark Gray", PHASE_SWEEP_MARK },
{ PHASE_SWEEP_MARK_GRAY_WEAK, "Mark Gray and Weak", PHASE_SWEEP_MARK },
{ PHASE_SWEEP_MARK_JITCODE_GLOBAL_TABLE, "Mark JitcodeGlobalTable", PHASE_SWEEP_MARK },
{ PHASE_FINALIZE_START, "Finalize Start Callback", PHASE_SWEEP },
{ PHASE_SWEEP_ATOMS, "Sweep Atoms", PHASE_SWEEP },
{ PHASE_SWEEP_SYMBOL_REGISTRY, "Sweep Symbol Registry", PHASE_SWEEP },

View File

@ -43,6 +43,7 @@ enum Phase {
PHASE_SWEEP_MARK_INCOMING_GRAY,
PHASE_SWEEP_MARK_GRAY,
PHASE_SWEEP_MARK_GRAY_WEAK,
PHASE_SWEEP_MARK_JITCODE_GLOBAL_TABLE,
PHASE_FINALIZE_START,
PHASE_SWEEP_ATOMS,
PHASE_SWEEP_SYMBOL_REGISTRY,

View File

@ -488,6 +488,7 @@ jit::LazyLinkTopActivation(JSContext *cx)
return script->baselineOrIonRawPointer();
}
/* static */ void
JitRuntime::Mark(JSTracer *trc)
{
@ -497,8 +498,11 @@ JitRuntime::Mark(JSTracer *trc)
JitCode *code = i.get<JitCode>();
MarkJitCodeRoot(trc, &code, "wrapper");
}
}
// Mark all entries in the jitcode global table.
/* static */ void
JitRuntime::MarkJitcodeGlobalTable(JSTracer *trc)
{
if (trc->runtime()->hasJitRuntime() &&
trc->runtime()->jitRuntime()->hasJitcodeGlobalTable())
{
@ -506,6 +510,13 @@ JitRuntime::Mark(JSTracer *trc)
}
}
// Sweep the runtime-wide jitcode global table, guarded on the runtime
// actually having a JitRuntime and a table.
/* static */ void
JitRuntime::SweepJitcodeGlobalTable(JSRuntime *rt)
{
if (rt->hasJitRuntime() && rt->jitRuntime()->hasJitcodeGlobalTable())
rt->jitRuntime()->getJitcodeGlobalTable()->sweep(rt);
}
void
JitCompartment::mark(JSTracer *trc, JSCompartment *compartment)
{
@ -664,13 +675,15 @@ JitCode::fixupNurseryObjects(JSContext *cx, const ObjectVector &nurseryObjects)
void
JitCode::finalize(FreeOp *fop)
{
// If this jitcode had a bytecode map, it must have already been removed.
#ifdef DEBUG
JSRuntime *rt = fop->runtime();
// If this jitcode has a bytecode map, de-register it.
if (hasBytecodeMap_) {
JitcodeGlobalEntry result;
MOZ_ASSERT(rt->jitRuntime()->hasJitcodeGlobalTable());
rt->jitRuntime()->getJitcodeGlobalTable()->releaseEntry(raw(), rt);
MOZ_ASSERT(!rt->jitRuntime()->getJitcodeGlobalTable()->lookup(raw(), &result, rt));
}
#endif
// Buffer can be freed at any time hereafter. Catch use-after-free bugs.
// Don't do this if the Ion code is protected, as the signal handler will

View File

@ -257,6 +257,8 @@ class JitRuntime
void freeOsrTempData();
static void Mark(JSTracer *trc);
static void MarkJitcodeGlobalTable(JSTracer *trc);
static void SweepJitcodeGlobalTable(JSRuntime *rt);
ExecutableAllocator &execAlloc() {
return execAlloc_;

View File

@ -439,6 +439,27 @@ JitcodeGlobalTable::lookupForSampler(void *ptr, JitcodeGlobalEntry *result, JSRu
entry->setGeneration(sampleBufferGen);
// IonCache entries must keep their corresponding Ion entries alive.
if (entry->isIonCache()) {
JitcodeGlobalEntry rejoinEntry;
RejoinEntry(rt, entry->ionCacheEntry(), ptr, &rejoinEntry);
rejoinEntry.setGeneration(sampleBufferGen);
}
#ifdef DEBUG
// JitcodeGlobalEntries are marked during the beginning of the sweep phase
// (PHASE_SWEEP_MARK_JITCODE_GLOBAL_TABLE). A read barrier is not needed,
// as any JS frames sampled during the sweep phase of the GC must be on
// stack, and on-stack frames must already be marked at the beginning of
// the sweep phase. This assumption is verified below.
if (rt->isHeapBusy() &&
rt->gc.stats.currentPhase() >= gcstats::PHASE_SWEEP &&
rt->gc.stats.currentPhase() <= gcstats::PHASE_GC_END)
{
MOZ_ASSERT(entry->isMarkedFromAnyThread(rt));
}
#endif
*result = *entry;
return true;
}
@ -518,57 +539,40 @@ JitcodeGlobalTable::addEntry(const JitcodeGlobalEntry &entry, JSRuntime *rt)
}
// NOTE(review): this hunk interleaves the removed removeEntry(void*, rt)
// overload (query/searchTower-based lookup) with the added
// removeEntry(entry, prevTower, rt) overload, which takes the entry and its
// predecessor tower directly and asserts sampling is already suppressed by
// the caller. As rendered (diff markers stripped) the text is not compilable.
void
JitcodeGlobalTable::removeEntry(void *startAddr, JSRuntime *rt)
JitcodeGlobalTable::removeEntry(JitcodeGlobalEntry &entry, JitcodeGlobalEntry **prevTower,
JSRuntime *rt)
{
JitcodeGlobalEntry query = JitcodeGlobalEntry::MakeQuery(startAddr);
JitcodeGlobalEntry *searchTower[JitcodeSkiplistTower::MAX_HEIGHT];
searchInternal(query, searchTower);
MOZ_ASSERT(!rt->isProfilerSamplingEnabled());
JitcodeGlobalEntry *queryEntry;
if (searchTower[0]) {
MOZ_ASSERT(searchTower[0]->compareTo(query) < 0);
queryEntry = searchTower[0]->tower_->next(0);
} else {
MOZ_ASSERT(startTower_[0]);
queryEntry = startTower_[0];
}
MOZ_ASSERT(queryEntry->compareTo(query) == 0);
{
// Suppress profiler sampling while table is being mutated.
AutoSuppressProfilerSampling suppressSampling(rt);
// Unlink query entry.
for (int level = queryEntry->tower_->height() - 1; level >= 0; level--) {
JitcodeGlobalEntry *searchTowerEntry = searchTower[level];
if (searchTowerEntry) {
MOZ_ASSERT(searchTowerEntry);
searchTowerEntry->tower_->setNext(level, queryEntry->tower_->next(level));
} else {
startTower_[level] = queryEntry->tower_->next(level);
}
// Unlink query entry.
for (int level = entry.tower_->height() - 1; level >= 0; level--) {
JitcodeGlobalEntry *prevTowerEntry = prevTower[level];
if (prevTowerEntry) {
MOZ_ASSERT(prevTowerEntry->tower_->next(level) == &entry);
prevTowerEntry->tower_->setNext(level, entry.tower_->next(level));
} else {
startTower_[level] = entry.tower_->next(level);
}
}
skiplistSize_--;
// verifySkiplist(); - disabled for release.
}
skiplistSize_--;
// verifySkiplist(); - disabled for release.
// Entry has been unlinked.
queryEntry->destroy();
queryEntry->tower_->addToFreeList(&(freeTowers_[queryEntry->tower_->height() - 1]));
queryEntry->tower_ = nullptr;
*queryEntry = JitcodeGlobalEntry();
queryEntry->addToFreeList(&freeEntries_);
entry.destroy();
entry.tower_->addToFreeList(&(freeTowers_[entry.tower_->height() - 1]));
entry.tower_ = nullptr;
entry = JitcodeGlobalEntry();
entry.addToFreeList(&freeEntries_);
}
// NOTE(review): interleaved old releaseEntry(void*, rt) and new
// releaseEntry(entry, prevTower, rt). Both versions assert (debug-only) that
// a still-sampled entry is never released, then delegate to removeEntry.
void
JitcodeGlobalTable::releaseEntry(void *startAddr, JSRuntime *rt)
JitcodeGlobalTable::releaseEntry(JitcodeGlobalEntry &entry, JitcodeGlobalEntry **prevTower,
JSRuntime *rt)
{
mozilla::DebugOnly<JitcodeGlobalEntry *> entry = lookupInternal(startAddr);
mozilla::DebugOnly<uint32_t> gen = rt->profilerSampleBufferGen();
mozilla::DebugOnly<uint32_t> lapCount = rt->profilerSampleBufferLapCount();
MOZ_ASSERT(entry);
MOZ_ASSERT_IF(gen != UINT32_MAX, !entry->isSampled(gen, lapCount));
removeEntry(startAddr, rt);
MOZ_ASSERT_IF(gen != UINT32_MAX, !entry.isSampled(gen, lapCount));
removeEntry(entry, prevTower, rt);
}
void
@ -714,71 +718,195 @@ JitcodeGlobalTable::verifySkiplist()
}
#endif // DEBUG
// NOTE(review): pre-patch trace-callback functor removed by this commit; its
// generation/sampling logic is folded directly into the rewritten
// JitcodeGlobalTable::mark.
struct JitcodeMapEntryTraceCallback
{
JSTracer *trc;
uint32_t gen;       // profiler sample-buffer generation at construction
uint32_t lapCount;  // profiler sample-buffer lap count at construction
explicit JitcodeMapEntryTraceCallback(JSTracer *trc)
: trc(trc),
gen(trc->runtime()->profilerSampleBufferGen()),
lapCount(trc->runtime()->profilerSampleBufferLapCount())
{
// With the profiler disabled, force every entry to compare as unsampled.
if (!trc->runtime()->spsProfiler.enabled())
gen = UINT32_MAX;
}
void operator()(JitcodeGlobalEntry &entry) {
// If an entry is not sampled, reset its generation to
// the invalid generation, and skip it.
if (!entry.isSampled(gen, lapCount)) {
entry.setGeneration(UINT32_MAX);
return;
}
// Mark jitcode pointed to by this entry.
entry.baseEntry().markJitcode(trc);
// Mark ion entry if necessary.
if (entry.isIon())
entry.ionEntry().mark(trc);
}
};
// Mark table entries that are (or may be) referenced by the profiler's sample
// buffer. Runs at the start of the sweep phase (asserted below) so the
// sampler never needs read barriers; see the block comment inside.
// NOTE(review): this hunk interleaves removed pre-patch lines (the
// traceCallback construction and the manual startTower_ walk) with the new
// Range-based loop; as rendered it is not compilable as-is.
void
JitcodeGlobalTable::mark(JSTracer *trc)
{
// JitcodeGlobalTable must keep entries that are in the sampler buffer
// alive. This conditionality is akin to holding the entries weakly.
//
// If this table were marked at the beginning of the mark phase, then
// sampling would require a read barrier for sampling in between
// incremental GC slices. However, invoking read barriers from the sampler
// is wildly unsafe. The sampler may run at any time, including during GC
// itself.
//
// Instead, JitcodeGlobalTable is marked at the beginning of the sweep
// phase. The key assumption is the following. At the beginning of the
// sweep phase, any JS frames that the sampler may put in its buffer that
// are not already there at the beginning of the mark phase must have
// already been marked, as either 1) the frame was on-stack at the
// beginning of the sweep phase, or 2) the frame was pushed between
// incremental sweep slices. Frames of case 1) are already marked. Frames
// of case 2) must have been reachable to have been newly pushed, and thus
// are already marked.
//
// The approach above obviates the need for read barriers. The assumption
// above is checked in JitcodeGlobalTable::lookupForSampler.
MOZ_ASSERT(trc->runtime()->gc.stats.currentPhase() ==
gcstats::PHASE_SWEEP_MARK_JITCODE_GLOBAL_TABLE);
AutoSuppressProfilerSampling suppressSampling(trc->runtime());
JitcodeMapEntryTraceCallback traceCallback(trc);
uint32_t gen = trc->runtime()->profilerSampleBufferGen();
uint32_t lapCount = trc->runtime()->profilerSampleBufferLapCount();
// With the profiler disabled, UINT32_MAX makes every entry compare as
// unsampled below.
if (!trc->runtime()->spsProfiler.enabled())
gen = UINT32_MAX;
// Find start entry.
JitcodeGlobalEntry *entry = startTower_[0];
while (entry != nullptr) {
traceCallback(*entry);
entry = entry->tower_->next(0);
for (Range r(*this); !r.empty(); r.popFront()) {
JitcodeGlobalEntry *entry = r.front();
// If an entry is not sampled, reset its generation to the invalid
// generation, and conditionally mark the rest of the entry if its
// JitCode is not already marked. This conditional marking ensures
// that so long as the JitCode *may* be sampled, we keep any
// information that may be handed out to the sampler, like tracked
// types used by optimizations and scripts used for pc to line number
// mapping, alive as well.
if (!entry->isSampled(gen, lapCount)) {
entry->setGeneration(UINT32_MAX);
if (!entry->baseEntry().isJitcodeMarkedFromAnyThread())
continue;
}
// The table is runtime-wide. Not all zones may be participating in
// the GC.
if (!entry->zone()->isCollecting() || entry->zone()->isGCFinished())
continue;
entry->mark(trc);
}
}
// Sweep the table: remove entries whose JitCode is about to be finalized and
// sweep (pointer-fixup/assert) the survivors. Sampling is suppressed for the
// duration since the table may be mutated.
void
JitcodeGlobalTable::sweep(JSRuntime *rt)
{
AutoSuppressProfilerSampling suppressSampling(rt);
for (Enum e(*this, rt); !e.empty(); e.popFront()) {
JitcodeGlobalEntry *entry = e.front();
// The table is runtime-wide; skip entries in zones not participating
// in (or already finished with) this GC.
if (!entry->zone()->isCollecting() || entry->zone()->isGCFinished())
continue;
if (entry->baseEntry().isJitcodeAboutToBeFinalized())
e.removeFront();
else
entry->sweep();
}
}
void
JitcodeGlobalEntry::BaseEntry::markJitcode(JSTracer *trc)
{
// NOTE(review): the next two lines are the removed (MarkJitCodeRoot) and
// added (MarkJitCodeUnbarriered) sides of a one-line diff. Also note the
// tracing name is missing an 'e' ("jitcodglobaltable") — a cosmetic typo
// that surfaces in tracer output; left unchanged here as it is runtime text.
MarkJitCodeRoot(trc, &jitcode_, "jitcodglobaltable-baseentry-jitcode");
MarkJitCodeUnbarriered(trc, &jitcode_, "jitcodglobaltable-baseentry-jitcode");
}
// Weak-pointer queries on the entry's JitCode used during sweep-phase marking
// and sweeping of the table.
bool
JitcodeGlobalEntry::BaseEntry::isJitcodeMarkedFromAnyThread()
{
return IsJitCodeMarkedFromAnyThread(&jitcode_);
}
bool
JitcodeGlobalEntry::BaseEntry::isJitcodeAboutToBeFinalized()
{
return IsJitCodeAboutToBeFinalized(&jitcode_);
}
// Mark the script a baseline entry refers to (no barrier).
void
JitcodeGlobalEntry::BaselineEntry::mark(JSTracer *trc)
{
MarkScriptUnbarriered(trc, &script_, "jitcodeglobaltable-baselineentry-script");
}
// Sweeping asserts the script is not dying — a swept-but-kept entry must have
// had its script marked. The call is made unconditionally (MOZ_ALWAYS_FALSE
// evaluates its argument in release builds), presumably so the script_
// pointer is also updated if the script moved — TODO confirm.
void
JitcodeGlobalEntry::BaselineEntry::sweep()
{
MOZ_ALWAYS_FALSE(IsScriptAboutToBeFinalized(&script_));
}
bool
JitcodeGlobalEntry::BaselineEntry::isMarkedFromAnyThread()
{
return IsScriptMarkedFromAnyThread(&script_);
}
// Mark everything an Ion entry hands out to the sampler: each script in the
// sized script list, and (if present) each tracked optimization type plus its
// addendum script or constructor.
void
JitcodeGlobalEntry::IonEntry::mark(JSTracer *trc)
{
for (unsigned i = 0; i < numScripts(); i++) {
MarkScriptUnbarriered(trc, &sizedScriptList()->pairs[i].script,
"jitcodeglobaltable-ionentry-script");
}
if (!optsAllTypes_)
return;
for (IonTrackedTypeWithAddendum *iter = optsAllTypes_->begin();
iter != optsAllTypes_->end(); iter++)
{
// NOTE(review): the next two lines are the removed (MarkTypeRoot) and
// added (MarkTypeUnbarriered) sides of a one-line diff.
TypeSet::MarkTypeRoot(trc, &(iter->type), "jitcodeglobaltable-ionentry-type");
TypeSet::MarkTypeUnbarriered(trc, &(iter->type), "jitcodeglobaltable-ionentry-type");
if (iter->hasAllocationSite()) {
MarkScriptUnbarriered(trc, &iter->script,
"jitcodeglobaltable-ionentry-type-addendum-script");
} else if (iter->hasConstructor()) {
MarkObjectUnbarriered(trc, &iter->constructor,
"jitcodeglobaltable-ionentry-type-addendum-constructor");
}
}
}
// Sweep an Ion entry: assert none of its referents are dying, while letting
// the IsAboutToBeFinalized calls update pointers for anything that moved.
void
JitcodeGlobalEntry::IonEntry::sweep()
{
for (unsigned i = 0; i < numScripts(); i++)
MOZ_ALWAYS_FALSE(IsScriptAboutToBeFinalized(&sizedScriptList()->pairs[i].script));
if (!optsAllTypes_)
return;
for (IonTrackedTypeWithAddendum *iter = optsAllTypes_->begin();
iter != optsAllTypes_->end(); iter++)
{
// Types may move under compacting GC. This method is only called on
// entries that are sampled, and thus are not about to be finalized.
MOZ_ALWAYS_FALSE(TypeSet::IsTypeAboutToBeFinalized(&iter->type));
if (iter->hasAllocationSite())
MOZ_ALWAYS_FALSE(IsScriptAboutToBeFinalized(&iter->script));
else if (iter->hasConstructor())
MOZ_ALWAYS_FALSE(IsObjectAboutToBeFinalized(&iter->constructor));
}
}
// An Ion entry counts as marked only if every script it references and every
// tracked optimization type is marked.
bool
JitcodeGlobalEntry::IonEntry::isMarkedFromAnyThread()
{
for (unsigned i = 0; i < numScripts(); i++) {
if (!IsScriptMarkedFromAnyThread(&sizedScriptList()->pairs[i].script))
return false;
}
if (!optsAllTypes_)
return true;
for (IonTrackedTypeWithAddendum *iter = optsAllTypes_->begin();
iter != optsAllTypes_->end(); iter++)
{
if (!TypeSet::IsTypeMarkedFromAnyThread(&iter->type))
return false;
}
return true;
}
// An IonCache entry defers to the Ion entry it rejoins to.
bool
JitcodeGlobalEntry::IonCacheEntry::isMarkedFromAnyThread(JSRuntime *rt)
{
JitcodeGlobalEntry entry;
RejoinEntry(rt, *this, nativeStartAddr(), &entry);
return entry.isMarkedFromAnyThread(rt);
}
/* static */ void
JitcodeRegionEntry::WriteHead(CompactBufferWriter &writer,
uint32_t nativeOffset, uint8_t scriptDepth)

View File

@ -208,6 +208,8 @@ class JitcodeGlobalEntry
}
void markJitcode(JSTracer *trc);
bool isJitcodeMarkedFromAnyThread();
bool isJitcodeAboutToBeFinalized();
};
struct IonEntry : public BaseEntry
@ -357,6 +359,8 @@ class JitcodeGlobalEntry
mozilla::Maybe<uint8_t> trackedOptimizationIndexAtAddr(void *ptr);
void mark(JSTracer *trc);
void sweep();
bool isMarkedFromAnyThread();
};
struct BaselineEntry : public BaseEntry
@ -409,6 +413,10 @@ class JitcodeGlobalEntry
void youngestFrameLocationAtAddr(JSRuntime *rt, void *ptr,
JSScript **script, jsbytecode **pc) const;
void mark(JSTracer *trc);
void sweep();
bool isMarkedFromAnyThread();
};
struct IonCacheEntry : public BaseEntry
@ -437,6 +445,8 @@ class JitcodeGlobalEntry
void youngestFrameLocationAtAddr(JSRuntime *rt, void *ptr,
JSScript **script, jsbytecode **pc) const;
bool isMarkedFromAnyThread(JSRuntime *rt);
};
// Dummy entries are created for jitcode generated when profiling is not turned on,
@ -801,6 +811,61 @@ class JitcodeGlobalEntry
return ionEntry().allTrackedTypes();
}
// The entry's zone is that of its JitCode.
Zone *zone() {
return baseEntry().jitcode()->zone();
}
// Kind-dispatched marking: always mark the JitCode, then any kind-specific
// referents. IonCache and Dummy entries hold nothing extra.
void mark(JSTracer *trc) {
baseEntry().markJitcode(trc);
switch (kind()) {
case Ion:
ionEntry().mark(trc);
break;
case Baseline:
baselineEntry().mark(trc);
break;
case IonCache:
case Dummy:
break;
default:
MOZ_CRASH("Invalid JitcodeGlobalEntry kind.");
}
}
// Kind-dispatched sweeping of kind-specific referents.
void sweep() {
switch (kind()) {
case Ion:
ionEntry().sweep();
break;
case Baseline:
baselineEntry().sweep();
break;
case IonCache:
case Dummy:
break;
default:
MOZ_CRASH("Invalid JitcodeGlobalEntry kind.");
}
}
// Marked only if the JitCode and all kind-specific referents are marked.
// Dummy entries reference nothing beyond their JitCode.
bool isMarkedFromAnyThread(JSRuntime *rt) {
if (!baseEntry().isJitcodeMarkedFromAnyThread())
return false;
switch (kind()) {
case Ion:
return ionEntry().isMarkedFromAnyThread();
case Baseline:
return baselineEntry().isMarkedFromAnyThread();
case IonCache:
return ionCacheEntry().isMarkedFromAnyThread(rt);
case Dummy:
break;
default:
MOZ_CRASH("Invalid JitcodeGlobalEntry kind.");
}
return true;
}
//
// When stored in a free-list, entries use 'tower_' to store a
// pointer to the next entry. In this context only, 'tower_'
@ -884,10 +949,11 @@ class JitcodeGlobalTable
return addEntry(JitcodeGlobalEntry(entry), rt);
}
void removeEntry(void *startAddr, JSRuntime *rt);
void releaseEntry(void *startAddr, JSRuntime *rt);
void removeEntry(JitcodeGlobalEntry &entry, JitcodeGlobalEntry **prevTower, JSRuntime *rt);
void releaseEntry(JitcodeGlobalEntry &entry, JitcodeGlobalEntry **prevTower, JSRuntime *rt);
void mark(JSTracer *trc);
void sweep(JSRuntime *rt);
private:
bool addEntry(const JitcodeGlobalEntry &entry, JSRuntime *rt);

View File

@ -2649,6 +2649,7 @@ GCRuntime::updatePointersToRelocatedCells()
// Sweep everything to fix up weak pointers
WatchpointMap::sweepAll(rt);
Debugger::sweepAll(rt->defaultFreeOp());
jit::JitRuntime::SweepJitcodeGlobalTable(rt);
for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
if (zone->isGCCompacting())
rt->gc.sweepZoneAfterCompacting(zone);
@ -4160,6 +4161,15 @@ GCRuntime::markAllGrayReferences(gcstats::Phase phase)
markGrayReferences<GCZonesIter, GCCompartmentsIter>(phase);
}
// Mark the jitcode global table under its dedicated sweep-mark phase, then
// drain the mark stack so everything newly pushed is fully traced before the
// phase ends. SliceBudget's semantics here (default-constructed) are assumed
// to be unlimited — TODO confirm.
void
GCRuntime::markJitcodeGlobalTable()
{
gcstats::AutoPhase ap(stats, gcstats::PHASE_SWEEP_MARK_JITCODE_GLOBAL_TABLE);
jit::JitRuntime::MarkJitcodeGlobalTable(&marker);
SliceBudget budget;
marker.drainMarkStack(budget);
}
#ifdef DEBUG
class js::gc::MarkingValidator
@ -4277,6 +4287,8 @@ js::gc::MarkingValidator::nonIncrementalMark()
{
gcstats::AutoPhase ap1(gc->stats, gcstats::PHASE_SWEEP);
gcstats::AutoPhase ap2(gc->stats, gcstats::PHASE_SWEEP_MARK);
gc->markJitcodeGlobalTable();
gc->markAllWeakReferences(gcstats::PHASE_SWEEP_MARK_WEAK);
/* Update zone state for gray marking. */
@ -4819,6 +4831,8 @@ GCRuntime::endMarkingZoneGroup()
{
gcstats::AutoPhase ap(stats, gcstats::PHASE_SWEEP_MARK);
markJitcodeGlobalTable();
/*
* Mark any incoming black pointers from previously swept compartments
* whose referents are not marked. This can occur when gray cells become
@ -5030,6 +5044,10 @@ GCRuntime::beginSweepingZoneGroup()
// Detach unreachable debuggers and global objects from each other.
Debugger::sweepAll(&fop);
// Sweep entries containing about-to-be-finalized JitCode and
// update relocated TypeSet::Types inside the JitcodeGlobalTable.
jit::JitRuntime::SweepJitcodeGlobalTable(rt);
}
{

View File

@ -686,6 +686,59 @@ TypeSet::readBarrier(const TypeSet *types)
}
}
// Report whether the GC thing behind a TypeSet::Type is marked (from any
// thread). The pointer is written back into *v after the check, so an updated
// pointer from the Is*MarkedFromAnyThread call is preserved. Non-object types
// trivially count as marked.
bool
TypeSet::IsTypeMarkedFromAnyThread(TypeSet::Type *v)
{
bool rv;
if (v->isSingletonUnchecked()) {
JSObject *obj = v->singleton();
rv = IsObjectMarkedFromAnyThread(&obj);
*v = TypeSet::ObjectType(obj);
} else if (v->isGroupUnchecked()) {
ObjectGroup *group = v->group();
rv = IsObjectGroupMarkedFromAnyThread(&group);
*v = TypeSet::ObjectType(group);
} else {
rv = true;
}
return rv;
}
// Check whether the group or singleton behind an ObjectKey is about to be
// finalized. When the referent survives, *keyp is rewritten from the
// (possibly updated) pointer so callers see relocations.
static inline bool
IsObjectKeyAboutToBeFinalized(TypeSet::ObjectKey **keyp)
{
TypeSet::ObjectKey *key = *keyp;
bool isAboutToBeFinalized;
if (key->isGroup()) {
ObjectGroup *group = key->groupNoBarrier();
isAboutToBeFinalized = IsObjectGroupAboutToBeFinalized(&group);
if (!isAboutToBeFinalized)
*keyp = TypeSet::ObjectKey::get(group);
} else {
MOZ_ASSERT(key->isSingleton());
JSObject *singleton = key->singletonNoBarrier();
isAboutToBeFinalized = IsObjectAboutToBeFinalized(&singleton);
if (!isAboutToBeFinalized)
*keyp = TypeSet::ObjectKey::get(singleton);
}
return isAboutToBeFinalized;
}
// Check whether the object key behind a TypeSet::Type is about to be
// finalized; surviving keys are written back into *v so relocated pointers
// are preserved. Non-object types are never finalized.
bool
TypeSet::IsTypeAboutToBeFinalized(TypeSet::Type *v)
{
bool isAboutToBeFinalized;
if (v->isObjectUnchecked()) {
TypeSet::ObjectKey *key = v->objectKey();
isAboutToBeFinalized = IsObjectKeyAboutToBeFinalized(&key);
if (!isAboutToBeFinalized)
*v = TypeSet::ObjectType(key);
} else {
isAboutToBeFinalized = false;
}
return isAboutToBeFinalized;
}
bool
TypeSet::clone(LifoAlloc *alloc, TemporaryTypeSet *result) const
{
@ -3096,26 +3149,6 @@ js::TypeMonitorCallSlow(JSContext *cx, JSObject *callee, const CallArgs &args, b
TypeScript::SetArgument(cx, script, arg, UndefinedValue());
}
// NOTE(review): pre-patch helper removed by this commit; it is renamed to
// IsObjectKeyAboutToBeFinalized and hoisted earlier in the file (the body is
// identical), so it can be shared with the new TypeSet::IsTypeAboutToBeFinalized.
static inline bool
IsAboutToBeFinalized(TypeSet::ObjectKey **keyp)
{
TypeSet::ObjectKey *key = *keyp;
bool isAboutToBeFinalized;
if (key->isGroup()) {
ObjectGroup *group = key->groupNoBarrier();
isAboutToBeFinalized = IsObjectGroupAboutToBeFinalized(&group);
if (!isAboutToBeFinalized)
*keyp = TypeSet::ObjectKey::get(group);
} else {
MOZ_ASSERT(key->isSingleton());
JSObject *singleton = key->singletonNoBarrier();
isAboutToBeFinalized = IsObjectAboutToBeFinalized(&singleton);
if (!isAboutToBeFinalized)
*keyp = TypeSet::ObjectKey::get(singleton);
}
return isAboutToBeFinalized;
}
void
js::FillBytecodeTypeMap(JSScript *script, uint32_t *bytecodeMap)
{
@ -3870,7 +3903,7 @@ ConstraintTypeSet::sweep(Zone *zone, AutoClearTypeInferenceStateOnOOM &oom)
ObjectKey *key = oldArray[i];
if (!key)
continue;
if (!IsAboutToBeFinalized(&key)) {
if (!IsObjectKeyAboutToBeFinalized(&key)) {
ObjectKey **pentry =
TypeHashSet::Insert<ObjectKey *, ObjectKey, ObjectKey>
(zone->types.typeLifoAlloc, objectSet, objectCount, key);
@ -3902,7 +3935,7 @@ ConstraintTypeSet::sweep(Zone *zone, AutoClearTypeInferenceStateOnOOM &oom)
setBaseObjectCount(objectCount);
} else if (objectCount == 1) {
ObjectKey *key = (ObjectKey *) objectSet;
if (!IsAboutToBeFinalized(&key)) {
if (!IsObjectKeyAboutToBeFinalized(&key)) {
objectSet = reinterpret_cast<ObjectKey **>(key);
} else {
// As above, mark type sets containing objects with unknown

View File

@ -527,6 +527,9 @@ class TypeSet
static inline Type GetMaybeUntrackedValueType(const Value &val);
static void MarkTypeRoot(JSTracer *trc, Type *v, const char *name);
static void MarkTypeUnbarriered(JSTracer *trc, Type *v, const char *name);
static bool IsTypeMarkedFromAnyThread(Type *v);
static bool IsTypeAboutToBeFinalized(Type *v);
};
/*