Bug 1171780 - We no longer need to cast out of barriered types in GC; r=jonco

Terrence Cole 2015-06-05 09:01:12 -07:00
parent 0ba66d6570
commit 373a642a58
9 changed files with 11 additions and 53 deletions
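The change in outline: the shadow runtime's needsIncrementalBarrier() predicate, whose body had come to mean simply "the heap is idle", is replaced by its negation, isHeapBusy(), and every barrier entry point now bails out while the collector owns the heap. Since barriers therefore can never fire during GC, the Unbarriered/UnbarrieredRef casting machinery deleted below has no remaining purpose. A minimal sketch of the inverted check, using simplified stand-ins rather than the real SpiderMonkey declarations:

    #include <cassert>

    // Simplified stand-ins, not the real SpiderMonkey types.
    enum class HeapState { Idle, MajorCollecting, MinorCollecting };

    struct ShadowRuntime {
        HeapState heapState_ = HeapState::Idle;
        // The predicate this patch introduces; the deleted
        // needsIncrementalBarrier() returned the exact negation
        // (heapState_ == HeapState::Idle).
        bool isHeapBusy() const { return heapState_ != HeapState::Idle; }
    };

    // Barrier entry points change from `if (!rt->needsIncrementalBarrier())`
    // to the equivalent, more direct `if (rt->isHeapBusy())`.
    bool barrierMayFire(const ShadowRuntime* rt) {
        if (rt->isHeapBusy())
            return false;  // never fire barriers while the GC is running
        // ... otherwise consult the zone's needsIncrementalBarrier() ...
        return true;
    }

    int main() {
        ShadowRuntime rt;
        assert(barrierMayFire(&rt));         // idle heap: barriers may be needed
        rt.heapState_ = HeapState::MajorCollecting;
        assert(!barrierMayFire(&rt));        // busy heap: barriers suppressed
        return 0;
    }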

@@ -388,7 +388,7 @@ IsIncrementalBarrierNeededOnTenuredGCThing(JS::shadow::Runtime* rt, const JS::GC
 {
     MOZ_ASSERT(thing);
     MOZ_ASSERT(!js::gc::IsInsideNursery(thing.asCell()));
-    if (!rt->needsIncrementalBarrier())
+    if (rt->isHeapBusy())
         return false;
     JS::Zone* zone = JS::GetTenuredGCThingZone(thing);
     return JS::shadow::Zone::asShadowZone(zone)->needsIncrementalBarrier();

@@ -836,20 +836,6 @@ class HeapSlotArray
     }
 };

-/*
- * Operations on a Heap thing inside the GC need to strip the barriers from
- * pointer operations. This template helps do that in contexts where the type
- * is templatized.
- */
-template <typename T> struct Unbarriered {};
-template <typename S> struct Unbarriered< PreBarriered<S> > { typedef S* type; };
-template <typename S> struct Unbarriered< RelocatablePtr<S> > { typedef S* type; };
-template <> struct Unbarriered<PreBarrieredValue> { typedef Value type; };
-template <> struct Unbarriered<RelocatableValue> { typedef Value type; };
-template <typename S> struct Unbarriered< DefaultHasher< PreBarriered<S> > > {
-    typedef DefaultHasher<S*> type;
-};
-
 } /* namespace js */

 #endif /* gc_Barrier_h */
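For context on the block deleted above: Unbarriered<T> was a trait that mapped a barriered wrapper type back to its raw equivalent so GC-internal code could manipulate entries without tripping the barrier hooks. A compilable sketch of the trait pattern, with toy wrappers standing in for the real PreBarriered and RelocatablePtr classes:

    #include <type_traits>

    // Toy wrappers standing in for the real barriered pointer classes.
    template <typename T> struct PreBarriered   { T* ptr; };
    template <typename T> struct RelocatablePtr { T* ptr; };

    // The deleted trait: strip the wrapper to recover the raw pointer type.
    template <typename T> struct Unbarriered {};
    template <typename S> struct Unbarriered<PreBarriered<S>>   { typedef S* type; };
    template <typename S> struct Unbarriered<RelocatablePtr<S>> { typedef S* type; };

    struct Cell {};

    // GC code used the trait to reinterpret barriered slots as raw pointers,
    // so stores through them bypassed the pre/post barrier hooks.
    static_assert(std::is_same<Unbarriered<PreBarriered<Cell>>::type, Cell*>::value,
                  "PreBarriered<Cell> strips to Cell*");
    static_assert(std::is_same<Unbarriered<RelocatablePtr<Cell>>::type, Cell*>::value,
                  "RelocatablePtr<Cell> strips to Cell*");

    int main() { return 0; }

With isHeapBusy() guarding every barrier entry point, writes performed during collection can go through the barriered types directly, which is why the trait has no remaining callers.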

@@ -1432,7 +1432,7 @@ TenuredCell::readBarrier(TenuredCell* thing)
 {
     MOZ_ASSERT(!CurrentThreadIsIonCompiling());
     MOZ_ASSERT(!isNullLike(thing));
-    if (!thing->shadowRuntimeFromAnyThread()->needsIncrementalBarrier())
+    if (thing->shadowRuntimeFromAnyThread()->isHeapBusy())
         return;

     JS::shadow::Zone* shadowZone = thing->shadowZoneFromAnyThread();
@@ -1450,7 +1450,7 @@ TenuredCell::readBarrier(TenuredCell* thing)
 TenuredCell::writeBarrierPre(TenuredCell* thing)
 {
     MOZ_ASSERT(!CurrentThreadIsIonCompiling());
-    if (isNullLike(thing) || !thing->shadowRuntimeFromAnyThread()->needsIncrementalBarrier())
+    if (isNullLike(thing) || thing->shadowRuntimeFromAnyThread()->isHeapBusy())
         return;

     JS::shadow::Zone* shadowZone = thing->shadowZoneFromAnyThread();

@@ -331,6 +331,8 @@ class StoreBuffer
     };

     bool isOkayToUseBuffer() const {
+        MOZ_ASSERT(!JS::shadow::Runtime::asShadowRuntime(runtime_)->isHeapBusy());
+
         /*
          * Disabled store buffers may not have a valid state; e.g. when stored
          * inline in the ChunkTrailer.

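The new assertion encodes the same invariant from the store buffer's side: entries are only ever added by barriers, and barriers no longer run while the heap is busy, so any use of the buffer during collection is a bug. A toy illustration of that invariant with invented names (MiniStoreBuffer is not the real class, which lives in js/src/gc and checks the shadow runtime):

    #include <cassert>

    enum class HeapState { Idle, MajorCollecting, MinorCollecting };

    // Invented stand-in for illustration only.
    struct MiniStoreBuffer {
        const HeapState* heapState;
        bool enabled;

        bool isOkayToUseBuffer() const {
            // Mirrors the added MOZ_ASSERT: nothing may touch the buffer
            // while the collector owns the heap.
            assert(*heapState == HeapState::Idle);
            // Disabled buffers may hold invalid state (e.g. when embedded
            // inline in a chunk trailer), so callers must check first.
            return enabled;
        }
    };

    int main() {
        HeapState state = HeapState::Idle;
        MiniStoreBuffer buf{&state, true};
        assert(buf.isOkayToUseBuffer());
        return 0;
    }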
@@ -163,9 +163,7 @@ struct Runtime
       , gcStoreBufferPtr_(nullptr)
     {}

-    bool needsIncrementalBarrier() const {
-        return heapState_ == JS::HeapState::Idle;
-    }
+    bool isHeapBusy() const { return heapState_ != JS::HeapState::Idle; }

     js::gc::StoreBuffer* gcStoreBufferPtr() { return gcStoreBufferPtr_; }

@@ -348,7 +348,7 @@ WeakMapPostWriteBarrier(JSRuntime* rt, ObjectValueMap* weakMap, JSObject* key)
     // Strip the barriers from the type before inserting into the store buffer.
     // This will automatically ensure that barriers do not fire during GC.
     if (key && IsInsideNursery(key))
-        rt->gc.storeBuffer.putGeneric(UnbarrieredRef(weakMap, key));
+        rt->gc.storeBuffer.putGeneric(gc::HashKeyRef<ObjectValueMap, JSObject*>(weakMap, key));
 }

 static MOZ_ALWAYS_INLINE bool

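The replacement above buffers a gc::HashKeyRef over the barriered map type directly; the detour through UnbarrieredRef (deleted below) is unnecessary because the buffered edge is only consumed during minor GC, when barriers cannot fire. A simplified sketch of the rekeying pattern, using std::unordered_map as a stand-in (the real HashKeyRef works over SpiderMonkey's HashMap and has a different interface):

    #include <unordered_map>

    struct JSObjectStub {};

    // Simplified analogue of gc::HashKeyRef: remembers a map and a key so
    // the minor GC can re-insert the entry if the key object was moved.
    template <typename Map, typename Key>
    struct HashKeyRef {
        Map* map;
        Key key;

        HashKeyRef(Map* m, const Key& k) : map(m), key(k) {}

        // Invoked while the heap is busy, so no barriers can fire here.
        void keyMoved(const Key& newKey) {
            auto it = map->find(key);
            if (it == map->end() || newKey == key)
                return;
            auto value = it->second;
            map->erase(it);
            map->emplace(newKey, value);
        }
    };

    int main() {
        std::unordered_map<JSObjectStub*, int> map;
        JSObjectStub oldLocation, newLocation;
        map[&oldLocation] = 7;

        HashKeyRef<decltype(map), JSObjectStub*> ref(&map, &oldLocation);
        ref.keyMoved(&newLocation);  // simulate the nursery object moving

        return map.at(&newLocation) == 7 ? 0 : 1;
    }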
@@ -258,13 +258,8 @@ class WeakMap : public HashMap<Key, Value, HashPolicy, RuntimeAllocPolicy>, publ
     }

     /* Rekey an entry when moved, ensuring we do not trigger barriers. */
-    void entryMoved(Enum& eArg, const Key& k) {
-        typedef typename HashMap<typename Unbarriered<Key>::type,
-                                 typename Unbarriered<Value>::type,
-                                 typename Unbarriered<HashPolicy>::type,
-                                 RuntimeAllocPolicy>::Enum UnbarrieredEnum;
-        UnbarrieredEnum& e = reinterpret_cast<UnbarrieredEnum&>(eArg);
-        e.rekeyFront(reinterpret_cast<const typename Unbarriered<Key>::type&>(k));
+    void entryMoved(Enum& e, const Key& k) {
+        e.rekeyFront(k);
     }

 protected:
@@ -280,28 +275,6 @@
     }
 };

-/*
- * At times, you will need to ignore barriers when accessing WeakMap entries.
- * Localize the templatized casting craziness here.
- */
-template <class Key, class Value>
-static inline gc::HashKeyRef<HashMap<Key, Value, DefaultHasher<Key>, RuntimeAllocPolicy>, Key>
-UnbarrieredRef(WeakMap<PreBarriered<Key>, RelocatablePtr<Value>>* map, Key key)
-{
-    /*
-     * Some compilers complain about instantiating the WeakMap class for
-     * unbarriered type arguments, so we cast to a HashMap instead. Because of
-     * WeakMap's multiple inheritance, we need to do this in two stages, first
-     * to the HashMap base class and then to the unbarriered version.
-     */
-    typedef typename WeakMap<PreBarriered<Key>, RelocatablePtr<Value>>::Base BaseMap;
-    auto baseMap = static_cast<BaseMap*>(map);
-    typedef HashMap<Key, Value, DefaultHasher<Key>, RuntimeAllocPolicy> UnbarrieredMap;
-    typedef gc::HashKeyRef<UnbarrieredMap, Key> UnbarrieredKeyRef;
-    return UnbarrieredKeyRef(reinterpret_cast<UnbarrieredMap*>(baseMap), key);
-}
-
 /* WeakMap methods exposed so they can be installed in the self-hosting global. */
 extern JSObject*

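The comment in the deleted UnbarrieredRef explains why the cast had to go through WeakMap's HashMap base first: with multiple inheritance, reinterpret_cast keeps the original address, while static_cast adjusts the pointer to the correct base subobject. A minimal demonstration of that pitfall, with toy classes unrelated to the real WeakMap hierarchy:

    #include <cstdio>

    // Toy hierarchy: Derived has two bases, so the Base2 subobject generally
    // does not sit at the same address as the Derived object itself.
    struct Base1 { int a; };
    struct Base2 { int b; };
    struct Derived : Base1, Base2 {};

    int main() {
        Derived d;
        Base2* adjusted   = static_cast<Base2*>(&d);       // correct: pointer adjusted
        Base2* unadjusted = reinterpret_cast<Base2*>(&d);  // wrong: same address as &d

        // On typical ABIs the two differ; reading through `unadjusted` would
        // reinterpret Base1's bytes as Base2. Hence the deleted code's
        // two-stage static_cast-then-reinterpret_cast dance.
        std::printf("static_cast: %p  reinterpret_cast: %p\n",
                    static_cast<void*>(adjusted), static_cast<void*>(unadjusted));
        return 0;
    }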
@@ -1003,7 +1003,6 @@ struct JSRuntime : public JS::shadow::Runtime,
     /* Garbage collector state has been successfully initialized. */
     bool gcInitialized;

-    bool isHeapBusy() const { return heapState_ != JS::HeapState::Idle; }
     bool isHeapMajorCollecting() const { return heapState_ == JS::HeapState::MajorCollecting; }
     bool isHeapMinorCollecting() const { return heapState_ == JS::HeapState::MinorCollecting; }
     bool isHeapCollecting() const { return isHeapMinorCollecting() || isHeapMajorCollecting(); }

@@ -3366,7 +3366,7 @@ PreliminaryObjectArrayWithTemplate::writeBarrierPre(PreliminaryObjectArrayWithTe
 {
     Shape* shape = objects->shape();
-    if (!shape || !shape->runtimeFromAnyThread()->needsIncrementalBarrier())
+    if (!shape || shape->runtimeFromAnyThread()->isHeapBusy())
         return;

     JS::Zone* zone = shape->zoneFromAnyThread();
@@ -3918,7 +3918,7 @@ TypeNewScript::trace(JSTracer* trc)
 /* static */ void
 TypeNewScript::writeBarrierPre(TypeNewScript* newScript)
 {
-    if (!newScript->function()->runtimeFromAnyThread()->needsIncrementalBarrier())
+    if (newScript->function()->runtimeFromAnyThread()->isHeapBusy())
         return;

     JS::Zone* zone = newScript->function()->zoneFromAnyThread();