Bug 1209704 - Part 3: Share tracing accessors between all barrier classes; r=jonco

Terrence Cole 2015-10-01 14:06:55 -07:00
parent bd8686b3af
commit b537ca8112
8 changed files with 39 additions and 44 deletions
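In outline, the patch hoists the raw-slot accessor that the GC needs out of the individual barrier classes and into the shared BarrieredBase, renaming it from unsafeGet() to unsafeUnbarrieredForTracing() so call sites document themselves. A minimal self-contained sketch of the resulting shape (the class and accessor names are the real ones; everything else is simplified stand-in code, not the actual implementation — the real TraceEdge in Marking.cpp also takes a JSTracer and an edge name):

#include <cstdio>

// Toy stand-ins for the classes in js/src/gc/Barrier.h; barrier logic elided.
template <typename T>
class BarrieredBase
{
  protected:
    T value;
    explicit BarrieredBase(const T& v) : value(v) {}

  public:
    // Shared by every barrier class; callers promise they are tracing.
    T* unsafeUnbarrieredForTracing() { return &value; }
};

template <typename T>
class WriteBarriered : public BarrieredBase<T>
{
  public:
    explicit WriteBarriered(const T& v) : BarrieredBase<T>(v) {}
    const T& get() const { return this->value; }
};

template <typename T>
class ReadBarriered : public BarrieredBase<T>
{
  public:
    explicit ReadBarriered(const T& v) : BarrieredBase<T>(v) {}
    const T get() const { /* a real read barrier would fire here */ return this->value; }
};

// One tracer-side helper serves both hierarchies, which is what lets the
// per-subclass unsafeGet() accessors in the hunks below be deleted.
template <typename T>
void TraceAnyEdge(BarrieredBase<T>* edge)
{
    std::printf("tracing slot at %p\n", static_cast<void*>(edge->unsafeUnbarrieredForTracing()));
}

int main()
{
    WriteBarriered<int> w(1);
    ReadBarriered<int> r(2);
    TraceAnyEdge(&w);
    TraceAnyEdge(&r);
    return 0;
}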

View File

@@ -593,7 +593,7 @@ class TypedObject : public JSObject
static bool GetBuffer(JSContext* cx, unsigned argc, Value* vp);
static bool GetByteOffset(JSContext* cx, unsigned argc, Value* vp);
- Shape** addressOfShapeFromGC() { return shape_.unsafeGet(); }
+ Shape** addressOfShapeFromGC() { return shape_.unsafeUnbarrieredForTracing(); }
};
typedef Handle<TypedObject*> HandleTypedObject;

View File

@@ -333,6 +333,12 @@ class BarrieredBase : public BarrieredBaseMixins<T>
// barrier types are NOT supported. See assertTypeConstraints.
T value;
+ public:
+ // Note: this is public because C++ cannot friend to a specific template instantiation.
+ // Friending to the generic template leads to a number of unintended consequences, including
+ // template resolution ambiguity and a circular dependency with Tracing.h.
+ T* unsafeUnbarrieredForTracing() { return &value; }
private:
#ifdef DEBUG
// Static type assertions about T must be moved out of line to avoid
@@ -353,18 +359,14 @@ class WriteBarrieredBase : public BarrieredBase<T>
DECLARE_POINTER_COMPARISON_OPS(T);
DECLARE_POINTER_CONSTREF_OPS(T);
- /* Use this if the automatic coercion to T isn't working. */
+ // Use this if the automatic coercion to T isn't working.
const T& get() const { return this->value; }
- /*
- * Use these if you want to change the value without invoking barriers.
- * Obviously this is dangerous unless you know the barrier is not needed.
- */
- T* unsafeGet() { return &this->value; }
- const T* unsafeGet() const { return &this->value; }
+ // Use this if you want to change the value without invoking barriers.
+ // Obviously this is dangerous unless you know the barrier is not needed.
void unsafeSet(T v) { this->value = v; }
- /* For users who need to manually barrier the raw types. */
+ // For users who need to manually barrier the raw types.
static void writeBarrierPre(const T& v) { InternalGCMethods<T>::preBarrier(v); }
protected:
@@ -534,10 +536,6 @@ class ReadBarrieredBase : public BarrieredBase<T>
// ReadBarrieredBase is not directly instantiable.
explicit ReadBarrieredBase(T v) : BarrieredBase<T>(v) {}
- public:
- // For use by the GC.
- T* unsafeGet() { return &this->value; }
protected:
void read() const { InternalGCMethods<T>::readBarrier(this->value); }
};
@@ -555,28 +553,27 @@ class ReadBarriered : public ReadBarrieredBase<T>
{
public:
ReadBarriered() : ReadBarrieredBase<T>(GCMethods<T>::initial()) {}
- explicit ReadBarriered(T v) : ReadBarrieredBase<T>(v) {}
+ explicit ReadBarriered(const T& v) : ReadBarrieredBase<T>(v) {}
- T get() const {
+ const T get() const {
if (!InternalGCMethods<T>::isMarkable(this->value))
return GCMethods<T>::initial();
this->read();
return this->value;
}
- T unbarrieredGet() const {
+ const T unbarrieredGet() const {
return this->value;
}
- operator T() const { return get(); }
+ operator const T() const { return get(); }
- T& operator*() const { return *get(); }
- T operator->() const { return get(); }
+ const T& operator*() const { return *get(); }
+ const T operator->() const { return get(); }
- T* unsafeGet() { return &this->value; }
- T const* unsafeGet() const { return &this->value; }
- void set(T v) { this->value = v; }
+ void set(const T& v) { this->value = v; }
};
// Add Value operations to all Barrier types. Note, this must be defined before
@@ -636,8 +633,6 @@ class HeapSlot : public WriteBarrieredBase<Value>
reinterpret_cast<HeapSlot*>(const_cast<Value*>(&target))->post(owner, kind, slot, target);
}
- Value* unsafeGet() { return &value; }
private:
void post(NativeObject* owner, Kind kind, uint32_t slot, const Value& target) {
MOZ_ASSERT(preconditionForWriteBarrierPost(owner, kind, slot, target));
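Taken together, the Barrier.h changes leave a clean division of labor: mutators go through get()/set() and take the barriers, while the GC announces itself by name. A hedged usage sketch (the field and function names here are hypothetical; the accessors are the ones defined above):

// Hypothetical caller code over a ReadBarriered<JSObject*> cell.
JSObject* LookupForScript(ReadBarriered<JSObject*>& cache)
{
    return cache.get();                          // may fire the read barrier
}

JSObject** EdgeForTracer(ReadBarriered<JSObject*>& cache)
{
    return cache.unsafeUnbarrieredForTracing();  // GC only: no barrier fires
}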

View File

@@ -392,7 +392,7 @@ template <typename T>
void
js::TraceEdge(JSTracer* trc, WriteBarrieredBase<T>* thingp, const char* name)
{
- DispatchToTracer(trc, ConvertToBase(thingp->unsafeGet()), name);
+ DispatchToTracer(trc, ConvertToBase(thingp->unsafeUnbarrieredForTracing()), name);
}
template <typename T>
@@ -426,7 +426,7 @@ js::TraceRange(JSTracer* trc, size_t len, WriteBarrieredBase<T>* vec, const char
JS::AutoTracingIndex index(trc);
for (auto i : MakeRange(len)) {
if (InternalGCMethods<T>::isMarkable(vec[i].get()))
- DispatchToTracer(trc, ConvertToBase(vec[i].unsafeGet()), name);
+ DispatchToTracer(trc, ConvertToBase(vec[i].unsafeUnbarrieredForTracing()), name);
++index;
}
}
@@ -474,7 +474,7 @@ js::TraceCrossCompartmentEdge(JSTracer* trc, JSObject* src, WriteBarrieredBase<T
const char* name)
{
if (ShouldMarkCrossCompartment(trc, src, dst->get()))
- DispatchToTracer(trc, dst->unsafeGet(), name);
+ DispatchToTracer(trc, dst->unsafeUnbarrieredForTracing(), name);
}
template void js::TraceCrossCompartmentEdge<Value>(JSTracer*, JSObject*,
WriteBarrieredBase<Value>*, const char*);
@@ -1894,8 +1894,8 @@ js::gc::StoreBuffer::SlotsEdge::trace(TenuringTracer& mover) const
int32_t initLen = obj->getDenseInitializedLength();
int32_t clampedStart = Min(start_, initLen);
int32_t clampedEnd = Min(start_ + count_, initLen);
- mover.traceSlots(static_cast<HeapSlot*>(obj->getDenseElements() + clampedStart)->unsafeGet(),
- clampedEnd - clampedStart);
+ mover.traceSlots(static_cast<HeapSlot*>(obj->getDenseElements() + clampedStart)
+ ->unsafeUnbarrieredForTracing(), clampedEnd - clampedStart);
} else {
int32_t start = Min(uint32_t(start_), obj->slotSpan());
int32_t end = Min(uint32_t(start_) + count_, obj->slotSpan());
@@ -2032,7 +2032,7 @@ js::TenuringTracer::traceObject(JSObject* obj)
!nobj->denseElementsAreCopyOnWrite() &&
ObjectDenseElementsMayBeMarkable(nobj))
{
- Value* elems = static_cast<HeapSlot*>(nobj->getDenseElements())->unsafeGet();
+ Value* elems = static_cast<HeapSlot*>(nobj->getDenseElements())->unsafeUnbarrieredForTracing();
traceSlots(elems, elems + nobj->getDenseInitializedLength());
}
@@ -2048,9 +2048,9 @@ js::TenuringTracer::traceObjectSlots(NativeObject* nobj, uint32_t start, uint32_
HeapSlot* dynEnd;
nobj->getSlotRange(start, length, &fixedStart, &fixedEnd, &dynStart, &dynEnd);
if (fixedStart)
- traceSlots(fixedStart->unsafeGet(), fixedEnd->unsafeGet());
+ traceSlots(fixedStart->unsafeUnbarrieredForTracing(), fixedEnd->unsafeUnbarrieredForTracing());
if (dynStart)
- traceSlots(dynStart->unsafeGet(), dynEnd->unsafeGet());
+ traceSlots(dynStart->unsafeUnbarrieredForTracing(), dynEnd->unsafeUnbarrieredForTracing());
}
void
@@ -2325,14 +2325,14 @@ template <typename T>
bool
IsMarked(WriteBarrieredBase<T>* thingp)
{
- return IsMarkedInternal(ConvertToBase(thingp->unsafeGet()));
+ return IsMarkedInternal(ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
}
template <typename T>
bool
IsMarked(ReadBarrieredBase<T>* thingp)
{
- return IsMarkedInternal(ConvertToBase(thingp->unsafeGet()));
+ return IsMarkedInternal(ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
}
template <typename T>
@@ -2346,14 +2346,14 @@ template <typename T>
bool
IsAboutToBeFinalized(WriteBarrieredBase<T>* thingp)
{
- return IsAboutToBeFinalizedInternal(ConvertToBase(thingp->unsafeGet()));
+ return IsAboutToBeFinalizedInternal(ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
}
template <typename T>
bool
IsAboutToBeFinalized(ReadBarrieredBase<T>* thingp)
{
- return IsAboutToBeFinalizedInternal(ConvertToBase(thingp->unsafeGet()));
+ return IsAboutToBeFinalizedInternal(ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
}
// Instantiate a copy of the Tracing templates for each derived type.
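A side effect worth noting: with the accessor on BarrieredBase, the WriteBarrieredBase and ReadBarrieredBase overloads of IsMarked and IsAboutToBeFinalized are now textually identical. This patch keeps both overloads, but the sharing would permit a single template over the common base, roughly (a sketch of the option, not something this commit does):

// Hypothetical consolidation enabled by the shared accessor.
template <typename T>
bool IsMarked(BarrieredBase<T>* thingp)
{
    return IsMarkedInternal(ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
}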

View File

@@ -11105,7 +11105,7 @@ ICGetElem_NativePrototypeCallNative<T>::Clone(JSContext* cx,
ICGetElem_NativePrototypeCallNative<T>& other)
{
return ICStub::New<ICGetElem_NativePrototypeCallNative<T>>(cx, space, other.jitCode(),
- firstMonitorStub, other.receiverGuard(), other.key().unsafeGet(), other.accessType(),
+ firstMonitorStub, other.receiverGuard(), &other.key().get(), other.accessType(),
other.needsAtomize(), other.getter(), other.pcOffset_, other.holder(),
other.holderShape());
}
@@ -11125,7 +11125,7 @@ ICGetElem_NativePrototypeCallScripted<T>::Clone(JSContext* cx,
ICGetElem_NativePrototypeCallScripted<T>& other)
{
return ICStub::New<ICGetElem_NativePrototypeCallScripted<T>>(cx, space, other.jitCode(),
- firstMonitorStub, other.receiverGuard(), other.key().unsafeGet(), other.accessType(),
+ firstMonitorStub, other.receiverGuard(), &other.key().get(), other.accessType(),
other.needsAtomize(), other.getter(), other.pcOffset_, other.holder(),
other.holderShape());
}
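The two Baseline IC call sites are the one place unsafeGet() was not serving the tracer: Clone() only needs to read the key. Taking the address of the const reference that get() returns expresses exactly that and hands out no mutable unbarriered alias. The idiom in isolation, as a sketch (the wrapper function below is hypothetical; const T& get() const is the real WriteBarrieredBase accessor):

// &field.get() turns the getter's const T& into a read-only const T*.
template <typename T>
const T* KeyAddressForClone(const WriteBarrieredBase<T>& field)
{
    return &field.get();
}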

View File

@@ -536,7 +536,7 @@ JSCompartment::traceRoots(JSTracer* trc, js::gc::GCRuntime::TraceOrMarkRuntime t
{
if (objectMetadataState.is<PendingMetadata>()) {
TraceRoot(trc,
- objectMetadataState.as<PendingMetadata>().unsafeGet(),
+ objectMetadataState.as<PendingMetadata>().unsafeUnbarrieredForTracing(),
"on-stack object pending metadata");
}
@@ -550,7 +550,7 @@ JSCompartment::traceRoots(JSTracer* trc, js::gc::GCRuntime::TraceOrMarkRuntime t
// If a compartment is on-stack, we mark its global so that
// JSContext::global() remains valid.
if (enterCompartmentDepth && global_.unbarrieredGet())
- TraceRoot(trc, global_.unsafeGet(), "on-stack compartment global");
+ TraceRoot(trc, global_.unsafeUnbarrieredForTracing(), "on-stack compartment global");
}
// Nothing below here needs to be treated as a root if we aren't marking
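Note the pairing in the second hunk: the existence check uses unbarrieredGet() and the trace uses unsafeUnbarrieredForTracing(), so neither step of marking the on-stack global can fire a read barrier. The same pattern in isolation (a hedged sketch; the real code lives in JSCompartment::traceRoots):

// Sketch: tracing a ReadBarriered root without touching its read barrier.
void TraceGlobalRoot(JSTracer* trc, ReadBarriered<GlobalObject*>& global)
{
    if (global.unbarrieredGet())  // null check, no barrier
        TraceRoot(trc, global.unsafeUnbarrieredForTracing(), "on-stack compartment global");
}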

View File

@@ -199,7 +199,7 @@ class MOZ_RAII AutoSetNewObjectMetadata : private JS::CustomAutoRooter
virtual void trace(JSTracer* trc) override {
if (prevState_.is<PendingMetadata>()) {
TraceRoot(trc,
- prevState_.as<PendingMetadata>().unsafeGet(),
+ prevState_.as<PendingMetadata>().unsafeUnbarrieredForTracing(),
"Object pending metadata");
}
}

View File

@@ -3713,7 +3713,8 @@ JSObject::traceChildren(JSTracer* trc)
JS::AutoTracingDetails ctx(trc, func);
JS::AutoTracingIndex index(trc);
for (uint32_t i = 0; i < nobj->slotSpan(); ++i) {
- TraceManuallyBarrieredEdge(trc, nobj->getSlotRef(i).unsafeGet(), "object slot");
+ TraceManuallyBarrieredEdge(trc, nobj->getSlotRef(i).unsafeUnbarrieredForTracing(),
+ "object slot");
++index;
}
}

View File

@@ -2106,8 +2106,7 @@ DebugScopes::sweep(JSRuntime* rt)
* released more eagerly.
*/
for (MissingScopeMap::Enum e(missingScopes); !e.empty(); e.popFront()) {
- DebugScopeObject** debugScope = e.front().value().unsafeGet();
- if (IsAboutToBeFinalizedUnbarriered(debugScope)) {
+ if (IsAboutToBeFinalized(&e.front().value())) {
/*
* Note that onPopCall and onPopBlock rely on missingScopes to find
* scope objects that we synthesized for the debugger's sake, and
@@ -2125,7 +2124,7 @@ DebugScopes::sweep(JSRuntime* rt)
* Thus, we must explicitly remove the entries from both liveScopes
* and missingScopes here.
*/
- liveScopes.remove(&(*debugScope)->scope());
+ liveScopes.remove(&e.front().value()->scope());
e.removeFront();
} else {
MissingScopeKey key = e.front().key();