Bug 989414 - Convert BarrieredPtr to take a T* as template parameter instead of T; r=jonco

--HG--
extra : rebase_source : 2394326ee9de3485ffd087944092bff9c669b1f5
Terrence Cole 2014-04-25 11:02:44 -07:00
parent 168d95537c
commit 3e883f51c5
18 changed files with 154 additions and 136 deletions


@@ -37,7 +37,7 @@ typedef InlineMap<JSAtom *, DefinitionList, 24> AtomDefnListMap;
  * the list and map->vector must point to pre-allocated memory.
  */
 void
-InitAtomMap(AtomIndexMap *indices, HeapPtr<JSAtom> *atoms);
+InitAtomMap(AtomIndexMap *indices, HeapPtrAtom *atoms);
 /*
  * A pool that permits the reuse of the backing storage for the defn, index, or


@@ -150,6 +150,10 @@
  * js/public/RootingAPI.h.
  */
+class JSAtom;
+class JSFlatString;
+class JSLinearString;
 namespace js {
 class PropertyName;
@@ -285,46 +289,54 @@ ZoneOfValueFromAnyThread(const JS::Value &value)
     return static_cast<js::gc::Cell *>(value.toGCThing())->tenuredZoneFromAnyThread();
 }
+template <typename T>
+struct InternalGCMethods {};
+template <typename T>
+struct InternalGCMethods<T *>
+{
+    static void preBarrier(T *v) { T::writeBarrierPre(v); }
+#ifdef JSGC_GENERATIONAL
+    static void postBarrier(T **vp) { T::writeBarrierPost(*vp, vp); }
+    static void postBarrierRelocate(T **vp) { T::writeBarrierPostRelocate(*vp, vp); }
+    static void postBarrierRemove(T **vp) { T::writeBarrierPostRemove(*vp, vp); }
+#endif
+};
 /*
  * Base class for barriered pointer types.
  */
-template <class T, typename Unioned = uintptr_t>
+template <class T>
 class BarrieredPtr
 {
   protected:
-    union {
-        T *value;
-        Unioned other;
-    };
-    BarrieredPtr(T *v) : value(v) {}
+    T value;
+    BarrieredPtr(T v) : value(v) {}
     ~BarrieredPtr() { pre(); }
   public:
-    void init(T *v) {
-        JS_ASSERT(!IsPoisonedPtr<T>(v));
+    void init(T v) {
+        JS_ASSERT(!GCMethods<T>::poisoned(v));
         this->value = v;
     }
-    /* Use this if the automatic coercion to T* isn't working. */
-    T *get() const { return value; }
+    /* Use this if the automatic coercion to T isn't working. */
+    T get() const { return value; }
     /*
      * Use these if you want to change the value without invoking the barrier.
      * Obviously this is dangerous unless you know the barrier is not needed.
      */
-    T **unsafeGet() { return &value; }
-    void unsafeSet(T *v) { value = v; }
-    Unioned *unsafeGetUnioned() { return &other; }
-    T &operator*() const { return *value; }
-    T *operator->() const { return value; }
-    operator T*() const { return value; }
+    T *unsafeGet() { return &value; }
+    void unsafeSet(T v) { value = v; }
+    T operator->() const { return value; }
+    operator T() const { return value; }
   protected:
-    void pre() { T::writeBarrierPre(value); }
+    void pre() { InternalGCMethods<T>::preBarrier(value); }
 };
 /*
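
Note on the hunk above: barrier calls no longer go straight to the pointee's static hooks; BarrieredPtr<T> now routes them through InternalGCMethods<T>, whose T * partial specialization forwards to T::writeBarrierPre and friends. That indirection is what makes room for later non-pointer specializations (for example on Value) without touching BarrieredPtr itself. The sketch below is not part of the patch; it is a self-contained, compilable illustration of the dispatch pattern, with DemoCell and the main() harness invented for the example.

#include <cstdio>

// Stand-in for a GC thing such as JSObject; in SpiderMonkey these static
// hooks are provided by the cell types themselves.
struct DemoCell
{
    static void writeBarrierPre(DemoCell *v) { std::printf("pre-barrier  on %p\n", (void *) v); }
    static void writeBarrierPost(DemoCell *v, void *addr) {
        std::printf("post-barrier on %p (slot %p)\n", (void *) v, addr);
    }
};

// Primary template is deliberately empty: only types with a barrier story
// get a specialization.
template <typename T>
struct InternalGCMethods {};

// Pointer specialization: forward to the pointee's static barrier hooks.
template <typename T>
struct InternalGCMethods<T *>
{
    static void preBarrier(T *v)    { T::writeBarrierPre(v); }
    static void postBarrier(T **vp) { T::writeBarrierPost(*vp, vp); }
};

// Reduced BarrieredPtr: the template parameter is now the full pointer type
// (T = DemoCell *), so `T value` is itself a pointer member.
template <class T>
class BarrieredPtrSketch
{
  protected:
    T value;

  public:
    explicit BarrieredPtrSketch(T v) : value(v) {}
    ~BarrieredPtrSketch() { InternalGCMethods<T>::preBarrier(value); }
    operator T() const { return value; }
};

int main()
{
    DemoCell cell;
    DemoCell *slot = &cell;
    InternalGCMethods<DemoCell *>::postBarrier(&slot); // dispatches to DemoCell::writeBarrierPost
    {
        BarrieredPtrSketch<DemoCell *> p(&cell);       // pre-barrier fires when p goes out of scope
    }
    return 0;
}
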
@@ -333,14 +345,14 @@ class BarrieredPtr
  * should be used in all cases that do not require explicit low-level control
  * of moving behavior, e.g. for HashMap keys.
  */
-template <class T, typename Unioned = uintptr_t>
-class EncapsulatedPtr : public BarrieredPtr<T, Unioned>
+template <class T>
+class EncapsulatedPtr : public BarrieredPtr<T>
 {
   public:
-    EncapsulatedPtr() : BarrieredPtr<T, Unioned>(nullptr) {}
-    EncapsulatedPtr(T *v) : BarrieredPtr<T, Unioned>(v) {}
-    explicit EncapsulatedPtr(const EncapsulatedPtr<T, Unioned> &v)
-      : BarrieredPtr<T, Unioned>(v.value) {}
+    EncapsulatedPtr() : BarrieredPtr<T>(nullptr) {}
+    EncapsulatedPtr(T v) : BarrieredPtr<T>(v) {}
+    explicit EncapsulatedPtr(const EncapsulatedPtr<T> &v)
+      : BarrieredPtr<T>(v.value) {}
     /* Use to set the pointer to nullptr. */
     void clear() {
@@ -348,16 +360,16 @@ class EncapsulatedPtr : public BarrieredPtr<T, Unioned>
         this->value = nullptr;
     }
-    EncapsulatedPtr<T, Unioned> &operator=(T *v) {
+    EncapsulatedPtr<T> &operator=(T v) {
         this->pre();
-        JS_ASSERT(!IsPoisonedPtr<T>(v));
+        JS_ASSERT(!GCMethods<T>::poisoned(v));
         this->value = v;
         return *this;
     }
-    EncapsulatedPtr<T, Unioned> &operator=(const EncapsulatedPtr<T> &v) {
+    EncapsulatedPtr<T> &operator=(const EncapsulatedPtr<T> &v) {
         this->pre();
-        JS_ASSERT(!IsPoisonedPtr<T>(v.value));
+        JS_ASSERT(!GCMethods<T>::poisoned(v.value));
         this->value = v.value;
         return *this;
     }
@@ -375,45 +387,49 @@ class EncapsulatedPtr : public BarrieredPtr<T, Unioned>
  * stored in memory that has GC lifetime. HeapPtr must not be used in contexts
  * where it may be implicitly moved or deleted, e.g. most containers.
  */
-template <class T, class Unioned = uintptr_t>
-class HeapPtr : public BarrieredPtr<T, Unioned>
+template <class T>
+class HeapPtr : public BarrieredPtr<T>
 {
   public:
-    HeapPtr() : BarrieredPtr<T, Unioned>(nullptr) {}
-    explicit HeapPtr(T *v) : BarrieredPtr<T, Unioned>(v) { post(); }
-    explicit HeapPtr(const HeapPtr<T, Unioned> &v) : BarrieredPtr<T, Unioned>(v) { post(); }
-    void init(T *v) {
-        JS_ASSERT(!IsPoisonedPtr<T>(v));
+    HeapPtr() : BarrieredPtr<T>(nullptr) {}
+    explicit HeapPtr(T v) : BarrieredPtr<T>(v) { post(); }
+    explicit HeapPtr(const HeapPtr<T> &v) : BarrieredPtr<T>(v) { post(); }
+    void init(T v) {
+        JS_ASSERT(!GCMethods<T>::poisoned(v));
         this->value = v;
         post();
     }
-    HeapPtr<T, Unioned> &operator=(T *v) {
+    HeapPtr<T> &operator=(T v) {
         this->pre();
-        JS_ASSERT(!IsPoisonedPtr<T>(v));
+        JS_ASSERT(!GCMethods<T>::poisoned(v));
         this->value = v;
         post();
         return *this;
     }
-    HeapPtr<T, Unioned> &operator=(const HeapPtr<T, Unioned> &v) {
+    HeapPtr<T> &operator=(const HeapPtr<T> &v) {
         this->pre();
-        JS_ASSERT(!IsPoisonedPtr<T>(v.value));
+        JS_ASSERT(!GCMethods<T>::poisoned(v.value));
         this->value = v.value;
         post();
         return *this;
     }
   protected:
-    void post() { T::writeBarrierPost(this->value, (void *)&this->value); }
+    void post() {
+#ifdef JSGC_GENERATIONAL
+        InternalGCMethods<T>::postBarrier(&this->value);
+#endif
+    }
     /* Make this friend so it can access pre() and post(). */
     template <class T1, class T2>
     friend inline void
     BarrieredSetPair(Zone *zone,
-                     HeapPtr<T1> &v1, T1 *val1,
-                     HeapPtr<T2> &v2, T2 *val2);
+                     HeapPtr<T1*> &v1, T1 *val1,
+                     HeapPtr<T2*> &v2, T2 *val2);
   private:
     /*
@@ -424,7 +440,7 @@ class HeapPtr : public BarrieredPtr<T, Unioned>
      * semantics, so this does not completely prevent invalid uses.
      */
     HeapPtr(HeapPtr<T> &&) MOZ_DELETE;
-    HeapPtr<T, Unioned> &operator=(HeapPtr<T, Unioned> &&) MOZ_DELETE;
+    HeapPtr<T> &operator=(HeapPtr<T> &&) MOZ_DELETE;
 };
 /*
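
A reading aid for the HeapPtr hunks above: the assignment operators keep the same ordering as before, pre-barrier on the value being overwritten, then the store, then the post-barrier on the slot; only the dispatch changed (T::writeBarrierPost becomes InternalGCMethods<T>::postBarrier under JSGC_GENERATIONAL). The following self-contained sketch, with an invented Thing type standing in for a GC cell, shows that sequence in isolation and is not part of the patch.

#include <cstdio>

// Stand-in for a GC thing; the printfs mark where the real write barriers run.
struct Thing
{
    static void writeBarrierPre(Thing *old)            { std::printf("pre-barrier on old value %p\n", (void *) old); }
    static void writeBarrierPost(Thing *now, void *at) { std::printf("post-barrier: %p stored at %p\n", (void *) now, at); }
};

// Reduced HeapPtr-style assignment: pre() on the value being overwritten,
// then the store, then post() on the slot that now holds the new value.
struct HeapPtrSketch
{
    Thing *value;

    HeapPtrSketch() : value(nullptr) {}

    HeapPtrSketch &operator=(Thing *v) {
        Thing::writeBarrierPre(value);          // incremental-GC barrier sees the old edge
        value = v;                              // the actual store
        Thing::writeBarrierPost(value, &value); // generational-GC barrier records the slot
        return *this;
    }
};

int main()
{
    Thing a, b;
    HeapPtrSketch p;
    p = &a;
    p = &b;   // pre-barrier on &a, then post-barrier on &b
    return 0;
}
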
@@ -469,7 +485,7 @@ class RelocatablePtr : public BarrieredPtr<T>
 {
   public:
     RelocatablePtr() : BarrieredPtr<T>(nullptr) {}
-    explicit RelocatablePtr(T *v) : BarrieredPtr<T>(v) {
+    explicit RelocatablePtr(T v) : BarrieredPtr<T>(v) {
         if (v)
             post();
     }
@@ -490,9 +506,9 @@ class RelocatablePtr : public BarrieredPtr<T>
             relocate();
     }
-    RelocatablePtr<T> &operator=(T *v) {
+    RelocatablePtr<T> &operator=(T v) {
         this->pre();
-        JS_ASSERT(!IsPoisonedPtr<T>(v));
+        JS_ASSERT(!GCMethods<T>::poisoned(v));
         if (v) {
             this->value = v;
             post();
@@ -505,7 +521,7 @@ class RelocatablePtr : public BarrieredPtr<T>
     RelocatablePtr<T> &operator=(const RelocatablePtr<T> &v) {
         this->pre();
-        JS_ASSERT(!IsPoisonedPtr<T>(v.value));
+        JS_ASSERT(!GCMethods<T>::poisoned(v.value));
         if (v.value) {
             this->value = v.value;
             post();
@@ -520,14 +536,14 @@ class RelocatablePtr : public BarrieredPtr<T>
     void post() {
 #ifdef JSGC_GENERATIONAL
         JS_ASSERT(this->value);
-        T::writeBarrierPostRelocate(this->value, &this->value);
+        InternalGCMethods<T>::postBarrierRelocate(&this->value);
 #endif
     }
     void relocate() {
 #ifdef JSGC_GENERATIONAL
         JS_ASSERT(this->value);
-        T::writeBarrierPostRemove(this->value, &this->value);
+        InternalGCMethods<T>::postBarrierRemove(&this->value);
 #endif
     }
 };
@@ -539,8 +555,8 @@ class RelocatablePtr : public BarrieredPtr<T>
 template <class T1, class T2>
 static inline void
 BarrieredSetPair(Zone *zone,
-                 HeapPtr<T1> &v1, T1 *val1,
-                 HeapPtr<T2> &v2, T2 *val2)
+                 HeapPtr<T1*> &v1, T1 *val1,
+                 HeapPtr<T2*> &v2, T2 *val2)
 {
     if (T1::needWriteBarrierPre(zone)) {
         v1.pre();
@@ -552,27 +568,45 @@ BarrieredSetPair(Zone *zone,
     v2.post();
 }
+class ArrayBufferObject;
+class NestedScopeObject;
 class Shape;
 class BaseShape;
-namespace types { struct TypeObject; }
+class UnownedBaseShape;
+namespace jit {
+class JitCode;
+}
+namespace types {
+struct TypeObject;
+struct TypeObjectAddendum;
+}
-typedef BarrieredPtr<JSObject> BarrieredPtrObject;
-typedef BarrieredPtr<JSScript> BarrieredPtrScript;
+typedef BarrieredPtr<JSObject*> BarrieredPtrObject;
+typedef BarrieredPtr<JSScript*> BarrieredPtrScript;
-typedef EncapsulatedPtr<JSObject> EncapsulatedPtrObject;
-typedef EncapsulatedPtr<JSScript> EncapsulatedPtrScript;
+typedef EncapsulatedPtr<JSObject*> EncapsulatedPtrObject;
+typedef EncapsulatedPtr<JSScript*> EncapsulatedPtrScript;
+typedef EncapsulatedPtr<jit::JitCode*> EncapsulatedPtrJitCode;
-typedef RelocatablePtr<JSObject> RelocatablePtrObject;
-typedef RelocatablePtr<JSScript> RelocatablePtrScript;
+typedef RelocatablePtr<JSObject*> RelocatablePtrObject;
+typedef RelocatablePtr<JSScript*> RelocatablePtrScript;
+typedef RelocatablePtr<NestedScopeObject*> RelocatablePtrNestedScopeObject;
-typedef HeapPtr<JSObject> HeapPtrObject;
-typedef HeapPtr<JSFunction> HeapPtrFunction;
-typedef HeapPtr<JSString> HeapPtrString;
-typedef HeapPtr<PropertyName> HeapPtrPropertyName;
-typedef HeapPtr<JSScript> HeapPtrScript;
-typedef HeapPtr<Shape> HeapPtrShape;
-typedef HeapPtr<BaseShape> HeapPtrBaseShape;
-typedef HeapPtr<types::TypeObject> HeapPtrTypeObject;
+typedef HeapPtr<ArrayBufferObject*> HeapPtrArrayBufferObject;
+typedef HeapPtr<JSObject*> HeapPtrObject;
+typedef HeapPtr<JSFunction*> HeapPtrFunction;
+typedef HeapPtr<JSAtom*> HeapPtrAtom;
+typedef HeapPtr<JSString*> HeapPtrString;
+typedef HeapPtr<JSFlatString*> HeapPtrFlatString;
+typedef HeapPtr<JSLinearString*> HeapPtrLinearString;
+typedef HeapPtr<PropertyName*> HeapPtrPropertyName;
+typedef HeapPtr<JSScript*> HeapPtrScript;
+typedef HeapPtr<Shape*> HeapPtrShape;
+typedef HeapPtr<BaseShape*> HeapPtrBaseShape;
+typedef HeapPtr<UnownedBaseShape*> HeapPtrUnownedBaseShape;
+typedef HeapPtr<types::TypeObject*> HeapPtrTypeObject;
+typedef HeapPtr<types::TypeObjectAddendum*> HeapPtrTypeObjectAddendum;
+typedef HeapPtr<jit::JitCode*> HeapPtrJitCode;
 /* Useful for hashtables with a HeapPtr as key. */
@@ -580,9 +614,9 @@ template <class T>
 struct HeapPtrHasher
 {
     typedef HeapPtr<T> Key;
-    typedef T *Lookup;
-    static HashNumber hash(Lookup obj) { return DefaultHasher<T *>::hash(obj); }
+    typedef T Lookup;
+    static HashNumber hash(Lookup obj) { return DefaultHasher<T>::hash(obj); }
     static bool match(const Key &k, Lookup l) { return k.get() == l; }
     static void rekey(Key &k, const Key& newKey) { k.unsafeSet(newKey); }
 };
@@ -595,9 +629,9 @@ template <class T>
 struct EncapsulatedPtrHasher
 {
     typedef EncapsulatedPtr<T> Key;
-    typedef T *Lookup;
-    static HashNumber hash(Lookup obj) { return DefaultHasher<T *>::hash(obj); }
+    typedef T Lookup;
+    static HashNumber hash(Lookup obj) { return DefaultHasher<T>::hash(obj); }
     static bool match(const Key &k, Lookup l) { return k.get() == l; }
     static void rekey(Key &k, const Key& newKey) { k.unsafeSet(newKey); }
 };
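
Because Lookup in the two hashers is now the template parameter T itself (already a pointer type such as JSObject *), callers keep passing raw pointers while the table stores the barriered wrappers. The sketch below is not the real js::HashMap machinery; it is a self-contained illustration of how a Key/Lookup policy of this shape gets consumed, with DemoCell, HeapPtrSketch and find() invented for the example.

#include <cstddef>
#include <vector>

// Illustrative stand-ins only: a "barriered" wrapper plus a hash policy shaped
// like HeapPtrHasher above (Key = the wrapper, Lookup = the raw pointer type T).
struct DemoCell {};

struct HeapPtrSketch
{
    DemoCell *value;
    DemoCell *get() const { return value; }
};

struct HeapPtrHasherSketch
{
    typedef HeapPtrSketch Key;
    typedef DemoCell *Lookup;   // after this patch, Lookup is T itself (already a pointer), not T *

    static size_t hash(Lookup l)              { return reinterpret_cast<size_t>(l) >> 3; }
    static bool match(const Key &k, Lookup l) { return k.get() == l; }
};

// Toy lookup loop that only demonstrates how a Key/Lookup policy is consumed:
// callers pass raw pointers, while storage holds the barriered wrappers.
template <typename Policy>
typename Policy::Key *
find(std::vector<typename Policy::Key> &entries, typename Policy::Lookup l)
{
    for (typename Policy::Key &k : entries) {
        if (Policy::match(k, l))
            return &k;
    }
    return nullptr;
}

int main()
{
    DemoCell a, b;
    std::vector<HeapPtrSketch> table = { {&a}, {&b} };
    HeapPtrSketch *hit = find<HeapPtrHasherSketch>(table, &b);  // look up by raw DemoCell *
    return hit ? 0 : 1;
}
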


@@ -263,7 +263,7 @@ MarkUnbarriered(JSTracer *trc, T **thingp, const char *name)
 template <typename T>
 static void
-Mark(JSTracer *trc, BarrieredPtr<T> *thing, const char *name)
+Mark(JSTracer *trc, BarrieredPtr<T*> *thing, const char *name)
 {
     trc->setTracingName(name);
     MarkInternal(trc, thing->unsafeGet());
@@ -306,7 +306,7 @@ MarkRoot(JSTracer *trc, T **thingp, const char *name)
 template <typename T>
 static void
-MarkRange(JSTracer *trc, size_t len, HeapPtr<T> *vec, const char *name)
+MarkRange(JSTracer *trc, size_t len, HeapPtr<T*> *vec, const char *name)
 {
     for (size_t i = 0; i < len; ++i) {
         if (vec[i].get()) {
@@ -402,7 +402,7 @@ UpdateIfRelocated(JSRuntime *rt, T **thingp)
 #define DeclMarkerImpl(base, type) \
 void \
-Mark##base(JSTracer *trc, BarrieredPtr<type> *thing, const char *name) \
+Mark##base(JSTracer *trc, BarrieredPtr<type*> *thing, const char *name) \
 { \
     Mark<type>(trc, thing, name); \
 } \
@@ -419,13 +419,13 @@ Mark##base##Unbarriered(JSTracer *trc, type **thingp, const char *name)
     MarkUnbarriered<type>(trc, thingp, name); \
 } \
 \
-/* Explicitly instantiate MarkUnbarriered<type>. It is referenced from */ \
+/* Explicitly instantiate MarkUnbarriered<type*>. It is referenced from */ \
 /* other translation units and the instantiation might otherwise get */ \
 /* inlined away. */ \
 template void MarkUnbarriered<type>(JSTracer *, type **, const char *); \
 \
 void \
-Mark##base##Range(JSTracer *trc, size_t len, HeapPtr<type> *vec, const char *name) \
+Mark##base##Range(JSTracer *trc, size_t len, HeapPtr<type*> *vec, const char *name) \
 { \
     MarkRange<type>(trc, len, vec, name); \
 } \
@@ -443,7 +443,7 @@ Is##base##Marked(type **thingp)
 } \
 \
 bool \
-Is##base##Marked(BarrieredPtr<type> *thingp) \
+Is##base##Marked(BarrieredPtr<type*> *thingp) \
 { \
     return IsMarked<type>(thingp->unsafeGet()); \
 } \
@@ -455,13 +455,13 @@ Is##base##AboutToBeFinalized(type **thingp)
 } \
 \
 bool \
-Is##base##AboutToBeFinalized(BarrieredPtr<type> *thingp) \
+Is##base##AboutToBeFinalized(BarrieredPtr<type*> *thingp) \
 { \
     return IsAboutToBeFinalized<type>(thingp->unsafeGet()); \
 } \
 \
 type * \
-Update##base##IfRelocated(JSRuntime *rt, BarrieredPtr<type> *thingp) \
+Update##base##IfRelocated(JSRuntime *rt, BarrieredPtr<type*> *thingp) \
 { \
     return UpdateIfRelocated<type>(rt, thingp->unsafeGet()); \
 } \
@@ -826,13 +826,6 @@ gc::MarkCrossCompartmentSlot(JSTracer *trc, JSObject *src, HeapSlot *dst, const
 /*** Special Marking ***/
-void
-gc::MarkObject(JSTracer *trc, HeapPtr<GlobalObject, JSScript *> *thingp, const char *name)
-{
-    trc->setTracingName(name);
-    MarkInternal(trc, thingp->unsafeGet());
-}
 void
 gc::MarkValueUnbarriered(JSTracer *trc, Value *v, const char *name)
 {


@@ -27,7 +27,7 @@ class ScopeObject;
 class Shape;
 class UnownedBaseShape;
-template<class, typename> class HeapPtr;
+template<class> class HeapPtr;
 namespace jit {
 class JitCode;
@@ -49,10 +49,10 @@ namespace gc {
  * these are the variants generated for JSObject. They are listed from most to
  * least desirable for use:
  *
- * MarkObject(JSTracer *trc, const HeapPtr<JSObject> &thing, const char *name);
+ * MarkObject(JSTracer *trc, const HeapPtrObject &thing, const char *name);
  *     This function should be used for marking JSObjects, in preference to all
- *     others below. Use it when you have HeapPtr<JSObject>, which
- *     automatically implements write barriers.
+ *     others below. Use it when you have HeapPtrObject, which automatically
+ *     implements write barriers.
  *
  * MarkObjectRoot(JSTracer *trc, JSObject *thing, const char *name);
  *     This function is only valid during the root marking phase of GC (i.e.,
@@ -88,16 +88,16 @@ namespace gc {
  */
 #define DeclMarker(base, type) \
-void Mark##base(JSTracer *trc, BarrieredPtr<type> *thing, const char *name); \
+void Mark##base(JSTracer *trc, BarrieredPtr<type*> *thing, const char *name); \
 void Mark##base##Root(JSTracer *trc, type **thingp, const char *name); \
 void Mark##base##Unbarriered(JSTracer *trc, type **thingp, const char *name); \
-void Mark##base##Range(JSTracer *trc, size_t len, HeapPtr<type> *thing, const char *name); \
+void Mark##base##Range(JSTracer *trc, size_t len, HeapPtr<type*> *thing, const char *name); \
 void Mark##base##RootRange(JSTracer *trc, size_t len, type **thing, const char *name); \
 bool Is##base##Marked(type **thingp); \
-bool Is##base##Marked(BarrieredPtr<type> *thingp); \
+bool Is##base##Marked(BarrieredPtr<type*> *thingp); \
 bool Is##base##AboutToBeFinalized(type **thingp); \
-bool Is##base##AboutToBeFinalized(BarrieredPtr<type> *thingp); \
-type *Update##base##IfRelocated(JSRuntime *rt, BarrieredPtr<type> *thingp); \
+bool Is##base##AboutToBeFinalized(BarrieredPtr<type*> *thingp); \
+type *Update##base##IfRelocated(JSRuntime *rt, BarrieredPtr<type*> *thingp); \
 type *Update##base##IfRelocated(JSRuntime *rt, type **thingp);
 DeclMarker(BaseShape, BaseShape)
@@ -239,13 +239,6 @@ MarkCrossCompartmentSlot(JSTracer *trc, JSObject *src, HeapSlot *dst_slot, const
 /*** Special Cases ***/
-/*
- * The unioned HeapPtr stored in script->globalObj needs special treatment to
- * typecheck correctly.
- */
-void
-MarkObject(JSTracer *trc, HeapPtr<GlobalObject, JSScript *> *thingp, const char *name);
 /*
  * MarkChildren<JSObject> is exposed solely for preWriteBarrier on
  * JSObject::TradeGuts. It should not be considered external interface.
@@ -290,7 +283,7 @@ Mark(JSTracer *trc, BarrieredPtrScript *o, const char *name)
 }
 inline void
-Mark(JSTracer *trc, HeapPtr<jit::JitCode> *code, const char *name)
+Mark(JSTracer *trc, HeapPtrJitCode *code, const char *name)
 {
     MarkJitCode(trc, code, name);
 }
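
As the comment block at the top of this header explains, the barriered Mark##base variants are the preferred entry points, and the raw-pointer Root variants are reserved for root marking. A hypothetical fragment (not compilable on its own; MyThing and its HeapPtrObject obj_ member are invented) showing the usual call pattern, modelled on NativeIterator::mark() later in this patch:

// Hypothetical trace hook: obj_ is assumed to be a HeapPtrObject member, so the
// barriered MarkObject overload (generated by DeclMarker above) is the right call.
void
MyThing::trace(JSTracer *trc)
{
    if (obj_)
        MarkObject(trc, &obj_, "MyThing::obj_");
}
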


@@ -455,7 +455,7 @@ class AsmJSModule
     StaticLinkData staticLinkData_;
     bool dynamicallyLinked_;
     bool loadedFromCache_;
-    HeapPtr<ArrayBufferObject> maybeHeap_;
+    HeapPtrArrayBufferObject maybeHeap_;
     // The next two fields need to be kept out of the Pod as they depend on the
     // position of the module within the ScriptSource and thus aren't invariant


@@ -107,7 +107,7 @@ struct BaselineScript
   private:
     // Code pointer containing the actual method.
-    HeapPtr<JitCode> method_;
+    HeapPtrJitCode method_;
     // For heavyweight scripts, template objects to use for the call object and
     // decl env object (linked via the call object's enclosing scope).


@@ -169,10 +169,10 @@ struct IonScript
 {
   private:
     // Code pointer containing the actual method.
-    EncapsulatedPtr<JitCode> method_;
+    EncapsulatedPtrJitCode method_;
     // Deoptimization table used by this method.
-    EncapsulatedPtr<JitCode> deoptTable_;
+    EncapsulatedPtrJitCode deoptTable_;
     // Entrypoint for OSR, or nullptr.
     jsbytecode *osrPc_;


@@ -987,7 +987,7 @@ struct TypeObject : gc::BarrieredCell<TypeObject>
      * some number of properties to the object in a definite order
      * before the object escapes.
      */
-    HeapPtr<TypeObjectAddendum> addendum;
+    HeapPtrTypeObjectAddendum addendum;
   public:
     TypeObjectFlags flags() const {


@@ -55,7 +55,7 @@ static const gc::AllocKind ITERATOR_FINALIZE_KIND = gc::FINALIZE_OBJECT2_BACKGRO
 void
 NativeIterator::mark(JSTracer *trc)
 {
-    for (HeapPtr<JSFlatString> *str = begin(); str < end(); str++)
+    for (HeapPtrFlatString *str = begin(); str < end(); str++)
         MarkString(trc, str, "prop");
     if (obj)
         MarkObject(trc, &obj, "obj");
@@ -426,7 +426,7 @@ NativeIterator::allocateIterator(JSContext *cx, uint32_t slength, const AutoIdVe
     if (!ni)
         return nullptr;
     AutoValueVector strings(cx);
-    ni->props_array = ni->props_cursor = (HeapPtr<JSFlatString> *) (ni + 1);
+    ni->props_array = ni->props_cursor = (HeapPtrFlatString *) (ni + 1);
     ni->props_end = ni->props_array + plength;
     if (plength) {
         for (size_t i = 0; i < plength; i++) {
@@ -1044,9 +1044,9 @@ SuppressDeletedPropertyHelper(JSContext *cx, HandleObject obj, StringPredicate p
         /* This only works for identified suppressed keys, not values. */
         if (ni->isKeyIter() && ni->obj == obj && ni->props_cursor < ni->props_end) {
             /* Check whether id is still to come. */
-            HeapPtr<JSFlatString> *props_cursor = ni->current();
-            HeapPtr<JSFlatString> *props_end = ni->end();
-            for (HeapPtr<JSFlatString> *idp = props_cursor; idp < props_end; ++idp) {
+            HeapPtrFlatString *props_cursor = ni->current();
+            HeapPtrFlatString *props_end = ni->end();
+            for (HeapPtrFlatString *idp = props_cursor; idp < props_end; ++idp) {
                 if (predicate(*idp)) {
                     /*
                      * Check whether another property along the prototype chain
@@ -1091,7 +1091,7 @@ SuppressDeletedPropertyHelper(JSContext *cx, HandleObject obj, StringPredicate p
                     if (idp == props_cursor) {
                         ni->incCursor();
                     } else {
-                        for (HeapPtr<JSFlatString> *p = idp; p + 1 != props_end; p++)
+                        for (HeapPtrFlatString *p = idp; p + 1 != props_end; p++)
                             *p = *(p + 1);
                         ni->props_end = ni->end() - 1;


@@ -31,9 +31,9 @@ struct NativeIterator
 {
     HeapPtrObject obj; // Object being iterated.
     JSObject *iterObj_; // Internal iterator object.
-    HeapPtr<JSFlatString> *props_array;
-    HeapPtr<JSFlatString> *props_cursor;
-    HeapPtr<JSFlatString> *props_end;
+    HeapPtrFlatString *props_array;
+    HeapPtrFlatString *props_cursor;
+    HeapPtrFlatString *props_end;
     Shape **shapes_array;
     uint32_t shapes_length;
     uint32_t shapes_key;
@@ -49,11 +49,11 @@ struct NativeIterator
         return (flags & JSITER_FOREACH) == 0;
     }
-    inline HeapPtr<JSFlatString> *begin() const {
+    inline HeapPtrFlatString *begin() const {
         return props_array;
     }
-    inline HeapPtr<JSFlatString> *end() const {
+    inline HeapPtrFlatString *end() const {
         return props_end;
     }
@@ -64,7 +64,7 @@ struct NativeIterator
     JSObject *iterObj() const {
         return iterObj_;
     }
-    HeapPtr<JSFlatString> *current() const {
+    HeapPtrFlatString *current() const {
         JS_ASSERT(props_cursor < props_end);
         return props_cursor;
     }


@@ -438,7 +438,7 @@ class JSObject : public js::ObjectImpl
     inline js::types::TypeObject* getType(JSContext *cx);
     js::types::TypeObject* uninlinedGetType(JSContext *cx);
-    const js::HeapPtr<js::types::TypeObject> &typeFromGC() const {
+    const js::HeapPtrTypeObject &typeFromGC() const {
         /* Direct field access for use by GC. */
         return type_;
     }


@@ -851,7 +851,7 @@ js::XDRScript(XDRState<mode> *xdr, HandleObject enclosingScope, HandleScript enc
      * after the enclosing block has been XDR'd.
      */
     for (i = 0; i != nobjects; ++i) {
-        HeapPtr<JSObject> *objp = &script->objects()->vector[i];
+        HeapPtrObject *objp = &script->objects()->vector[i];
         XDRClassKind classk;
         if (mode == XDR_ENCODE) {
@@ -2360,13 +2360,13 @@ JSScript::partiallyInit(ExclusiveContext *cx, HandleScript script, uint32_t ncon
     if (nobjects != 0) {
         script->objects()->length = nobjects;
-        script->objects()->vector = (HeapPtr<JSObject> *)cursor;
+        script->objects()->vector = (HeapPtrObject *)cursor;
         cursor += nobjects * sizeof(script->objects()->vector[0]);
     }
     if (nregexps != 0) {
         script->regexps()->length = nregexps;
-        script->regexps()->vector = (HeapPtr<JSObject> *)cursor;
+        script->regexps()->vector = (HeapPtrObject *)cursor;
         cursor += nregexps * sizeof(script->regexps()->vector[0]);
     }
@@ -3026,13 +3026,13 @@ js::CloneScript(JSContext *cx, HandleObject enclosingScope, HandleFunction fun,
             JS_ASSERT_IF(vector[i].isMarkable(), vector[i].toString()->isAtom());
     }
     if (nobjects != 0) {
-        HeapPtrObject *vector = Rebase<HeapPtr<JSObject> >(dst, src, src->objects()->vector);
+        HeapPtrObject *vector = Rebase<HeapPtrObject>(dst, src, src->objects()->vector);
         dst->objects()->vector = vector;
         for (unsigned i = 0; i < nobjects; ++i)
             vector[i].init(objects[i]);
     }
     if (nregexps != 0) {
-        HeapPtrObject *vector = Rebase<HeapPtr<JSObject> >(dst, src, src->regexps()->vector);
+        HeapPtrObject *vector = Rebase<HeapPtrObject>(dst, src, src->regexps()->vector);
         dst->regexps()->vector = vector;
         for (unsigned i = 0; i < nregexps; ++i)
             vector[i].init(regexps[i]);


@@ -180,7 +180,7 @@ class Bindings
     friend class BindingIter;
     friend class AliasedFormalIter;
-    HeapPtr<Shape> callObjShape_;
+    HeapPtrShape callObjShape_;
     uintptr_t bindingArrayAndFlag_;
     uint16_t numArgs_;
     uint16_t numBlockScoped_;


@@ -20,19 +20,19 @@ class RegExpStatics
 {
     /* The latest RegExp output, set after execution. */
     VectorMatchPairs matches;
-    HeapPtr<JSLinearString> matchesInput;
+    HeapPtrLinearString matchesInput;
     /*
      * The previous RegExp input, used to resolve lazy state.
      * A raw RegExpShared cannot be stored because it may be in
      * a different compartment via evalcx().
      */
-    HeapPtr<JSAtom> lazySource;
+    HeapPtrAtom lazySource;
     RegExpFlag lazyFlags;
     size_t lazyIndex;
     /* The latest RegExp input, set before execution. */
-    HeapPtr<JSString> pendingInput;
+    HeapPtrString pendingInput;
     RegExpFlag flags;
     /*


@@ -1563,7 +1563,7 @@ js_IsDebugScopeSlow(ProxyObject *proxy)
 /* static */ MOZ_ALWAYS_INLINE void
 DebugScopes::proxiedScopesPostWriteBarrier(JSRuntime *rt, ObjectWeakMap *map,
-                                           const EncapsulatedPtr<JSObject> &key)
+                                           const EncapsulatedPtrObject &key)
 {
 #ifdef JSGC_GENERATIONAL
     /*


@@ -726,8 +726,8 @@ class ScopeIterVal
     friend class DebugScopes;
     AbstractFramePtr frame_;
-    RelocatablePtr<JSObject> cur_;
-    RelocatablePtr<NestedScopeObject> staticScope_;
+    RelocatablePtrObject cur_;
+    RelocatablePtrNestedScopeObject staticScope_;
     ScopeIter::Type type_;
     bool hasScopeObject_;


@@ -602,7 +602,7 @@ class BaseShape : public gc::BarrieredCell<BaseShape>
     };
     /* For owned BaseShapes, the canonical unowned BaseShape. */
-    HeapPtr<UnownedBaseShape> unowned_;
+    HeapPtrUnownedBaseShape unowned_;
     /* For owned BaseShapes, the shape's shape table. */
     ShapeTable *table_;


@@ -925,8 +925,6 @@ NameToId(PropertyName *name)
     return NON_INTEGER_ATOM_TO_JSID(name);
 }
-typedef HeapPtr<JSAtom> HeapPtrAtom;
 class AutoNameVector : public AutoVectorRooter<PropertyName *>
 {
     typedef AutoVectorRooter<PropertyName *> BaseType;