Mirror of https://gitlab.winehq.org/wine/wine-gecko.git (synced 2024-09-13 09:24:08 -07:00)

Bug 989414 - Convert BarrieredPtr to take a T* as template parameter instead of T; r=jonco

--HG--
extra : rebase_source : 2394326ee9de3485ffd087944092bff9c669b1f5

Parent: 168d95537c
Commit: 3e883f51c5
@@ -37,7 +37,7 @@ typedef InlineMap<JSAtom *, DefinitionList, 24> AtomDefnListMap;
 * the list and map->vector must point to pre-allocated memory.
 */
void
-InitAtomMap(AtomIndexMap *indices, HeapPtr<JSAtom> *atoms);
+InitAtomMap(AtomIndexMap *indices, HeapPtrAtom *atoms);

/*
 * A pool that permits the reuse of the backing storage for the defn, index, or
@@ -150,6 +150,10 @@
 * js/public/RootingAPI.h.
 */

+class JSAtom;
+class JSFlatString;
+class JSLinearString;
+
namespace js {

class PropertyName;
@@ -285,46 +289,54 @@ ZoneOfValueFromAnyThread(const JS::Value &value)
    return static_cast<js::gc::Cell *>(value.toGCThing())->tenuredZoneFromAnyThread();
}

+template <typename T>
+struct InternalGCMethods {};
+
+template <typename T>
+struct InternalGCMethods<T *>
+{
+    static void preBarrier(T *v) { T::writeBarrierPre(v); }
+#ifdef JSGC_GENERATIONAL
+    static void postBarrier(T **vp) { T::writeBarrierPost(*vp, vp); }
+    static void postBarrierRelocate(T **vp) { T::writeBarrierPostRelocate(*vp, vp); }
+    static void postBarrierRemove(T **vp) { T::writeBarrierPostRemove(*vp, vp); }
+#endif
+};
+
/*
 * Base class for barriered pointer types.
 */
-template <class T, typename Unioned = uintptr_t>
+template <class T>
class BarrieredPtr
{
  protected:
-    union {
-        T *value;
-        Unioned other;
-    };
+    T value;

-    BarrieredPtr(T *v) : value(v) {}
+    BarrieredPtr(T v) : value(v) {}
    ~BarrieredPtr() { pre(); }

  public:
-    void init(T *v) {
-        JS_ASSERT(!IsPoisonedPtr<T>(v));
+    void init(T v) {
+        JS_ASSERT(!GCMethods<T>::poisoned(v));
        this->value = v;
    }

-    /* Use this if the automatic coercion to T* isn't working. */
-    T *get() const { return value; }
+    /* Use this if the automatic coercion to T isn't working. */
+    T get() const { return value; }

    /*
     * Use these if you want to change the value without invoking the barrier.
     * Obviously this is dangerous unless you know the barrier is not needed.
     */
-    T **unsafeGet() { return &value; }
-    void unsafeSet(T *v) { value = v; }
+    T *unsafeGet() { return &value; }
+    void unsafeSet(T v) { value = v; }

-    Unioned *unsafeGetUnioned() { return &other; }
-
-    T &operator*() const { return *value; }
-    T *operator->() const { return value; }
+    T operator->() const { return value; }

-    operator T*() const { return value; }
+    operator T() const { return value; }

  protected:
-    void pre() { T::writeBarrierPre(value); }
+    void pre() { InternalGCMethods<T>::preBarrier(value); }
};

/*
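The hunk above is the core of the change: BarrieredPtr is now parameterized on the full pointer type and routes its barrier through the InternalGCMethods trait specialization for T*. As a reading aid, here is a minimal, self-contained sketch of that trait-dispatch shape; every name in it (Cell, GCMethodsMock, Barriered, preBarrierHits) is an illustrative stand-in, not SpiderMonkey API.

```cpp
// Standalone mock of the trait-dispatch pattern above; all names are invented.
#include <cassert>

static int preBarrierHits = 0;

struct Cell {
    // Plays the role of T::writeBarrierPre in the real code.
    static void writeBarrierPre(Cell *) { ++preBarrierHits; }
};

// Primary template is empty; only pointer types get barrier methods.
template <typename T>
struct GCMethodsMock {};

template <typename T>
struct GCMethodsMock<T *> {
    static void preBarrier(T *v) { T::writeBarrierPre(v); }
};

// Barriered is parameterized on the full pointer type (T = Cell*),
// matching the new BarrieredPtr<T*> shape in the diff.
template <class T>
class Barriered {
    T value;
  public:
    explicit Barriered(T v) : value(v) {}
    ~Barriered() { GCMethodsMock<T>::preBarrier(value); }
    T get() const { return value; }
    operator T() const { return value; }
};

int main() {
    Cell c;
    {
        Barriered<Cell *> p(&c);   // template argument is the pointer type
        assert(p.get() == &c);
    }                              // destructor fires the pre-barrier once
    assert(preBarrierHits == 1);
    return 0;
}
```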
@@ -333,14 +345,14 @@ class BarrieredPtr
 * should be used in all cases that do not require explicit low-level control
 * of moving behavior, e.g. for HashMap keys.
 */
-template <class T, typename Unioned = uintptr_t>
-class EncapsulatedPtr : public BarrieredPtr<T, Unioned>
+template <class T>
+class EncapsulatedPtr : public BarrieredPtr<T>
{
  public:
-    EncapsulatedPtr() : BarrieredPtr<T, Unioned>(nullptr) {}
-    EncapsulatedPtr(T *v) : BarrieredPtr<T, Unioned>(v) {}
-    explicit EncapsulatedPtr(const EncapsulatedPtr<T, Unioned> &v)
-      : BarrieredPtr<T, Unioned>(v.value) {}
+    EncapsulatedPtr() : BarrieredPtr<T>(nullptr) {}
+    EncapsulatedPtr(T v) : BarrieredPtr<T>(v) {}
+    explicit EncapsulatedPtr(const EncapsulatedPtr<T> &v)
+      : BarrieredPtr<T>(v.value) {}

    /* Use to set the pointer to nullptr. */
    void clear() {
@@ -348,16 +360,16 @@ class EncapsulatedPtr : public BarrieredPtr<T, Unioned>
        this->value = nullptr;
    }

-    EncapsulatedPtr<T, Unioned> &operator=(T *v) {
+    EncapsulatedPtr<T> &operator=(T v) {
        this->pre();
-        JS_ASSERT(!IsPoisonedPtr<T>(v));
+        JS_ASSERT(!GCMethods<T>::poisoned(v));
        this->value = v;
        return *this;
    }

-    EncapsulatedPtr<T, Unioned> &operator=(const EncapsulatedPtr<T> &v) {
+    EncapsulatedPtr<T> &operator=(const EncapsulatedPtr<T> &v) {
        this->pre();
-        JS_ASSERT(!IsPoisonedPtr<T>(v.value));
+        JS_ASSERT(!GCMethods<T>::poisoned(v.value));
        this->value = v.value;
        return *this;
    }
@@ -375,45 +387,49 @@ class EncapsulatedPtr : public BarrieredPtr<T, Unioned>
 * stored in memory that has GC lifetime. HeapPtr must not be used in contexts
 * where it may be implicitly moved or deleted, e.g. most containers.
 */
-template <class T, class Unioned = uintptr_t>
-class HeapPtr : public BarrieredPtr<T, Unioned>
+template <class T>
+class HeapPtr : public BarrieredPtr<T>
{
  public:
-    HeapPtr() : BarrieredPtr<T, Unioned>(nullptr) {}
-    explicit HeapPtr(T *v) : BarrieredPtr<T, Unioned>(v) { post(); }
-    explicit HeapPtr(const HeapPtr<T, Unioned> &v) : BarrieredPtr<T, Unioned>(v) { post(); }
+    HeapPtr() : BarrieredPtr<T>(nullptr) {}
+    explicit HeapPtr(T v) : BarrieredPtr<T>(v) { post(); }
+    explicit HeapPtr(const HeapPtr<T> &v) : BarrieredPtr<T>(v) { post(); }

-    void init(T *v) {
-        JS_ASSERT(!IsPoisonedPtr<T>(v));
+    void init(T v) {
+        JS_ASSERT(!GCMethods<T>::poisoned(v));
        this->value = v;
        post();
    }

-    HeapPtr<T, Unioned> &operator=(T *v) {
+    HeapPtr<T> &operator=(T v) {
        this->pre();
-        JS_ASSERT(!IsPoisonedPtr<T>(v));
+        JS_ASSERT(!GCMethods<T>::poisoned(v));
        this->value = v;
        post();
        return *this;
    }

-    HeapPtr<T, Unioned> &operator=(const HeapPtr<T, Unioned> &v) {
+    HeapPtr<T> &operator=(const HeapPtr<T> &v) {
        this->pre();
-        JS_ASSERT(!IsPoisonedPtr<T>(v.value));
+        JS_ASSERT(!GCMethods<T>::poisoned(v.value));
        this->value = v.value;
        post();
        return *this;
    }

  protected:
-    void post() { T::writeBarrierPost(this->value, (void *)&this->value); }
+    void post() {
+#ifdef JSGC_GENERATIONAL
+        InternalGCMethods<T>::postBarrier(&this->value);
+#endif
+    }

    /* Make this friend so it can access pre() and post(). */
    template <class T1, class T2>
    friend inline void
    BarrieredSetPair(Zone *zone,
-                     HeapPtr<T1> &v1, T1 *val1,
-                     HeapPtr<T2> &v2, T2 *val2);
+                     HeapPtr<T1*> &v1, T1 *val1,
+                     HeapPtr<T2*> &v2, T2 *val2);

  private:
    /*
@@ -424,7 +440,7 @@ class HeapPtr : public BarrieredPtr<T, Unioned>
     * semantics, so this does not completely prevent invalid uses.
     */
    HeapPtr(HeapPtr<T> &&) MOZ_DELETE;
-    HeapPtr<T, Unioned> &operator=(HeapPtr<T, Unioned> &&) MOZ_DELETE;
+    HeapPtr<T> &operator=(HeapPtr<T> &&) MOZ_DELETE;
};

/*
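For context on what HeapPtr's assignment operators above are doing, this is a rough, self-contained mock of the pre-barrier-then-post-barrier ordering. HeapPtrMock, Thing, and the log are invented for the sketch and do not mirror the real barrier signatures exactly.

```cpp
// Illustrative-only mock of the pre/post barrier ordering followed by
// HeapPtr::operator= in the diff above; every name here is invented.
#include <cassert>
#include <string>
#include <vector>

static std::vector<std::string> log;

struct Thing {
    static void writeBarrierPre(Thing *) { log.push_back("pre(old)"); }
    static void writeBarrierPost(Thing *, void *) { log.push_back("post(new)"); }
};

template <class T>
class HeapPtrMock {
    T *value;
  public:
    explicit HeapPtrMock(T *v) : value(v) { T::writeBarrierPost(v, &value); }
    HeapPtrMock &operator=(T *v) {
        T::writeBarrierPre(value);      // pre-barrier on the *old* value
        value = v;
        T::writeBarrierPost(v, &value); // post-barrier on the *new* location
        return *this;
    }
};

int main() {
    Thing a, b;
    HeapPtrMock<Thing> p(&a);           // constructor runs the post-barrier
    p = &b;                             // assignment runs pre, then post
    assert(log.size() == 3 && log[1] == "pre(old)" && log[2] == "post(new)");
    return 0;
}
```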
@@ -469,7 +485,7 @@ class RelocatablePtr : public BarrieredPtr<T>
{
  public:
    RelocatablePtr() : BarrieredPtr<T>(nullptr) {}
-    explicit RelocatablePtr(T *v) : BarrieredPtr<T>(v) {
+    explicit RelocatablePtr(T v) : BarrieredPtr<T>(v) {
        if (v)
            post();
    }
@@ -490,9 +506,9 @@ class RelocatablePtr : public BarrieredPtr<T>
            relocate();
    }

-    RelocatablePtr<T> &operator=(T *v) {
+    RelocatablePtr<T> &operator=(T v) {
        this->pre();
-        JS_ASSERT(!IsPoisonedPtr<T>(v));
+        JS_ASSERT(!GCMethods<T>::poisoned(v));
        if (v) {
            this->value = v;
            post();
@@ -505,7 +521,7 @@ class RelocatablePtr : public BarrieredPtr<T>

    RelocatablePtr<T> &operator=(const RelocatablePtr<T> &v) {
        this->pre();
-        JS_ASSERT(!IsPoisonedPtr<T>(v.value));
+        JS_ASSERT(!GCMethods<T>::poisoned(v.value));
        if (v.value) {
            this->value = v.value;
            post();
@@ -520,14 +536,14 @@ class RelocatablePtr : public BarrieredPtr<T>
    void post() {
#ifdef JSGC_GENERATIONAL
        JS_ASSERT(this->value);
-        T::writeBarrierPostRelocate(this->value, &this->value);
+        InternalGCMethods<T>::postBarrierRelocate(&this->value);
#endif
    }

    void relocate() {
#ifdef JSGC_GENERATIONAL
        JS_ASSERT(this->value);
-        T::writeBarrierPostRemove(this->value, &this->value);
+        InternalGCMethods<T>::postBarrierRemove(&this->value);
#endif
    }
};
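RelocatablePtr's post()/relocate() pair above amounts to "tell the GC about this slot's address, then withdraw it before the slot disappears". A toy model of that discipline follows; StoreBufferMock and RelocatableMock are invented stand-ins, not the engine's store buffer.

```cpp
// Rough sketch of the register/unregister discipline expressed by
// RelocatablePtr's post() and relocate() above; all names are invented.
#include <cassert>
#include <set>

struct StoreBufferMock {
    std::set<void **> slots;
    void add(void **slot) { slots.insert(slot); }
    void remove(void **slot) { slots.erase(slot); }
};

static StoreBufferMock buffer;

class RelocatableMock {
    void *value;
  public:
    explicit RelocatableMock(void *v) : value(v) {
        if (value)
            buffer.add(&value);       // post(): the GC now knows this slot
    }
    ~RelocatableMock() {
        if (value)
            buffer.remove(&value);    // relocate(): the slot is going away
    }
};

int main() {
    int dummy;
    {
        RelocatableMock r(&dummy);
        assert(buffer.slots.size() == 1);
    }
    assert(buffer.slots.empty());     // destructor removed the slot again
    return 0;
}
```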
@@ -539,8 +555,8 @@ class RelocatablePtr : public BarrieredPtr<T>
template <class T1, class T2>
static inline void
BarrieredSetPair(Zone *zone,
-                 HeapPtr<T1> &v1, T1 *val1,
-                 HeapPtr<T2> &v2, T2 *val2)
+                 HeapPtr<T1*> &v1, T1 *val1,
+                 HeapPtr<T2*> &v2, T2 *val2)
{
    if (T1::needWriteBarrierPre(zone)) {
        v1.pre();
@@ -552,27 +568,45 @@ BarrieredSetPair(Zone *zone,
    v2.post();
}

+class ArrayBufferObject;
+class NestedScopeObject;
class Shape;
class BaseShape;
-namespace types { struct TypeObject; }
+class UnownedBaseShape;
+namespace jit {
+class JitCode;
+}
+namespace types {
+struct TypeObject;
+struct TypeObjectAddendum;
+}

-typedef BarrieredPtr<JSObject> BarrieredPtrObject;
-typedef BarrieredPtr<JSScript> BarrieredPtrScript;
+typedef BarrieredPtr<JSObject*> BarrieredPtrObject;
+typedef BarrieredPtr<JSScript*> BarrieredPtrScript;

-typedef EncapsulatedPtr<JSObject> EncapsulatedPtrObject;
-typedef EncapsulatedPtr<JSScript> EncapsulatedPtrScript;
+typedef EncapsulatedPtr<JSObject*> EncapsulatedPtrObject;
+typedef EncapsulatedPtr<JSScript*> EncapsulatedPtrScript;
+typedef EncapsulatedPtr<jit::JitCode*> EncapsulatedPtrJitCode;

-typedef RelocatablePtr<JSObject> RelocatablePtrObject;
-typedef RelocatablePtr<JSScript> RelocatablePtrScript;
+typedef RelocatablePtr<JSObject*> RelocatablePtrObject;
+typedef RelocatablePtr<JSScript*> RelocatablePtrScript;
+typedef RelocatablePtr<NestedScopeObject*> RelocatablePtrNestedScopeObject;

-typedef HeapPtr<JSObject> HeapPtrObject;
-typedef HeapPtr<JSFunction> HeapPtrFunction;
-typedef HeapPtr<JSString> HeapPtrString;
-typedef HeapPtr<PropertyName> HeapPtrPropertyName;
-typedef HeapPtr<JSScript> HeapPtrScript;
-typedef HeapPtr<Shape> HeapPtrShape;
-typedef HeapPtr<BaseShape> HeapPtrBaseShape;
-typedef HeapPtr<types::TypeObject> HeapPtrTypeObject;
+typedef HeapPtr<ArrayBufferObject*> HeapPtrArrayBufferObject;
+typedef HeapPtr<JSObject*> HeapPtrObject;
+typedef HeapPtr<JSFunction*> HeapPtrFunction;
+typedef HeapPtr<JSAtom*> HeapPtrAtom;
+typedef HeapPtr<JSString*> HeapPtrString;
+typedef HeapPtr<JSFlatString*> HeapPtrFlatString;
+typedef HeapPtr<JSLinearString*> HeapPtrLinearString;
+typedef HeapPtr<PropertyName*> HeapPtrPropertyName;
+typedef HeapPtr<JSScript*> HeapPtrScript;
+typedef HeapPtr<Shape*> HeapPtrShape;
+typedef HeapPtr<BaseShape*> HeapPtrBaseShape;
+typedef HeapPtr<UnownedBaseShape*> HeapPtrUnownedBaseShape;
+typedef HeapPtr<types::TypeObject*> HeapPtrTypeObject;
+typedef HeapPtr<types::TypeObjectAddendum*> HeapPtrTypeObjectAddendum;
+typedef HeapPtr<jit::JitCode*> HeapPtrJitCode;

/* Useful for hashtables with a HeapPtr as key. */

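The typedef block above is where the new spelling becomes visible to the rest of the engine: the pointer now appears in the template argument (HeapPtr<JSObject*>) rather than being implied by the wrapper. A tiny sketch of that shape, using mock names only, assuming nothing beyond what the diff shows:

```cpp
// Minimal sketch of the typedef shape after this change: the pointer-ness now
// lives in the template argument. JSObjectMock and HeapPtrMock are stand-ins.
template <class T> class HeapPtrMock { T value = nullptr; };

struct JSObjectMock {};

// The old style would have been HeapPtrMock<JSObjectMock>; the new style
// names the full pointer type in the template argument.
typedef HeapPtrMock<JSObjectMock*> HeapPtrObjectMock;

int main() {
    HeapPtrObjectMock obj;  // wraps a JSObjectMock*, as the typedef spells out
    (void)obj;
    return 0;
}
```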
@@ -580,9 +614,9 @@ template <class T>
struct HeapPtrHasher
{
    typedef HeapPtr<T> Key;
-    typedef T *Lookup;
+    typedef T Lookup;

-    static HashNumber hash(Lookup obj) { return DefaultHasher<T *>::hash(obj); }
+    static HashNumber hash(Lookup obj) { return DefaultHasher<T>::hash(obj); }
    static bool match(const Key &k, Lookup l) { return k.get() == l; }
    static void rekey(Key &k, const Key& newKey) { k.unsafeSet(newKey); }
};
@@ -595,9 +629,9 @@ template <class T>
struct EncapsulatedPtrHasher
{
    typedef EncapsulatedPtr<T> Key;
-    typedef T *Lookup;
+    typedef T Lookup;

-    static HashNumber hash(Lookup obj) { return DefaultHasher<T *>::hash(obj); }
+    static HashNumber hash(Lookup obj) { return DefaultHasher<T>::hash(obj); }
    static bool match(const Key &k, Lookup l) { return k.get() == l; }
    static void rekey(Key &k, const Key& newKey) { k.unsafeSet(newKey); }
};
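With the pointer type as the template parameter, the hashers above can use T itself as the Lookup type. The following toy analogue shows the same Lookup-is-the-pointer idea; it uses invented names and std::unordered_map instead of js::HashMap, so it is a sketch rather than the real hashing policy.

```cpp
// Toy analogue of the hasher change above: T is the pointer type, so the
// Lookup is simply T rather than T*. All names here are invented.
#include <cassert>
#include <unordered_map>

struct Cell {};

template <class T>            // T is a pointer type, e.g. Cell*
struct PtrHasherMock {
    typedef T Lookup;         // was "typedef T *Lookup" in the old scheme
    size_t operator()(Lookup p) const {
        return std::hash<T>()(p);
    }
};

int main() {
    Cell a;
    // A map keyed on raw Cell* using the mock hasher; in the real code the
    // key would be a HeapPtr<Cell*> and the Lookup a plain Cell*.
    std::unordered_map<Cell *, int, PtrHasherMock<Cell *>> map;
    map[&a] = 7;
    assert(map.find(&a)->second == 7);
    return 0;
}
```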
@@ -263,7 +263,7 @@ MarkUnbarriered(JSTracer *trc, T **thingp, const char *name)

template <typename T>
static void
-Mark(JSTracer *trc, BarrieredPtr<T> *thing, const char *name)
+Mark(JSTracer *trc, BarrieredPtr<T*> *thing, const char *name)
{
    trc->setTracingName(name);
    MarkInternal(trc, thing->unsafeGet());
@@ -306,7 +306,7 @@ MarkRoot(JSTracer *trc, T **thingp, const char *name)

template <typename T>
static void
-MarkRange(JSTracer *trc, size_t len, HeapPtr<T> *vec, const char *name)
+MarkRange(JSTracer *trc, size_t len, HeapPtr<T*> *vec, const char *name)
{
    for (size_t i = 0; i < len; ++i) {
        if (vec[i].get()) {
@@ -402,7 +402,7 @@ UpdateIfRelocated(JSRuntime *rt, T **thingp)

#define DeclMarkerImpl(base, type) \
void \
-Mark##base(JSTracer *trc, BarrieredPtr<type> *thing, const char *name) \
+Mark##base(JSTracer *trc, BarrieredPtr<type*> *thing, const char *name) \
{ \
    Mark<type>(trc, thing, name); \
} \
@@ -419,13 +419,13 @@ Mark##base##Unbarriered(JSTracer *trc, type **thingp, const char *name)
    MarkUnbarriered<type>(trc, thingp, name); \
} \
\
-/* Explicitly instantiate MarkUnbarriered<type>. It is referenced from */ \
+/* Explicitly instantiate MarkUnbarriered<type*>. It is referenced from */ \
/* other translation units and the instantiation might otherwise get */ \
/* inlined away. */ \
template void MarkUnbarriered<type>(JSTracer *, type **, const char *); \
\
void \
-Mark##base##Range(JSTracer *trc, size_t len, HeapPtr<type> *vec, const char *name) \
+Mark##base##Range(JSTracer *trc, size_t len, HeapPtr<type*> *vec, const char *name) \
{ \
    MarkRange<type>(trc, len, vec, name); \
} \
@@ -443,7 +443,7 @@ Is##base##Marked(type **thingp)
} \
\
bool \
-Is##base##Marked(BarrieredPtr<type> *thingp) \
+Is##base##Marked(BarrieredPtr<type*> *thingp) \
{ \
    return IsMarked<type>(thingp->unsafeGet()); \
} \
@@ -455,13 +455,13 @@ Is##base##AboutToBeFinalized(type **thingp)
} \
\
bool \
-Is##base##AboutToBeFinalized(BarrieredPtr<type> *thingp) \
+Is##base##AboutToBeFinalized(BarrieredPtr<type*> *thingp) \
{ \
    return IsAboutToBeFinalized<type>(thingp->unsafeGet()); \
} \
\
type * \
-Update##base##IfRelocated(JSRuntime *rt, BarrieredPtr<type> *thingp) \
+Update##base##IfRelocated(JSRuntime *rt, BarrieredPtr<type*> *thingp) \
{ \
    return UpdateIfRelocated<type>(rt, thingp->unsafeGet()); \
} \
@@ -826,13 +826,6 @@ gc::MarkCrossCompartmentSlot(JSTracer *trc, JSObject *src, HeapSlot *dst, const

/*** Special Marking ***/

-void
-gc::MarkObject(JSTracer *trc, HeapPtr<GlobalObject, JSScript *> *thingp, const char *name)
-{
-    trc->setTracingName(name);
-    MarkInternal(trc, thingp->unsafeGet());
-}
-
void
gc::MarkValueUnbarriered(JSTracer *trc, Value *v, const char *name)
{
@@ -27,7 +27,7 @@ class ScopeObject;
class Shape;
class UnownedBaseShape;

-template<class, typename> class HeapPtr;
+template<class> class HeapPtr;

namespace jit {
class JitCode;
@@ -49,10 +49,10 @@ namespace gc {
 * these are the variants generated for JSObject. They are listed from most to
 * least desirable for use:
 *
- * MarkObject(JSTracer *trc, const HeapPtr<JSObject> &thing, const char *name);
+ * MarkObject(JSTracer *trc, const HeapPtrObject &thing, const char *name);
 *     This function should be used for marking JSObjects, in preference to all
- *     others below. Use it when you have HeapPtr<JSObject>, which
- *     automatically implements write barriers.
+ *     others below. Use it when you have HeapPtrObject, which automatically
+ *     implements write barriers.
 *
 * MarkObjectRoot(JSTracer *trc, JSObject *thing, const char *name);
 *     This function is only valid during the root marking phase of GC (i.e.,
@@ -88,16 +88,16 @@ namespace gc {
 */

#define DeclMarker(base, type) \
-void Mark##base(JSTracer *trc, BarrieredPtr<type> *thing, const char *name); \
+void Mark##base(JSTracer *trc, BarrieredPtr<type*> *thing, const char *name); \
void Mark##base##Root(JSTracer *trc, type **thingp, const char *name); \
void Mark##base##Unbarriered(JSTracer *trc, type **thingp, const char *name); \
-void Mark##base##Range(JSTracer *trc, size_t len, HeapPtr<type> *thing, const char *name); \
+void Mark##base##Range(JSTracer *trc, size_t len, HeapPtr<type*> *thing, const char *name); \
void Mark##base##RootRange(JSTracer *trc, size_t len, type **thing, const char *name); \
bool Is##base##Marked(type **thingp); \
-bool Is##base##Marked(BarrieredPtr<type> *thingp); \
+bool Is##base##Marked(BarrieredPtr<type*> *thingp); \
bool Is##base##AboutToBeFinalized(type **thingp); \
-bool Is##base##AboutToBeFinalized(BarrieredPtr<type> *thingp); \
-type *Update##base##IfRelocated(JSRuntime *rt, BarrieredPtr<type> *thingp); \
+bool Is##base##AboutToBeFinalized(BarrieredPtr<type*> *thingp); \
+type *Update##base##IfRelocated(JSRuntime *rt, BarrieredPtr<type*> *thingp); \
type *Update##base##IfRelocated(JSRuntime *rt, type **thingp);

DeclMarker(BaseShape, BaseShape)
@@ -239,13 +239,6 @@ MarkCrossCompartmentSlot(JSTracer *trc, JSObject *src, HeapSlot *dst_slot, const

/*** Special Cases ***/

-/*
- * The unioned HeapPtr stored in script->globalObj needs special treatment to
- * typecheck correctly.
- */
-void
-MarkObject(JSTracer *trc, HeapPtr<GlobalObject, JSScript *> *thingp, const char *name);
-
/*
 * MarkChildren<JSObject> is exposed solely for preWriteBarrier on
 * JSObject::TradeGuts. It should not be considered external interface.
@@ -290,7 +283,7 @@ Mark(JSTracer *trc, BarrieredPtrScript *o, const char *name)
}

inline void
-Mark(JSTracer *trc, HeapPtr<jit::JitCode> *code, const char *name)
+Mark(JSTracer *trc, HeapPtrJitCode *code, const char *name)
{
    MarkJitCode(trc, code, name);
}
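The DeclMarker macro above stamps out a family of per-type marking declarations via token pasting; the DeclMarkerImpl hunks earlier do the same for the definitions. A standalone illustration of that X-macro pattern follows, with mock types and none of the real js::gc entry points.

```cpp
// Standalone illustration of the DeclMarker-style token-pasting pattern:
// one macro generates a family of per-type functions. All names are mocks.
#include <cassert>

struct ObjectMock { bool marked = false; };
struct ScriptMock { bool marked = false; };

#define DECL_MARKER_MOCK(base, type)                           \
    void Mark##base(type *thing) { thing->marked = true; }     \
    bool Is##base##Marked(type *thing) { return thing->marked; }

DECL_MARKER_MOCK(Object, ObjectMock)
DECL_MARKER_MOCK(Script, ScriptMock)

#undef DECL_MARKER_MOCK

int main() {
    ObjectMock o;
    ScriptMock s;
    MarkObject(&o);                 // generated by the macro for ObjectMock
    assert(IsObjectMarked(&o));
    assert(!IsScriptMarked(&s));    // generated by the macro for ScriptMock
    return 0;
}
```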
@@ -455,7 +455,7 @@ class AsmJSModule
    StaticLinkData staticLinkData_;
    bool dynamicallyLinked_;
    bool loadedFromCache_;
-    HeapPtr<ArrayBufferObject> maybeHeap_;
+    HeapPtrArrayBufferObject maybeHeap_;

    // The next two fields need to be kept out of the Pod as they depend on the
    // position of the module within the ScriptSource and thus aren't invariant
@@ -107,7 +107,7 @@ struct BaselineScript

  private:
    // Code pointer containing the actual method.
-    HeapPtr<JitCode> method_;
+    HeapPtrJitCode method_;

    // For heavyweight scripts, template objects to use for the call object and
    // decl env object (linked via the call object's enclosing scope).
@@ -169,10 +169,10 @@ struct IonScript
{
  private:
    // Code pointer containing the actual method.
-    EncapsulatedPtr<JitCode> method_;
+    EncapsulatedPtrJitCode method_;

    // Deoptimization table used by this method.
-    EncapsulatedPtr<JitCode> deoptTable_;
+    EncapsulatedPtrJitCode deoptTable_;

    // Entrypoint for OSR, or nullptr.
    jsbytecode *osrPc_;
@@ -987,7 +987,7 @@ struct TypeObject : gc::BarrieredCell<TypeObject>
     * some number of properties to the object in a definite order
     * before the object escapes.
     */
-    HeapPtr<TypeObjectAddendum> addendum;
+    HeapPtrTypeObjectAddendum addendum;
  public:

    TypeObjectFlags flags() const {
@@ -55,7 +55,7 @@ static const gc::AllocKind ITERATOR_FINALIZE_KIND = gc::FINALIZE_OBJECT2_BACKGRO
void
NativeIterator::mark(JSTracer *trc)
{
-    for (HeapPtr<JSFlatString> *str = begin(); str < end(); str++)
+    for (HeapPtrFlatString *str = begin(); str < end(); str++)
        MarkString(trc, str, "prop");
    if (obj)
        MarkObject(trc, &obj, "obj");
@@ -426,7 +426,7 @@ NativeIterator::allocateIterator(JSContext *cx, uint32_t slength, const AutoIdVe
    if (!ni)
        return nullptr;
    AutoValueVector strings(cx);
-    ni->props_array = ni->props_cursor = (HeapPtr<JSFlatString> *) (ni + 1);
+    ni->props_array = ni->props_cursor = (HeapPtrFlatString *) (ni + 1);
    ni->props_end = ni->props_array + plength;
    if (plength) {
        for (size_t i = 0; i < plength; i++) {
@@ -1044,9 +1044,9 @@ SuppressDeletedPropertyHelper(JSContext *cx, HandleObject obj, StringPredicate p
        /* This only works for identified suppressed keys, not values. */
        if (ni->isKeyIter() && ni->obj == obj && ni->props_cursor < ni->props_end) {
            /* Check whether id is still to come. */
-            HeapPtr<JSFlatString> *props_cursor = ni->current();
-            HeapPtr<JSFlatString> *props_end = ni->end();
-            for (HeapPtr<JSFlatString> *idp = props_cursor; idp < props_end; ++idp) {
+            HeapPtrFlatString *props_cursor = ni->current();
+            HeapPtrFlatString *props_end = ni->end();
+            for (HeapPtrFlatString *idp = props_cursor; idp < props_end; ++idp) {
                if (predicate(*idp)) {
                    /*
                     * Check whether another property along the prototype chain
@@ -1091,7 +1091,7 @@ SuppressDeletedPropertyHelper(JSContext *cx, HandleObject obj, StringPredicate p
                    if (idp == props_cursor) {
                        ni->incCursor();
                    } else {
-                        for (HeapPtr<JSFlatString> *p = idp; p + 1 != props_end; p++)
+                        for (HeapPtrFlatString *p = idp; p + 1 != props_end; p++)
                            *p = *(p + 1);
                        ni->props_end = ni->end() - 1;

@@ -31,9 +31,9 @@ struct NativeIterator
{
    HeapPtrObject obj; // Object being iterated.
    JSObject *iterObj_; // Internal iterator object.
-    HeapPtr<JSFlatString> *props_array;
-    HeapPtr<JSFlatString> *props_cursor;
-    HeapPtr<JSFlatString> *props_end;
+    HeapPtrFlatString *props_array;
+    HeapPtrFlatString *props_cursor;
+    HeapPtrFlatString *props_end;
    Shape **shapes_array;
    uint32_t shapes_length;
    uint32_t shapes_key;
@@ -49,11 +49,11 @@ struct NativeIterator
        return (flags & JSITER_FOREACH) == 0;
    }

-    inline HeapPtr<JSFlatString> *begin() const {
+    inline HeapPtrFlatString *begin() const {
        return props_array;
    }

-    inline HeapPtr<JSFlatString> *end() const {
+    inline HeapPtrFlatString *end() const {
        return props_end;
    }

@@ -64,7 +64,7 @@ struct NativeIterator
    JSObject *iterObj() const {
        return iterObj_;
    }
-    HeapPtr<JSFlatString> *current() const {
+    HeapPtrFlatString *current() const {
        JS_ASSERT(props_cursor < props_end);
        return props_cursor;
    }
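NativeIterator above keeps its property names in a flat array of barriered string pointers and walks it with begin()/end()/current(). A simplified model of that walk follows, using plain const char* slots instead of HeapPtrFlatString; all names in it are invented for illustration.

```cpp
// Simplified model of the props_array/props_cursor/props_end walk used by
// NativeIterator above; purely illustrative, none of these names are real.
#include <cassert>
#include <cstring>

struct IterMock {
    const char **props_array;
    const char **props_cursor;
    const char **props_end;

    const char **begin() const { return props_array; }
    const char **end() const { return props_end; }
    const char **current() const { return props_cursor; }
};

int main() {
    const char *props[] = { "length", "name", "proto" };
    IterMock it{ props, props, props + 3 };

    // Walk every slot the way NativeIterator::mark() walks its strings.
    int count = 0;
    for (const char **p = it.begin(); p < it.end(); ++p)
        ++count;
    assert(count == 3);
    assert(std::strcmp(*it.current(), "length") == 0);
    return 0;
}
```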
@@ -438,7 +438,7 @@ class JSObject : public js::ObjectImpl
    inline js::types::TypeObject* getType(JSContext *cx);
    js::types::TypeObject* uninlinedGetType(JSContext *cx);

-    const js::HeapPtr<js::types::TypeObject> &typeFromGC() const {
+    const js::HeapPtrTypeObject &typeFromGC() const {
        /* Direct field access for use by GC. */
        return type_;
    }
@@ -851,7 +851,7 @@ js::XDRScript(XDRState<mode> *xdr, HandleObject enclosingScope, HandleScript enc
     * after the enclosing block has been XDR'd.
     */
    for (i = 0; i != nobjects; ++i) {
-        HeapPtr<JSObject> *objp = &script->objects()->vector[i];
+        HeapPtrObject *objp = &script->objects()->vector[i];
        XDRClassKind classk;

        if (mode == XDR_ENCODE) {
@@ -2360,13 +2360,13 @@ JSScript::partiallyInit(ExclusiveContext *cx, HandleScript script, uint32_t ncon

    if (nobjects != 0) {
        script->objects()->length = nobjects;
-        script->objects()->vector = (HeapPtr<JSObject> *)cursor;
+        script->objects()->vector = (HeapPtrObject *)cursor;
        cursor += nobjects * sizeof(script->objects()->vector[0]);
    }

    if (nregexps != 0) {
        script->regexps()->length = nregexps;
-        script->regexps()->vector = (HeapPtr<JSObject> *)cursor;
+        script->regexps()->vector = (HeapPtrObject *)cursor;
        cursor += nregexps * sizeof(script->regexps()->vector[0]);
    }

@@ -3026,13 +3026,13 @@ js::CloneScript(JSContext *cx, HandleObject enclosingScope, HandleFunction fun,
        JS_ASSERT_IF(vector[i].isMarkable(), vector[i].toString()->isAtom());
    }
    if (nobjects != 0) {
-        HeapPtrObject *vector = Rebase<HeapPtr<JSObject> >(dst, src, src->objects()->vector);
+        HeapPtrObject *vector = Rebase<HeapPtrObject>(dst, src, src->objects()->vector);
        dst->objects()->vector = vector;
        for (unsigned i = 0; i < nobjects; ++i)
            vector[i].init(objects[i]);
    }
    if (nregexps != 0) {
-        HeapPtrObject *vector = Rebase<HeapPtr<JSObject> >(dst, src, src->regexps()->vector);
+        HeapPtrObject *vector = Rebase<HeapPtrObject>(dst, src, src->regexps()->vector);
        dst->regexps()->vector = vector;
        for (unsigned i = 0; i < nregexps; ++i)
            vector[i].init(regexps[i]);
@@ -180,7 +180,7 @@ class Bindings
    friend class BindingIter;
    friend class AliasedFormalIter;

-    HeapPtr<Shape> callObjShape_;
+    HeapPtrShape callObjShape_;
    uintptr_t bindingArrayAndFlag_;
    uint16_t numArgs_;
    uint16_t numBlockScoped_;
@@ -20,19 +20,19 @@ class RegExpStatics
{
    /* The latest RegExp output, set after execution. */
    VectorMatchPairs matches;
-    HeapPtr<JSLinearString> matchesInput;
+    HeapPtrLinearString matchesInput;

    /*
     * The previous RegExp input, used to resolve lazy state.
     * A raw RegExpShared cannot be stored because it may be in
     * a different compartment via evalcx().
     */
-    HeapPtr<JSAtom> lazySource;
+    HeapPtrAtom lazySource;
    RegExpFlag lazyFlags;
    size_t lazyIndex;

    /* The latest RegExp input, set before execution. */
-    HeapPtr<JSString> pendingInput;
+    HeapPtrString pendingInput;
    RegExpFlag flags;

    /*
@@ -1563,7 +1563,7 @@ js_IsDebugScopeSlow(ProxyObject *proxy)

/* static */ MOZ_ALWAYS_INLINE void
DebugScopes::proxiedScopesPostWriteBarrier(JSRuntime *rt, ObjectWeakMap *map,
-                                           const EncapsulatedPtr<JSObject> &key)
+                                           const EncapsulatedPtrObject &key)
{
#ifdef JSGC_GENERATIONAL
    /*
@@ -726,8 +726,8 @@ class ScopeIterVal
    friend class DebugScopes;

    AbstractFramePtr frame_;
-    RelocatablePtr<JSObject> cur_;
-    RelocatablePtr<NestedScopeObject> staticScope_;
+    RelocatablePtrObject cur_;
+    RelocatablePtrNestedScopeObject staticScope_;
    ScopeIter::Type type_;
    bool hasScopeObject_;

@@ -602,7 +602,7 @@ class BaseShape : public gc::BarrieredCell<BaseShape>
    };

    /* For owned BaseShapes, the canonical unowned BaseShape. */
-    HeapPtr<UnownedBaseShape> unowned_;
+    HeapPtrUnownedBaseShape unowned_;

    /* For owned BaseShapes, the shape's shape table. */
    ShapeTable *table_;
@@ -925,8 +925,6 @@ NameToId(PropertyName *name)
    return NON_INTEGER_ATOM_TO_JSID(name);
}

-typedef HeapPtr<JSAtom> HeapPtrAtom;
-
class AutoNameVector : public AutoVectorRooter<PropertyName *>
{
    typedef AutoVectorRooter<PropertyName *> BaseType;