mirror of https://gitlab.winehq.org/wine/wine-gecko.git, synced 2024-09-13 09:24:08 -07:00

Bug 1161726 - Use a custom tracer for tenuring to avoid the indirect calls; r=jonco

parent 333aa5d482
commit 72db04da5b
@@ -98,12 +98,14 @@ class JS_PUBLIC_API(JSTracer)
     WeakMapTraceKind eagerlyTraceWeakMaps() const { return eagerlyTraceWeakMaps_; }
 
     // An intermediate state on the road from C to C++ style dispatch.
-    enum TracerKindTag {
-        MarkingTracer,
-        CallbackTracer
+    enum class TracerKindTag {
+        Marking,
+        Tenuring,
+        Callback
     };
-    bool isMarkingTracer() const { return tag_ == MarkingTracer; }
-    bool isCallbackTracer() const { return tag_ == CallbackTracer; }
+    bool isMarkingTracer() const { return tag_ == TracerKindTag::Marking; }
+    bool isTenuringTracer() const { return tag_ == TracerKindTag::Tenuring; }
+    bool isCallbackTracer() const { return tag_ == TracerKindTag::Callback; }
     inline JS::CallbackTracer* asCallbackTracer();
 
   protected:
@@ -129,7 +131,7 @@ class JS_PUBLIC_API(CallbackTracer) : public JSTracer
   public:
     CallbackTracer(JSRuntime* rt, JSTraceCallback traceCallback,
                    WeakMapTraceKind weakTraceKind = TraceWeakMapValues)
-      : JSTracer(rt, JSTracer::CallbackTracer, weakTraceKind), callback(traceCallback),
+      : JSTracer(rt, JSTracer::TracerKindTag::Callback, weakTraceKind), callback(traceCallback),
         contextName_(nullptr), contextIndex_(InvalidIndex), contextFunctor_(nullptr)
     {}
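These first two hunks carry the API-visible part of the change: the tracer tag becomes a scoped enum class and gains a Tenuring member, so a tracer's kind can be tested with a cheap tag comparison. A standalone sketch (hypothetical names, not the SpiderMonkey sources) of what enum class buys here -- the old unscoped enumerator CallbackTracer shared its name with the JS::CallbackTracer class, while scoped enumerators stay behind TracerKindTag:: and never convert implicitly:

// Standalone sketch, not SpiderMonkey code: scoped vs. unscoped enum tags.
enum TagOld { MarkingTracer, CallbackTracer };        // enumerators leak into the
                                                      // enclosing scope and can
                                                      // collide with class names
enum class TracerKindTag { Marking, Tenuring, Callback };

int main()
{
    TagOld oldTag = MarkingTracer;                    // unqualified name, int-convertible
    int n = oldTag;                                   // compiles: implicit conversion
    TracerKindTag tag = TracerKindTag::Tenuring;      // always qualified
    // int m = tag;                                   // would not compile: scoped enums
    //                                                // have no implicit conversions
    return n + (tag == TracerKindTag::Tenuring ? 0 : 1);
}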
@@ -8,6 +8,7 @@
 
 #include "mozilla/DebugOnly.h"
 #include "mozilla/IntegerRange.h"
+#include "mozilla/ReentrancyGuard.h"
 #include "mozilla/TypeTraits.h"
 
 #include "jsgc.h"
@@ -33,6 +34,7 @@
 using namespace js;
 using namespace js::gc;
 
+using mozilla::ArrayLength;
 using mozilla::DebugOnly;
 using mozilla::IsBaseOf;
 using mozilla::IsSame;
@@ -172,7 +174,7 @@ js::CheckTracedThing(JSTracer* trc, T thing)
     if (IsInsideNursery(thing))
         return;
 
-    MOZ_ASSERT_IF(!MovingTracer::IsMovingTracer(trc) && !Nursery::IsMinorCollectionTracer(trc),
+    MOZ_ASSERT_IF(!MovingTracer::IsMovingTracer(trc) && !trc->isTenuringTracer(),
                   !IsForwarded(thing));
 
     /*
@@ -426,6 +428,7 @@ ConvertToBase(T* thingp)
 
 template <typename T> void DispatchToTracer(JSTracer* trc, T* thingp, const char* name);
 template <typename T> T DoCallback(JS::CallbackTracer* trc, T* thingp, const char* name);
+template <typename T> void DoTenuring(TenuringTracer& mover, T* thingp);
 template <typename T> void DoMarking(GCMarker* gcmarker, T thing);
 
 template <typename T>
@@ -594,6 +597,9 @@ DispatchToTracer(JSTracer* trc, T* thingp, const char* name)
 #undef IS_SAME_TYPE_OR
     if (trc->isMarkingTracer())
         return DoMarking(static_cast<GCMarker*>(trc), *thingp);
+    if (trc->isTenuringTracer())
+        return DoTenuring(*static_cast<TenuringTracer*>(trc), thingp);
+    MOZ_ASSERT(trc->isCallbackTracer());
     DoCallback(trc->asCallbackTracer(), thingp, name);
 }
 
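This hunk is where the indirect call disappears for minor GC: DispatchToTracer tests the tag and calls DoTenuring directly, instead of routing every edge through a generic trace callback. A minimal compilable sketch of the dispatch shape, with a hypothetical simplified Tracer type:

// Standalone sketch of tag-based dispatch vs. a callback indirect call.
#include <cstdio>

enum class TracerKindTag { Marking, Tenuring, Callback };

struct Tracer {
    TracerKindTag tag_;
    void (*callback)(Tracer*, void** thingp);  // used only by Callback tracers
};

static void DoMarking(Tracer*, void** thingp)  { std::printf("mark %p\n", *thingp); }
static void DoTenuring(Tracer*, void** thingp) { std::printf("tenure %p\n", *thingp); }

static void DispatchToTracer(Tracer* trc, void** thingp)
{
    if (trc->tag_ == TracerKindTag::Marking)
        return DoMarking(trc, thingp);      // direct call: predictable, inlinable
    if (trc->tag_ == TracerKindTag::Tenuring)
        return DoTenuring(trc, thingp);     // direct call replaces the old callback
    trc->callback(trc, thingp);             // indirect call only for the generic case
}

int main()
{
    void* thing = nullptr;
    Tracer t{TracerKindTag::Tenuring, nullptr};
    DispatchToTracer(&t, &thing);           // takes the direct DoTenuring path
    return 0;
}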
@@ -1525,7 +1531,7 @@ MarkStack::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const
  * so we delay visting entries.
  */
 GCMarker::GCMarker(JSRuntime* rt)
-  : JSTracer(rt, JSTracer::MarkingTracer, DoNotTraceWeakMaps),
+  : JSTracer(rt, JSTracer::TracerKindTag::Marking, DoNotTraceWeakMaps),
     stack(size_t(-1)),
     color(BLACK),
     unmarkedArenaStackTop(nullptr),
@@ -1718,11 +1724,415 @@ GCMarker::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const
     return size;
 }
 
-void
-js::SetMarkStackLimit(JSRuntime* rt, size_t limit)
-{
-    rt->gc.setMarkStackLimit(limit);
-}
+/*** Tenuring Tracer *****************************************************************************/
+
+template <typename T>
+void
+DoTenuring(TenuringTracer& mover, T* thingp)
+{
+    // Non-JSObject types are not in the nursery, so do not need to be tenured.
+    MOZ_ASSERT(!IsInsideNursery(*thingp));
+}
+
+template <>
+void
+DoTenuring(TenuringTracer& mover, JSObject** objp)
+{
+    // Only roots and store buffer entries should be marked via this path; all
+    // internal pointers are marked via collectToFixedPoint.
+    MOZ_ASSERT(!mover.nursery().isInside(objp));
+
+    if (IsInsideNursery(*objp) && !mover.nursery().getForwardedPointer(objp))
+        *objp = mover.moveToTenured(*objp);
+}
+
+template <>
+void
+DoTenuring<Value>(TenuringTracer& mover, Value* valp)
+{
+    if (valp->isObject()) {
+        JSObject *obj = &valp->toObject();
+        DoTenuring(mover, &obj);
+        valp->setObject(*obj);
+    } else {
+        MOZ_ASSERT_IF(valp->isMarkable(), !IsInsideNursery(valp->toGCThing()));
+    }
+}
+
+template <>
+void
+DoTenuring<jsid>(TenuringTracer& mover, jsid* idp)
+{
+    MOZ_ASSERT_IF(JSID_IS_GCTHING(*idp), !IsInsideNursery(JSID_TO_GCTHING(*idp).asCell()));
+}
+
+template <typename T>
+void
+StoreBuffer::MonoTypeBuffer<T>::mark(StoreBuffer* owner, TenuringTracer& mover)
+{
+    mozilla::ReentrancyGuard g(*owner);
+    MOZ_ASSERT(owner->isEnabled());
+    MOZ_ASSERT(stores_.initialized());
+    sinkStores(owner);
+    for (typename StoreSet::Range r = stores_.all(); !r.empty(); r.popFront())
+        r.front().mark(mover);
+}
+
+void
+StoreBuffer::SlotsEdge::mark(TenuringTracer& mover) const
+{
+    NativeObject* obj = object();
+
+    // Beware JSObject::swap exchanging a native object for a non-native one.
+    if (!obj->isNative())
+        return;
+
+    if (IsInsideNursery(obj))
+        return;
+
+    if (kind() == ElementKind) {
+        int32_t initLen = obj->getDenseInitializedLength();
+        int32_t clampedStart = Min(start_, initLen);
+        int32_t clampedEnd = Min(start_ + count_, initLen);
+        TraceRange(&mover, clampedEnd - clampedStart,
+                   static_cast<HeapSlot*>(obj->getDenseElements() + clampedStart), "element");
+    } else {
+        int32_t start = Min(uint32_t(start_), obj->slotSpan());
+        int32_t end = Min(uint32_t(start_) + count_, obj->slotSpan());
+        MOZ_ASSERT(end >= start);
+        TraceObjectSlots(&mover, obj, start, end - start);
+    }
+}
+
+void
+StoreBuffer::WholeCellEdges::mark(TenuringTracer& mover) const
+{
+    MOZ_ASSERT(edge->isTenured());
+    JSGCTraceKind kind = GetGCThingTraceKind(edge);
+    if (kind <= JSTRACE_OBJECT) {
+        JSObject* object = static_cast<JSObject*>(edge);
+        if (object->is<ArgumentsObject>())
+            ArgumentsObject::trace(&mover, object);
+        // FIXME: bug 1161664 -- call the inline path below, now that it is accessable.
+        object->traceChildren(&mover);
+        return;
+    }
+    MOZ_ASSERT(kind == JSTRACE_JITCODE);
+    static_cast<jit::JitCode*>(edge)->traceChildren(&mover);
+}
+
+void
+StoreBuffer::CellPtrEdge::mark(TenuringTracer& mover) const
+{
+    if (!*edge)
+        return;
+
+    MOZ_ASSERT(GetGCThingTraceKind(*edge) == JSTRACE_OBJECT);
+    DoTenuring(mover, reinterpret_cast<JSObject**>(edge));
+}
+
+void
+StoreBuffer::ValueEdge::mark(TenuringTracer& mover) const
+{
+    if (deref())
+        DoTenuring(mover, edge);
+}
+
+/* Insert the given relocation entry into the list of things to visit. */
+void
+TenuringTracer::insertIntoFixupList(RelocationOverlay* entry) {
+    *tail = entry;
+    tail = &entry->next_;
+    *tail = nullptr;
+}
+
+JSObject*
+TenuringTracer::moveToTenured(JSObject* obj) {
+    return (JSObject*)nursery_.moveToTenured(*this, obj);
+}
+
+void*
+js::Nursery::moveToTenured(TenuringTracer& mover, JSObject* src)
+{
+    AllocKind dstKind = src->allocKindForTenure(*this);
+    Zone* zone = src->zone();
+    JSObject* dst = reinterpret_cast<JSObject*>(allocateFromTenured(zone, dstKind));
+    if (!dst)
+        CrashAtUnhandlableOOM("Failed to allocate object while tenuring.");
+
+    mover.tenuredSize += moveObjectToTenured(mover, dst, src, dstKind);
+
+    RelocationOverlay* overlay = RelocationOverlay::fromCell(src);
+    overlay->forwardTo(dst);
+    mover.insertIntoFixupList(overlay);
+
+    TracePromoteToTenured(src, dst);
+    return static_cast<void*>(dst);
+}
+
+MOZ_ALWAYS_INLINE TenuredCell*
+js::Nursery::allocateFromTenured(Zone* zone, AllocKind thingKind)
+{
+    TenuredCell* t = zone->arenas.allocateFromFreeList(thingKind, Arena::thingSize(thingKind));
+    if (t)
+        return t;
+    zone->arenas.checkEmptyFreeList(thingKind);
+    AutoMaybeStartBackgroundAllocation maybeStartBackgroundAllocation;
+    return zone->arenas.allocateFromArena(zone, thingKind, maybeStartBackgroundAllocation);
+}
+
+// Structure for counting how many times objects in a particular group have
+// been tenured during a minor collection.
+struct TenureCount
+{
+    ObjectGroup* group;
+    int count;
+};
+
+// Keep rough track of how many times we tenure objects in particular groups
+// during minor collections, using a fixed size hash for efficiency at the cost
+// of potential collisions.
+struct Nursery::TenureCountCache
+{
+    TenureCount entries[16];
+
+    TenureCountCache() { PodZero(this); }
+
+    TenureCount& findEntry(ObjectGroup* group) {
+        return entries[PointerHasher<ObjectGroup*, 3>::hash(group) % ArrayLength(entries)];
+    }
+};
+
+void
+js::Nursery::collectToFixedPoint(TenuringTracer& mover, TenureCountCache& tenureCounts)
+{
+    for (RelocationOverlay* p = mover.head; p; p = p->next()) {
+        JSObject* obj = static_cast<JSObject*>(p->forwardingAddress());
+        traceObject(mover, obj);
+
+        TenureCount& entry = tenureCounts.findEntry(obj->group());
+        if (entry.group == obj->group()) {
+            entry.count++;
+        } else if (!entry.group) {
+            entry.group = obj->group();
+            entry.count = 1;
+        }
+    }
+}
+
+MOZ_ALWAYS_INLINE void
+js::Nursery::traceObject(TenuringTracer& mover, JSObject* obj)
+{
+    const Class* clasp = obj->getClass();
+    if (clasp->trace) {
+        if (clasp->trace == InlineTypedObject::obj_trace) {
+            TypeDescr* descr = &obj->as<InlineTypedObject>().typeDescr();
+            if (descr->hasTraceList()) {
+                markTraceList(mover, descr->traceList(),
+                              obj->as<InlineTypedObject>().inlineTypedMem());
+            }
+            return;
+        }
+        if (clasp == &UnboxedPlainObject::class_) {
+            JSObject** pexpando = obj->as<UnboxedPlainObject>().addressOfExpando();
+            if (*pexpando)
+                markObject(mover, pexpando);
+            const UnboxedLayout& layout = obj->as<UnboxedPlainObject>().layoutDontCheckGeneration();
+            if (layout.traceList()) {
+                markTraceList(mover, layout.traceList(),
+                              obj->as<UnboxedPlainObject>().data());
+            }
+            return;
+        }
+        clasp->trace(&mover, obj);
+    }
+
+    MOZ_ASSERT(obj->isNative() == clasp->isNative());
+    if (!clasp->isNative())
+        return;
+    NativeObject* nobj = &obj->as<NativeObject>();
+
+    // Note: the contents of copy on write elements pointers are filled in
+    // during parsing and cannot contain nursery pointers.
+    if (!nobj->hasEmptyElements() && !nobj->denseElementsAreCopyOnWrite())
+        markSlots(mover, nobj->getDenseElements(), nobj->getDenseInitializedLength());
+
+    HeapSlot* fixedStart;
+    HeapSlot* fixedEnd;
+    HeapSlot* dynStart;
+    HeapSlot* dynEnd;
+    nobj->getSlotRange(0, nobj->slotSpan(), &fixedStart, &fixedEnd, &dynStart, &dynEnd);
+    markSlots(mover, fixedStart, fixedEnd);
+    markSlots(mover, dynStart, dynEnd);
+}
+
+MOZ_ALWAYS_INLINE void
+js::Nursery::markSlots(TenuringTracer& mover, HeapSlot* vp, uint32_t nslots)
+{
+    markSlots(mover, vp, vp + nslots);
+}
+
+MOZ_ALWAYS_INLINE void
+js::Nursery::markSlots(TenuringTracer& mover, HeapSlot* vp, HeapSlot* end)
+{
+    for (; vp != end; ++vp)
+        markSlot(mover, vp);
+}
+
+MOZ_ALWAYS_INLINE void
+js::Nursery::markSlot(TenuringTracer& mover, HeapSlot* slotp)
+{
+    if (!slotp->isObject())
+        return;
+
+    JSObject* obj = &slotp->toObject();
+    if (markObject(mover, &obj))
+        slotp->unsafeGet()->setObject(*obj);
+}
+
+MOZ_ALWAYS_INLINE void
+js::Nursery::markTraceList(TenuringTracer& mover, const int32_t* traceList, uint8_t* memory)
+{
+    while (*traceList != -1) {
+        // Strings are not in the nursery and do not need tracing.
+        traceList++;
+    }
+    traceList++;
+    while (*traceList != -1) {
+        JSObject** pobj = reinterpret_cast<JSObject **>(memory + *traceList);
+        markObject(mover, pobj);
+        traceList++;
+    }
+    traceList++;
+    while (*traceList != -1) {
+        HeapSlot* pslot = reinterpret_cast<HeapSlot *>(memory + *traceList);
+        markSlot(mover, pslot);
+        traceList++;
+    }
+}
+
+MOZ_ALWAYS_INLINE bool
+js::Nursery::markObject(TenuringTracer& mover, JSObject** pobj)
+{
+    if (!IsInsideNursery(*pobj))
+        return false;
+
+    if (getForwardedPointer(pobj))
+        return true;
+
+    *pobj = static_cast<JSObject*>(moveToTenured(mover, *pobj));
+    return true;
+}
+
+MOZ_ALWAYS_INLINE size_t
+js::Nursery::moveObjectToTenured(TenuringTracer& mover,
+                                 JSObject* dst, JSObject* src, AllocKind dstKind)
+{
+    size_t srcSize = Arena::thingSize(dstKind);
+    size_t tenuredSize = srcSize;
+
+    /*
+     * Arrays do not necessarily have the same AllocKind between src and dst.
+     * We deal with this by copying elements manually, possibly re-inlining
+     * them if there is adequate room inline in dst.
+     *
+     * For Arrays we're reducing tenuredSize to the smaller srcSize
+     * because moveElementsToTenured() accounts for all Array elements,
+     * even if they are inlined.
+     */
+    if (src->is<ArrayObject>())
+        tenuredSize = srcSize = sizeof(NativeObject);
+
+    js_memcpy(dst, src, srcSize);
+    if (src->isNative()) {
+        NativeObject* ndst = &dst->as<NativeObject>();
+        NativeObject* nsrc = &src->as<NativeObject>();
+        tenuredSize += moveSlotsToTenured(ndst, nsrc, dstKind);
+        tenuredSize += moveElementsToTenured(ndst, nsrc, dstKind);
+
+        // The shape's list head may point into the old object. This can only
+        // happen for dictionaries, which are native objects.
+        if (&nsrc->shape_ == ndst->shape_->listp) {
+            MOZ_ASSERT(nsrc->shape_->inDictionary());
+            ndst->shape_->listp = &ndst->shape_;
+        }
+    }
+
+    if (src->is<InlineTypedObject>()) {
+        InlineTypedObject::objectMovedDuringMinorGC(&mover, dst, src);
+    } else if (src->is<UnboxedArrayObject>()) {
+        tenuredSize += UnboxedArrayObject::objectMovedDuringMinorGC(&mover, dst, src, dstKind);
+    } else {
+        // Objects with JSCLASS_SKIP_NURSERY_FINALIZE need to be handled above
+        // to ensure any additional nursery buffers they hold are moved.
+        MOZ_ASSERT(!(src->getClass()->flags & JSCLASS_SKIP_NURSERY_FINALIZE));
+    }
+
+    return tenuredSize;
+}
+
+MOZ_ALWAYS_INLINE size_t
+js::Nursery::moveSlotsToTenured(NativeObject* dst, NativeObject* src, AllocKind dstKind)
+{
+    /* Fixed slots have already been copied over. */
+    if (!src->hasDynamicSlots())
+        return 0;
+
+    if (!isInside(src->slots_)) {
+        removeMallocedBuffer(src->slots_);
+        return 0;
+    }
+
+    Zone* zone = src->zone();
+    size_t count = src->numDynamicSlots();
+    dst->slots_ = zone->pod_malloc<HeapSlot>(count);
+    if (!dst->slots_)
+        CrashAtUnhandlableOOM("Failed to allocate slots while tenuring.");
+    PodCopy(dst->slots_, src->slots_, count);
+    setSlotsForwardingPointer(src->slots_, dst->slots_, count);
+    return count * sizeof(HeapSlot);
+}
+
+MOZ_ALWAYS_INLINE size_t
+js::Nursery::moveElementsToTenured(NativeObject* dst, NativeObject* src, AllocKind dstKind)
+{
+    if (src->hasEmptyElements() || src->denseElementsAreCopyOnWrite())
+        return 0;
+
+    Zone* zone = src->zone();
+    ObjectElements* srcHeader = src->getElementsHeader();
+    ObjectElements* dstHeader;
+
+    /* TODO Bug 874151: Prefer to put element data inline if we have space. */
+    if (!isInside(srcHeader)) {
+        MOZ_ASSERT(src->elements_ == dst->elements_);
+        removeMallocedBuffer(srcHeader);
+        return 0;
+    }
+
+    size_t nslots = ObjectElements::VALUES_PER_HEADER + srcHeader->capacity;
+
+    /* Unlike other objects, Arrays can have fixed elements. */
+    if (src->is<ArrayObject>() && nslots <= GetGCKindSlots(dstKind)) {
+        dst->as<ArrayObject>().setFixedElements();
+        dstHeader = dst->as<ArrayObject>().getElementsHeader();
+        js_memcpy(dstHeader, srcHeader, nslots * sizeof(HeapSlot));
+        setElementsForwardingPointer(srcHeader, dstHeader, nslots);
+        return nslots * sizeof(HeapSlot);
+    }
+
+    MOZ_ASSERT(nslots >= 2);
+    dstHeader = reinterpret_cast<ObjectElements*>(zone->pod_malloc<HeapSlot>(nslots));
+    if (!dstHeader)
+        CrashAtUnhandlableOOM("Failed to allocate elements while tenuring.");
+    js_memcpy(dstHeader, srcHeader, nslots * sizeof(HeapSlot));
+    setElementsForwardingPointer(srcHeader, dstHeader, nslots);
+    dst->elements_ = dstHeader->elements();
+    return nslots * sizeof(HeapSlot);
+}
+
 
 /*** IsMarked / IsAboutToBeFinalized **************************************************************/
 
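Everything above moves the tenuring machinery into Marking.cpp: moveToTenured copies an object out of the nursery, leaves a RelocationOverlay behind, and links that overlay onto the mover's fixup list; collectToFixedPoint then walks the list, tracing each promoted object and promoting whatever it reaches in turn, until no new entries appear. A simplified self-contained sketch of that worklist-to-fixed-point shape (hypothetical types; the real list is threaded through the vacated nursery cells rather than held in a vector):

// Standalone sketch, not the real GC.
#include <cstddef>
#include <vector>

struct Obj {
    Obj* child = nullptr;       // a single outgoing edge, for illustration
    bool inNursery = true;
    Obj* forward = nullptr;     // forwarding pointer once moved
};

struct Mover {
    std::vector<Obj*> worklist; // stand-in for the threaded fixup list

    Obj* moveToTenured(Obj* src) {
        Obj* dst = new Obj(*src);   // copy payload out of the nursery
        dst->inNursery = false;
        src->forward = dst;         // leave a forwarding pointer behind
        worklist.push_back(dst);    // schedule dst's children for tracing
        return dst;
    }

    // Trace until no newly promoted objects remain: the fixed point.
    void collectToFixedPoint() {
        for (size_t i = 0; i < worklist.size(); i++) {  // list grows as we scan
            Obj* obj = worklist[i];
            if (obj->child && obj->child->inNursery) {
                Obj* fwd = obj->child->forward;
                obj->child = fwd ? fwd : moveToTenured(obj->child);
            }
        }
    }
};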
@@ -1748,15 +2158,10 @@ CheckIsMarkedThing(T* thingp)
 
 template <typename T>
 static bool
-IsMarkedInternal(T* thingp)
+IsMarkedInternalCommon(T* thingp)
 {
     CheckIsMarkedThing(thingp);
     JSRuntime* rt = (*thingp)->runtimeFromAnyThread();
-
-    if (IsInsideNursery(*thingp)) {
-        MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
-        return rt->gc.nursery.getForwardedPointer(thingp);
-    }
+    MOZ_ASSERT(!IsInsideNursery(*thingp));
 
     Zone* zone = (*thingp)->asTenured().zoneFromAnyThread();
     if (!zone->isCollectingFromAnyThread() || zone->isGCFinished())
@@ -1766,6 +2171,25 @@ IsMarkedInternal(T* thingp)
     return (*thingp)->asTenured().isMarked();
 }
 
+template <typename T>
+static bool
+IsMarkedInternal(T* thingp)
+{
+    return IsMarkedInternalCommon(thingp);
+}
+
+template <typename T>
+static bool
+IsMarkedInternal(JSObject** thingp)
+{
+    if (IsInsideNursery(*thingp)) {
+        JSRuntime* rt = (*thingp)->runtimeFromAnyThread();
+        MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
+        return rt->gc.nursery.getForwardedPointer(thingp);
+    }
+    return IsMarkedInternalCommon(thingp);
+}
+
 template <typename S>
 struct IsMarkedFunctor : public IdentityDefaultAdaptor<S> {
     template <typename T> S operator()(T* t, bool* rv) {
@@ -1808,7 +2232,7 @@ IsAboutToBeFinalizedInternal(T* thingp)
     MOZ_ASSERT_IF(!rt->isHeapMinorCollecting(), !IsInsideNursery(thing));
     if (rt->isHeapMinorCollecting()) {
         if (IsInsideNursery(thing))
-            return !nursery.getForwardedPointer(thingp);
+            return !nursery.getForwardedPointer(reinterpret_cast<JSObject**>(thingp));
         return false;
     }
 
@@ -300,14 +300,9 @@ class GCMarker : public JSTracer
     mozilla::DebugOnly<bool> strictCompartmentChecking;
 };
 
-void
-SetMarkStackLimit(JSRuntime* rt, size_t limit);
-
 bool
 IsBufferingGrayRoots(JSTracer* trc);
 
-} /* namespace js */
-
-namespace js {
 namespace gc {
 
 /*** Special Cases ***/
@@ -14,11 +14,8 @@
 #include "js/TracingAPI.h"
 #include "vm/Runtime.h"
 
-namespace js {
-
-template <typename T>
 MOZ_ALWAYS_INLINE bool
-Nursery::getForwardedPointer(T** ref)
+js::Nursery::getForwardedPointer(JSObject** ref) const
 {
     MOZ_ASSERT(ref);
     MOZ_ASSERT(isInside((void*)*ref));
@@ -26,10 +23,12 @@ Nursery::getForwardedPointer(T** ref)
     if (!overlay->isForwarded())
         return false;
-    /* This static cast from Cell* restricts T to valid (GC thing) types. */
-    *ref = static_cast<T*>(overlay->forwardingAddress());
+    *ref = static_cast<JSObject*>(overlay->forwardingAddress());
     return true;
 }
 
+namespace js {
+
 // The allocation methods below will not run the garbage collector. If the
 // nursery cannot accomodate the allocation, the malloc heap will be used
 // instead.
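getForwardedPointer is now a JSObject-only, non-template method. The mechanism it reads: once a cell has been moved, its nursery memory is reused as a RelocationOverlay that records "forwarded" plus the new address. A hedged sketch of the encoding (hypothetical layout; the real overlay uses the Relocated magic value seen later in this diff, and here a low tag bit stands in for it):

// Standalone sketch of a forwarding overlay written into a moved-out cell.
#include <cstdint>

struct Overlay {
    uintptr_t magic_;   // (new address | 1) once forwarded; a tagged word can
                        // never collide with an aligned object pointer
    Overlay* next_;     // threads the fixup list through vacated cells

    bool isForwarded() const { return magic_ & 1; }
    void forwardTo(void* addr) { magic_ = reinterpret_cast<uintptr_t>(addr) | 1; }
    void* forwardingAddress() const { return reinterpret_cast<void*>(magic_ & ~uintptr_t(1)); }
};

// Mirrors the shape of getForwardedPointer() in the hunk above.
static bool getForwardedPointer(Overlay* overlay, void** ref)
{
    if (!overlay->isForwarded())
        return false;                     // object not moved yet
    *ref = overlay->forwardingAddress();  // update the caller's edge in place
    return true;
}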
@@ -300,81 +300,6 @@ js::Nursery::freeBuffer(void* buffer)
     }
 }
 
-namespace js {
-namespace gc {
-
-class MinorCollectionTracer : public JS::CallbackTracer
-{
-  public:
-    Nursery* nursery;
-    AutoTraceSession session;
-
-    /* Amount of data moved to the tenured generation during collection. */
-    size_t tenuredSize;
-
-    /*
-     * This list is threaded through the Nursery using the space from already
-     * moved things. The list is used to fix up the moved things and to find
-     * things held live by intra-Nursery pointers.
-     */
-    RelocationOverlay* head;
-    RelocationOverlay** tail;
-
-    /* Save and restore all of the runtime state we use during MinorGC. */
-    bool savedRuntimeNeedBarrier;
-    AutoDisableProxyCheck disableStrictProxyChecking;
-    AutoEnterOOMUnsafeRegion oomUnsafeRegion;
-
-    /* Insert the given relocation entry into the list of things to visit. */
-    MOZ_ALWAYS_INLINE void insertIntoFixupList(RelocationOverlay* entry) {
-        *tail = entry;
-        tail = &entry->next_;
-        *tail = nullptr;
-    }
-
-    MinorCollectionTracer(JSRuntime* rt, Nursery* nursery)
-      : JS::CallbackTracer(rt, Nursery::MinorGCCallback, TraceWeakMapKeysValues),
-        nursery(nursery),
-        session(rt, MinorCollecting),
-        tenuredSize(0),
-        head(nullptr),
-        tail(&head),
-        savedRuntimeNeedBarrier(rt->needsIncrementalBarrier()),
-        disableStrictProxyChecking(rt)
-    {
-        rt->gc.incGcNumber();
-
-        /*
-         * We disable the runtime needsIncrementalBarrier() check so that
-         * pre-barriers do not fire on objects that have been relocated. The
-         * pre-barrier's call to obj->zone() will try to look through shape_,
-         * which is now the relocation magic and will crash. However,
-         * zone->needsIncrementalBarrier() must still be set correctly so that
-         * allocations we make in minor GCs between incremental slices will
-         * allocate their objects marked.
-         */
-        rt->setNeedsIncrementalBarrier(false);
-    }
-
-    ~MinorCollectionTracer() {
-        runtime()->setNeedsIncrementalBarrier(savedRuntimeNeedBarrier);
-    }
-};
-
-} /* namespace gc */
-} /* namespace js */
-
-MOZ_ALWAYS_INLINE TenuredCell*
-js::Nursery::allocateFromTenured(Zone* zone, AllocKind thingKind)
-{
-    TenuredCell* t = zone->arenas.allocateFromFreeList(thingKind, Arena::thingSize(thingKind));
-    if (t)
-        return t;
-    zone->arenas.checkEmptyFreeList(thingKind);
-    AutoMaybeStartBackgroundAllocation maybeStartBackgroundAllocation;
-    return zone->arenas.allocateFromArena(zone, thingKind, maybeStartBackgroundAllocation);
-}
-
 void
 Nursery::setForwardingPointer(void* oldData, void* newData, bool direct)
 {
@@ -447,291 +372,32 @@ js::Nursery::forwardBufferPointer(HeapSlot** pSlotsElems)
     MOZ_ASSERT(IsWriteableAddress(*pSlotsElems));
 }
 
-// Structure for counting how many times objects in a particular group have
-// been tenured during a minor collection.
-struct TenureCount
-{
-    ObjectGroup* group;
-    int count;
-};
-
-// Keep rough track of how many times we tenure objects in particular groups
-// during minor collections, using a fixed size hash for efficiency at the cost
-// of potential collisions.
-struct Nursery::TenureCountCache
-{
-    TenureCount entries[16];
-
-    TenureCountCache() { PodZero(this); }
-
-    TenureCount& findEntry(ObjectGroup* group) {
-        return entries[PointerHasher<ObjectGroup*, 3>::hash(group) % ArrayLength(entries)];
-    }
-};
-
-void
-js::Nursery::collectToFixedPoint(MinorCollectionTracer* trc, TenureCountCache& tenureCounts)
-{
-    for (RelocationOverlay* p = trc->head; p; p = p->next()) {
-        JSObject* obj = static_cast<JSObject*>(p->forwardingAddress());
-        traceObject(trc, obj);
-
-        TenureCount& entry = tenureCounts.findEntry(obj->group());
-        if (entry.group == obj->group()) {
-            entry.count++;
-        } else if (!entry.group) {
-            entry.group = obj->group();
-            entry.count = 1;
-        }
-    }
-}
-
-MOZ_ALWAYS_INLINE void
-js::Nursery::traceObject(MinorCollectionTracer* trc, JSObject* obj)
-{
-    const Class* clasp = obj->getClass();
-    if (clasp->trace) {
-        if (clasp->trace == InlineTypedObject::obj_trace) {
-            TypeDescr* descr = &obj->as<InlineTypedObject>().typeDescr();
-            if (descr->hasTraceList()) {
-                markTraceList(trc, descr->traceList(),
-                              obj->as<InlineTypedObject>().inlineTypedMem());
-            }
-            return;
-        }
-        if (clasp == &UnboxedPlainObject::class_) {
-            JSObject** pexpando = obj->as<UnboxedPlainObject>().addressOfExpando();
-            if (*pexpando)
-                markObject(trc, pexpando);
-            const UnboxedLayout& layout = obj->as<UnboxedPlainObject>().layoutDontCheckGeneration();
-            if (layout.traceList()) {
-                markTraceList(trc, layout.traceList(),
-                              obj->as<UnboxedPlainObject>().data());
-            }
-            return;
-        }
-        clasp->trace(trc, obj);
-    }
-
-    MOZ_ASSERT(obj->isNative() == clasp->isNative());
-    if (!clasp->isNative())
-        return;
-    NativeObject* nobj = &obj->as<NativeObject>();
-
-    // Note: the contents of copy on write elements pointers are filled in
-    // during parsing and cannot contain nursery pointers.
-    if (!nobj->hasEmptyElements() && !nobj->denseElementsAreCopyOnWrite())
-        markSlots(trc, nobj->getDenseElements(), nobj->getDenseInitializedLength());
-
-    HeapSlot* fixedStart;
-    HeapSlot* fixedEnd;
-    HeapSlot* dynStart;
-    HeapSlot* dynEnd;
-    nobj->getSlotRange(0, nobj->slotSpan(), &fixedStart, &fixedEnd, &dynStart, &dynEnd);
-    markSlots(trc, fixedStart, fixedEnd);
-    markSlots(trc, dynStart, dynEnd);
-}
-
-MOZ_ALWAYS_INLINE void
-js::Nursery::markSlots(MinorCollectionTracer* trc, HeapSlot* vp, uint32_t nslots)
-{
-    markSlots(trc, vp, vp + nslots);
-}
-
-MOZ_ALWAYS_INLINE void
-js::Nursery::markSlots(MinorCollectionTracer* trc, HeapSlot* vp, HeapSlot* end)
-{
-    for (; vp != end; ++vp)
-        markSlot(trc, vp);
-}
-
-MOZ_ALWAYS_INLINE void
-js::Nursery::markSlot(MinorCollectionTracer* trc, HeapSlot* slotp)
-{
-    if (!slotp->isObject())
-        return;
-
-    JSObject* obj = &slotp->toObject();
-    if (markObject(trc, &obj))
-        slotp->unsafeGet()->setObject(*obj);
-}
-
-MOZ_ALWAYS_INLINE void
-js::Nursery::markTraceList(MinorCollectionTracer* trc, const int32_t* traceList, uint8_t* memory)
-{
-    while (*traceList != -1) {
-        // Strings are not in the nursery and do not need tracing.
-        traceList++;
-    }
-    traceList++;
-    while (*traceList != -1) {
-        JSObject** pobj = reinterpret_cast<JSObject **>(memory + *traceList);
-        markObject(trc, pobj);
-        traceList++;
-    }
-    traceList++;
-    while (*traceList != -1) {
-        HeapSlot* pslot = reinterpret_cast<HeapSlot *>(memory + *traceList);
-        markSlot(trc, pslot);
-        traceList++;
-    }
-}
-
-MOZ_ALWAYS_INLINE bool
-js::Nursery::markObject(MinorCollectionTracer* trc, JSObject** pobj)
-{
-    if (!IsInsideNursery(*pobj))
-        return false;
-
-    if (getForwardedPointer(pobj))
-        return true;
-
-    *pobj = static_cast<JSObject*>(moveToTenured(trc, *pobj));
-    return true;
-}
-
-void*
-js::Nursery::moveToTenured(MinorCollectionTracer* trc, JSObject* src)
-{
-    AllocKind dstKind = src->allocKindForTenure(*this);
-    Zone* zone = src->zone();
-    JSObject* dst = reinterpret_cast<JSObject*>(allocateFromTenured(zone, dstKind));
-    if (!dst)
-        CrashAtUnhandlableOOM("Failed to allocate object while tenuring.");
-
-    trc->tenuredSize += moveObjectToTenured(trc, dst, src, dstKind);
-
-    RelocationOverlay* overlay = RelocationOverlay::fromCell(src);
-    overlay->forwardTo(dst);
-    trc->insertIntoFixupList(overlay);
-
-    TracePromoteToTenured(src, dst);
-    return static_cast<void*>(dst);
-}
-
-MOZ_ALWAYS_INLINE size_t
-js::Nursery::moveObjectToTenured(MinorCollectionTracer* trc,
-                                 JSObject* dst, JSObject* src, AllocKind dstKind)
-{
-    size_t srcSize = Arena::thingSize(dstKind);
-    size_t tenuredSize = srcSize;
-
-    /*
-     * Arrays do not necessarily have the same AllocKind between src and dst.
-     * We deal with this by copying elements manually, possibly re-inlining
-     * them if there is adequate room inline in dst.
-     *
-     * For Arrays we're reducing tenuredSize to the smaller srcSize
-     * because moveElementsToTenured() accounts for all Array elements,
-     * even if they are inlined.
-     */
-    if (src->is<ArrayObject>())
-        tenuredSize = srcSize = sizeof(NativeObject);
-
-    js_memcpy(dst, src, srcSize);
-    if (src->isNative()) {
-        NativeObject* ndst = &dst->as<NativeObject>();
-        NativeObject* nsrc = &src->as<NativeObject>();
-        tenuredSize += moveSlotsToTenured(ndst, nsrc, dstKind);
-        tenuredSize += moveElementsToTenured(ndst, nsrc, dstKind);
-
-        // The shape's list head may point into the old object. This can only
-        // happen for dictionaries, which are native objects.
-        if (&nsrc->shape_ == ndst->shape_->listp) {
-            MOZ_ASSERT(nsrc->shape_->inDictionary());
-            ndst->shape_->listp = &ndst->shape_;
-        }
-    }
-
-    if (src->is<InlineTypedObject>()) {
-        InlineTypedObject::objectMovedDuringMinorGC(trc, dst, src);
-    } else if (src->is<UnboxedArrayObject>()) {
-        tenuredSize += UnboxedArrayObject::objectMovedDuringMinorGC(trc, dst, src, dstKind);
-    } else {
-        // Objects with JSCLASS_SKIP_NURSERY_FINALIZE need to be handled above
-        // to ensure any additional nursery buffers they hold are moved.
-        MOZ_ASSERT(!(src->getClass()->flags & JSCLASS_SKIP_NURSERY_FINALIZE));
-    }
-
-    return tenuredSize;
-}
-
-MOZ_ALWAYS_INLINE size_t
-js::Nursery::moveSlotsToTenured(NativeObject* dst, NativeObject* src, AllocKind dstKind)
-{
-    /* Fixed slots have already been copied over. */
-    if (!src->hasDynamicSlots())
-        return 0;
-
-    if (!isInside(src->slots_)) {
-        removeMallocedBuffer(src->slots_);
-        return 0;
-    }
-
-    Zone* zone = src->zone();
-    size_t count = src->numDynamicSlots();
-    dst->slots_ = zone->pod_malloc<HeapSlot>(count);
-    if (!dst->slots_)
-        CrashAtUnhandlableOOM("Failed to allocate slots while tenuring.");
-    PodCopy(dst->slots_, src->slots_, count);
-    setSlotsForwardingPointer(src->slots_, dst->slots_, count);
-    return count * sizeof(HeapSlot);
-}
-
-MOZ_ALWAYS_INLINE size_t
-js::Nursery::moveElementsToTenured(NativeObject* dst, NativeObject* src, AllocKind dstKind)
-{
-    if (src->hasEmptyElements() || src->denseElementsAreCopyOnWrite())
-        return 0;
-
-    Zone* zone = src->zone();
-    ObjectElements* srcHeader = src->getElementsHeader();
-    ObjectElements* dstHeader;
-
-    /* TODO Bug 874151: Prefer to put element data inline if we have space. */
-    if (!isInside(srcHeader)) {
-        MOZ_ASSERT(src->elements_ == dst->elements_);
-        removeMallocedBuffer(srcHeader);
-        return 0;
-    }
-
-    size_t nslots = ObjectElements::VALUES_PER_HEADER + srcHeader->capacity;
-
-    /* Unlike other objects, Arrays can have fixed elements. */
-    if (src->is<ArrayObject>() && nslots <= GetGCKindSlots(dstKind)) {
-        dst->as<ArrayObject>().setFixedElements();
-        dstHeader = dst->as<ArrayObject>().getElementsHeader();
-        js_memcpy(dstHeader, srcHeader, nslots * sizeof(HeapSlot));
-        setElementsForwardingPointer(srcHeader, dstHeader, nslots);
-        return nslots * sizeof(HeapSlot);
-    }
-
-    MOZ_ASSERT(nslots >= 2);
-    dstHeader = reinterpret_cast<ObjectElements*>(zone->pod_malloc<HeapSlot>(nslots));
-    if (!dstHeader)
-        CrashAtUnhandlableOOM("Failed to allocate elements while tenuring.");
-    js_memcpy(dstHeader, srcHeader, nslots * sizeof(HeapSlot));
-    setElementsForwardingPointer(srcHeader, dstHeader, nslots);
-    dst->elements_ = dstHeader->elements();
-    return nslots * sizeof(HeapSlot);
-}
-
-static bool
-ShouldMoveToTenured(MinorCollectionTracer* trc, void** thingp)
-{
-    Cell* cell = static_cast<Cell*>(*thingp);
-    Nursery& nursery = *trc->nursery;
-    return !nursery.isInside(thingp) && IsInsideNursery(cell) &&
-           !nursery.getForwardedPointer(thingp);
-}
-
-/* static */ void
-js::Nursery::MinorGCCallback(JS::CallbackTracer* jstrc, void** thingp, JSGCTraceKind kind)
-{
-    MinorCollectionTracer* trc = static_cast<MinorCollectionTracer*>(jstrc);
-    if (ShouldMoveToTenured(trc, thingp))
-        *thingp = trc->nursery->moveToTenured(trc, static_cast<JSObject*>(*thingp));
-}
-
+js::TenuringTracer::TenuringTracer(JSRuntime* rt, Nursery* nursery)
+  : JSTracer(rt, JSTracer::TracerKindTag::Tenuring, TraceWeakMapKeysValues)
+  , nursery_(*nursery)
+  , tenuredSize(0)
+  , head(nullptr)
+  , tail(&head)
+  , savedRuntimeNeedBarrier(rt->needsIncrementalBarrier())
+#ifdef JS_GC_ZEAL
+  , verifyingPostBarriers(nullptr)
+#endif
+{
+    rt->gc.incGcNumber();
+
+    // We disable the runtime needsIncrementalBarrier() check so that
+    // pre-barriers do not fire on objects that have been relocated. The
+    // pre-barrier's call to obj->zone() will try to look through shape_,
+    // which is now the relocation magic and will crash. However,
+    // zone->needsIncrementalBarrier() must still be set correctly so that
+    // allocations we make in minor GCs between incremental slices will
+    // allocate their objects marked.
+    rt->setNeedsIncrementalBarrier(false);
+}
+
+js::TenuringTracer::~TenuringTracer()
+{
+    runtime()->setNeedsIncrementalBarrier(savedRuntimeNeedBarrier);
+}
+
 #define TIME_START(name) int64_t timstampStart_##name = enableProfiling_ ? PRMJ_Now() : 0
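The TenuringTracer constructor and destructor take over the save/clear/restore of the runtime's needsIncrementalBarrier flag that the deleted MinorCollectionTracer used to do, for the reason given in the quoted comment: pre-barriers must not inspect objects whose shape words now hold relocation magic. The same discipline as a tiny standalone RAII sketch (hypothetical Runtime type):

// Standalone sketch: save on entry, clear for the minor GC, restore on exit,
// mirroring the ctor/dtor pairing in the hunk above.
struct Runtime {
    bool needsIncrementalBarrier = true;
};

class AutoDisableBarriers {
    Runtime& rt_;
    bool saved_;
  public:
    explicit AutoDisableBarriers(Runtime& rt)
      : rt_(rt), saved_(rt.needsIncrementalBarrier)
    {
        rt_.needsIncrementalBarrier = false;   // relocated objects must not be
                                               // touched by pre-barriers
    }
    ~AutoDisableBarriers() { rt_.needsIncrementalBarrier = saved_; }
};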
@@ -766,48 +432,51 @@ js::Nursery::collect(JSRuntime* rt, JS::gcreason::Reason reason, ObjectGroupList
 
     TIME_START(total);
 
+    AutoTraceSession session(rt, MinorCollecting);
     AutoStopVerifyingBarriers av(rt, false);
+    AutoDisableProxyCheck disableStrictProxyChecking(rt);
+    DebugOnly<AutoEnterOOMUnsafeRegion> oomUnsafeRegion;
 
     // Move objects pointed to by roots from the nursery to the major heap.
-    MinorCollectionTracer trc(rt, this);
+    TenuringTracer mover(rt, this);
 
     // Mark the store buffer. This must happen first.
     TIME_START(markValues);
-    sb.markValues(&trc);
+    sb.markValues(mover);
     TIME_END(markValues);
 
     TIME_START(markCells);
-    sb.markCells(&trc);
+    sb.markCells(mover);
     TIME_END(markCells);
 
     TIME_START(markSlots);
-    sb.markSlots(&trc);
+    sb.markSlots(mover);
     TIME_END(markSlots);
 
     TIME_START(markWholeCells);
-    sb.markWholeCells(&trc);
+    sb.markWholeCells(mover);
     TIME_END(markWholeCells);
 
     TIME_START(markRelocatableValues);
-    sb.markRelocatableValues(&trc);
+    sb.markRelocatableValues(mover);
     TIME_END(markRelocatableValues);
 
     TIME_START(markRelocatableCells);
-    sb.markRelocatableCells(&trc);
+    sb.markRelocatableCells(mover);
     TIME_END(markRelocatableCells);
 
     TIME_START(markGenericEntries);
-    sb.markGenericEntries(&trc);
+    sb.markGenericEntries(&mover);
     TIME_END(markGenericEntries);
 
     TIME_START(markRuntime);
-    rt->gc.markRuntime(&trc);
+    rt->gc.markRuntime(&mover);
     TIME_END(markRuntime);
 
     TIME_START(markDebugger);
     {
         gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_MARK_ROOTS);
-        Debugger::markAll(&trc);
+        Debugger::markAll(&mover);
     }
     TIME_END(markDebugger);
 
@@ -821,7 +490,7 @@ js::Nursery::collect(JSRuntime* rt, JS::gcreason::Reason reason, ObjectGroupList
     // objects are left to move. That is, we iterate to a fixed point.
     TIME_START(collectToFP);
     TenureCountCache tenureCounts;
-    collectToFixedPoint(&trc, tenureCounts);
+    collectToFixedPoint(mover, tenureCounts);
     TIME_END(collectToFP);
 
     // Update the array buffer object's view lists.
@@ -834,7 +503,7 @@ js::Nursery::collect(JSRuntime* rt, JS::gcreason::Reason reason, ObjectGroupList
 
     // Update any slot or element pointers whose destination has been tenured.
     TIME_START(updateJitActivations);
-    js::jit::UpdateJitActivationsForMinorGC(rt, &trc);
+    js::jit::UpdateJitActivationsForMinorGC(rt, &mover);
     forwardedBuffers.finish();
     TIME_END(updateJitActivations);
 
@@ -861,7 +530,7 @@ js::Nursery::collect(JSRuntime* rt, JS::gcreason::Reason reason, ObjectGroupList
 
     // Resize the nursery.
     TIME_START(resize);
-    double promotionRate = trc.tenuredSize / double(allocationEnd() - start());
+    double promotionRate = mover.tenuredSize / double(allocationEnd() - start());
     if (promotionRate > 0.05)
         growAllocableSpace();
     else if (promotionRate < 0.01)
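The resize hunk keeps the existing policy and only reads tenuredSize from the new mover: grow the nursery when more than 5% of the bytes allocated this cycle survive the minor GC, shrink when under 1%. A standalone sketch of that policy (the bounds and the doubling/halving step are assumptions for illustration, not values from this diff):

// Standalone sketch of promotion-rate-driven nursery resizing.
#include <algorithm>
#include <cstddef>

static size_t resizeNursery(size_t cur, size_t tenuredBytes, size_t allocatedBytes)
{
    const size_t kMin = 192 * 1024;          // assumed lower bound
    const size_t kMax = 16 * 1024 * 1024;    // assumed upper bound
    double promotionRate = double(tenuredBytes) / double(allocatedBytes);
    if (promotionRate > 0.05)
        return std::min(cur * 2, kMax);      // too much survives: nursery too small
    if (promotionRate < 0.01)
        return std::max(cur / 2, kMin);      // almost nothing survives: shrink
    return cur;
}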
@@ -29,6 +29,7 @@ namespace js {
 
 class ObjectElements;
 class NativeObject;
+class Nursery;
 class HeapSlot;
 class ObjectGroup;
 
@@ -37,12 +38,44 @@ void SetGCZeal(JSRuntime*, uint8_t, uint32_t);
 namespace gc {
 struct Cell;
-class MinorCollectionTracer;
 class RelocationOverlay;
 } /* namespace gc */
 
 namespace jit {
 class MacroAssembler;
 }
 
+class TenuringTracer : public JSTracer
+{
+    friend class Nursery;
+    Nursery& nursery_;
+
+    // Amount of data moved to the tenured generation during collection.
+    size_t tenuredSize;
+
+    // This list is threaded through the Nursery using the space from already
+    // moved things. The list is used to fix up the moved things and to find
+    // things held live by intra-Nursery pointers.
+    gc::RelocationOverlay* head;
+    gc::RelocationOverlay** tail;
+
+    // Save and restore all of the runtime state we use during MinorGC.
+    bool savedRuntimeNeedBarrier;
+
+    TenuringTracer(JSRuntime* rt, Nursery* nursery);
+    ~TenuringTracer();
+
+  public:
+    const Nursery& nursery() const { return nursery_; }
+    JSObject* moveToTenured(JSObject* thing);
+
+    void insertIntoFixupList(gc::RelocationOverlay* entry);
+
+#ifdef JS_GC_ZEAL
+    bool verifyingPostBarriers;
+#endif
+};
+
 class Nursery
 {
   public:
@@ -122,14 +155,13 @@ class Nursery
      * sets |*ref| to the new location of the object and returns true. Otherwise
      * returns false and leaves |*ref| unset.
      */
-    template <typename T>
-    MOZ_ALWAYS_INLINE bool getForwardedPointer(T** ref);
+    MOZ_ALWAYS_INLINE bool getForwardedPointer(JSObject** ref) const;
 
     /* Forward a slots/elements pointer stored in an Ion frame. */
     void forwardBufferPointer(HeapSlot** pSlotsElems);
 
     void maybeSetForwardingPointer(JSTracer* trc, void* oldData, void* newData, bool direct) {
-        if (IsMinorCollectionTracer(trc) && isInside(oldData))
+        if (trc->isTenuringTracer() && isInside(oldData))
             setForwardingPointer(oldData, newData, direct);
     }
 
@@ -162,10 +194,6 @@ class Nursery
         return heapEnd_;
     }
 
-    static bool IsMinorCollectionTracer(JSTracer* trc) {
-        return trc->isCallbackTracer() && trc->asCallbackTracer()->hasCallback(MinorGCCallback);
-    }
-
 #ifdef JS_GC_ZEAL
     void enterZealMode();
     void leaveZealMode();
@@ -297,16 +325,16 @@ class Nursery
      * Move the object at |src| in the Nursery to an already-allocated cell
      * |dst| in Tenured.
      */
-    void collectToFixedPoint(gc::MinorCollectionTracer* trc, TenureCountCache& tenureCounts);
-    MOZ_ALWAYS_INLINE void traceObject(gc::MinorCollectionTracer* trc, JSObject* src);
-    MOZ_ALWAYS_INLINE void markSlots(gc::MinorCollectionTracer* trc, HeapSlot* vp, uint32_t nslots);
-    MOZ_ALWAYS_INLINE void markSlots(gc::MinorCollectionTracer* trc, HeapSlot* vp, HeapSlot* end);
-    MOZ_ALWAYS_INLINE void markSlot(gc::MinorCollectionTracer* trc, HeapSlot* slotp);
-    MOZ_ALWAYS_INLINE void markTraceList(gc::MinorCollectionTracer* trc,
+    void collectToFixedPoint(TenuringTracer& trc, TenureCountCache& tenureCounts);
+    MOZ_ALWAYS_INLINE void traceObject(TenuringTracer& trc, JSObject* src);
+    MOZ_ALWAYS_INLINE void markSlots(TenuringTracer& trc, HeapSlot* vp, uint32_t nslots);
+    MOZ_ALWAYS_INLINE void markSlots(TenuringTracer& trc, HeapSlot* vp, HeapSlot* end);
+    MOZ_ALWAYS_INLINE void markSlot(TenuringTracer& trc, HeapSlot* slotp);
+    MOZ_ALWAYS_INLINE void markTraceList(TenuringTracer& trc,
                                          const int32_t* traceList, uint8_t* memory);
-    MOZ_ALWAYS_INLINE bool markObject(gc::MinorCollectionTracer* trc, JSObject** pobj);
-    void* moveToTenured(gc::MinorCollectionTracer* trc, JSObject* src);
-    size_t moveObjectToTenured(gc::MinorCollectionTracer* trc, JSObject* dst, JSObject* src,
+    MOZ_ALWAYS_INLINE bool markObject(TenuringTracer& trc, JSObject** pobj);
+    void* moveToTenured(TenuringTracer& trc, JSObject* src);
+    size_t moveObjectToTenured(TenuringTracer& trc, JSObject* dst, JSObject* src,
                                gc::AllocKind dstKind);
     size_t moveElementsToTenured(NativeObject* dst, NativeObject* src, gc::AllocKind dstKind);
     size_t moveSlotsToTenured(NativeObject* dst, NativeObject* src, gc::AllocKind dstKind);
@@ -331,8 +359,7 @@ class Nursery
     void growAllocableSpace();
     void shrinkAllocableSpace();
 
-    static void MinorGCCallback(JS::CallbackTracer* trc, void** thingp, JSGCTraceKind kind);
-
-    friend class gc::MinorCollectionTracer;
+    friend class TenuringTracer;
     friend class jit::MacroAssembler;
 };
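The Nursery.h hunks above still hand a TenureCountCache to collectToFixedPoint. As the struct (moved into Marking.cpp earlier in this diff) shows, it is a deliberately lossy, direct-mapped table: hash the group pointer into 16 slots, bump the count on a hit, claim an empty slot, and silently drop colliding samples. A standalone sketch of that trade-off (hypothetical hash; the real code uses PointerHasher):

// Standalone sketch of a fixed-size, collision-tolerant tenure counter.
#include <cstdint>
#include <cstring>

struct Group;   // stand-in for js::ObjectGroup; only the pointer is used

struct TenureCount { Group* group; int count; };

struct TenureCountCache {
    TenureCount entries[16];
    TenureCountCache() { std::memset(entries, 0, sizeof(entries)); }

    // Direct-mapped: one candidate slot per group, no probing.
    TenureCount& findEntry(Group* group) {
        uintptr_t h = reinterpret_cast<uintptr_t>(group) >> 3;  // assumed hash
        return entries[h % 16];
    }

    void note(Group* group) {
        TenureCount& e = findEntry(group);
        if (e.group == group) {
            e.count++;          // hit: count another tenured object
        } else if (!e.group) {
            e.group = group;    // claim an empty slot
            e.count = 1;
        }
        // else: collision -- drop the sample; the pretenuring heuristic
        // only needs rough counts, so losing some is acceptable
    }
};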
@@ -16,91 +16,11 @@
 
 using namespace js;
 using namespace js::gc;
-using mozilla::ReentrancyGuard;
-
-/*** Edges ***/
-
-void
-StoreBuffer::SlotsEdge::mark(JSTracer* trc) const
-{
-    NativeObject* obj = object();
-
-    // Beware JSObject::swap exchanging a native object for a non-native one.
-    if (!obj->isNative())
-        return;
-
-    if (IsInsideNursery(obj))
-        return;
-
-    if (kind() == ElementKind) {
-        int32_t initLen = obj->getDenseInitializedLength();
-        int32_t clampedStart = Min(start_, initLen);
-        int32_t clampedEnd = Min(start_ + count_, initLen);
-        TraceRange(trc, clampedEnd - clampedStart,
-                   static_cast<HeapSlot*>(obj->getDenseElements() + clampedStart), "element");
-    } else {
-        int32_t start = Min(uint32_t(start_), obj->slotSpan());
-        int32_t end = Min(uint32_t(start_) + count_, obj->slotSpan());
-        MOZ_ASSERT(end >= start);
-        TraceObjectSlots(trc, obj, start, end - start);
-    }
-}
-
-void
-StoreBuffer::WholeCellEdges::mark(JSTracer* trc) const
-{
-    MOZ_ASSERT(edge->isTenured());
-    JSGCTraceKind kind = GetGCThingTraceKind(edge);
-    if (kind <= JSTRACE_OBJECT) {
-        JSObject* object = static_cast<JSObject*>(edge);
-        if (object->is<ArgumentsObject>())
-            ArgumentsObject::trace(trc, object);
-        object->traceChildren(trc);
-        return;
-    }
-    MOZ_ASSERT(kind == JSTRACE_JITCODE);
-    static_cast<jit::JitCode*>(edge)->traceChildren(trc);
-}
-
-void
-StoreBuffer::CellPtrEdge::mark(JSTracer* trc) const
-{
-    if (!*edge)
-        return;
-
-    MOZ_ASSERT(GetGCThingTraceKind(*edge) == JSTRACE_OBJECT);
-    TraceRoot(trc, reinterpret_cast<JSObject**>(edge), "store buffer edge");
-}
-
-void
-StoreBuffer::ValueEdge::mark(JSTracer* trc) const
-{
-    if (!deref())
-        return;
-
-    TraceRoot(trc, edge, "store buffer edge");
-}
-
-/*** MonoTypeBuffer ***/
-
-template <typename T>
-void
-StoreBuffer::MonoTypeBuffer<T>::mark(StoreBuffer* owner, JSTracer* trc)
-{
-    ReentrancyGuard g(*owner);
-    MOZ_ASSERT(owner->isEnabled());
-    MOZ_ASSERT(stores_.initialized());
-    sinkStores(owner);
-    for (typename StoreSet::Range r = stores_.all(); !r.empty(); r.popFront())
-        r.front().mark(trc);
-}
-
 /*** GenericBuffer ***/
 
 void
 StoreBuffer::GenericBuffer::mark(StoreBuffer* owner, JSTracer* trc)
 {
-    ReentrancyGuard g(*owner);
+    mozilla::ReentrancyGuard g(*owner);
     MOZ_ASSERT(owner->isEnabled());
     if (!storage_)
         return;
@@ -114,8 +34,6 @@ StoreBuffer::GenericBuffer::mark(StoreBuffer* owner, JSTracer* trc)
     }
 }
 
-/*** StoreBuffer ***/
-
 bool
 StoreBuffer::enable()
 {
@@ -167,18 +85,6 @@ StoreBuffer::clear()
     return true;
 }
 
-void
-StoreBuffer::markAll(JSTracer* trc)
-{
-    bufferVal.mark(this, trc);
-    bufferCell.mark(this, trc);
-    bufferSlot.mark(this, trc);
-    bufferWholeCell.mark(this, trc);
-    bufferRelocVal.mark(this, trc);
-    bufferRelocCell.mark(this, trc);
-    bufferGeneric.mark(this, trc);
-}
-
 void
 StoreBuffer::setAboutToOverflow()
 {
@@ -122,7 +122,7 @@ class StoreBuffer
         }
 
         /* Mark the source of all edges in the store buffer. */
-        void mark(StoreBuffer* owner, JSTracer* trc);
+        void mark(StoreBuffer* owner, TenuringTracer& mover);
 
         size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) {
             return stores_.sizeOfExcludingThis(mallocSizeOf);
@@ -211,7 +211,7 @@ class StoreBuffer
             return !nursery.isInside(edge);
         }
 
-        void mark(JSTracer* trc) const;
+        void mark(TenuringTracer& mover) const;
 
         CellPtrEdge tagged() const { return CellPtrEdge((Cell**)(uintptr_t(edge) | 1)); }
         CellPtrEdge untagged() const { return CellPtrEdge((Cell**)(uintptr_t(edge) & ~1)); }
@@ -236,7 +236,7 @@ class StoreBuffer
             return !nursery.isInside(edge);
         }
 
-        void mark(JSTracer* trc) const;
+        void mark(TenuringTracer& mover) const;
 
         ValueEdge tagged() const { return ValueEdge((JS::Value*)(uintptr_t(edge) | 1)); }
         ValueEdge untagged() const { return ValueEdge((JS::Value*)(uintptr_t(edge) & ~1)); }
@@ -282,7 +282,7 @@ class StoreBuffer
             return !IsInsideNursery(reinterpret_cast<Cell*>(object()));
         }
 
-        void mark(JSTracer* trc) const;
+        void mark(TenuringTracer& mover) const;
 
         typedef struct {
             typedef SlotsEdge Lookup;
@@ -308,7 +308,7 @@ class StoreBuffer
         static bool supportsDeduplication() { return true; }
         void* deduplicationKey() const { return (void*)edge; }
 
-        void mark(JSTracer* trc) const;
+        void mark(TenuringTracer& mover) const;
 
         typedef PointerEdgeHasher<WholeCellEdges> Hasher;
     };
@@ -444,14 +444,13 @@ class StoreBuffer
     }
 
     /* Methods to mark the source of all edges in the store buffer. */
-    void markAll(JSTracer* trc);
-    void markValues(JSTracer* trc) { bufferVal.mark(this, trc); }
-    void markCells(JSTracer* trc) { bufferCell.mark(this, trc); }
-    void markSlots(JSTracer* trc) { bufferSlot.mark(this, trc); }
-    void markWholeCells(JSTracer* trc) { bufferWholeCell.mark(this, trc); }
-    void markRelocatableValues(JSTracer* trc) { bufferRelocVal.mark(this, trc); }
-    void markRelocatableCells(JSTracer* trc) { bufferRelocCell.mark(this, trc); }
-    void markGenericEntries(JSTracer* trc) { bufferGeneric.mark(this, trc); }
+    void markValues(TenuringTracer& mover) { bufferVal.mark(this, mover); }
+    void markCells(TenuringTracer& mover) { bufferCell.mark(this, mover); }
+    void markSlots(TenuringTracer& mover) { bufferSlot.mark(this, mover); }
+    void markWholeCells(TenuringTracer& mover) { bufferWholeCell.mark(this, mover); }
+    void markRelocatableValues(TenuringTracer& mover) { bufferRelocVal.mark(this, mover); }
+    void markRelocatableCells(TenuringTracer& mover) { bufferRelocCell.mark(this, mover); }
+    void markGenericEntries(JSTracer *trc) { bufferGeneric.mark(this, trc); }
 
     /* For use by our owned buffers and for testing. */
    void setAboutToOverflow();
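The StoreBuffer hunks complete the migration: markAll and the JSTracer*-typed mark methods go away, and every edge class plus MonoTypeBuffer now marks against a concrete TenuringTracer& (only the generic buffer keeps JSTracer*). A schematic sketch of the typed-edge pattern (hypothetical types), whose point is that the inner loop compiles to a direct, inlinable call rather than an indirect one:

// Standalone sketch of statically typed remembered-set marking.
#include <vector>

struct TenuringTracer {};   // stand-in for the concrete tracer in this commit

// A typed remembered-set entry that knows statically how to mark itself.
struct CellPtrEdge {
    void** edge;
    void mark(TenuringTracer& mover) const {
        // ...move *edge out of the nursery; body elided in this sketch...
    }
};

// One buffer per edge type; e.mark(mover) is resolved at compile time per T,
// where the old JSTracer* path funnelled every entry through a callback.
template <typename T>
struct MonoTypeBuffer {
    std::vector<T> stores_;
    void mark(TenuringTracer& mover) {
        for (const T& e : stores_)
            e.mark(mover);
    }
};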
@@ -1189,6 +1189,7 @@ MergeCompartments(JSCompartment* source, JSCompartment* target);
 class RelocationOverlay
 {
-    friend class MinorCollectionTracer;
+    friend class js::TenuringTracer;
 
     /* The low bit is set so this should never equal a normal pointer. */
     static const uintptr_t Relocated = uintptr_t(0xbad0bad1);
@@ -277,7 +277,7 @@ JSRuntime::init(uint32_t maxbytes, uint32_t maxNurseryBytes)
 
     const char* size = getenv("JSGC_MARK_STACK_LIMIT");
     if (size)
-        SetMarkStackLimit(this, atoi(size));
+        gc.setMarkStackLimit(atoi(size));
 
     ScopedJSDeletePtr<Zone> atomsZone(new_<Zone>(this));
     if (!atomsZone || !atomsZone->init(true))