Bug 990336 - Generalize the slots barrier and use it to optimize dense-range-ref; r=jonco

Terrence Cole 2014-03-31 17:51:53 -07:00
parent 02270e76f1
commit 5511a142b0
5 changed files with 142 additions and 141 deletions

View File

@@ -147,6 +147,12 @@ class BumpChunk
return result;
}
void *peek(size_t n) {
if (bump - bumpBase() < ptrdiff_t(n))
return nullptr;
return bump - n;
}
static BumpChunk *new_(size_t chunkSize);
static void delete_(BumpChunk *chunk);
};
@@ -470,6 +476,16 @@ class LifoAlloc
return Mark(chunk_, position_);
}
};
// Return a modifiable pointer to the most recently allocated bytes. The
// type of the thing must be known, so is only applicable to some special-
// purpose allocators. Will return a nullptr if nothing has been allocated.
template <typename T>
T *peek() {
if (!latest)
return nullptr;
return static_cast<T *>(latest->peek(sizeof(T)));
}
};
class LifoAllocScope
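
The new peek<T>() only makes sense for allocators that hold a single type, since it blindly reinterprets the last sizeof(T) bytes of the current chunk. A minimal sketch of the intended usage pattern, assuming a LifoAlloc that only ever stores one record type (the Record type, the pushOrUpdate helper, and the include path are illustrative, not part of the patch):

#include "ds/LifoAlloc.h"   // include path assumed

struct Record { int value; };

// Revisit the most recently pushed Record and update it in place, falling
// back to a fresh allocation when nothing has been allocated yet.
static void
pushOrUpdate(js::LifoAlloc &alloc, int value)
{
    if (Record *last = alloc.peek<Record>()) {
        last->value = value;              // merge into the existing entry
        return;
    }
    if (Record *fresh = alloc.new_<Record>())
        fresh->value = value;
}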

View File

@@ -907,8 +907,8 @@ class HeapSlot : public BarrieredValue
{
public:
enum Kind {
Slot,
Element
Slot = 0,
Element = 1
};
explicit HeapSlot() MOZ_DELETE;
@@ -982,7 +982,7 @@ class HeapSlot : public BarrieredValue
#ifdef JSGC_GENERATIONAL
if (target.isObject()) {
JS::shadow::Runtime *shadowRuntime = JS::shadow::Runtime::asShadowRuntime(rt);
shadowRuntime->gcStoreBufferPtr()->putSlot(obj, kind, slot, &target.toObject());
shadowRuntime->gcStoreBufferPtr()->putSlot(obj, kind, slot, 1);
}
#endif
}

View File

@@ -19,44 +19,33 @@ using namespace js;
using namespace js::gc;
using mozilla::ReentrancyGuard;
/*** SlotEdge ***/
/*** Edges ***/
MOZ_ALWAYS_INLINE HeapSlot *
StoreBuffer::SlotEdge::slotLocation() const
void
StoreBuffer::SlotsEdge::mark(JSTracer *trc)
{
if (kind == HeapSlot::Element) {
if (offset >= object->getDenseInitializedLength())
return nullptr;
return (HeapSlot *)&object->getDenseElement(offset);
if (trc->runtime->gcNursery.isInside(object_))
return;
if (!object_->isNative()) {
const Class *clasp = object_->getClass();
if (clasp)
clasp->trace(trc, object_);
return;
}
if (offset >= object->slotSpan())
return nullptr;
return &object->getSlotRef(offset);
}
MOZ_ALWAYS_INLINE void *
StoreBuffer::SlotEdge::deref() const
{
HeapSlot *loc = slotLocation();
return (loc && loc->isGCThing()) ? loc->toGCThing() : nullptr;
}
MOZ_ALWAYS_INLINE void *
StoreBuffer::SlotEdge::location() const
{
return (void *)slotLocation();
}
bool
StoreBuffer::SlotEdge::inRememberedSet(const Nursery &nursery) const
{
return !nursery.isInside(object) && nursery.isInside(deref());
}
MOZ_ALWAYS_INLINE bool
StoreBuffer::SlotEdge::isNullEdge() const
{
return !deref();
if (count_ > 0) {
int32_t initLen = object_->getDenseInitializedLength();
int32_t clampedStart = Min(start_, initLen);
int32_t clampedEnd = Min(start_ + count_, initLen);
gc::MarkArraySlots(trc, clampedEnd - clampedStart,
object_->getDenseElements() + clampedStart, "element");
} else {
int32_t start = Min(uint32_t(start_), object_->slotSpan());
int32_t end = Min(uint32_t(start_) + (-count_), object_->slotSpan());
MOZ_ASSERT(end >= start);
MarkObjectSlots(trc, object_, start, end - start);
}
}
void
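
In the new edge, a positive count_ means a dense-element range and a negative count_ means |count_| ordinary slots, and the range is re-clamped at mark time because the object may have shrunk since the edge was recorded. A worked sketch of the element branch with made-up numbers (std::min stands in for the Min helper used in the patch):

#include <algorithm>
#include <cstdint>

// Illustrative values: an edge recorded for elements [3, 12) on an object
// whose initialized length has since dropped to 7 only traces [3, 7).
const int32_t start_  = 3;
const int32_t count_  = 9;                                        // > 0: element kind
const int32_t initLen = 7;                                        // current initialized length
const int32_t clampedStart = std::min(start_, initLen);           // 3
const int32_t clampedEnd   = std::min(start_ + count_, initLen);  // 7
const int32_t numMarked    = clampedEnd - clampedStart;           // 4 elements traced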
@@ -79,6 +68,25 @@ StoreBuffer::WholeCellEdges::mark(JSTracer *trc)
#endif
}
void
StoreBuffer::CellPtrEdge::mark(JSTracer *trc)
{
if (!*edge)
return;
JS_ASSERT(GetGCThingTraceKind(*edge) == JSTRACE_OBJECT);
MarkObjectRoot(trc, reinterpret_cast<JSObject**>(edge), "store buffer edge");
}
void
StoreBuffer::ValueEdge::mark(JSTracer *trc)
{
if (!deref())
return;
MarkValueRoot(trc, edge, "store buffer edge");
}
/*** MonoTypeBuffer ***/
template <typename T>
@@ -107,6 +115,9 @@ template <typename T>
void
StoreBuffer::MonoTypeBuffer<T>::compactRemoveDuplicates(StoreBuffer *owner)
{
if (!T::supportsDeduplication())
return;
EdgeSet duplicates;
if (!duplicates.init())
return; /* Failure to de-dup is acceptable. */
@@ -114,12 +125,13 @@ StoreBuffer::MonoTypeBuffer<T>::compactRemoveDuplicates(StoreBuffer *owner)
LifoAlloc::Enum insert(*storage_);
for (LifoAlloc::Enum e(*storage_); !e.empty(); e.popFront<T>()) {
T *edge = e.get<T>();
if (!duplicates.has(edge->location())) {
void *key = edge->deduplicationKey();
if (!duplicates.has(key)) {
insert.updateFront<T>(*edge);
insert.popFront<T>();
/* Failure to insert will leave the set with duplicates. Oh well. */
duplicates.put(edge->location());
duplicates.put(key);
}
}
storage_->release(insert.mark());
@@ -157,10 +169,7 @@ StoreBuffer::MonoTypeBuffer<T>::mark(StoreBuffer *owner, JSTracer *trc)
maybeCompact(owner);
for (LifoAlloc::Enum e(*storage_); !e.empty(); e.popFront<T>()) {
T *edge = e.get<T>();
if (edge->isNullEdge())
continue;
edge->mark(trc);
}
}
@@ -179,10 +188,10 @@ StoreBuffer::RelocatableMonoTypeBuffer<T>::compactMoved(StoreBuffer *owner)
for (LifoAlloc::Enum e(storage); !e.empty(); e.popFront<T>()) {
T *edge = e.get<T>();
if (edge->isTagged()) {
if (!invalidated.put(edge->location()))
if (!invalidated.put(edge->deduplicationKey()))
CrashAtUnhandlableOOM("RelocatableMonoTypeBuffer::compactMoved: Failed to put removal.");
} else {
invalidated.remove(edge->location());
invalidated.remove(edge->deduplicationKey());
}
}
@@ -190,7 +199,7 @@ StoreBuffer::RelocatableMonoTypeBuffer<T>::compactMoved(StoreBuffer *owner)
LifoAlloc::Enum insert(storage);
for (LifoAlloc::Enum e(storage); !e.empty(); e.popFront<T>()) {
T *edge = e.get<T>();
if (!edge->isTagged() && !invalidated.has(edge->location())) {
if (!edge->isTagged() && !invalidated.has(edge->deduplicationKey())) {
insert.updateFront<T>(*edge);
insert.popFront<T>();
}
@@ -232,30 +241,6 @@ StoreBuffer::GenericBuffer::mark(StoreBuffer *owner, JSTracer *trc)
}
}
/*** Edges ***/
void
StoreBuffer::CellPtrEdge::mark(JSTracer *trc)
{
JS_ASSERT(GetGCThingTraceKind(*edge) == JSTRACE_OBJECT);
MarkObjectRoot(trc, reinterpret_cast<JSObject**>(edge), "store buffer edge");
}
void
StoreBuffer::ValueEdge::mark(JSTracer *trc)
{
MarkValueRoot(trc, edge, "store buffer edge");
}
void
StoreBuffer::SlotEdge::mark(JSTracer *trc)
{
if (kind == HeapSlot::Element)
MarkSlot(trc, (HeapSlot*)&object->getDenseElement(offset), "store buffer edge");
else
MarkSlot(trc, &object->getSlotRef(offset), "store buffer edge");
}
/*** StoreBuffer ***/
bool
@@ -387,7 +372,7 @@ JS::HeapValueRelocate(JS::Value *valuep)
template class StoreBuffer::MonoTypeBuffer<StoreBuffer::ValueEdge>;
template class StoreBuffer::MonoTypeBuffer<StoreBuffer::CellPtrEdge>;
template class StoreBuffer::MonoTypeBuffer<StoreBuffer::SlotEdge>;
template class StoreBuffer::MonoTypeBuffer<StoreBuffer::SlotsEdge>;
template class StoreBuffer::MonoTypeBuffer<StoreBuffer::WholeCellEdges>;
template class StoreBuffer::RelocatableMonoTypeBuffer<StoreBuffer::ValueEdge>;
template class StoreBuffer::RelocatableMonoTypeBuffer<StoreBuffer::CellPtrEdge>;

View File

@@ -40,7 +40,7 @@ class BufferableRef
{
public:
virtual void mark(JSTracer *trc) = 0;
bool inRememberedSet(const Nursery &) const { return true; }
bool maybeInRememberedSet(const Nursery &) const { return true; }
};
/*
@@ -134,6 +134,10 @@ class StoreBuffer
void put(StoreBuffer *owner, const T &t) {
JS_ASSERT(storage_);
T *tip = storage_->peek<T>();
if (tip && tip->canMergeWith(t))
return tip->mergeInplace(t);
T *tp = storage_->new_<T>(t);
if (!tp)
CrashAtUnhandlableOOM("Failed to allocate for MonoTypeBuffer::put.");
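
put() now peeks at the entry on top of the LIFO storage and widens it in place when the new edge is mergeable, so a loop that appends dense elements one at a time collapses into a single buffer entry. A hedged sketch of that shape, separate from the real MonoTypeBuffer, assuming an edge type that provides the canMergeWith/mergeInplace pair introduced below:

#include "mozilla/Assertions.h"   // for MOZ_CRASH
#include "ds/LifoAlloc.h"         // include path assumed

// Sketch only: peek-then-merge fast path over a LIFO holding edges of type T.
template <typename T>
static void
putEdge(js::LifoAlloc &storage, const T &edge)
{
    if (T *tip = storage.peek<T>()) {
        if (tip->canMergeWith(edge)) {
            tip->mergeInplace(edge);   // widen the existing entry in place
            return;
        }
    }
    if (!storage.new_<T>(edge))
        MOZ_CRASH("putEdge: failed to allocate store buffer entry");
}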
@@ -235,15 +239,15 @@ class StoreBuffer
bool operator==(const CellPtrEdge &other) const { return edge == other.edge; }
bool operator!=(const CellPtrEdge &other) const { return edge != other.edge; }
void *location() const { return (void *)untagged().edge; }
static bool supportsDeduplication() { return true; }
void *deduplicationKey() const { return (void *)untagged().edge; }
bool inRememberedSet(const Nursery &nursery) const {
bool maybeInRememberedSet(const Nursery &nursery) const {
return !nursery.isInside(edge) && nursery.isInside(*edge);
}
bool isNullEdge() const {
return !*edge;
}
bool canMergeWith(const CellPtrEdge &other) const { return edge == other.edge; }
void mergeInplace(const CellPtrEdge &) {}
void mark(JSTracer *trc);
@@ -261,15 +265,16 @@ class StoreBuffer
bool operator!=(const ValueEdge &other) const { return edge != other.edge; }
void *deref() const { return edge->isGCThing() ? edge->toGCThing() : nullptr; }
void *location() const { return (void *)untagged().edge; }
bool inRememberedSet(const Nursery &nursery) const {
static bool supportsDeduplication() { return true; }
void *deduplicationKey() const { return (void *)untagged().edge; }
bool maybeInRememberedSet(const Nursery &nursery) const {
return !nursery.isInside(edge) && nursery.isInside(deref());
}
bool isNullEdge() const {
return !deref();
}
bool canMergeWith(const ValueEdge &other) const { return edge == other.edge; }
void mergeInplace(const ValueEdge &) {}
void mark(JSTracer *trc);
@@ -278,30 +283,57 @@ class StoreBuffer
bool isTagged() const { return bool(uintptr_t(edge) & 1); }
};
struct SlotEdge
struct SlotsEdge
{
JSObject *object;
uint32_t offset;
int kind; // this is really just HeapSlot::Kind, but we can't see that type easily here
// These definitions must match those in HeapSlot::Kind.
const static int SlotKind = 0;
const static int ElementKind = 1;
SlotEdge(JSObject *object, int kind, uint32_t offset)
: object(object), offset(offset), kind(kind)
{}
JSObject *object_;
int32_t start_;
int32_t count_;
bool operator==(const SlotEdge &other) const {
return object == other.object && offset == other.offset && kind == other.kind;
SlotsEdge(JSObject *object, int kind, int32_t start, int32_t count)
: object_(object), start_(start), count_(count)
{
JS_ASSERT(start >= 0);
JS_ASSERT(count > 0); // Must be non-zero size so that |count_| < 0 can be kind.
if (kind == SlotKind)
count_ = -count_;
}
bool operator!=(const SlotEdge &other) const {
return object != other.object || offset != other.offset || kind != other.kind;
bool operator==(const SlotsEdge &other) const {
return object_ == other.object_ && start_ == other.start_ && count_ == other.count_;
}
MOZ_ALWAYS_INLINE HeapSlot *slotLocation() const;
bool operator!=(const SlotsEdge &other) const {
return object_ != other.object_ || start_ != other.start_ || count_ != other.count_;
}
MOZ_ALWAYS_INLINE void *deref() const;
MOZ_ALWAYS_INLINE void *location() const;
bool inRememberedSet(const Nursery &nursery) const;
MOZ_ALWAYS_INLINE bool isNullEdge() const;
bool canMergeWith(const SlotsEdge &other) const {
JS_ASSERT(sizeof(count_) == 4);
return object_ == other.object_ && count_ >> 31 == other.count_ >> 31;
}
void mergeInplace(const SlotsEdge &other) {
JS_ASSERT((count_ > 0 && other.count_ > 0) || (count_ < 0 && other.count_ < 0));
int32_t end1 = start_ + abs(count_);
int32_t end2 = other.start_ + abs(other.count_);
start_ = Min(start_, other.start_);
count_ = Max(end1, end2) - start_;
if (other.count_ < 0)
count_ = -count_;
}
static bool supportsDeduplication() { return false; }
void *deduplicationKey() const {
MOZ_CRASH("Dedup not supported on SlotsEdge.");
return nullptr;
}
bool maybeInRememberedSet(const Nursery &nursery) const {
return !nursery.isInside(object_);
}
void mark(JSTracer *trc);
};
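
Because the kind is carried in the sign of count_, merging reduces to taking the union of two half-open ranges and then restoring the sign. A self-contained sketch of that arithmetic, with a plain struct standing in for SlotsEdge (the real edge additionally requires the same object_ before merging):

#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <cstdlib>

// Stand-in for SlotsEdge: the kind is the sign of |count| (positive = elements,
// negative = slots); the covered range is [start, start + |count|).
struct Range { int32_t start, count; };

static bool canMerge(const Range &a, const Range &b) {
    return (a.count > 0) == (b.count > 0);       // same kind only
}

static Range merge(Range a, const Range &b) {
    int32_t end1  = a.start + std::abs(a.count);
    int32_t end2  = b.start + std::abs(b.count);
    int32_t start = std::min(a.start, b.start);
    int32_t count = std::max(end1, end2) - start;
    a.start = start;
    a.count = (b.count < 0) ? -count : count;    // restore the kind's sign
    return a;
}

int main() {
    Range tail = { 0, 4 };                       // elements [0, 4)
    Range next = { 4, 4 };                       // elements [4, 8)
    if (canMerge(tail, next))
        tail = merge(tail, next);                // -> elements [0, 8)
    std::printf("[%d, %d)\n", tail.start, tail.start + std::abs(tail.count));
    return 0;
}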
@@ -310,19 +342,20 @@
{
Cell *tenured;
WholeCellEdges(Cell *cell) : tenured(cell) {
explicit WholeCellEdges(Cell *cell) : tenured(cell) {
JS_ASSERT(tenured->isTenured());
}
bool operator==(const WholeCellEdges &other) const { return tenured == other.tenured; }
bool operator!=(const WholeCellEdges &other) const { return tenured != other.tenured; }
bool inRememberedSet(const Nursery &nursery) const { return true; }
bool maybeInRememberedSet(const Nursery &nursery) const { return true; }
/* This is used by RemoveDuplicates as a unique pointer to this Edge. */
void *location() const { return (void *)tenured; }
static bool supportsDeduplication() { return true; }
void *deduplicationKey() const { return (void *)tenured; }
bool isNullEdge() const { return false; }
bool canMergeWith(const WholeCellEdges &other) const { return tenured == other.tenured; }
void mergeInplace(const WholeCellEdges &) {}
void mark(JSTracer *trc);
};
@@ -357,10 +390,8 @@ class StoreBuffer
* The concurrent parsing thread cannot validly insert into the buffer,
* but it should not activate the re-entrancy guard either.
*/
if (!CurrentThreadCanAccessRuntime(runtime_)) {
JS_ASSERT(!edge.inRememberedSet(nursery_));
if (!CurrentThreadCanAccessRuntime(runtime_))
return false;
}
return true;
}
@@ -370,7 +401,7 @@ class StoreBuffer
if (!isOkayToUseBuffer(edge))
return;
mozilla::ReentrancyGuard g(*this);
if (edge.inRememberedSet(nursery_))
if (edge.maybeInRememberedSet(nursery_))
buffer.put(this, edge);
}
@@ -384,7 +415,7 @@ class StoreBuffer
MonoTypeBuffer<ValueEdge> bufferVal;
MonoTypeBuffer<CellPtrEdge> bufferCell;
MonoTypeBuffer<SlotEdge> bufferSlot;
MonoTypeBuffer<SlotsEdge> bufferSlot;
MonoTypeBuffer<WholeCellEdges> bufferWholeCell;
RelocatableMonoTypeBuffer<ValueEdge> bufferRelocVal;
RelocatableMonoTypeBuffer<CellPtrEdge> bufferRelocCell;
@@ -418,8 +449,8 @@ class StoreBuffer
/* Insert a single edge into the buffer/remembered set. */
void putValue(JS::Value *valuep) { put(bufferVal, ValueEdge(valuep)); }
void putCell(Cell **cellp) { put(bufferCell, CellPtrEdge(cellp)); }
void putSlot(JSObject *obj, int kind, uint32_t slot, void *target) {
put(bufferSlot, SlotEdge(obj, kind, slot));
void putSlot(JSObject *obj, int kind, int32_t start, int32_t count) {
put(bufferSlot, SlotsEdge(obj, kind, start, count));
}
void putWholeCell(Cell *cell) {
JS_ASSERT(cell->isTenured());

View File

@@ -172,24 +172,6 @@ class NormalArgumentsObject;
class SetObject;
class StrictArgumentsObject;
#ifdef JSGC_GENERATIONAL
class DenseRangeRef : public gc::BufferableRef
{
JSObject *owner;
uint32_t start;
uint32_t end;
public:
DenseRangeRef(JSObject *obj, uint32_t start, uint32_t end)
: owner(obj), start(start), end(end)
{
JS_ASSERT(start < end);
}
inline void mark(JSTracer *trc);
};
#endif
/*
* NOTE: This is a placeholder for bug 619558.
*
@@ -202,7 +184,7 @@ DenseRangeWriteBarrierPost(JSRuntime *rt, JSObject *obj, uint32_t start, uint32_
#ifdef JSGC_GENERATIONAL
if (count > 0) {
JS::shadow::Runtime *shadowRuntime = JS::shadow::Runtime::asShadowRuntime(rt);
shadowRuntime->gcStoreBufferPtr()->putGeneric(DenseRangeRef(obj, start, start + count));
shadowRuntime->gcStoreBufferPtr()->putSlot(obj, HeapSlot::Element, start, count);
}
#endif
}
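
The net effect for dense-range writes: the caller does its raw stores and then covers the whole span with one ranged store-buffer entry, instead of registering a generic DenseRangeRef. A hedged sketch of that pattern (the helper, the memcpy, and the elems parameter are illustrative; this is not a claim about how copyDenseElements is actually written):

#include <string.h>
#include "jsobj.h"   // header assumed; declares js::DenseRangeWriteBarrierPost

// Sketch: |elems| is obj's dense element vector. The stores themselves are
// unbarriered; the single call afterwards is the generational post-barrier
// for every element in [start, start + count).
static void
BulkWriteElements(JSRuntime *rt, JSObject *obj, js::HeapSlot *elems,
                  uint32_t start, const js::Value *src, uint32_t count)
{
    memcpy(elems + start, src, count * sizeof(js::Value));
    js::DenseRangeWriteBarrierPost(rt, obj, start, count);
}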
@@ -1249,19 +1231,6 @@ class ValueArray {
namespace js {
#ifdef JSGC_GENERATIONAL
inline void
DenseRangeRef::mark(JSTracer *trc)
{
/* Apply forwarding, if we have already visited owner. */
js::gc::IsObjectMarked(&owner);
uint32_t initLen = owner->getDenseInitializedLength();
uint32_t clampedStart = Min(start, initLen);
gc::MarkArraySlots(trc, Min(end, initLen) - clampedStart,
owner->getDenseElements() + clampedStart, "element");
}
#endif
/* Set *resultp to tell whether obj has an own property with the given id. */
bool
HasOwnProperty(JSContext *cx, HandleObject obj, HandleId id, bool *resultp);