Mirror of https://gitlab.winehq.org/wine/wine-gecko.git (synced 2024-09-13 09:24:08 -07:00)
Backed out changeset df60c0297aa2 (bug 1100652) for ggc orange
parent 13fe636623
commit f829edc179
js/src/gc/StoreBuffer.cpp

@@ -22,7 +22,7 @@ using mozilla::ReentrancyGuard;
 /*** Edges ***/
 
 void
-StoreBuffer::SlotsEdge::mark(JSTracer *trc) const
+StoreBuffer::SlotsEdge::mark(JSTracer *trc)
 {
     NativeObject *obj = object();
 
@@ -48,7 +48,7 @@ StoreBuffer::SlotsEdge::mark(JSTracer *trc) const
 }
 
 void
-StoreBuffer::WholeCellEdges::mark(JSTracer *trc) const
+StoreBuffer::WholeCellEdges::mark(JSTracer *trc)
 {
     MOZ_ASSERT(edge->isTenured());
     JSGCTraceKind kind = GetGCThingTraceKind(edge);
@@ -64,7 +64,7 @@ StoreBuffer::WholeCellEdges::mark(JSTracer *trc) const
 }
 
 void
-StoreBuffer::CellPtrEdge::mark(JSTracer *trc) const
+StoreBuffer::CellPtrEdge::mark(JSTracer *trc)
 {
     if (!*edge)
         return;
@@ -74,7 +74,7 @@ StoreBuffer::CellPtrEdge::mark(JSTracer *trc) const
 }
 
 void
-StoreBuffer::ValueEdge::mark(JSTracer *trc) const
+StoreBuffer::ValueEdge::mark(JSTracer *trc)
 {
     if (!deref())
         return;
@@ -84,14 +84,135 @@ StoreBuffer::ValueEdge::mark(JSTracer *trc) const
 
 /*** MonoTypeBuffer ***/
 
+template <typename T>
+void
+StoreBuffer::MonoTypeBuffer<T>::handleOverflow(StoreBuffer *owner)
+{
+    if (!owner->isAboutToOverflow()) {
+        /*
+         * Compact the buffer now, and if that fails to free enough space then
+         * trigger a minor collection.
+         */
+        compact(owner);
+        if (isLowOnSpace())
+            owner->setAboutToOverflow();
+    } else {
+        /*
+         * A minor GC has already been triggered, so there's no point
+         * compacting unless the buffer is totally full.
+         */
+        if (storage_->availableInCurrentChunk() < sizeof(T))
+            maybeCompact(owner);
+    }
+}
+
+template <typename T>
+void
+StoreBuffer::MonoTypeBuffer<T>::compactRemoveDuplicates(StoreBuffer *owner)
+{
+    typedef HashSet<T, typename T::Hasher, SystemAllocPolicy> DedupSet;
+
+    DedupSet duplicates;
+    if (!duplicates.init())
+        return; /* Failure to de-dup is acceptable. */
+
+    LifoAlloc::Enum insert(*storage_);
+    for (LifoAlloc::Enum e(*storage_); !e.empty(); e.popFront<T>()) {
+        T *edge = e.get<T>();
+        if (!duplicates.has(*edge)) {
+            insert.updateFront<T>(*edge);
+            insert.popFront<T>();
+
+            /* Failure to insert will leave the set with duplicates. Oh well. */
+            duplicates.put(*edge);
+        }
+    }
+    storage_->release(insert.mark());
+
+    duplicates.clear();
+}
+
+template <typename T>
+void
+StoreBuffer::MonoTypeBuffer<T>::compact(StoreBuffer *owner)
+{
+    MOZ_ASSERT(storage_);
+    compactRemoveDuplicates(owner);
+    usedAtLastCompact_ = storage_->used();
+}
+
+template <typename T>
+void
+StoreBuffer::MonoTypeBuffer<T>::maybeCompact(StoreBuffer *owner)
+{
+    MOZ_ASSERT(storage_);
+    if (storage_->used() != usedAtLastCompact_)
+        compact(owner);
+}
+
 template <typename T>
 void
 StoreBuffer::MonoTypeBuffer<T>::mark(StoreBuffer *owner, JSTracer *trc)
 {
-    MOZ_ASSERT(stores_.initialized());
-    sinkStores(owner);
-    for (typename StoreSet::Range r = stores_.all(); !r.empty(); r.popFront())
-        r.front().mark(trc);
+    MOZ_ASSERT(owner->isEnabled());
+    ReentrancyGuard g(*owner);
+    if (!storage_)
+        return;
+
+    maybeCompact(owner);
+    for (LifoAlloc::Enum e(*storage_); !e.empty(); e.popFront<T>()) {
+        T *edge = e.get<T>();
+        edge->mark(trc);
+    }
+}
+
+/*** RelocatableMonoTypeBuffer ***/
+
+template <typename T>
+void
+StoreBuffer::RelocatableMonoTypeBuffer<T>::compactMoved(StoreBuffer *owner)
+{
+    LifoAlloc &storage = *this->storage_;
+    EdgeSet invalidated;
+    if (!invalidated.init())
+        CrashAtUnhandlableOOM("RelocatableMonoTypeBuffer::compactMoved: Failed to init table.");
+
+    /* Collect the set of entries which are currently invalid. */
+    for (LifoAlloc::Enum e(storage); !e.empty(); e.popFront<T>()) {
+        T *edge = e.get<T>();
+        if (edge->isTagged()) {
+            if (!invalidated.put(edge->untagged().edge))
+                CrashAtUnhandlableOOM("RelocatableMonoTypeBuffer::compactMoved: Failed to put removal.");
+        } else {
+            invalidated.remove(edge->untagged().edge);
+        }
+    }
+
+    /* Remove all entries which are in the invalidated set. */
+    LifoAlloc::Enum insert(storage);
+    for (LifoAlloc::Enum e(storage); !e.empty(); e.popFront<T>()) {
+        T *edge = e.get<T>();
+        if (!edge->isTagged() && !invalidated.has(edge->untagged().edge)) {
+            insert.updateFront<T>(*edge);
+            insert.popFront<T>();
+        }
+    }
+    storage.release(insert.mark());
+
+    invalidated.clear();
+
+#ifdef DEBUG
+    for (LifoAlloc::Enum e(storage); !e.empty(); e.popFront<T>())
+        MOZ_ASSERT(!e.get<T>()->isTagged());
+#endif
+}
+
+template <typename T>
+void
+StoreBuffer::RelocatableMonoTypeBuffer<T>::compact(StoreBuffer *owner)
+{
+    compactMoved(owner);
+    StoreBuffer::MonoTypeBuffer<T>::compact(owner);
 }
 
 /*** GenericBuffer ***/
@@ -252,5 +373,7 @@ template struct StoreBuffer::MonoTypeBuffer<StoreBuffer::ValueEdge>;
 template struct StoreBuffer::MonoTypeBuffer<StoreBuffer::CellPtrEdge>;
 template struct StoreBuffer::MonoTypeBuffer<StoreBuffer::SlotsEdge>;
 template struct StoreBuffer::MonoTypeBuffer<StoreBuffer::WholeCellEdges>;
+template struct StoreBuffer::RelocatableMonoTypeBuffer<StoreBuffer::ValueEdge>;
+template struct StoreBuffer::RelocatableMonoTypeBuffer<StoreBuffer::CellPtrEdge>;
 
 #endif /* JSGC_GENERATIONAL */
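The MonoTypeBuffer compaction routines above filter the LifoAlloc in place with two enumerators over the same storage: a read cursor that visits every entry and a write cursor that advances only when an entry is kept, after which everything past the final write mark is released. A minimal sketch of that two-cursor idiom, using std::vector and std::unordered_set as stand-ins for the real LifoAlloc and HashSet types (illustrative names, not SpiderMonkey API):

#include <cstddef>
#include <cstdio>
#include <unordered_set>
#include <vector>

// Two-cursor, in-place dedup in the spirit of compactRemoveDuplicates():
// the read cursor visits every entry, the write cursor advances only for
// entries that are kept, and the container is trimmed to the write mark.
template <typename T>
void compactRemoveDuplicates(std::vector<T> &storage)
{
    std::unordered_set<T> seen;   // stand-in for the DedupSet hash set
    std::size_t write = 0;        // plays the role of the "insert" enumerator

    for (std::size_t read = 0; read < storage.size(); ++read) {  // the "e" enumerator
        if (seen.insert(storage[read]).second)   // first occurrence: keep it
            storage[write++] = storage[read];    // copy forward, advance write cursor
    }
    storage.resize(write);        // release everything past the write mark
}

int main()
{
    std::vector<int> edges{1, 2, 2, 3, 1, 4};
    compactRemoveDuplicates(edges);
    for (int e : edges)
        std::printf("%d ", e);    // prints: 1 2 3 4
    std::printf("\n");
    return 0;
}

The real buffer performs the same walk directly over LifoAlloc chunks via LifoAlloc::Enum, so nothing is copied aside from kept entries moving forward; only the deduplication set allocates, and a failed insertion into it merely leaves some duplicates behind.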
js/src/gc/StoreBuffer.h

@@ -80,6 +80,17 @@ class StoreBuffer
     /* The size at which a block is about to overflow. */
     static const size_t LowAvailableThreshold = (size_t)(LifoAllocBlockSize * 1.0 / 16.0);
 
+    /*
+     * If the space available in the store buffer hits the
+     * LowAvailableThreshold and gets compacted, but still doesn't have at
+     * least HighAvailableThreshold space available, then we will trigger a
+     * minor GC. HighAvailableThreshold should be set to provide enough space
+     * for the mutator to run for a while in between compactions. (If
+     * HighAvailableThreshold is too low, we will thrash and spend most of the
+     * time compacting. If it is too high, we will tenure things too early.)
+     */
+    static const size_t HighAvailableThreshold = (size_t)(LifoAllocBlockSize * 1.0 / 4.0);
+
     /*
      * This buffer holds only a single type of edge. Using this buffer is more
      * efficient than the generic buffer when many writes will be to the same
@@ -88,75 +99,89 @@
     template<typename T>
     struct MonoTypeBuffer
     {
-        /* The canonical set of stores. */
-        typedef HashSet<T, typename T::Hasher, SystemAllocPolicy> StoreSet;
-        StoreSet stores_;
+        LifoAlloc *storage_;
+        size_t usedAtLastCompact_;
 
-        /*
-         * A small, fixed-size buffer in front of the canonical set to simplify
-         * insertion via jit code.
-         */
-        const static size_t NumBufferEntries = 4096 / sizeof(T);
-        T buffer_[NumBufferEntries];
-        T *insert_;
-
-        /* Maximum number of entries before we request a minor GC. */
-        const static size_t MaxEntries = 48 * 1024 / sizeof(T);
-
-        explicit MonoTypeBuffer() : insert_(buffer_) {}
-        ~MonoTypeBuffer() { stores_.finish(); }
+        explicit MonoTypeBuffer() : storage_(nullptr), usedAtLastCompact_(0) {}
+        ~MonoTypeBuffer() { js_delete(storage_); }
 
         bool init() {
-            if (!stores_.initialized() && !stores_.init())
-                return false;
+            if (!storage_)
+                storage_ = js_new<LifoAlloc>(LifoAllocBlockSize);
             clear();
-            return true;
+            return bool(storage_);
         }
 
         void clear() {
-            if (stores_.initialized())
-                stores_.clear();
+            if (!storage_)
+                return;
+
+            storage_->used() ? storage_->releaseAll() : storage_->freeAll();
+            usedAtLastCompact_ = 0;
         }
 
+        bool isAboutToOverflow() const {
+            return !storage_->isEmpty() && storage_->availableInCurrentChunk() < LowAvailableThreshold;
+        }
+
+        bool isLowOnSpace() const {
+            return !storage_->isEmpty() && storage_->availableInCurrentChunk() < HighAvailableThreshold;
+        }
+
+        void handleOverflow(StoreBuffer *owner);
+
+        /* Compaction algorithms. */
+        void compactRemoveDuplicates(StoreBuffer *owner);
+
+        /*
+         * Attempts to reduce the usage of the buffer by removing unnecessary
+         * entries.
+         */
+        virtual void compact(StoreBuffer *owner);
+
+        /* Compacts if any entries have been added since the last compaction. */
+        void maybeCompact(StoreBuffer *owner);
+
         /* Add one item to the buffer. */
         void put(StoreBuffer *owner, const T &t) {
-            MOZ_ASSERT(stores_.initialized());
-            *insert_++ = t;
-            if (MOZ_UNLIKELY(insert_ == buffer_ + NumBufferEntries))
-                sinkStores(owner);
-        }
-
-        /* Move any buffered stores to the canonical store set. */
-        void sinkStores(StoreBuffer *owner) {
-            MOZ_ASSERT(stores_.initialized());
+            MOZ_ASSERT(storage_);
+
+            T *tp = storage_->new_<T>(t);
+            if (!tp)
+                CrashAtUnhandlableOOM("Failed to allocate for MonoTypeBuffer::put.");
 
-            for (T *p = buffer_; p < insert_; ++p) {
-                if (!stores_.put(*p))
-                    CrashAtUnhandlableOOM("Failed to allocate for MonoTypeBuffer::sinkStores.");
-            }
-            insert_ = buffer_;
-
-            if (MOZ_UNLIKELY(stores_.count() > MaxEntries))
-                owner->setAboutToOverflow();
-        }
-
-        /* Remove an item from the store buffer. */
-        void unput(StoreBuffer *owner, const T &v) {
-            sinkStores(owner);
-            stores_.remove(v);
+            if (isAboutToOverflow())
+                handleOverflow(owner);
         }
 
         /* Mark the source of all edges in the store buffer. */
         void mark(StoreBuffer *owner, JSTracer *trc);
 
         size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) {
-            return stores_.sizeOfExcludingThis(mallocSizeOf);
+            return storage_ ? storage_->sizeOfIncludingThis(mallocSizeOf) : 0;
        }
 
       private:
         MonoTypeBuffer &operator=(const MonoTypeBuffer& other) MOZ_DELETE;
     };
 
+    /*
+     * Overrides the MonoTypeBuffer to support pointers that may be moved in
+     * memory outside of the GC's control.
+     */
+    template <typename T>
+    struct RelocatableMonoTypeBuffer : public MonoTypeBuffer<T>
+    {
+        /* Override compaction to filter out removed items. */
+        void compactMoved(StoreBuffer *owner);
+        virtual void compact(StoreBuffer *owner) MOZ_OVERRIDE;
+
+        /* Record a removal from the buffer. */
+        void unput(StoreBuffer *owner, const T &v) {
+            MonoTypeBuffer<T>::put(owner, v.tagged());
+        }
+    };
+
     struct GenericBuffer
     {
         LifoAlloc *storage_;
@@ -226,7 +251,6 @@
     {
         Cell **edge;
 
-        CellPtrEdge() : edge(nullptr) {}
         explicit CellPtrEdge(Cell **v) : edge(v) {}
         bool operator==(const CellPtrEdge &other) const { return edge == other.edge; }
         bool operator!=(const CellPtrEdge &other) const { return edge != other.edge; }
@@ -236,7 +260,7 @@
             return !nursery.isInside(edge);
         }
 
-        void mark(JSTracer *trc) const;
+        void mark(JSTracer *trc);
 
         CellPtrEdge tagged() const { return CellPtrEdge((Cell **)(uintptr_t(edge) | 1)); }
         CellPtrEdge untagged() const { return CellPtrEdge((Cell **)(uintptr_t(edge) & ~1)); }
@@ -249,7 +273,6 @@
     {
         JS::Value *edge;
 
-        ValueEdge() : edge(nullptr) {}
         explicit ValueEdge(JS::Value *v) : edge(v) {}
         bool operator==(const ValueEdge &other) const { return edge == other.edge; }
         bool operator!=(const ValueEdge &other) const { return edge != other.edge; }
@@ -261,7 +284,7 @@
             return !nursery.isInside(edge);
         }
 
-        void mark(JSTracer *trc) const;
+        void mark(JSTracer *trc);
 
         ValueEdge tagged() const { return ValueEdge((JS::Value *)(uintptr_t(edge) | 1)); }
         ValueEdge untagged() const { return ValueEdge((JS::Value *)(uintptr_t(edge) & ~1)); }
@@ -280,7 +303,6 @@
         int32_t start_;
         int32_t count_;
 
-        SlotsEdge() : objectAndKind_(0), start_(0), count_(0) {}
         SlotsEdge(NativeObject *object, int kind, int32_t start, int32_t count)
           : objectAndKind_(uintptr_t(object) | kind), start_(start), count_(count)
         {
@@ -307,7 +329,7 @@
             return !IsInsideNursery(JS::AsCell(reinterpret_cast<JSObject *>(object())));
         }
 
-        void mark(JSTracer *trc) const;
+        void mark(JSTracer *trc);
 
         typedef struct {
             typedef SlotsEdge Lookup;
@@ -320,7 +342,6 @@
     {
         Cell *edge;
 
-        WholeCellEdges() : edge(nullptr) {}
         explicit WholeCellEdges(Cell *cell) : edge(cell) {
             MOZ_ASSERT(edge->isTenured());
         }
@@ -333,7 +354,7 @@
         static bool supportsDeduplication() { return true; }
         void *deduplicationKey() const { return (void *)edge; }
 
-        void mark(JSTracer *trc) const;
+        void mark(JSTracer *trc);
 
         typedef PointerEdgeHasher<WholeCellEdges> Hasher;
     };
@@ -404,8 +425,8 @@
     MonoTypeBuffer<CellPtrEdge> bufferCell;
     MonoTypeBuffer<SlotsEdge> bufferSlot;
     MonoTypeBuffer<WholeCellEdges> bufferWholeCell;
-    MonoTypeBuffer<ValueEdge> bufferRelocVal;
-    MonoTypeBuffer<CellPtrEdge> bufferRelocCell;
+    RelocatableMonoTypeBuffer<ValueEdge> bufferRelocVal;
+    RelocatableMonoTypeBuffer<CellPtrEdge> bufferRelocCell;
     GenericBuffer bufferGeneric;
 
     JSRuntime *runtime_;
@@ -484,13 +505,6 @@
     /* For use by our owned buffers and for testing. */
    void setAboutToOverflow();
 
-    /* For jit access to the raw buffer. */
-    void oolSinkStoresForWholeCellBuffer() { bufferWholeCell.sinkStores(this); }
-    void *addressOfWholeCellBufferPointer() const { return (void *)&bufferWholeCell.insert_; }
-    void *addressOfWholeCellBufferEnd() const {
-        return (void *)(bufferWholeCell.buffer_ + bufferWholeCell.NumBufferEntries);
-    }
-
     void addSizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf, JS::GCSizes *sizes);
 };
 
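To make the overflow policy in the header above concrete: the buffer counts as about to overflow when its current chunk has less than 1/16 of a block free, and as still low on space after compacting when less than 1/4 of a block is free, at which point a minor GC is requested. A small sketch of that arithmetic and decision, with an assumed block size (the real LifoAllocBlockSize is defined elsewhere in the engine; 8192 here is only illustrative):

#include <cstddef>
#include <cstdio>

// Assumed value for illustration only; the engine defines the real constant.
const std::size_t LifoAllocBlockSize = 8192;

const std::size_t LowAvailableThreshold  = LifoAllocBlockSize / 16;  // 512 bytes
const std::size_t HighAvailableThreshold = LifoAllocBlockSize / 4;   // 2048 bytes

// Mirrors the shape of handleOverflow(): compact first, and only request a
// minor GC if compaction still leaves less than HighAvailableThreshold free.
bool shouldRequestMinorGC(std::size_t availableAfterCompaction)
{
    return availableAfterCompaction < HighAvailableThreshold;
}

int main()
{
    std::printf("low = %zu bytes, high = %zu bytes\n",
                LowAvailableThreshold, HighAvailableThreshold);
    std::printf("minor GC after compaction with 1024 bytes free? %s\n",
                shouldRequestMinorGC(1024) ? "yes" : "no");  // yes: 1024 < 2048
    return 0;
}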
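The relocatable buffers restored by this backout record removals with a low-bit pointer tag: unput() re-appends the edge with its low bit set, and compactMoved() then drops every entry for a pointer whose most recent record is tagged (tagged records themselves are never kept). A minimal sketch of that tag/untag trick on a raw pointer, with illustrative types rather than the SpiderMonkey ones:

#include <cassert>
#include <stdint.h>

// Low-bit tagging on a pointer-sized value, in the spirit of the
// tagged()/untagged()/isTagged() helpers on CellPtrEdge and ValueEdge.
struct EdgePtr {
    uintptr_t bits;

    explicit EdgePtr(void *p) : bits(reinterpret_cast<uintptr_t>(p)) {}

    EdgePtr tagged() const   { EdgePtr e(*this); e.bits |= 1; return e; }   // removal record
    EdgePtr untagged() const { EdgePtr e(*this); e.bits &= ~uintptr_t(1); return e; }
    bool isTagged() const    { return (bits & 1) != 0; }
};

int main()
{
    int slot = 0;                      // any aligned object: its address has a clear low bit
    EdgePtr edge(&slot);

    assert(!edge.isTagged());
    EdgePtr removal = edge.tagged();   // what unput() appends to the buffer
    assert(removal.isTagged());
    assert(removal.untagged().bits == edge.bits);
    return 0;
}

In the diff above, CellPtrEdge::tagged()/untagged() and ValueEdge::tagged()/untagged() play exactly this role for the store buffer's own edge types.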