Bug 1251833 - Part 3: Merge ArenaHeader into Arena. r=terrence

Emanuel Hoogeveen 2016-02-29 11:24:00 -05:00
parent 0630443dce
commit 206c937084
16 changed files with 574 additions and 637 deletions


@ -57,6 +57,7 @@ const size_t ChunkRuntimeOffset = ChunkSize - sizeof(void*);
const size_t ChunkTrailerSize = 2 * sizeof(uintptr_t) + sizeof(uint64_t);
const size_t ChunkLocationOffset = ChunkSize - ChunkTrailerSize;
const size_t ArenaZoneOffset = sizeof(size_t);
const size_t ArenaHeaderSize = sizeof(size_t) + 2 * sizeof(uintptr_t) + sizeof(size_t);
/*
* Live objects are marked black. How many other additional colors are available
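The new ArenaHeaderSize constant mirrors the header fields that the merged Arena now carries at its start: a FreeSpan padded out to one machine word, the zone and next pointers, and one word of packed bitfields. A standalone sketch of the arithmetic, assuming a 64-bit target (illustrative, not part of the patch):

#include <cstddef>
#include <cstdint>

// Values assume a 64-bit target; on 32-bit every term halves.
constexpr size_t ArenaSize = 4096;
constexpr size_t ArenaHeaderSize =
    sizeof(size_t) + 2 * sizeof(uintptr_t) + sizeof(size_t);  // 8 + 16 + 8

static_assert(ArenaHeaderSize == 32, "four machine words of header on 64-bit");
static_assert(ArenaSize - ArenaHeaderSize == 4064, "bytes left for GC things");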


@ -92,7 +92,7 @@ GCRuntime::checkIncrementalZoneState(ExclusiveContext* cx, T* t)
Zone* zone = cx->asJSContext()->zone();
MOZ_ASSERT_IF(t && zone->wasGCStarted() && (zone->isGCMarking() || zone->isGCSweeping()),
t->asTenured().arenaHeader()->allocatedDuringIncremental);
t->asTenured().arena()->allocatedDuringIncremental);
#endif
}
@ -323,12 +323,12 @@ ArenaLists::allocateFromArena(JS::Zone* zone, AllocKind thingKind,
maybeLock.emplace(rt);
ArenaList& al = arenaLists[thingKind];
ArenaHeader* aheader = al.takeNextArena();
if (aheader) {
Arena* arena = al.takeNextArena();
if (arena) {
// Empty arenas should be immediately freed.
MOZ_ASSERT(!aheader->isEmpty());
MOZ_ASSERT(!arena->isEmpty());
return allocateFromArenaInner(zone, aheader, thingKind);
return allocateFromArenaInner(zone, arena, thingKind);
}
// Parallel threads have their own ArenaLists, but chunks are shared;
@ -342,33 +342,33 @@ ArenaLists::allocateFromArena(JS::Zone* zone, AllocKind thingKind,
// Although our chunk should definitely have enough space for another arena,
// there are other valid reasons why Chunk::allocateArena() may fail.
aheader = rt->gc.allocateArena(chunk, zone, thingKind, maybeLock.ref());
if (!aheader)
arena = rt->gc.allocateArena(chunk, zone, thingKind, maybeLock.ref());
if (!arena)
return nullptr;
MOZ_ASSERT(!maybeLock->wasUnlocked());
MOZ_ASSERT(al.isCursorAtEnd());
al.insertBeforeCursor(aheader);
al.insertBeforeCursor(arena);
return allocateFromArenaInner(zone, aheader, thingKind);
return allocateFromArenaInner(zone, arena, thingKind);
}
inline TenuredCell*
ArenaLists::allocateFromArenaInner(JS::Zone* zone, ArenaHeader* aheader, AllocKind kind)
ArenaLists::allocateFromArenaInner(JS::Zone* zone, Arena* arena, AllocKind kind)
{
size_t thingSize = Arena::thingSize(kind);
freeLists[kind] = aheader->getFirstFreeSpan();
freeLists[kind] = arena->getFirstFreeSpan();
if (MOZ_UNLIKELY(zone->wasGCStarted()))
zone->runtimeFromAnyThread()->gc.arenaAllocatedDuringGC(zone, aheader);
zone->runtimeFromAnyThread()->gc.arenaAllocatedDuringGC(zone, arena);
TenuredCell* thing = freeLists[kind]->allocate(thingSize);
MOZ_ASSERT(thing); // This allocation is infallible.
return thing;
}
void
GCRuntime::arenaAllocatedDuringGC(JS::Zone* zone, ArenaHeader* arena)
GCRuntime::arenaAllocatedDuringGC(JS::Zone* zone, Arena* arena)
{
if (zone->needsIncrementalBarrier()) {
arena->allocatedDuringIncremental = true;


@ -847,7 +847,7 @@ class GCRuntime
void freeAllLifoBlocksAfterSweeping(LifoAlloc* lifo);
// Public here for ReleaseArenaLists and FinalizeTypedArenas.
void releaseArena(ArenaHeader* aheader, const AutoLockGC& lock);
void releaseArena(Arena* arena, const AutoLockGC& lock);
void releaseHeldRelocatedArenas();
void releaseHeldRelocatedArenasWithoutUnlocking(const AutoLockGC& lock);
@ -878,8 +878,8 @@ class GCRuntime
friend class ArenaLists;
Chunk* pickChunk(const AutoLockGC& lock,
AutoMaybeStartBackgroundAllocation& maybeStartBGAlloc);
ArenaHeader* allocateArena(Chunk* chunk, Zone* zone, AllocKind kind, const AutoLockGC& lock);
void arenaAllocatedDuringGC(JS::Zone* zone, ArenaHeader* arena);
Arena* allocateArena(Chunk* chunk, Zone* zone, AllocKind kind, const AutoLockGC& lock);
void arenaAllocatedDuringGC(JS::Zone* zone, Arena* arena);
// Allocator internals
bool gcIfNeededPerAllocation(JSContext* cx);
@ -961,15 +961,15 @@ class GCRuntime
void endCompactPhase(JS::gcreason::Reason reason);
void sweepTypesAfterCompacting(Zone* zone);
void sweepZoneAfterCompacting(Zone* zone);
bool relocateArenas(Zone* zone, JS::gcreason::Reason reason, ArenaHeader*& relocatedListOut,
bool relocateArenas(Zone* zone, JS::gcreason::Reason reason, Arena*& relocatedListOut,
SliceBudget& sliceBudget);
void updateAllCellPointersParallel(MovingTracer* trc, Zone* zone);
void updateAllCellPointersSerial(MovingTracer* trc, Zone* zone);
void updatePointersToRelocatedCells(Zone* zone);
void protectAndHoldArenas(ArenaHeader* arenaList);
void protectAndHoldArenas(Arena* arenaList);
void unprotectHeldRelocatedArenas();
void releaseRelocatedArenas(ArenaHeader* arenaList);
void releaseRelocatedArenasWithoutUnlocking(ArenaHeader* arenaList, const AutoLockGC& lock);
void releaseRelocatedArenas(Arena* arenaList);
void releaseRelocatedArenasWithoutUnlocking(Arena* arenaList, const AutoLockGC& lock);
void finishCollection(JS::gcreason::Reason reason);
void computeNonIncrementalMarkingForValidation();
@ -1177,14 +1177,14 @@ class GCRuntime
/*
* List head of arenas allocated during the sweep phase.
*/
js::gc::ArenaHeader* arenasAllocatedDuringSweep;
js::gc::Arena* arenasAllocatedDuringSweep;
/*
* Incremental compacting state.
*/
bool startedCompacting;
js::gc::ZoneList zonesToMaybeCompact;
ArenaHeader* relocatedArenasToRelease;
Arena* relocatedArenasToRelease;
#ifdef JS_GC_ZEAL
js::gc::MarkingValidator* markingValidator;


@ -60,10 +60,9 @@ TraceManuallyBarrieredGenericPointerEdge(JSTracer* trc, gc::Cell** thingp, const
namespace gc {
struct Arena;
class Arena;
class ArenaList;
class SortedArenaList;
struct ArenaHeader;
struct Chunk;
/*
@ -251,7 +250,7 @@ struct Cell
};
// A GC TenuredCell gets behaviors that are valid for things in the Tenured
// heap, such as access to the arena header and mark bits.
// heap, such as access to the arena and mark bits.
class TenuredCell : public Cell
{
public:
@ -269,8 +268,8 @@ class TenuredCell : public Cell
// used tagged.
static MOZ_ALWAYS_INLINE bool isNullLike(const Cell* thing) { return !thing; }
// Access to the arena header.
inline ArenaHeader* arenaHeader() const;
// Access to the arena.
inline Arena* arena() const;
inline AllocKind getAllocKind() const;
inline JS::TraceKind getTraceKind() const;
inline JS::Zone* zone() const;
@ -323,8 +322,8 @@ const size_t ArenaBitmapWords = ArenaBitmapBits / JS_BITS_PER_WORD;
*/
class FreeSpan
{
friend class Arena;
friend class ArenaCellIterImpl;
friend struct ArenaHeader;
uint16_t first;
uint16_t last;
@ -332,8 +331,8 @@ class FreeSpan
public:
// This inits just |first| and |last|; if the span is non-empty it doesn't
// do anything with the next span stored at |last|.
void initBounds(uintptr_t firstArg, uintptr_t lastArg, const ArenaHeader* aheader) {
checkRange(firstArg, lastArg, aheader);
void initBounds(uintptr_t firstArg, uintptr_t lastArg, const Arena* arena) {
checkRange(firstArg, lastArg, arena);
first = firstArg;
last = lastArg;
}
@ -346,19 +345,19 @@ class FreeSpan
// This sets |first| and |last|, and also sets the next span stored at
// |last| as empty. (As a result, |firstArg| and |lastArg| cannot represent
// an empty span.)
void initFinal(uintptr_t firstArg, uintptr_t lastArg, const ArenaHeader* aheader) {
initBounds(firstArg, lastArg, aheader);
FreeSpan* last = nextSpanUnchecked(aheader);
void initFinal(uintptr_t firstArg, uintptr_t lastArg, const Arena* arena) {
initBounds(firstArg, lastArg, arena);
FreeSpan* last = nextSpanUnchecked(arena);
last->initAsEmpty();
checkSpan(aheader);
checkSpan(arena);
}
bool isEmpty() const {
return !first;
}
ArenaHeader* getArenaUnchecked() { return reinterpret_cast<ArenaHeader*>(this); }
inline ArenaHeader* getArena();
Arena* getArenaUnchecked() { return reinterpret_cast<Arena*>(this); }
inline Arena* getArena();
static size_t offsetOfFirst() {
return offsetof(FreeSpan, first);
@ -369,20 +368,20 @@ class FreeSpan
}
// Like nextSpan(), but no checking of the following span is done.
FreeSpan* nextSpanUnchecked(const ArenaHeader* aheader) const {
MOZ_ASSERT(aheader && !isEmpty());
return reinterpret_cast<FreeSpan*>(uintptr_t(aheader) + last);
FreeSpan* nextSpanUnchecked(const Arena* arena) const {
MOZ_ASSERT(arena && !isEmpty());
return reinterpret_cast<FreeSpan*>(uintptr_t(arena) + last);
}
const FreeSpan* nextSpan(const ArenaHeader* aheader) const {
checkSpan(aheader);
return nextSpanUnchecked(aheader);
const FreeSpan* nextSpan(const Arena* arena) const {
checkSpan(arena);
return nextSpanUnchecked(arena);
}
MOZ_ALWAYS_INLINE TenuredCell* allocate(size_t thingSize) {
// Eschew the usual checks, because this might be the placeholder span.
// If this is somehow an invalid, non-empty span, checkSpan() will catch it.
ArenaHeader* arena = getArenaUnchecked();
Arena* arena = getArenaUnchecked();
checkSpan(arena);
uintptr_t thing = uintptr_t(arena) + first;
if (first < last) {
@ -402,74 +401,135 @@ class FreeSpan
return reinterpret_cast<TenuredCell*>(thing);
}
inline void checkSpan(const ArenaHeader* aheader) const;
inline void checkRange(uintptr_t first, uintptr_t last, const ArenaHeader*) const;
inline void checkSpan(const Arena* arena) const;
inline void checkRange(uintptr_t first, uintptr_t last, const Arena* arena) const;
};
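As a simplified model of the encoding above (a toy, not the real class): a span holds two arena-relative byte offsets, |first| == 0 encodes the empty span, and allocation bumps |first| by thingSize; the real class additionally chains to a next FreeSpan written at offset |last| inside the arena, which this toy collapses to "becomes empty".

#include <cassert>
#include <cstdint>

// Toy model of FreeSpan's offset encoding; real spans live inside the arena.
struct ToySpan {
    uint16_t first;  // offset of first free thing; 0 encodes the empty span
    uint16_t last;   // offset of last free thing in the span
};

// Bump-allocate one thing; returns its arena-relative offset.
uint16_t toyAllocate(ToySpan& span, uint16_t thingSize) {
    assert(span.first != 0 && "span must be non-empty");
    uint16_t thing = span.first;
    span.first = (span.first < span.last) ? span.first + thingSize : 0;
    return thing;
}

int main() {
    ToySpan span{32, 64};  // two free 32-byte things at offsets 32 and 64
    assert(toyAllocate(span, 32) == 32);
    assert(toyAllocate(span, 32) == 64);
    assert(span.first == 0);  // span exhausted
}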
/* Every arena has a header. */
struct ArenaHeader
/*
* Arenas are the allocation units of the tenured heap in the GC. An arena
* is 4kiB in size and 4kiB-aligned. It starts with several header fields
* followed by some bytes of padding. The remainder of the arena is filled
* with GC things of a particular AllocKind. The padding ensures that the
* GC thing array ends exactly at the end of the arena:
*
* <----------------------------------------------> = ArenaSize bytes
* +---------------+---------+----+----+-----+----+
* | header fields | padding | T0 | T1 | ... | Tn |
* +---------------+---------+----+----+-----+----+
* <-------------------------> = first thing offset
*/
class Arena
{
friend struct Arena;
static JS_FRIEND_DATA(const uint32_t) ThingSizes[];
static JS_FRIEND_DATA(const uint32_t) FirstThingOffsets[];
static JS_FRIEND_DATA(const uint32_t) ThingsPerArena[];
private:
/*
* The first span of free things in the arena.
* The first span of free things in the arena. Most of these spans are
* stored as offsets in free regions of the data array, and most operations
* on FreeSpans take an Arena pointer for safety. However, the FreeSpans
* used for allocation are stored here, at the start of an Arena, and use
* their own address to grab the next span within the same Arena.
*/
FreeSpan firstFreeSpan;
public:
/*
* The zone that this Arena is contained within, when allocated. The offset
* of this field must match the ArenaZoneOffset stored in js/HeapAPI.h,
* as is statically asserted below.
*/
JS::Zone* zone;
/*
* ArenaHeader::next has two purposes: when unallocated, it points to the
* next available Arena's header. When allocated, it points to the next
* arena of the same size class and compartment.
* Arena::next has two purposes: when unallocated, it points to the next
* available Arena. When allocated, it points to the next Arena in the same
* zone and with the same alloc kind.
*/
ArenaHeader* next;
Arena* next;
private:
/*
* One of AllocKind constants or AllocKind::LIMIT when the arena does not
* contain any GC things and is on the list of empty arenas in the GC
* One of the AllocKind constants or AllocKind::LIMIT when the arena does
* not contain any GC things and is on the list of empty arenas in the GC
* chunk.
*
* We use 8 bits for the allocKind so the compiler can use byte-level memory
* instructions to access it.
* We use 8 bits for the alloc kind so the compiler can use byte-level
* memory instructions to access it.
*/
size_t allocKind : 8;
public:
/*
* When collecting we sometimes need to keep an auxiliary list of arenas,
* for which we use the following fields. This happens for several reasons:
* for which we use the following fields. This happens for several reasons:
*
* When recursive marking uses too much stack the marking is delayed and the
* corresponding arenas are put into a stack. To distinguish the bottom of
* the stack from the arenas not present in the stack we use the
* When recursive marking uses too much stack, the marking is delayed and
* the corresponding arenas are put into a stack. To distinguish the bottom
* of the stack from the arenas not present in the stack we use the
* markOverflow flag to tag arenas on the stack.
*
* Delayed marking is also used for arenas that we allocate into during an
* incremental GC. In this case, we intend to mark all the objects in the
* arena, and it's faster to do this marking in bulk.
*
* When sweeping we keep track of which arenas have been allocated since the
* end of the mark phase. This allows us to tell whether a pointer to an
* unmarked object is yet to be finalized or has already been reallocated.
* We set the allocatedDuringIncremental flag for this and clear it at the
* end of the sweep phase.
* When sweeping we keep track of which arenas have been allocated since
* the end of the mark phase. This allows us to tell whether a pointer to
* an unmarked object is yet to be finalized or has already been
* reallocated. We set the allocatedDuringIncremental flag for this and
* clear it at the end of the sweep phase.
*
* To minimize the ArenaHeader size we record the next linkage as
* address() >> ArenaShift and pack it with the allocKind field and the flags.
* To minimize the size of the header fields we record the next linkage as
* address() >> ArenaShift and pack it with the allocKind and the flags.
*/
public:
size_t hasDelayedMarking : 1;
size_t allocatedDuringIncremental : 1;
size_t markOverflow : 1;
size_t auxNextLink : JS_BITS_PER_WORD - 8 - 1 - 1 - 1;
size_t hasDelayedMarking : 1;
size_t allocatedDuringIncremental : 1;
size_t markOverflow : 1;
size_t auxNextLink : JS_BITS_PER_WORD - 8 - 1 - 1 - 1;
static_assert(ArenaShift >= 8 + 1 + 1 + 1,
"ArenaHeader::auxNextLink packing assumes that ArenaShift has enough bits to "
"cover allocKind and hasDelayedMarking.");
"Arena::auxNextLink packing assumes that ArenaShift has "
"enough bits to cover allocKind and hasDelayedMarking.");
/*
* The size of data should be |ArenaSize - offsetof(data)|, but the offset
* is not yet known to the compiler, so we do it by hand. |firstFreeSpan|
* takes up 8 bytes on 64-bit due to alignment requirements; the rest are
* obvious. This constant is stored in js/HeapAPI.h.
*/
uint8_t data[ArenaSize - ArenaHeaderSize];
ArenaHeader() { setAsNotAllocated(); }
void init(JS::Zone* zoneArg, AllocKind kind) {
MOZ_ASSERT(firstFreeSpan.isEmpty());
MOZ_ASSERT(!zone);
MOZ_ASSERT(!allocated());
MOZ_ASSERT(!hasDelayedMarking);
MOZ_ASSERT(!allocatedDuringIncremental);
MOZ_ASSERT(!markOverflow);
MOZ_ASSERT(!auxNextLink);
zone = zoneArg;
allocKind = size_t(kind);
setAsFullyUnused();
}
// Sets |firstFreeSpan| to the Arena's entire valid range, and
// also sets the next span stored at |firstFreeSpan.last| as empty.
void setAsFullyUnused() {
AllocKind kind = getAllocKind();
firstFreeSpan.first = firstThingOffset(kind);
firstFreeSpan.last = lastThingOffset(kind);
FreeSpan* last = firstFreeSpan.nextSpanUnchecked(this);
last->initAsEmpty();
}
void setAsNotAllocated() {
firstFreeSpan.initAsEmpty();
zone = nullptr;
allocKind = size_t(AllocKind::LIMIT);
hasDelayedMarking = 0;
allocatedDuringIncremental = 0;
markOverflow = 0;
auxNextLink = 0;
}
uintptr_t address() const {
checkAddress();
@ -485,47 +545,37 @@ struct ArenaHeader
return IsValidAllocKind(AllocKind(allocKind));
}
// This sets |firstFreeSpan| to the Arena's entire valid range, and
// also sets the next span stored at |firstFreeSpan.last| as empty.
inline void setAsFullyUnused();
void init(JS::Zone* zoneArg, AllocKind kind) {
MOZ_ASSERT(!allocated());
MOZ_ASSERT(!markOverflow);
MOZ_ASSERT(!allocatedDuringIncremental);
MOZ_ASSERT(!hasDelayedMarking);
zone = zoneArg;
static_assert(size_t(AllocKind::LIMIT) <= 255,
"We must be able to fit the allockind into uint8_t.");
allocKind = size_t(kind);
setAsFullyUnused();
}
void setAsNotAllocated() {
allocKind = size_t(AllocKind::LIMIT);
markOverflow = 0;
allocatedDuringIncremental = 0;
hasDelayedMarking = 0;
auxNextLink = 0;
firstFreeSpan.initAsEmpty();
}
Arena* getArena() { return reinterpret_cast<Arena*>(address()); }
FreeSpan* getFirstFreeSpan() { return &firstFreeSpan; }
AllocKind getAllocKind() const {
MOZ_ASSERT(allocated());
return AllocKind(allocKind);
}
inline size_t getThingSize() const;
FreeSpan* getFirstFreeSpan() { return &firstFreeSpan; }
bool hasFreeThings() const {
return !firstFreeSpan.isEmpty();
static size_t thingSize(AllocKind kind) { return ThingSizes[size_t(kind)]; }
static size_t thingsPerArena(AllocKind kind) { return ThingsPerArena[size_t(kind)]; }
static size_t thingsSpan(AllocKind kind) { return thingsPerArena(kind) * thingSize(kind); }
static size_t firstThingOffset(AllocKind kind) { return FirstThingOffsets[size_t(kind)]; }
static size_t lastThingOffset(AllocKind kind) { return ArenaSize - thingSize(kind); }
size_t getThingSize() const { return thingSize(getAllocKind()); }
size_t getThingsPerArena() const { return thingsPerArena(getAllocKind()); }
size_t getThingsSpan() const { return getThingsPerArena() * getThingSize(); }
uintptr_t thingsStart() const { return address() + firstThingOffset(getAllocKind()); }
uintptr_t thingsEnd() const { return address() + ArenaSize; }
bool isEmpty() const {
// Arena is empty if its first span covers the whole arena.
firstFreeSpan.checkSpan(this);
AllocKind kind = getAllocKind();
return firstFreeSpan.first == firstThingOffset(kind) &&
firstFreeSpan.last == lastThingOffset(kind);
}
bool hasFreeThings() const { return !firstFreeSpan.isEmpty(); }
size_t numFreeThings(size_t thingSize) const {
firstFreeSpan.checkSpan(this);
size_t numFree = 0;
@ -535,7 +585,8 @@ struct ArenaHeader
return numFree;
}
inline bool isEmpty() const;
size_t countFreeCells() { return numFreeThings(getThingSize()); }
size_t countUsedCells() { return getThingsPerArena() - countFreeCells(); }
bool inFreeList(uintptr_t thing) {
uintptr_t base = address();
@ -552,94 +603,88 @@ struct ArenaHeader
return false;
}
inline ArenaHeader* getNextDelayedMarking() const;
inline void setNextDelayedMarking(ArenaHeader* aheader);
inline void unsetDelayedMarking();
inline ArenaHeader* getNextAllocDuringSweep() const;
inline void setNextAllocDuringSweep(ArenaHeader* aheader);
inline void unsetAllocDuringSweep();
inline void setNextArenaToUpdate(ArenaHeader* aheader);
inline ArenaHeader* getNextArenaToUpdateAndUnlink();
void unmarkAll();
size_t countUsedCells();
size_t countFreeCells() { return numFreeThings(getThingSize()); }
};
static_assert(ArenaZoneOffset == offsetof(ArenaHeader, zone),
"The hardcoded API zone offset must match the actual offset.");
struct Arena
{
/*
* Layout of an arena:
* An arena is 4K in size and 4K-aligned. It starts with the ArenaHeader
* descriptor followed by some pad bytes. The remainder of the arena is
* filled with the array of T things. The pad bytes ensure that the thing
* array ends exactly at the end of the arena.
*
* +-------------+-----+----+----+-----+----+
* | ArenaHeader | pad | T0 | T1 | ... | Tn |
* +-------------+-----+----+----+-----+----+
*
* <----------------------------------------> = ArenaSize bytes
* <-------------------> = first thing offset
*/
ArenaHeader aheader;
uint8_t data[ArenaSize - sizeof(ArenaHeader)];
private:
static JS_FRIEND_DATA(const uint32_t) ThingSizes[];
static JS_FRIEND_DATA(const uint32_t) FirstThingOffsets[];
static const uint32_t ThingsPerArena[];
public:
static void staticAsserts();
static size_t thingSize(AllocKind kind) {
return ThingSizes[size_t(kind)];
}
static size_t firstThingOffset(AllocKind kind) {
return FirstThingOffsets[size_t(kind)];
}
static size_t thingsPerArena(AllocKind kind) {
return ThingsPerArena[size_t(kind)];
}
static size_t thingsSpan(AllocKind kind) {
return thingsPerArena(kind) * thingSize(kind);
}
static bool isAligned(uintptr_t thing, size_t thingSize) {
/* Things end at the arena end. */
uintptr_t tailOffset = (ArenaSize - thing) & ArenaMask;
uintptr_t tailOffset = ArenaSize - (thing & ArenaMask);
return tailOffset % thingSize == 0;
}
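With offset = thing & ArenaMask, the rewritten expression measures the distance from the thing to the end of its arena; because things are laid out back-to-back and end exactly at the arena boundary, a thing is aligned precisely when that distance is a multiple of thingSize. A small worked check (illustrative):

#include <cassert>
#include <cstddef>
#include <cstdint>

constexpr uintptr_t ArenaSize = 4096;
constexpr uintptr_t ArenaMask = ArenaSize - 1;

bool toyIsAligned(uintptr_t thing, size_t thingSize) {
    uintptr_t tailOffset = ArenaSize - (thing & ArenaMask);
    return tailOffset % thingSize == 0;
}

int main() {
    // 32-byte things occupy offsets 32, 64, ..., 4064 (the header has [0, 32)).
    assert(toyIsAligned(0x1020, 32));   // offset 32: tail 4064 is 127 things
    assert(!toyIsAligned(0x1030, 32));  // offset 48 points into a thing
}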
uintptr_t address() const {
return aheader.address();
Arena* getNextDelayedMarking() const {
MOZ_ASSERT(hasDelayedMarking);
return reinterpret_cast<Arena*>(auxNextLink << ArenaShift);
}
uintptr_t thingsStart(AllocKind thingKind) {
return address() + firstThingOffset(thingKind);
void setNextDelayedMarking(Arena* arena) {
MOZ_ASSERT(!(uintptr_t(arena) & ArenaMask));
MOZ_ASSERT(!auxNextLink && !hasDelayedMarking);
hasDelayedMarking = 1;
if (arena)
auxNextLink = arena->address() >> ArenaShift;
}
uintptr_t thingsEnd() {
return address() + ArenaSize;
void unsetDelayedMarking() {
MOZ_ASSERT(hasDelayedMarking);
hasDelayedMarking = 0;
auxNextLink = 0;
}
Arena* getNextAllocDuringSweep() const {
MOZ_ASSERT(allocatedDuringIncremental);
return reinterpret_cast<Arena*>(auxNextLink << ArenaShift);
}
void setNextAllocDuringSweep(Arena* arena) {
MOZ_ASSERT(!(uintptr_t(arena) & ArenaMask));
MOZ_ASSERT(!auxNextLink && !allocatedDuringIncremental);
allocatedDuringIncremental = 1;
if (arena)
auxNextLink = arena->address() >> ArenaShift;
}
void unsetAllocDuringSweep() {
MOZ_ASSERT(allocatedDuringIncremental);
allocatedDuringIncremental = 0;
auxNextLink = 0;
}
Arena* getNextArenaToUpdateAndUnlink() {
MOZ_ASSERT(!hasDelayedMarking && !allocatedDuringIncremental && !markOverflow);
Arena* next = reinterpret_cast<Arena*>(auxNextLink << ArenaShift);
auxNextLink = 0;
return next;
}
void setNextArenaToUpdate(Arena* arena) {
MOZ_ASSERT(!(uintptr_t(arena) & ArenaMask));
MOZ_ASSERT(!hasDelayedMarking && !allocatedDuringIncremental && !markOverflow);
MOZ_ASSERT(!auxNextLink);
auxNextLink = arena->address() >> ArenaShift;
}
template <typename T>
size_t finalize(FreeOp* fop, AllocKind thingKind, size_t thingSize);
static void staticAsserts();
void unmarkAll();
};
static_assert(sizeof(Arena) == ArenaSize, "The hardcoded arena size must match the struct size.");
static_assert(ArenaZoneOffset == offsetof(Arena, zone),
"The hardcoded API zone offset must match the actual offset.");
static_assert(sizeof(Arena) == ArenaSize, "The hardcoded API header size (ArenaHeaderSize) "
"must match the actual size of the header fields.");
inline Arena*
FreeSpan::getArena()
{
Arena* arena = getArenaUnchecked();
arena->checkAddress();
return arena;
}
inline void
FreeSpan::checkSpan(const ArenaHeader* aheader) const
FreeSpan::checkSpan(const Arena* arena) const
{
#ifdef DEBUG
if (!first) {
@ -647,41 +692,33 @@ FreeSpan::checkSpan(const ArenaHeader* aheader) const
return;
}
aheader->checkAddress();
checkRange(first, last, aheader);
arena->checkAddress();
checkRange(first, last, arena);
// If there's a following span, it must have a higher address,
// and the gap must be at least 2 * thingSize.
const FreeSpan* next = nextSpanUnchecked(aheader);
const FreeSpan* next = nextSpanUnchecked(arena);
if (next->first) {
checkRange(next->first, next->last, aheader);
size_t thingSize = aheader->getThingSize();
checkRange(next->first, next->last, arena);
size_t thingSize = arena->getThingSize();
MOZ_ASSERT(last + 2 * thingSize <= next->first);
}
#endif
}
inline void
FreeSpan::checkRange(uintptr_t first, uintptr_t last, const ArenaHeader* aheader) const
FreeSpan::checkRange(uintptr_t first, uintptr_t last, const Arena* arena) const
{
#ifdef DEBUG
MOZ_ASSERT(aheader);
AllocKind thingKind = aheader->getAllocKind();
size_t thingSize = Arena::thingSize(thingKind);
MOZ_ASSERT(arena);
MOZ_ASSERT(first <= last);
AllocKind thingKind = arena->getAllocKind();
MOZ_ASSERT(first >= Arena::firstThingOffset(thingKind));
MOZ_ASSERT(last <= ArenaSize - thingSize);
MOZ_ASSERT((last - first) % thingSize == 0);
MOZ_ASSERT(last <= Arena::lastThingOffset(thingKind));
MOZ_ASSERT((last - first) % Arena::thingSize(thingKind) == 0);
#endif
}
inline size_t
ArenaHeader::getThingSize() const
{
return Arena::thingSize(getAllocKind());
}
/*
* The tail of the chunk info is shared between all chunks in the system, both
* nursery and tenured. This structure is locatable from any GC pointer by
@ -728,8 +765,8 @@ struct ChunkInfo
Chunk* prev;
public:
/* Free arenas are linked together with aheader.next. */
ArenaHeader* freeArenasHead;
/* Free arenas are linked together with arena.next. */
Arena* freeArenasHead;
#if JS_BITS_PER_WORD == 32
/*
@ -854,14 +891,14 @@ struct ChunkBitmap
memset((void*)bitmap, 0, sizeof(bitmap));
}
uintptr_t* arenaBits(ArenaHeader* aheader) {
uintptr_t* arenaBits(Arena* arena) {
static_assert(ArenaBitmapBits == ArenaBitmapWords * JS_BITS_PER_WORD,
"We assume that the part of the bitmap corresponding to the arena "
"has the exact number of words so we do not need to deal with a word "
"that covers bits from two arenas.");
uintptr_t* word, unused;
getMarkWordAndMask(reinterpret_cast<Cell*>(aheader->address()), BLACK, &word, &unused);
getMarkWordAndMask(reinterpret_cast<Cell*>(arena->address()), BLACK, &word, &unused);
return word;
}
};
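The property this assertion pins down can be checked in isolation, assuming 8-byte cells and 64-bit words (a sketch, not the patch's code): one arena's worth of mark bits is a whole number of bitmap words, so the word returned is not shared with a neighboring arena.

#include <cstddef>

constexpr size_t ArenaSize = 4096;
constexpr size_t CellSize = 8;      // assumed minimal GC cell size
constexpr size_t BitsPerWord = 64;  // stands in for JS_BITS_PER_WORD

// One mark bit per CellSize bytes: 512 bits per arena, i.e. 8 full words.
static_assert((ArenaSize / CellSize) % BitsPerWord == 0,
              "an arena's mark bits occupy exact words");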
@ -932,11 +969,10 @@ struct Chunk
return info.trailer.storeBuffer;
}
ArenaHeader* allocateArena(JSRuntime* rt, JS::Zone* zone, AllocKind kind,
const AutoLockGC& lock);
Arena* allocateArena(JSRuntime* rt, JS::Zone* zone, AllocKind kind, const AutoLockGC& lock);
void releaseArena(JSRuntime* rt, ArenaHeader* aheader, const AutoLockGC& lock);
void recycleArena(ArenaHeader* aheader, SortedArenaList& dest, size_t thingsPerArena);
void releaseArena(JSRuntime* rt, Arena* arena, const AutoLockGC& lock);
void recycleArena(Arena* arena, SortedArenaList& dest, size_t thingsPerArena);
bool decommitOneFreeArena(JSRuntime* rt, AutoLockGC& lock);
void decommitAllArenasWithoutUnlocking(const AutoLockGC& lock);
@ -950,17 +986,17 @@ struct Chunk
/* Search for a decommitted arena to allocate. */
unsigned findDecommittedArenaOffset();
ArenaHeader* fetchNextDecommittedArena();
Arena* fetchNextDecommittedArena();
void addArenaToFreeList(JSRuntime* rt, ArenaHeader* aheader);
void addArenaToDecommittedList(JSRuntime* rt, const ArenaHeader* aheader);
void addArenaToFreeList(JSRuntime* rt, Arena* arena);
void addArenaToDecommittedList(JSRuntime* rt, const Arena* arena);
void updateChunkListAfterAlloc(JSRuntime* rt, const AutoLockGC& lock);
void updateChunkListAfterFree(JSRuntime* rt, const AutoLockGC& lock);
public:
/* Unlink and return the freeArenasHead. */
inline ArenaHeader* fetchNextFreeArena(JSRuntime* rt);
inline Arena* fetchNextFreeArena(JSRuntime* rt);
};
static_assert(sizeof(Chunk) == ChunkSize,
@ -1024,16 +1060,8 @@ class HeapUsage
}
};
inline ArenaHeader*
FreeSpan::getArena()
{
ArenaHeader* arena = getArenaUnchecked();
arena->checkAddress();
return arena;
}
inline void
ArenaHeader::checkAddress() const
Arena::checkAddress() const
{
mozilla::DebugOnly<uintptr_t> addr = uintptr_t(this);
MOZ_ASSERT(addr);
@ -1042,110 +1070,17 @@ ArenaHeader::checkAddress() const
}
inline Chunk*
ArenaHeader::chunk() const
Arena::chunk() const
{
return Chunk::fromAddress(address());
}
inline bool
ArenaHeader::isEmpty() const
{
/* Arena is empty if its first span covers the whole arena. */
firstFreeSpan.checkSpan(this);
AllocKind kind = getAllocKind();
size_t firstThingOffset = Arena::firstThingOffset(kind);
size_t lastThingOffset = ArenaSize - Arena::thingSize(kind);
return firstFreeSpan.first == firstThingOffset && firstFreeSpan.last == lastThingOffset;
}
inline void
ArenaHeader::setAsFullyUnused()
{
AllocKind kind = getAllocKind();
firstFreeSpan.first = Arena::firstThingOffset(kind);
firstFreeSpan.last = ArenaSize - Arena::thingSize(kind);
FreeSpan* last = firstFreeSpan.nextSpanUnchecked(this);
last->initAsEmpty();
}
inline ArenaHeader*
ArenaHeader::getNextDelayedMarking() const
{
MOZ_ASSERT(hasDelayedMarking);
return &reinterpret_cast<Arena*>(auxNextLink << ArenaShift)->aheader;
}
inline void
ArenaHeader::setNextDelayedMarking(ArenaHeader* aheader)
{
MOZ_ASSERT(!(uintptr_t(aheader) & ArenaMask));
MOZ_ASSERT(!auxNextLink && !hasDelayedMarking);
hasDelayedMarking = 1;
if (aheader)
auxNextLink = aheader->address() >> ArenaShift;
}
inline void
ArenaHeader::unsetDelayedMarking()
{
MOZ_ASSERT(hasDelayedMarking);
hasDelayedMarking = 0;
auxNextLink = 0;
}
inline ArenaHeader*
ArenaHeader::getNextAllocDuringSweep() const
{
MOZ_ASSERT(allocatedDuringIncremental);
return &reinterpret_cast<Arena*>(auxNextLink << ArenaShift)->aheader;
}
inline void
ArenaHeader::setNextAllocDuringSweep(ArenaHeader* aheader)
{
MOZ_ASSERT(!auxNextLink && !allocatedDuringIncremental);
allocatedDuringIncremental = 1;
if (aheader)
auxNextLink = aheader->address() >> ArenaShift;
}
inline void
ArenaHeader::unsetAllocDuringSweep()
{
MOZ_ASSERT(allocatedDuringIncremental);
allocatedDuringIncremental = 0;
auxNextLink = 0;
}
inline ArenaHeader*
ArenaHeader::getNextArenaToUpdateAndUnlink()
{
MOZ_ASSERT(!hasDelayedMarking && !allocatedDuringIncremental && !markOverflow);
ArenaHeader* next = &reinterpret_cast<Arena*>(auxNextLink << ArenaShift)->aheader;
auxNextLink = 0;
return next;
}
inline void
ArenaHeader::setNextArenaToUpdate(ArenaHeader* aheader)
{
MOZ_ASSERT(!hasDelayedMarking && !allocatedDuringIncremental && !markOverflow);
MOZ_ASSERT(!auxNextLink);
auxNextLink = aheader->address() >> ArenaShift;
}
inline size_t
ArenaHeader::countUsedCells()
{
return Arena::thingsPerArena(getAllocKind()) - countFreeCells();
}
static void
AssertValidColor(const TenuredCell* thing, uint32_t color)
{
#ifdef DEBUG
ArenaHeader* aheader = thing->arenaHeader();
MOZ_ASSERT(color < aheader->getThingSize() / CellSize);
Arena* arena = thing->arena();
MOZ_ASSERT(color < arena->getThingSize() / CellSize);
#endif
}
@ -1220,11 +1155,11 @@ Cell::getTraceKind() const
}
inline bool
InFreeList(ArenaHeader* aheader, void* thing)
InFreeList(Arena* arena, void* thing)
{
uintptr_t addr = reinterpret_cast<uintptr_t>(thing);
MOZ_ASSERT(Arena::isAligned(addr, aheader->getThingSize()));
return aheader->inFreeList(addr);
MOZ_ASSERT(Arena::isAligned(addr, arena->getThingSize()));
return arena->inFreeList(addr);
}
/* static */ MOZ_ALWAYS_INLINE bool
@ -1249,7 +1184,7 @@ TenuredCell::fromPointer(const void* ptr)
bool
TenuredCell::isMarked(uint32_t color /* = BLACK */) const
{
MOZ_ASSERT(arenaHeader()->allocated());
MOZ_ASSERT(arena()->allocated());
AssertValidColor(this, color);
return chunk()->bitmap.isMarked(this, color);
}
@ -1277,19 +1212,19 @@ TenuredCell::copyMarkBitsFrom(const TenuredCell* src)
bitmap.copyMarkBit(this, src, GRAY);
}
inline ArenaHeader*
TenuredCell::arenaHeader() const
inline Arena*
TenuredCell::arena() const
{
MOZ_ASSERT(isTenured());
uintptr_t addr = address();
addr &= ~ArenaMask;
return reinterpret_cast<ArenaHeader*>(addr);
return reinterpret_cast<Arena*>(addr);
}
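Note that arena() needs no stored back-pointer: arenas are ArenaSize-aligned, so masking the low bits off any tenured cell address lands on the containing Arena, which is also why the header fields must sit at the very start of the Arena. A minimal sketch (illustrative; the address is an arbitrary 64-bit example):

#include <cassert>
#include <cstdint>

constexpr uintptr_t ArenaMask = 4096 - 1;

// Any address inside an arena maps to the arena's base address by masking.
uintptr_t toyArenaOf(uintptr_t cellAddr) {
    return cellAddr & ~ArenaMask;
}

int main() {
    assert(toyArenaOf(0x7f00deadb0e0) == 0x7f00deadb000);
}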
AllocKind
TenuredCell::getAllocKind() const
{
return arenaHeader()->getAllocKind();
return arena()->getAllocKind();
}
JS::TraceKind
@ -1301,7 +1236,7 @@ TenuredCell::getTraceKind() const
JS::Zone*
TenuredCell::zone() const
{
JS::Zone* zone = arenaHeader()->zone;
JS::Zone* zone = arena()->zone;
MOZ_ASSERT(CurrentThreadCanAccessZone(zone));
return zone;
}
@ -1309,13 +1244,13 @@ TenuredCell::zone() const
JS::Zone*
TenuredCell::zoneFromAnyThread() const
{
return arenaHeader()->zone;
return arena()->zone;
}
bool
TenuredCell::isInsideZone(JS::Zone* zone) const
{
return zone == arenaHeader()->zone;
return zone == arena()->zone;
}
/* static */ MOZ_ALWAYS_INLINE void
@ -1384,7 +1319,7 @@ Cell::isAligned() const
bool
TenuredCell::isAligned() const
{
return Arena::isAligned(address(), arenaHeader()->getThingSize());
return Arena::isAligned(address(), arena()->getThingSize());
}
#endif


@ -43,9 +43,9 @@ IterateCompartmentsArenasCells(JSRuntime* rt, Zone* zone, void* data,
size_t thingSize = Arena::thingSize(thingKind);
for (ArenaIter aiter(zone, thingKind); !aiter.done(); aiter.next()) {
ArenaHeader* aheader = aiter.get();
(*arenaCallback)(rt, data, aheader->getArena(), traceKind, thingSize);
for (ArenaCellIterUnderGC iter(aheader); !iter.done(); iter.next())
Arena* arena = aiter.get();
(*arenaCallback)(rt, data, arena, traceKind, thingSize);
for (ArenaCellIterUnderGC iter(arena); !iter.done(); iter.next())
(*cellCallback)(rt, data, iter.getCell(), traceKind, thingSize);
}
}


@ -233,7 +233,7 @@ js::CheckTracedThing(JSTracer* trc, T* thing)
* and concurrently modifying the free list.
*/
MOZ_ASSERT_IF(IsThingPoisoned(thing) && rt->isHeapBusy() && !rt->gc.isBackgroundSweeping(),
!InFreeList(thing->asTenured().arenaHeader(), thing));
!InFreeList(thing->asTenured().arena(), thing));
#endif
}
@ -727,7 +727,7 @@ MustSkipMarking<JSObject*>(JSObject* obj)
return true;
// Don't mark things outside a zone if we are in a per-zone GC. It is
// faster to check our own arena header, which we can do since we know that
// faster to check our own arena, which we can do since we know that
// the object is tenured.
return !TenuredCell::fromPointer(obj)->zone()->isGCMarking();
}
@ -1817,13 +1817,13 @@ GCMarker::reset()
MOZ_ASSERT(isMarkStackEmpty());
while (unmarkedArenaStackTop) {
ArenaHeader* aheader = unmarkedArenaStackTop;
MOZ_ASSERT(aheader->hasDelayedMarking);
Arena* arena = unmarkedArenaStackTop;
MOZ_ASSERT(arena->hasDelayedMarking);
MOZ_ASSERT(markLaterArenas);
unmarkedArenaStackTop = aheader->getNextDelayedMarking();
aheader->unsetDelayedMarking();
aheader->markOverflow = 0;
aheader->allocatedDuringIncremental = 0;
unmarkedArenaStackTop = arena->getNextDelayedMarking();
arena->unsetDelayedMarking();
arena->markOverflow = 0;
arena->allocatedDuringIncremental = 0;
markLaterArenas--;
}
MOZ_ASSERT(isDrained());
@ -1868,27 +1868,27 @@ GCMarker::leaveWeakMarkingMode()
}
void
GCMarker::markDelayedChildren(ArenaHeader* aheader)
GCMarker::markDelayedChildren(Arena* arena)
{
if (aheader->markOverflow) {
bool always = aheader->allocatedDuringIncremental;
aheader->markOverflow = 0;
if (arena->markOverflow) {
bool always = arena->allocatedDuringIncremental;
arena->markOverflow = 0;
for (ArenaCellIterUnderGC i(aheader); !i.done(); i.next()) {
for (ArenaCellIterUnderGC i(arena); !i.done(); i.next()) {
TenuredCell* t = i.getCell();
if (always || t->isMarked()) {
t->markIfUnmarked();
js::TraceChildren(this, t, MapAllocToTraceKind(aheader->getAllocKind()));
js::TraceChildren(this, t, MapAllocToTraceKind(arena->getAllocKind()));
}
}
} else {
MOZ_ASSERT(aheader->allocatedDuringIncremental);
PushArena(this, aheader);
MOZ_ASSERT(arena->allocatedDuringIncremental);
PushArena(this, arena);
}
aheader->allocatedDuringIncremental = 0;
arena->allocatedDuringIncremental = 0;
/*
* Note that during an incremental GC we may still be allocating into
* aheader. However, prepareForIncrementalGC sets the
* the arena. However, prepareForIncrementalGC sets the
* allocatedDuringIncremental flag if we continue marking.
*/
}
@ -1906,13 +1906,13 @@ GCMarker::markDelayedChildren(SliceBudget& budget)
* marking of its things. For that we pop arena from the stack and
* clear its hasDelayedMarking flag before we begin the marking.
*/
ArenaHeader* aheader = unmarkedArenaStackTop;
MOZ_ASSERT(aheader->hasDelayedMarking);
Arena* arena = unmarkedArenaStackTop;
MOZ_ASSERT(arena->hasDelayedMarking);
MOZ_ASSERT(markLaterArenas);
unmarkedArenaStackTop = aheader->getNextDelayedMarking();
aheader->unsetDelayedMarking();
unmarkedArenaStackTop = arena->getNextDelayedMarking();
arena->unsetDelayedMarking();
markLaterArenas--;
markDelayedChildren(aheader);
markDelayedChildren(arena);
budget.step(150);
if (budget.isOverBudget())
@ -1925,22 +1925,23 @@ GCMarker::markDelayedChildren(SliceBudget& budget)
template<typename T>
static void
PushArenaTyped(GCMarker* gcmarker, ArenaHeader* aheader)
PushArenaTyped(GCMarker* gcmarker, Arena* arena)
{
for (ArenaCellIterUnderGC i(aheader); !i.done(); i.next())
for (ArenaCellIterUnderGC i(arena); !i.done(); i.next())
gcmarker->traverse(i.get<T>());
}
struct PushArenaFunctor {
template <typename T> void operator()(GCMarker* gcmarker, ArenaHeader* aheader) {
PushArenaTyped<T>(gcmarker, aheader);
template <typename T> void operator()(GCMarker* gcmarker, Arena* arena) {
PushArenaTyped<T>(gcmarker, arena);
}
};
void
gc::PushArena(GCMarker* gcmarker, ArenaHeader* aheader)
gc::PushArena(GCMarker* gcmarker, Arena* arena)
{
DispatchTraceKindTyped(PushArenaFunctor(), MapAllocToTraceKind(aheader->getAllocKind()), gcmarker, aheader);
DispatchTraceKindTyped(PushArenaFunctor(),
MapAllocToTraceKind(arena->getAllocKind()), gcmarker, arena);
}
#ifdef DEBUG
@ -2410,7 +2411,7 @@ js::gc::IsAboutToBeFinalizedDuringSweep(TenuredCell& tenured)
MOZ_ASSERT(!IsInsideNursery(&tenured));
MOZ_ASSERT(!tenured.runtimeFromAnyThread()->isHeapMinorCollecting());
MOZ_ASSERT(tenured.zoneFromAnyThread()->isGCSweeping());
if (tenured.arenaHeader()->allocatedDuringIncremental)
if (tenured.arena()->allocatedDuringIncremental)
return false;
return !tenured.isMarked();
}


@ -32,7 +32,7 @@ class NativeObject;
class ObjectGroup;
class WeakMapBase;
namespace gc {
struct ArenaHeader;
class Arena;
} // namespace gc
namespace jit {
class JitCode;
@ -214,9 +214,9 @@ class GCMarker : public JSTracer
linearWeakMarkingDisabled_ = true;
}
void delayMarkingArena(gc::ArenaHeader* aheader);
void delayMarkingArena(gc::Arena* arena);
void delayMarkingChildren(const void* thing);
void markDelayedChildren(gc::ArenaHeader* aheader);
void markDelayedChildren(gc::Arena* arena);
bool markDelayedChildren(SliceBudget& budget);
bool hasDelayedChildren() const {
return !!unmarkedArenaStackTop;
@ -283,7 +283,7 @@ class GCMarker : public JSTracer
void eagerlyMarkChildren(Shape* shape);
void lazilyMarkChildren(ObjectGroup* group);
// We may not have concrete types yet, so this has to be out of the header.
// We may not have concrete types yet, so this has to be outside the header.
template <typename T>
void dispatchToTraceChildren(T* thing);
@ -331,7 +331,7 @@ class GCMarker : public JSTracer
uint32_t color;
/* Pointer to the top of the stack of arenas we are delaying marking on. */
js::gc::ArenaHeader* unmarkedArenaStackTop;
js::gc::Arena* unmarkedArenaStackTop;
/*
* If the weakKeys table OOMs, disable the linear algorithm and fall back
@ -364,7 +364,7 @@ namespace gc {
/*** Special Cases ***/
void
PushArena(GCMarker* gcmarker, ArenaHeader* aheader);
PushArena(GCMarker* gcmarker, Arena* arena);
/*** Liveness ***/


@ -250,7 +250,7 @@ oom:
static bool
IsMarkedOrAllocated(TenuredCell* cell)
{
return cell->isMarked() || cell->arenaHeader()->allocatedDuringIncremental;
return cell->isMarked() || cell->arena()->allocatedDuringIncremental;
}
struct CheckEdgeTracer : public JS::CallbackTracer {


@ -870,7 +870,7 @@ bool
JitcodeGlobalEntry::BaseEntry::isJitcodeMarkedFromAnyThread()
{
return IsMarkedUnbarriered(&jitcode_) ||
jitcode_->arenaHeader()->allocatedDuringIncremental;
jitcode_->arena()->allocatedDuringIncremental;
}
bool
@ -900,7 +900,7 @@ bool
JitcodeGlobalEntry::BaselineEntry::isMarkedFromAnyThread()
{
return IsMarkedUnbarriered(&script_) ||
script_->arenaHeader()->allocatedDuringIncremental;
script_->arena()->allocatedDuringIncremental;
}
template <class ShouldMarkProvider>
@ -968,7 +968,7 @@ JitcodeGlobalEntry::IonEntry::isMarkedFromAnyThread()
{
for (unsigned i = 0; i < numScripts(); i++) {
if (!IsMarkedUnbarriered(&sizedScriptList()->pairs[i].script) &&
!sizedScriptList()->pairs[i].script->arenaHeader()->allocatedDuringIncremental)
!sizedScriptList()->pairs[i].script->arena()->allocatedDuringIncremental)
{
return false;
}


@ -1008,7 +1008,7 @@ DumpHeapVisitArena(JSRuntime* rt, void* data, gc::Arena* arena,
{
DumpHeapTracer* dtrc = static_cast<DumpHeapTracer*>(data);
fprintf(dtrc->output, "# arena allockind=%u size=%u\n",
unsigned(arena->aheader.getAllocKind()), unsigned(thingSize));
unsigned(arena->getAllocKind()), unsigned(thingSize));
}
static void


@ -307,7 +307,7 @@ FreeSpan ArenaLists::placeholder;
#undef CHECK_THING_SIZE_INNER
#undef CHECK_THING_SIZE
#define OFFSET(type) uint32_t(sizeof(ArenaHeader) + (ArenaSize - sizeof(ArenaHeader)) % sizeof(type))
#define OFFSET(type) uint32_t(ArenaHeaderSize + (ArenaSize - ArenaHeaderSize) % sizeof(type))
const uint32_t Arena::FirstThingOffsets[] = {
OFFSET(JSFunction), /* AllocKind::FUNCTION */
@ -339,7 +339,7 @@ const uint32_t Arena::FirstThingOffsets[] = {
#undef OFFSET
#define COUNT(type) uint32_t((ArenaSize - sizeof(ArenaHeader)) / sizeof(type))
#define COUNT(type) uint32_t((ArenaSize - ArenaHeaderSize) / sizeof(type))
const uint32_t Arena::ThingsPerArena[] = {
COUNT(JSFunction), /* AllocKind::FUNCTION */
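These two macros implement the layout invariant from the Arena comment: any slack becomes padding before the first thing, so the thing array ends exactly at ArenaSize. A worked check, assuming the 64-bit ArenaHeaderSize of 32 (illustrative, not part of the patch):

#include <cstddef>

constexpr size_t ArenaSize = 4096;
constexpr size_t ArenaHeaderSize = 32;  // assumes a 64-bit target

constexpr size_t offsetOf(size_t thingSize) {  // mirrors OFFSET(type)
    return ArenaHeaderSize + (ArenaSize - ArenaHeaderSize) % thingSize;
}
constexpr size_t countOf(size_t thingSize) {   // mirrors COUNT(type)
    return (ArenaSize - ArenaHeaderSize) / thingSize;
}

// 48-byte things: 4064 % 48 leaves 32 bytes of padding, then 84 things.
static_assert(offsetOf(48) + countOf(48) * 48 == ArenaSize, "ends at arena end");
static_assert(offsetOf(32) == 32 && countOf(32) == 127, "32-byte things: no padding");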
@ -452,7 +452,7 @@ ArenaCellIterImpl::get<JSObject>() const
}
void
ArenaHeader::unmarkAll()
Arena::unmarkAll()
{
uintptr_t* word = chunk()->bitmap.arenaBits(this);
memset(word, 0, ArenaBitmapWords * sizeof(uintptr_t));
@ -461,6 +461,8 @@ ArenaHeader::unmarkAll()
/* static */ void
Arena::staticAsserts()
{
static_assert(size_t(AllocKind::LIMIT) <= 255,
"We must be able to fit the allockind into uint8_t.");
static_assert(JS_ARRAY_LENGTH(ThingSizes) == size_t(AllocKind::LIMIT),
"We haven't defined all thing sizes.");
static_assert(JS_ARRAY_LENGTH(FirstThingOffsets) == size_t(AllocKind::LIMIT),
@ -477,12 +479,12 @@ Arena::finalize(FreeOp* fop, AllocKind thingKind, size_t thingSize)
MOZ_ASSERT(thingSize % CellSize == 0);
MOZ_ASSERT(thingSize <= 255);
MOZ_ASSERT(aheader.allocated());
MOZ_ASSERT(thingKind == aheader.getAllocKind());
MOZ_ASSERT(thingSize == aheader.getThingSize());
MOZ_ASSERT(!aheader.hasDelayedMarking);
MOZ_ASSERT(!aheader.markOverflow);
MOZ_ASSERT(!aheader.allocatedDuringIncremental);
MOZ_ASSERT(allocated());
MOZ_ASSERT(thingKind == getAllocKind());
MOZ_ASSERT(thingSize == getThingSize());
MOZ_ASSERT(!hasDelayedMarking);
MOZ_ASSERT(!markOverflow);
MOZ_ASSERT(!allocatedDuringIncremental);
uint_fast16_t firstThing = firstThingOffset(thingKind);
uint_fast16_t firstThingOrSuccessorOfLastMarkedThing = firstThing;
@ -493,14 +495,14 @@ Arena::finalize(FreeOp* fop, AllocKind thingKind, size_t thingSize)
size_t nmarked = 0;
if (MOZ_UNLIKELY(MemProfiler::enabled())) {
for (ArenaCellIterUnderFinalize i(&aheader); !i.done(); i.next()) {
for (ArenaCellIterUnderFinalize i(this); !i.done(); i.next()) {
T* t = i.get<T>();
if (t->asTenured().isMarked())
MemProfiler::MarkTenured(reinterpret_cast<void*>(t));
}
}
for (ArenaCellIterUnderFinalize i(&aheader); !i.done(); i.next()) {
for (ArenaCellIterUnderFinalize i(this); !i.done(); i.next()) {
T* t = i.get<T>();
if (t->asTenured().isMarked()) {
uint_fast16_t thing = uintptr_t(t) & ArenaMask;
@ -508,8 +510,8 @@ Arena::finalize(FreeOp* fop, AllocKind thingKind, size_t thingSize)
// We just finished passing over one or more free things,
// so record a new FreeSpan.
newListTail->initBounds(firstThingOrSuccessorOfLastMarkedThing,
thing - thingSize, &aheader);
newListTail = newListTail->nextSpanUnchecked(&aheader);
thing - thingSize, this);
newListTail = newListTail->nextSpanUnchecked(this);
}
firstThingOrSuccessorOfLastMarkedThing = thing + thingSize;
nmarked++;
@ -521,7 +523,7 @@ Arena::finalize(FreeOp* fop, AllocKind thingKind, size_t thingSize)
}
if (nmarked == 0) {
// Do nothing. The caller will update the arena header appropriately.
// Do nothing. The caller will update the arena appropriately.
MOZ_ASSERT(newListTail == &newListHead);
JS_EXTRA_POISON(data, JS_SWEPT_TENURED_PATTERN, sizeof(data));
return nmarked;
@ -535,12 +537,12 @@ Arena::finalize(FreeOp* fop, AllocKind thingKind, size_t thingSize)
newListTail->initAsEmpty();
} else {
// Otherwise, end the list with a span that covers the final stretch of free things.
newListTail->initFinal(firstThingOrSuccessorOfLastMarkedThing, lastThing, &aheader);
newListTail->initFinal(firstThingOrSuccessorOfLastMarkedThing, lastThing, this);
}
aheader.firstFreeSpan = newListHead;
firstFreeSpan = newListHead;
#ifdef DEBUG
size_t nfree = aheader.numFreeThings(thingSize);
size_t nfree = numFreeThings(thingSize);
MOZ_ASSERT(nfree + nmarked == thingsPerArena(thingKind));
#endif
return nmarked;
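The loop above builds the new free-span list in a single pass: walking things in address order, it records each maximal run of free things as one span, closing the run when it hits a marked thing or the end of the arena. Reduced to offsets, with toy marked()/emitSpan() stand-ins (illustrative, not the patch's code):

#include <cassert>
#include <cstdint>
#include <set>
#include <utility>
#include <vector>

// Toy stand-ins for the mark bitmap and the free-span list being built.
static std::set<uint16_t> markedThings = {64, 160};  // offsets of live things
static std::vector<std::pair<uint16_t, uint16_t>> spans;

static bool marked(uint16_t t) { return markedThings.count(t) != 0; }
static void emitSpan(uint16_t first, uint16_t last) { spans.push_back({first, last}); }

// One pass over an arena's things in address order, recording each maximal
// run of free things as a span [runStart, runEnd].
static void toyBuildSpans(uint16_t firstThing, uint16_t lastThing, uint16_t thingSize) {
    uint16_t runStart = firstThing;      // first free thing of the current run
    for (uint16_t t = firstThing; t <= lastThing; t += thingSize) {
        if (marked(t)) {
            if (t != runStart)           // a free run just ended
                emitSpan(runStart, t - thingSize);
            runStart = t + thingSize;
        }
    }
    if (runStart <= lastThing)           // trailing run of free things
        emitSpan(runStart, lastThing);
}

int main() {
    toyBuildSpans(32, 4064, 32);         // 32-byte things; live at 64 and 160
    assert(spans.size() == 3);
    assert(spans[0] == std::make_pair(uint16_t(32), uint16_t(32)));
    assert(spans[1] == std::make_pair(uint16_t(96), uint16_t(128)));
    assert(spans[2] == std::make_pair(uint16_t(192), uint16_t(4064)));
}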
@ -552,7 +554,7 @@ Arena::finalize(FreeOp* fop, AllocKind thingKind, size_t thingSize)
template<typename T>
static inline bool
FinalizeTypedArenas(FreeOp* fop,
ArenaHeader** src,
Arena** src,
SortedArenaList& dest,
AllocKind thingKind,
SliceBudget& budget,
@ -570,17 +572,17 @@ FinalizeTypedArenas(FreeOp* fop,
size_t thingSize = Arena::thingSize(thingKind);
size_t thingsPerArena = Arena::thingsPerArena(thingKind);
while (ArenaHeader* aheader = *src) {
*src = aheader->next;
size_t nmarked = aheader->getArena()->finalize<T>(fop, thingKind, thingSize);
while (Arena* arena = *src) {
*src = arena->next;
size_t nmarked = arena->finalize<T>(fop, thingKind, thingSize);
size_t nfree = thingsPerArena - nmarked;
if (nmarked)
dest.insertAt(aheader, nfree);
dest.insertAt(arena, nfree);
else if (keepArenas == ArenaLists::KEEP_ARENAS)
aheader->chunk()->recycleArena(aheader, dest, thingsPerArena);
arena->chunk()->recycleArena(arena, dest, thingsPerArena);
else
fop->runtime()->gc.releaseArena(aheader, maybeLock.ref());
fop->runtime()->gc.releaseArena(arena, maybeLock.ref());
budget.step(thingsPerArena);
if (budget.isOverBudget())
@ -596,7 +598,7 @@ FinalizeTypedArenas(FreeOp* fop,
*/
static bool
FinalizeArenas(FreeOp* fop,
ArenaHeader** src,
Arena** src,
SortedArenaList& dest,
AllocKind thingKind,
SliceBudget& budget,
@ -860,7 +862,7 @@ Chunk::findDecommittedArenaOffset()
MOZ_CRASH("No decommitted arenas found.");
}
ArenaHeader*
Arena*
Chunk::fetchNextDecommittedArena()
{
MOZ_ASSERT(info.numArenasFreeCommitted == 0);
@ -873,9 +875,9 @@ Chunk::fetchNextDecommittedArena()
Arena* arena = &arenas[offset];
MarkPagesInUse(arena, ArenaSize);
arena->aheader.setAsNotAllocated();
arena->setAsNotAllocated();
return &arena->aheader;
return arena;
}
inline void
@ -885,30 +887,30 @@ GCRuntime::updateOnFreeArenaAlloc(const ChunkInfo& info)
--numArenasFreeCommitted;
}
inline ArenaHeader*
inline Arena*
Chunk::fetchNextFreeArena(JSRuntime* rt)
{
MOZ_ASSERT(info.numArenasFreeCommitted > 0);
MOZ_ASSERT(info.numArenasFreeCommitted <= info.numArenasFree);
ArenaHeader* aheader = info.freeArenasHead;
info.freeArenasHead = aheader->next;
Arena* arena = info.freeArenasHead;
info.freeArenasHead = arena->next;
--info.numArenasFreeCommitted;
--info.numArenasFree;
rt->gc.updateOnFreeArenaAlloc(info);
return aheader;
return arena;
}
ArenaHeader*
Arena*
Chunk::allocateArena(JSRuntime* rt, Zone* zone, AllocKind thingKind, const AutoLockGC& lock)
{
ArenaHeader* aheader = info.numArenasFreeCommitted > 0
? fetchNextFreeArena(rt)
: fetchNextDecommittedArena();
aheader->init(zone, thingKind);
Arena* arena = info.numArenasFreeCommitted > 0
? fetchNextFreeArena(rt)
: fetchNextDecommittedArena();
arena->init(zone, thingKind);
updateChunkListAfterAlloc(rt, lock);
return aheader;
return arena;
}
inline void
@ -918,38 +920,38 @@ GCRuntime::updateOnArenaFree(const ChunkInfo& info)
}
void
Chunk::addArenaToFreeList(JSRuntime* rt, ArenaHeader* aheader)
Chunk::addArenaToFreeList(JSRuntime* rt, Arena* arena)
{
MOZ_ASSERT(!aheader->allocated());
aheader->next = info.freeArenasHead;
info.freeArenasHead = aheader;
MOZ_ASSERT(!arena->allocated());
arena->next = info.freeArenasHead;
info.freeArenasHead = arena;
++info.numArenasFreeCommitted;
++info.numArenasFree;
rt->gc.updateOnArenaFree(info);
}
void
Chunk::addArenaToDecommittedList(JSRuntime* rt, const ArenaHeader* aheader)
Chunk::addArenaToDecommittedList(JSRuntime* rt, const Arena* arena)
{
++info.numArenasFree;
decommittedArenas.set(Chunk::arenaIndex(aheader->address()));
decommittedArenas.set(Chunk::arenaIndex(arena->address()));
}
void
Chunk::recycleArena(ArenaHeader* aheader, SortedArenaList& dest, size_t thingsPerArena)
Chunk::recycleArena(Arena* arena, SortedArenaList& dest, size_t thingsPerArena)
{
aheader->setAsFullyUnused();
dest.insertAt(aheader, thingsPerArena);
arena->setAsFullyUnused();
dest.insertAt(arena, thingsPerArena);
}
void
Chunk::releaseArena(JSRuntime* rt, ArenaHeader* aheader, const AutoLockGC& lock)
Chunk::releaseArena(JSRuntime* rt, Arena* arena, const AutoLockGC& lock)
{
MOZ_ASSERT(aheader->allocated());
MOZ_ASSERT(!aheader->hasDelayedMarking);
MOZ_ASSERT(arena->allocated());
MOZ_ASSERT(!arena->hasDelayedMarking);
aheader->setAsNotAllocated();
addArenaToFreeList(rt, aheader);
arena->setAsNotAllocated();
addArenaToFreeList(rt, arena);
updateChunkListAfterFree(rt, lock);
}
@ -957,19 +959,19 @@ bool
Chunk::decommitOneFreeArena(JSRuntime* rt, AutoLockGC& lock)
{
MOZ_ASSERT(info.numArenasFreeCommitted > 0);
ArenaHeader* aheader = fetchNextFreeArena(rt);
Arena* arena = fetchNextFreeArena(rt);
updateChunkListAfterAlloc(rt, lock);
bool ok;
{
AutoUnlockGC unlock(lock);
ok = MarkPagesUnused(aheader->getArena(), ArenaSize);
ok = MarkPagesUnused(arena, ArenaSize);
}
if (ok)
addArenaToDecommittedList(rt, aheader);
addArenaToDecommittedList(rt, arena);
else
addArenaToFreeList(rt, aheader);
addArenaToFreeList(rt, arena);
updateChunkListAfterFree(rt, lock);
return ok;
@ -979,7 +981,7 @@ void
Chunk::decommitAllArenasWithoutUnlocking(const AutoLockGC& lock)
{
for (size_t i = 0; i < ArenasPerChunk; ++i) {
if (decommittedArenas.get(i) || arenas[i].aheader.allocated())
if (decommittedArenas.get(i) || arenas[i].allocated())
continue;
if (MarkPagesUnused(&arenas[i], ArenaSize)) {
@ -1068,7 +1070,7 @@ GCRuntime::pickChunk(const AutoLockGC& lock,
return chunk;
}
ArenaHeader*
Arena*
GCRuntime::allocateArena(Chunk* chunk, Zone* zone, AllocKind thingKind, const AutoLockGC& lock)
{
MOZ_ASSERT(chunk->hasAvailableArenas());
@ -1081,23 +1083,23 @@ GCRuntime::allocateArena(Chunk* chunk, Zone* zone, AllocKind thingKind, const Au
return nullptr;
}
ArenaHeader* aheader = chunk->allocateArena(rt, zone, thingKind, lock);
Arena* arena = chunk->allocateArena(rt, zone, thingKind, lock);
zone->usage.addGCArena();
// Trigger an incremental slice if needed.
if (!rt->isHeapMinorCollecting() && !isHeapCompacting())
maybeAllocTriggerZoneGC(zone, lock);
return aheader;
return arena;
}
void
GCRuntime::releaseArena(ArenaHeader* aheader, const AutoLockGC& lock)
GCRuntime::releaseArena(Arena* arena, const AutoLockGC& lock)
{
aheader->zone->usage.removeGCArena();
arena->zone->usage.removeGCArena();
if (isBackgroundSweeping())
aheader->zone->threshold.updateForRemovedArena(tunables);
return aheader->chunk()->releaseArena(rt, aheader, lock);
arena->zone->threshold.updateForRemovedArena(tunables);
return arena->chunk()->releaseArena(rt, arena, lock);
}
GCRuntime::GCRuntime(JSRuntime* rt) :
@ -1928,14 +1930,14 @@ ZoneHeapThreshold::updateForRemovedArena(const GCSchedulingTunables& tunables)
}
void
GCMarker::delayMarkingArena(ArenaHeader* aheader)
GCMarker::delayMarkingArena(Arena* arena)
{
if (aheader->hasDelayedMarking) {
if (arena->hasDelayedMarking) {
/* Arena already scheduled to be marked later */
return;
}
aheader->setNextDelayedMarking(unmarkedArenaStackTop);
unmarkedArenaStackTop = aheader;
arena->setNextDelayedMarking(unmarkedArenaStackTop);
unmarkedArenaStackTop = arena;
markLaterArenas++;
}
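Together with setNextDelayedMarking()/getNextDelayedMarking(), this forms an intrusive stack threaded through the arenas' packed aux links, and arenas already on the stack are left where they are. A toy version of the push (illustrative, with a plain pointer standing in for the packed link):

#include <cassert>

struct ToyArena {
    ToyArena* nextDelayed = nullptr;  // stands in for the packed auxNextLink
    bool hasDelayedMarking = false;
};

// Push an arena at most once, mirroring delayMarkingArena() above.
void toyDelay(ToyArena*& top, ToyArena* arena) {
    if (arena->hasDelayedMarking)
        return;  // already scheduled to be marked later
    arena->hasDelayedMarking = true;
    arena->nextDelayed = top;
    top = arena;
}

int main() {
    ToyArena a, b;
    ToyArena* top = nullptr;
    toyDelay(top, &a);
    toyDelay(top, &b);
    toyDelay(top, &a);  // no-op: &a is already on the stack
    assert(top == &b && top->nextDelayed == &a);
}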
@ -1943,8 +1945,8 @@ void
GCMarker::delayMarkingChildren(const void* thing)
{
const TenuredCell* cell = TenuredCell::fromPointer(thing);
cell->arenaHeader()->markOverflow = 1;
delayMarkingArena(cell->arenaHeader());
cell->arena()->markOverflow = 1;
delayMarkingArena(cell->arena());
}
inline void
@ -1954,9 +1956,9 @@ ArenaLists::prepareForIncrementalGC(JSRuntime* rt)
FreeSpan* span = freeLists[i];
if (span != &placeholder) {
if (!span->isEmpty()) {
ArenaHeader* aheader = span->getArena();
aheader->allocatedDuringIncremental = true;
rt->gc.marker.delayMarkingArena(aheader);
Arena* arena = span->getArena();
arena->allocatedDuringIncremental = true;
rt->gc.marker.delayMarkingArena(arena);
} else {
freeLists[i] = &placeholder;
}
@ -2022,16 +2024,16 @@ CanRelocateAllocKind(AllocKind kind)
return IsObjectAllocKind(kind);
}
ArenaHeader*
ArenaList::removeRemainingArenas(ArenaHeader** arenap)
Arena*
ArenaList::removeRemainingArenas(Arena** arenap)
{
// This is only ever called to remove arenas that are after the cursor, so
// we don't need to update it.
#ifdef DEBUG
for (ArenaHeader* arena = *arenap; arena; arena = arena->next)
for (Arena* arena = *arenap; arena; arena = arena->next)
MOZ_ASSERT(cursorp_ != &arena->next);
#endif
ArenaHeader* remainingArenas = *arenap;
Arena* remainingArenas = *arenap;
*arenap = nullptr;
check();
return remainingArenas;
@ -2047,7 +2049,7 @@ ShouldRelocateAllArenas(JS::gcreason::Reason reason)
* Choose which arenas to relocate all cells from. Return an arena cursor that
* can be passed to removeRemainingArenas().
*/
ArenaHeader**
Arena**
ArenaList::pickArenasToRelocate(size_t& arenaTotalOut, size_t& relocTotalOut)
{
// Relocate the greatest number of arenas such that the number of used cells
@ -2065,17 +2067,17 @@ ArenaList::pickArenasToRelocate(size_t& arenaTotalOut, size_t& relocTotalOut)
if (isCursorAtEnd())
return nullptr;
ArenaHeader** arenap = cursorp_; // Next arena to consider for relocation.
size_t previousFreeCells = 0; // Count of free cells before arenap.
size_t followingUsedCells = 0; // Count of used cells after arenap.
size_t fullArenaCount = 0; // Number of full arenas (not relocated).
size_t nonFullArenaCount = 0; // Number of non-full arenas (considered for relocation).
size_t arenaIndex = 0; // Index of the next arena to consider.
Arena** arenap = cursorp_; // Next arena to consider for relocation.
size_t previousFreeCells = 0; // Count of free cells before arenap.
size_t followingUsedCells = 0; // Count of used cells after arenap.
size_t fullArenaCount = 0; // Number of full arenas (not relocated).
size_t nonFullArenaCount = 0; // Number of non-full arenas (considered for relocation).
size_t arenaIndex = 0; // Index of the next arena to consider.
for (ArenaHeader* arena = head_; arena != *cursorp_; arena = arena->next)
for (Arena* arena = head_; arena != *cursorp_; arena = arena->next)
fullArenaCount++;
for (ArenaHeader* arena = *cursorp_; arena; arena = arena->next) {
for (Arena* arena = *cursorp_; arena; arena = arena->next) {
followingUsedCells += arena->countUsedCells();
nonFullArenaCount++;
}
@ -2084,7 +2086,7 @@ ArenaList::pickArenasToRelocate(size_t& arenaTotalOut, size_t& relocTotalOut)
size_t cellsPerArena = Arena::thingsPerArena((*arenap)->getAllocKind());
while (*arenap) {
ArenaHeader* arena = *arenap;
Arena* arena = *arenap;
if (followingUsedCells <= previousFreeCells)
break;
@ -2187,25 +2189,25 @@ RelocateCell(Zone* zone, TenuredCell* src, AllocKind thingKind, size_t thingSize
}
static void
RelocateArena(ArenaHeader* aheader, SliceBudget& sliceBudget)
RelocateArena(Arena* arena, SliceBudget& sliceBudget)
{
MOZ_ASSERT(aheader->allocated());
MOZ_ASSERT(!aheader->hasDelayedMarking);
MOZ_ASSERT(!aheader->markOverflow);
MOZ_ASSERT(!aheader->allocatedDuringIncremental);
MOZ_ASSERT(arena->allocated());
MOZ_ASSERT(!arena->hasDelayedMarking);
MOZ_ASSERT(!arena->markOverflow);
MOZ_ASSERT(!arena->allocatedDuringIncremental);
Zone* zone = aheader->zone;
Zone* zone = arena->zone;
AllocKind thingKind = aheader->getAllocKind();
size_t thingSize = aheader->getThingSize();
AllocKind thingKind = arena->getAllocKind();
size_t thingSize = arena->getThingSize();
for (ArenaCellIterUnderFinalize i(aheader); !i.done(); i.next()) {
for (ArenaCellIterUnderFinalize i(arena); !i.done(); i.next()) {
RelocateCell(zone, i.getCell(), thingKind, thingSize);
sliceBudget.step();
}
#ifdef DEBUG
for (ArenaCellIterUnderFinalize i(aheader); !i.done(); i.next()) {
for (ArenaCellIterUnderFinalize i(arena); !i.done(); i.next()) {
TenuredCell* src = i.getCell();
MOZ_ASSERT(RelocationOverlay::isCellForwarded(src));
TenuredCell* dest = Forwarded(src);
@ -2232,13 +2234,13 @@ ShouldProtectRelocatedArenas(JS::gcreason::Reason reason)
* Relocate all arenas identified by pickArenasToRelocate: for each arena,
* relocate each cell within it, then add it to a list of relocated arenas.
*/
ArenaHeader*
ArenaList::relocateArenas(ArenaHeader* toRelocate, ArenaHeader* relocated, SliceBudget& sliceBudget,
Arena*
ArenaList::relocateArenas(Arena* toRelocate, Arena* relocated, SliceBudget& sliceBudget,
gcstats::Statistics& stats)
{
check();
while (ArenaHeader* arena = toRelocate) {
while (Arena* arena = toRelocate) {
toRelocate = arena->next;
RelocateArena(arena, sliceBudget);
// Prepend to list of relocated arenas
@ -2276,7 +2278,7 @@ ShouldRelocateZone(size_t arenaCount, size_t relocCount, JS::gcreason::Reason re
}
bool
ArenaLists::relocateArenas(Zone* zone, ArenaHeader*& relocatedListOut, JS::gcreason::Reason reason,
ArenaLists::relocateArenas(Zone* zone, Arena*& relocatedListOut, JS::gcreason::Reason reason,
SliceBudget& sliceBudget, gcstats::Statistics& stats)
{
// This is only called from the main thread while we are doing a GC, so
@ -2293,7 +2295,7 @@ ArenaLists::relocateArenas(Zone* zone, ArenaHeader*& relocatedListOut, JS::gcrea
for (auto i : AllAllocKinds()) {
if (CanRelocateAllocKind(i)) {
ArenaList& al = arenaLists[i];
ArenaHeader* allArenas = al.head();
Arena* allArenas = al.head();
al.clear();
relocatedListOut = al.relocateArenas(allArenas, relocatedListOut, sliceBudget, stats);
}
@ -2301,7 +2303,7 @@ ArenaLists::relocateArenas(Zone* zone, ArenaHeader*& relocatedListOut, JS::gcrea
} else {
size_t arenaCount = 0;
size_t relocCount = 0;
AllAllocKindArray<ArenaHeader**> toRelocate;
AllAllocKindArray<Arena**> toRelocate;
for (auto i : AllAllocKinds()) {
toRelocate[i] = nullptr;
@ -2316,7 +2318,7 @@ ArenaLists::relocateArenas(Zone* zone, ArenaHeader*& relocatedListOut, JS::gcrea
for (auto i : AllAllocKinds()) {
if (toRelocate[i]) {
ArenaList& al = arenaLists[i];
ArenaHeader* arenas = al.removeRemainingArenas(toRelocate[i]);
Arena* arenas = al.removeRemainingArenas(toRelocate[i]);
relocatedListOut = al.relocateArenas(arenas, relocatedListOut, sliceBudget, stats);
}
}
@ -2326,7 +2328,7 @@ ArenaLists::relocateArenas(Zone* zone, ArenaHeader*& relocatedListOut, JS::gcrea
}
bool
GCRuntime::relocateArenas(Zone* zone, JS::gcreason::Reason reason, ArenaHeader*& relocatedListOut,
GCRuntime::relocateArenas(Zone* zone, JS::gcreason::Reason reason, Arena*& relocatedListOut,
SliceBudget& sliceBudget)
{
gcstats::AutoPhase ap(stats, gcstats::PHASE_COMPACT_MOVE);
@ -2347,7 +2349,7 @@ GCRuntime::relocateArenas(Zone* zone, JS::gcreason::Reason reason, ArenaHeader*&
if (CanRelocateAllocKind(i)) {
ArenaList& al = zone->arenas.arenaLists[i];
size_t freeCells = 0;
for (ArenaHeader* arena = al.arenaAfterCursor(); arena; arena = arena->next)
for (Arena* arena = al.arenaAfterCursor(); arena; arena = arena->next)
freeCells += arena->countFreeCells();
MOZ_ASSERT(freeCells < thingsPerArena);
}
@ -2420,7 +2422,7 @@ GCRuntime::sweepZoneAfterCompacting(Zone* zone)
template <typename T>
static void
UpdateCellPointersTyped(MovingTracer* trc, ArenaHeader* arena, JS::TraceKind traceKind)
UpdateCellPointersTyped(MovingTracer* trc, Arena* arena, JS::TraceKind traceKind)
{
for (ArenaCellIterUnderGC i(arena); !i.done(); i.next()) {
T* cell = reinterpret_cast<T*>(i.getCell());
@ -2433,7 +2435,7 @@ UpdateCellPointersTyped(MovingTracer* trc, ArenaHeader* arena, JS::TraceKind tra
* Update the internal pointers for all cells in an arena.
*/
static void
UpdateCellPointers(MovingTracer* trc, ArenaHeader* arena)
UpdateCellPointers(MovingTracer* trc, Arena* arena)
{
AllocKind kind = arena->getAllocKind();
JS::TraceKind traceKind = MapAllocToTraceKind(kind);
@ -2493,17 +2495,17 @@ struct ArenasToUpdate
};
ArenasToUpdate(Zone* zone, KindsToUpdate kinds);
bool done() { return kind == AllocKind::LIMIT; }
ArenaHeader* getArenasToUpdate(AutoLockHelperThreadState& lock, unsigned max);
Arena* getArenasToUpdate(AutoLockHelperThreadState& lock, unsigned max);
private:
KindsToUpdate kinds; // Selects which thing kinds to iterate
Zone* zone; // Zone to process
AllocKind kind; // Current alloc kind to process
ArenaHeader* arena; // Next arena to process
Arena* arena; // Next arena to process
AllocKind nextAllocKind(AllocKind i) { return AllocKind(uint8_t(i) + 1); }
bool shouldProcessKind(AllocKind kind);
ArenaHeader* next(AutoLockHelperThreadState& lock);
Arena* next(AutoLockHelperThreadState& lock);
};
bool ArenasToUpdate::shouldProcessKind(AllocKind kind)
@ -2541,7 +2543,7 @@ ArenasToUpdate::ArenasToUpdate(Zone* zone, KindsToUpdate kinds)
MOZ_ASSERT(kinds && !(kinds & ~ALL));
}
ArenaHeader*
Arena*
ArenasToUpdate::next(AutoLockHelperThreadState& lock)
{
// Find the next arena to update.
@ -2566,17 +2568,17 @@ ArenasToUpdate::next(AutoLockHelperThreadState& lock)
return nullptr;
}
ArenaHeader*
Arena*
ArenasToUpdate::getArenasToUpdate(AutoLockHelperThreadState& lock, unsigned count)
{
if (done())
return nullptr;
ArenaHeader* head = nullptr;
ArenaHeader* tail = nullptr;
Arena* head = nullptr;
Arena* tail = nullptr;
for (unsigned i = 0; i < count; ++i) {
ArenaHeader* arena = next(lock);
Arena* arena = next(lock);
if (!arena)
break;
@ -2606,7 +2608,7 @@ struct UpdateCellPointersTask : public GCParallelTask
private:
JSRuntime* rt_;
ArenasToUpdate* source_;
ArenaHeader* arenaList_;
Arena* arenaList_;
virtual void run() override;
void getArenasToUpdate(AutoLockHelperThreadState& lock);
@ -2631,7 +2633,7 @@ void
UpdateCellPointersTask::updateArenas()
{
MovingTracer trc(rt_);
for (ArenaHeader* arena = arenaList_;
for (Arena* arena = arenaList_;
arena;
arena = arena->getNextArenaToUpdateAndUnlink())
{
@ -2775,11 +2777,11 @@ GCRuntime::updatePointersToRelocatedCells(Zone* zone)
}
void
GCRuntime::protectAndHoldArenas(ArenaHeader* arenaList)
GCRuntime::protectAndHoldArenas(Arena* arenaList)
{
for (ArenaHeader* arena = arenaList; arena; ) {
for (Arena* arena = arenaList; arena; ) {
MOZ_ASSERT(arena->allocated());
ArenaHeader* next = arena->next;
Arena* next = arena->next;
if (!next) {
// Prepend to hold list before we protect the memory.
arena->next = relocatedArenasToRelease;
@ -2793,14 +2795,14 @@ GCRuntime::protectAndHoldArenas(ArenaHeader* arenaList)
void
GCRuntime::unprotectHeldRelocatedArenas()
{
for (ArenaHeader* arena = relocatedArenasToRelease; arena; arena = arena->next) {
for (Arena* arena = relocatedArenasToRelease; arena; arena = arena->next) {
UnprotectPages(arena, ArenaSize);
MOZ_ASSERT(arena->allocated());
}
}
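// Aside: protectAndHoldArenas above depends on ordering: each |next| link is
// read before its page becomes unreadable, and the hold list is fully spliced
// together before the last page is protected. A hedged POSIX sketch of the
// same pattern (Page, kPageSize, and ProtectAndHold are illustrative; the real
// code uses the engine's ProtectPages/UnprotectPages wrappers, not raw mprotect):
#include <cstddef>
#include <sys/mman.h>

struct Page { Page* next; };         // Assumed page-sized and page-aligned.
static const size_t kPageSize = 4096;

static void ProtectAndHold(Page* list, Page*& hold) {
    for (Page* p = list; p; ) {
        Page* next = p->next;        // Read before the page is made unreadable.
        if (!next) {
            p->next = hold;          // Splice the old hold list behind the chain...
            hold = list;             // ...and publish it before protecting this page.
        }
        mprotect(p, kPageSize, PROT_NONE);
        p = next;
    }
}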
void
GCRuntime::releaseRelocatedArenas(ArenaHeader* arenaList)
GCRuntime::releaseRelocatedArenas(Arena* arenaList)
{
AutoLockGC lock(rt);
releaseRelocatedArenasWithoutUnlocking(arenaList, lock);
@ -2808,28 +2810,26 @@ GCRuntime::releaseRelocatedArenas(ArenaHeader* arenaList)
}
void
GCRuntime::releaseRelocatedArenasWithoutUnlocking(ArenaHeader* arenaList, const AutoLockGC& lock)
GCRuntime::releaseRelocatedArenasWithoutUnlocking(Arena* arenaList, const AutoLockGC& lock)
{
// Release the relocated arenas, now containing only forwarding pointers
unsigned count = 0;
while (arenaList) {
ArenaHeader* aheader = arenaList;
Arena* arena = arenaList;
arenaList = arenaList->next;
// Clear the mark bits
aheader->unmarkAll();
arena->unmarkAll();
// Mark arena as empty
aheader->setAsFullyUnused();
arena->setAsFullyUnused();
#if defined(JS_CRASH_DIAGNOSTICS) || defined(JS_GC_ZEAL)
Arena* arena = aheader->getArena();
AllocKind thingKind = aheader->getAllocKind();
JS_POISON(reinterpret_cast<void*>(arena->thingsStart(thingKind)),
JS_MOVED_TENURED_PATTERN, Arena::thingsSpan(thingKind));
JS_POISON(reinterpret_cast<void*>(arena->thingsStart()),
JS_MOVED_TENURED_PATTERN, arena->getThingsSpan());
#endif
releaseArena(aheader, lock);
releaseArena(arena, lock);
++count;
}
}
@ -2859,12 +2859,12 @@ GCRuntime::releaseHeldRelocatedArenasWithoutUnlocking(const AutoLockGC& lock)
}
void
ReleaseArenaList(JSRuntime* rt, ArenaHeader* aheader, const AutoLockGC& lock)
ReleaseArenaList(JSRuntime* rt, Arena* arena, const AutoLockGC& lock)
{
ArenaHeader* next;
for (; aheader; aheader = next) {
next = aheader->next;
rt->gc.releaseArena(aheader, lock);
Arena* next;
for (; arena; arena = next) {
next = arena->next;
rt->gc.releaseArena(arena, lock);
}
}
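// Aside: ReleaseArenaList is the standard destroy-a-singly-linked-list idiom;
// the successor must be saved before the node is released, because releasing
// may poison, decommit, or reuse the memory holding |next|. The same shape
// with plain free() (Node is illustrative):
#include <cstdlib>

struct Node { Node* next; };

static void ReleaseAll(Node* head) {
    Node* next;
    for (; head; head = next) {
        next = head->next;          // Must be read before the node is freed.
        std::free(head);
    }
}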
@ -2896,18 +2896,19 @@ ArenaLists::finalizeNow(FreeOp* fop, const FinalizePhase& phase)
}
void
ArenaLists::finalizeNow(FreeOp* fop, AllocKind thingKind, KeepArenasEnum keepArenas, ArenaHeader** empty)
ArenaLists::finalizeNow(FreeOp* fop, AllocKind thingKind, KeepArenasEnum keepArenas, Arena** empty)
{
MOZ_ASSERT(!IsBackgroundFinalized(thingKind));
forceFinalizeNow(fop, thingKind, keepArenas, empty);
}
void
ArenaLists::forceFinalizeNow(FreeOp* fop, AllocKind thingKind, KeepArenasEnum keepArenas, ArenaHeader** empty)
ArenaLists::forceFinalizeNow(FreeOp* fop, AllocKind thingKind,
KeepArenasEnum keepArenas, Arena** empty)
{
MOZ_ASSERT(backgroundFinalizeState[thingKind] == BFS_DONE);
ArenaHeader* arenas = arenaLists[thingKind].head();
Arena* arenas = arenaLists[thingKind].head();
if (!arenas)
return;
arenaLists[thingKind].clear();
@ -2973,7 +2974,7 @@ ArenaLists::queueForBackgroundSweep(FreeOp* fop, AllocKind thingKind)
}
/*static*/ void
ArenaLists::backgroundFinalize(FreeOp* fop, ArenaHeader* listHead, ArenaHeader** empty)
ArenaLists::backgroundFinalize(FreeOp* fop, Arena* listHead, Arena** empty)
{
MOZ_ASSERT(listHead);
MOZ_ASSERT(empty);
@ -3401,13 +3402,13 @@ GCRuntime::sweepBackgroundThings(ZoneList& zones, LifoAlloc& freeBlocks, ThreadT
return;
// We must finalize thing kinds in the order specified by BackgroundFinalizePhases.
ArenaHeader* emptyArenas = nullptr;
Arena* emptyArenas = nullptr;
FreeOp fop(rt, threadType);
for (unsigned phase = 0 ; phase < ArrayLength(BackgroundFinalizePhases) ; ++phase) {
for (Zone* zone = zones.front(); zone; zone = zone->nextZone()) {
for (unsigned index = 0 ; index < BackgroundFinalizePhases[phase].length ; ++index) {
AllocKind kind = BackgroundFinalizePhases[phase].kinds[index];
ArenaHeader* arenas = zone->arenas.arenaListsToSweep[kind];
Arena* arenas = zone->arenas.arenaListsToSweep[kind];
MOZ_RELEASE_ASSERT(uintptr_t(arenas) != uintptr_t(-1));
if (arenas)
ArenaLists::backgroundFinalize(&fop, arenas, &emptyArenas);
@ -4464,15 +4465,15 @@ js::gc::MarkingValidator::validate()
if (chunk->decommittedArenas.get(i))
continue;
Arena* arena = &chunk->arenas[i];
if (!arena->aheader.allocated())
if (!arena->allocated())
continue;
if (!arena->aheader.zone->isGCSweeping())
if (!arena->zone->isGCSweeping())
continue;
if (arena->aheader.allocatedDuringIncremental)
if (arena->allocatedDuringIncremental)
continue;
AllocKind kind = arena->aheader.getAllocKind();
uintptr_t thing = arena->thingsStart(kind);
AllocKind kind = arena->getAllocKind();
uintptr_t thing = arena->thingsStart();
uintptr_t end = arena->thingsEnd();
while (thing < end) {
Cell* cell = (Cell*)thing;
@ -5308,7 +5309,7 @@ GCRuntime::endSweepingZoneGroup()
sweepBackgroundThings(zones, freeLifoAlloc, MainThread);
/* Reset the list of arenas marked as being allocated during sweep phase. */
while (ArenaHeader* arena = arenasAllocatedDuringSweep) {
while (Arena* arena = arenasAllocatedDuringSweep) {
arenasAllocatedDuringSweep = arena->getNextAllocDuringSweep();
arena->unsetAllocDuringSweep();
}
@ -5412,9 +5413,9 @@ SweepThing(ObjectGroup* group, AutoClearTypeInferenceStateOnOOM* oom)
template <typename T, typename... Args>
static bool
SweepArenaList(ArenaHeader** arenasToSweep, SliceBudget& sliceBudget, Args... args)
SweepArenaList(Arena** arenasToSweep, SliceBudget& sliceBudget, Args... args)
{
while (ArenaHeader* arena = *arenasToSweep) {
while (Arena* arena = *arenasToSweep) {
for (ArenaCellIterUnderGC i(arena); !i.done(); i.next())
SweepThing(i.get<T>(), args...);
@ -5668,7 +5669,7 @@ GCRuntime::compactPhase(JS::gcreason::Reason reason, SliceBudget& sliceBudget)
while (!zonesToMaybeCompact.isEmpty()) {
Zone* zone = zonesToMaybeCompact.front();
MOZ_ASSERT(zone->isGCFinished());
ArenaHeader* relocatedArenas = nullptr;
Arena* relocatedArenas = nullptr;
if (relocateArenas(zone, reason, relocatedArenas, sliceBudget)) {
zone->setGCState(Zone::Compact);
updatePointersToRelocatedCells(zone);
@ -6877,8 +6878,8 @@ gc::MergeCompartments(JSCompartment* source, JSCompartment* target)
for (auto thingKind : AllAllocKinds()) {
for (ArenaIter aiter(source->zone(), thingKind); !aiter.done(); aiter.next()) {
ArenaHeader* aheader = aiter.get();
aheader->zone = target->zone();
Arena* arena = aiter.get();
arena->zone = target->zone();
}
}
@ -7093,13 +7094,13 @@ ArenaLists::adoptArenas(JSRuntime* rt, ArenaLists* fromArenaLists)
ArenaList* toList = &arenaLists[thingKind];
fromList->check();
toList->check();
ArenaHeader* next;
for (ArenaHeader* fromHeader = fromList->head(); fromHeader; fromHeader = next) {
// Copy fromHeader->next before releasing/reinserting.
next = fromHeader->next;
Arena* next;
for (Arena* fromArena = fromList->head(); fromArena; fromArena = next) {
// Copy fromArena->next before releasing/reinserting.
next = fromArena->next;
MOZ_ASSERT(!fromHeader->isEmpty());
toList->insertAtCursor(fromHeader);
MOZ_ASSERT(!fromArena->isEmpty());
toList->insertAtCursor(fromArena);
}
fromList->clear();
toList->check();
@ -7107,12 +7108,12 @@ ArenaLists::adoptArenas(JSRuntime* rt, ArenaLists* fromArenaLists)
}
bool
ArenaLists::containsArena(JSRuntime* rt, ArenaHeader* needle)
ArenaLists::containsArena(JSRuntime* rt, Arena* needle)
{
AutoLockGC lock(rt);
ArenaList& list = arenaLists[needle->getAllocKind()];
for (ArenaHeader* aheader = list.head(); aheader; aheader = aheader->next) {
if (aheader == needle)
for (Arena* arena = list.head(); arena; arena = arena->next) {
if (arena == needle)
return true;
}
return false;

@ -291,8 +291,8 @@ class AutoMaybeStartBackgroundAllocation;
*/
struct SortedArenaListSegment
{
ArenaHeader* head;
ArenaHeader** tailp;
Arena* head;
Arena** tailp;
void clear() {
head = nullptr;
@ -303,21 +303,21 @@ struct SortedArenaListSegment
return tailp == &head;
}
// Appends |aheader| to this segment.
void append(ArenaHeader* aheader) {
MOZ_ASSERT(aheader);
MOZ_ASSERT_IF(head, head->getAllocKind() == aheader->getAllocKind());
*tailp = aheader;
tailp = &aheader->next;
// Appends |arena| to this segment.
void append(Arena* arena) {
MOZ_ASSERT(arena);
MOZ_ASSERT_IF(head, head->getAllocKind() == arena->getAllocKind());
*tailp = arena;
tailp = &arena->next;
}
// Points the tail of this segment at |aheader|, which may be null. Note
// Points the tail of this segment at |arena|, which may be null. Note
// that this does not change the tail itself, but merely which arena
// follows it. This essentially turns the tail into a cursor (see also the
// description of ArenaList), but from the perspective of a SortedArenaList
// this makes no difference.
void linkTo(ArenaHeader* aheader) {
*tailp = aheader;
void linkTo(Arena* arena) {
*tailp = arena;
}
};
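// Aside: head/tailp is the classic "pointer to the last next-link"
// representation: append is O(1) with no empty-list special case, because
// |tailp| initially points at |head| itself. A hedged free-standing sketch
// (Item and Segment are illustrative stand-ins for Arena and
// SortedArenaListSegment):
struct Item { Item* next; };

struct Segment {
    Item* head;
    Item** tailp;

    void clear() { head = nullptr; tailp = &head; }
    bool isEmpty() const { return tailp == &head; }
    void append(Item* item) {   // Caller terminates the list via linkTo(nullptr).
        *tailp = item;          // Writes head for an empty list, else the old tail's next.
        tailp = &item->next;
    }
    void linkTo(Item* item) {   // Retarget what follows the tail; tailp is unchanged.
        *tailp = item;
    }
};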
@ -357,8 +357,8 @@ class ArenaList {
//
// |cursorp_| is never null.
//
ArenaHeader* head_;
ArenaHeader** cursorp_;
Arena* head_;
Arena** cursorp_;
void copy(const ArenaList& other) {
other.check();
@ -394,7 +394,7 @@ class ArenaList {
MOZ_ASSERT_IF(!head_, cursorp_ == &head_);
// If there's an arena following the cursor, it must not be full.
ArenaHeader* cursor = *cursorp_;
Arena* cursor = *cursorp_;
MOZ_ASSERT_IF(cursor, cursor->hasFreeThings());
#endif
}
@ -417,7 +417,7 @@ class ArenaList {
}
// This returns nullptr if the list is empty.
ArenaHeader* head() const {
Arena* head() const {
check();
return head_;
}
@ -433,27 +433,27 @@ class ArenaList {
}
// This can return nullptr.
ArenaHeader* arenaAfterCursor() const {
Arena* arenaAfterCursor() const {
check();
return *cursorp_;
}
// This returns the arena after the cursor and moves the cursor past it.
ArenaHeader* takeNextArena() {
Arena* takeNextArena() {
check();
ArenaHeader* aheader = *cursorp_;
if (!aheader)
Arena* arena = *cursorp_;
if (!arena)
return nullptr;
cursorp_ = &aheader->next;
cursorp_ = &arena->next;
check();
return aheader;
return arena;
}
// This does two things.
// - Inserts |a| at the cursor.
// - Leaves the cursor sitting just before |a|, if |a| is not full, or just
// after |a|, if |a| is full.
void insertAtCursor(ArenaHeader* a) {
void insertAtCursor(Arena* a) {
check();
a->next = *cursorp_;
*cursorp_ = a;
@ -465,7 +465,7 @@ class ArenaList {
}
// Inserts |a| at the cursor, then moves the cursor past it.
void insertBeforeCursor(ArenaHeader* a) {
void insertBeforeCursor(Arena* a) {
check();
a->next = *cursorp_;
*cursorp_ = a;
@ -488,10 +488,10 @@ class ArenaList {
return *this;
}
ArenaHeader* removeRemainingArenas(ArenaHeader** arenap);
ArenaHeader** pickArenasToRelocate(size_t& arenaTotalOut, size_t& relocTotalOut);
ArenaHeader* relocateArenas(ArenaHeader* toRelocate, ArenaHeader* relocated,
SliceBudget& sliceBudget, gcstats::Statistics& stats);
Arena* removeRemainingArenas(Arena** arenap);
Arena** pickArenasToRelocate(size_t& arenaTotalOut, size_t& relocTotalOut);
Arena* relocateArenas(Arena* toRelocate, Arena* relocated,
SliceBudget& sliceBudget, gcstats::Statistics& stats);
};
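// Aside: a hedged working model of the cursor discipline above, outside the
// real class (MiniArena and MiniArenaList are illustrative). The cursor is a
// pointer to a next-link, never null, and every arena before it is full while
// every arena after it has free things:
struct MiniArena { MiniArena* next; bool full; };

struct MiniArenaList {
    MiniArena* head = nullptr;
    MiniArena** cursorp = &head;

    void insertAtCursor(MiniArena* a) {
        a->next = *cursorp;
        *cursorp = a;
        if (a->full)                // Keep the all-full prefix in front of the cursor.
            cursorp = &a->next;
    }
    MiniArena* takeNextArena() {    // Next arena with free things, if any.
        MiniArena* a = *cursorp;
        if (a)
            cursorp = &a->next;
        return a;
    }
};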
/*
@ -513,14 +513,14 @@ class SortedArenaList
private:
// The maximum number of GC things that an arena can hold.
static const size_t MaxThingsPerArena = (ArenaSize - sizeof(ArenaHeader)) / MinThingSize;
static const size_t MaxThingsPerArena = (ArenaSize - ArenaHeaderSize) / MinThingSize;
size_t thingsPerArena_;
SortedArenaListSegment segments[MaxThingsPerArena + 1];
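// Aside: with the usual 64-bit layout this arithmetic is concrete. Assuming
// ArenaSize is 4096 and MinThingSize is 16 (both assumptions here, defined
// elsewhere rather than in this hunk), the merged header comes to
// sizeof(size_t) + 2 * sizeof(uintptr_t) + sizeof(size_t) = 8 + 16 + 8 = 32
// bytes, so MaxThingsPerArena = (4096 - 32) / 16 = 254 and |segments| has 255
// buckets, one per possible count of free things (0 through 254). The named
// constant presumably replaces sizeof(ArenaHeader) because, once the header is
// merged into Arena, sizeof(Arena) is the whole arena rather than its header.
static_assert((4096 - 32) / 16 == 254, "worked example for 64-bit sizes");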
// Convenience functions to get the nth head and tail.
ArenaHeader* headAt(size_t n) { return segments[n].head; }
ArenaHeader** tailAt(size_t n) { return segments[n].tailp; }
Arena* headAt(size_t n) { return segments[n].head; }
Arena** tailAt(size_t n) { return segments[n].tailp; }
public:
explicit SortedArenaList(size_t thingsPerArena = MaxThingsPerArena) {
@ -540,14 +540,14 @@ class SortedArenaList
segments[i].clear();
}
// Inserts a header, which has room for |nfree| more things, in its segment.
void insertAt(ArenaHeader* aheader, size_t nfree) {
// Inserts an arena, which has room for |nfree| more things, in its segment.
void insertAt(Arena* arena, size_t nfree) {
MOZ_ASSERT(nfree <= thingsPerArena_);
segments[nfree].append(aheader);
segments[nfree].append(arena);
}
// Remove all empty arenas, inserting them as a linked list.
void extractEmpty(ArenaHeader** empty) {
void extractEmpty(Arena** empty) {
SortedArenaListSegment& segment = segments[thingsPerArena_];
if (segment.head) {
*segment.tailp = *empty;
@ -559,7 +559,7 @@ class SortedArenaList
// Links up the tail of each non-empty segment to the head of the next
// non-empty segment, creating a contiguous list that is returned as an
// ArenaList. This is not a destructive operation: neither the head nor tail
// of any segment is modified. However, note that the ArenaHeaders in the
// of any segment is modified. However, note that the Arenas in the
// resulting ArenaList should be treated as read-only unless the
// SortedArenaList is no longer needed: inserting or removing arenas would
// invalidate the SortedArenaList.
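// Aside: what "linking up" means in list terms, as a hedged sketch reusing the
// Item/Segment stand-ins from the SortedArenaListSegment note above: walk the
// buckets once, point each non-empty segment's tail at the next non-empty
// head, and null-terminate the final tail. Heads and tailp fields are never
// moved, which is why later inserts invalidate the result.
static Item* LinkSegments(Segment* segments, size_t count) {
    Item* result = nullptr;
    Item** tailp = &result;
    for (size_t i = 0; i < count; ++i) {
        if (!segments[i].head)
            continue;
        *tailp = segments[i].head;  // Chain the previous tail to this head.
        tailp = segments[i].tailp;  // This segment's tail is now the list tail.
    }
    *tailp = nullptr;               // Terminate; the real code builds an ArenaList
    return result;                  // with a cursor instead of a bare head.
}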
@ -590,7 +590,7 @@ class ArenaLists
* free things. Initially all the spans are initialized as empty. After we
* find a new arena with available things we move its first free span into
* the list and set the arena as fully allocated. This way we do not need to
* update the arena header after the initial allocation. When starting the
* update the arena after the initial allocation. When starting the
* GC we only move the head of the list of spans back to the arena
* only for the arena that was not fully allocated.
*/
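// Aside: a hedged sketch of the span-based free list described above, with a
// simplified [first, last] span of byte addresses instead of the real
// encoding. Allocation bumps |first|; fetching the arena's next span once this
// one is exhausted is left out:
#include <cstdint>

struct MiniFreeSpan {
    uintptr_t first;   // Address of the next free thing, or 0 when empty.
    uintptr_t last;    // Address of the last free thing in the span.

    void* allocate(uintptr_t thingSize) {
        if (!first)
            return nullptr;         // Empty: the caller must find a new arena.
        uintptr_t thing = first;
        if (thing < last)
            first += thingSize;     // Bump-allocate within the span.
        else
            first = 0;              // Span exhausted; record it as empty.
        return reinterpret_cast<void*>(thing);
    }
};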
@ -612,7 +612,7 @@ class ArenaLists
AllAllocKindArray<BackgroundFinalizeState> backgroundFinalizeState;
/* For each arena kind, a list of arenas remaining to be swept. */
AllAllocKindArray<ArenaHeader*> arenaListsToSweep;
AllAllocKindArray<Arena*> arenaListsToSweep;
/* During incremental sweeping, a list of the arenas already swept. */
AllocKind incrementalSweptArenaKind;
@ -620,17 +620,17 @@ class ArenaLists
// Arena lists which have yet to be swept, but need additional foreground
// processing before they are swept.
ArenaHeader* gcShapeArenasToUpdate;
ArenaHeader* gcAccessorShapeArenasToUpdate;
ArenaHeader* gcScriptArenasToUpdate;
ArenaHeader* gcObjectGroupArenasToUpdate;
Arena* gcShapeArenasToUpdate;
Arena* gcAccessorShapeArenasToUpdate;
Arena* gcScriptArenasToUpdate;
Arena* gcObjectGroupArenasToUpdate;
// While sweeping type information, these lists save the arenas for the
// objects which have already been finalized in the foreground (which must
// happen at the beginning of the GC), so that type sweeping can determine
// which of the object pointers are marked.
ObjectAllocKindArray<ArenaList> savedObjectArenas;
ArenaHeader* savedEmptyObjectArenas;
Arena* savedEmptyObjectArenas;
public:
explicit ArenaLists(JSRuntime* rt) : runtime_(rt) {
@ -654,21 +654,21 @@ class ArenaLists
return reinterpret_cast<const void*>(&freeLists[thingKind]);
}
ArenaHeader* getFirstArena(AllocKind thingKind) const {
Arena* getFirstArena(AllocKind thingKind) const {
return arenaLists[thingKind].head();
}
ArenaHeader* getFirstArenaToSweep(AllocKind thingKind) const {
Arena* getFirstArenaToSweep(AllocKind thingKind) const {
return arenaListsToSweep[thingKind];
}
ArenaHeader* getFirstSweptArena(AllocKind thingKind) const {
Arena* getFirstSweptArena(AllocKind thingKind) const {
if (thingKind != incrementalSweptArenaKind)
return nullptr;
return incrementalSweptArenas.head();
}
ArenaHeader* getArenaAfterCursor(AllocKind thingKind) const {
Arena* getArenaAfterCursor(AllocKind thingKind) const {
return arenaLists[thingKind].arenaAfterCursor();
}
@ -690,8 +690,8 @@ class ArenaLists
for (auto i : AllAllocKinds()) {
/* The background finalization must have stopped at this point. */
MOZ_ASSERT(backgroundFinalizeState[i] == BFS_DONE);
for (ArenaHeader* aheader = arenaLists[i].head(); aheader; aheader = aheader->next)
aheader->unmarkAll();
for (Arena* arena = arenaLists[i].head(); arena; arena = arena->next)
arena->unmarkAll();
}
}
@ -713,10 +713,10 @@ class ArenaLists
inline void prepareForIncrementalGC(JSRuntime* rt);
/* Check if |aheader|'s arena is in use. */
bool arenaIsInUse(ArenaHeader* aheader, AllocKind kind) const {
MOZ_ASSERT(aheader);
return aheader == freeLists[kind]->getArenaUnchecked();
/* Check if this arena is in use. */
bool arenaIsInUse(Arena* arena, AllocKind kind) const {
MOZ_ASSERT(arena);
return arena == freeLists[kind]->getArenaUnchecked();
}
MOZ_ALWAYS_INLINE TenuredCell* allocateFromFreeList(AllocKind thingKind, size_t thingSize) {
@ -728,8 +728,8 @@ class ArenaLists
*/
void adoptArenas(JSRuntime* runtime, ArenaLists* fromArenaLists);
/* True if the ArenaHeader in question is found in this ArenaLists */
bool containsArena(JSRuntime* runtime, ArenaHeader* arenaHeader);
/* True if the Arena in question is found in this ArenaLists */
bool containsArena(JSRuntime* runtime, Arena* arena);
void checkEmptyFreeLists() {
#ifdef DEBUG
@ -742,7 +742,7 @@ class ArenaLists
MOZ_ASSERT(freeLists[kind]->isEmpty());
}
bool relocateArenas(Zone* zone, ArenaHeader*& relocatedListOut, JS::gcreason::Reason reason,
bool relocateArenas(Zone* zone, Arena*& relocatedListOut, JS::gcreason::Reason reason,
SliceBudget& sliceBudget, gcstats::Statistics& stats);
void queueForegroundObjectsForSweep(FreeOp* fop);
@ -752,7 +752,7 @@ class ArenaLists
bool foregroundFinalize(FreeOp* fop, AllocKind thingKind, SliceBudget& sliceBudget,
SortedArenaList& sweepList);
static void backgroundFinalize(FreeOp* fop, ArenaHeader* listHead, ArenaHeader** empty);
static void backgroundFinalize(FreeOp* fop, Arena* listHead, Arena** empty);
// When finalizing arenas, whether to keep empty arenas on the list or
// release them immediately.
@ -767,17 +767,16 @@ class ArenaLists
inline void queueForBackgroundSweep(FreeOp* fop, const FinalizePhase& phase);
inline void finalizeNow(FreeOp* fop, AllocKind thingKind,
KeepArenasEnum keepArenas, ArenaHeader** empty = nullptr);
KeepArenasEnum keepArenas, Arena** empty = nullptr);
inline void forceFinalizeNow(FreeOp* fop, AllocKind thingKind,
KeepArenasEnum keepArenas, ArenaHeader** empty = nullptr);
KeepArenasEnum keepArenas, Arena** empty = nullptr);
inline void queueForForegroundSweep(FreeOp* fop, AllocKind thingKind);
inline void queueForBackgroundSweep(FreeOp* fop, AllocKind thingKind);
inline void mergeSweptArenas(AllocKind thingKind);
TenuredCell* allocateFromArena(JS::Zone* zone, AllocKind thingKind,
AutoMaybeStartBackgroundAllocation& maybeStartBGAlloc);
inline TenuredCell* allocateFromArenaInner(JS::Zone* zone, ArenaHeader* aheader,
AllocKind kind);
inline TenuredCell* allocateFromArenaInner(JS::Zone* zone, Arena* arena, AllocKind kind);
inline void normalizeBackgroundFinalizeState(AllocKind thingKind);

@ -42,14 +42,14 @@ GCRuntime::poke()
class ArenaIter
{
ArenaHeader* aheader;
ArenaHeader* unsweptHeader;
ArenaHeader* sweptHeader;
Arena* arena;
Arena* unsweptArena;
Arena* sweptArena;
mozilla::DebugOnly<bool> initialized;
public:
ArenaIter()
: aheader(nullptr), unsweptHeader(nullptr), sweptHeader(nullptr), initialized(false) {}
: arena(nullptr), unsweptArena(nullptr), sweptArena(nullptr), initialized(false) {}
ArenaIter(JS::Zone* zone, AllocKind kind) : initialized(false) { init(zone, kind); }
@ -57,37 +57,37 @@ class ArenaIter
MOZ_ASSERT(!initialized);
MOZ_ASSERT(zone);
initialized = true;
aheader = zone->arenas.getFirstArena(kind);
unsweptHeader = zone->arenas.getFirstArenaToSweep(kind);
sweptHeader = zone->arenas.getFirstSweptArena(kind);
if (!unsweptHeader) {
unsweptHeader = sweptHeader;
sweptHeader = nullptr;
arena = zone->arenas.getFirstArena(kind);
unsweptArena = zone->arenas.getFirstArenaToSweep(kind);
sweptArena = zone->arenas.getFirstSweptArena(kind);
if (!unsweptArena) {
unsweptArena = sweptArena;
sweptArena = nullptr;
}
if (!aheader) {
aheader = unsweptHeader;
unsweptHeader = sweptHeader;
sweptHeader = nullptr;
if (!arena) {
arena = unsweptArena;
unsweptArena = sweptArena;
sweptArena = nullptr;
}
}
bool done() const {
MOZ_ASSERT(initialized);
return !aheader;
return !arena;
}
ArenaHeader* get() const {
Arena* get() const {
MOZ_ASSERT(!done());
return aheader;
return arena;
}
void next() {
MOZ_ASSERT(!done());
aheader = aheader->next;
if (!aheader) {
aheader = unsweptHeader;
unsweptHeader = sweptHeader;
sweptHeader = nullptr;
arena = arena->next;
if (!arena) {
arena = unsweptArena;
unsweptArena = sweptArena;
sweptArena = nullptr;
}
}
};
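// Aside: init() and next() above chain up to three lists (main, unswept,
// swept) behind one cursor by shifting each backup list forward when the
// current one runs out. The same shape in isolation (Link and ChainedIter are
// illustrative):
struct Link { Link* next; };

class ChainedIter {
    Link* cur;
    Link* pending1;
    Link* pending2;

    void settle() {                 // Fall through empty lists, preserving order.
        while (!cur && (pending1 || pending2)) {
            cur = pending1;
            pending1 = pending2;
            pending2 = nullptr;
        }
    }

  public:
    ChainedIter(Link* a, Link* b, Link* c) : cur(a), pending1(b), pending2(c) {
        settle();
    }
    bool done() const { return !cur; }
    Link* get() const { return cur; }
    void next() { cur = cur->next; settle(); }
};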
@ -96,7 +96,7 @@ class ArenaCellIterImpl
{
size_t firstThingOffset;
size_t thingSize;
ArenaHeader* arenaAddr;
Arena* arenaAddr;
FreeSpan span;
uint_fast16_t thing;
mozilla::DebugOnly<bool> initialized;
@ -120,25 +120,25 @@ class ArenaCellIterImpl
ArenaCellIterImpl()
: firstThingOffset(0), thingSize(0), arenaAddr(nullptr), thing(0), initialized(false) {}
explicit ArenaCellIterImpl(ArenaHeader* aheader) : initialized(false) { init(aheader); }
explicit ArenaCellIterImpl(Arena* arena) : initialized(false) { init(arena); }
void init(ArenaHeader* aheader) {
void init(Arena* arena) {
MOZ_ASSERT(!initialized);
MOZ_ASSERT(aheader);
MOZ_ASSERT(arena);
initialized = true;
AllocKind kind = aheader->getAllocKind();
AllocKind kind = arena->getAllocKind();
firstThingOffset = Arena::firstThingOffset(kind);
thingSize = Arena::thingSize(kind);
reset(aheader);
reset(arena);
}
// Use this to move from an Arena of a particular kind to another Arena of
// the same kind.
void reset(ArenaHeader* aheader) {
void reset(Arena* arena) {
MOZ_ASSERT(initialized);
MOZ_ASSERT(aheader);
arenaAddr = aheader;
span = *aheader->getFirstFreeSpan();
MOZ_ASSERT(arena);
arenaAddr = arena;
span = *arena->getFirstFreeSpan();
thing = firstThingOffset;
moveForwardIfFree();
}
@ -174,15 +174,15 @@ ArenaCellIterImpl::get<JSObject>() const;
class ArenaCellIterUnderGC : public ArenaCellIterImpl
{
public:
explicit ArenaCellIterUnderGC(ArenaHeader* aheader) : ArenaCellIterImpl(aheader) {
MOZ_ASSERT(aheader->zone->runtimeFromAnyThread()->isHeapBusy());
explicit ArenaCellIterUnderGC(Arena* arena) : ArenaCellIterImpl(arena) {
MOZ_ASSERT(arena->zone->runtimeFromAnyThread()->isHeapBusy());
}
};
class ArenaCellIterUnderFinalize : public ArenaCellIterImpl
{
public:
explicit ArenaCellIterUnderFinalize(ArenaHeader* aheader) : ArenaCellIterImpl(aheader) {}
explicit ArenaCellIterUnderFinalize(Arena* arena) : ArenaCellIterImpl(arena) {}
};
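// Aside: typical use of these iterators, mirroring the loops in RelocateArena
// and SweepArenaList earlier in this patch (the loop body here is
// illustrative):
//
//   for (ArenaCellIterUnderGC iter(arena); !iter.done(); iter.next()) {
//       TenuredCell* cell = iter.getCell();
//       // ... examine or update |cell| ...
//   }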
class ZoneCellIterImpl

@ -164,7 +164,7 @@ PropertyTree::getChild(ExclusiveContext* cx, Shape* parentArg, Handle<StackShape
TraceManuallyBarrieredEdge(zone->barrierTracer(), &tmp, "read barrier");
MOZ_ASSERT(tmp == existingShape);
} else if (zone->isGCSweeping() && !existingShape->isMarked() &&
!existingShape->arenaHeader()->allocatedDuringIncremental)
!existingShape->arena()->allocatedDuringIncremental)
{
/*
* The shape we've found is unreachable and due to be finalized, so

@ -355,9 +355,9 @@ StatsArenaCallback(JSRuntime* rt, void* data, gc::Arena* arena,
{
RuntimeStats* rtStats = static_cast<StatsClosure*>(data)->rtStats;
// The admin space includes (a) the header and (b) the padding between the
// end of the header and the start of the first GC thing.
size_t allocationSpace = gc::Arena::thingsSpan(arena->aheader.getAllocKind());
// The admin space includes (a) the header fields and (b) the padding
// between the end of the header fields and the first GC thing.
size_t allocationSpace = gc::Arena::thingsSpan(arena->getAllocKind());
rtStats->currZoneStats->gcHeapArenaAdmin += gc::ArenaSize - allocationSpace;
// We don't call the callback on unused things. So we compute the

@ -776,10 +776,10 @@ TypeSet::IsTypeAllocatedDuringIncremental(TypeSet::Type v)
bool rv;
if (v.isSingletonUnchecked()) {
JSObject* obj = v.singletonNoBarrier();
rv = obj->isTenured() && obj->asTenured().arenaHeader()->allocatedDuringIncremental;
rv = obj->isTenured() && obj->asTenured().arena()->allocatedDuringIncremental;
} else if (v.isGroupUnchecked()) {
ObjectGroup* group = v.groupNoBarrier();
rv = group->arenaHeader()->allocatedDuringIncremental;
rv = group->arena()->allocatedDuringIncremental;
} else {
rv = false;
}