Bug 1251833 - Part 3: Merge ArenaHeader into Arena. r=terrence

Emanuel Hoogeveen 2016-02-29 11:24:00 -05:00
parent 0630443dce
commit 206c937084
16 changed files with 574 additions and 637 deletions

View File

@ -57,6 +57,7 @@ const size_t ChunkRuntimeOffset = ChunkSize - sizeof(void*);
const size_t ChunkTrailerSize = 2 * sizeof(uintptr_t) + sizeof(uint64_t);
const size_t ChunkLocationOffset = ChunkSize - ChunkTrailerSize;
const size_t ArenaZoneOffset = sizeof(size_t);
+const size_t ArenaHeaderSize = sizeof(size_t) + 2 * sizeof(uintptr_t) + sizeof(size_t);
/*
 * Live objects are marked black. How many other additional colors are available

View File

@ -92,7 +92,7 @@ GCRuntime::checkIncrementalZoneState(ExclusiveContext* cx, T* t)
Zone* zone = cx->asJSContext()->zone();
MOZ_ASSERT_IF(t && zone->wasGCStarted() && (zone->isGCMarking() || zone->isGCSweeping()),
-t->asTenured().arenaHeader()->allocatedDuringIncremental);
+t->asTenured().arena()->allocatedDuringIncremental);
#endif
}
@ -323,12 +323,12 @@ ArenaLists::allocateFromArena(JS::Zone* zone, AllocKind thingKind,
maybeLock.emplace(rt);
ArenaList& al = arenaLists[thingKind];
-ArenaHeader* aheader = al.takeNextArena();
-if (aheader) {
+Arena* arena = al.takeNextArena();
+if (arena) {
// Empty arenas should be immediately freed.
-MOZ_ASSERT(!aheader->isEmpty());
-return allocateFromArenaInner(zone, aheader, thingKind);
+MOZ_ASSERT(!arena->isEmpty());
+return allocateFromArenaInner(zone, arena, thingKind);
}
// Parallel threads have their own ArenaLists, but chunks are shared;
@ -342,33 +342,33 @@ ArenaLists::allocateFromArena(JS::Zone* zone, AllocKind thingKind,
// Although our chunk should definitely have enough space for another arena,
// there are other valid reasons why Chunk::allocateArena() may fail.
-aheader = rt->gc.allocateArena(chunk, zone, thingKind, maybeLock.ref());
-if (!aheader)
+arena = rt->gc.allocateArena(chunk, zone, thingKind, maybeLock.ref());
+if (!arena)
return nullptr;
MOZ_ASSERT(!maybeLock->wasUnlocked());
MOZ_ASSERT(al.isCursorAtEnd());
-al.insertBeforeCursor(aheader);
-return allocateFromArenaInner(zone, aheader, thingKind);
+al.insertBeforeCursor(arena);
+return allocateFromArenaInner(zone, arena, thingKind);
}
inline TenuredCell*
-ArenaLists::allocateFromArenaInner(JS::Zone* zone, ArenaHeader* aheader, AllocKind kind)
+ArenaLists::allocateFromArenaInner(JS::Zone* zone, Arena* arena, AllocKind kind)
{
size_t thingSize = Arena::thingSize(kind);
-freeLists[kind] = aheader->getFirstFreeSpan();
+freeLists[kind] = arena->getFirstFreeSpan();
if (MOZ_UNLIKELY(zone->wasGCStarted()))
-zone->runtimeFromAnyThread()->gc.arenaAllocatedDuringGC(zone, aheader);
+zone->runtimeFromAnyThread()->gc.arenaAllocatedDuringGC(zone, arena);
TenuredCell* thing = freeLists[kind]->allocate(thingSize);
MOZ_ASSERT(thing); // This allocation is infallible.
return thing;
}
void
-GCRuntime::arenaAllocatedDuringGC(JS::Zone* zone, ArenaHeader* arena)
+GCRuntime::arenaAllocatedDuringGC(JS::Zone* zone, Arena* arena)
{
if (zone->needsIncrementalBarrier()) {
arena->allocatedDuringIncremental = true;

View File

@ -847,7 +847,7 @@ class GCRuntime
void freeAllLifoBlocksAfterSweeping(LifoAlloc* lifo);
// Public here for ReleaseArenaLists and FinalizeTypedArenas.
-void releaseArena(ArenaHeader* aheader, const AutoLockGC& lock);
+void releaseArena(Arena* arena, const AutoLockGC& lock);
void releaseHeldRelocatedArenas();
void releaseHeldRelocatedArenasWithoutUnlocking(const AutoLockGC& lock);
@ -878,8 +878,8 @@ class GCRuntime
friend class ArenaLists;
Chunk* pickChunk(const AutoLockGC& lock,
AutoMaybeStartBackgroundAllocation& maybeStartBGAlloc);
-ArenaHeader* allocateArena(Chunk* chunk, Zone* zone, AllocKind kind, const AutoLockGC& lock);
-void arenaAllocatedDuringGC(JS::Zone* zone, ArenaHeader* arena);
+Arena* allocateArena(Chunk* chunk, Zone* zone, AllocKind kind, const AutoLockGC& lock);
+void arenaAllocatedDuringGC(JS::Zone* zone, Arena* arena);
// Allocator internals
bool gcIfNeededPerAllocation(JSContext* cx);
@ -961,15 +961,15 @@ class GCRuntime
void endCompactPhase(JS::gcreason::Reason reason);
void sweepTypesAfterCompacting(Zone* zone);
void sweepZoneAfterCompacting(Zone* zone);
-bool relocateArenas(Zone* zone, JS::gcreason::Reason reason, ArenaHeader*& relocatedListOut,
+bool relocateArenas(Zone* zone, JS::gcreason::Reason reason, Arena*& relocatedListOut,
SliceBudget& sliceBudget);
void updateAllCellPointersParallel(MovingTracer* trc, Zone* zone);
void updateAllCellPointersSerial(MovingTracer* trc, Zone* zone);
void updatePointersToRelocatedCells(Zone* zone);
-void protectAndHoldArenas(ArenaHeader* arenaList);
+void protectAndHoldArenas(Arena* arenaList);
void unprotectHeldRelocatedArenas();
-void releaseRelocatedArenas(ArenaHeader* arenaList);
-void releaseRelocatedArenasWithoutUnlocking(ArenaHeader* arenaList, const AutoLockGC& lock);
+void releaseRelocatedArenas(Arena* arenaList);
+void releaseRelocatedArenasWithoutUnlocking(Arena* arenaList, const AutoLockGC& lock);
void finishCollection(JS::gcreason::Reason reason);
void computeNonIncrementalMarkingForValidation();
@ -1177,14 +1177,14 @@ class GCRuntime
/*
* List head of arenas allocated during the sweep phase.
*/
-js::gc::ArenaHeader* arenasAllocatedDuringSweep;
+js::gc::Arena* arenasAllocatedDuringSweep;
/*
* Incremental compacting state.
*/
bool startedCompacting;
js::gc::ZoneList zonesToMaybeCompact;
-ArenaHeader* relocatedArenasToRelease;
+Arena* relocatedArenasToRelease;
#ifdef JS_GC_ZEAL
js::gc::MarkingValidator* markingValidator;

View File

@ -60,10 +60,9 @@ TraceManuallyBarrieredGenericPointerEdge(JSTracer* trc, gc::Cell** thingp, const
namespace gc {
-struct Arena;
+class Arena;
class ArenaList;
class SortedArenaList;
-struct ArenaHeader;
struct Chunk;
/*
@ -251,7 +250,7 @@ struct Cell
};
// A GC TenuredCell gets behaviors that are valid for things in the Tenured
-// heap, such as access to the arena header and mark bits.
+// heap, such as access to the arena and mark bits.
class TenuredCell : public Cell
{
public:
@ -269,8 +268,8 @@ class TenuredCell : public Cell
// used tagged.
static MOZ_ALWAYS_INLINE bool isNullLike(const Cell* thing) { return !thing; }
-// Access to the arena header.
-inline ArenaHeader* arenaHeader() const;
+// Access to the arena.
+inline Arena* arena() const;
inline AllocKind getAllocKind() const;
inline JS::TraceKind getTraceKind() const;
inline JS::Zone* zone() const;
@ -323,8 +322,8 @@ const size_t ArenaBitmapWords = ArenaBitmapBits / JS_BITS_PER_WORD;
*/
class FreeSpan
{
+friend class Arena;
friend class ArenaCellIterImpl;
-friend struct ArenaHeader;
uint16_t first;
uint16_t last;
@ -332,8 +331,8 @@ class FreeSpan
public:
// This inits just |first| and |last|; if the span is non-empty it doesn't
// do anything with the next span stored at |last|.
-void initBounds(uintptr_t firstArg, uintptr_t lastArg, const ArenaHeader* aheader) {
-checkRange(firstArg, lastArg, aheader);
+void initBounds(uintptr_t firstArg, uintptr_t lastArg, const Arena* arena) {
+checkRange(firstArg, lastArg, arena);
first = firstArg;
last = lastArg;
}
@ -346,19 +345,19 @@ class FreeSpan
// This sets |first| and |last|, and also sets the next span stored at
// |last| as empty. (As a result, |firstArg| and |lastArg| cannot represent
// an empty span.)
-void initFinal(uintptr_t firstArg, uintptr_t lastArg, const ArenaHeader* aheader) {
-initBounds(firstArg, lastArg, aheader);
-FreeSpan* last = nextSpanUnchecked(aheader);
+void initFinal(uintptr_t firstArg, uintptr_t lastArg, const Arena* arena) {
+initBounds(firstArg, lastArg, arena);
+FreeSpan* last = nextSpanUnchecked(arena);
last->initAsEmpty();
-checkSpan(aheader);
+checkSpan(arena);
}
bool isEmpty() const {
return !first;
}
-ArenaHeader* getArenaUnchecked() { return reinterpret_cast<ArenaHeader*>(this); }
-inline ArenaHeader* getArena();
+Arena* getArenaUnchecked() { return reinterpret_cast<Arena*>(this); }
+inline Arena* getArena();
static size_t offsetOfFirst() {
return offsetof(FreeSpan, first);
@ -369,20 +368,20 @@ class FreeSpan
}
// Like nextSpan(), but no checking of the following span is done.
-FreeSpan* nextSpanUnchecked(const ArenaHeader* aheader) const {
-MOZ_ASSERT(aheader && !isEmpty());
-return reinterpret_cast<FreeSpan*>(uintptr_t(aheader) + last);
+FreeSpan* nextSpanUnchecked(const Arena* arena) const {
+MOZ_ASSERT(arena && !isEmpty());
+return reinterpret_cast<FreeSpan*>(uintptr_t(arena) + last);
}
-const FreeSpan* nextSpan(const ArenaHeader* aheader) const {
-checkSpan(aheader);
-return nextSpanUnchecked(aheader);
+const FreeSpan* nextSpan(const Arena* arena) const {
+checkSpan(arena);
+return nextSpanUnchecked(arena);
}
MOZ_ALWAYS_INLINE TenuredCell* allocate(size_t thingSize) {
// Eschew the usual checks, because this might be the placeholder span.
// If this is somehow an invalid, non-empty span, checkSpan() will catch it.
-ArenaHeader* arena = getArenaUnchecked();
+Arena* arena = getArenaUnchecked();
checkSpan(arena);
uintptr_t thing = uintptr_t(arena) + first;
if (first < last) {
@ -402,74 +401,135 @@ class FreeSpan
return reinterpret_cast<TenuredCell*>(thing);
}
-inline void checkSpan(const ArenaHeader* aheader) const;
-inline void checkRange(uintptr_t first, uintptr_t last, const ArenaHeader*) const;
+inline void checkSpan(const Arena* arena) const;
+inline void checkRange(uintptr_t first, uintptr_t last, const Arena* arena) const;
};
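To make the free-span scheme above concrete, here is a minimal standalone sketch (not part of this patch; simplified names, and the "first == 0 means exhausted" convention is an invention of the sketch) of how a span hands out fixed-size things by bumping |first| toward |last|:

// Simplified sketch, not the real FreeSpan: offsets are relative to the
// start of a 4 KiB arena, and |last| holds the offset of the last free thing.
#include <cstdint>
#include <cstdio>

struct ToySpan {
    uint16_t first;  // offset of the next free thing; 0 means "span exhausted"
    uint16_t last;   // offset of the last free thing in this span

    // Bump-allocate one thing of |thingSize| bytes; return its offset, or 0.
    uint16_t allocate(uint16_t thingSize) {
        if (first == 0)
            return 0;            // nothing left in this span
        uint16_t thing = first;
        if (first < last)
            first += thingSize;  // still room: bump to the next thing
        else
            first = 0;           // we just handed out the last thing
        return thing;
    }
};

int main() {
    ToySpan span{64, 4096 - 32};  // hypothetical first/last offsets
    for (int i = 0; i < 3; i++)
        printf("allocated offset %u\n", unsigned(span.allocate(32)));
}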
-/* Every arena has a header. */
-struct ArenaHeader
+/*
+ * Arenas are the allocation units of the tenured heap in the GC. An arena
+ * is 4kiB in size and 4kiB-aligned. It starts with several header fields
+ * followed by some bytes of padding. The remainder of the arena is filled
+ * with GC things of a particular AllocKind. The padding ensures that the
+ * GC thing array ends exactly at the end of the arena:
+ *
+ * <----------------------------------------------> = ArenaSize bytes
+ * +---------------+---------+----+----+-----+----+
+ * | header fields | padding | T0 | T1 | ... | Tn |
+ * +---------------+---------+----+----+-----+----+
+ * <-------------------------> = first thing offset
+ */
+class Arena
{
-friend struct Arena;
+static JS_FRIEND_DATA(const uint32_t) ThingSizes[];
+static JS_FRIEND_DATA(const uint32_t) FirstThingOffsets[];
+static JS_FRIEND_DATA(const uint32_t) ThingsPerArena[];
private:
/*
-* The first span of free things in the arena.
+* The first span of free things in the arena. Most of these spans are
+* stored as offsets in free regions of the data array, and most operations
+* on FreeSpans take an Arena pointer for safety. However, the FreeSpans
+* used for allocation are stored here, at the start of an Arena, and use
+* their own address to grab the next span within the same Arena.
*/
FreeSpan firstFreeSpan;
public:
+/*
+ * The zone that this Arena is contained within, when allocated. The offset
+ * of this field must match the ArenaZoneOffset stored in js/HeapAPI.h,
+ * as is statically asserted below.
+ */
JS::Zone* zone;
/*
-* ArenaHeader::next has two purposes: when unallocated, it points to the
-* next available Arena's header. When allocated, it points to the next
-* arena of the same size class and compartment.
+* Arena::next has two purposes: when unallocated, it points to the next
+* available Arena. When allocated, it points to the next Arena in the same
+* zone and with the same alloc kind.
*/
-ArenaHeader* next;
+Arena* next;
private:
/*
-* One of AllocKind constants or AllocKind::LIMIT when the arena does not
-* contain any GC things and is on the list of empty arenas in the GC
+* One of the AllocKind constants or AllocKind::LIMIT when the arena does
+* not contain any GC things and is on the list of empty arenas in the GC
* chunk.
*
-* We use 8 bits for the allocKind so the compiler can use byte-level memory
-* instructions to access it.
+* We use 8 bits for the alloc kind so the compiler can use byte-level
+* memory instructions to access it.
*/
size_t allocKind : 8;
+public:
/*
* When collecting we sometimes need to keep an auxillary list of arenas,
* for which we use the following fields. This happens for several reasons:
*
-* When recursive marking uses too much stack the marking is delayed and the
-* corresponding arenas are put into a stack. To distinguish the bottom of
-* the stack from the arenas not present in the stack we use the
+* When recursive marking uses too much stack, the marking is delayed and
+* the corresponding arenas are put into a stack. To distinguish the bottom
+* of the stack from the arenas not present in the stack we use the
* markOverflow flag to tag arenas on the stack.
*
* Delayed marking is also used for arenas that we allocate into during an
* incremental GC. In this case, we intend to mark all the objects in the
* arena, and it's faster to do this marking in bulk.
*
-* When sweeping we keep track of which arenas have been allocated since the
-* end of the mark phase. This allows us to tell whether a pointer to an
-* unmarked object is yet to be finalized or has already been reallocated.
-* We set the allocatedDuringIncremental flag for this and clear it at the
-* end of the sweep phase.
+* When sweeping we keep track of which arenas have been allocated since
+* the end of the mark phase. This allows us to tell whether a pointer to
+* an unmarked object is yet to be finalized or has already been
+* reallocated. We set the allocatedDuringIncremental flag for this and
+* clear it at the end of the sweep phase.
*
-* To minimize the ArenaHeader size we record the next linkage as
-* address() >> ArenaShift and pack it with the allocKind field and the flags.
+* To minimize the size of the header fields we record the next linkage as
+* address() >> ArenaShift and pack it with the allocKind and the flags.
*/
-public:
size_t hasDelayedMarking : 1;
size_t allocatedDuringIncremental : 1;
size_t markOverflow : 1;
size_t auxNextLink : JS_BITS_PER_WORD - 8 - 1 - 1 - 1;
static_assert(ArenaShift >= 8 + 1 + 1 + 1,
-"ArenaHeader::auxNextLink packing assumes that ArenaShift has enough bits to "
-"cover allocKind and hasDelayedMarking.");
+"Arena::auxNextLink packing assumes that ArenaShift has "
+"enough bits to cover allocKind and hasDelayedMarking.");
+/*
+ * The size of data should be |ArenaSize - offsetof(data)|, but the offset
+ * is not yet known to the compiler, so we do it by hand. |firstFreeSpan|
+ * takes up 8 bytes on 64-bit due to alignment requirements; the rest are
+ * obvious. This constant is stored in js/HeapAPI.h.
+ */
+uint8_t data[ArenaSize - ArenaHeaderSize];
-ArenaHeader() { setAsNotAllocated(); }
+void init(JS::Zone* zoneArg, AllocKind kind) {
MOZ_ASSERT(firstFreeSpan.isEmpty());
MOZ_ASSERT(!zone);
MOZ_ASSERT(!allocated());
MOZ_ASSERT(!hasDelayedMarking);
MOZ_ASSERT(!allocatedDuringIncremental);
MOZ_ASSERT(!markOverflow);
MOZ_ASSERT(!auxNextLink);
zone = zoneArg;
allocKind = size_t(kind);
setAsFullyUnused();
}
// Sets |firstFreeSpan| to the Arena's entire valid range, and
// also sets the next span stored at |firstFreeSpan.last| as empty.
void setAsFullyUnused() {
AllocKind kind = getAllocKind();
firstFreeSpan.first = firstThingOffset(kind);
firstFreeSpan.last = lastThingOffset(kind);
FreeSpan* last = firstFreeSpan.nextSpanUnchecked(this);
last->initAsEmpty();
}
void setAsNotAllocated() {
firstFreeSpan.initAsEmpty();
zone = nullptr;
allocKind = size_t(AllocKind::LIMIT);
hasDelayedMarking = 0;
allocatedDuringIncremental = 0;
markOverflow = 0;
auxNextLink = 0;
}
uintptr_t address() const {
checkAddress();
@ -485,47 +545,37 @@ struct ArenaHeader
return IsValidAllocKind(AllocKind(allocKind));
}
// This sets |firstFreeSpan| to the Arena's entire valid range, and
// also sets the next span stored at |firstFreeSpan.last| as empty.
inline void setAsFullyUnused();
void init(JS::Zone* zoneArg, AllocKind kind) {
MOZ_ASSERT(!allocated());
MOZ_ASSERT(!markOverflow);
MOZ_ASSERT(!allocatedDuringIncremental);
MOZ_ASSERT(!hasDelayedMarking);
zone = zoneArg;
static_assert(size_t(AllocKind::LIMIT) <= 255,
"We must be able to fit the allockind into uint8_t.");
allocKind = size_t(kind);
setAsFullyUnused();
}
void setAsNotAllocated() {
allocKind = size_t(AllocKind::LIMIT);
markOverflow = 0;
allocatedDuringIncremental = 0;
hasDelayedMarking = 0;
auxNextLink = 0;
firstFreeSpan.initAsEmpty();
}
Arena* getArena() { return reinterpret_cast<Arena*>(address()); }
FreeSpan* getFirstFreeSpan() { return &firstFreeSpan; }
AllocKind getAllocKind() const { AllocKind getAllocKind() const {
MOZ_ASSERT(allocated()); MOZ_ASSERT(allocated());
return AllocKind(allocKind); return AllocKind(allocKind);
} }
inline size_t getThingSize() const; FreeSpan* getFirstFreeSpan() { return &firstFreeSpan; }
bool hasFreeThings() const { static size_t thingSize(AllocKind kind) { return ThingSizes[size_t(kind)]; }
return !firstFreeSpan.isEmpty(); static size_t thingsPerArena(AllocKind kind) { return ThingsPerArena[size_t(kind)]; }
static size_t thingsSpan(AllocKind kind) { return thingsPerArena(kind) * thingSize(kind); }
static size_t firstThingOffset(AllocKind kind) { return FirstThingOffsets[size_t(kind)]; }
static size_t lastThingOffset(AllocKind kind) { return ArenaSize - thingSize(kind); }
size_t getThingSize() const { return thingSize(getAllocKind()); }
size_t getThingsPerArena() const { return thingsPerArena(getAllocKind()); }
size_t getThingsSpan() const { return getThingsPerArena() * getThingSize(); }
uintptr_t thingsStart() const { return address() + firstThingOffset(getAllocKind()); }
uintptr_t thingsEnd() const { return address() + ArenaSize; }
bool isEmpty() const {
// Arena is empty if its first span covers the whole arena.
firstFreeSpan.checkSpan(this);
AllocKind kind = getAllocKind();
return firstFreeSpan.first == firstThingOffset(kind) &&
firstFreeSpan.last == lastThingOffset(kind);
} }
bool hasFreeThings() const { return !firstFreeSpan.isEmpty(); }
size_t numFreeThings(size_t thingSize) const { size_t numFreeThings(size_t thingSize) const {
firstFreeSpan.checkSpan(this); firstFreeSpan.checkSpan(this);
size_t numFree = 0; size_t numFree = 0;
@ -535,7 +585,8 @@ struct ArenaHeader
return numFree; return numFree;
} }
inline bool isEmpty() const; size_t countFreeCells() { return numFreeThings(getThingSize()); }
size_t countUsedCells() { return getThingsPerArena() - countFreeCells(); }
bool inFreeList(uintptr_t thing) { bool inFreeList(uintptr_t thing) {
uintptr_t base = address(); uintptr_t base = address();
@ -552,94 +603,88 @@ struct ArenaHeader
return false; return false;
} }
inline ArenaHeader* getNextDelayedMarking() const;
inline void setNextDelayedMarking(ArenaHeader* aheader);
inline void unsetDelayedMarking();
inline ArenaHeader* getNextAllocDuringSweep() const;
inline void setNextAllocDuringSweep(ArenaHeader* aheader);
inline void unsetAllocDuringSweep();
inline void setNextArenaToUpdate(ArenaHeader* aheader);
inline ArenaHeader* getNextArenaToUpdateAndUnlink();
void unmarkAll();
size_t countUsedCells();
size_t countFreeCells() { return numFreeThings(getThingSize()); }
};
static_assert(ArenaZoneOffset == offsetof(ArenaHeader, zone),
"The hardcoded API zone offset must match the actual offset.");
struct Arena
{
/*
* Layout of an arena:
* An arena is 4K in size and 4K-aligned. It starts with the ArenaHeader
* descriptor followed by some pad bytes. The remainder of the arena is
* filled with the array of T things. The pad bytes ensure that the thing
* array ends exactly at the end of the arena.
*
* +-------------+-----+----+----+-----+----+
* | ArenaHeader | pad | T0 | T1 | ... | Tn |
* +-------------+-----+----+----+-----+----+
*
* <----------------------------------------> = ArenaSize bytes
* <-------------------> = first thing offset
*/
ArenaHeader aheader;
uint8_t data[ArenaSize - sizeof(ArenaHeader)];
private:
static JS_FRIEND_DATA(const uint32_t) ThingSizes[];
static JS_FRIEND_DATA(const uint32_t) FirstThingOffsets[];
static const uint32_t ThingsPerArena[];
public:
static void staticAsserts();
static size_t thingSize(AllocKind kind) {
return ThingSizes[size_t(kind)];
}
static size_t firstThingOffset(AllocKind kind) {
return FirstThingOffsets[size_t(kind)];
}
static size_t thingsPerArena(AllocKind kind) {
return ThingsPerArena[size_t(kind)];
}
static size_t thingsSpan(AllocKind kind) {
return thingsPerArena(kind) * thingSize(kind);
}
static bool isAligned(uintptr_t thing, size_t thingSize) { static bool isAligned(uintptr_t thing, size_t thingSize) {
/* Things ends at the arena end. */ /* Things ends at the arena end. */
uintptr_t tailOffset = (ArenaSize - thing) & ArenaMask; uintptr_t tailOffset = ArenaSize - (thing & ArenaMask);
return tailOffset % thingSize == 0; return tailOffset % thingSize == 0;
} }
uintptr_t address() const { Arena* getNextDelayedMarking() const {
return aheader.address(); MOZ_ASSERT(hasDelayedMarking);
return reinterpret_cast<Arena*>(auxNextLink << ArenaShift);
} }
uintptr_t thingsStart(AllocKind thingKind) { void setNextDelayedMarking(Arena* arena) {
return address() + firstThingOffset(thingKind); MOZ_ASSERT(!(uintptr_t(arena) & ArenaMask));
MOZ_ASSERT(!auxNextLink && !hasDelayedMarking);
hasDelayedMarking = 1;
if (arena)
auxNextLink = arena->address() >> ArenaShift;
} }
uintptr_t thingsEnd() { void unsetDelayedMarking() {
return address() + ArenaSize; MOZ_ASSERT(hasDelayedMarking);
hasDelayedMarking = 0;
auxNextLink = 0;
}
Arena* getNextAllocDuringSweep() const {
MOZ_ASSERT(allocatedDuringIncremental);
return reinterpret_cast<Arena*>(auxNextLink << ArenaShift);
}
void setNextAllocDuringSweep(Arena* arena) {
MOZ_ASSERT(!(uintptr_t(arena) & ArenaMask));
MOZ_ASSERT(!auxNextLink && !allocatedDuringIncremental);
allocatedDuringIncremental = 1;
if (arena)
auxNextLink = arena->address() >> ArenaShift;
}
void unsetAllocDuringSweep() {
MOZ_ASSERT(allocatedDuringIncremental);
allocatedDuringIncremental = 0;
auxNextLink = 0;
}
Arena* getNextArenaToUpdateAndUnlink() {
MOZ_ASSERT(!hasDelayedMarking && !allocatedDuringIncremental && !markOverflow);
Arena* next = reinterpret_cast<Arena*>(auxNextLink << ArenaShift);
auxNextLink = 0;
return next;
}
void setNextArenaToUpdate(Arena* arena) {
MOZ_ASSERT(!(uintptr_t(arena) & ArenaMask));
MOZ_ASSERT(!hasDelayedMarking && !allocatedDuringIncremental && !markOverflow);
MOZ_ASSERT(!auxNextLink);
auxNextLink = arena->address() >> ArenaShift;
} }
template <typename T> template <typename T>
size_t finalize(FreeOp* fop, AllocKind thingKind, size_t thingSize); size_t finalize(FreeOp* fop, AllocKind thingKind, size_t thingSize);
static void staticAsserts();
void unmarkAll();
}; };
static_assert(sizeof(Arena) == ArenaSize, "The hardcoded arena size must match the struct size."); static_assert(ArenaZoneOffset == offsetof(Arena, zone),
"The hardcoded API zone offset must match the actual offset.");
static_assert(sizeof(Arena) == ArenaSize, "The hardcoded API header size (ArenaHeaderSize) "
"must match the actual size of the header fields.");
inline Arena*
FreeSpan::getArena()
{
Arena* arena = getArenaUnchecked();
arena->checkAddress();
return arena;
}
inline void inline void
FreeSpan::checkSpan(const ArenaHeader* aheader) const FreeSpan::checkSpan(const Arena* arena) const
{ {
#ifdef DEBUG #ifdef DEBUG
if (!first) { if (!first) {
@ -647,41 +692,33 @@ FreeSpan::checkSpan(const ArenaHeader* aheader) const
return; return;
} }
aheader->checkAddress(); arena->checkAddress();
checkRange(first, last, arena);
checkRange(first, last, aheader);
// If there's a following span, it must have a higher address, // If there's a following span, it must have a higher address,
// and the gap must be at least 2 * thingSize. // and the gap must be at least 2 * thingSize.
const FreeSpan* next = nextSpanUnchecked(aheader); const FreeSpan* next = nextSpanUnchecked(arena);
if (next->first) { if (next->first) {
checkRange(next->first, next->last, aheader); checkRange(next->first, next->last, arena);
size_t thingSize = aheader->getThingSize(); size_t thingSize = arena->getThingSize();
MOZ_ASSERT(last + 2 * thingSize <= next->first); MOZ_ASSERT(last + 2 * thingSize <= next->first);
} }
#endif #endif
} }
inline void inline void
FreeSpan::checkRange(uintptr_t first, uintptr_t last, const ArenaHeader* aheader) const FreeSpan::checkRange(uintptr_t first, uintptr_t last, const Arena* arena) const
{ {
#ifdef DEBUG #ifdef DEBUG
MOZ_ASSERT(aheader); MOZ_ASSERT(arena);
AllocKind thingKind = aheader->getAllocKind();
size_t thingSize = Arena::thingSize(thingKind);
MOZ_ASSERT(first <= last); MOZ_ASSERT(first <= last);
AllocKind thingKind = arena->getAllocKind();
MOZ_ASSERT(first >= Arena::firstThingOffset(thingKind)); MOZ_ASSERT(first >= Arena::firstThingOffset(thingKind));
MOZ_ASSERT(last <= ArenaSize - thingSize); MOZ_ASSERT(last <= Arena::lastThingOffset(thingKind));
MOZ_ASSERT((last - first) % thingSize == 0); MOZ_ASSERT((last - first) % Arena::thingSize(thingKind) == 0);
#endif #endif
} }
inline size_t
ArenaHeader::getThingSize() const
{
return Arena::thingSize(getAllocKind());
}
/* /*
* The tail of the chunk info is shared between all chunks in the system, both * The tail of the chunk info is shared between all chunks in the system, both
* nursery and tenured. This structure is locatable from any GC pointer by * nursery and tenured. This structure is locatable from any GC pointer by
@ -728,8 +765,8 @@ struct ChunkInfo
Chunk* prev; Chunk* prev;
public: public:
/* Free arenas are linked together with aheader.next. */ /* Free arenas are linked together with arena.next. */
ArenaHeader* freeArenasHead; Arena* freeArenasHead;
#if JS_BITS_PER_WORD == 32 #if JS_BITS_PER_WORD == 32
/* /*
@ -854,14 +891,14 @@ struct ChunkBitmap
memset((void*)bitmap, 0, sizeof(bitmap)); memset((void*)bitmap, 0, sizeof(bitmap));
} }
uintptr_t* arenaBits(ArenaHeader* aheader) { uintptr_t* arenaBits(Arena* arena) {
static_assert(ArenaBitmapBits == ArenaBitmapWords * JS_BITS_PER_WORD, static_assert(ArenaBitmapBits == ArenaBitmapWords * JS_BITS_PER_WORD,
"We assume that the part of the bitmap corresponding to the arena " "We assume that the part of the bitmap corresponding to the arena "
"has the exact number of words so we do not need to deal with a word " "has the exact number of words so we do not need to deal with a word "
"that covers bits from two arenas."); "that covers bits from two arenas.");
uintptr_t* word, unused; uintptr_t* word, unused;
getMarkWordAndMask(reinterpret_cast<Cell*>(aheader->address()), BLACK, &word, &unused); getMarkWordAndMask(reinterpret_cast<Cell*>(arena->address()), BLACK, &word, &unused);
return word; return word;
} }
}; };
@ -932,11 +969,10 @@ struct Chunk
return info.trailer.storeBuffer; return info.trailer.storeBuffer;
} }
ArenaHeader* allocateArena(JSRuntime* rt, JS::Zone* zone, AllocKind kind, Arena* allocateArena(JSRuntime* rt, JS::Zone* zone, AllocKind kind, const AutoLockGC& lock);
const AutoLockGC& lock);
void releaseArena(JSRuntime* rt, ArenaHeader* aheader, const AutoLockGC& lock); void releaseArena(JSRuntime* rt, Arena* arena, const AutoLockGC& lock);
void recycleArena(ArenaHeader* aheader, SortedArenaList& dest, size_t thingsPerArena); void recycleArena(Arena* arena, SortedArenaList& dest, size_t thingsPerArena);
bool decommitOneFreeArena(JSRuntime* rt, AutoLockGC& lock); bool decommitOneFreeArena(JSRuntime* rt, AutoLockGC& lock);
void decommitAllArenasWithoutUnlocking(const AutoLockGC& lock); void decommitAllArenasWithoutUnlocking(const AutoLockGC& lock);
@ -950,17 +986,17 @@ struct Chunk
/* Search for a decommitted arena to allocate. */ /* Search for a decommitted arena to allocate. */
unsigned findDecommittedArenaOffset(); unsigned findDecommittedArenaOffset();
ArenaHeader* fetchNextDecommittedArena(); Arena* fetchNextDecommittedArena();
void addArenaToFreeList(JSRuntime* rt, ArenaHeader* aheader); void addArenaToFreeList(JSRuntime* rt, Arena* arena);
void addArenaToDecommittedList(JSRuntime* rt, const ArenaHeader* aheader); void addArenaToDecommittedList(JSRuntime* rt, const Arena* arena);
void updateChunkListAfterAlloc(JSRuntime* rt, const AutoLockGC& lock); void updateChunkListAfterAlloc(JSRuntime* rt, const AutoLockGC& lock);
void updateChunkListAfterFree(JSRuntime* rt, const AutoLockGC& lock); void updateChunkListAfterFree(JSRuntime* rt, const AutoLockGC& lock);
public: public:
/* Unlink and return the freeArenasHead. */ /* Unlink and return the freeArenasHead. */
inline ArenaHeader* fetchNextFreeArena(JSRuntime* rt); inline Arena* fetchNextFreeArena(JSRuntime* rt);
}; };
static_assert(sizeof(Chunk) == ChunkSize, static_assert(sizeof(Chunk) == ChunkSize,
@ -1024,16 +1060,8 @@ class HeapUsage
} }
}; };
inline ArenaHeader*
FreeSpan::getArena()
{
ArenaHeader* arena = getArenaUnchecked();
arena->checkAddress();
return arena;
}
inline void inline void
ArenaHeader::checkAddress() const Arena::checkAddress() const
{ {
mozilla::DebugOnly<uintptr_t> addr = uintptr_t(this); mozilla::DebugOnly<uintptr_t> addr = uintptr_t(this);
MOZ_ASSERT(addr); MOZ_ASSERT(addr);
@ -1042,110 +1070,17 @@ ArenaHeader::checkAddress() const
} }
inline Chunk* inline Chunk*
ArenaHeader::chunk() const Arena::chunk() const
{ {
return Chunk::fromAddress(address()); return Chunk::fromAddress(address());
} }
inline bool
ArenaHeader::isEmpty() const
{
/* Arena is empty if its first span covers the whole arena. */
firstFreeSpan.checkSpan(this);
AllocKind kind = getAllocKind();
size_t firstThingOffset = Arena::firstThingOffset(kind);
size_t lastThingOffset = ArenaSize - Arena::thingSize(kind);
return firstFreeSpan.first == firstThingOffset && firstFreeSpan.last == lastThingOffset;
}
inline void
ArenaHeader::setAsFullyUnused()
{
AllocKind kind = getAllocKind();
firstFreeSpan.first = Arena::firstThingOffset(kind);
firstFreeSpan.last = ArenaSize - Arena::thingSize(kind);
FreeSpan* last = firstFreeSpan.nextSpanUnchecked(this);
last->initAsEmpty();
}
inline ArenaHeader*
ArenaHeader::getNextDelayedMarking() const
{
MOZ_ASSERT(hasDelayedMarking);
return &reinterpret_cast<Arena*>(auxNextLink << ArenaShift)->aheader;
}
inline void
ArenaHeader::setNextDelayedMarking(ArenaHeader* aheader)
{
MOZ_ASSERT(!(uintptr_t(aheader) & ArenaMask));
MOZ_ASSERT(!auxNextLink && !hasDelayedMarking);
hasDelayedMarking = 1;
if (aheader)
auxNextLink = aheader->address() >> ArenaShift;
}
inline void
ArenaHeader::unsetDelayedMarking()
{
MOZ_ASSERT(hasDelayedMarking);
hasDelayedMarking = 0;
auxNextLink = 0;
}
inline ArenaHeader*
ArenaHeader::getNextAllocDuringSweep() const
{
MOZ_ASSERT(allocatedDuringIncremental);
return &reinterpret_cast<Arena*>(auxNextLink << ArenaShift)->aheader;
}
inline void
ArenaHeader::setNextAllocDuringSweep(ArenaHeader* aheader)
{
MOZ_ASSERT(!auxNextLink && !allocatedDuringIncremental);
allocatedDuringIncremental = 1;
if (aheader)
auxNextLink = aheader->address() >> ArenaShift;
}
inline void
ArenaHeader::unsetAllocDuringSweep()
{
MOZ_ASSERT(allocatedDuringIncremental);
allocatedDuringIncremental = 0;
auxNextLink = 0;
}
inline ArenaHeader*
ArenaHeader::getNextArenaToUpdateAndUnlink()
{
MOZ_ASSERT(!hasDelayedMarking && !allocatedDuringIncremental && !markOverflow);
ArenaHeader* next = &reinterpret_cast<Arena*>(auxNextLink << ArenaShift)->aheader;
auxNextLink = 0;
return next;
}
inline void
ArenaHeader::setNextArenaToUpdate(ArenaHeader* aheader)
{
MOZ_ASSERT(!hasDelayedMarking && !allocatedDuringIncremental && !markOverflow);
MOZ_ASSERT(!auxNextLink);
auxNextLink = aheader->address() >> ArenaShift;
}
inline size_t
ArenaHeader::countUsedCells()
{
return Arena::thingsPerArena(getAllocKind()) - countFreeCells();
}
static void static void
AssertValidColor(const TenuredCell* thing, uint32_t color) AssertValidColor(const TenuredCell* thing, uint32_t color)
{ {
#ifdef DEBUG #ifdef DEBUG
ArenaHeader* aheader = thing->arenaHeader(); Arena* arena = thing->arena();
MOZ_ASSERT(color < aheader->getThingSize() / CellSize); MOZ_ASSERT(color < arena->getThingSize() / CellSize);
#endif #endif
} }
@ -1220,11 +1155,11 @@ Cell::getTraceKind() const
} }
inline bool inline bool
InFreeList(ArenaHeader* aheader, void* thing) InFreeList(Arena* arena, void* thing)
{ {
uintptr_t addr = reinterpret_cast<uintptr_t>(thing); uintptr_t addr = reinterpret_cast<uintptr_t>(thing);
MOZ_ASSERT(Arena::isAligned(addr, aheader->getThingSize())); MOZ_ASSERT(Arena::isAligned(addr, arena->getThingSize()));
return aheader->inFreeList(addr); return arena->inFreeList(addr);
} }
/* static */ MOZ_ALWAYS_INLINE bool /* static */ MOZ_ALWAYS_INLINE bool
@ -1249,7 +1184,7 @@ TenuredCell::fromPointer(const void* ptr)
bool bool
TenuredCell::isMarked(uint32_t color /* = BLACK */) const TenuredCell::isMarked(uint32_t color /* = BLACK */) const
{ {
MOZ_ASSERT(arenaHeader()->allocated()); MOZ_ASSERT(arena()->allocated());
AssertValidColor(this, color); AssertValidColor(this, color);
return chunk()->bitmap.isMarked(this, color); return chunk()->bitmap.isMarked(this, color);
} }
@ -1277,19 +1212,19 @@ TenuredCell::copyMarkBitsFrom(const TenuredCell* src)
bitmap.copyMarkBit(this, src, GRAY); bitmap.copyMarkBit(this, src, GRAY);
} }
inline ArenaHeader* inline Arena*
TenuredCell::arenaHeader() const TenuredCell::arena() const
{ {
MOZ_ASSERT(isTenured()); MOZ_ASSERT(isTenured());
uintptr_t addr = address(); uintptr_t addr = address();
addr &= ~ArenaMask; addr &= ~ArenaMask;
return reinterpret_cast<ArenaHeader*>(addr); return reinterpret_cast<Arena*>(addr);
} }
AllocKind AllocKind
TenuredCell::getAllocKind() const TenuredCell::getAllocKind() const
{ {
return arenaHeader()->getAllocKind(); return arena()->getAllocKind();
} }
JS::TraceKind JS::TraceKind
@ -1301,7 +1236,7 @@ TenuredCell::getTraceKind() const
JS::Zone* JS::Zone*
TenuredCell::zone() const TenuredCell::zone() const
{ {
JS::Zone* zone = arenaHeader()->zone; JS::Zone* zone = arena()->zone;
MOZ_ASSERT(CurrentThreadCanAccessZone(zone)); MOZ_ASSERT(CurrentThreadCanAccessZone(zone));
return zone; return zone;
} }
@ -1309,13 +1244,13 @@ TenuredCell::zone() const
JS::Zone* JS::Zone*
TenuredCell::zoneFromAnyThread() const TenuredCell::zoneFromAnyThread() const
{ {
return arenaHeader()->zone; return arena()->zone;
} }
bool bool
TenuredCell::isInsideZone(JS::Zone* zone) const TenuredCell::isInsideZone(JS::Zone* zone) const
{ {
return zone == arenaHeader()->zone; return zone == arena()->zone;
} }
/* static */ MOZ_ALWAYS_INLINE void /* static */ MOZ_ALWAYS_INLINE void
@ -1384,7 +1319,7 @@ Cell::isAligned() const
bool bool
TenuredCell::isAligned() const TenuredCell::isAligned() const
{ {
return Arena::isAligned(address(), arenaHeader()->getThingSize()); return Arena::isAligned(address(), arena()->getThingSize());
} }
#endif #endif
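For reference, a standalone sketch of the auxNextLink packing used by the delayed-marking and sweep lists above (not the real class; ArenaShift = 12 and the exact field widths are assumptions of the sketch):

#include <cassert>
#include <cstdint>

constexpr unsigned ArenaShift = 12;  // log2(4096); assumed arena size
constexpr uintptr_t ArenaMask = (uintptr_t(1) << ArenaShift) - 1;

struct ToyArena {
    uintptr_t allocKind : 8;
    uintptr_t hasDelayedMarking : 1;
    uintptr_t allocatedDuringIncremental : 1;
    uintptr_t markOverflow : 1;
    uintptr_t auxNextLink : sizeof(uintptr_t) * 8 - 8 - 1 - 1 - 1;

    // Store another arena's address; only the bits above ArenaShift are kept,
    // which is safe because arenas are ArenaSize-aligned.
    void setNext(const ToyArena* next) {
        assert((uintptr_t(next) & ArenaMask) == 0);
        auxNextLink = uintptr_t(next) >> ArenaShift;
    }
    ToyArena* getNext() const {
        return reinterpret_cast<ToyArena*>(uintptr_t(auxNextLink) << ArenaShift);
    }
};

alignas(1 << ArenaShift) static ToyArena a, b;

int main() {
    a.setNext(&b);
    assert(a.getNext() == &b);  // the shifted link round-trips to the address
}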

View File

@ -43,9 +43,9 @@ IterateCompartmentsArenasCells(JSRuntime* rt, Zone* zone, void* data,
size_t thingSize = Arena::thingSize(thingKind);
for (ArenaIter aiter(zone, thingKind); !aiter.done(); aiter.next()) {
-ArenaHeader* aheader = aiter.get();
-(*arenaCallback)(rt, data, aheader->getArena(), traceKind, thingSize);
-for (ArenaCellIterUnderGC iter(aheader); !iter.done(); iter.next())
+Arena* arena = aiter.get();
+(*arenaCallback)(rt, data, arena, traceKind, thingSize);
+for (ArenaCellIterUnderGC iter(arena); !iter.done(); iter.next())
(*cellCallback)(rt, data, iter.getCell(), traceKind, thingSize);
}
}

View File

@ -233,7 +233,7 @@ js::CheckTracedThing(JSTracer* trc, T* thing)
* and concurrently modifiying the free list. * and concurrently modifiying the free list.
*/ */
MOZ_ASSERT_IF(IsThingPoisoned(thing) && rt->isHeapBusy() && !rt->gc.isBackgroundSweeping(), MOZ_ASSERT_IF(IsThingPoisoned(thing) && rt->isHeapBusy() && !rt->gc.isBackgroundSweeping(),
!InFreeList(thing->asTenured().arenaHeader(), thing)); !InFreeList(thing->asTenured().arena(), thing));
#endif #endif
} }
@ -727,7 +727,7 @@ MustSkipMarking<JSObject*>(JSObject* obj)
return true; return true;
// Don't mark things outside a zone if we are in a per-zone GC. It is // Don't mark things outside a zone if we are in a per-zone GC. It is
// faster to check our own arena header, which we can do since we know that // faster to check our own arena, which we can do since we know that
// the object is tenured. // the object is tenured.
return !TenuredCell::fromPointer(obj)->zone()->isGCMarking(); return !TenuredCell::fromPointer(obj)->zone()->isGCMarking();
} }
@ -1817,13 +1817,13 @@ GCMarker::reset()
MOZ_ASSERT(isMarkStackEmpty()); MOZ_ASSERT(isMarkStackEmpty());
while (unmarkedArenaStackTop) { while (unmarkedArenaStackTop) {
ArenaHeader* aheader = unmarkedArenaStackTop; Arena* arena = unmarkedArenaStackTop;
MOZ_ASSERT(aheader->hasDelayedMarking); MOZ_ASSERT(arena->hasDelayedMarking);
MOZ_ASSERT(markLaterArenas); MOZ_ASSERT(markLaterArenas);
unmarkedArenaStackTop = aheader->getNextDelayedMarking(); unmarkedArenaStackTop = arena->getNextDelayedMarking();
aheader->unsetDelayedMarking(); arena->unsetDelayedMarking();
aheader->markOverflow = 0; arena->markOverflow = 0;
aheader->allocatedDuringIncremental = 0; arena->allocatedDuringIncremental = 0;
markLaterArenas--; markLaterArenas--;
} }
MOZ_ASSERT(isDrained()); MOZ_ASSERT(isDrained());
@ -1868,27 +1868,27 @@ GCMarker::leaveWeakMarkingMode()
} }
void void
GCMarker::markDelayedChildren(ArenaHeader* aheader) GCMarker::markDelayedChildren(Arena* arena)
{ {
if (aheader->markOverflow) { if (arena->markOverflow) {
bool always = aheader->allocatedDuringIncremental; bool always = arena->allocatedDuringIncremental;
aheader->markOverflow = 0; arena->markOverflow = 0;
for (ArenaCellIterUnderGC i(aheader); !i.done(); i.next()) { for (ArenaCellIterUnderGC i(arena); !i.done(); i.next()) {
TenuredCell* t = i.getCell(); TenuredCell* t = i.getCell();
if (always || t->isMarked()) { if (always || t->isMarked()) {
t->markIfUnmarked(); t->markIfUnmarked();
js::TraceChildren(this, t, MapAllocToTraceKind(aheader->getAllocKind())); js::TraceChildren(this, t, MapAllocToTraceKind(arena->getAllocKind()));
} }
} }
} else { } else {
MOZ_ASSERT(aheader->allocatedDuringIncremental); MOZ_ASSERT(arena->allocatedDuringIncremental);
PushArena(this, aheader); PushArena(this, arena);
} }
aheader->allocatedDuringIncremental = 0; arena->allocatedDuringIncremental = 0;
/* /*
* Note that during an incremental GC we may still be allocating into * Note that during an incremental GC we may still be allocating into
* aheader. However, prepareForIncrementalGC sets the * the arena. However, prepareForIncrementalGC sets the
* allocatedDuringIncremental flag if we continue marking. * allocatedDuringIncremental flag if we continue marking.
*/ */
} }
@ -1906,13 +1906,13 @@ GCMarker::markDelayedChildren(SliceBudget& budget)
* marking of its things. For that we pop arena from the stack and * marking of its things. For that we pop arena from the stack and
* clear its hasDelayedMarking flag before we begin the marking. * clear its hasDelayedMarking flag before we begin the marking.
*/ */
ArenaHeader* aheader = unmarkedArenaStackTop; Arena* arena = unmarkedArenaStackTop;
MOZ_ASSERT(aheader->hasDelayedMarking); MOZ_ASSERT(arena->hasDelayedMarking);
MOZ_ASSERT(markLaterArenas); MOZ_ASSERT(markLaterArenas);
unmarkedArenaStackTop = aheader->getNextDelayedMarking(); unmarkedArenaStackTop = arena->getNextDelayedMarking();
aheader->unsetDelayedMarking(); arena->unsetDelayedMarking();
markLaterArenas--; markLaterArenas--;
markDelayedChildren(aheader); markDelayedChildren(arena);
budget.step(150); budget.step(150);
if (budget.isOverBudget()) if (budget.isOverBudget())
@ -1925,22 +1925,23 @@ GCMarker::markDelayedChildren(SliceBudget& budget)
template<typename T> template<typename T>
static void static void
PushArenaTyped(GCMarker* gcmarker, ArenaHeader* aheader) PushArenaTyped(GCMarker* gcmarker, Arena* arena)
{ {
for (ArenaCellIterUnderGC i(aheader); !i.done(); i.next()) for (ArenaCellIterUnderGC i(arena); !i.done(); i.next())
gcmarker->traverse(i.get<T>()); gcmarker->traverse(i.get<T>());
} }
struct PushArenaFunctor { struct PushArenaFunctor {
template <typename T> void operator()(GCMarker* gcmarker, ArenaHeader* aheader) { template <typename T> void operator()(GCMarker* gcmarker, Arena* arena) {
PushArenaTyped<T>(gcmarker, aheader); PushArenaTyped<T>(gcmarker, arena);
} }
}; };
void void
gc::PushArena(GCMarker* gcmarker, ArenaHeader* aheader) gc::PushArena(GCMarker* gcmarker, Arena* arena)
{ {
DispatchTraceKindTyped(PushArenaFunctor(), MapAllocToTraceKind(aheader->getAllocKind()), gcmarker, aheader); DispatchTraceKindTyped(PushArenaFunctor(),
MapAllocToTraceKind(arena->getAllocKind()), gcmarker, arena);
} }
#ifdef DEBUG #ifdef DEBUG
@ -2410,7 +2411,7 @@ js::gc::IsAboutToBeFinalizedDuringSweep(TenuredCell& tenured)
MOZ_ASSERT(!IsInsideNursery(&tenured)); MOZ_ASSERT(!IsInsideNursery(&tenured));
MOZ_ASSERT(!tenured.runtimeFromAnyThread()->isHeapMinorCollecting()); MOZ_ASSERT(!tenured.runtimeFromAnyThread()->isHeapMinorCollecting());
MOZ_ASSERT(tenured.zoneFromAnyThread()->isGCSweeping()); MOZ_ASSERT(tenured.zoneFromAnyThread()->isGCSweeping());
if (tenured.arenaHeader()->allocatedDuringIncremental) if (tenured.arena()->allocatedDuringIncremental)
return false; return false;
return !tenured.isMarked(); return !tenured.isMarked();
} }
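A toy illustration of the delayed-marking stack discipline used by GCMarker::reset and markDelayedChildren above (simplified: an ordinary pointer stands in for the packed auxNextLink field):

#include <cassert>

// Toy stand-ins: the real code threads arenas through auxNextLink instead.
struct ToyArena {
    bool hasDelayedMarking = false;
    ToyArena* nextDelayed = nullptr;
};

struct ToyMarker {
    ToyArena* unmarkedArenaStackTop = nullptr;

    void delayMarking(ToyArena* arena) {
        assert(!arena->hasDelayedMarking);
        arena->hasDelayedMarking = true;
        arena->nextDelayed = unmarkedArenaStackTop;  // push onto the stack
        unmarkedArenaStackTop = arena;
    }
    void drain() {
        while (unmarkedArenaStackTop) {
            ToyArena* arena = unmarkedArenaStackTop;  // pop before processing,
            unmarkedArenaStackTop = arena->nextDelayed;
            arena->hasDelayedMarking = false;         // so marking may re-delay it
            arena->nextDelayed = nullptr;
            // ... mark the arena's cells here ...
        }
    }
};

int main() {
    ToyArena a, b;
    ToyMarker m;
    m.delayMarking(&a);
    m.delayMarking(&b);
    m.drain();
    assert(!m.unmarkedArenaStackTop && !a.hasDelayedMarking);
}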

View File

@ -32,7 +32,7 @@ class NativeObject;
class ObjectGroup; class ObjectGroup;
class WeakMapBase; class WeakMapBase;
namespace gc { namespace gc {
struct ArenaHeader; class Arena;
} // namespace gc } // namespace gc
namespace jit { namespace jit {
class JitCode; class JitCode;
@ -214,9 +214,9 @@ class GCMarker : public JSTracer
linearWeakMarkingDisabled_ = true; linearWeakMarkingDisabled_ = true;
} }
void delayMarkingArena(gc::ArenaHeader* aheader); void delayMarkingArena(gc::Arena* arena);
void delayMarkingChildren(const void* thing); void delayMarkingChildren(const void* thing);
void markDelayedChildren(gc::ArenaHeader* aheader); void markDelayedChildren(gc::Arena* arena);
bool markDelayedChildren(SliceBudget& budget); bool markDelayedChildren(SliceBudget& budget);
bool hasDelayedChildren() const { bool hasDelayedChildren() const {
return !!unmarkedArenaStackTop; return !!unmarkedArenaStackTop;
@ -283,7 +283,7 @@ class GCMarker : public JSTracer
void eagerlyMarkChildren(Shape* shape); void eagerlyMarkChildren(Shape* shape);
void lazilyMarkChildren(ObjectGroup* group); void lazilyMarkChildren(ObjectGroup* group);
// We may not have concrete types yet, so this has to be out of the header. // We may not have concrete types yet, so this has to be outside the header.
template <typename T> template <typename T>
void dispatchToTraceChildren(T* thing); void dispatchToTraceChildren(T* thing);
@ -331,7 +331,7 @@ class GCMarker : public JSTracer
uint32_t color; uint32_t color;
/* Pointer to the top of the stack of arenas we are delaying marking on. */ /* Pointer to the top of the stack of arenas we are delaying marking on. */
js::gc::ArenaHeader* unmarkedArenaStackTop; js::gc::Arena* unmarkedArenaStackTop;
/* /*
* If the weakKeys table OOMs, disable the linear algorithm and fall back * If the weakKeys table OOMs, disable the linear algorithm and fall back
@ -364,7 +364,7 @@ namespace gc {
/*** Special Cases ***/ /*** Special Cases ***/
void void
PushArena(GCMarker* gcmarker, ArenaHeader* aheader); PushArena(GCMarker* gcmarker, Arena* arena);
/*** Liveness ***/ /*** Liveness ***/

View File

@ -250,7 +250,7 @@ oom:
static bool
IsMarkedOrAllocated(TenuredCell* cell)
{
-return cell->isMarked() || cell->arenaHeader()->allocatedDuringIncremental;
+return cell->isMarked() || cell->arena()->allocatedDuringIncremental;
}
struct CheckEdgeTracer : public JS::CallbackTracer {

View File

@ -870,7 +870,7 @@ bool
JitcodeGlobalEntry::BaseEntry::isJitcodeMarkedFromAnyThread()
{
return IsMarkedUnbarriered(&jitcode_) ||
-jitcode_->arenaHeader()->allocatedDuringIncremental;
+jitcode_->arena()->allocatedDuringIncremental;
}
bool
@ -900,7 +900,7 @@ bool
JitcodeGlobalEntry::BaselineEntry::isMarkedFromAnyThread()
{
return IsMarkedUnbarriered(&script_) ||
-script_->arenaHeader()->allocatedDuringIncremental;
+script_->arena()->allocatedDuringIncremental;
}
template <class ShouldMarkProvider>
@ -968,7 +968,7 @@ JitcodeGlobalEntry::IonEntry::isMarkedFromAnyThread()
{
for (unsigned i = 0; i < numScripts(); i++) {
if (!IsMarkedUnbarriered(&sizedScriptList()->pairs[i].script) &&
-!sizedScriptList()->pairs[i].script->arenaHeader()->allocatedDuringIncremental)
+!sizedScriptList()->pairs[i].script->arena()->allocatedDuringIncremental)
{
return false;
}

View File

@ -1008,7 +1008,7 @@ DumpHeapVisitArena(JSRuntime* rt, void* data, gc::Arena* arena,
{
DumpHeapTracer* dtrc = static_cast<DumpHeapTracer*>(data);
fprintf(dtrc->output, "# arena allockind=%u size=%u\n",
-unsigned(arena->aheader.getAllocKind()), unsigned(thingSize));
+unsigned(arena->getAllocKind()), unsigned(thingSize));
}
static void

View File

@ -307,7 +307,7 @@ FreeSpan ArenaLists::placeholder;
#undef CHECK_THING_SIZE_INNER
#undef CHECK_THING_SIZE
-#define OFFSET(type) uint32_t(sizeof(ArenaHeader) + (ArenaSize - sizeof(ArenaHeader)) % sizeof(type))
+#define OFFSET(type) uint32_t(ArenaHeaderSize + (ArenaSize - ArenaHeaderSize) % sizeof(type))
const uint32_t Arena::FirstThingOffsets[] = {
OFFSET(JSFunction), /* AllocKind::FUNCTION */
@ -339,7 +339,7 @@ const uint32_t Arena::FirstThingOffsets[] = {
#undef OFFSET
-#define COUNT(type) uint32_t((ArenaSize - sizeof(ArenaHeader)) / sizeof(type))
+#define COUNT(type) uint32_t((ArenaSize - ArenaHeaderSize) / sizeof(type))
const uint32_t Arena::ThingsPerArena[] = {
COUNT(JSFunction), /* AllocKind::FUNCTION */
@ -452,7 +452,7 @@ ArenaCellIterImpl::get<JSObject>() const
} }
void void
ArenaHeader::unmarkAll() Arena::unmarkAll()
{ {
uintptr_t* word = chunk()->bitmap.arenaBits(this); uintptr_t* word = chunk()->bitmap.arenaBits(this);
memset(word, 0, ArenaBitmapWords * sizeof(uintptr_t)); memset(word, 0, ArenaBitmapWords * sizeof(uintptr_t));
@ -461,6 +461,8 @@ ArenaHeader::unmarkAll()
/* static */ void /* static */ void
Arena::staticAsserts() Arena::staticAsserts()
{ {
static_assert(size_t(AllocKind::LIMIT) <= 255,
"We must be able to fit the allockind into uint8_t.");
static_assert(JS_ARRAY_LENGTH(ThingSizes) == size_t(AllocKind::LIMIT), static_assert(JS_ARRAY_LENGTH(ThingSizes) == size_t(AllocKind::LIMIT),
"We haven't defined all thing sizes."); "We haven't defined all thing sizes.");
static_assert(JS_ARRAY_LENGTH(FirstThingOffsets) == size_t(AllocKind::LIMIT), static_assert(JS_ARRAY_LENGTH(FirstThingOffsets) == size_t(AllocKind::LIMIT),
@ -477,12 +479,12 @@ Arena::finalize(FreeOp* fop, AllocKind thingKind, size_t thingSize)
MOZ_ASSERT(thingSize % CellSize == 0); MOZ_ASSERT(thingSize % CellSize == 0);
MOZ_ASSERT(thingSize <= 255); MOZ_ASSERT(thingSize <= 255);
MOZ_ASSERT(aheader.allocated()); MOZ_ASSERT(allocated());
MOZ_ASSERT(thingKind == aheader.getAllocKind()); MOZ_ASSERT(thingKind == getAllocKind());
MOZ_ASSERT(thingSize == aheader.getThingSize()); MOZ_ASSERT(thingSize == getThingSize());
MOZ_ASSERT(!aheader.hasDelayedMarking); MOZ_ASSERT(!hasDelayedMarking);
MOZ_ASSERT(!aheader.markOverflow); MOZ_ASSERT(!markOverflow);
MOZ_ASSERT(!aheader.allocatedDuringIncremental); MOZ_ASSERT(!allocatedDuringIncremental);
uint_fast16_t firstThing = firstThingOffset(thingKind); uint_fast16_t firstThing = firstThingOffset(thingKind);
uint_fast16_t firstThingOrSuccessorOfLastMarkedThing = firstThing; uint_fast16_t firstThingOrSuccessorOfLastMarkedThing = firstThing;
@ -493,14 +495,14 @@ Arena::finalize(FreeOp* fop, AllocKind thingKind, size_t thingSize)
size_t nmarked = 0; size_t nmarked = 0;
if (MOZ_UNLIKELY(MemProfiler::enabled())) { if (MOZ_UNLIKELY(MemProfiler::enabled())) {
for (ArenaCellIterUnderFinalize i(&aheader); !i.done(); i.next()) { for (ArenaCellIterUnderFinalize i(this); !i.done(); i.next()) {
T* t = i.get<T>(); T* t = i.get<T>();
if (t->asTenured().isMarked()) if (t->asTenured().isMarked())
MemProfiler::MarkTenured(reinterpret_cast<void*>(t)); MemProfiler::MarkTenured(reinterpret_cast<void*>(t));
} }
} }
for (ArenaCellIterUnderFinalize i(&aheader); !i.done(); i.next()) { for (ArenaCellIterUnderFinalize i(this); !i.done(); i.next()) {
T* t = i.get<T>(); T* t = i.get<T>();
if (t->asTenured().isMarked()) { if (t->asTenured().isMarked()) {
uint_fast16_t thing = uintptr_t(t) & ArenaMask; uint_fast16_t thing = uintptr_t(t) & ArenaMask;
@ -508,8 +510,8 @@ Arena::finalize(FreeOp* fop, AllocKind thingKind, size_t thingSize)
// We just finished passing over one or more free things, // We just finished passing over one or more free things,
// so record a new FreeSpan. // so record a new FreeSpan.
newListTail->initBounds(firstThingOrSuccessorOfLastMarkedThing, newListTail->initBounds(firstThingOrSuccessorOfLastMarkedThing,
thing - thingSize, &aheader); thing - thingSize, this);
newListTail = newListTail->nextSpanUnchecked(&aheader); newListTail = newListTail->nextSpanUnchecked(this);
} }
firstThingOrSuccessorOfLastMarkedThing = thing + thingSize; firstThingOrSuccessorOfLastMarkedThing = thing + thingSize;
nmarked++; nmarked++;
@ -521,7 +523,7 @@ Arena::finalize(FreeOp* fop, AllocKind thingKind, size_t thingSize)
} }
if (nmarked == 0) { if (nmarked == 0) {
// Do nothing. The caller will update the arena header appropriately. // Do nothing. The caller will update the arena appropriately.
MOZ_ASSERT(newListTail == &newListHead); MOZ_ASSERT(newListTail == &newListHead);
JS_EXTRA_POISON(data, JS_SWEPT_TENURED_PATTERN, sizeof(data)); JS_EXTRA_POISON(data, JS_SWEPT_TENURED_PATTERN, sizeof(data));
return nmarked; return nmarked;
@ -535,12 +537,12 @@ Arena::finalize(FreeOp* fop, AllocKind thingKind, size_t thingSize)
newListTail->initAsEmpty(); newListTail->initAsEmpty();
} else { } else {
// Otherwise, end the list with a span that covers the final stretch of free things. // Otherwise, end the list with a span that covers the final stretch of free things.
newListTail->initFinal(firstThingOrSuccessorOfLastMarkedThing, lastThing, &aheader); newListTail->initFinal(firstThingOrSuccessorOfLastMarkedThing, lastThing, this);
} }
aheader.firstFreeSpan = newListHead; firstFreeSpan = newListHead;
#ifdef DEBUG #ifdef DEBUG
size_t nfree = aheader.numFreeThings(thingSize); size_t nfree = numFreeThings(thingSize);
MOZ_ASSERT(nfree + nmarked == thingsPerArena(thingKind)); MOZ_ASSERT(nfree + nmarked == thingsPerArena(thingKind));
#endif #endif
return nmarked; return nmarked;
@ -552,7 +554,7 @@ Arena::finalize(FreeOp* fop, AllocKind thingKind, size_t thingSize)
template<typename T> template<typename T>
static inline bool static inline bool
FinalizeTypedArenas(FreeOp* fop, FinalizeTypedArenas(FreeOp* fop,
ArenaHeader** src, Arena** src,
SortedArenaList& dest, SortedArenaList& dest,
AllocKind thingKind, AllocKind thingKind,
SliceBudget& budget, SliceBudget& budget,
@ -570,17 +572,17 @@ FinalizeTypedArenas(FreeOp* fop,
size_t thingSize = Arena::thingSize(thingKind); size_t thingSize = Arena::thingSize(thingKind);
size_t thingsPerArena = Arena::thingsPerArena(thingKind); size_t thingsPerArena = Arena::thingsPerArena(thingKind);
while (ArenaHeader* aheader = *src) { while (Arena* arena = *src) {
*src = aheader->next; *src = arena->next;
size_t nmarked = aheader->getArena()->finalize<T>(fop, thingKind, thingSize); size_t nmarked = arena->finalize<T>(fop, thingKind, thingSize);
size_t nfree = thingsPerArena - nmarked; size_t nfree = thingsPerArena - nmarked;
if (nmarked) if (nmarked)
dest.insertAt(aheader, nfree); dest.insertAt(arena, nfree);
else if (keepArenas == ArenaLists::KEEP_ARENAS) else if (keepArenas == ArenaLists::KEEP_ARENAS)
aheader->chunk()->recycleArena(aheader, dest, thingsPerArena); arena->chunk()->recycleArena(arena, dest, thingsPerArena);
else else
fop->runtime()->gc.releaseArena(aheader, maybeLock.ref()); fop->runtime()->gc.releaseArena(arena, maybeLock.ref());
budget.step(thingsPerArena); budget.step(thingsPerArena);
if (budget.isOverBudget()) if (budget.isOverBudget())
@ -596,7 +598,7 @@ FinalizeTypedArenas(FreeOp* fop,
*/ */
static bool static bool
FinalizeArenas(FreeOp* fop, FinalizeArenas(FreeOp* fop,
ArenaHeader** src, Arena** src,
SortedArenaList& dest, SortedArenaList& dest,
AllocKind thingKind, AllocKind thingKind,
SliceBudget& budget, SliceBudget& budget,
@ -860,7 +862,7 @@ Chunk::findDecommittedArenaOffset()
MOZ_CRASH("No decommitted arenas found."); MOZ_CRASH("No decommitted arenas found.");
} }
ArenaHeader* Arena*
Chunk::fetchNextDecommittedArena() Chunk::fetchNextDecommittedArena()
{ {
MOZ_ASSERT(info.numArenasFreeCommitted == 0); MOZ_ASSERT(info.numArenasFreeCommitted == 0);
@ -873,9 +875,9 @@ Chunk::fetchNextDecommittedArena()
Arena* arena = &arenas[offset]; Arena* arena = &arenas[offset];
MarkPagesInUse(arena, ArenaSize); MarkPagesInUse(arena, ArenaSize);
arena->aheader.setAsNotAllocated(); arena->setAsNotAllocated();
return &arena->aheader; return arena;
} }
inline void inline void
@ -885,30 +887,30 @@ GCRuntime::updateOnFreeArenaAlloc(const ChunkInfo& info)
--numArenasFreeCommitted; --numArenasFreeCommitted;
} }
inline ArenaHeader* inline Arena*
Chunk::fetchNextFreeArena(JSRuntime* rt) Chunk::fetchNextFreeArena(JSRuntime* rt)
{ {
MOZ_ASSERT(info.numArenasFreeCommitted > 0); MOZ_ASSERT(info.numArenasFreeCommitted > 0);
MOZ_ASSERT(info.numArenasFreeCommitted <= info.numArenasFree); MOZ_ASSERT(info.numArenasFreeCommitted <= info.numArenasFree);
ArenaHeader* aheader = info.freeArenasHead; Arena* arena = info.freeArenasHead;
info.freeArenasHead = aheader->next; info.freeArenasHead = arena->next;
--info.numArenasFreeCommitted; --info.numArenasFreeCommitted;
--info.numArenasFree; --info.numArenasFree;
rt->gc.updateOnFreeArenaAlloc(info); rt->gc.updateOnFreeArenaAlloc(info);
return aheader; return arena;
} }
ArenaHeader* Arena*
Chunk::allocateArena(JSRuntime* rt, Zone* zone, AllocKind thingKind, const AutoLockGC& lock) Chunk::allocateArena(JSRuntime* rt, Zone* zone, AllocKind thingKind, const AutoLockGC& lock)
{ {
ArenaHeader* aheader = info.numArenasFreeCommitted > 0 Arena* arena = info.numArenasFreeCommitted > 0
? fetchNextFreeArena(rt) ? fetchNextFreeArena(rt)
: fetchNextDecommittedArena(); : fetchNextDecommittedArena();
aheader->init(zone, thingKind); arena->init(zone, thingKind);
updateChunkListAfterAlloc(rt, lock); updateChunkListAfterAlloc(rt, lock);
return aheader; return arena;
} }
inline void inline void
@ -918,38 +920,38 @@ GCRuntime::updateOnArenaFree(const ChunkInfo& info)
} }
void void
Chunk::addArenaToFreeList(JSRuntime* rt, ArenaHeader* aheader) Chunk::addArenaToFreeList(JSRuntime* rt, Arena* arena)
{ {
MOZ_ASSERT(!aheader->allocated()); MOZ_ASSERT(!arena->allocated());
aheader->next = info.freeArenasHead; arena->next = info.freeArenasHead;
info.freeArenasHead = aheader; info.freeArenasHead = arena;
++info.numArenasFreeCommitted; ++info.numArenasFreeCommitted;
++info.numArenasFree; ++info.numArenasFree;
rt->gc.updateOnArenaFree(info); rt->gc.updateOnArenaFree(info);
} }
void void
Chunk::addArenaToDecommittedList(JSRuntime* rt, const ArenaHeader* aheader) Chunk::addArenaToDecommittedList(JSRuntime* rt, const Arena* arena)
{ {
++info.numArenasFree; ++info.numArenasFree;
decommittedArenas.set(Chunk::arenaIndex(aheader->address())); decommittedArenas.set(Chunk::arenaIndex(arena->address()));
} }
void void
Chunk::recycleArena(ArenaHeader* aheader, SortedArenaList& dest, size_t thingsPerArena) Chunk::recycleArena(Arena* arena, SortedArenaList& dest, size_t thingsPerArena)
{ {
aheader->setAsFullyUnused(); arena->setAsFullyUnused();
dest.insertAt(aheader, thingsPerArena); dest.insertAt(arena, thingsPerArena);
} }
void void
Chunk::releaseArena(JSRuntime* rt, ArenaHeader* aheader, const AutoLockGC& lock) Chunk::releaseArena(JSRuntime* rt, Arena* arena, const AutoLockGC& lock)
{ {
MOZ_ASSERT(aheader->allocated()); MOZ_ASSERT(arena->allocated());
MOZ_ASSERT(!aheader->hasDelayedMarking); MOZ_ASSERT(!arena->hasDelayedMarking);
aheader->setAsNotAllocated(); arena->setAsNotAllocated();
addArenaToFreeList(rt, aheader); addArenaToFreeList(rt, arena);
updateChunkListAfterFree(rt, lock); updateChunkListAfterFree(rt, lock);
} }
@ -957,19 +959,19 @@ bool
Chunk::decommitOneFreeArena(JSRuntime* rt, AutoLockGC& lock) Chunk::decommitOneFreeArena(JSRuntime* rt, AutoLockGC& lock)
{ {
MOZ_ASSERT(info.numArenasFreeCommitted > 0); MOZ_ASSERT(info.numArenasFreeCommitted > 0);
ArenaHeader* aheader = fetchNextFreeArena(rt); Arena* arena = fetchNextFreeArena(rt);
updateChunkListAfterAlloc(rt, lock); updateChunkListAfterAlloc(rt, lock);
bool ok; bool ok;
{ {
AutoUnlockGC unlock(lock); AutoUnlockGC unlock(lock);
ok = MarkPagesUnused(aheader->getArena(), ArenaSize); ok = MarkPagesUnused(arena, ArenaSize);
} }
if (ok) if (ok)
addArenaToDecommittedList(rt, aheader); addArenaToDecommittedList(rt, arena);
else else
addArenaToFreeList(rt, aheader); addArenaToFreeList(rt, arena);
updateChunkListAfterFree(rt, lock); updateChunkListAfterFree(rt, lock);
return ok; return ok;
@ -979,7 +981,7 @@ void
Chunk::decommitAllArenasWithoutUnlocking(const AutoLockGC& lock) Chunk::decommitAllArenasWithoutUnlocking(const AutoLockGC& lock)
{ {
for (size_t i = 0; i < ArenasPerChunk; ++i) { for (size_t i = 0; i < ArenasPerChunk; ++i) {
if (decommittedArenas.get(i) || arenas[i].aheader.allocated()) if (decommittedArenas.get(i) || arenas[i].allocated())
continue; continue;
if (MarkPagesUnused(&arenas[i], ArenaSize)) { if (MarkPagesUnused(&arenas[i], ArenaSize)) {
@ -1068,7 +1070,7 @@ GCRuntime::pickChunk(const AutoLockGC& lock,
return chunk; return chunk;
} }
ArenaHeader* Arena*
GCRuntime::allocateArena(Chunk* chunk, Zone* zone, AllocKind thingKind, const AutoLockGC& lock) GCRuntime::allocateArena(Chunk* chunk, Zone* zone, AllocKind thingKind, const AutoLockGC& lock)
{ {
MOZ_ASSERT(chunk->hasAvailableArenas()); MOZ_ASSERT(chunk->hasAvailableArenas());
@ -1081,23 +1083,23 @@ GCRuntime::allocateArena(Chunk* chunk, Zone* zone, AllocKind thingKind, const Au
return nullptr; return nullptr;
} }
ArenaHeader* aheader = chunk->allocateArena(rt, zone, thingKind, lock); Arena* arena = chunk->allocateArena(rt, zone, thingKind, lock);
zone->usage.addGCArena(); zone->usage.addGCArena();
// Trigger an incremental slice if needed. // Trigger an incremental slice if needed.
if (!rt->isHeapMinorCollecting() && !isHeapCompacting()) if (!rt->isHeapMinorCollecting() && !isHeapCompacting())
maybeAllocTriggerZoneGC(zone, lock); maybeAllocTriggerZoneGC(zone, lock);
return aheader; return arena;
} }
void void
GCRuntime::releaseArena(ArenaHeader* aheader, const AutoLockGC& lock) GCRuntime::releaseArena(Arena* arena, const AutoLockGC& lock)
{ {
aheader->zone->usage.removeGCArena(); arena->zone->usage.removeGCArena();
if (isBackgroundSweeping()) if (isBackgroundSweeping())
aheader->zone->threshold.updateForRemovedArena(tunables); arena->zone->threshold.updateForRemovedArena(tunables);
return aheader->chunk()->releaseArena(rt, aheader, lock); return arena->chunk()->releaseArena(rt, arena, lock);
} }
GCRuntime::GCRuntime(JSRuntime* rt) : GCRuntime::GCRuntime(JSRuntime* rt) :
@ -1928,14 +1930,14 @@ ZoneHeapThreshold::updateForRemovedArena(const GCSchedulingTunables& tunables)
} }
void void
GCMarker::delayMarkingArena(ArenaHeader* aheader) GCMarker::delayMarkingArena(Arena* arena)
{ {
if (aheader->hasDelayedMarking) { if (arena->hasDelayedMarking) {
/* Arena already scheduled to be marked later */ /* Arena already scheduled to be marked later */
return; return;
} }
aheader->setNextDelayedMarking(unmarkedArenaStackTop); arena->setNextDelayedMarking(unmarkedArenaStackTop);
unmarkedArenaStackTop = aheader; unmarkedArenaStackTop = arena;
markLaterArenas++; markLaterArenas++;
} }
@ -1943,8 +1945,8 @@ void
GCMarker::delayMarkingChildren(const void* thing) GCMarker::delayMarkingChildren(const void* thing)
{ {
const TenuredCell* cell = TenuredCell::fromPointer(thing); const TenuredCell* cell = TenuredCell::fromPointer(thing);
cell->arenaHeader()->markOverflow = 1; cell->arena()->markOverflow = 1;
delayMarkingArena(cell->arenaHeader()); delayMarkingArena(cell->arena());
} }
inline void inline void
@ -1954,9 +1956,9 @@ ArenaLists::prepareForIncrementalGC(JSRuntime* rt)
FreeSpan* span = freeLists[i]; FreeSpan* span = freeLists[i];
if (span != &placeholder) { if (span != &placeholder) {
if (!span->isEmpty()) { if (!span->isEmpty()) {
ArenaHeader* aheader = span->getArena(); Arena* arena = span->getArena();
aheader->allocatedDuringIncremental = true; arena->allocatedDuringIncremental = true;
rt->gc.marker.delayMarkingArena(aheader); rt->gc.marker.delayMarkingArena(arena);
} else { } else {
freeLists[i] = &placeholder; freeLists[i] = &placeholder;
} }
@ -2022,16 +2024,16 @@ CanRelocateAllocKind(AllocKind kind)
return IsObjectAllocKind(kind); return IsObjectAllocKind(kind);
} }
ArenaHeader* Arena*
ArenaList::removeRemainingArenas(ArenaHeader** arenap) ArenaList::removeRemainingArenas(Arena** arenap)
{ {
// This is only ever called to remove arenas that are after the cursor, so // This is only ever called to remove arenas that are after the cursor, so
// we don't need to update it. // we don't need to update it.
#ifdef DEBUG #ifdef DEBUG
for (ArenaHeader* arena = *arenap; arena; arena = arena->next) for (Arena* arena = *arenap; arena; arena = arena->next)
MOZ_ASSERT(cursorp_ != &arena->next); MOZ_ASSERT(cursorp_ != &arena->next);
#endif #endif
ArenaHeader* remainingArenas = *arenap; Arena* remainingArenas = *arenap;
*arenap = nullptr; *arenap = nullptr;
check(); check();
return remainingArenas; return remainingArenas;
@ -2047,7 +2049,7 @@ ShouldRelocateAllArenas(JS::gcreason::Reason reason)
* Choose which arenas to relocate all cells from. Return an arena cursor that * Choose which arenas to relocate all cells from. Return an arena cursor that
* can be passed to removeRemainingArenas(). * can be passed to removeRemainingArenas().
*/ */
ArenaHeader** Arena**
ArenaList::pickArenasToRelocate(size_t& arenaTotalOut, size_t& relocTotalOut) ArenaList::pickArenasToRelocate(size_t& arenaTotalOut, size_t& relocTotalOut)
{ {
// Relocate the greatest number of arenas such that the number of used cells // Relocate the greatest number of arenas such that the number of used cells
@ -2065,17 +2067,17 @@ ArenaList::pickArenasToRelocate(size_t& arenaTotalOut, size_t& relocTotalOut)
if (isCursorAtEnd()) if (isCursorAtEnd())
return nullptr; return nullptr;
ArenaHeader** arenap = cursorp_; // Next arena to consider for relocation. Arena** arenap = cursorp_; // Next arena to consider for relocation.
size_t previousFreeCells = 0; // Count of free cells before arenap. size_t previousFreeCells = 0; // Count of free cells before arenap.
size_t followingUsedCells = 0; // Count of used cells after arenap. size_t followingUsedCells = 0; // Count of used cells after arenap.
size_t fullArenaCount = 0; // Number of full arenas (not relocated). size_t fullArenaCount = 0; // Number of full arenas (not relocated).
size_t nonFullArenaCount = 0; // Number of non-full arenas (considered for relocation). size_t nonFullArenaCount = 0; // Number of non-full arenas (considered for relocation).
size_t arenaIndex = 0; // Index of the next arena to consider. size_t arenaIndex = 0; // Index of the next arena to consider.
for (ArenaHeader* arena = head_; arena != *cursorp_; arena = arena->next) for (Arena* arena = head_; arena != *cursorp_; arena = arena->next)
fullArenaCount++; fullArenaCount++;
for (ArenaHeader* arena = *cursorp_; arena; arena = arena->next) { for (Arena* arena = *cursorp_; arena; arena = arena->next) {
followingUsedCells += arena->countUsedCells(); followingUsedCells += arena->countUsedCells();
nonFullArenaCount++; nonFullArenaCount++;
} }
@ -2084,7 +2086,7 @@ ArenaList::pickArenasToRelocate(size_t& arenaTotalOut, size_t& relocTotalOut)
size_t cellsPerArena = Arena::thingsPerArena((*arenap)->getAllocKind()); size_t cellsPerArena = Arena::thingsPerArena((*arenap)->getAllocKind());
while (*arenap) { while (*arenap) {
ArenaHeader* arena = *arenap; Arena* arena = *arenap;
if (followingUsedCells <= previousFreeCells) if (followingUsedCells <= previousFreeCells)
break; break;
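
A rough sketch of the heuristic above, using made-up numbers and a plain vector instead of the arena list (toy code, not part of this patch): the loop keeps advancing while the used cells that follow still exceed the free cells that precede, so everything after the stopping point can be evacuated into the arenas that are kept.

    #include <cstddef>
    #include <cstdio>
    #include <vector>

    int main() {
        const std::size_t cellsPerArena = 8;                   // made-up capacity
        std::vector<std::size_t> usedCells = {7, 6, 3, 2, 1};  // non-full arenas after the cursor

        std::size_t followingUsed = 0;
        for (std::size_t used : usedCells)
            followingUsed += used;

        std::size_t previousFree = 0;
        std::size_t relocate = 0;  // index of the first arena to relocate
        while (relocate < usedCells.size() && followingUsed > previousFree) {
            previousFree += cellsPerArena - usedCells[relocate];
            followingUsed -= usedCells[relocate];
            ++relocate;
        }
        // Arenas [relocate, end) hold 3 used cells here, which fit into the
        // 8 free cells of arenas [0, relocate).
        std::printf("relocate arenas from index %zu onwards\n", relocate);
        return 0;
    }
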
@ -2187,25 +2189,25 @@ RelocateCell(Zone* zone, TenuredCell* src, AllocKind thingKind, size_t thingSize
} }
static void static void
RelocateArena(ArenaHeader* aheader, SliceBudget& sliceBudget) RelocateArena(Arena* arena, SliceBudget& sliceBudget)
{ {
MOZ_ASSERT(aheader->allocated()); MOZ_ASSERT(arena->allocated());
MOZ_ASSERT(!aheader->hasDelayedMarking); MOZ_ASSERT(!arena->hasDelayedMarking);
MOZ_ASSERT(!aheader->markOverflow); MOZ_ASSERT(!arena->markOverflow);
MOZ_ASSERT(!aheader->allocatedDuringIncremental); MOZ_ASSERT(!arena->allocatedDuringIncremental);
Zone* zone = aheader->zone; Zone* zone = arena->zone;
AllocKind thingKind = aheader->getAllocKind(); AllocKind thingKind = arena->getAllocKind();
size_t thingSize = aheader->getThingSize(); size_t thingSize = arena->getThingSize();
for (ArenaCellIterUnderFinalize i(aheader); !i.done(); i.next()) { for (ArenaCellIterUnderFinalize i(arena); !i.done(); i.next()) {
RelocateCell(zone, i.getCell(), thingKind, thingSize); RelocateCell(zone, i.getCell(), thingKind, thingSize);
sliceBudget.step(); sliceBudget.step();
} }
#ifdef DEBUG #ifdef DEBUG
for (ArenaCellIterUnderFinalize i(aheader); !i.done(); i.next()) { for (ArenaCellIterUnderFinalize i(arena); !i.done(); i.next()) {
TenuredCell* src = i.getCell(); TenuredCell* src = i.getCell();
MOZ_ASSERT(RelocationOverlay::isCellForwarded(src)); MOZ_ASSERT(RelocationOverlay::isCellForwarded(src));
TenuredCell* dest = Forwarded(src); TenuredCell* dest = Forwarded(src);
@ -2232,13 +2234,13 @@ ShouldProtectRelocatedArenas(JS::gcreason::Reason reason)
* Relocate all arenas identified by pickArenasToRelocate: for each arena, * Relocate all arenas identified by pickArenasToRelocate: for each arena,
* relocate each cell within it, then add it to a list of relocated arenas. * relocate each cell within it, then add it to a list of relocated arenas.
*/ */
ArenaHeader* Arena*
ArenaList::relocateArenas(ArenaHeader* toRelocate, ArenaHeader* relocated, SliceBudget& sliceBudget, ArenaList::relocateArenas(Arena* toRelocate, Arena* relocated, SliceBudget& sliceBudget,
gcstats::Statistics& stats) gcstats::Statistics& stats)
{ {
check(); check();
while (ArenaHeader* arena = toRelocate) { while (Arena* arena = toRelocate) {
toRelocate = arena->next; toRelocate = arena->next;
RelocateArena(arena, sliceBudget); RelocateArena(arena, sliceBudget);
// Prepend to list of relocated arenas // Prepend to list of relocated arenas
@ -2276,7 +2278,7 @@ ShouldRelocateZone(size_t arenaCount, size_t relocCount, JS::gcreason::Reason re
} }
bool bool
ArenaLists::relocateArenas(Zone* zone, ArenaHeader*& relocatedListOut, JS::gcreason::Reason reason, ArenaLists::relocateArenas(Zone* zone, Arena*& relocatedListOut, JS::gcreason::Reason reason,
SliceBudget& sliceBudget, gcstats::Statistics& stats) SliceBudget& sliceBudget, gcstats::Statistics& stats)
{ {
// This is only called from the main thread while we are doing a GC, so // This is only called from the main thread while we are doing a GC, so
@ -2293,7 +2295,7 @@ ArenaLists::relocateArenas(Zone* zone, ArenaHeader*& relocatedListOut, JS::gcrea
for (auto i : AllAllocKinds()) { for (auto i : AllAllocKinds()) {
if (CanRelocateAllocKind(i)) { if (CanRelocateAllocKind(i)) {
ArenaList& al = arenaLists[i]; ArenaList& al = arenaLists[i];
ArenaHeader* allArenas = al.head(); Arena* allArenas = al.head();
al.clear(); al.clear();
relocatedListOut = al.relocateArenas(allArenas, relocatedListOut, sliceBudget, stats); relocatedListOut = al.relocateArenas(allArenas, relocatedListOut, sliceBudget, stats);
} }
@ -2301,7 +2303,7 @@ ArenaLists::relocateArenas(Zone* zone, ArenaHeader*& relocatedListOut, JS::gcrea
} else { } else {
size_t arenaCount = 0; size_t arenaCount = 0;
size_t relocCount = 0; size_t relocCount = 0;
AllAllocKindArray<ArenaHeader**> toRelocate; AllAllocKindArray<Arena**> toRelocate;
for (auto i : AllAllocKinds()) { for (auto i : AllAllocKinds()) {
toRelocate[i] = nullptr; toRelocate[i] = nullptr;
@ -2316,7 +2318,7 @@ ArenaLists::relocateArenas(Zone* zone, ArenaHeader*& relocatedListOut, JS::gcrea
for (auto i : AllAllocKinds()) { for (auto i : AllAllocKinds()) {
if (toRelocate[i]) { if (toRelocate[i]) {
ArenaList& al = arenaLists[i]; ArenaList& al = arenaLists[i];
ArenaHeader* arenas = al.removeRemainingArenas(toRelocate[i]); Arena* arenas = al.removeRemainingArenas(toRelocate[i]);
relocatedListOut = al.relocateArenas(arenas, relocatedListOut, sliceBudget, stats); relocatedListOut = al.relocateArenas(arenas, relocatedListOut, sliceBudget, stats);
} }
} }
@ -2326,7 +2328,7 @@ ArenaLists::relocateArenas(Zone* zone, ArenaHeader*& relocatedListOut, JS::gcrea
} }
bool bool
GCRuntime::relocateArenas(Zone* zone, JS::gcreason::Reason reason, ArenaHeader*& relocatedListOut, GCRuntime::relocateArenas(Zone* zone, JS::gcreason::Reason reason, Arena*& relocatedListOut,
SliceBudget& sliceBudget) SliceBudget& sliceBudget)
{ {
gcstats::AutoPhase ap(stats, gcstats::PHASE_COMPACT_MOVE); gcstats::AutoPhase ap(stats, gcstats::PHASE_COMPACT_MOVE);
@ -2347,7 +2349,7 @@ GCRuntime::relocateArenas(Zone* zone, JS::gcreason::Reason reason, ArenaHeader*&
if (CanRelocateAllocKind(i)) { if (CanRelocateAllocKind(i)) {
ArenaList& al = zone->arenas.arenaLists[i]; ArenaList& al = zone->arenas.arenaLists[i];
size_t freeCells = 0; size_t freeCells = 0;
for (ArenaHeader* arena = al.arenaAfterCursor(); arena; arena = arena->next) for (Arena* arena = al.arenaAfterCursor(); arena; arena = arena->next)
freeCells += arena->countFreeCells(); freeCells += arena->countFreeCells();
MOZ_ASSERT(freeCells < thingsPerArena); MOZ_ASSERT(freeCells < thingsPerArena);
} }
@ -2420,7 +2422,7 @@ GCRuntime::sweepZoneAfterCompacting(Zone* zone)
template <typename T> template <typename T>
static void static void
UpdateCellPointersTyped(MovingTracer* trc, ArenaHeader* arena, JS::TraceKind traceKind) UpdateCellPointersTyped(MovingTracer* trc, Arena* arena, JS::TraceKind traceKind)
{ {
for (ArenaCellIterUnderGC i(arena); !i.done(); i.next()) { for (ArenaCellIterUnderGC i(arena); !i.done(); i.next()) {
T* cell = reinterpret_cast<T*>(i.getCell()); T* cell = reinterpret_cast<T*>(i.getCell());
@ -2433,7 +2435,7 @@ UpdateCellPointersTyped(MovingTracer* trc, ArenaHeader* arena, JS::TraceKind tra
* Update the internal pointers for all cells in an arena. * Update the internal pointers for all cells in an arena.
*/ */
static void static void
UpdateCellPointers(MovingTracer* trc, ArenaHeader* arena) UpdateCellPointers(MovingTracer* trc, Arena* arena)
{ {
AllocKind kind = arena->getAllocKind(); AllocKind kind = arena->getAllocKind();
JS::TraceKind traceKind = MapAllocToTraceKind(kind); JS::TraceKind traceKind = MapAllocToTraceKind(kind);
@ -2493,17 +2495,17 @@ struct ArenasToUpdate
}; };
ArenasToUpdate(Zone* zone, KindsToUpdate kinds); ArenasToUpdate(Zone* zone, KindsToUpdate kinds);
bool done() { return kind == AllocKind::LIMIT; } bool done() { return kind == AllocKind::LIMIT; }
ArenaHeader* getArenasToUpdate(AutoLockHelperThreadState& lock, unsigned max); Arena* getArenasToUpdate(AutoLockHelperThreadState& lock, unsigned max);
private: private:
KindsToUpdate kinds; // Selects which thing kinds to iterate KindsToUpdate kinds; // Selects which thing kinds to iterate
Zone* zone; // Zone to process Zone* zone; // Zone to process
AllocKind kind; // Current alloc kind to process AllocKind kind; // Current alloc kind to process
ArenaHeader* arena; // Next arena to process Arena* arena; // Next arena to process
AllocKind nextAllocKind(AllocKind i) { return AllocKind(uint8_t(i) + 1); } AllocKind nextAllocKind(AllocKind i) { return AllocKind(uint8_t(i) + 1); }
bool shouldProcessKind(AllocKind kind); bool shouldProcessKind(AllocKind kind);
ArenaHeader* next(AutoLockHelperThreadState& lock); Arena* next(AutoLockHelperThreadState& lock);
}; };
bool ArenasToUpdate::shouldProcessKind(AllocKind kind) bool ArenasToUpdate::shouldProcessKind(AllocKind kind)
@ -2541,7 +2543,7 @@ ArenasToUpdate::ArenasToUpdate(Zone* zone, KindsToUpdate kinds)
MOZ_ASSERT(kinds && !(kinds & ~ALL)); MOZ_ASSERT(kinds && !(kinds & ~ALL));
} }
ArenaHeader* Arena*
ArenasToUpdate::next(AutoLockHelperThreadState& lock) ArenasToUpdate::next(AutoLockHelperThreadState& lock)
{ {
// Find the next arena to update. // Find the next arena to update.
@ -2566,17 +2568,17 @@ ArenasToUpdate::next(AutoLockHelperThreadState& lock)
return nullptr; return nullptr;
} }
ArenaHeader* Arena*
ArenasToUpdate::getArenasToUpdate(AutoLockHelperThreadState& lock, unsigned count) ArenasToUpdate::getArenasToUpdate(AutoLockHelperThreadState& lock, unsigned count)
{ {
if (done()) if (done())
return nullptr; return nullptr;
ArenaHeader* head = nullptr; Arena* head = nullptr;
ArenaHeader* tail = nullptr; Arena* tail = nullptr;
for (unsigned i = 0; i < count; ++i) { for (unsigned i = 0; i < count; ++i) {
ArenaHeader* arena = next(lock); Arena* arena = next(lock);
if (!arena) if (!arena)
break; break;
@ -2606,7 +2608,7 @@ struct UpdateCellPointersTask : public GCParallelTask
private: private:
JSRuntime* rt_; JSRuntime* rt_;
ArenasToUpdate* source_; ArenasToUpdate* source_;
ArenaHeader* arenaList_; Arena* arenaList_;
virtual void run() override; virtual void run() override;
void getArenasToUpdate(AutoLockHelperThreadState& lock); void getArenasToUpdate(AutoLockHelperThreadState& lock);
@ -2631,7 +2633,7 @@ void
UpdateCellPointersTask::updateArenas() UpdateCellPointersTask::updateArenas()
{ {
MovingTracer trc(rt_); MovingTracer trc(rt_);
for (ArenaHeader* arena = arenaList_; for (Arena* arena = arenaList_;
arena; arena;
arena = arena->getNextArenaToUpdateAndUnlink()) arena = arena->getNextArenaToUpdateAndUnlink())
{ {
@ -2775,11 +2777,11 @@ GCRuntime::updatePointersToRelocatedCells(Zone* zone)
} }
void void
GCRuntime::protectAndHoldArenas(ArenaHeader* arenaList) GCRuntime::protectAndHoldArenas(Arena* arenaList)
{ {
for (ArenaHeader* arena = arenaList; arena; ) { for (Arena* arena = arenaList; arena; ) {
MOZ_ASSERT(arena->allocated()); MOZ_ASSERT(arena->allocated());
ArenaHeader* next = arena->next; Arena* next = arena->next;
if (!next) { if (!next) {
// Prepend to hold list before we protect the memory. // Prepend to hold list before we protect the memory.
arena->next = relocatedArenasToRelease; arena->next = relocatedArenasToRelease;
@ -2793,14 +2795,14 @@ GCRuntime::protectAndHoldArenas(ArenaHeader* arenaList)
void void
GCRuntime::unprotectHeldRelocatedArenas() GCRuntime::unprotectHeldRelocatedArenas()
{ {
for (ArenaHeader* arena = relocatedArenasToRelease; arena; arena = arena->next) { for (Arena* arena = relocatedArenasToRelease; arena; arena = arena->next) {
UnprotectPages(arena, ArenaSize); UnprotectPages(arena, ArenaSize);
MOZ_ASSERT(arena->allocated()); MOZ_ASSERT(arena->allocated());
} }
} }
void void
GCRuntime::releaseRelocatedArenas(ArenaHeader* arenaList) GCRuntime::releaseRelocatedArenas(Arena* arenaList)
{ {
AutoLockGC lock(rt); AutoLockGC lock(rt);
releaseRelocatedArenasWithoutUnlocking(arenaList, lock); releaseRelocatedArenasWithoutUnlocking(arenaList, lock);
@ -2808,28 +2810,26 @@ GCRuntime::releaseRelocatedArenas(ArenaHeader* arenaList)
} }
void void
GCRuntime::releaseRelocatedArenasWithoutUnlocking(ArenaHeader* arenaList, const AutoLockGC& lock) GCRuntime::releaseRelocatedArenasWithoutUnlocking(Arena* arenaList, const AutoLockGC& lock)
{ {
// Release the relocated arenas, now containing only forwarding pointers // Release the relocated arenas, now containing only forwarding pointers
unsigned count = 0; unsigned count = 0;
while (arenaList) { while (arenaList) {
ArenaHeader* aheader = arenaList; Arena* arena = arenaList;
arenaList = arenaList->next; arenaList = arenaList->next;
// Clear the mark bits // Clear the mark bits
aheader->unmarkAll(); arena->unmarkAll();
// Mark arena as empty // Mark arena as empty
aheader->setAsFullyUnused(); arena->setAsFullyUnused();
#if defined(JS_CRASH_DIAGNOSTICS) || defined(JS_GC_ZEAL) #if defined(JS_CRASH_DIAGNOSTICS) || defined(JS_GC_ZEAL)
Arena* arena = aheader->getArena(); JS_POISON(reinterpret_cast<void*>(arena->thingsStart()),
AllocKind thingKind = aheader->getAllocKind(); JS_MOVED_TENURED_PATTERN, arena->getThingsSpan());
JS_POISON(reinterpret_cast<void*>(arena->thingsStart(thingKind)),
JS_MOVED_TENURED_PATTERN, Arena::thingsSpan(thingKind));
#endif #endif
releaseArena(aheader, lock); releaseArena(arena, lock);
++count; ++count;
} }
} }
@ -2859,12 +2859,12 @@ GCRuntime::releaseHeldRelocatedArenasWithoutUnlocking(const AutoLockGC& lock)
} }
void void
ReleaseArenaList(JSRuntime* rt, ArenaHeader* aheader, const AutoLockGC& lock) ReleaseArenaList(JSRuntime* rt, Arena* arena, const AutoLockGC& lock)
{ {
ArenaHeader* next; Arena* next;
for (; aheader; aheader = next) { for (; arena; arena = next) {
next = aheader->next; next = arena->next;
rt->gc.releaseArena(aheader, lock); rt->gc.releaseArena(arena, lock);
} }
} }
@ -2896,18 +2896,19 @@ ArenaLists::finalizeNow(FreeOp* fop, const FinalizePhase& phase)
} }
void void
ArenaLists::finalizeNow(FreeOp* fop, AllocKind thingKind, KeepArenasEnum keepArenas, ArenaHeader** empty) ArenaLists::finalizeNow(FreeOp* fop, AllocKind thingKind, KeepArenasEnum keepArenas, Arena** empty)
{ {
MOZ_ASSERT(!IsBackgroundFinalized(thingKind)); MOZ_ASSERT(!IsBackgroundFinalized(thingKind));
forceFinalizeNow(fop, thingKind, keepArenas, empty); forceFinalizeNow(fop, thingKind, keepArenas, empty);
} }
void void
ArenaLists::forceFinalizeNow(FreeOp* fop, AllocKind thingKind, KeepArenasEnum keepArenas, ArenaHeader** empty) ArenaLists::forceFinalizeNow(FreeOp* fop, AllocKind thingKind,
KeepArenasEnum keepArenas, Arena** empty)
{ {
MOZ_ASSERT(backgroundFinalizeState[thingKind] == BFS_DONE); MOZ_ASSERT(backgroundFinalizeState[thingKind] == BFS_DONE);
ArenaHeader* arenas = arenaLists[thingKind].head(); Arena* arenas = arenaLists[thingKind].head();
if (!arenas) if (!arenas)
return; return;
arenaLists[thingKind].clear(); arenaLists[thingKind].clear();
@ -2973,7 +2974,7 @@ ArenaLists::queueForBackgroundSweep(FreeOp* fop, AllocKind thingKind)
} }
/*static*/ void /*static*/ void
ArenaLists::backgroundFinalize(FreeOp* fop, ArenaHeader* listHead, ArenaHeader** empty) ArenaLists::backgroundFinalize(FreeOp* fop, Arena* listHead, Arena** empty)
{ {
MOZ_ASSERT(listHead); MOZ_ASSERT(listHead);
MOZ_ASSERT(empty); MOZ_ASSERT(empty);
@ -3401,13 +3402,13 @@ GCRuntime::sweepBackgroundThings(ZoneList& zones, LifoAlloc& freeBlocks, ThreadT
return; return;
// We must finalize thing kinds in the order specified by BackgroundFinalizePhases. // We must finalize thing kinds in the order specified by BackgroundFinalizePhases.
ArenaHeader* emptyArenas = nullptr; Arena* emptyArenas = nullptr;
FreeOp fop(rt, threadType); FreeOp fop(rt, threadType);
for (unsigned phase = 0 ; phase < ArrayLength(BackgroundFinalizePhases) ; ++phase) { for (unsigned phase = 0 ; phase < ArrayLength(BackgroundFinalizePhases) ; ++phase) {
for (Zone* zone = zones.front(); zone; zone = zone->nextZone()) { for (Zone* zone = zones.front(); zone; zone = zone->nextZone()) {
for (unsigned index = 0 ; index < BackgroundFinalizePhases[phase].length ; ++index) { for (unsigned index = 0 ; index < BackgroundFinalizePhases[phase].length ; ++index) {
AllocKind kind = BackgroundFinalizePhases[phase].kinds[index]; AllocKind kind = BackgroundFinalizePhases[phase].kinds[index];
ArenaHeader* arenas = zone->arenas.arenaListsToSweep[kind]; Arena* arenas = zone->arenas.arenaListsToSweep[kind];
MOZ_RELEASE_ASSERT(uintptr_t(arenas) != uintptr_t(-1)); MOZ_RELEASE_ASSERT(uintptr_t(arenas) != uintptr_t(-1));
if (arenas) if (arenas)
ArenaLists::backgroundFinalize(&fop, arenas, &emptyArenas); ArenaLists::backgroundFinalize(&fop, arenas, &emptyArenas);
@ -4464,15 +4465,15 @@ js::gc::MarkingValidator::validate()
if (chunk->decommittedArenas.get(i)) if (chunk->decommittedArenas.get(i))
continue; continue;
Arena* arena = &chunk->arenas[i]; Arena* arena = &chunk->arenas[i];
if (!arena->aheader.allocated()) if (!arena->allocated())
continue; continue;
if (!arena->aheader.zone->isGCSweeping()) if (!arena->zone->isGCSweeping())
continue; continue;
if (arena->aheader.allocatedDuringIncremental) if (arena->allocatedDuringIncremental)
continue; continue;
AllocKind kind = arena->aheader.getAllocKind(); AllocKind kind = arena->getAllocKind();
uintptr_t thing = arena->thingsStart(kind); uintptr_t thing = arena->thingsStart();
uintptr_t end = arena->thingsEnd(); uintptr_t end = arena->thingsEnd();
while (thing < end) { while (thing < end) {
Cell* cell = (Cell*)thing; Cell* cell = (Cell*)thing;
@ -5308,7 +5309,7 @@ GCRuntime::endSweepingZoneGroup()
sweepBackgroundThings(zones, freeLifoAlloc, MainThread); sweepBackgroundThings(zones, freeLifoAlloc, MainThread);
/* Reset the list of arenas marked as being allocated during sweep phase. */ /* Reset the list of arenas marked as being allocated during sweep phase. */
while (ArenaHeader* arena = arenasAllocatedDuringSweep) { while (Arena* arena = arenasAllocatedDuringSweep) {
arenasAllocatedDuringSweep = arena->getNextAllocDuringSweep(); arenasAllocatedDuringSweep = arena->getNextAllocDuringSweep();
arena->unsetAllocDuringSweep(); arena->unsetAllocDuringSweep();
} }
@ -5412,9 +5413,9 @@ SweepThing(ObjectGroup* group, AutoClearTypeInferenceStateOnOOM* oom)
template <typename T, typename... Args> template <typename T, typename... Args>
static bool static bool
SweepArenaList(ArenaHeader** arenasToSweep, SliceBudget& sliceBudget, Args... args) SweepArenaList(Arena** arenasToSweep, SliceBudget& sliceBudget, Args... args)
{ {
while (ArenaHeader* arena = *arenasToSweep) { while (Arena* arena = *arenasToSweep) {
for (ArenaCellIterUnderGC i(arena); !i.done(); i.next()) for (ArenaCellIterUnderGC i(arena); !i.done(); i.next())
SweepThing(i.get<T>(), args...); SweepThing(i.get<T>(), args...);
@ -5668,7 +5669,7 @@ GCRuntime::compactPhase(JS::gcreason::Reason reason, SliceBudget& sliceBudget)
while (!zonesToMaybeCompact.isEmpty()) { while (!zonesToMaybeCompact.isEmpty()) {
Zone* zone = zonesToMaybeCompact.front(); Zone* zone = zonesToMaybeCompact.front();
MOZ_ASSERT(zone->isGCFinished()); MOZ_ASSERT(zone->isGCFinished());
ArenaHeader* relocatedArenas = nullptr; Arena* relocatedArenas = nullptr;
if (relocateArenas(zone, reason, relocatedArenas, sliceBudget)) { if (relocateArenas(zone, reason, relocatedArenas, sliceBudget)) {
zone->setGCState(Zone::Compact); zone->setGCState(Zone::Compact);
updatePointersToRelocatedCells(zone); updatePointersToRelocatedCells(zone);
@ -6877,8 +6878,8 @@ gc::MergeCompartments(JSCompartment* source, JSCompartment* target)
for (auto thingKind : AllAllocKinds()) { for (auto thingKind : AllAllocKinds()) {
for (ArenaIter aiter(source->zone(), thingKind); !aiter.done(); aiter.next()) { for (ArenaIter aiter(source->zone(), thingKind); !aiter.done(); aiter.next()) {
ArenaHeader* aheader = aiter.get(); Arena* arena = aiter.get();
aheader->zone = target->zone(); arena->zone = target->zone();
} }
} }
@ -7093,13 +7094,13 @@ ArenaLists::adoptArenas(JSRuntime* rt, ArenaLists* fromArenaLists)
ArenaList* toList = &arenaLists[thingKind]; ArenaList* toList = &arenaLists[thingKind];
fromList->check(); fromList->check();
toList->check(); toList->check();
ArenaHeader* next; Arena* next;
for (ArenaHeader* fromHeader = fromList->head(); fromHeader; fromHeader = next) { for (Arena* fromArena = fromList->head(); fromArena; fromArena = next) {
// Copy fromHeader->next before releasing/reinserting. // Copy fromArena->next before releasing/reinserting.
next = fromHeader->next; next = fromArena->next;
MOZ_ASSERT(!fromHeader->isEmpty()); MOZ_ASSERT(!fromArena->isEmpty());
toList->insertAtCursor(fromHeader); toList->insertAtCursor(fromArena);
} }
fromList->clear(); fromList->clear();
toList->check(); toList->check();
@ -7107,12 +7108,12 @@ ArenaLists::adoptArenas(JSRuntime* rt, ArenaLists* fromArenaLists)
} }
bool bool
ArenaLists::containsArena(JSRuntime* rt, ArenaHeader* needle) ArenaLists::containsArena(JSRuntime* rt, Arena* needle)
{ {
AutoLockGC lock(rt); AutoLockGC lock(rt);
ArenaList& list = arenaLists[needle->getAllocKind()]; ArenaList& list = arenaLists[needle->getAllocKind()];
for (ArenaHeader* aheader = list.head(); aheader; aheader = aheader->next) { for (Arena* arena = list.head(); arena; arena = arena->next) {
if (aheader == needle) if (arena == needle)
return true; return true;
} }
return false; return false;


@ -291,8 +291,8 @@ class AutoMaybeStartBackgroundAllocation;
*/ */
struct SortedArenaListSegment struct SortedArenaListSegment
{ {
ArenaHeader* head; Arena* head;
ArenaHeader** tailp; Arena** tailp;
void clear() { void clear() {
head = nullptr; head = nullptr;
@ -303,21 +303,21 @@ struct SortedArenaListSegment
return tailp == &head; return tailp == &head;
} }
// Appends |aheader| to this segment. // Appends |arena| to this segment.
void append(ArenaHeader* aheader) { void append(Arena* arena) {
MOZ_ASSERT(aheader); MOZ_ASSERT(arena);
MOZ_ASSERT_IF(head, head->getAllocKind() == aheader->getAllocKind()); MOZ_ASSERT_IF(head, head->getAllocKind() == arena->getAllocKind());
*tailp = aheader; *tailp = arena;
tailp = &aheader->next; tailp = &arena->next;
} }
// Points the tail of this segment at |aheader|, which may be null. Note // Points the tail of this segment at |arena|, which may be null. Note
// that this does not change the tail itself, but merely which arena // that this does not change the tail itself, but merely which arena
// follows it. This essentially turns the tail into a cursor (see also the // follows it. This essentially turns the tail into a cursor (see also the
// description of ArenaList), but from the perspective of a SortedArenaList // description of ArenaList), but from the perspective of a SortedArenaList
// this makes no difference. // this makes no difference.
void linkTo(ArenaHeader* aheader) { void linkTo(Arena* arena) {
*tailp = aheader; *tailp = arena;
} }
}; };
@ -357,8 +357,8 @@ class ArenaList {
// //
// |cursorp_| is never null. // |cursorp_| is never null.
// //
ArenaHeader* head_; Arena* head_;
ArenaHeader** cursorp_; Arena** cursorp_;
void copy(const ArenaList& other) { void copy(const ArenaList& other) {
other.check(); other.check();
@ -394,7 +394,7 @@ class ArenaList {
MOZ_ASSERT_IF(!head_, cursorp_ == &head_); MOZ_ASSERT_IF(!head_, cursorp_ == &head_);
// If there's an arena following the cursor, it must not be full. // If there's an arena following the cursor, it must not be full.
ArenaHeader* cursor = *cursorp_; Arena* cursor = *cursorp_;
MOZ_ASSERT_IF(cursor, cursor->hasFreeThings()); MOZ_ASSERT_IF(cursor, cursor->hasFreeThings());
#endif #endif
} }
@ -417,7 +417,7 @@ class ArenaList {
} }
// This returns nullptr if the list is empty. // This returns nullptr if the list is empty.
ArenaHeader* head() const { Arena* head() const {
check(); check();
return head_; return head_;
} }
@ -433,27 +433,27 @@ class ArenaList {
} }
// This can return nullptr. // This can return nullptr.
ArenaHeader* arenaAfterCursor() const { Arena* arenaAfterCursor() const {
check(); check();
return *cursorp_; return *cursorp_;
} }
// This returns the arena after the cursor and moves the cursor past it. // This returns the arena after the cursor and moves the cursor past it.
ArenaHeader* takeNextArena() { Arena* takeNextArena() {
check(); check();
ArenaHeader* aheader = *cursorp_; Arena* arena = *cursorp_;
if (!aheader) if (!arena)
return nullptr; return nullptr;
cursorp_ = &aheader->next; cursorp_ = &arena->next;
check(); check();
return aheader; return arena;
} }
// This does two things. // This does two things.
// - Inserts |a| at the cursor. // - Inserts |a| at the cursor.
// - Leaves the cursor sitting just before |a|, if |a| is not full, or just // - Leaves the cursor sitting just before |a|, if |a| is not full, or just
// after |a|, if |a| is full. // after |a|, if |a| is full.
void insertAtCursor(ArenaHeader* a) { void insertAtCursor(Arena* a) {
check(); check();
a->next = *cursorp_; a->next = *cursorp_;
*cursorp_ = a; *cursorp_ = a;
@ -465,7 +465,7 @@ class ArenaList {
} }
// Inserts |a| at the cursor, then moves the cursor past it. // Inserts |a| at the cursor, then moves the cursor past it.
void insertBeforeCursor(ArenaHeader* a) { void insertBeforeCursor(Arena* a) {
check(); check();
a->next = *cursorp_; a->next = *cursorp_;
*cursorp_ = a; *cursorp_ = a;
@ -488,10 +488,10 @@ class ArenaList {
return *this; return *this;
} }
ArenaHeader* removeRemainingArenas(ArenaHeader** arenap); Arena* removeRemainingArenas(Arena** arenap);
ArenaHeader** pickArenasToRelocate(size_t& arenaTotalOut, size_t& relocTotalOut); Arena** pickArenasToRelocate(size_t& arenaTotalOut, size_t& relocTotalOut);
ArenaHeader* relocateArenas(ArenaHeader* toRelocate, ArenaHeader* relocated, Arena* relocateArenas(Arena* toRelocate, Arena* relocated,
SliceBudget& sliceBudget, gcstats::Statistics& stats); SliceBudget& sliceBudget, gcstats::Statistics& stats);
}; };
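
The cursor trick above is the usual pointer-to-pointer idiom: |cursorp_| refers either to |head_| or to some arena's |next| field, so insertion needs no special case for the front of the list. A self-contained toy version (field and type names are made up, not the real ArenaList):

    #include <cassert>

    struct Node {
        Node* next = nullptr;
        bool full = false;
    };

    struct List {
        Node* head = nullptr;
        Node** cursorp = &head;  // points at |head| or at some node's |next|

        // Insert |n| at the cursor; leave the cursor before |n| unless |n| is
        // full, in which case step past it (mirrors insertAtCursor above).
        void insertAtCursor(Node* n) {
            n->next = *cursorp;
            *cursorp = n;
            if (n->full)
                cursorp = &n->next;
        }
    };

    int main() {
        List list;
        Node full, partial;
        full.full = true;
        list.insertAtCursor(&full);     // cursor steps past the full node
        list.insertAtCursor(&partial);  // cursor stays before the partial node
        assert(list.head == &full && full.next == &partial);
        assert(*list.cursorp == &partial);
        return 0;
    }
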
/* /*
@ -513,14 +513,14 @@ class SortedArenaList
private: private:
// The maximum number of GC things that an arena can hold. // The maximum number of GC things that an arena can hold.
static const size_t MaxThingsPerArena = (ArenaSize - sizeof(ArenaHeader)) / MinThingSize; static const size_t MaxThingsPerArena = (ArenaSize - ArenaHeaderSize) / MinThingSize;
size_t thingsPerArena_; size_t thingsPerArena_;
SortedArenaListSegment segments[MaxThingsPerArena + 1]; SortedArenaListSegment segments[MaxThingsPerArena + 1];
// Convenience functions to get the nth head and tail. // Convenience functions to get the nth head and tail.
ArenaHeader* headAt(size_t n) { return segments[n].head; } Arena* headAt(size_t n) { return segments[n].head; }
ArenaHeader** tailAt(size_t n) { return segments[n].tailp; } Arena** tailAt(size_t n) { return segments[n].tailp; }
public: public:
explicit SortedArenaList(size_t thingsPerArena = MaxThingsPerArena) { explicit SortedArenaList(size_t thingsPerArena = MaxThingsPerArena) {
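
For a sense of scale, assuming the usual 4 KiB arenas, a 16-byte minimum thing size, and 32 bytes of per-arena header fields on a 64-bit build (these values are assumptions for illustration, not taken from this patch), the new bound works out as follows:

    #include <cstddef>
    #include <cstdio>

    int main() {
        // Assumed stand-ins for the real constants.
        const std::size_t ArenaSize = 4096;
        const std::size_t ArenaHeaderSize = 32;
        const std::size_t MinThingSize = 16;

        const std::size_t MaxThingsPerArena = (ArenaSize - ArenaHeaderSize) / MinThingSize;
        // One segment per possible free-thing count, 0..MaxThingsPerArena inclusive.
        std::printf("MaxThingsPerArena = %zu, segments = %zu\n",
                    MaxThingsPerArena, MaxThingsPerArena + 1);  // 254, 255
        return 0;
    }
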
@ -540,14 +540,14 @@ class SortedArenaList
segments[i].clear(); segments[i].clear();
} }
// Inserts a header, which has room for |nfree| more things, in its segment. // Inserts an arena, which has room for |nfree| more things, in its segment.
void insertAt(ArenaHeader* aheader, size_t nfree) { void insertAt(Arena* arena, size_t nfree) {
MOZ_ASSERT(nfree <= thingsPerArena_); MOZ_ASSERT(nfree <= thingsPerArena_);
segments[nfree].append(aheader); segments[nfree].append(arena);
} }
// Remove all empty arenas, inserting them as a linked list. // Remove all empty arenas, inserting them as a linked list.
void extractEmpty(ArenaHeader** empty) { void extractEmpty(Arena** empty) {
SortedArenaListSegment& segment = segments[thingsPerArena_]; SortedArenaListSegment& segment = segments[thingsPerArena_];
if (segment.head) { if (segment.head) {
*segment.tailp = *empty; *segment.tailp = *empty;
@ -559,7 +559,7 @@ class SortedArenaList
// Links up the tail of each non-empty segment to the head of the next // Links up the tail of each non-empty segment to the head of the next
// non-empty segment, creating a contiguous list that is returned as an // non-empty segment, creating a contiguous list that is returned as an
// ArenaList. This is not a destructive operation: neither the head nor tail // ArenaList. This is not a destructive operation: neither the head nor tail
// of any segment is modified. However, note that the ArenaHeaders in the // of any segment is modified. However, note that the Arenas in the
// resulting ArenaList should be treated as read-only unless the // resulting ArenaList should be treated as read-only unless the
// SortedArenaList is no longer needed: inserting or removing arenas would // SortedArenaList is no longer needed: inserting or removing arenas would
// invalidate the SortedArenaList. // invalidate the SortedArenaList.
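
A self-contained toy of that linking step (simplified types with made-up names, not the real SortedArenaList): each non-empty segment's tail is pointed at the head of the next non-empty segment, and the last tail at null, yielding one contiguous list without rewriting the segments themselves.

    #include <cassert>
    #include <cstddef>

    struct Arena { Arena* next = nullptr; };

    // |tailp| points at |head| while the segment is empty, and at the last
    // arena's |next| field otherwise.
    struct Segment {
        Arena* head = nullptr;
        Arena** tailp = &head;
        void append(Arena* a) { *tailp = a; tailp = &a->next; }
    };

    // Chain the non-empty segments together by writing through each tail
    // pointer; only the trailing links are touched.
    template <std::size_t N>
    Arena* linkSegments(Segment (&segments)[N]) {
        Arena* head = nullptr;
        Arena** prevTailp = &head;
        for (Segment& s : segments) {
            if (!s.head)
                continue;
            *prevTailp = s.head;  // link the previous tail to this segment
            prevTailp = s.tailp;
        }
        *prevTailp = nullptr;
        return head;
    }

    int main() {
        Arena a, b, c;
        Segment segments[3];
        segments[0].append(&a);
        segments[2].append(&b);
        segments[2].append(&c);
        Arena* head = linkSegments(segments);
        assert(head == &a && a.next == &b && b.next == &c && !c.next);
        return 0;
    }
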
@ -590,7 +590,7 @@ class ArenaLists
* free things. Initially all the spans are initialized as empty. After we * free things. Initially all the spans are initialized as empty. After we
* find a new arena with available things we move its first free span into * find a new arena with available things we move its first free span into
* the list and set the arena as fully allocated. This way we do not need to * the list and set the arena as fully allocated. This way we do not need to
* update the arena header after the initial allocation. When starting the * update the arena after the initial allocation. When starting the
* GC we only move the head of the list of spans back to the arena * GC we only move the head of the list of spans back to the arena
* only for the arena that was not fully allocated. * only for the arena that was not fully allocated.
*/ */
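
A toy model of the span-based allocation this describes (made-up types, not the real FreeSpan): the free list is just a [first, last] range handed out in thing-size steps, so ordinary allocation only touches the span, and the arena it came from stays marked as fully allocated until the span is handed back.

    #include <cassert>
    #include <cstdint>

    struct Span {
        std::uintptr_t first = 0;
        std::uintptr_t last = 0;
        bool isEmpty() const { return first > last; }
    };

    // Bump-allocate one thing from the span, or return 0 if it is used up
    // (the real code would then pull the first free span of a fresh arena).
    std::uintptr_t allocate(Span& freeList, std::uintptr_t thingSize) {
        if (freeList.isEmpty())
            return 0;
        std::uintptr_t thing = freeList.first;
        freeList.first += thingSize;
        return thing;
    }

    int main() {
        Span freeList;
        freeList.first = 0x1000;
        freeList.last = 0x1020;  // room for three 16-byte things
        assert(allocate(freeList, 16) == 0x1000);
        assert(allocate(freeList, 16) == 0x1010);
        assert(allocate(freeList, 16) == 0x1020);
        assert(allocate(freeList, 16) == 0);  // exhausted; refill needed
        return 0;
    }
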
@ -612,7 +612,7 @@ class ArenaLists
AllAllocKindArray<BackgroundFinalizeState> backgroundFinalizeState; AllAllocKindArray<BackgroundFinalizeState> backgroundFinalizeState;
/* For each arena kind, a list of arenas remaining to be swept. */ /* For each arena kind, a list of arenas remaining to be swept. */
AllAllocKindArray<ArenaHeader*> arenaListsToSweep; AllAllocKindArray<Arena*> arenaListsToSweep;
/* During incremental sweeping, a list of the arenas already swept. */ /* During incremental sweeping, a list of the arenas already swept. */
AllocKind incrementalSweptArenaKind; AllocKind incrementalSweptArenaKind;
@ -620,17 +620,17 @@ class ArenaLists
// Arena lists which have yet to be swept, but need additional foreground // Arena lists which have yet to be swept, but need additional foreground
// processing before they are swept. // processing before they are swept.
ArenaHeader* gcShapeArenasToUpdate; Arena* gcShapeArenasToUpdate;
ArenaHeader* gcAccessorShapeArenasToUpdate; Arena* gcAccessorShapeArenasToUpdate;
ArenaHeader* gcScriptArenasToUpdate; Arena* gcScriptArenasToUpdate;
ArenaHeader* gcObjectGroupArenasToUpdate; Arena* gcObjectGroupArenasToUpdate;
// While sweeping type information, these lists save the arenas for the // While sweeping type information, these lists save the arenas for the
// objects which have already been finalized in the foreground (which must // objects which have already been finalized in the foreground (which must
// happen at the beginning of the GC), so that type sweeping can determine // happen at the beginning of the GC), so that type sweeping can determine
// which of the object pointers are marked. // which of the object pointers are marked.
ObjectAllocKindArray<ArenaList> savedObjectArenas; ObjectAllocKindArray<ArenaList> savedObjectArenas;
ArenaHeader* savedEmptyObjectArenas; Arena* savedEmptyObjectArenas;
public: public:
explicit ArenaLists(JSRuntime* rt) : runtime_(rt) { explicit ArenaLists(JSRuntime* rt) : runtime_(rt) {
@ -654,21 +654,21 @@ class ArenaLists
return reinterpret_cast<const void*>(&freeLists[thingKind]); return reinterpret_cast<const void*>(&freeLists[thingKind]);
} }
ArenaHeader* getFirstArena(AllocKind thingKind) const { Arena* getFirstArena(AllocKind thingKind) const {
return arenaLists[thingKind].head(); return arenaLists[thingKind].head();
} }
ArenaHeader* getFirstArenaToSweep(AllocKind thingKind) const { Arena* getFirstArenaToSweep(AllocKind thingKind) const {
return arenaListsToSweep[thingKind]; return arenaListsToSweep[thingKind];
} }
ArenaHeader* getFirstSweptArena(AllocKind thingKind) const { Arena* getFirstSweptArena(AllocKind thingKind) const {
if (thingKind != incrementalSweptArenaKind) if (thingKind != incrementalSweptArenaKind)
return nullptr; return nullptr;
return incrementalSweptArenas.head(); return incrementalSweptArenas.head();
} }
ArenaHeader* getArenaAfterCursor(AllocKind thingKind) const { Arena* getArenaAfterCursor(AllocKind thingKind) const {
return arenaLists[thingKind].arenaAfterCursor(); return arenaLists[thingKind].arenaAfterCursor();
} }
@ -690,8 +690,8 @@ class ArenaLists
for (auto i : AllAllocKinds()) { for (auto i : AllAllocKinds()) {
/* The background finalization must have stopped at this point. */ /* The background finalization must have stopped at this point. */
MOZ_ASSERT(backgroundFinalizeState[i] == BFS_DONE); MOZ_ASSERT(backgroundFinalizeState[i] == BFS_DONE);
for (ArenaHeader* aheader = arenaLists[i].head(); aheader; aheader = aheader->next) for (Arena* arena = arenaLists[i].head(); arena; arena = arena->next)
aheader->unmarkAll(); arena->unmarkAll();
} }
} }
@ -713,10 +713,10 @@ class ArenaLists
inline void prepareForIncrementalGC(JSRuntime* rt); inline void prepareForIncrementalGC(JSRuntime* rt);
/* Check if |aheader|'s arena is in use. */ /* Check if this arena is in use. */
bool arenaIsInUse(ArenaHeader* aheader, AllocKind kind) const { bool arenaIsInUse(Arena* arena, AllocKind kind) const {
MOZ_ASSERT(aheader); MOZ_ASSERT(arena);
return aheader == freeLists[kind]->getArenaUnchecked(); return arena == freeLists[kind]->getArenaUnchecked();
} }
MOZ_ALWAYS_INLINE TenuredCell* allocateFromFreeList(AllocKind thingKind, size_t thingSize) { MOZ_ALWAYS_INLINE TenuredCell* allocateFromFreeList(AllocKind thingKind, size_t thingSize) {
@ -728,8 +728,8 @@ class ArenaLists
*/ */
void adoptArenas(JSRuntime* runtime, ArenaLists* fromArenaLists); void adoptArenas(JSRuntime* runtime, ArenaLists* fromArenaLists);
/* True if the ArenaHeader in question is found in this ArenaLists */ /* True if the Arena in question is found in this ArenaLists */
bool containsArena(JSRuntime* runtime, ArenaHeader* arenaHeader); bool containsArena(JSRuntime* runtime, Arena* arena);
void checkEmptyFreeLists() { void checkEmptyFreeLists() {
#ifdef DEBUG #ifdef DEBUG
@ -742,7 +742,7 @@ class ArenaLists
MOZ_ASSERT(freeLists[kind]->isEmpty()); MOZ_ASSERT(freeLists[kind]->isEmpty());
} }
bool relocateArenas(Zone* zone, ArenaHeader*& relocatedListOut, JS::gcreason::Reason reason, bool relocateArenas(Zone* zone, Arena*& relocatedListOut, JS::gcreason::Reason reason,
SliceBudget& sliceBudget, gcstats::Statistics& stats); SliceBudget& sliceBudget, gcstats::Statistics& stats);
void queueForegroundObjectsForSweep(FreeOp* fop); void queueForegroundObjectsForSweep(FreeOp* fop);
@ -752,7 +752,7 @@ class ArenaLists
bool foregroundFinalize(FreeOp* fop, AllocKind thingKind, SliceBudget& sliceBudget, bool foregroundFinalize(FreeOp* fop, AllocKind thingKind, SliceBudget& sliceBudget,
SortedArenaList& sweepList); SortedArenaList& sweepList);
static void backgroundFinalize(FreeOp* fop, ArenaHeader* listHead, ArenaHeader** empty); static void backgroundFinalize(FreeOp* fop, Arena* listHead, Arena** empty);
// When finalizing arenas, whether to keep empty arenas on the list or // When finalizing arenas, whether to keep empty arenas on the list or
// release them immediately. // release them immediately.
@ -767,17 +767,16 @@ class ArenaLists
inline void queueForBackgroundSweep(FreeOp* fop, const FinalizePhase& phase); inline void queueForBackgroundSweep(FreeOp* fop, const FinalizePhase& phase);
inline void finalizeNow(FreeOp* fop, AllocKind thingKind, inline void finalizeNow(FreeOp* fop, AllocKind thingKind,
KeepArenasEnum keepArenas, ArenaHeader** empty = nullptr); KeepArenasEnum keepArenas, Arena** empty = nullptr);
inline void forceFinalizeNow(FreeOp* fop, AllocKind thingKind, inline void forceFinalizeNow(FreeOp* fop, AllocKind thingKind,
KeepArenasEnum keepArenas, ArenaHeader** empty = nullptr); KeepArenasEnum keepArenas, Arena** empty = nullptr);
inline void queueForForegroundSweep(FreeOp* fop, AllocKind thingKind); inline void queueForForegroundSweep(FreeOp* fop, AllocKind thingKind);
inline void queueForBackgroundSweep(FreeOp* fop, AllocKind thingKind); inline void queueForBackgroundSweep(FreeOp* fop, AllocKind thingKind);
inline void mergeSweptArenas(AllocKind thingKind); inline void mergeSweptArenas(AllocKind thingKind);
TenuredCell* allocateFromArena(JS::Zone* zone, AllocKind thingKind, TenuredCell* allocateFromArena(JS::Zone* zone, AllocKind thingKind,
AutoMaybeStartBackgroundAllocation& maybeStartBGAlloc); AutoMaybeStartBackgroundAllocation& maybeStartBGAlloc);
inline TenuredCell* allocateFromArenaInner(JS::Zone* zone, ArenaHeader* aheader, inline TenuredCell* allocateFromArenaInner(JS::Zone* zone, Arena* arena, AllocKind kind);
AllocKind kind);
inline void normalizeBackgroundFinalizeState(AllocKind thingKind); inline void normalizeBackgroundFinalizeState(AllocKind thingKind);


@ -42,14 +42,14 @@ GCRuntime::poke()
class ArenaIter class ArenaIter
{ {
ArenaHeader* aheader; Arena* arena;
ArenaHeader* unsweptHeader; Arena* unsweptArena;
ArenaHeader* sweptHeader; Arena* sweptArena;
mozilla::DebugOnly<bool> initialized; mozilla::DebugOnly<bool> initialized;
public: public:
ArenaIter() ArenaIter()
: aheader(nullptr), unsweptHeader(nullptr), sweptHeader(nullptr), initialized(false) {} : arena(nullptr), unsweptArena(nullptr), sweptArena(nullptr), initialized(false) {}
ArenaIter(JS::Zone* zone, AllocKind kind) : initialized(false) { init(zone, kind); } ArenaIter(JS::Zone* zone, AllocKind kind) : initialized(false) { init(zone, kind); }
@ -57,37 +57,37 @@ class ArenaIter
MOZ_ASSERT(!initialized); MOZ_ASSERT(!initialized);
MOZ_ASSERT(zone); MOZ_ASSERT(zone);
initialized = true; initialized = true;
aheader = zone->arenas.getFirstArena(kind); arena = zone->arenas.getFirstArena(kind);
unsweptHeader = zone->arenas.getFirstArenaToSweep(kind); unsweptArena = zone->arenas.getFirstArenaToSweep(kind);
sweptHeader = zone->arenas.getFirstSweptArena(kind); sweptArena = zone->arenas.getFirstSweptArena(kind);
if (!unsweptHeader) { if (!unsweptArena) {
unsweptHeader = sweptHeader; unsweptArena = sweptArena;
sweptHeader = nullptr; sweptArena = nullptr;
} }
if (!aheader) { if (!arena) {
aheader = unsweptHeader; arena = unsweptArena;
unsweptHeader = sweptHeader; unsweptArena = sweptArena;
sweptHeader = nullptr; sweptArena = nullptr;
} }
} }
bool done() const { bool done() const {
MOZ_ASSERT(initialized); MOZ_ASSERT(initialized);
return !aheader; return !arena;
} }
ArenaHeader* get() const { Arena* get() const {
MOZ_ASSERT(!done()); MOZ_ASSERT(!done());
return aheader; return arena;
} }
void next() { void next() {
MOZ_ASSERT(!done()); MOZ_ASSERT(!done());
aheader = aheader->next; arena = arena->next;
if (!aheader) { if (!arena) {
aheader = unsweptHeader; arena = unsweptArena;
unsweptHeader = sweptHeader; unsweptArena = sweptArena;
sweptHeader = nullptr; sweptArena = nullptr;
} }
} }
}; };
@ -96,7 +96,7 @@ class ArenaCellIterImpl
{ {
size_t firstThingOffset; size_t firstThingOffset;
size_t thingSize; size_t thingSize;
ArenaHeader* arenaAddr; Arena* arenaAddr;
FreeSpan span; FreeSpan span;
uint_fast16_t thing; uint_fast16_t thing;
mozilla::DebugOnly<bool> initialized; mozilla::DebugOnly<bool> initialized;
@ -120,25 +120,25 @@ class ArenaCellIterImpl
ArenaCellIterImpl() ArenaCellIterImpl()
: firstThingOffset(0), thingSize(0), arenaAddr(nullptr), thing(0), initialized(false) {} : firstThingOffset(0), thingSize(0), arenaAddr(nullptr), thing(0), initialized(false) {}
explicit ArenaCellIterImpl(ArenaHeader* aheader) : initialized(false) { init(aheader); } explicit ArenaCellIterImpl(Arena* arena) : initialized(false) { init(arena); }
void init(ArenaHeader* aheader) { void init(Arena* arena) {
MOZ_ASSERT(!initialized); MOZ_ASSERT(!initialized);
MOZ_ASSERT(aheader); MOZ_ASSERT(arena);
initialized = true; initialized = true;
AllocKind kind = aheader->getAllocKind(); AllocKind kind = arena->getAllocKind();
firstThingOffset = Arena::firstThingOffset(kind); firstThingOffset = Arena::firstThingOffset(kind);
thingSize = Arena::thingSize(kind); thingSize = Arena::thingSize(kind);
reset(aheader); reset(arena);
} }
// Use this to move from an Arena of a particular kind to another Arena of // Use this to move from an Arena of a particular kind to another Arena of
// the same kind. // the same kind.
void reset(ArenaHeader* aheader) { void reset(Arena* arena) {
MOZ_ASSERT(initialized); MOZ_ASSERT(initialized);
MOZ_ASSERT(aheader); MOZ_ASSERT(arena);
arenaAddr = aheader; arenaAddr = arena;
span = *aheader->getFirstFreeSpan(); span = *arena->getFirstFreeSpan();
thing = firstThingOffset; thing = firstThingOffset;
moveForwardIfFree(); moveForwardIfFree();
} }
@ -174,15 +174,15 @@ ArenaCellIterImpl::get<JSObject>() const;
class ArenaCellIterUnderGC : public ArenaCellIterImpl class ArenaCellIterUnderGC : public ArenaCellIterImpl
{ {
public: public:
explicit ArenaCellIterUnderGC(ArenaHeader* aheader) : ArenaCellIterImpl(aheader) { explicit ArenaCellIterUnderGC(Arena* arena) : ArenaCellIterImpl(arena) {
MOZ_ASSERT(aheader->zone->runtimeFromAnyThread()->isHeapBusy()); MOZ_ASSERT(arena->zone->runtimeFromAnyThread()->isHeapBusy());
} }
}; };
class ArenaCellIterUnderFinalize : public ArenaCellIterImpl class ArenaCellIterUnderFinalize : public ArenaCellIterImpl
{ {
public: public:
explicit ArenaCellIterUnderFinalize(ArenaHeader* aheader) : ArenaCellIterImpl(aheader) {} explicit ArenaCellIterUnderFinalize(Arena* arena) : ArenaCellIterImpl(arena) {}
}; };
class ZoneCellIterImpl class ZoneCellIterImpl


@ -164,7 +164,7 @@ PropertyTree::getChild(ExclusiveContext* cx, Shape* parentArg, Handle<StackShape
TraceManuallyBarrieredEdge(zone->barrierTracer(), &tmp, "read barrier"); TraceManuallyBarrieredEdge(zone->barrierTracer(), &tmp, "read barrier");
MOZ_ASSERT(tmp == existingShape); MOZ_ASSERT(tmp == existingShape);
} else if (zone->isGCSweeping() && !existingShape->isMarked() && } else if (zone->isGCSweeping() && !existingShape->isMarked() &&
!existingShape->arenaHeader()->allocatedDuringIncremental) !existingShape->arena()->allocatedDuringIncremental)
{ {
/* /*
* The shape we've found is unreachable and due to be finalized, so * The shape we've found is unreachable and due to be finalized, so


@ -355,9 +355,9 @@ StatsArenaCallback(JSRuntime* rt, void* data, gc::Arena* arena,
{ {
RuntimeStats* rtStats = static_cast<StatsClosure*>(data)->rtStats; RuntimeStats* rtStats = static_cast<StatsClosure*>(data)->rtStats;
// The admin space includes (a) the header and (b) the padding between the // The admin space includes (a) the header fields and (b) the padding
// end of the header and the start of the first GC thing. // between the end of the header fields and the first GC thing.
size_t allocationSpace = gc::Arena::thingsSpan(arena->aheader.getAllocKind()); size_t allocationSpace = gc::Arena::thingsSpan(arena->getAllocKind());
rtStats->currZoneStats->gcHeapArenaAdmin += gc::ArenaSize - allocationSpace; rtStats->currZoneStats->gcHeapArenaAdmin += gc::ArenaSize - allocationSpace;
// We don't call the callback on unused things. So we compute the // We don't call the callback on unused things. So we compute the


@ -776,10 +776,10 @@ TypeSet::IsTypeAllocatedDuringIncremental(TypeSet::Type v)
bool rv; bool rv;
if (v.isSingletonUnchecked()) { if (v.isSingletonUnchecked()) {
JSObject* obj = v.singletonNoBarrier(); JSObject* obj = v.singletonNoBarrier();
rv = obj->isTenured() && obj->asTenured().arenaHeader()->allocatedDuringIncremental; rv = obj->isTenured() && obj->asTenured().arena()->allocatedDuringIncremental;
} else if (v.isGroupUnchecked()) { } else if (v.isGroupUnchecked()) {
ObjectGroup* group = v.groupNoBarrier(); ObjectGroup* group = v.groupNoBarrier();
rv = group->arenaHeader()->allocatedDuringIncremental; rv = group->arena()->allocatedDuringIncremental;
} else { } else {
rv = false; rv = false;
} }