Backout 2ee20348ae59 (bug 747066) for Win64 crashes

Bill McCloskey 2012-11-20 10:45:43 -08:00
parent ffcbfdadda
commit dcc622c54b
13 changed files with 79 additions and 108 deletions

View File

@@ -36,26 +36,11 @@ const size_t ChunkShift = 20;
const size_t ChunkSize = size_t(1) << ChunkShift;
const size_t ChunkMask = ChunkSize - 1;
const size_t CellShift = 3;
const size_t CellSize = size_t(1) << CellShift;
const size_t CellMask = CellSize - 1;
/* These are magic constants derived from actual offsets in gc/Heap.h. */
const size_t ChunkMarkBitmapOffset = 1032376;
const size_t ChunkMarkBitmapBits = 129024;
/*
* Live objects are marked black. How many additional colors are available
* depends on the size of the GCThing. Objects marked gray are eligible for
* cycle collection.
*/
static const uint32_t BLACK = 0;
static const uint32_t GRAY = 1;
} /* namespace gc */
} /* namespace js */
namespace JS {
namespace shadow {
struct ArenaHeader
@@ -63,39 +48,7 @@ struct ArenaHeader
JSCompartment *compartment;
};
struct Compartment
{
bool needsBarrier_;
Compartment() : needsBarrier_(false) {}
};
} /* namespace shadow */
} /* namespace JS */
namespace js {
namespace gc {
static inline uintptr_t *
GetGCThingMarkBitmap(const void *thing)
{
uintptr_t addr = uintptr_t(thing);
addr &= ~js::gc::ChunkMask;
addr |= js::gc::ChunkMarkBitmapOffset;
return reinterpret_cast<uintptr_t *>(addr);
}
static inline void
GetGCThingMarkWordAndMask(const void *thing, uint32_t color,
uintptr_t **wordp, uintptr_t *maskp)
{
uintptr_t addr = uintptr_t(thing);
size_t bit = (addr & js::gc::ChunkMask) / js::gc::CellSize + color;
JS_ASSERT(bit < js::gc::ChunkMarkBitmapBits);
uintptr_t *bitmap = GetGCThingMarkBitmap(thing);
*maskp = uintptr_t(1) << (bit % JS_BITS_PER_WORD);
*wordp = &bitmap[bit / JS_BITS_PER_WORD];
}
static inline shadow::ArenaHeader *
GetGCThingArena(void *thing)
@@ -105,16 +58,11 @@ GetGCThingArena(void *thing)
return reinterpret_cast<shadow::ArenaHeader *>(addr);
}
} /* namespace gc */
} /* namespace js */
namespace JS {
static inline JSCompartment *
GetGCThingCompartment(void *thing)
{
JS_ASSERT(thing);
return js::gc::GetGCThingArena(thing)->compartment;
return GetGCThingArena(thing)->compartment;
}
static inline JSCompartment *
@@ -123,21 +71,6 @@ GetObjectCompartment(JSObject *obj)
return GetGCThingCompartment(obj);
}
static inline bool
GCThingIsMarkedGray(void *thing)
{
uintptr_t *word, mask;
js::gc::GetGCThingMarkWordAndMask(thing, js::gc::GRAY, &word, &mask);
return *word & mask;
}
static inline bool
IsIncrementalBarrierNeededOnGCThing(void *thing)
{
JSCompartment *comp = GetGCThingCompartment(thing);
return reinterpret_cast<shadow::Compartment *>(comp)->needsBarrier_;
}
} /* namespace JS */
#endif /* js_heap_api_h___ */
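
(Aside: the addressing scheme this hunk removes from the public header is worth spelling out. A mark bit lives at the GC thing's chunk-relative cell index, with GRAY stored one bit past BLACK for the same cell. Below is a minimal, self-contained C++ sketch of that arithmetic; the constants are restated locally, and MarkWordAndMask plus the sample addresses are invented for illustration, not SpiderMonkey API.)

#include <cassert>
#include <cstdint>
#include <cstdio>

const size_t ChunkShift = 20;
const size_t ChunkSize = size_t(1) << ChunkShift;   // 1 MiB chunks
const size_t ChunkMask = ChunkSize - 1;
const size_t CellShift = 3;
const size_t CellSize = size_t(1) << CellShift;     // 8-byte cells
const uint32_t BLACK = 0;
const uint32_t GRAY = 1;
const size_t BitsPerWord = sizeof(uintptr_t) * 8;

static void MarkWordAndMask(uintptr_t addr, uint32_t color,
                            size_t *wordIndex, uintptr_t *mask)
{
    size_t bit = (addr & ChunkMask) / CellSize + color;
    *wordIndex = bit / BitsPerWord;
    *mask = uintptr_t(1) << (bit % BitsPerWord);
}

int main()
{
    uintptr_t chunkBase = 42 * ChunkSize;   // hypothetical chunk start
    uintptr_t cell = chunkBase + 0x420;     // hypothetical 8-byte-aligned cell
    assert(cell % CellSize == 0);
    size_t word;
    uintptr_t mask;
    MarkWordAndMask(cell, BLACK, &word, &mask);
    printf("black: word %zu, mask %#zx\n", word, size_t(mask));
    MarkWordAndMask(cell, GRAY, &word, &mask);  // the adjacent bit
    printf("gray:  word %zu, mask %#zx\n", word, size_t(mask));
    return 0;
}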

View File

@@ -34,6 +34,14 @@ struct Arena;
struct ArenaHeader;
struct Chunk;
/*
* Live objects are marked black. How many additional colors are available
* depends on the size of the GCThing. Objects marked gray are eligible for
* cycle collection.
*/
static const uint32_t BLACK = 0;
static const uint32_t GRAY = 1;
/* The GC allocation kinds. */
enum AllocKind {
FINALIZE_OBJECT0,
@@ -77,6 +85,10 @@ static const size_t MAX_BACKGROUND_FINALIZE_KINDS = FINALIZE_LIMIT - FINALIZE_OB
*/
struct Cell
{
static const size_t CellShift = 3;
static const size_t CellSize = size_t(1) << CellShift;
static const size_t CellMask = CellSize - 1;
inline uintptr_t address() const;
inline ArenaHeader *arenaHeader() const;
inline Chunk *chunk() const;
@@ -104,7 +116,7 @@ const static uint32_t FreeCommittedArenasThreshold = (32 << 20) / ArenaSize;
* accessing the bitmap. In addition this allows using some bits for colored
* marking during the cycle GC.
*/
const size_t ArenaCellCount = size_t(1) << (ArenaShift - CellShift);
const size_t ArenaCellCount = size_t(1) << (ArenaShift - Cell::CellShift);
const size_t ArenaBitmapBits = ArenaCellCount;
const size_t ArenaBitmapBytes = ArenaBitmapBits / 8;
const size_t ArenaBitmapWords = ArenaBitmapBits / JS_BITS_PER_WORD;
@@ -133,7 +145,7 @@ const size_t ArenaBitmapWords = ArenaBitmapBits / JS_BITS_PER_WORD;
* fully used.
*
* Also only for the last span (|last| & 1) != 0, as all allocation sizes are
* multiples of CellSize.
* multiples of Cell::CellSize.
*/
struct FreeSpan
{
@@ -250,7 +262,7 @@ struct FreeSpan
/* See comments before FreeSpan for details. */
MOZ_ALWAYS_INLINE void *allocate(size_t thingSize) {
JS_ASSERT(thingSize % CellSize == 0);
JS_ASSERT(thingSize % Cell::CellSize == 0);
checkSpan();
uintptr_t thing = first;
if (thing < last) {
@@ -271,7 +283,7 @@ struct FreeSpan
/* A version of allocate when we know that the span is not empty. */
MOZ_ALWAYS_INLINE void *infallibleAllocate(size_t thingSize) {
JS_ASSERT(thingSize % CellSize == 0);
JS_ASSERT(thingSize % Cell::CellSize == 0);
checkSpan();
uintptr_t thing = first;
if (thing < last) {
@@ -319,7 +331,7 @@ struct FreeSpan
return;
}
size_t spanLength = last - first + 1;
JS_ASSERT(spanLength % CellSize == 0);
JS_ASSERT(spanLength % Cell::CellSize == 0);
/* Start and end must belong to the same arena. */
JS_ASSERT((first & ~ArenaMask) == arenaAddr);
@@ -329,7 +341,7 @@ struct FreeSpan
/* The span is not the last and we have more spans to follow. */
JS_ASSERT(first <= last);
size_t spanLengthWithoutOneThing = last - first;
JS_ASSERT(spanLengthWithoutOneThing % CellSize == 0);
JS_ASSERT(spanLengthWithoutOneThing % Cell::CellSize == 0);
JS_ASSERT((first & ~ArenaMask) == arenaAddr);
@@ -339,7 +351,7 @@ struct FreeSpan
* storing useless empty span reference.
*/
size_t beforeTail = ArenaSize - (last & ArenaMask);
JS_ASSERT(beforeTail >= sizeof(FreeSpan) + CellSize);
JS_ASSERT(beforeTail >= sizeof(FreeSpan) + Cell::CellSize);
FreeSpan *next = reinterpret_cast<FreeSpan *>(last);
@@ -537,7 +549,7 @@ struct Arena
}
static size_t thingsPerArena(size_t thingSize) {
JS_ASSERT(thingSize % CellSize == 0);
JS_ASSERT(thingSize % Cell::CellSize == 0);
/* We should be able to fit FreeSpan in any GC thing. */
JS_ASSERT(thingSize >= sizeof(FreeSpan));
@@ -587,14 +599,6 @@ struct ChunkInfo
/* Free arenas are linked together with aheader.next. */
ArenaHeader *freeArenasHead;
#if JS_BITS_PER_WORD == 32
/*
* Calculating sizes and offsets is simpler if sizeof(ChunkInfo) is
* architecture-independent.
*/
char padding[12];
#endif
/*
* Decommitted arenas are tracked by a bitmap in the chunk header. We use
* this offset to start our search iteration close to a decommitted arena
@@ -652,10 +656,7 @@ struct ChunkBitmap
uintptr_t bitmap[ArenaBitmapWords * ArenasPerChunk];
MOZ_ALWAYS_INLINE void getMarkWordAndMask(const Cell *cell, uint32_t color,
uintptr_t **wordp, uintptr_t *maskp)
{
GetGCThingMarkWordAndMask(cell, color, wordp, maskp);
}
uintptr_t **wordp, uintptr_t *maskp);
MOZ_ALWAYS_INLINE bool isMarked(const Cell *cell, uint32_t color) {
uintptr_t *word, mask;
@@ -707,7 +708,6 @@ struct ChunkBitmap
};
JS_STATIC_ASSERT(ArenaBitmapBytes * ArenasPerChunk == sizeof(ChunkBitmap));
JS_STATIC_ASSERT(js::gc::ChunkMarkBitmapBits == ArenaBitmapBits * ArenasPerChunk);
typedef BitArray<ArenasPerChunk> PerArenaBitmap;
@@ -810,13 +810,12 @@ struct Chunk
};
JS_STATIC_ASSERT(sizeof(Chunk) == ChunkSize);
JS_STATIC_ASSERT(js::gc::ChunkMarkBitmapOffset == offsetof(Chunk, bitmap));
inline uintptr_t
Cell::address() const
{
uintptr_t addr = uintptr_t(this);
JS_ASSERT(addr % CellSize == 0);
JS_ASSERT(addr % Cell::CellSize == 0);
JS_ASSERT(Chunk::withinArenasRange(addr));
return addr;
}
@@ -920,12 +919,22 @@ ArenaHeader::unsetAllocDuringSweep()
auxNextLink = 0;
}
JS_ALWAYS_INLINE void
ChunkBitmap::getMarkWordAndMask(const Cell *cell, uint32_t color,
uintptr_t **wordp, uintptr_t *maskp)
{
size_t bit = (cell->address() & ChunkMask) / Cell::CellSize + color;
JS_ASSERT(bit < ArenaBitmapBits * ArenasPerChunk);
*maskp = uintptr_t(1) << (bit % JS_BITS_PER_WORD);
*wordp = &bitmap[bit / JS_BITS_PER_WORD];
}
static void
AssertValidColor(const void *thing, uint32_t color)
{
#ifdef DEBUG
ArenaHeader *aheader = reinterpret_cast<const Cell *>(thing)->arenaHeader();
JS_ASSERT_IF(color, color < aheader->getThingSize() / CellSize);
JS_ASSERT_IF(color, color < aheader->getThingSize() / Cell::CellSize);
#endif
}
@@ -941,7 +950,7 @@ Chunk *
Cell::chunk() const
{
uintptr_t addr = uintptr_t(this);
JS_ASSERT(addr % CellSize == 0);
JS_ASSERT(addr % Cell::CellSize == 0);
addr &= ~(ChunkSize - 1);
return reinterpret_cast<Chunk *>(addr);
}
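
(The restored Cell::CellSize asserts in FreeSpan above all guard one invariant: allocation sizes are multiples of the cell size, so a free span can be carved by pointer bumping. A simplified sketch under that assumption follows; SpanSketch is an invented stand-in, and the real FreeSpan additionally chains to a following span when its last cell is handed out.)

#include <cassert>
#include <cstdint>
#include <cstdio>

// SpanSketch is an invented stand-in for FreeSpan: a run of free cells
// [first, last] carved up by pointer bumping; the next-span link is omitted.
struct SpanSketch
{
    uintptr_t first;   // address of the first free cell
    uintptr_t last;    // address of the last free cell

    void *allocate(size_t thingSize) {
        assert(thingSize % 8 == 0);       // multiples of CellSize only
        uintptr_t thing = first;
        if (thing <= last) {              // room left in the span
            first = thing + thingSize;    // bump past the new thing
            return reinterpret_cast<void *>(thing);
        }
        return nullptr;                   // span exhausted
    }
};

int main()
{
    alignas(8) unsigned char arena[64];   // pretend arena: eight 8-byte cells
    uintptr_t base = uintptr_t(arena);
    SpanSketch span = { base, base + 56 };
    for (int i = 0; i < 3; i++) {
        void *thing = span.allocate(16);  // two cells per thing
        printf("thing %d at offset +%zu\n", i, size_t(uintptr_t(thing) - base));
    }
    return 0;
}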

View File

@@ -1118,7 +1118,7 @@ GCMarker::processMarkStackOther(SliceBudget &budget, uintptr_t tag, uintptr_t ad
if (tag == TypeTag) {
ScanTypeObject(this, reinterpret_cast<types::TypeObject *>(addr));
} else if (tag == SavedValueArrayTag) {
JS_ASSERT(!(addr & CellMask));
JS_ASSERT(!(addr & Cell::CellMask));
JSObject *obj = reinterpret_cast<JSObject *>(addr);
HeapValue *vp, *end;
if (restoreValueArray(obj, (void **)&vp, (void **)&end))
@@ -1183,7 +1183,7 @@ GCMarker::processMarkStackTop(SliceBudget &budget)
if (tag == ValueArrayTag) {
JS_STATIC_ASSERT(ValueArrayTag == 0);
JS_ASSERT(!(addr & CellMask));
JS_ASSERT(!(addr & Cell::CellMask));
obj = reinterpret_cast<JSObject *>(addr);
uintptr_t addr2 = stack.pop();
uintptr_t addr3 = stack.pop();
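
(Context for the Cell::CellMask asserts here and in the GCMarker::staticAsserts hunk further down: cell alignment leaves the low three bits of every GC thing pointer clear, so the mark stack packs a small tag into them. A standalone C++ sketch of that packing; TagSketch, pack(), and the tag values are invented names for illustration.)

#include <cassert>
#include <cstdint>
#include <cstdio>

const uintptr_t CellMask = 7;   // 8-byte cells: three spare low bits

enum TagSketch : uintptr_t { ObjectTag = 0, TypeTag = 1, SavedValueArrayTag = 2 };

static uintptr_t pack(const void *p, TagSketch tag)
{
    uintptr_t addr = uintptr_t(p);
    assert(!(addr & CellMask));   // pointer must be cell aligned
    assert(tag <= CellMask);      // tag must fit in the spare bits
    return addr | tag;
}

int main()
{
    alignas(8) int fakeThing = 0;
    uintptr_t entry = pack(&fakeThing, TypeTag);
    uintptr_t tag = entry & CellMask;                         // recover the tag...
    void *ptr = reinterpret_cast<void *>(entry & ~CellMask);  // ...and the pointer
    printf("tag=%zu, pointer intact=%d\n", size_t(tag), ptr == &fakeThing);
    return 0;
}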

View File

@@ -50,7 +50,7 @@ using namespace js::ion;
IonOptions ion::js_IonOptions;
// Assert that IonCode is gc::Cell aligned.
JS_STATIC_ASSERT(sizeof(IonCode) % gc::CellSize == 0);
JS_STATIC_ASSERT(sizeof(IonCode) % gc::Cell::CellSize == 0);
#ifdef JS_THREADSAFE
static bool IonTLSInitialized = false;

View File

@@ -50,6 +50,7 @@ JSCompartment::JSCompartment(JSRuntime *rt)
#ifdef JSGC_GENERATIONAL
gcStoreBuffer(&gcNursery),
#endif
needsBarrier_(false),
ionUsingBarriers_(false),
gcScheduled(false),
gcState(NoGC),

View File

@@ -116,7 +116,7 @@ namespace js {
class AutoDebugModeGC;
}
struct JSCompartment : private JS::shadow::Compartment
struct JSCompartment
{
JSRuntime *rt;
JSPrincipals *principals;
@@ -156,6 +156,7 @@ struct JSCompartment : private JS::shadow::Compartment
#endif
private:
bool needsBarrier_;
bool ionUsingBarriers_;
public:
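
(These two compartment hunks undo the shadow-struct pattern introduced by 2ee20348ae59: JSCompartment stops inheriting from JS::shadow::Compartment, and needsBarrier_ goes back to being an ordinary private member. The pattern being removed looks roughly like the sketch below, where a public "shadow" mirrors the leading layout of the private type so header-only code can read the flag through a cast; every name outside the shadow namespace is invented for illustration.)

#include <cstdio>

namespace shadow {
struct Compartment
{
    bool needsBarrier_;   // must remain the first member
    Compartment() : needsBarrier_(false) {}
};
} // namespace shadow

struct EngineCompartment : private shadow::Compartment
{
    int lotsOfPrivateState;   // stand-in for everything else in the class
    EngineCompartment() : lotsOfPrivateState(0) {}
    void setNeedsBarrier(bool b) { needsBarrier_ = b; }
};

// Code that only sees the shadow definition can still read the flag:
static bool NeedsBarrier(EngineCompartment *c)
{
    return reinterpret_cast<shadow::Compartment *>(c)->needsBarrier_;
}

int main()
{
    EngineCompartment c;
    c.setNeedsBarrier(true);
    printf("needsBarrier = %d\n", NeedsBarrier(&c));
    return 0;
}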

View File

@@ -546,6 +546,13 @@ js::TraceWeakMaps(WeakMapTracer *trc)
WatchpointMap::traceAll(trc);
}
JS_FRIEND_API(bool)
js::GCThingIsMarkedGray(void *thing)
{
JS_ASSERT(thing);
return reinterpret_cast<gc::Cell *>(thing)->isMarked(gc::GRAY);
}
JS_FRIEND_API(JSGCTraceKind)
js::GCThingTraceKind(void *thing)
{
@@ -877,6 +884,18 @@ IsIncrementalBarrierNeeded(JSContext *cx)
return IsIncrementalBarrierNeeded(cx->runtime);
}
JS_FRIEND_API(bool)
IsIncrementalBarrierNeededOnObject(RawObject obj)
{
return obj->compartment()->needsBarrier();
}
JS_FRIEND_API(bool)
IsIncrementalBarrierNeededOnScript(JSScript *script)
{
return script->compartment()->needsBarrier();
}
extern JS_FRIEND_API(void)
IncrementalReferenceBarrier(void *ptr)
{

View File

@@ -266,6 +266,9 @@ struct WeakMapTracer {
extern JS_FRIEND_API(void)
TraceWeakMaps(WeakMapTracer *trc);
extern JS_FRIEND_API(bool)
GCThingIsMarkedGray(void *thing);
JS_FRIEND_API(void)
UnmarkGrayGCThing(void *thing);
@@ -882,6 +885,12 @@ IsIncrementalBarrierNeeded(JSRuntime *rt);
extern JS_FRIEND_API(bool)
IsIncrementalBarrierNeeded(JSContext *cx);
extern JS_FRIEND_API(bool)
IsIncrementalBarrierNeededOnObject(RawObject obj);
extern JS_FRIEND_API(bool)
IsIncrementalBarrierNeededOnScript(JSScript *script);
extern JS_FRIEND_API(void)
IncrementalReferenceBarrier(void *ptr);

View File

@@ -321,7 +321,7 @@ inline bool
Arena::finalize(FreeOp *fop, AllocKind thingKind, size_t thingSize)
{
/* Enforce requirements on size of T. */
JS_ASSERT(thingSize % CellSize == 0);
JS_ASSERT(thingSize % Cell::CellSize == 0);
JS_ASSERT(thingSize <= 255);
JS_ASSERT(aheader.allocated());

View File

@@ -904,7 +904,7 @@ struct GCMarker : public JSTracer {
static void staticAsserts() {
JS_STATIC_ASSERT(StackTagMask >= uintptr_t(LastTag));
JS_STATIC_ASSERT(StackTagMask <= gc::CellMask);
JS_STATIC_ASSERT(StackTagMask <= gc::Cell::CellMask);
}
public:

View File

@@ -928,7 +928,7 @@ struct JSScript : public js::gc::Cell
JS_STATIC_ASSERT(sizeof(JSScript::ArrayBitsT) * 8 >= JSScript::LIMIT);
/* If this fails, add/remove padding within JSScript. */
JS_STATIC_ASSERT(sizeof(JSScript) % js::gc::CellSize == 0);
JS_STATIC_ASSERT(sizeof(JSScript) % js::gc::Cell::CellSize == 0);
namespace js {

View File

@@ -653,11 +653,11 @@ JS_STATIC_ASSERT(sizeof(JSInlineString) == sizeof(JSString));
class JSShortString : public JSInlineString
{
/* This can be any value that is a multiple of CellSize. */
/* This can be any value that is a multiple of Cell::CellSize. */
static const size_t INLINE_EXTENSION_CHARS = sizeof(JSString::Data) / sizeof(jschar);
static void staticAsserts() {
JS_STATIC_ASSERT(INLINE_EXTENSION_CHARS % js::gc::CellSize == 0);
JS_STATIC_ASSERT(INLINE_EXTENSION_CHARS % js::gc::Cell::CellSize == 0);
JS_STATIC_ASSERT(MAX_SHORT_LENGTH + 1 ==
(sizeof(JSShortString) -
offsetof(JSShortString, d.inlineStorage)) / sizeof(jschar));

View File

@@ -128,7 +128,7 @@ xpc_FastGetCachedWrapper(nsWrapperCache *cache, JSObject *scope, jsval *vp)
inline JSBool
xpc_IsGrayGCThing(void *thing)
{
return JS::GCThingIsMarkedGray(thing);
return js::GCThingIsMarkedGray(thing);
}
// The cycle collector only cares about some kinds of GCthings that are
@@ -146,7 +146,7 @@ xpc_UnmarkNonNullGrayObject(JSObject *obj)
{
if (xpc_IsGrayGCThing(obj))
xpc_UnmarkGrayGCThingRecursive(obj, JSTRACE_OBJECT);
else if (JS::IsIncrementalBarrierNeededOnGCThing(obj))
else if (js::IsIncrementalBarrierNeededOnObject(obj))
js::IncrementalReferenceBarrier(obj);
}
@@ -155,9 +155,8 @@ xpc_UnmarkNonNullGrayObject(JSObject *obj)
MOZ_ALWAYS_INLINE JSObject *
xpc_UnmarkGrayObject(JSObject *obj)
{
if (obj) {
if (obj)
xpc_UnmarkNonNullGrayObject(obj);
}
return obj;
}
@@ -167,7 +166,7 @@ xpc_UnmarkGrayScript(JSScript *script)
if (script) {
if (xpc_IsGrayGCThing(script))
xpc_UnmarkGrayGCThingRecursive(script, JSTRACE_SCRIPT);
else if (JS::IsIncrementalBarrierNeededOnGCThing(script))
else if (js::IsIncrementalBarrierNeededOnScript(script))
js::IncrementalReferenceBarrier(script);
}
return script;