Bug 609104 - Move the property tree to the compartment (r=brendan)

This commit is contained in:
Bill McCloskey 2011-01-27 15:46:39 -08:00
parent 2e5ffe22af
commit a4f9ab0da8
17 changed files with 430 additions and 652 deletions

View File

@ -648,10 +648,6 @@ JSRuntime::init(uint32 maxbytes)
}
propTreeStatFilename = getenv("JS_PROPTREE_STATFILE");
propTreeDumpFilename = getenv("JS_PROPTREE_DUMPFILE");
if (meterEmptyShapes()) {
if (!emptyShapes.init())
return false;
}
#endif
if (!(atomsCompartment = js_new<JSCompartment>(this)) ||
@ -682,7 +678,7 @@ JSRuntime::init(uint32 maxbytes)
debugMode = JS_FALSE;
return propertyTree.init() && js_InitThreads(this);
return js_InitThreads(this);
}
JSRuntime::~JSRuntime()
@ -723,7 +719,6 @@ JSRuntime::~JSRuntime()
if (debuggerLock)
JS_DESTROY_LOCK(debuggerLock);
#endif
propertyTree.finish();
}
JS_PUBLIC_API(JSRuntime *)

View File

@ -811,19 +811,6 @@ js_NewContext(JSRuntime *rt, size_t stackChunkSize)
ok = js_InitRuntimeScriptState(rt);
if (ok)
ok = js_InitRuntimeNumberState(cx);
if (ok) {
/*
* Ensure that the empty scopes initialized by
* Shape::initRuntimeState get the desired special shapes.
* (The rt->state dance above guarantees that this abuse of
* rt->shapeGen is thread-safe.)
*/
uint32 shapeGen = rt->shapeGen;
rt->shapeGen = 0;
ok = Shape::initRuntimeState(cx);
if (rt->shapeGen < shapeGen)
rt->shapeGen = shapeGen;
}
#ifdef JS_THREADSAFE
JS_EndRequest(cx);
@ -1045,7 +1032,6 @@ js_DestroyContext(JSContext *cx, JSDestroyContextMode mode)
JS_BeginRequest(cx);
#endif
Shape::finishRuntimeState(cx);
js_FinishRuntimeNumberState(cx);
/* Unpin all common atoms before final GC. */

View File

@ -1175,15 +1175,6 @@ struct JSRuntime {
/* Structured data callbacks are runtime-wide. */
const JSStructuredCloneCallbacks *structuredCloneCallbacks;
/*
* Shared scope property tree, and arena-pool for allocating its nodes.
* This really should be free of all locking overhead and allocated in
* thread-local storage, hence the JS_PROPERTY_TREE(cx) macro.
*/
js::PropertyTree propertyTree;
#define JS_PROPERTY_TREE(cx) ((cx)->runtime->propertyTree)
/*
* The propertyRemovals counter is incremented for every JSObject::clear,
* and for each JSObject::remove method call that frees a slot in the given
@ -1240,17 +1231,6 @@ struct JSRuntime {
/* Literal table maintained by jsatom.c functions. */
JSAtomState atomState;
/*
* Runtime-shared empty scopes for well-known built-in objects that lack
* class prototypes (the usual locus of an emptyShape). Mnemonic: ABCDEW
*/
js::EmptyShape *emptyArgumentsShape;
js::EmptyShape *emptyBlockShape;
js::EmptyShape *emptyCallShape;
js::EmptyShape *emptyDeclEnvShape;
js::EmptyShape *emptyEnumeratorShape;
js::EmptyShape *emptyWithShape;
/*
* Various metering fields are defined at the end of JSRuntime. In this
* way there is no need to recompile all the code that refers to other
@ -1276,18 +1256,12 @@ struct JSRuntime {
jsrefcount nonInlineCalls;
jsrefcount constructs;
/* Property metering. */
jsrefcount liveObjectProps;
jsrefcount liveObjectPropsPreSweep;
jsrefcount totalObjectProps;
jsrefcount livePropTreeNodes;
jsrefcount duplicatePropTreeNodes;
jsrefcount totalPropTreeNodes;
jsrefcount propTreeKidsChunks;
jsrefcount liveDictModeNodes;
/*
* NB: emptyShapes is init'ed iff at least one of these envars is set:
* NB: emptyShapes (in JSCompartment) is init'ed iff at least one
* of these envars is set:
*
* JS_PROPTREE_STATFILE statistics on the property tree forest
* JS_PROPTREE_DUMPFILE all paths in the property tree forest
@ -1297,12 +1271,6 @@ struct JSRuntime {
bool meterEmptyShapes() const { return propTreeStatFilename || propTreeDumpFilename; }
typedef js::HashSet<js::EmptyShape *,
js::DefaultHasher<js::EmptyShape *>,
js::SystemAllocPolicy> EmptyShapeSet;
EmptyShapeSet emptyShapes;
/* String instrumentation. */
jsrefcount liveStrings;
jsrefcount totalStrings;
@ -3214,19 +3182,19 @@ js_IsPropertyCacheDisabled(JSContext *cx)
}
static JS_INLINE uint32
js_RegenerateShapeForGC(JSContext *cx)
js_RegenerateShapeForGC(JSRuntime *rt)
{
JS_ASSERT(cx->runtime->gcRunning);
JS_ASSERT(cx->runtime->gcRegenShapes);
JS_ASSERT(rt->gcRunning);
JS_ASSERT(rt->gcRegenShapes);
/*
* Under the GC, compared with js_GenerateShape, we don't need to use
* atomic increments but we still must make sure that after an overflow
* the shape stays such.
*/
uint32 shape = cx->runtime->shapeGen;
uint32 shape = rt->shapeGen;
shape = (shape + 1) | (shape & js::SHAPE_OVERFLOW_BIT);
cx->runtime->shapeGen = shape;
rt->shapeGen = shape;
return shape;
}

View File

@ -64,6 +64,7 @@ JSCompartment::JSCompartment(JSRuntime *rt)
data(NULL),
marked(false),
active(false),
propertyTree(this),
debugMode(rt->debugMode),
mathCache(NULL)
{
@ -74,6 +75,9 @@ JSCompartment::JSCompartment(JSRuntime *rt)
JSCompartment::~JSCompartment()
{
Shape::finishEmptyShapes(this);
propertyTree.finish();
#if ENABLE_YARR_JIT
js_delete(regExpAllocator);
#endif
@ -107,12 +111,24 @@ JSCompartment::init()
if (!crossCompartmentWrappers.init())
return false;
#ifdef JS_TRACER
if (!InitJIT(&traceMonitor)) {
if (!propertyTree.init())
return false;
#ifdef DEBUG
if (rt->meterEmptyShapes()) {
if (!emptyShapes.init())
return false;
}
#endif
if (!Shape::initEmptyShapes(this))
return false;
#ifdef JS_TRACER
if (!InitJIT(&traceMonitor))
return false;
#endif
#if ENABLE_YARR_JIT
regExpAllocator = JSC::ExecutableAllocator::create();
if (!regExpAllocator)
@ -387,12 +403,29 @@ ScriptPoolDestroyed(JSContext *cx, mjit::JITScript *jit,
#endif
void
JSCompartment::mark(JSTracer *trc)
JSCompartment::markCrossCompartment(JSTracer *trc)
{
for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront())
MarkValue(trc, e.front().key, "cross-compartment wrapper");
}
void
JSCompartment::mark(JSTracer *trc)
{
if (emptyArgumentsShape)
emptyArgumentsShape->trace(trc);
if (emptyBlockShape)
emptyBlockShape->trace(trc);
if (emptyCallShape)
emptyCallShape->trace(trc);
if (emptyDeclEnvShape)
emptyDeclEnvShape->trace(trc);
if (emptyEnumeratorShape)
emptyEnumeratorShape->trace(trc);
if (emptyWithShape)
emptyWithShape->trace(trc);
}
void
JSCompartment::sweep(JSContext *cx, uint32 releaseInterval)
{

View File

@ -398,6 +398,36 @@ struct JS_FRIEND_API(JSCompartment) {
js::mjit::JaegerCompartment *jaegerCompartment;
#endif
/*
* Shared scope property tree, and arena-pool for allocating its nodes.
*/
js::PropertyTree propertyTree;
#ifdef DEBUG
/* Property metering. */
jsrefcount livePropTreeNodes;
jsrefcount totalPropTreeNodes;
jsrefcount propTreeKidsChunks;
jsrefcount liveDictModeNodes;
#endif
/*
* Runtime-shared empty scopes for well-known built-in objects that lack
* class prototypes (the usual locus of an emptyShape). Mnemonic: ABCDEW
*/
js::EmptyShape *emptyArgumentsShape;
js::EmptyShape *emptyBlockShape;
js::EmptyShape *emptyCallShape;
js::EmptyShape *emptyDeclEnvShape;
js::EmptyShape *emptyEnumeratorShape;
js::EmptyShape *emptyWithShape;
typedef js::HashSet<js::EmptyShape *,
js::DefaultHasher<js::EmptyShape *>,
js::SystemAllocPolicy> EmptyShapeSet;
EmptyShapeSet emptyShapes;
bool debugMode; // true iff debug mode on
JSCList scripts; // scripts in this compartment
@ -405,12 +435,17 @@ struct JS_FRIEND_API(JSCompartment) {
js::NativeIterCache nativeIterCache;
JSCompartment(JSRuntime *cx);
JSCompartment(JSRuntime *rt);
~JSCompartment();
bool init();
/* Mark cross-compartment pointers. */
void markCrossCompartment(JSTracer *trc);
/* Mark this compartment's local roots. */
void mark(JSTracer *trc);
bool wrap(JSContext *cx, js::Value *vp);
bool wrap(JSContext *cx, JSString **strp);
bool wrap(JSContext *cx, JSObject **objp);
@ -441,8 +476,15 @@ struct JS_FRIEND_API(JSCompartment) {
}
};
#define JS_TRACE_MONITOR(cx) (cx->compartment->traceMonitor)
#define JS_SCRIPTS_TO_GC(cx) (cx->compartment->scriptsToGC)
#define JS_TRACE_MONITOR(cx) ((cx)->compartment->traceMonitor)
#define JS_SCRIPTS_TO_GC(cx) ((cx)->compartment->scriptsToGC)
#define JS_PROPERTY_TREE(cx) ((cx)->compartment->propertyTree)
#ifdef DEBUG
#define JS_COMPARTMENT_METER(x) x
#else
#define JS_COMPARTMENT_METER(x)
#endif
namespace js {
static inline MathCache *

View File

@ -595,6 +595,12 @@ DropWatchPointAndUnlock(JSContext *cx, JSWatchPoint *wp, uintN flag)
return ok;
}
/*
* Switch to the same compartment as the watch point, since changeProperty, below,
* needs to have a compartment.
*/
SwitchToCompartment sc(cx, wp->object);
/* Remove wp from the list, then restore wp->shape->setter from wp. */
++rt->debuggerMutations;
JS_REMOVE_LINK(&wp->links);

View File

@ -201,7 +201,7 @@ NewArguments(JSContext *cx, JSObject *parent, uint32 argc, JSObject &callee)
: &js_ArgumentsClass,
proto, parent, NULL, false);
argsobj->setMap(cx->runtime->emptyArgumentsShape);
argsobj->setMap(cx->compartment->emptyArgumentsShape);
argsobj->setArgsLength(argc);
argsobj->setArgsData(data);
@ -989,7 +989,7 @@ NewDeclEnvObject(JSContext *cx, JSStackFrame *fp)
return NULL;
envobj->init(cx, &js_DeclEnvClass, NULL, &fp->scopeChain(), fp, false);
envobj->setMap(cx->runtime->emptyDeclEnvShape);
envobj->setMap(cx->compartment->emptyDeclEnvShape);
return envobj;
}

View File

@ -854,7 +854,7 @@ js_FinishGC(JSRuntime *rt)
js_DumpGCStats(rt, stdout);
#endif
/* Delete all remaining Compartments. Ideally only the atomsCompartment should be left. */
/* Delete all remaining Compartments. */
for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c) {
JSCompartment *comp = *c;
comp->finishArenaLists();
@ -1027,7 +1027,6 @@ JSRuntime::setGCTriggerFactor(uint32 factor)
for (JSCompartment **c = compartments.begin(); c != compartments.end(); ++c) {
(*c)->setGCLastBytes(gcLastBytes);
}
atomsCompartment->setGCLastBytes(gcLastBytes);
}
void
@ -1731,19 +1730,6 @@ MarkRuntime(JSTracer *trc)
for (ThreadDataIter i(rt); !i.empty(); i.popFront())
i.threadData()->mark(trc);
if (rt->emptyArgumentsShape)
rt->emptyArgumentsShape->trace(trc);
if (rt->emptyBlockShape)
rt->emptyBlockShape->trace(trc);
if (rt->emptyCallShape)
rt->emptyCallShape->trace(trc);
if (rt->emptyDeclEnvShape)
rt->emptyDeclEnvShape->trace(trc);
if (rt->emptyEnumeratorShape)
rt->emptyEnumeratorShape->trace(trc);
if (rt->emptyWithShape)
rt->emptyWithShape->trace(trc);
/*
* We mark extra roots as the last thing so it can use additional
* colors to implement cycle collection.
@ -2247,7 +2233,7 @@ PreGCCleanup(JSContext *cx, JSGCInvocationKind gckind)
#endif
) {
rt->gcRegenShapes = true;
rt->shapeGen = Shape::LAST_RESERVED_SHAPE;
rt->shapeGen = 0;
rt->protoHazardShape = 0;
}
@ -2287,7 +2273,9 @@ MarkAndSweepCompartment(JSContext *cx, JSCompartment *comp, JSGCInvocationKind g
r.front()->clearMarkBitmap();
for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
(*c)->mark(&gcmarker);
(*c)->markCrossCompartment(&gcmarker);
comp->mark(&gcmarker);
MarkRuntime(&gcmarker);
@ -2358,11 +2346,13 @@ MarkAndSweepCompartment(JSContext *cx, JSCompartment *comp, JSGCInvocationKind g
comp->finalizeStringArenaLists(cx);
TIMESTAMP(sweepStringEnd);
/*
* Unmark the runtime's property trees because we don't
* sweep them.
*/
js::PropertyTree::unmarkShapes(cx);
#ifdef DEBUG
/* Make sure that we didn't mark a Shape in another compartment. */
for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c) {
JS_ASSERT_IF(*c != comp, (*c)->propertyTree.checkShapesAllUnmarked(cx));
}
comp->propertyTree.dumpShapes(cx);
#endif
/*
* Destroy arenas after we finished the sweeping so finalizers can safely
@ -2400,6 +2390,9 @@ MarkAndSweep(JSContext *cx, JSGCInvocationKind gckind GCTIMER_PARAM)
for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
r.front()->clearMarkBitmap();
for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
(*c)->mark(&gcmarker);
MarkRuntime(&gcmarker);
js_MarkScriptFilenames(rt);
@ -2469,13 +2462,19 @@ MarkAndSweep(JSContext *cx, JSGCInvocationKind gckind GCTIMER_PARAM)
TIMESTAMP(sweepStringEnd);
SweepCompartments(cx, gckind);
/*
* Sweep the runtime's property trees after finalizing objects, in case any
* had watchpoints referencing tree nodes.
*
* Do this before sweeping compartments, so that we sweep all shapes in
* unreachable compartments.
*/
js::PropertyTree::sweepShapes(cx);
for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c) {
(*c)->propertyTree.sweepShapes(cx);
(*c)->propertyTree.dumpShapes(cx);
}
SweepCompartments(cx, gckind);
/*
* Sweep script filenames after sweeping functions in the generic loop
@ -2702,6 +2701,12 @@ GCUntilDone(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind GCTIM
AutoGCSession gcsession(cx);
/*
* We should not be depending on cx->compartment in the GC, so set it to
* NULL to look for violations.
*/
SwitchToCompartment(cx, (JSCompartment *)NULL);
JS_ASSERT(!rt->gcCurrentCompartment);
rt->gcCurrentCompartment = comp;

View File

@ -427,7 +427,7 @@ NewIteratorObject(JSContext *cx, uintN flags)
if (!obj)
return false;
obj->init(cx, &js_IteratorClass, NULL, NULL, NULL, false);
obj->setMap(cx->runtime->emptyEnumeratorShape);
obj->setMap(cx->compartment->emptyEnumeratorShape);
return obj;
}

View File

@ -3209,7 +3209,7 @@ js_NewWithObject(JSContext *cx, JSObject *proto, JSObject *parent, jsint depth)
JSStackFrame *priv = js_FloatingFrameIfGenerator(cx, cx->fp());
obj->init(cx, &js_WithClass, proto, parent, priv, false);
obj->setMap(cx->runtime->emptyWithShape);
obj->setMap(cx->compartment->emptyWithShape);
OBJ_SET_BLOCK_DEPTH(cx, obj, depth);
AutoObjectRooter tvr(cx, obj);
@ -3235,7 +3235,7 @@ js_NewBlockObject(JSContext *cx)
return NULL;
blockObj->init(cx, &js_BlockClass, NULL, NULL, NULL, false);
blockObj->setMap(cx->runtime->emptyBlockShape);
blockObj->setMap(cx->compartment->emptyBlockShape);
return blockObj;
}
@ -4682,7 +4682,7 @@ js_DefineNativeProperty(JSContext *cx, JSObject *obj, jsid id, const Value &valu
* member declaration.
*/
if (obj->isDelegate() && (attrs & (JSPROP_READONLY | JSPROP_SETTER)))
cx->runtime->protoHazardShape = js_GenerateShape(cx, false);
cx->runtime->protoHazardShape = js_GenerateShape(cx);
/* Use the object's class getter and setter by default. */
clasp = obj->getClass();

View File

@ -316,6 +316,10 @@ struct JSObject : js::gc::Cell {
* for DictionaryProperties assert that the scope is in dictionary mode and
* any reachable properties are flagged as dictionary properties.
*
* For native objects, this field is always a Shape. For non-native objects,
* it points to the singleton sharedNonNative JSObjectMap, whose shape field
* is SHAPELESS.
*
* NB: these private methods do *not* update this scope's shape to track
* lastProp->shape after they finish updating the linked list in the case
* where lastProp is updated. It is up to calling code in jsscope.cpp to

View File

@ -79,69 +79,62 @@ PropertyTree::finish()
JS_FinishArenaPool(&arenaPool);
}
/*
* NB: Called with cx->runtime->gcLock held if gcLocked is true.
* On failure, return null after unlocking the GC and reporting out of memory.
*/
/* On failure, returns NULL. Does not report out of memory. */
Shape *
PropertyTree::newShape(JSContext *cx, bool gcLocked)
PropertyTree::newShapeUnchecked()
{
Shape *shape;
if (!gcLocked)
JS_LOCK_GC(cx->runtime);
shape = freeList;
if (shape) {
shape->removeFree();
} else {
JS_ARENA_ALLOCATE_CAST(shape, Shape *, &arenaPool, sizeof(Shape));
if (!shape) {
JS_UNLOCK_GC(cx->runtime);
JS_ReportOutOfMemory(cx);
if (!shape)
return NULL;
}
}
if (!gcLocked)
JS_UNLOCK_GC(cx->runtime);
JS_RUNTIME_METER(cx->runtime, livePropTreeNodes);
JS_RUNTIME_METER(cx->runtime, totalPropTreeNodes);
#ifdef DEBUG
shape->compartment = compartment;
#endif
JS_COMPARTMENT_METER(compartment->livePropTreeNodes++);
JS_COMPARTMENT_METER(compartment->totalPropTreeNodes++);
return shape;
}
/*
* NB: Called with cx->runtime->gcLock held, always.
* On failure, return null after unlocking the GC and reporting out of memory.
*/
KidsChunk *
KidsChunk::create(JSContext *cx)
Shape *
PropertyTree::newShape(JSContext *cx)
{
KidsChunk *chunk;
chunk = (KidsChunk *) js_calloc(sizeof *chunk);
if (!chunk) {
JS_UNLOCK_GC(cx->runtime);
Shape *shape = newShapeUnchecked();
if (!shape)
JS_ReportOutOfMemory(cx);
return shape;
}
static KidsHash *
HashChildren(Shape *kid1, Shape *kid2)
{
void *mem = js_malloc(sizeof(KidsHash));
if (!mem)
return NULL;
KidsHash *hash = new (mem) KidsHash();
if (!hash->init(2)) {
js_free(hash);
return NULL;
}
JS_RUNTIME_METER(cx->runtime, propTreeKidsChunks);
return chunk;
KidsHash::AddPtr addPtr = hash->lookupForAdd(kid1);
JS_ALWAYS_TRUE(hash->add(addPtr, kid1));
addPtr = hash->lookupForAdd(kid2);
JS_ASSERT(!addPtr.found());
JS_ALWAYS_TRUE(hash->add(addPtr, kid2));
return hash;
}
KidsChunk *
KidsChunk::destroy(JSContext *cx, KidsChunk *chunk)
{
JS_RUNTIME_UNMETER(cx->runtime, propTreeKidsChunks);
KidsChunk *nextChunk = chunk->next;
js_free(chunk);
return nextChunk;
}
/*
* NB: Called with cx->runtime->gcLock held, always.
* On failure, return false after unlocking the GC and reporting out of memory.
*/
bool
PropertyTree::insertChild(JSContext *cx, Shape *parent, Shape *child)
{
@ -150,89 +143,45 @@ PropertyTree::insertChild(JSContext *cx, Shape *parent, Shape *child)
JS_ASSERT(!child->inDictionary());
JS_ASSERT(!JSID_IS_VOID(parent->id));
JS_ASSERT(!JSID_IS_VOID(child->id));
child->setParent(parent);
JS_ASSERT(cx->compartment == compartment);
JS_ASSERT(child->compartment == parent->compartment);
KidsPointer *kidp = &parent->kids;
if (kidp->isNull()) {
child->setParent(parent);
kidp->setShape(child);
return true;
}
Shape *shape;
if (kidp->isShape()) {
shape = kidp->toShape();
Shape *shape = kidp->toShape();
JS_ASSERT(shape != child);
if (shape->matches(child)) {
/*
* Duplicate child created while racing to getChild on the same
* node label. See PropertyTree::getChild, further below.
*/
JS_RUNTIME_METER(cx->runtime, duplicatePropTreeNodes);
}
JS_ASSERT(!shape->matches(child));
KidsChunk *chunk = KidsChunk::create(cx);
if (!chunk)
return false;
parent->kids.setChunk(chunk);
chunk->kids[0] = shape;
chunk->kids[1] = child;
return true;
}
if (kidp->isChunk()) {
KidsChunk **chunkp;
KidsChunk *chunk = kidp->toChunk();
do {
for (uintN i = 0; i < MAX_KIDS_PER_CHUNK; i++) {
shape = chunk->kids[i];
if (!shape) {
chunk->kids[i] = child;
return true;
}
JS_ASSERT(shape != child);
if (shape->matches(child)) {
/*
* Duplicate child, see comment above. In this case, we
* must let the duplicate be inserted at this level in the
* tree, so we keep iterating, looking for an empty slot in
* which to insert.
*/
JS_ASSERT(shape != child);
JS_RUNTIME_METER(cx->runtime, duplicatePropTreeNodes);
}
}
chunkp = &chunk->next;
} while ((chunk = *chunkp) != NULL);
chunk = KidsChunk::create(cx);
if (!chunk)
return false;
*chunkp = chunk;
chunk->kids[0] = child;
return true;
}
KidsHash *hash = kidp->toHash();
KidsHash::AddPtr addPtr = hash->lookupForAdd(child);
if (!addPtr) {
if (!hash->add(addPtr, child)) {
JS_UNLOCK_GC(cx->runtime);
KidsHash *hash = HashChildren(shape, child);
if (!hash) {
JS_ReportOutOfMemory(cx);
return false;
}
} else {
// FIXME ignore duplicate child case here, going thread-local soon!
kidp->setHash(hash);
child->setParent(parent);
return true;
}
KidsHash *hash = kidp->toHash();
KidsHash::AddPtr addPtr = hash->lookupForAdd(child);
JS_ASSERT(!addPtr.found());
if (!hash->add(addPtr, child)) {
JS_ReportOutOfMemory(cx);
return false;
}
child->setParent(parent);
return true;
}
/* NB: Called with cx->runtime->gcLock held. */
void
PropertyTree::removeChild(JSContext *cx, Shape *child)
PropertyTree::removeChild(Shape *child)
{
JS_ASSERT(!child->inDictionary());
@ -248,97 +197,9 @@ PropertyTree::removeChild(JSContext *cx, Shape *child)
return;
}
if (kidp->isChunk()) {
KidsChunk *list = kidp->toChunk();
KidsChunk *chunk = list;
KidsChunk **chunkp = &list;
do {
for (uintN i = 0; i < MAX_KIDS_PER_CHUNK; i++) {
if (chunk->kids[i] == child) {
KidsChunk *lastChunk = chunk;
uintN j;
if (!lastChunk->next) {
j = i + 1;
} else {
j = 0;
do {
chunkp = &lastChunk->next;
lastChunk = *chunkp;
} while (lastChunk->next);
}
for (; j < MAX_KIDS_PER_CHUNK; j++) {
if (!lastChunk->kids[j])
break;
}
--j;
if (chunk != lastChunk || j > i)
chunk->kids[i] = lastChunk->kids[j];
lastChunk->kids[j] = NULL;
if (j == 0) {
*chunkp = NULL;
if (!list)
parent->kids.setNull();
KidsChunk::destroy(cx, lastChunk);
}
return;
}
}
chunkp = &chunk->next;
} while ((chunk = *chunkp) != NULL);
return;
}
kidp->toHash()->remove(child);
}
static KidsHash *
HashChunks(KidsChunk *chunk, uintN n)
{
void *mem = js_malloc(sizeof(KidsHash));
if (!mem)
return NULL;
KidsHash *hash = new (mem) KidsHash();
if (!hash->init(n)) {
js_free(hash);
return NULL;
}
do {
for (uintN i = 0; i < MAX_KIDS_PER_CHUNK; i++) {
Shape *shape = chunk->kids[i];
if (!shape)
break;
KidsHash::AddPtr addPtr = hash->lookupForAdd(shape);
if (!addPtr) {
/*
* Infallible, we right-sized via hash->init(n) just above.
* Assert just in case jshashtable.h ever regresses.
*/
JS_ALWAYS_TRUE(hash->add(addPtr, shape));
} else {
/*
* Duplicate child case, we don't handle this race,
* multi-threaded shapes are going away...
*/
}
}
} while ((chunk = chunk->next) != NULL);
return hash;
}
/*
* Called without cx->runtime->gcLock held. This function acquires that lock
* only when inserting a new child. Thus there may be races to find or add a
* node that result in duplicates. We expect such races to be rare!
*
* We use cx->runtime->gcLock, not ...->rtLock, to avoid nesting the former
* inside the latter in js_GenerateShape below.
*/
Shape *
PropertyTree::getChild(JSContext *cx, Shape *parent, const Shape &child)
{
@ -348,95 +209,36 @@ PropertyTree::getChild(JSContext *cx, Shape *parent, const Shape &child)
JS_ASSERT(!JSID_IS_VOID(parent->id));
/*
* Because chunks are appended at the end and never deleted except by
* the GC, we can search without taking the runtime's GC lock. We may
* miss a matching shape added by another thread, and make a duplicate
* one, but that is an unlikely, therefore small, cost. The property
* tree has extremely low fan-out below its root in popular embeddings
* with real-world workloads.
*
* Patterns such as defining closures that capture a constructor's
* environment as getters or setters on the new object that is passed
* in as |this| can significantly increase fan-out below the property
* The property tree has extremely low fan-out below its root in
* popular embeddings with real-world workloads. Patterns such as
* defining closures that capture a constructor's environment as
* getters or setters on the new object that is passed in as
* |this| can significantly increase fan-out below the property
* tree root -- see bug 335700 for details.
*/
KidsPointer *kidp = &parent->kids;
if (!kidp->isNull()) {
if (kidp->isShape()) {
shape = kidp->toShape();
if (shape->matches(&child))
return shape;
} else if (kidp->isChunk()) {
KidsChunk *chunk = kidp->toChunk();
uintN n = 0;
do {
for (uintN i = 0; i < MAX_KIDS_PER_CHUNK; i++) {
shape = chunk->kids[i];
if (!shape) {
n += i;
if (n >= CHUNK_HASH_THRESHOLD) {
/*
* kidp->isChunk() was true, but if we're racing it
* may not be by this point. FIXME: thread "safety"
* is for the birds!
*/
if (!kidp->isHash()) {
chunk = kidp->toChunk();
KidsHash *hash = HashChunks(chunk, n);
if (!hash) {
JS_ReportOutOfMemory(cx);
return NULL;
}
JS_LOCK_GC(cx->runtime);
if (kidp->isHash()) {
hash->~KidsHash();
js_free(hash);
} else {
// FIXME unsafe race with kidp->is/toChunk() above.
// But this is all going single-threaded soon...
while (chunk)
chunk = KidsChunk::destroy(cx, chunk);
kidp->setHash(hash);
}
goto locked_not_found;
}
}
goto not_found;
}
if (shape->matches(&child))
return shape;
}
n += MAX_KIDS_PER_CHUNK;
} while ((chunk = chunk->next) != NULL);
} else {
JS_LOCK_GC(cx->runtime);
shape = *kidp->toHash()->lookup(&child);
if (shape)
goto out;
goto locked_not_found;
}
if (kidp->isShape()) {
shape = kidp->toShape();
if (shape->matches(&child))
return shape;
} else if (kidp->isHash()) {
shape = *kidp->toHash()->lookup(&child);
if (shape)
return shape;
} else {
/* If kidp->isNull(), we always insert. */
}
not_found:
JS_LOCK_GC(cx->runtime);
locked_not_found:
shape = newShape(cx, true);
shape = newShape(cx);
if (!shape)
return NULL;
new (shape) Shape(child.id, child.rawGetter, child.rawSetter, child.slot, child.attrs,
child.flags, child.shortid, js_GenerateShape(cx, true));
child.flags, child.shortid, js_GenerateShape(cx));
if (!insertChild(cx, parent, shape))
return NULL;
out:
JS_UNLOCK_GC(cx->runtime);
return shape;
}
@ -447,23 +249,6 @@ KidsPointer::checkConsistency(const Shape *aKid) const
{
if (isShape()) {
JS_ASSERT(toShape() == aKid);
} else if (isChunk()) {
bool found = false;
for (KidsChunk *chunk = toChunk(); chunk; chunk = chunk->next) {
for (uintN i = 0; i < MAX_KIDS_PER_CHUNK; i++) {
if (!chunk->kids[i]) {
JS_ASSERT(!chunk->next);
for (uintN j = i + 1; j < MAX_KIDS_PER_CHUNK; j++)
JS_ASSERT(!chunk->kids[j]);
break;
}
if (chunk->kids[i] == aKid) {
JS_ASSERT(!found);
found = true;
}
}
}
JS_ASSERT(found);
} else {
JS_ASSERT(isHash());
KidsHash *hash = toHash();
@ -541,29 +326,17 @@ void
js::PropertyTree::meter(JSBasicStats *bs, Shape *node)
{
uintN nkids = 0;
const KidsPointer &kids = node->kids;
if (!kids.isNull()) {
if (kids.isShape()) {
meter(bs, kids.toShape());
nkids = 1;
} else if (kids.isChunk()) {
for (KidsChunk *chunk = kids.toChunk(); chunk; chunk = chunk->next) {
for (uintN i = 0; i < MAX_KIDS_PER_CHUNK; i++) {
Shape *kid = chunk->kids[i];
if (!kid)
break;
meter(bs, kid);
nkids++;
}
}
} else {
const KidsHash &hash = *kids.toHash();
for (KidsHash::Range range = hash.all(); !range.empty(); range.popFront()) {
Shape *kid = range.front();
meter(bs, kid);
nkids++;
}
const KidsPointer &kidp = node->kids;
if (kidp.isShape()) {
meter(bs, kidp.toShape());
nkids = 1;
} else if (kidp.isHash()) {
const KidsHash &hash = *kidp.toHash();
for (KidsHash::Range range = hash.all(); !range.empty(); range.popFront()) {
Shape *kid = range.front();
meter(bs, kid);
nkids++;
}
}
@ -588,17 +361,6 @@ Shape::dumpSubtree(JSContext *cx, int level, FILE *fp) const
Shape *kid = kids.toShape();
JS_ASSERT(kid->parent == this);
kid->dumpSubtree(cx, level, fp);
} else if (kids.isChunk()) {
KidsChunk *chunk = kids.toChunk();
do {
for (uintN i = 0; i < MAX_KIDS_PER_CHUNK; i++) {
Shape *kid = chunk->kids[i];
if (!kid)
break;
JS_ASSERT(kid->parent == this);
kid->dumpSubtree(cx, level, fp);
}
} while ((chunk = chunk->next) != NULL);
} else {
const KidsHash &hash = *kids.toHash();
for (KidsHash::Range range = hash.all(); !range.empty(); range.popFront()) {
@ -614,20 +376,12 @@ Shape::dumpSubtree(JSContext *cx, int level, FILE *fp) const
#endif /* DEBUG */
JS_ALWAYS_INLINE void
js::PropertyTree::orphanKids(JSContext *cx, Shape *shape)
js::PropertyTree::orphanChildren(Shape *shape)
{
KidsPointer *kidp = &shape->kids;
JS_ASSERT(!kidp->isNull());
/*
* Note that JS_PROPERTY_TREE(cx).removeChild(cx, shape) precedes the call
* to orphanKids in sweepShapes, below. Therefore the grandparent must have
* either no kids left, or else space in chunks or a hash for more than one
* kid.
*/
JS_ASSERT_IF(shape->parent, !shape->parent->kids.isShape());
if (kidp->isShape()) {
Shape *kid = kidp->toShape();
@ -635,21 +389,6 @@ js::PropertyTree::orphanKids(JSContext *cx, Shape *shape)
JS_ASSERT(kid->parent == shape);
kid->parent = NULL;
}
} else if (kidp->isChunk()) {
KidsChunk *chunk = kidp->toChunk();
do {
for (uintN i = 0; i < MAX_KIDS_PER_CHUNK; i++) {
Shape *kid = chunk->kids[i];
if (!kid)
break;
if (!JSID_IS_VOID(kid->id)) {
JS_ASSERT(kid->parent == shape);
kid->parent = NULL;
}
}
} while ((chunk = KidsChunk::destroy(cx, chunk)) != NULL);
} else {
KidsHash *hash = kidp->toHash();
@ -671,12 +410,14 @@ js::PropertyTree::orphanKids(JSContext *cx, Shape *shape)
void
js::PropertyTree::sweepShapes(JSContext *cx)
{
JSRuntime *rt = compartment->rt;
#ifdef DEBUG
JSBasicStats bs;
uint32 livePropCapacity = 0, totalLiveCount = 0;
static FILE *logfp;
if (!logfp) {
if (const char *filename = cx->runtime->propTreeStatFilename)
if (const char *filename = rt->propTreeStatFilename)
logfp = fopen(filename, "w");
}
@ -685,18 +426,18 @@ js::PropertyTree::sweepShapes(JSContext *cx)
uint32 empties;
{
typedef JSRuntime::EmptyShapeSet HS;
typedef JSCompartment::EmptyShapeSet HS;
HS &h = cx->runtime->emptyShapes;
HS &h = compartment->emptyShapes;
empties = h.count();
MeterKidCount(&bs, empties);
for (HS::Range r = h.all(); !r.empty(); r.popFront())
meter(&bs, r.front());
}
double props = cx->runtime->liveObjectPropsPreSweep;
double nodes = cx->runtime->livePropTreeNodes;
double dicts = cx->runtime->liveDictModeNodes;
double props = rt->liveObjectPropsPreSweep;
double nodes = compartment->livePropTreeNodes;
double dicts = compartment->liveDictModeNodes;
/* Empty scope nodes are never hashed, so subtract them from nodes. */
JS_ASSERT(nodes - dicts == bs.sum);
@ -718,7 +459,7 @@ js::PropertyTree::sweepShapes(JSContext *cx)
* already GC'ed from the root ply, but we will avoid re-orphaning their
* kids, because the kids member will already be null.
*/
JSArena **ap = &JS_PROPERTY_TREE(cx).arenaPool.first.next;
JSArena **ap = &arenaPool.first.next;
while (JSArena *a = *ap) {
Shape *limit = (Shape *) a->avail;
uintN liveCount = 0;
@ -738,11 +479,11 @@ js::PropertyTree::sweepShapes(JSContext *cx)
*/
if (shape->marked()) {
shape->clearMark();
if (cx->runtime->gcRegenShapes) {
if (rt->gcRegenShapes) {
if (shape->hasRegenFlag())
shape->clearRegenFlag();
else
shape->shape = js_RegenerateShapeForGC(cx);
shape->shape = js_RegenerateShapeForGC(rt);
}
liveCount++;
continue;
@ -750,18 +491,18 @@ js::PropertyTree::sweepShapes(JSContext *cx)
#ifdef DEBUG
if ((shape->flags & Shape::SHARED_EMPTY) &&
cx->runtime->meterEmptyShapes()) {
cx->runtime->emptyShapes.remove((EmptyShape *) shape);
rt->meterEmptyShapes()) {
compartment->emptyShapes.remove((EmptyShape *) shape);
}
#endif
if (shape->inDictionary()) {
JS_RUNTIME_UNMETER(cx->runtime, liveDictModeNodes);
JS_COMPARTMENT_METER(compartment->liveDictModeNodes--);
} else {
/*
* Here, shape is garbage to collect, but its parent might not
* be, so we may have to remove it from its parent's kids hash,
* chunk list, or kid singleton pointer set.
* be, so we may have to remove it from its parent's kids hash
* or kid singleton pointer set.
*
* Without a separate mark-clearing pass, we can't tell whether
* shape->parent is live at this point, so we must remove shape
@ -771,10 +512,10 @@ js::PropertyTree::sweepShapes(JSContext *cx)
* tree node's kids' parent links when sweeping that node.
*/
if (shape->parent)
JS_PROPERTY_TREE(cx).removeChild(cx, shape);
removeChild(shape);
if (!shape->kids.isNull())
orphanKids(cx, shape);
orphanChildren(shape);
}
/*
@ -782,15 +523,15 @@ js::PropertyTree::sweepShapes(JSContext *cx)
* shape is on the freelist.
*/
shape->freeTable(cx);
shape->insertFree(&JS_PROPERTY_TREE(cx).freeList);
JS_RUNTIME_UNMETER(cx->runtime, livePropTreeNodes);
shape->insertFree(&freeList);
JS_COMPARTMENT_METER(compartment->livePropTreeNodes--);
}
/* If a contains no live properties, return it to the malloc heap. */
if (liveCount == 0) {
for (Shape *shape = (Shape *) a->base; shape < limit; shape++)
shape->removeFree();
JS_ARENA_DESTROY(&JS_PROPERTY_TREE(cx).arenaPool, a, ap);
JS_ARENA_DESTROY(&arenaPool, a, ap);
} else {
#ifdef DEBUG
livePropCapacity += limit - (Shape *) a->base;
@ -804,7 +545,7 @@ js::PropertyTree::sweepShapes(JSContext *cx)
if (logfp) {
fprintf(logfp,
"\nProperty tree stats for gcNumber %lu\n",
(unsigned long) cx->runtime->gcNumber);
(unsigned long) rt->gcNumber);
fprintf(logfp, "arenautil %g%%\n",
(totalLiveCount && livePropCapacity)
@ -813,93 +554,77 @@ js::PropertyTree::sweepShapes(JSContext *cx)
#define RATE(f1, f2) (((double)js_scope_stats.f1 / js_scope_stats.f2) * 100.0)
fprintf(logfp,
"Scope search stats:\n"
" searches: %6u\n"
" hits: %6u %5.2f%% of searches\n"
" misses: %6u %5.2f%%\n"
" hashes: %6u %5.2f%%\n"
" hashHits: %6u %5.2f%% (%5.2f%% of hashes)\n"
" hashMisses: %6u %5.2f%% (%5.2f%%)\n"
" steps: %6u %5.2f%% (%5.2f%%)\n"
" stepHits: %6u %5.2f%% (%5.2f%%)\n"
" stepMisses: %6u %5.2f%% (%5.2f%%)\n"
" initSearches: %6u\n"
" changeSearches: %6u\n"
" tableAllocFails: %6u\n"
" toDictFails: %6u\n"
" wrapWatchFails: %6u\n"
" adds: %6u\n"
" addFails: %6u\n"
" puts: %6u\n"
" redundantPuts: %6u\n"
" putFails: %6u\n"
" changes: %6u\n"
" changeFails: %6u\n"
" compresses: %6u\n"
" grows: %6u\n"
" removes: %6u\n"
" removeFrees: %6u\n"
" uselessRemoves: %6u\n"
" shrinks: %6u\n",
js_scope_stats.searches,
js_scope_stats.hits, RATE(hits, searches),
js_scope_stats.misses, RATE(misses, searches),
js_scope_stats.hashes, RATE(hashes, searches),
js_scope_stats.hashHits, RATE(hashHits, searches), RATE(hashHits, hashes),
js_scope_stats.hashMisses, RATE(hashMisses, searches), RATE(hashMisses, hashes),
js_scope_stats.steps, RATE(steps, searches), RATE(steps, hashes),
js_scope_stats.stepHits, RATE(stepHits, searches), RATE(stepHits, hashes),
js_scope_stats.stepMisses, RATE(stepMisses, searches), RATE(stepMisses, hashes),
js_scope_stats.initSearches,
js_scope_stats.changeSearches,
js_scope_stats.tableAllocFails,
js_scope_stats.toDictFails,
js_scope_stats.wrapWatchFails,
js_scope_stats.adds,
js_scope_stats.addFails,
js_scope_stats.puts,
js_scope_stats.redundantPuts,
js_scope_stats.putFails,
js_scope_stats.changes,
js_scope_stats.changeFails,
js_scope_stats.compresses,
js_scope_stats.grows,
js_scope_stats.removes,
js_scope_stats.removeFrees,
js_scope_stats.uselessRemoves,
js_scope_stats.shrinks);
/* This data is global, so only print it once per GC. */
if (compartment == rt->atomsCompartment) {
fprintf(logfp,
"Scope search stats:\n"
" searches: %6u\n"
" hits: %6u %5.2f%% of searches\n"
" misses: %6u %5.2f%%\n"
" hashes: %6u %5.2f%%\n"
" hashHits: %6u %5.2f%% (%5.2f%% of hashes)\n"
" hashMisses: %6u %5.2f%% (%5.2f%%)\n"
" steps: %6u %5.2f%% (%5.2f%%)\n"
" stepHits: %6u %5.2f%% (%5.2f%%)\n"
" stepMisses: %6u %5.2f%% (%5.2f%%)\n"
" initSearches: %6u\n"
" changeSearches: %6u\n"
" tableAllocFails: %6u\n"
" toDictFails: %6u\n"
" wrapWatchFails: %6u\n"
" adds: %6u\n"
" addFails: %6u\n"
" puts: %6u\n"
" redundantPuts: %6u\n"
" putFails: %6u\n"
" changes: %6u\n"
" changeFails: %6u\n"
" compresses: %6u\n"
" grows: %6u\n"
" removes: %6u\n"
" removeFrees: %6u\n"
" uselessRemoves: %6u\n"
" shrinks: %6u\n",
js_scope_stats.searches,
js_scope_stats.hits, RATE(hits, searches),
js_scope_stats.misses, RATE(misses, searches),
js_scope_stats.hashes, RATE(hashes, searches),
js_scope_stats.hashHits, RATE(hashHits, searches), RATE(hashHits, hashes),
js_scope_stats.hashMisses, RATE(hashMisses, searches), RATE(hashMisses, hashes),
js_scope_stats.steps, RATE(steps, searches), RATE(steps, hashes),
js_scope_stats.stepHits, RATE(stepHits, searches), RATE(stepHits, hashes),
js_scope_stats.stepMisses, RATE(stepMisses, searches), RATE(stepMisses, hashes),
js_scope_stats.initSearches,
js_scope_stats.changeSearches,
js_scope_stats.tableAllocFails,
js_scope_stats.toDictFails,
js_scope_stats.wrapWatchFails,
js_scope_stats.adds,
js_scope_stats.addFails,
js_scope_stats.puts,
js_scope_stats.redundantPuts,
js_scope_stats.putFails,
js_scope_stats.changes,
js_scope_stats.changeFails,
js_scope_stats.compresses,
js_scope_stats.grows,
js_scope_stats.removes,
js_scope_stats.removeFrees,
js_scope_stats.uselessRemoves,
js_scope_stats.shrinks);
}
#undef RATE
fflush(logfp);
}
if (const char *filename = cx->runtime->propTreeDumpFilename) {
char pathname[1024];
JS_snprintf(pathname, sizeof pathname, "%s.%lu",
filename, (unsigned long)cx->runtime->gcNumber);
FILE *dumpfp = fopen(pathname, "w");
if (dumpfp) {
typedef JSRuntime::EmptyShapeSet HS;
HS &h = cx->runtime->emptyShapes;
for (HS::Range r = h.all(); !r.empty(); r.popFront()) {
Shape *empty = r.front();
empty->dumpSubtree(cx, 0, dumpfp);
putc('\n', dumpfp);
}
fclose(dumpfp);
}
}
#endif /* DEBUG */
}
void
js::PropertyTree::unmarkShapes(JSContext *cx)
bool
js::PropertyTree::checkShapesAllUnmarked(JSContext *cx)
{
JSArena **ap = &JS_PROPERTY_TREE(cx).arenaPool.first.next;
JSArena **ap = &arenaPool.first.next;
while (JSArena *a = *ap) {
Shape *limit = (Shape *) a->avail;
@ -909,8 +634,37 @@ js::PropertyTree::unmarkShapes(JSContext *cx)
continue;
if (shape->marked())
shape->clearMark();
return false;
}
ap = &a->next;
}
return true;
}
void
js::PropertyTree::dumpShapes(JSContext *cx)
{
#ifdef DEBUG
JSRuntime *rt = compartment->rt;
if (const char *filename = rt->propTreeDumpFilename) {
char pathname[1024];
JS_snprintf(pathname, sizeof pathname, "%s.%lu",
filename, (unsigned long)rt->gcNumber);
FILE *dumpfp = fopen(pathname, "w");
if (dumpfp) {
typedef JSCompartment::EmptyShapeSet HS;
HS &h = compartment->emptyShapes;
for (HS::Range r = h.all(); !r.empty(); r.popFront()) {
Shape *empty = r.front();
empty->dumpSubtree(cx, 0, dumpfp);
putc('\n', dumpfp);
}
fclose(dumpfp);
}
}
#endif
}

View File

@ -46,19 +46,6 @@
namespace js {
enum {
MAX_KIDS_PER_CHUNK = 10U,
CHUNK_HASH_THRESHOLD = 30U
};
struct KidsChunk {
js::Shape *kids[MAX_KIDS_PER_CHUNK];
KidsChunk *next;
static KidsChunk *create(JSContext *cx);
static KidsChunk *destroy(JSContext *cx, KidsChunk *chunk);
};
struct ShapeHasher {
typedef js::Shape *Key;
typedef const js::Shape *Lookup;
@ -73,9 +60,8 @@ class KidsPointer {
private:
enum {
SHAPE = 0,
CHUNK = 1,
HASH = 2,
TAG = 3
HASH = 1,
TAG = 1
};
jsuword w;
@ -84,7 +70,6 @@ class KidsPointer {
bool isNull() const { return !w; }
void setNull() { w = 0; }
bool isShapeOrNull() const { return (w & TAG) == SHAPE; }
bool isShape() const { return (w & TAG) == SHAPE && !isNull(); }
js::Shape *toShape() const {
JS_ASSERT(isShape());
@ -96,17 +81,6 @@ class KidsPointer {
w = reinterpret_cast<jsuword>(shape) | SHAPE;
}
bool isChunk() const { return (w & TAG) == CHUNK; }
KidsChunk *toChunk() const {
JS_ASSERT(isChunk());
return reinterpret_cast<KidsChunk *>(w & ~jsuword(TAG));
}
void setChunk(KidsChunk *chunk) {
JS_ASSERT(chunk);
JS_ASSERT((reinterpret_cast<jsuword>(chunk) & TAG) == 0);
w = reinterpret_cast<jsuword>(chunk) | CHUNK;
}
bool isHash() const { return (w & TAG) == HASH; }
KidsHash *toHash() const {
JS_ASSERT(isHash());
@ -127,24 +101,35 @@ class PropertyTree
{
friend struct ::JSFunction;
JSArenaPool arenaPool;
js::Shape *freeList;
JSCompartment *compartment;
JSArenaPool arenaPool;
js::Shape *freeList;
bool insertChild(JSContext *cx, js::Shape *parent, js::Shape *child);
void removeChild(JSContext *cx, js::Shape *child);
void removeChild(js::Shape *child);
PropertyTree();
public:
enum { MAX_HEIGHT = 64 };
PropertyTree(JSCompartment *comp)
: compartment(comp), freeList(NULL)
{
PodZero(&arenaPool);
}
bool init();
void finish();
js::Shape *newShape(JSContext *cx, bool gcLocked = false);
js::Shape *newShapeUnchecked();
js::Shape *newShape(JSContext *cx);
js::Shape *getChild(JSContext *cx, js::Shape *parent, const js::Shape &child);
static void orphanKids(JSContext *cx, js::Shape *shape);
static void sweepShapes(JSContext *cx);
static void unmarkShapes(JSContext *cx);
void orphanChildren(js::Shape *shape);
void sweepShapes(JSContext *cx);
bool checkShapesAllUnmarked(JSContext *cx);
void dumpShapes(JSContext *cx);
#ifdef DEBUG
static void meter(JSBasicStats *bs, js::Shape *node);
#endif

View File

@ -71,12 +71,10 @@ using namespace js;
using namespace js::gc;
uint32
js_GenerateShape(JSContext *cx, bool gcLocked)
js_GenerateShape(JSRuntime *rt)
{
JSRuntime *rt;
uint32 shape;
rt = cx->runtime;
shape = JS_ATOMIC_INCREMENT(&rt->shapeGen);
JS_ASSERT(shape != 0);
if (shape >= SHAPE_OVERFLOW_BIT) {
@ -90,13 +88,19 @@ js_GenerateShape(JSContext *cx, bool gcLocked)
shape = SHAPE_OVERFLOW_BIT;
#ifdef JS_THREADSAFE
Conditionally<AutoLockGC> lockIf(!gcLocked, rt);
AutoLockGC lockIf(rt);
#endif
TriggerGC(rt);
}
return shape;
}
uint32
js_GenerateShape(JSContext *cx)
{
return js_GenerateShape(cx->runtime);
}
bool
JSObject::ensureClassReservedSlotsForEmptyObject(JSContext *cx)
{
@ -198,11 +202,10 @@ Shape::hashify(JSRuntime *rt)
#endif
static inline bool
InitField(JSContext *cx, EmptyShape *JSRuntime:: *field, Class *clasp, uint32 shape)
InitField(JSCompartment *comp, EmptyShape *JSCompartment:: *field, Class *clasp)
{
if (EmptyShape *emptyShape = EmptyShape::create(cx, clasp)) {
cx->runtime->*field = emptyShape;
JS_ASSERT(emptyShape->shape == shape);
if (EmptyShape *emptyShape = EmptyShape::create(comp, clasp)) {
comp->*field = emptyShape;
return true;
}
return false;
@ -210,7 +213,7 @@ InitField(JSContext *cx, EmptyShape *JSRuntime:: *field, Class *clasp, uint32 sh
/* static */
bool
Shape::initRuntimeState(JSContext *cx)
Shape::initEmptyShapes(JSCompartment *comp)
{
/*
* NewArguments allocates dslots to have enough room for the argc of the
@ -222,12 +225,10 @@ Shape::initRuntimeState(JSContext *cx)
* arguments objects. This helps ensure that any arguments object needing
* its own mutable scope (with unique shape) is a rare event.
*/
if (!InitField(cx, &JSRuntime::emptyArgumentsShape, &js_ArgumentsClass,
Shape::EMPTY_ARGUMENTS_SHAPE)) {
if (!InitField(comp, &JSCompartment::emptyArgumentsShape, &js_ArgumentsClass))
return false;
}
if (!InitField(cx, &JSRuntime::emptyBlockShape, &js_BlockClass, Shape::EMPTY_BLOCK_SHAPE))
if (!InitField(comp, &JSCompartment::emptyBlockShape, &js_BlockClass))
return false;
/*
@ -235,23 +236,19 @@ Shape::initRuntimeState(JSContext *cx)
* and vars do not force the creation of a mutable scope for the particular
* call object being accessed.
*/
if (!InitField(cx, &JSRuntime::emptyCallShape, &js_CallClass, Shape::EMPTY_CALL_SHAPE))
if (!InitField(comp, &JSCompartment::emptyCallShape, &js_CallClass))
return false;
/* A DeclEnv object holds the name binding for a named function expression. */
if (!InitField(cx, &JSRuntime::emptyDeclEnvShape, &js_DeclEnvClass,
Shape::EMPTY_DECL_ENV_SHAPE)) {
if (!InitField(comp, &JSCompartment::emptyDeclEnvShape, &js_DeclEnvClass))
return false;
}
/* Non-escaping native enumerator objects share this empty scope. */
if (!InitField(cx, &JSRuntime::emptyEnumeratorShape, &js_IteratorClass,
Shape::EMPTY_ENUMERATOR_SHAPE)) {
if (!InitField(comp, &JSCompartment::emptyEnumeratorShape, &js_IteratorClass))
return false;
}
/* Same drill for With objects. */
if (!InitField(cx, &JSRuntime::emptyWithShape, &js_WithClass, Shape::EMPTY_WITH_SHAPE))
if (!InitField(comp, &JSCompartment::emptyWithShape, &js_WithClass))
return false;
return true;
@ -259,16 +256,14 @@ Shape::initRuntimeState(JSContext *cx)
/* static */
void
Shape::finishRuntimeState(JSContext *cx)
Shape::finishEmptyShapes(JSCompartment *comp)
{
JSRuntime *rt = cx->runtime;
rt->emptyArgumentsShape = NULL;
rt->emptyBlockShape = NULL;
rt->emptyCallShape = NULL;
rt->emptyDeclEnvShape = NULL;
rt->emptyEnumeratorShape = NULL;
rt->emptyWithShape = NULL;
comp->emptyArgumentsShape = NULL;
comp->emptyBlockShape = NULL;
comp->emptyCallShape = NULL;
comp->emptyDeclEnvShape = NULL;
comp->emptyEnumeratorShape = NULL;
comp->emptyWithShape = NULL;
}
JS_STATIC_ASSERT(sizeof(JSHashNumber) == 4);
@ -591,12 +586,12 @@ Shape::newDictionaryShape(JSContext *cx, const Shape &child, Shape **listp)
new (dprop) Shape(child.id, child.rawGetter, child.rawSetter, child.slot, child.attrs,
(child.flags & ~FROZEN) | IN_DICTIONARY, child.shortid,
js_GenerateShape(cx, false), child.slotSpan);
js_GenerateShape(cx), child.slotSpan);
dprop->listp = NULL;
dprop->insertIntoDictionary(listp);
JS_RUNTIME_METER(cx->runtime, liveDictModeNodes);
JS_COMPARTMENT_METER(cx->compartment->liveDictModeNodes++);
return dprop;
}
@ -613,7 +608,7 @@ Shape::newDictionaryShapeForAddProperty(JSContext *cx, jsid id,
shape->parent = NULL;
shape->listp = NULL;
JS_RUNTIME_METER(cx->runtime, liveDictModeNodes);
JS_COMPARTMENT_METER(cx->compartment->liveDictModeNodes++);
return shape;
}
@ -758,12 +753,6 @@ JSObject::checkShapeConsistency()
}
prev = shape;
}
if (throttle == 0) {
JS_ASSERT(!shape->table);
JS_ASSERT(JSID_IS_EMPTY(shape->id));
JS_ASSERT(shape->slot == SHAPE_INVALID_SLOT);
}
}
}
#else
@ -853,7 +842,6 @@ JSObject::addPropertyInternal(JSContext *cx, jsid id,
}
#ifdef DEBUG
LIVE_SCOPE_METER(cx, ++cx->runtime->liveObjectProps);
JS_RUNTIME_METER(cx->runtime, totalObjectProps);
#endif
CHECK_SHAPE_CONSISTENCY(this);
METER(adds);
@ -990,7 +978,7 @@ JSObject::putProperty(JSContext *cx, jsid id,
* we regenerate only lastProp->shape. We will clearOwnShape(), which
* sets objShape to lastProp->shape.
*/
lastProp->shape = js_GenerateShape(cx, false);
lastProp->shape = js_GenerateShape(cx);
clearOwnShape();
} else {
/*
@ -1101,7 +1089,7 @@ JSObject::changeProperty(JSContext *cx, const Shape *shape, uintN attrs, uintN m
updateFlags(shape);
/* See the corresponding code in putProperty. */
lastProp->shape = js_GenerateShape(cx, false);
lastProp->shape = js_GenerateShape(cx);
clearOwnShape();
if (!js_UpdateWatchpointsForShape(cx, this, shape)) {
@ -1340,7 +1328,7 @@ JSObject::generateOwnShape(JSContext *cx)
tr->forgetGuardedShapesForObject(this);
#endif
setOwnShape(js_GenerateShape(cx, false));
setOwnShape(js_GenerateShape(cx));
}
void
@ -1479,6 +1467,9 @@ PrintPropertyMethod(JSTracer *trc, char *buf, size_t bufsize)
void
Shape::trace(JSTracer *trc) const
{
JSRuntime *rt = trc->context->runtime;
JS_ASSERT_IF(rt->gcCurrentCompartment, compartment == rt->gcCurrentCompartment);
if (IS_GC_MARKING_TRACER(trc))
mark();

View File

@ -306,21 +306,15 @@ struct Shape : public JSObjectMap
public:
inline void freeTable(JSContext *cx);
static bool initRuntimeState(JSContext *cx);
static void finishRuntimeState(JSContext *cx);
enum {
EMPTY_ARGUMENTS_SHAPE = 1,
EMPTY_BLOCK_SHAPE = 2,
EMPTY_CALL_SHAPE = 3,
EMPTY_DECL_ENV_SHAPE = 4,
EMPTY_ENUMERATOR_SHAPE = 5,
EMPTY_WITH_SHAPE = 6,
LAST_RESERVED_SHAPE = 6
};
static bool initEmptyShapes(JSCompartment *comp);
static void finishEmptyShapes(JSCompartment *comp);
jsid id;
#ifdef DEBUG
JSCompartment *compartment;
#endif
protected:
union {
js::PropertyOp rawGetter; /* getter and setter hooks or objects */
@ -510,7 +504,7 @@ struct Shape : public JSObjectMap
uintN flags, intN shortid, uint32 shape = INVALID_SHAPE, uint32 slotSpan = 0);
/* Used by EmptyShape (see jsscopeinlines.h). */
Shape(JSContext *cx, Class *aclasp);
Shape(JSCompartment *comp, Class *aclasp);
bool marked() const { return (flags & MARK) != 0; }
void mark() const { flags |= MARK; }
@ -638,15 +632,22 @@ struct Shape : public JSObjectMap
struct EmptyShape : public js::Shape
{
EmptyShape(JSContext *cx, js::Class *aclasp);
EmptyShape(JSCompartment *comp, js::Class *aclasp);
js::Class *getClass() const { return clasp; };
static EmptyShape *create(JSCompartment *comp, js::Class *clasp) {
js::Shape *eprop = comp->propertyTree.newShapeUnchecked();
if (!eprop)
return NULL;
return new (eprop) EmptyShape(comp, clasp);
}
static EmptyShape *create(JSContext *cx, js::Class *clasp) {
js::Shape *eprop = JS_PROPERTY_TREE(cx).newShape(cx);
if (!eprop)
return NULL;
return new (eprop) EmptyShape(cx, clasp);
return new (eprop) EmptyShape(cx->compartment, clasp);
}
};
@ -737,6 +738,7 @@ JSObject::setLastProperty(const js::Shape *shape)
JS_ASSERT(!inDictionaryMode());
JS_ASSERT(!JSID_IS_VOID(shape->id));
JS_ASSERT_IF(lastProp, !JSID_IS_VOID(lastProp->id));
JS_ASSERT(shape->compartment == compartment());
lastProp = const_cast<js::Shape *>(shape);
}
@ -805,12 +807,11 @@ Shape::insertIntoDictionary(js::Shape **dictp)
((shape)->hasShortID() ? INT_TO_JSID((shape)->shortid) \
: (shape)->id)
#ifndef JS_THREADSAFE
# define js_GenerateShape(cx, gcLocked) js_GenerateShape (cx)
#endif
extern uint32
js_GenerateShape(JSRuntime *rt);
extern uint32
js_GenerateShape(JSContext *cx, bool gcLocked);
js_GenerateShape(JSContext *cx);
#ifdef DEBUG
struct JSScopeStats {

View File

@ -107,7 +107,7 @@ JSObject::updateShape(JSContext *cx)
JS_ASSERT(isNative());
js::LeaveTraceIfGlobalObject(cx, this);
if (hasOwnShape())
setOwnShape(js_GenerateShape(cx, false));
setOwnShape(js_GenerateShape(cx));
else
objShape = lastProp->shape;
}
@ -146,13 +146,13 @@ JSObject::trace(JSTracer *trc)
* it must have the same shape as lastProp.
*/
if (!shape->hasRegenFlag()) {
shape->shape = js_RegenerateShapeForGC(cx);
shape->shape = js_RegenerateShapeForGC(cx->runtime);
shape->setRegenFlag();
}
uint32 newShape = shape->shape;
if (hasOwnShape()) {
newShape = js_RegenerateShapeForGC(cx);
newShape = js_RegenerateShapeForGC(cx->runtime);
JS_ASSERT(newShape != shape->shape);
}
objShape = newShape;
@ -180,10 +180,18 @@ Shape::Shape(jsid id, js::PropertyOp getter, js::PropertyOp setter, uint32 slot,
}
inline
Shape::Shape(JSContext *cx, Class *aclasp)
: JSObjectMap(js_GenerateShape(cx, false), JSSLOT_FREE(aclasp)), numSearches(0), table(NULL),
id(JSID_EMPTY), clasp(aclasp), rawSetter(NULL), slot(SHAPE_INVALID_SLOT), attrs(0),
flags(SHARED_EMPTY), shortid(0), parent(NULL)
Shape::Shape(JSCompartment *comp, Class *aclasp)
: JSObjectMap(js_GenerateShape(comp->rt), JSSLOT_FREE(aclasp)),
numSearches(0),
table(NULL),
id(JSID_EMPTY),
clasp(aclasp),
rawSetter(NULL),
slot(SHAPE_INVALID_SLOT),
attrs(0),
flags(SHARED_EMPTY),
shortid(0),
parent(NULL)
{
kids.setNull();
}
@ -276,12 +284,12 @@ Shape::set(JSContext* cx, JSObject* obj, js::Value* vp) const
}
inline
EmptyShape::EmptyShape(JSContext *cx, js::Class *aclasp)
: js::Shape(cx, aclasp)
EmptyShape::EmptyShape(JSCompartment *comp, js::Class *aclasp)
: js::Shape(comp, aclasp)
{
#ifdef DEBUG
if (cx->runtime->meterEmptyShapes())
cx->runtime->emptyShapes.put(this);
if (comp->rt->meterEmptyShapes())
comp->emptyShapes.put(this);
#endif
}

View File

@ -52,14 +52,14 @@ namespace js {
inline
Bindings::Bindings(JSContext *cx)
: lastBinding(cx->runtime->emptyCallShape), nargs(0), nvars(0), nupvars(0)
: lastBinding(cx->compartment->emptyCallShape), nargs(0), nvars(0), nupvars(0)
{
}
inline void
Bindings::transfer(JSContext *cx, Bindings *bindings)
{
JS_ASSERT(lastBinding == cx->runtime->emptyCallShape);
JS_ASSERT(lastBinding == cx->compartment->emptyCallShape);
*this = *bindings;
#ifdef DEBUG
@ -74,7 +74,7 @@ Bindings::transfer(JSContext *cx, Bindings *bindings)
inline void
Bindings::clone(JSContext *cx, Bindings *bindings)
{
JS_ASSERT(lastBinding == cx->runtime->emptyCallShape);
JS_ASSERT(lastBinding == cx->compartment->emptyCallShape);
/*
* Non-dictionary bindings are fine to share, as are dictionary bindings if