Bug 961969 - Assert that all shapes in a lineage have the same numFixedSlots (r=billm)

--HG--
extra : rebase_source : 044a5e199bca060f6f854a8f01b9d48970c0e3db
This commit is contained in:
Luke Wagner 2014-01-21 16:25:37 -06:00
parent 85cd622dfd
commit 3a24b74ad5
5 changed files with 102 additions and 106 deletions

View File

@@ -126,70 +126,68 @@ Shape::removeChild(Shape *child)
}
Shape *
PropertyTree::getChild(ExclusiveContext *cx, Shape *parent_, const StackShape &child)
PropertyTree::getChild(ExclusiveContext *cx, Shape *parentArg, const StackShape &child)
{
{
Shape *shape = nullptr;
RootedShape parent(cx, parentArg);
JS_ASSERT(parent);
JS_ASSERT(parent_);
Shape *existingShape = nullptr;
/*
* The property tree has extremely low fan-out below its root in
* popular embeddings with real-world workloads. Patterns such as
* defining closures that capture a constructor's environment as
* getters or setters on the new object that is passed in as
* |this| can significantly increase fan-out below the property
* tree root -- see bug 335700 for details.
*/
KidsPointer *kidp = &parent_->kids;
if (kidp->isShape()) {
Shape *kid = kidp->toShape();
if (kid->matches(child))
shape = kid;
} else if (kidp->isHash()) {
if (KidsHash::Ptr p = kidp->toHash()->lookup(child))
shape = *p;
} else {
/* If kidp->isNull(), we always insert. */
}
#ifdef JSGC_INCREMENTAL
if (shape) {
JS::Zone *zone = shape->zone();
if (zone->needsBarrier()) {
/*
* We need a read barrier for the shape tree, since these are weak
* pointers.
*/
Shape *tmp = shape;
MarkShapeUnbarriered(zone->barrierTracer(), &tmp, "read barrier");
JS_ASSERT(tmp == shape);
} else if (zone->isGCSweeping() && !shape->isMarked() &&
!shape->arenaHeader()->allocatedDuringIncremental)
{
/*
* The shape we've found is unreachable and due to be finalized, so
* remove our weak reference to it and don't use it.
*/
JS_ASSERT(parent_->isMarked());
parent_->removeChild(shape);
shape = nullptr;
}
}
#endif
if (shape)
return shape;
/*
* The property tree has extremely low fan-out below its root in
* popular embeddings with real-world workloads. Patterns such as
* defining closures that capture a constructor's environment as
* getters or setters on the new object that is passed in as
* |this| can significantly increase fan-out below the property
* tree root -- see bug 335700 for details.
*/
KidsPointer *kidp = &parent->kids;
if (kidp->isShape()) {
Shape *kid = kidp->toShape();
if (kid->matches(child))
existingShape = kid;
} else if (kidp->isHash()) {
if (KidsHash::Ptr p = kidp->toHash()->lookup(child))
existingShape = *p;
} else {
/* If kidp->isNull(), we always insert. */
}
#ifdef JSGC_INCREMENTAL
if (existingShape) {
JS::Zone *zone = existingShape->zone();
if (zone->needsBarrier()) {
/*
* We need a read barrier for the shape tree, since these are weak
* pointers.
*/
Shape *tmp = existingShape;
MarkShapeUnbarriered(zone->barrierTracer(), &tmp, "read barrier");
JS_ASSERT(tmp == existingShape);
} else if (zone->isGCSweeping() && !existingShape->isMarked() &&
!existingShape->arenaHeader()->allocatedDuringIncremental)
{
/*
* The shape we've found is unreachable and due to be finalized, so
* remove our weak reference to it and don't use it.
*/
JS_ASSERT(parent->isMarked());
parent->removeChild(existingShape);
existingShape = nullptr;
}
}
#endif
if (existingShape)
return existingShape;
StackShape::AutoRooter childRoot(cx, &child);
RootedShape parent(cx, parent_);
Shape *shape = newShape(cx);
if (!shape)
return nullptr;
new (shape) Shape(child, child.numFixedSlots());
new (shape) Shape(child, parent->numFixedSlots());
if (!insertChild(cx, parent, shape))
return nullptr;

View File

@@ -82,24 +82,35 @@ Bindings::initWithTemporaryStorage(ExclusiveContext *cx, InternalBindingsHandle
self->numArgs_ = numArgs;
self->numVars_ = numVars;
/*
* Get the initial shape to use when creating CallObjects for this script.
* Since unaliased variables are, by definition, only accessed by local
* operations and never through the scope chain, only give shapes to
* aliased variables. While the debugger may observe any scope object at
* any time, such accesses are mediated by DebugScopeProxy (see
* DebugScopeProxy::handleUnaliasedAccess).
*/
// Get the initial shape to use when creating CallObjects for this script.
// After creation, a CallObject's shape may change completely (via direct eval() or
// other operations that mutate the lexical scope). However, since the
// lexical bindings added to the initial shape are permanent and the
// allocKind/nfixed of a CallObject cannot change, one may assume that the
// slot location (whether in the fixed or dynamic slots) of a variable is
// the same as in the initial shape. (This is assumed by the interpreter and
// JITs when interpreting/compiling aliasedvar ops.)
JS_STATIC_ASSERT(CallObject::RESERVED_SLOTS == 2);
gc::AllocKind allocKind = gc::FINALIZE_OBJECT2_BACKGROUND;
JS_ASSERT(gc::GetGCKindSlots(allocKind) == CallObject::RESERVED_SLOTS);
RootedShape initial(cx,
// Since unaliased variables are, by definition, only accessed by local
// operations and never through the scope chain, only give shapes to
// aliased variables. While the debugger may observe any scope object at
// any time, such accesses are mediated by DebugScopeProxy (see
// DebugScopeProxy::handleUnaliasedAccess).
uint32_t nslots = CallObject::RESERVED_SLOTS;
for (BindingIter bi(self); bi; bi++) {
if (bi->aliased())
nslots++;
}
// Put as many of nslots inline into the object header as possible.
uint32_t nfixed = gc::GetGCKindSlots(gc::GetGCObjectKind(nslots));
// Start with the empty shape and then append one shape per aliased binding.
RootedShape shape(cx,
EmptyShape::getInitialShape(cx, &CallObject::class_, nullptr, nullptr, nullptr,
allocKind, BaseShape::VAROBJ | BaseShape::DELEGATE));
if (!initial)
nfixed, BaseShape::VAROBJ | BaseShape::DELEGATE));
if (!shape)
return false;
self->callObjShape_.init(initial);
#ifdef DEBUG
HashSet<PropertyName *> added(cx);
@@ -107,44 +118,41 @@ Bindings::initWithTemporaryStorage(ExclusiveContext *cx, InternalBindingsHandle
return false;
#endif
BindingIter bi(self);
uint32_t slot = CallObject::RESERVED_SLOTS;
for (uint32_t i = 0, n = self->count(); i < n; i++, bi++) {
for (BindingIter bi(self); bi; bi++) {
if (!bi->aliased())
continue;
#ifdef DEBUG
/* The caller ensures no duplicate aliased names. */
// The caller ensures no duplicate aliased names.
JS_ASSERT(!added.has(bi->name()));
if (!added.put(bi->name()))
return false;
#endif
StackBaseShape base(cx, &CallObject::class_, cx->global(), nullptr,
BaseShape::VAROBJ | BaseShape::DELEGATE);
StackBaseShape stackBase(cx, &CallObject::class_, nullptr, nullptr,
BaseShape::VAROBJ | BaseShape::DELEGATE);
UnownedBaseShape *nbase = BaseShape::getUnowned(cx, base);
if (!nbase)
UnownedBaseShape *base = BaseShape::getUnowned(cx, stackBase);
if (!base)
return false;
RootedId id(cx, NameToId(bi->name()));
uint32_t nfixed = gc::GetGCKindSlots(gc::GetGCObjectKind(slot + 1));
unsigned attrs = JSPROP_PERMANENT | JSPROP_ENUMERATE |
unsigned attrs = JSPROP_PERMANENT |
JSPROP_ENUMERATE |
(bi->kind() == CONSTANT ? JSPROP_READONLY : 0);
StackShape child(base, NameToId(bi->name()), slot, attrs, 0, 0);
StackShape child(nbase, id, slot, nfixed, attrs, 0, 0);
Shape *shape = cx->compartment()->propertyTree.getChild(cx, self->callObjShape_, child);
shape = cx->compartment()->propertyTree.getChild(cx, shape, child);
if (!shape)
return false;
self->callObjShape_ = shape;
JS_ASSERT(slot < nslots);
slot++;
}
JS_ASSERT(slot == nslots);
JS_ASSERT(!self->callObjShape_->inDictionary());
JS_ASSERT(!bi);
JS_ASSERT(!shape->inDictionary());
self->callObjShape_.init(shape);
return true;
}

View File

@@ -684,7 +684,7 @@ GlobalObject::addIntrinsicValue(JSContext *cx, HandleId id, HandleValue value)
RootedShape last(cx, holder->lastProperty());
Rooted<UnownedBaseShape*> base(cx, last->base()->unowned());
StackShape child(base, id, slot, holder->numFixedSlots(), 0, 0, 0);
StackShape child(base, id, slot, 0, 0, 0);
RootedShape shape(cx, cx->compartment()->propertyTree.getChild(cx, last, child));
if (!shape)
return false;

View File

@@ -620,7 +620,7 @@ JSObject::addPropertyInternal(typename ExecutionModeTraits<mode>::ExclusiveConte
return nullptr;
}
StackShape child(nbase, id, slot, obj->numFixedSlots(), attrs, flags, shortid);
StackShape child(nbase, id, slot, attrs, flags, shortid);
shape = getOrLookupChildProperty<mode>(cx, obj, last, child);
}
@@ -704,7 +704,7 @@ js::NewReshapedObject(JSContext *cx, HandleTypeObject type, JSObject *parent,
return nullptr;
}
StackShape child(nbase, id, i, res->numFixedSlots(), JSPROP_ENUMERATE, 0, 0);
StackShape child(nbase, id, i, JSPROP_ENUMERATE, 0, 0);
newShape = cx->compartment()->propertyTree.getChild(cx, newShape, child);
if (!newShape)
return nullptr;
@@ -840,7 +840,7 @@ JSObject::putProperty(typename ExecutionModeTraits<mode>::ExclusiveContextType c
* Now that we've possibly preserved slot, check whether all members match.
* If so, this is a redundant "put" and we can return without more work.
*/
if (shape->matchesParamsAfterId(nbase, slot, obj->numFixedSlots(), attrs, flags, shortid))
if (shape->matchesParamsAfterId(nbase, slot, attrs, flags, shortid))
return shape;
/*
@@ -901,7 +901,7 @@ JSObject::putProperty(typename ExecutionModeTraits<mode>::ExclusiveContextType c
JS_ASSERT(shape == obj->lastProperty());
/* Find or create a property tree node labeled by our arguments. */
StackShape child(nbase, id, slot, obj->numFixedSlots(), attrs, flags, shortid);
StackShape child(nbase, id, slot, attrs, flags, shortid);
RootedShape parent(cx, shape->parent);
Shape *newShape = getOrLookupChildProperty<mode>(cx, obj, parent, child);

View File

@@ -1146,18 +1146,17 @@ class Shape : public gc::BarrieredCell<Shape>
bool matches(const Shape *other) const {
return propid_.get() == other->propid_.get() &&
matchesParamsAfterId(other->base(), other->maybeSlot(), other->numFixedSlots(),
other->attrs, other->flags, other->shortid_);
matchesParamsAfterId(other->base(), other->maybeSlot(), other->attrs, other->flags,
other->shortid_);
}
inline bool matches(const StackShape &other) const;
bool matchesParamsAfterId(BaseShape *base, uint32_t aslot, uint32_t afixed, unsigned aattrs,
unsigned aflags, int ashortid) const
bool matchesParamsAfterId(BaseShape *base, uint32_t aslot, unsigned aattrs, unsigned aflags,
int ashortid) const
{
return base->unowned() == this->base()->unowned() &&
maybeSlot() == aslot &&
numFixedSlots() == afixed &&
attrs == aattrs &&
((flags ^ aflags) & PUBLIC_FLAGS) == 0 &&
shortid_ == ashortid;
@@ -1502,17 +1501,15 @@ struct StackShape
UnownedBaseShape *base;
jsid propid;
uint32_t slot_;
uint32_t nfixed_;
uint8_t attrs;
uint8_t flags;
int16_t shortid;
explicit StackShape(UnownedBaseShape *base, jsid propid, uint32_t slot,
uint32_t nfixed, unsigned attrs, unsigned flags, int shortid)
unsigned attrs, unsigned flags, int shortid)
: base(base),
propid(propid),
slot_(slot),
nfixed_(nfixed),
attrs(uint8_t(attrs)),
flags(uint8_t(flags)),
shortid(int16_t(shortid))
@@ -1526,7 +1523,6 @@ struct StackShape
: base(shape->base()->unowned()),
propid(shape->propidRef()),
slot_(shape->maybeSlot()),
nfixed_(shape->numFixedSlots()),
attrs(shape->attrs),
flags(shape->flags),
shortid(shape->shortid_)
@@ -1548,10 +1544,6 @@ struct StackShape
slot_ = slot;
}
uint32_t numFixedSlots() const {
return nfixed_;
}
HashNumber hash() const {
HashNumber hash = uintptr_t(base);
@@ -1560,7 +1552,6 @@ struct StackShape
hash = mozilla::RotateLeft(hash, 4) ^ attrs;
hash = mozilla::RotateLeft(hash, 4) ^ shortid;
hash = mozilla::RotateLeft(hash, 4) ^ slot_;
hash = mozilla::RotateLeft(hash, 4) ^ nfixed_;
hash = mozilla::RotateLeft(hash, 4) ^ JSID_BITS(propid);
return hash;
}
@@ -1710,8 +1701,7 @@ inline bool
Shape::matches(const StackShape &other) const
{
return propid_.get() == other.propid &&
matchesParamsAfterId(other.base, other.slot_, other.nfixed_, other.attrs, other.flags,
other.shortid);
matchesParamsAfterId(other.base, other.slot_, other.attrs, other.flags, other.shortid);
}
template<> struct RootKind<Shape *> : SpecificRootKind<Shape *, THING_ROOT_SHAPE> {};